prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>cpp_code_generator.rs<|end_file_name|><|fim▁begin|>use crate::config::Config; use crate::cpp_checks::Condition; use crate::cpp_ffi_data::{ CppFfiArgumentMeaning, CppFfiFunctionKind, CppFfiType, CppFieldAccessorType, CppToFfiTypeConversion, QtSignalWrapper, QtSlotWrapper, }; use crate::cpp_ffi_data::{CppFfiFunction, CppFfiItem}; use crate::cpp_function::{CppFunction, ReturnValueAllocationPlace}; use crate::cpp_type::CppPointerLikeTypeKind; use crate::cpp_type::CppType; use crate::database::{DatabaseClient, DbItem}; use crate::rust_info::{RustItem, RustStructKind}; use itertools::Itertools; use ritual_common::cpp_lib_builder::version_to_number; use ritual_common::errors::{bail, err_msg, format_err, Result}; use ritual_common::file_utils::{create_file, os_str_to_str, path_to_str, read_dir}; use ritual_common::utils::MapIfOk; use std::collections::HashSet; use std::io::Write; use std::iter::once; use std::path::{Path, PathBuf}; struct Generator<'a>(&'a DatabaseClient); <|fim▁hole|> /// Generates function name, return type and arguments list /// as it appears in both function declaration and implementation. fn function_signature(&self, method: &CppFfiFunction) -> Result<String> { let mut arg_texts = Vec::new(); for arg in &method.arguments { arg_texts.push(arg.to_cpp_code()?); } let name_with_args = format!("{}({})", method.path.to_cpp_code()?, arg_texts.join(", ")); let return_type = method.return_type.ffi_type(); let r = if let CppType::FunctionPointer(..) = return_type { return_type.to_cpp_code(Some(&name_with_args))? 
} else { format!("{} {}", return_type.to_cpp_code(None)?, name_with_args) }; Ok(r) } /// Generates code for a Qt slot wrapper fn qt_slot_wrapper(&self, wrapper: &QtSlotWrapper) -> Result<String> { let func_type = CppType::FunctionPointer(wrapper.function_type.clone()); let method_args = wrapper .arguments .iter() .enumerate() .map_if_ok(|(num, t)| -> Result<_> { let arg_type = t.original_type().to_cpp_code(None)?; let arg_type = CppFunction::patch_receiver_argument_type(&arg_type); Ok(format!("{} arg{}", arg_type, num)) })? .join(", "); let func_args = once("m_callback.data()".to_string()) .chain( wrapper .arguments .iter() .enumerate() .map_if_ok(|(num, t)| self.convert_type_to_ffi(t, format!("arg{}", num)))?, ) .join(", "); Ok(format!( include_str!("../templates/c_lib/qt_slot_wrapper.h"), class_name = wrapper.class_path.to_cpp_code()?, callback_arg = func_type.to_cpp_code(Some("callback"))?, callback_type = func_type.to_cpp_code(Some(""))?, method_args = method_args, func_args = func_args )) } /// Generates code for a Qt signal wrapper fn qt_signal_wrapper(&self, wrapper: &QtSignalWrapper) -> Result<String> { let method_args = wrapper .signal_arguments .iter() .enumerate() .map_if_ok(|(num, t)| -> Result<_> { let arg_type = t.to_cpp_code(None)?; let arg_type = CppFunction::patch_receiver_argument_type(&arg_type); Ok(format!("{} arg{}", arg_type, num)) })? .join(", "); Ok(format!( include_str!("../templates/c_lib/qt_signal_wrapper.h"), class_name = wrapper.class_path.to_cpp_code()?, method_args = method_args, signal_impl = if self.0.crate_name().starts_with("moqt_") { "{}" } else { ";" } )) } /// Generates code that wraps `expression` of type `type1.original_type` and /// converts it to type `type1.ffi_type` fn convert_type_to_ffi(&self, type1: &CppFfiType, expression: String) -> Result<String> { Ok(match type1.conversion() { CppToFfiTypeConversion::NoChange | CppToFfiTypeConversion::ImplicitCast { .. } => { expression } CppToFfiTypeConversion::ValueToPointer { .. 
} => format!( "new {}({})", type1.original_type().to_cpp_code(None)?, expression ), CppToFfiTypeConversion::ReferenceToPointer => format!("&{}", expression), CppToFfiTypeConversion::QFlagsToInt => format!("int({})", expression), }) } /// Wraps `expression` returned by the original C++ method to /// convert it to return type of the FFI method. fn convert_return_type( &self, item: DbItem<&CppFfiFunction>, expression: String, ) -> Result<String> { let cpp_item = self .0 .source_cpp_item(&item.id)? .ok_or_else(|| format_err!("failed to find original cpp item for {:?}", item))?; let is_constructor = cpp_item .item .as_function_ref() .map_or(false, |f| f.is_constructor()); let method = item.item; let mut result = expression; match method.return_type.conversion() { CppToFfiTypeConversion::NoChange | CppToFfiTypeConversion::ImplicitCast { .. } => {} CppToFfiTypeConversion::ValueToPointer { .. } => { match method.allocation_place { ReturnValueAllocationPlace::Stack => { bail!("stack allocated wrappers are expected to return void"); } ReturnValueAllocationPlace::NotApplicable => { bail!("ValueToPointer conflicts with NotApplicable"); } ReturnValueAllocationPlace::Heap => { // constructors are said to return values in parse result, // but in reality we use `new` which returns a pointer, // so no conversion is necessary for constructors. 
if !is_constructor { result = format!( "new {}({})", method.return_type.original_type().to_cpp_code(None)?, result ); } } } } CppToFfiTypeConversion::ReferenceToPointer => { result = format!("&{}", result); } CppToFfiTypeConversion::QFlagsToInt => { result = format!("int({})", result); } } if method.allocation_place == ReturnValueAllocationPlace::Stack && !is_constructor { if let Some(arg) = method .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::ReturnValue) { let type1 = arg.argument_type.ffi_type().pointer_like_to_target()?; result = format!("new({}) {}({})", arg.name, type1.to_cpp_code(None)?, result); } } Ok(result) } /// Generates code for values passed to the original C++ method. fn arguments_values(&self, method: &CppFfiFunction) -> Result<String> { let r = method .arguments .iter() .filter(|arg| arg.meaning.is_argument()) .map_if_ok(|argument| -> Result<_> { let mut result = argument.name.clone(); match argument.argument_type.conversion() { CppToFfiTypeConversion::ValueToPointer { .. } | CppToFfiTypeConversion::ReferenceToPointer => result = format!("*{}", result), CppToFfiTypeConversion::NoChange | CppToFfiTypeConversion::ImplicitCast { .. } => {} CppToFfiTypeConversion::QFlagsToInt => { let type_text = if let CppType::PointerLike { kind, is_const, target, } = argument.argument_type.original_type() { if *kind == CppPointerLikeTypeKind::Reference && *is_const { target.to_cpp_code(None)? } else { bail!("Unsupported original type for QFlagsToUInt conversion"); } } else { argument.argument_type.original_type().to_cpp_code(None)? }; result = format!("{}({})", type_text, result); } } Ok(result) })?; Ok(r.join(", ")) } /// Generates code for the value returned by the FFI method. #[allow(clippy::collapsible_if)] fn returned_expression(&self, item: DbItem<&CppFfiFunction>) -> Result<String> { let cpp_item = self .0 .source_cpp_item(&item.id)? 
.ok_or_else(|| format_err!("failed to find original cpp item for {:?}", item))?; let is_destructor = cpp_item .item .as_function_ref() .map_or(false, |f| f.is_destructor()); let method = item.item; let result = if is_destructor { if let Some(arg) = method .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::This) { format!("ritual::call_destructor({})", arg.name) } else { bail!("no this arg in destructor"); } } else { let result_without_args = if let Some(cpp_function) = cpp_item .item .as_function_ref() .filter(|m| m.is_constructor()) { match method.allocation_place { ReturnValueAllocationPlace::Stack => { if let Some(arg) = method .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::ReturnValue) { format!( "new({}) {}", arg.name, cpp_function.class_path()?.to_cpp_code()? ) } else { bail!("return value argument not found\n{:?}", method); } } ReturnValueAllocationPlace::Heap => { format!("new {}", cpp_function.class_path()?.to_cpp_code()?) } ReturnValueAllocationPlace::NotApplicable => { bail!("NotApplicable in constructor"); } } } else { let path = cpp_item.item.path().ok_or_else(|| { err_msg("cpp item (function or field) expected to have a path") })?; if let Some(arg) = method .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::This) { format!("{}->{}", arg.name, path.last().to_cpp_code()?) } else { path.to_cpp_code()? } }; if let CppFfiFunctionKind::FieldAccessor { accessor_type, .. } = &method.kind { if accessor_type == &CppFieldAccessorType::Setter { format!( "{} = {}", result_without_args, self.arguments_values(method)? ) } else { result_without_args } } else { format!( "{}({})", result_without_args, self.arguments_values(method)? ) } }; self.convert_return_type(item, result) } /// Generates body of the FFI method implementation. fn source_body(&self, item: DbItem<&CppFfiFunction>) -> Result<String> { let cpp_item = self .0 .source_cpp_item(&item.id)? 
.ok_or_else(|| format_err!("failed to find original cpp item for {:?}", item))?; let is_destructor = cpp_item .item .as_function_ref() .map_or(false, |f| f.is_destructor()); let method = item.item; if is_destructor && method.allocation_place == ReturnValueAllocationPlace::Heap { if let Some(arg) = method .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::This) { Ok(format!("delete {};\n", arg.name)) } else { panic!("Error: no this argument found\n{:?}", method); } } else { Ok(format!( "{}{};\n", if method.return_type.ffi_type().is_void() { "" } else { "return " }, self.returned_expression(item)? )) } } /// Generates implementation of the FFI method for the source file. fn function_implementation(&self, method: DbItem<&CppFfiFunction>) -> Result<String> { Ok(format!( "RITUAL_EXPORT {} {{\n {}}}\n\n", self.function_signature(method.item)?, self.source_body(method)? )) } fn condition_expression(&self, condition: &Condition) -> String { match condition { Condition::CppLibraryVersion(version) => { let value = version_to_number(version).expect("version_to_number failed"); format!("RITUAL_CPP_LIB_VERSION == {}", value) } Condition::Arch(_) => unimplemented!(), Condition::OS(_) => unimplemented!(), Condition::Family(_) => unimplemented!(), Condition::Env(_) => unimplemented!(), Condition::PointerWidth(_) => unimplemented!(), Condition::Endian(_) => unimplemented!(), Condition::And(conditions) => conditions .iter() .map(|c| format!("({})", self.condition_expression(c))) .join("&&"), Condition::Or(conditions) => conditions .iter() .map(|c| format!("({})", self.condition_expression(c))) .join("||"), Condition::Not(condition) => format!("!({})", self.condition_expression(condition)), Condition::True => "true".to_string(), Condition::False => "false".to_string(), } } fn wrap_with_condition(&self, code: &str, condition: &Condition) -> String { if condition == &Condition::True { return code.to_string(); } format!( "#if {}\n{}\n#endif\n", 
self.condition_expression(condition), code ) } /// Generates a source file with the specified FFI methods. fn generate_cpp_file(&self, file_path: &Path, global_header_name: &str) -> Result<()> { let mut cpp_file = create_file(file_path)?; writeln!(cpp_file, "#include \"{}\"", global_header_name)?; let used_ffi_functions = self .0 .rust_items() .filter_map(|item| item.item.as_function_ref()) .filter(|item| item.kind.is_ffi_function()) .map(|item| item.path.last()) .collect::<HashSet<&str>>(); let ffi_items = self .0 .ffi_items() .filter(|item| { !item.item.is_function() || used_ffi_functions.contains(item.item.path().last().name.as_str()) }) .collect_vec(); let mut needs_moc = false; for ffi_item in &ffi_items { match &ffi_item.item { CppFfiItem::QtSlotWrapper(qt_slot_wrapper) => { let checks = self.0.cpp_checks(&ffi_item.id)?; if !checks.any_success() { continue; } needs_moc = true; let condition = checks.condition(self.0.environments()); let code = self.qt_slot_wrapper(qt_slot_wrapper)?; write!(cpp_file, "{}", self.wrap_with_condition(&code, &condition))?; } CppFfiItem::QtSignalWrapper(qt_signal_wrapper) => { let checks = self.0.cpp_checks(&ffi_item.id)?; if !checks.any_success() { continue; } needs_moc = true; let condition = checks.condition(self.0.environments()); let code = self.qt_signal_wrapper(qt_signal_wrapper)?; write!(cpp_file, "{}", self.wrap_with_condition(&code, &condition))?; } _ => {} } } writeln!(cpp_file, "extern \"C\" {{")?; for ffi_item in &ffi_items { if let Some(item) = ffi_item.clone().filter_map(|item| item.as_function_ref()) { let checks = self.0.cpp_checks(&ffi_item.id)?; if !checks.any_success() { continue; } let condition = checks.condition(self.0.environments()); let code = self.function_implementation(item)?; writeln!(cpp_file, "{}", self.wrap_with_condition(&code, &condition))?; } } writeln!(cpp_file, "}} // extern \"C\"")?; if needs_moc && !self.0.crate_name().starts_with("moqt_") { let stem = file_path .file_stem() .ok_or_else(|| 
err_msg("failed to get file stem"))?; writeln!(cpp_file, "#include \"{}.moc\"", os_str_to_str(stem)?)?; } Ok(()) } /// Generates a C++ program that determines sizes of target C++ types /// on the current platform and outputs the Rust code for `sized_types.rs` module /// to the standard output. fn generate_cpp_type_size_requester( &self, include_directives: &[PathBuf], mut output: impl Write, ) -> Result<()> { for dir in include_directives { writeln!(output, "#include <{}>", path_to_str(dir)?)?; } writeln!(output, "#include <stdio.h>\n\nint main() {{")?; let rust_items = self.0.rust_items().map(|i| i.item); for item in rust_items { if let RustItem::Struct(data) = item { if let RustStructKind::SizedType(sized_type) = &data.kind { let cpp_path_code = sized_type.cpp_path.to_cpp_code()?; writeln!( output, "printf(\"#[repr(C, align(%zu))]\\n\", alignof({}));", cpp_path_code )?; writeln!( output, "printf(\"pub struct {}([u8; %zu]);\\n\\n\", sizeof({}));", data.path.last(), cpp_path_code )?; } } } writeln!(output, "}}")?; Ok(()) } } pub fn function_implementation( db: &DatabaseClient, method: DbItem<&CppFfiFunction>, ) -> Result<String> { Generator(db).function_implementation(method) } pub fn qt_slot_wrapper(db: &DatabaseClient, wrapper: &QtSlotWrapper) -> Result<String> { Generator(db).qt_slot_wrapper(wrapper) } pub fn qt_signal_wrapper(db: &DatabaseClient, wrapper: &QtSignalWrapper) -> Result<String> { Generator(db).qt_signal_wrapper(wrapper) } pub fn generate_cpp_file( db: &DatabaseClient, file_path: &Path, global_header_name: &str, ) -> Result<()> { Generator(db).generate_cpp_file(file_path, global_header_name) } pub fn generate_cpp_type_size_requester( db: &DatabaseClient, include_directives: &[PathBuf], output: impl Write, ) -> Result<()> { Generator(db).generate_cpp_type_size_requester(include_directives, output) } pub fn all_include_directives(config: &Config) -> Result<Vec<PathBuf>> { let mut all_include_directives = config.include_directives().to_vec(); if let 
Some(crate_template_path) = config.crate_template_path() { let extra_template = crate_template_path.join("c_lib/extra"); if extra_template.exists() { for item in read_dir(&extra_template)? { all_include_directives.push(PathBuf::from(format!( "extra/{}", os_str_to_str(&item?.file_name())? ))); } } } Ok(all_include_directives) } pub fn write_include_directives(mut destination: impl Write, directives: &[PathBuf]) -> Result<()> { for directive in directives { writeln!( &mut destination, "#include \"{}\"", path_to_str(&directive)? )?; } Ok(()) }<|fim▁end|>
impl Generator<'_> {
<|file_name|>ethiopic.js.uncompressed.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1 oid sha256:f09386339b7a083e92b706c491817eb6f328805c49481a614f996a194d761fdc<|fim▁hole|><|fim▁end|>
size 1848
<|file_name|>angular-locale_wae-ch.js<|end_file_name|><|fim▁begin|>'use strict'; angular.module("ngLocale", [], ["$provide", function ($provide) { var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"}; function getDecimals(n) { n = n + ''; var i = n.indexOf('.'); return (i == -1) ? 0 : n.length - i - 1; } function getVF(n, opt_precision) { var v = opt_precision; if (undefined === v) { v = Math.min(getDecimals(n), 3); } var base = Math.pow(10, v); var f = ((n * base) | 0) % base; return {v: v, f: f}; } $provide.value("$locale", { "DATETIME_FORMATS": { "AMPMS": [ "AM", "PM" ], "DAY": [ "Sunntag", "M\u00e4ntag", "Zi\u0161tag", "Mittwu\u010d", "Fr\u00f3ntag", "Fritag", "Sam\u0161tag" ], "ERANAMES": [ "v. Chr.", "n. Chr" ], "ERAS": [ "v. Chr.", "n. Chr" ], "FIRSTDAYOFWEEK": 0, "MONTH": [ "Jenner", "Hornig", "M\u00e4rze", "Abrille", "Meije", "Br\u00e1\u010det", "Heiwet", "\u00d6ig\u0161te", "Herb\u0161tm\u00e1net", "W\u00edm\u00e1net", "Winterm\u00e1net", "Chri\u0161tm\u00e1net" ], "SHORTDAY": [ "Sun", "M\u00e4n", "Zi\u0161", "Mit", "Fr\u00f3", "Fri", "Sam" ], "SHORTMONTH": [ "Jen", "Hor", "M\u00e4r", "Abr", "Mei", "Br\u00e1", "Hei", "\u00d6ig", "Her", "W\u00edm", "Win", "Chr" ], "STANDALONEMONTH": [ "Jenner", "Hornig", "M\u00e4rze", "Abrille", "Meije", "Br\u00e1\u010det", "Heiwet", "\u00d6ig\u0161te", "Herb\u0161tm\u00e1net", "W\u00edm\u00e1net", "Winterm\u00e1net", "Chri\u0161tm\u00e1net" ],<|fim▁hole|> "fullDate": "EEEE, d. MMMM y", "longDate": "d. MMMM y", "medium": "d. MMM y HH:mm:ss", "mediumDate": "d. 
MMM y", "mediumTime": "HH:mm:ss", "short": "y-MM-dd HH:mm", "shortDate": "y-MM-dd", "shortTime": "HH:mm" }, "NUMBER_FORMATS": { "CURRENCY_SYM": "CHF", "DECIMAL_SEP": ",", "GROUP_SEP": "\u2019", "PATTERNS": [ { "gSize": 3, "lgSize": 3, "maxFrac": 3, "minFrac": 0, "minInt": 1, "negPre": "-", "negSuf": "", "posPre": "", "posSuf": "" }, { "gSize": 3, "lgSize": 3, "maxFrac": 2, "minFrac": 2, "minInt": 1, "negPre": "-\u00a4\u00a0", "negSuf": "", "posPre": "\u00a4\u00a0", "posSuf": "" } ] }, "id": "wae-ch", "localeID": "wae_CH", "pluralCat": function (n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER; } }); }]);<|fim▁end|>
"WEEKENDRANGE": [ 5, 6 ],
<|file_name|>limit_offset_clause.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> /// A helper query node that contains both limit and offset clauses /// /// This type is only relevant for implementing custom backends #[derive(Debug, Clone, Copy, QueryId)] pub struct LimitOffsetClause<Limit, Offset> { /// The limit clause pub limit_clause: Limit, /// The offset clause pub offset_clause: Offset, } /// A boxed variant of [`LimitOffsetClause`](crate::query_builder::LimitOffsetClause) /// /// This type is only relevant for implementing custom backends #[allow(missing_debug_implementations)] pub struct BoxedLimitOffsetClause<'a, DB> { /// The limit clause pub limit: Option<Box<dyn QueryFragment<DB> + Send + 'a>>, /// The offset clause pub offset: Option<Box<dyn QueryFragment<DB> + Send + 'a>>, }<|fim▁end|>
use super::QueryFragment; use crate::query_builder::QueryId;
<|file_name|>macro_crate_test.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // force-host #![feature(globs, plugin_registrar, macro_rules, quote, managed_boxes)] extern crate syntax; extern crate rustc; use syntax::ast::{TokenTree, Item, MetaItem}; use syntax::codemap::Span; use syntax::ext::base::*; use syntax::parse::token; use rustc::plugin::Registry; use std::gc::{Gc, GC}; #[macro_export] macro_rules! exported_macro (() => (2i))<|fim▁hole|> #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("make_a_1", expand_make_a_1); reg.register_syntax_extension( token::intern("into_foo"), ItemModifier(expand_into_foo)); } fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult> { if !tts.is_empty() { cx.span_fatal(sp, "make_a_1 takes no arguments"); } MacExpr::new(quote_expr!(cx, 1i)) } fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>) -> Gc<Item> { box(GC) Item { attrs: it.attrs.clone(), ..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone() } } pub fn foo() {}<|fim▁end|>
macro_rules! unexported_macro (() => (3i))
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/** * @license Apache-2.0 * * Copyright (c) 2018 The Stdlib Authors. *<|fim▁hole|>* you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; /** * Lévy distribution median. * * @module @stdlib/stats/base/dists/levy/median * * @example * var median = require( '@stdlib/stats/base/dists/levy/median' ); * * var y = median( 0.0, 1.0 ); * // returns ~2.198 * * y = median( 4.0, 2.0 ); * // returns ~8.396 */ // MODULES // var median = require( './median.js' ); // EXPORTS // module.exports = median;<|fim▁end|>
* Licensed under the Apache License, Version 2.0 (the "License");
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
mod foo;
<|file_name|>flow-stats-in.cc<|end_file_name|><|fim▁begin|>#include "flow-stats-in.hh" <|fim▁hole|>Flow_stats::Flow_stats(const ofp_flow_stats* ofs) { *(ofp_flow_stats*) this = *ofs; const ofp_action_header* headers = ofs->actions; size_t n_actions = (ntohs(ofs->length) - sizeof *ofs) / sizeof *headers; v_actions.assign(headers, headers + n_actions); } Flow_stats_in_event::Flow_stats_in_event(const datapathid& dpid, const ofp_stats_reply *osr, std::auto_ptr<Buffer> buf) : Event(static_get_name()), Ofp_msg_event(&osr->header, buf), more((osr->flags & htons(OFPSF_REPLY_MORE)) != 0) { datapath_id = dpid; size_t flow_len = htons(osr->header.length) - sizeof *osr; const ofp_flow_stats* ofs = (ofp_flow_stats*) osr->body; while (flow_len >= sizeof *ofs) { size_t length = ntohs(ofs->length); if (length > flow_len) { break; } flows.push_back(Flow_stats(ofs)); ofs = (const ofp_flow_stats*)((const char*) ofs + length); flow_len -= length; } } } // namespace vigil<|fim▁end|>
namespace vigil {
<|file_name|>diagnostic.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2013-2020 Blockstack PBC, a public benefit corporation // Copyright (C) 2020 Stacks Open Internet Foundation // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. use std::fmt; use vm::representations::Span; /// In a near future, we can go further in our static analysis and provide different levels /// of diagnostics, such as warnings, hints, best practices, etc. 
#[derive(Debug, Serialize, Deserialize, PartialEq)] pub enum Level { Error, } pub trait DiagnosableError { fn message(&self) -> String; fn suggestion(&self) -> Option<String>; } #[derive(Debug, Serialize, Deserialize, PartialEq)] pub struct Diagnostic { pub level: Level, pub message: String, pub spans: Vec<Span>, pub suggestion: Option<String>, }<|fim▁hole|> Diagnostic { spans: vec![], level: Level::Error, message: error.message(), suggestion: error.suggestion(), } } pub fn add_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) { self.spans.push(Span { start_line, start_column, end_line, end_column, }); } } impl fmt::Display for Diagnostic { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}", self.level)?; if self.spans.len() == 1 { write!( f, " (line {}, column {})", self.spans[0].start_line, self.spans[0].start_column )?; } else if self.spans.len() > 1 { let lines: Vec<String> = self .spans .iter() .map(|s| format!("line: {}", s.start_line)) .collect(); write!(f, " ({})", lines.join(", "))?; } write!(f, ": {}.", &self.message)?; if let Some(suggestion) = &self.suggestion { write!(f, "\n{}", suggestion)?; } write!(f, "\n") } }<|fim▁end|>
impl Diagnostic { pub fn err(error: &dyn DiagnosableError) -> Diagnostic {
<|file_name|>SnapshotAction.java<|end_file_name|><|fim▁begin|>/* Copyright 2005,2006 Sven Reimers, Florian Vogler<|fim▁hole|> * * The Software Quality Environment Project is free software: * you can redistribute it and/or modify it under the terms of the * GNU General Public License as published by the Free Software Foundation, * either version 2 of the License, or (at your option) any later version. * * The Software Quality Environment Project is distributed in the hope that * it will be useful, but WITHOUT ANY WARRANTY; without even the implied * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Foobar. If not, see <http://www.gnu.org/licenses/>. */ package org.nbheaven.sqe.codedefects.history.action; import java.awt.EventQueue; import java.awt.event.ActionEvent; import java.util.Collection; import javax.swing.AbstractAction; import javax.swing.Action; import org.nbheaven.sqe.codedefects.core.util.SQECodedefectSupport; import org.nbheaven.sqe.codedefects.history.util.CodeDefectHistoryPersistence; import org.netbeans.api.project.Project; import org.openide.util.ContextAwareAction; import org.openide.util.ImageUtilities; import org.openide.util.Lookup; import org.openide.util.LookupEvent; import org.openide.util.LookupListener; import org.openide.util.NbBundle; import org.openide.util.Utilities; /** * * @author Sven Reimers */ public class SnapshotAction extends AbstractAction implements LookupListener, ContextAwareAction { private Lookup context; private Lookup.Result<Project> lkpInfo; public SnapshotAction() { this(Utilities.actionsGlobalContext()); } public SnapshotAction(Lookup context) { putValue("noIconInMenu", Boolean.TRUE); // NOI18N putValue(Action.SHORT_DESCRIPTION, NbBundle.getMessage(SnapshotAction.class, "HINT_Action")); putValue(SMALL_ICON, 
ImageUtilities.image2Icon(ImageUtilities.loadImage("org/nbheaven/sqe/codedefects/history/resources/camera.png"))); this.context = context; //The thing we want to listen for the presence or absence of //on the global selection Lookup.Template<Project> tpl = new Lookup.Template<Project>(Project.class); lkpInfo = context.lookup(tpl); lkpInfo.addLookupListener(this); resultChanged(null); } @Override public Action createContextAwareInstance(Lookup context) { return new SnapshotAction(context); } @Override public void resultChanged(LookupEvent ev) { updateEnableState(); } public String getName() { return NbBundle.getMessage(SnapshotAction.class, "LBL_Action"); } @Override public void actionPerformed(ActionEvent actionEvent) { if (null != getActiveProject()) { Project project = getActiveProject(); CodeDefectHistoryPersistence.addSnapshot(project); } } private void updateEnableState() { if (!EventQueue.isDispatchThread()) { EventQueue.invokeLater(() -> updateEnableState()); return; } setEnabled(SQECodedefectSupport.isQualityAwareProject(getActiveProject())); } private Project getActiveProject() { Collection<? extends Project> projects = lkpInfo.allInstances(); if (projects.size() == 1) { Project project = projects.iterator().next(); return project; } return null; } }<|fim▁end|>
* * This file is part of the Software Quality Environment Project.
<|file_name|>envelope_editor.rs<|end_file_name|><|fim▁begin|>use { Backend, Circle, Color, Colorable, Direction, Edge, Frameable, FramedRectangle, FontSize, IndexSlot, Labelable, Mouse, NodeIndex, Point, PointPath, Positionable, Scalar, Sizeable, Text, Widget, }; use num::Float; use std::any::Any; use std::cmp::Ordering; use std::default::Default; use std::fmt::Debug; use utils::{clamp, map_range, percentage, val_to_string}; use widget; /// Used for editing a series of 2D Points on a cartesian (X, Y) plane within some given range. /// /// Useful for things such as oscillator/automation envelopes or any value series represented /// periodically. pub struct EnvelopeEditor<'a, E:'a, F> where E: EnvelopePoint { common: widget::CommonBuilder, env: &'a mut Vec<E>, /// The value skewing for the envelope's y-axis. This is useful for displaying exponential /// ranges such as frequency. pub skew_y_range: f32, min_x: E::X, max_x: E::X, min_y: E::Y, max_y: E::Y, maybe_react: Option<F>, maybe_label: Option<&'a str>, style: Style, enabled: bool, } /// Unique kind for the widget. pub const KIND: widget::Kind = "EnvelopeEditor"; widget_style!{ KIND; /// Styling for the EnvelopeEditor, necessary for constructing its renderable Element. style Style { /// Coloring for the EnvelopeEditor's **FramedRectangle**. - color: Color { theme.shape_color } /// Thickness of the **FramedRectangle**'s frame. - frame: f64 { theme.frame_width } /// Color of the frame. - frame_color: Color { theme.frame_color } /// Color of the label. - label_color: Color { theme.label_color } /// The font size of the **EnvelopeEditor**'s label if one was given. - label_font_size: FontSize { theme.font_size_medium } /// The font size of the value label. - value_font_size: FontSize { 14 } /// The radius of the envelope points. - point_radius: Scalar { 6.0 } /// The thickness of the envelope lines. - line_thickness: Scalar { 2.0 } } } /// Represents the state of the EnvelopeEditor widget. 
#[derive(Clone, Debug, PartialEq)] pub struct State<E> where E: EnvelopePoint { interaction: Interaction, env: Vec<E>, min_x: E::X, max_x: E::X, min_y: E::Y, max_y: E::Y, skew_y_range: f32, rectangle_idx: IndexSlot, label_idx: IndexSlot, value_label_idx: IndexSlot, point_path_idx: IndexSlot, point_indices: Vec<NodeIndex>, } /// Describes an interaction with the EnvelopeEditor. #[derive(Debug, PartialEq, Clone, Copy)] pub enum Interaction { Normal, Highlighted(Elem), Clicked(Elem, MouseButton), } /// Represents the specific elements that the EnvelopeEditor is made up of. This is used to /// specify which element is Highlighted or Clicked when storing State. #[derive(Debug, PartialEq, Clone, Copy)] pub enum Elem { Rect, Pad, /// Represents an EnvelopePoint at `usize` index /// as well as the last mouse pos for comparison /// in determining new value. EnvPoint(usize, (f64, f64)), // /// Represents an EnvelopePoint's `curve` value. // CurvePoint(usize, (f64, f64)), } /// An enum to define which button is clicked. #[derive(Debug, PartialEq, Clone, Copy)] pub enum MouseButton { Left, Right, } /// `EnvPoint` must be implemented for any type that is used as a 2D point within the /// EnvelopeEditor. pub trait EnvelopePoint: Any + Clone + Debug + PartialEq { /// A value on the X-axis of the envelope. type X: Any + Debug + Default + Float + ToString; /// A value on the Y-axis of the envelope. type Y: Any + Debug + Default + Float + ToString; /// Return the X value. fn get_x(&self) -> Self::X; /// Return the Y value. fn get_y(&self) -> Self::Y; /// Set the X value. fn set_x(&mut self, _x: Self::X); /// Set the Y value. fn set_y(&mut self, _y: Self::Y); /// Return the bezier curve depth (-1. to 1.) for the next interpolation. fn get_curve(&self) -> f32 { 1.0 } /// Set the bezier curve depth (-1. to 1.) for the next interpolation. fn set_curve(&mut self, _curve: f32) {} /// Create a new EnvPoint. 
fn new(_x: Self::X, _y: Self::Y) -> Self; } impl EnvelopePoint for Point { type X = Scalar; type Y = Scalar; /// Return the X value. fn get_x(&self) -> Scalar { self[0] } /// Return the Y value. fn get_y(&self) -> Scalar { self[1] } /// Return the X value. fn set_x(&mut self, x: Scalar) { self[0] = x } /// Return the Y value. fn set_y(&mut self, y: Scalar) { self[1] = y } /// Create a new Envelope Point. fn new(x: Scalar, y: Scalar) -> Point { [x, y] } } impl Interaction { /// Alter the widget color depending on the state. fn color(&self, color: Color) -> Color { match *self { Interaction::Normal => color, Interaction::Highlighted(_) => color.highlighted(), Interaction::Clicked(_, _) => color.clicked(), } } } /// Determine and return the new state from the previous state and the mouse position. fn get_new_interaction(is_over_elem: Option<Elem>, prev: Interaction, mouse: Mouse) -> Interaction { use mouse::ButtonPosition::{Down, Up}; use self::Elem::{EnvPoint};//, CurvePoint}; use self::MouseButton::{Left, Right}; use self::Interaction::{Normal, Highlighted, Clicked}; match (is_over_elem, prev, mouse.left.position, mouse.right.position) { (Some(_), Normal, Down, Up) => Normal, (Some(elem), _, Up, Up) => Highlighted(elem), (Some(elem), Highlighted(_), Down, Up) => Clicked(elem, Left), (Some(_), Clicked(p_elem, m_button), Down, Up) | (Some(_), Clicked(p_elem, m_button), Up, Down) => { match p_elem { EnvPoint(idx, _) => Clicked(EnvPoint(idx, (mouse.xy[0], mouse.xy[1])), m_button), // CurvePoint(idx, _) => // Clicked(CurvePoint(idx, (mouse.xy[0], mouse.xy[1])), m_button), _ => Clicked(p_elem, m_button), } }, (None, Clicked(p_elem, m_button), Down, Up) => { match (p_elem, m_button) { (EnvPoint(idx, _), Left) => Clicked(EnvPoint(idx, (mouse.xy[0], mouse.xy[1])), Left), // (CurvePoint(idx, _), Left) => // Clicked(CurvePoint(idx, (mouse.xy[0], mouse.xy[1])), Left), _ => Clicked(p_elem, Left), } }, (Some(_), Highlighted(p_elem), Up, Down) => { match p_elem { EnvPoint(idx, _) 
=> Clicked(EnvPoint(idx, (mouse.xy[0], mouse.xy[1])), Right), // CurvePoint(idx, _) => Clicked(CurvePoint(idx, (mouse.xy[0], mouse.xy[1])), Right), _ => Clicked(p_elem, Right), } }, _ => Normal, } } impl<'a, E, F> EnvelopeEditor<'a, E, F> where E: EnvelopePoint { /// Construct an EnvelopeEditor widget. pub fn new(env: &'a mut Vec<E>, min_x: E::X, max_x: E::X, min_y: E::Y, max_y: E::Y) -> EnvelopeEditor<'a, E, F> { EnvelopeEditor { common: widget::CommonBuilder::new(), env: env, skew_y_range: 1.0, // Default skew amount (no skew). min_x: min_x, max_x: max_x, min_y: min_y, max_y: max_y, maybe_react: None, maybe_label: None, style: Style::new(), enabled: true, } } builder_methods!{ pub point_radius { style.point_radius = Some(Scalar) } pub line_thickness { style.line_thickness = Some(Scalar) } pub value_font_size { style.value_font_size = Some(FontSize) } pub skew_y { skew_y_range = f32 } pub react { maybe_react = Some(F) } pub enabled { enabled = bool } } } impl<'a, E, F> Widget for EnvelopeEditor<'a, E, F> where E: EnvelopePoint, E::X: Any, E::Y: Any, F: FnMut(&mut Vec<E>, usize), { type State = State<E>; type Style = Style; fn common(&self) -> &widget::CommonBuilder { &self.common } fn common_mut(&mut self) -> &mut widget::CommonBuilder { &mut self.common } fn unique_kind(&self) -> widget::Kind { KIND } fn init_state(&self) -> State<E> { State { interaction: Interaction::Normal, env: Vec::new(), min_x: self.min_x, max_x: self.max_x, min_y: self.min_y, max_y: self.max_y, skew_y_range: self.skew_y_range, rectangle_idx: IndexSlot::new(), label_idx: IndexSlot::new(), value_label_idx: IndexSlot::new(), point_path_idx: IndexSlot::new(), point_indices: Vec::new(), } } fn style(&self) -> Style { self.style.clone() } /// Update the state of the EnvelopeEditor's cached state. fn update<B: Backend>(self, args: widget::UpdateArgs<Self, B>) { use self::Interaction::{Clicked, Highlighted, Normal}; let widget::UpdateArgs { idx, state, rect, style, mut ui, .. 
} = args; let EnvelopeEditor { env, skew_y_range, min_x, max_x, min_y, max_y, mut maybe_react, maybe_label, enabled, .. } = self; let maybe_mouse = ui.input(idx).maybe_mouse; let skew = skew_y_range; let point_radius = style.point_radius(ui.theme()); let frame = style.frame(ui.theme()); let inner_rect = rect.pad(frame); // Converts some envelope point's `x` value to a value in the given `Scalar` range. let map_x_to = |x: E::X, start: Scalar, end: Scalar| -> Scalar { map_range(x, min_x, max_x, start, end) }; // Converts some envelope point's `y` value to a value in the given `Scalar` range. let map_y_to = |y: E::Y, start: Scalar, end: Scalar| -> Scalar { let skewed_perc = percentage(y, min_y, max_y).powf(1.0 / skew_y_range); map_range(skewed_perc, 0.0, 1.0, start, end) }; // Converts some `Scalar` value in the given range to an `x` value for an envelope point. let map_to_x = |value: Scalar, start: Scalar, end: Scalar| -> E::X { map_range(value, start, end, min_x, max_x) }; // Converts some `Scalar` value in the given range to an `y` value for an envelope point. let map_to_y = |value: Scalar, start: Scalar, end: Scalar| -> E::Y { let unskewed_perc = percentage(value, start, end).powf(skew_y_range); map_range(unskewed_perc, 0.0, 1.0, min_y, max_y) }; // Determine the left and right X bounds for a point. let get_x_bounds = |env: &[E], idx: usize| -> (E::X, E::X) { let len = env.len(); let right_bound = if len > 0 && len - 1 > idx { env[idx + 1].get_x() } else { max_x }; let left_bound = if len > 0 && idx > 0 { env[idx - 1].get_x() } else { min_x }; (left_bound, right_bound) }; // Check for a new interaction. // // The reason we create the new interaction as mutable is because we may need to shift back // an index in the event that a point is removed. let mut new_interaction = match (enabled, maybe_mouse) { (false, _) | (true, None) => Normal, (true, Some(mouse)) => { // Determine whether or not the cursor is over the EnvelopeEditor. 
If it is, return // the element under the cursor and the closest EnvPoint to the cursor. let is_over_elem = |env: &[E]| if rect.is_over(mouse.xy) { if inner_rect.is_over(mouse.xy) { for (i, p) in env.iter().enumerate() { let px = p.get_x(); let py = p.get_y(); let x = map_x_to(px, inner_rect.left(), inner_rect.right()); let y = map_y_to(py, inner_rect.bottom(), inner_rect.top()); let distance = (mouse.xy[0] - x).powf(2.0) + (mouse.xy[1] - y).powf(2.0); if distance <= point_radius.powf(2.0) { return Some(Elem::EnvPoint(i, (x, y))); } } Some(Elem::Pad) } else { Some(Elem::Rect) } } else { None }; get_new_interaction(is_over_elem(&env), state.view().interaction, mouse) }, }; // Capture the mouse if clicked or uncapture the mouse if released. match (state.view().interaction, new_interaction) { (Highlighted(_), Clicked(_, _)) => { ui.capture_mouse(idx); }, (Clicked(_, _), Highlighted(_)) | (Clicked(_, _), Normal) => { ui.uncapture_mouse(idx); }, _ => (), } // Draw the closest envelope point and it's label. Return the idx if it is currently clicked. let is_clicked_env_point = match new_interaction { Clicked(elem, _) | Highlighted(elem) => { if let Elem::EnvPoint(idx, _) = elem { Some(idx) } else { None } }, _ => None, }; // If some new mouse state was given... if let Some(mouse) = maybe_mouse { // Determine new values. let get_new_value = |env: &[E], idx: usize| -> (E::X, E::Y) { let mouse_x_clamped = inner_rect.x.clamp_value(mouse.xy[0]); let mouse_y_clamped = inner_rect.y.clamp_value(mouse.xy[1]); let unbounded_x = map_to_x(mouse_x_clamped, inner_rect.left(), inner_rect.right()); let (left_bound, right_bound) = get_x_bounds(&env, idx); let new_x = clamp(unbounded_x, left_bound, right_bound); let new_y = map_to_y(mouse_y_clamped, inner_rect.bottom(), inner_rect.top()); (new_x, new_y) }; // If a point is currently clicked, check for react and value setting conditions. 
if let Some(idx) = is_clicked_env_point { // Call the `react` closure if mouse was released on one of the DropDownMenu items. match (state.view().interaction, new_interaction) { (Clicked(_, m_button), Highlighted(_)) | (Clicked(_, m_button), Normal) => { match m_button { MouseButton::Left => { // Adjust the point and trigger the reaction. let (new_x, new_y) = get_new_value(&env, idx); env[idx].set_x(new_x); env[idx].set_y(new_y); if let Some(ref mut react) = maybe_react { react(env, idx); } }, MouseButton::Right => { // Delete the point and trigger the reaction. env.remove(idx); // Check for whether or not the highlighted index is out of range // now that a point has been removed from the envelope. if let Highlighted(ref mut elem) = new_interaction { if env.is_empty() { *elem = Elem::Pad; } else if let Elem::EnvPoint(p_idx, p) = *elem { if p_idx >= env.len() { *elem = Elem::EnvPoint(env.len() - 1, p); } } } if let Some(ref mut react) = maybe_react { react(env, idx); } }, } }, (Clicked(_, prev_m_button), Clicked(_, m_button)) => { if let (MouseButton::Left, MouseButton::Left) = (prev_m_button, m_button) { let (new_x, new_y) = get_new_value(&env, idx); let current_x = env[idx].get_x(); let current_y = env[idx].get_y(); if new_x != current_x || new_y != current_y { // Adjust the point and trigger the reaction. env[idx].set_x(new_x); env[idx].set_y(new_y); if let Some(ref mut react) = maybe_react { react(env, idx); } } } }, _ => (), } } else { // Check if a there are no points. If so and the mouse was clicked, add a point. if env.len() == 0 { if let (Clicked(elem, m_button), Highlighted(_)) = (state.view().interaction, new_interaction) { if let (Elem::Pad, MouseButton::Left) = (elem, m_button) { let (new_x, new_y) = get_new_value(&env, 0); let new_point = EnvelopePoint::new(new_x, new_y); env.push(new_point); if let Some(ref mut react) = maybe_react { react(env, 0); } } } } else { // Check if a new point should be created. 
if let (Clicked(elem, m_button), Highlighted(_)) = (state.view().interaction, new_interaction) { if let (Elem::Pad, MouseButton::Left) = (elem, m_button) { let clamped_mouse_x = inner_rect.x.clamp_value(mouse.xy[0]); let clamped_mouse_y = inner_rect.y.clamp_value(mouse.xy[1]); let (left, right, bottom, top) = inner_rect.l_r_b_t(); let new_x = map_to_x(clamped_mouse_x, left, right); let new_y = map_to_y(clamped_mouse_y, bottom, top); let new_point = EnvelopePoint::new(new_x, new_y); env.push(new_point); env.sort_by(|a, b| if a.get_x() > b.get_x() { Ordering::Greater } else if a.get_x() < b.get_x() { Ordering::Less } else { Ordering::Equal }); if let Some(ref mut react) = maybe_react { let idx = env.iter().enumerate().find(|&(_, point)| { point.get_x() == new_x && point.get_y() == new_y }).map(|(idx, _)| idx).unwrap(); react(env, idx) } }<|fim▁hole|> } } // A function for finding the closest element to the cursor. let closest_elem = |env: &[E], target: Point| { let mut closest_distance = ::std::f64::MAX; let mut closest_elem = Elem::Pad; for (i, p) in env.iter().enumerate() { let px = p.get_x(); let py = p.get_y(); let x = map_x_to(px, inner_rect.left(), inner_rect.right()); let y = map_y_to(py, inner_rect.bottom(), inner_rect.top()); let distance = (target[0] - x).powf(2.0) + (target[1] - y).powf(2.0); if distance < closest_distance { closest_distance = distance; closest_elem = Elem::EnvPoint(i, (x, y)); } } closest_elem }; // Determine the closest point to the cursor. let maybe_closest_point = match new_interaction { Clicked(Elem::EnvPoint(idx, p), _) | Highlighted(Elem::EnvPoint(idx, p)) => Some((idx, p)), Clicked(_, _) | Highlighted(_) => maybe_mouse.and_then(|mouse| match closest_elem(&env, mouse.xy) { Elem::EnvPoint(idx, p) => Some((idx, p)), _ => None, }), _ => None, }; if state.view().interaction != new_interaction { state.update(|state| state.interaction = new_interaction); } if &state.view().env[..] != &env[..] 
{ state.update(|state| state.env = env.clone()); } let bounds_have_changed = { let view = state.view(); view.min_x != min_x || view.max_x != max_x || view.min_y != min_y || view.max_y != max_y }; if bounds_have_changed { state.update(|state| { state.min_x = min_x; state.max_x = max_x; state.min_y = min_y; state.max_y = max_y; }); } if state.view().skew_y_range != skew { state.update(|state| state.skew_y_range = skew); } let rectangle_idx = state.view().rectangle_idx.get(&mut ui); let dim = rect.dim(); let frame = style.frame(ui.theme()); let color = new_interaction.color(style.color(ui.theme())); let frame_color = style.frame_color(ui.theme()); FramedRectangle::new(dim) .middle_of(idx) .graphics_for(idx) .color(color) .frame(frame) .frame_color(frame_color) .set(rectangle_idx, &mut ui); let label_color = style.label_color(ui.theme()); if let Some(label) = maybe_label { let label_idx = state.view().label_idx.get(&mut ui); let font_size = style.label_font_size(ui.theme()); Text::new(label) .middle_of(rectangle_idx) .graphics_for(idx) .color(label_color) .font_size(font_size) .set(label_idx, &mut ui); } let line_color = label_color.with_alpha(1.0); { let point_path_idx = state.view().point_path_idx.get(&mut ui); let thickness = style.line_thickness(ui.theme()); let points = env.iter().map(|point| { let x = map_x_to(point.get_x(), inner_rect.left(), inner_rect.right()); let y = map_y_to(point.get_y(), inner_rect.bottom(), inner_rect.top()); [x, y] }); PointPath::new(points) .wh(inner_rect.dim()) .xy(inner_rect.xy()) .graphics_for(idx) .parent(idx) .color(line_color) .thickness(thickness) .set(point_path_idx, &mut ui); } let num_point_indices = state.view().point_indices.len(); let len = env.len(); if num_point_indices < len { let new_indices = (num_point_indices..len).map(|_| ui.new_unique_node_index()); state.update(|state| state.point_indices.extend(new_indices)); } let iter = state.view().point_indices.iter().zip(env.iter()).enumerate(); for (i, (&point_idx, point)) 
in iter { let x = map_x_to(point.get_x(), inner_rect.left(), inner_rect.right()); let y = map_y_to(point.get_y(), inner_rect.bottom(), inner_rect.top()); let point_color = match new_interaction { Clicked(Elem::EnvPoint(idx, _), MouseButton::Left) if idx == i => line_color.clicked(), Highlighted(Elem::EnvPoint(idx, _)) if idx == i => line_color.highlighted(), _ => line_color, }; Circle::fill(point_radius) .color(point_color) .x_y(x, y) .graphics_for(idx) .parent(idx) .set(point_idx, &mut ui); } if let Some((closest_idx, (x, y))) = maybe_closest_point { let x_range = max_x - min_x; let y_range = max_y - min_y; let x_px_range = inner_rect.w() as usize; let y_px_range = inner_rect.h() as usize; let x_string = val_to_string(env[closest_idx].get_x(), max_x, x_range, x_px_range); let y_string = val_to_string(env[closest_idx].get_y(), max_y, y_range, y_px_range); let xy_string = format!("{}, {}", x_string, y_string); let x_direction = match inner_rect.x.closest_edge(x) { Edge::End => Direction::Backwards, Edge::Start => Direction::Forwards, }; let y_direction = match inner_rect.y.closest_edge(y) { Edge::End => Direction::Backwards, Edge::Start => Direction::Forwards, }; let value_font_size = style.value_font_size(ui.theme()); let value_label_idx = state.view().value_label_idx.get(&mut ui); let closest_point_idx = state.view().point_indices[closest_idx]; const VALUE_TEXT_PAD: f64 = 5.0; // Slight padding between the point and the text. 
Text::new(&xy_string) .x_direction_from(closest_point_idx, x_direction, VALUE_TEXT_PAD) .y_direction_from(closest_point_idx, y_direction, VALUE_TEXT_PAD) .color(line_color) .graphics_for(idx) .parent(idx) .font_size(value_font_size) .set(value_label_idx, &mut ui); } } } impl<'a, E, F> Colorable for EnvelopeEditor<'a, E, F> where E: EnvelopePoint { builder_method!(color { style.color = Some(Color) }); } impl<'a, E, F> Frameable for EnvelopeEditor<'a, E, F> where E: EnvelopePoint { builder_methods!{ frame { style.frame = Some(Scalar) } frame_color { style.frame_color = Some(Color) } } } impl<'a, E, F> Labelable<'a> for EnvelopeEditor<'a, E, F> where E: EnvelopePoint { builder_methods!{ label { maybe_label = Some(&'a str) } label_color { style.label_color = Some(Color) } label_font_size { style.label_font_size = Some(FontSize) } } }<|fim▁end|>
} }
<|file_name|>fulfillQueryTest.js<|end_file_name|><|fim▁begin|>import fulfillQuery from '../../../src/webserver/db/fulfillQuery' // eslint-disable-line<|fim▁hole|> describe('webserver/db/fulfillQuery', () => { it('should work') })<|fim▁end|>
<|file_name|>loopsource0.rs<|end_file_name|><|fim▁begin|>fn main() { let mut count = 0u32; println!("Comptons jusqu'à l'infini!"); // Boucle infinie. loop {<|fim▁hole|> // Ignore le reste de l'itération. continue; } println!("{}", count); if count == 5 { println!("Ok, ça suffit!"); // Sort de la boucle. break; } } }<|fim▁end|>
count += 1; if count == 3 { println!("trois");
<|file_name|>test_static_actions.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 Mirantis, Inc. # Copyright (c) 2016 AT&T Corp # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from unittest import mock from oslo_messaging.rpc import client from oslo_serialization import jsonutils from webob import exc from murano.api.v1 import static_actions from murano.common import policy import murano.tests.unit.api.base as tb @mock.patch.object(policy, 'check') class TestStaticActionsApi(tb.ControllerTest, tb.MuranoApiTestCase): def setUp(self): super(TestStaticActionsApi, self).setUp() self.controller = static_actions.Controller() def test_execute_static_action(self, mock_policy_check): """Test that action execution results in the correct rpc call.""" self._set_policy_rules( {'execute_action': '@'} ) action = { 'method': 'TestAction', 'args': {'name': 'John'}, 'class_name': 'TestClass', 'pkg_name': 'TestPackage', 'class_version': '=0' } rpc_task = { 'action': action, 'token': None, 'project_id': 'test_tenant', 'user_id': 'test_user', 'id': mock.ANY } request_data = { "className": 'TestClass', "methodName": 'TestAction', "packageName": 'TestPackage', "classVersion": '=0', "parameters": {'name': 'John'} } req = self._post('/actions', jsonutils.dump_as_bytes(request_data)) try: self.controller.execute(req, request_data) except TypeError: pass self.mock_engine_rpc.call_static_action.assert_called_once_with( rpc_task) def test_execute_static_action_handle_bad_data_exc(self, _): request_data 
= { "className": None, "methodName": 'TestAction' } req = self._post('/actions', jsonutils.dump_as_bytes(request_data)) self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req, request_data) request_data = { "className": 'TestClass',<|fim▁hole|> self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req, request_data) @mock.patch('murano.services.static_actions.StaticActionServices.execute') def test_execute_static_action_handle_execute_excs(self, mock_execute, _): """Test whether execute handles all exceptions thrown correctly.""" request_data = { "className": 'TestClass', "methodName": 'TestAction', "packageName": 'TestPackage', "classVersion": '=0', "parameters": {'name': 'John'} } exc_types = ['NoClassFound', 'NoMethodFound', 'NoPackageFound', 'NoPackageForClassFound', 'MethodNotExposed', 'NoMatchingMethodException'] for exc_type in exc_types: mock_execute.side_effect = client.RemoteError(exc_type=exc_type) req = self._post('/actions', jsonutils.dump_as_bytes(request_data)) self.assertRaises(exc.HTTPNotFound, self.controller.execute, req, request_data) self.assertEqual(mock_execute.call_count, len(exc_types)) exc_type = 'ContractViolationException' mock_execute.side_effect = client.RemoteError(exc_type=exc_type) req = self._post('/actions', jsonutils.dump_as_bytes(request_data)) self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req, request_data) exc_types.append(exc_type) self.assertEqual(mock_execute.call_count, len(exc_types)) exc_type = 'ThisIsARandomTestException' mock_execute.side_effect = client.RemoteError(exc_type=exc_type) req = self._post('/actions', jsonutils.dump_as_bytes(request_data)) self.assertRaises(exc.HTTPServiceUnavailable, self.controller.execute, req, request_data) exc_types.append(exc_type) self.assertEqual(mock_execute.call_count, len(exc_types)) try: int('this will throw a value error') except ValueError as e: setattr(e, 'message', None) exc_type = e mock_execute.side_effect = exc_type req = 
self._post('/actions', jsonutils.dump_as_bytes(request_data)) self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req, request_data) exc_types.append(exc_type) self.assertEqual(mock_execute.call_count, len(exc_types))<|fim▁end|>
"methodName": None } req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
<|file_name|>StickerPayLoad.java<|end_file_name|><|fim▁begin|>package es.npatarino.android.gotchallenge.chat.message.viewmodel; import android.net.Uri; import es.npatarino.android.gotchallenge.chat.message.model.Payload; public class StickerPayLoad implements Payload { private String stickerFilePath; public StickerPayLoad(String stickerFilePath) { this.stickerFilePath = stickerFilePath; } public String getStickerFilePath() { return stickerFilePath; } public Uri getSticker() {<|fim▁hole|><|fim▁end|>
return Uri.parse(stickerFilePath); } }
<|file_name|>RPSOM_animal.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin python # -*- coding: utf-8 -*- <|fim▁hole|>if __name__=='__main__': # learning rate alpha setup alpha_max = [0.1, 0.5, 0.7] alpha_min = [0.01, 0.1, 0.2] # neighborhood radius sigma setup sigma_max = [5, 7, 10] sigma_min = [1, 2, 3] epochs = 10 # RPSOM model setup rpsom=Model.RPSOM(epochs, 15, 20, input_file="animal.csv", alpha_max=alpha_max, alpha_min=alpha_min, sigma_max=sigma_max, sigma_min=sigma_min, log_file="test.log") #cb = [som.write_BMU for som in rpsom.som] cb = None # RPSOM train rpsom.fit (trainX=rpsom.input_x, epochs=rpsom.epochs, verbose=0, callbacks=cb) # Output Map # Output thickness map rpsom.map_output2wrl_squ(grad=100, filename="test") # Output grayscale 2D map filename="example_animal" rpsom.map_output2wrl_gray_squ(filename) # Output transition graph output_graph(rpsom) rpsom.weight_output_csv ("rpsom_weight")<|fim▁end|>
from RPSOM import Model from RPSOM.transition_graph import output_graph
<|file_name|>compat.py<|end_file_name|><|fim▁begin|>from django.conf import settings from django.contrib.auth.models import User from django.core.exceptions import ImproperlyConfigured from django.utils import six from django.utils.html import conditional_escape from django.utils.safestring import mark_safe from six.moves import map def get_user_model(): """ Return the User model Using this function instead of Django 1.5's get_user_model allows backwards compatibility with Django 1.4. """ try: # Django 1.5+ from django.contrib.auth import get_user_model except ImportError: # Django <= 1.4 model = User else: model = get_user_model() # Test if user model has any custom fields and add attributes to the _meta # class core_fields = set([f.name for f in User._meta.fields]) model_fields = set([f.name for f in model._meta.fields]) new_fields = model_fields.difference(core_fields) model._meta.has_additional_fields = len(new_fields) > 0 model._meta.additional_fields = new_fields return model # A setting that can be used in foreign key declarations AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User') # Two additional settings that are useful in South migrations when # specifying the user model in the FakeORM try: AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.rsplit('.', 1) except ValueError: raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form" " 'app_label.model_name'") def format_html(format_string, *args, **kwargs): """ Backport of format_html from Django 1.5+ to support Django 1.4 """ args_safe = map(conditional_escape, args) kwargs_safe = dict([(k, conditional_escape(v)) for (k, v) in six.iteritems(kwargs)]) return mark_safe(format_string.format(*args_safe, **kwargs_safe)) # # Python3 compatibility layer # try: import urlparse as _urlparse except ImportError: from urllib import parse as _urlparse # NOQA urlparse = _urlparse # # Unicode compatible wrapper for CSV reader and writer that abstracts away # differences between Python 2 
and 3. A package like unicodecsv would be # preferable, but it's not Python 3 compatible yet. # Code from http://python3porting.com/problems.html # Classes renamed to include CSV. Unused 'codecs' import is dropped. import sys import csv PY3 = sys.version > '3' class UnicodeCSVReader: def __init__(self, filename, dialect=csv.excel, encoding="utf-8", **kw): self.filename = filename self.dialect = dialect self.encoding = encoding self.kw = kw def __enter__(self): if PY3: self.f = open(self.filename, 'rt', encoding=self.encoding, newline='') else: self.f = open(self.filename, 'rb') self.reader = csv.reader(self.f, dialect=self.dialect, **self.kw) return self def __exit__(self, type, value, traceback): self.f.close() def next(self): row = next(self.reader) if PY3: return row return [s.decode("utf-8") for s in row] __next__ = next def __iter__(self): return self class UnicodeCSVWriter: def __init__(self, filename, dialect=csv.excel, encoding="utf-8", **kw): self.filename = filename self.dialect = dialect self.encoding = encoding self.kw = kw def __enter__(self): if PY3: self.f = open(self.filename, 'wt', encoding=self.encoding, newline='') else:<|fim▁hole|> self.f = open(self.filename, 'wb') self.writer = csv.writer(self.f, dialect=self.dialect, **self.kw) return self def __exit__(self, type, value, traceback): self.f.close() def writerow(self, row): if not PY3: row = [s.encode(self.encoding) for s in row] self.writer.writerow(row) def writerows(self, rows): for row in rows: self.writerow(row)<|fim▁end|>
<|file_name|>parameter_value_expression.cpp<|end_file_name|><|fim▁begin|>//===----------------------------------------------------------------------===// // // Peloton // // parameter_value_expression.cpp // // Identification: src/backend/expression/parameter_value_expression.cpp // // Copyright (c) 2015-16, Carnegie Mellon University Database Group // //===----------------------------------------------------------------------===// #include "backend/common/logger.h" #include "backend/expression/parameter_value_expression.h" #include "backend/executor/executor_context.h" namespace peloton { namespace expression { ParameterValueExpression::ParameterValueExpression(ValueType type, int value_idx) : AbstractExpression(EXPRESSION_TYPE_VALUE_PARAMETER, type), value_idx_(value_idx), param_value_() { LOG_TRACE("ParameterValueExpression %d", value_idx); }; ParameterValueExpression::ParameterValueExpression(oid_t value_idx, Value param_value) : AbstractExpression(EXPRESSION_TYPE_VALUE_PARAMETER, param_value.GetValueType()), value_idx_(value_idx), param_value_(param_value) {} Value ParameterValueExpression::Evaluate( __attribute__((unused)) const AbstractTuple *tuple1, __attribute__((unused)) const AbstractTuple *tuple2, executor::ExecutorContext *context) const {<|fim▁hole|> auto& params = context->GetParams(); assert(value_idx_ < params.size()); return params[value_idx_]; } } // namespace expression } // namespace peloton<|fim▁end|>
<|file_name|>authenticationpolicylabel.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2008-2015 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response from nssrc.com.citrix.netscaler.nitro.service.options import options from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util class authenticationpolicylabel(base_resource) : """ Configuration for authentication policy label resource. """ def __init__(self) : self._labelname = "" self._newname = "" self._numpol = 0 self._hits = 0 self._policyname = "" self._priority = 0 self._gotopriorityexpression = "" self._flowtype = 0 self._description = "" self.___count = 0 @property def labelname(self) : ur"""Name for the new authentication policy label. Must begin with a letter, number, or the underscore character (_), and must contain only letters, numbers, and the hyphen (-), period (.) pound (#), space ( ), at (@), equals (=), colon (:), and underscore characters. The following requirement applies only to the NetScaler CLI: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my authentication policy label" or 'authentication policy label'). 
""" try : return self._labelname except Exception as e: raise e @labelname.setter def labelname(self, labelname) : ur"""Name for the new authentication policy label. Must begin with a letter, number, or the underscore character (_), and must contain only letters, numbers, and the hyphen (-), period (.) pound (#), space ( ), at (@), equals (=), colon (:), and underscore characters. The following requirement applies only to the NetScaler CLI: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my authentication policy label" or 'authentication policy label'). """ try : self._labelname = labelname except Exception as e: raise e @property def newname(self) : ur"""The new name of the auth policy label.<br/>Minimum length = 1. """ try : return self._newname except Exception as e: raise e @newname.setter def newname(self, newname) : ur"""The new name of the auth policy label.<br/>Minimum length = 1 """ try : self._newname = newname except Exception as e: raise e @property def numpol(self) : ur"""Number of polices bound to label. """ try : return self._numpol except Exception as e: raise e @property def hits(self) : ur"""Number of times policy label was invoked. """ try : return self._hits except Exception as e: raise e @property def policyname(self) : ur"""Name of the authentication policy to bind to the policy label. """ try : return self._policyname except Exception as e: raise e @property def priority(self) : ur"""Specifies the priority of the policy. """ try : return self._priority except Exception as e: raise e @property def gotopriorityexpression(self) : ur"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE. """ try : return self._gotopriorityexpression except Exception as e: raise e @property def flowtype(self) : ur"""Flowtype of the bound authentication policy. 
""" try : return self._flowtype except Exception as e: raise e @property def description(self) : ur"""Description of the policylabel. """ try : return self._description except Exception as e: raise e def _get_nitro_response(self, service, response) : ur""" converts nitro response into object and returns the object array in case of get request. """ try : result = service.payload_formatter.string_to_resource(authenticationpolicylabel_response, response, self.__class__.__name__) if(result.errorcode != 0) : if (result.errorcode == 444) : service.clear_session(self) if result.severity : if (result.severity == "ERROR") : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) else : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) return result.authenticationpolicylabel except Exception as e : raise e def _get_object_name(self) : ur""" Returns the value of object identifier argument """ try : if self.labelname is not None : return str(self.labelname) return None except Exception as e : raise e @classmethod def add(cls, client, resource) : ur""" Use this API to add authenticationpolicylabel. """ try : if type(resource) is not list : addresource = authenticationpolicylabel() addresource.labelname = resource.labelname return addresource.add_resource(client) else : if (resource and len(resource) > 0) : addresources = [ authenticationpolicylabel() for _ in range(len(resource))] for i in range(len(resource)) : addresources[i].labelname = resource[i].labelname result = cls.add_bulk_request(client, addresources) return result except Exception as e : raise e @classmethod def delete(cls, client, resource) : ur""" Use this API to delete authenticationpolicylabel. 
""" try : if type(resource) is not list : deleteresource = authenticationpolicylabel() if type(resource) != type(deleteresource): deleteresource.labelname = resource else : deleteresource.labelname = resource.labelname return deleteresource.delete_resource(client) else : if type(resource[0]) != cls : if (resource and len(resource) > 0) : deleteresources = [ authenticationpolicylabel() for _ in range(len(resource))] for i in range(len(resource)) : deleteresources[i].labelname = resource[i] else : if (resource and len(resource) > 0) : deleteresources = [ authenticationpolicylabel() for _ in range(len(resource))] for i in range(len(resource)) : deleteresources[i].labelname = resource[i].labelname result = cls.delete_bulk_request(client, deleteresources) return result except Exception as e : raise e @classmethod def rename(cls, client, resource, new_labelname) : ur""" Use this API to rename a authenticationpolicylabel resource. """ try : renameresource = authenticationpolicylabel() if type(resource) == cls : renameresource.labelname = resource.labelname else : renameresource.labelname = resource return renameresource.rename_resource(client,new_labelname) except Exception as e : raise e @classmethod def get(cls, client, name="", option_="") : ur""" Use this API to fetch all the authenticationpolicylabel resources that are configured on netscaler. 
""" try : if not name : obj = authenticationpolicylabel() response = obj.get_resources(client, option_) else : if type(name) != cls : if type(name) is not list : obj = authenticationpolicylabel() obj.labelname = name response = obj.get_resource(client, option_) else : if name and len(name) > 0 : response = [authenticationpolicylabel() for _ in range(len(name))] obj = [authenticationpolicylabel() for _ in range(len(name))] for i in range(len(name)) : obj[i] = authenticationpolicylabel() obj[i].labelname = name[i] response[i] = obj[i].get_resource(client, option_) return response except Exception as e : raise e @classmethod<|fim▁hole|> ur""" Use this API to fetch filtered set of authenticationpolicylabel resources. filter string should be in JSON format.eg: "port:80,servicetype:HTTP". """ try : obj = authenticationpolicylabel() option_ = options() option_.filter = filter_ response = obj.getfiltered(client, option_) return response except Exception as e : raise e @classmethod def count(cls, client) : ur""" Use this API to count the authenticationpolicylabel resources configured on NetScaler. """ try : obj = authenticationpolicylabel() option_ = options() option_.count = True response = obj.get_resources(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e @classmethod def count_filtered(cls, client, filter_) : ur""" Use this API to count filtered the set of authenticationpolicylabel resources. Filter string should be in JSON format.eg: "port:80,servicetype:HTTP". 
""" try : obj = authenticationpolicylabel() option_ = options() option_.count = True option_.filter = filter_ response = obj.getfiltered(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e class authenticationpolicylabel_response(base_response) : def __init__(self, length=1) : self.authenticationpolicylabel = [] self.errorcode = 0 self.message = "" self.severity = "" self.sessionid = "" self.authenticationpolicylabel = [authenticationpolicylabel() for _ in range(length)]<|fim▁end|>
def get_filtered(cls, client, filter_) :
<|file_name|>test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Dmitry Vyukov. All rights reserved. // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. package test import ( "bytes" ) func Fuzz(data []byte) int { if len(data) == 1 { if data[0] == '!' || data[0] == '#' { panic("bingo 0") } if data[0] == '0' || data[0] == '9' { for { c := make(chan bool) close(c) } } if data[0] == 'a' || data[0] == 'z' { data := make([]byte, 128<<30-1) _ = data } if data[0] == 'b' { // new coverage } if data[0] == 'c' { // new coverage } } // Test for crash minimization. if bytes.IndexByte(data, 'x') != -1 { if bytes.IndexByte(data, 'y') != -1 { panic("xy") } } // Test for input minimization. if bytes.Index(data, []byte("input ")) != -1 { if bytes.Index(data, []byte("minimization ")) != -1 { if bytes.Index(data, []byte("test")) != -1 { } } } if len(data) >= 14 && bytes.HasPrefix(data, []byte("0123456789")) { x := int(data[10]) + int(data[11])<<8 + int(data[12])<<16 + int(data[13])<<24 if x == 0 || x == -1 { panic("bingo 1") } if x == 255 || x == 256 { for { c := make(chan bool) close(c) } } if x == 1<<16-1 || x == 1<<16 { data := make([]byte, 128<<30-1) _ = data } if x == '1' { // new coverage } if x == '2' { // new coverage } } return 0 } // Compilation tests, go-fuzz-build previously failed on these code patterns. // Test for issue #35. 
const X = 1 << 129 func foo(x float64) bool { return x < X } func test1() bool { var x uint64 var y uint return x == 1<<y } func test11() bool { var x uint64 var y uint return x < (1<<uint64(y))-1 } func Pow(x, y float64) float64 { switch { case x == -1: return 1 case (Abs(x) < 1) == IsInf(y, 1): return 0 default: return 1 } } func Abs(x float64) float64 { return x } <|fim▁hole|>func test2(p *int) bool { return p == nil } type ChanDir int const ( SEND ChanDir = 1 << iota RECV ) func test3(x ChanDir) bool { return x == SEND|RECV } type MyBool bool func test4(x, y MyBool) MyBool { if x && y { return true } if true && y { return true } if x && true { return true } return false } func bla() error { return nil } func test5() bool { return nil == bla() }<|fim▁end|>
func IsInf(x float64, v int) bool { return x != 0 }
<|file_name|>class_passed_message.js<|end_file_name|><|fim▁begin|>var class_passed_message = [ [ "direction_t", "class_passed_message.html#a11c83e74aa007c495b32ec3ed4953a50", [ [ "INCOMING", "class_passed_message.html#a11c83e74aa007c495b32ec3ed4953a50a43c42d4afa45cd04736e0d59167260a4", null ], [ "OUTGOING", "class_passed_message.html#a11c83e74aa007c495b32ec3ed4953a50a862e80d4bad52c451a413eef983c16ae", null ] ] ], [ "gates_t", "class_passed_message.html#a7738b6f08855f784d1012de87fbfd9e6", [<|fim▁hole|> ] ], [ "direction", "class_passed_message.html#af55219a6ed1e656af091cb7583467f5b", null ], [ "fromModule", "class_passed_message.html#a6c340595cb29a4e8a4c55ea0503dffad", null ], [ "gateType", "class_passed_message.html#a41f11b3139f3552cf2de3bb648c1ff55", null ], [ "kind", "class_passed_message.html#ab4e2bf6d2317196af7e9c98ed2c406a6", null ], [ "name", "class_passed_message.html#a8a4eb44ad1e43205d1881fec0c00a6d7", null ] ];<|fim▁end|>
[ "UPPER_DATA", "class_passed_message.html#a7738b6f08855f784d1012de87fbfd9e6adf76d3ca7bb9a62bed70965639d59859", null ], [ "UPPER_CONTROL", "class_passed_message.html#a7738b6f08855f784d1012de87fbfd9e6aea991e99dac6c91c9e3e89f902f1075d", null ], [ "LOWER_DATA", "class_passed_message.html#a7738b6f08855f784d1012de87fbfd9e6a97265ac51f333c88508670c5d3f5ded9", null ], [ "LOWER_CONTROL", "class_passed_message.html#a7738b6f08855f784d1012de87fbfd9e6afb379d2a15495f1ef2f290dc9ac97299", null ]
<|file_name|>gcp_compute_ssl_certificate.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_compute_ssl_certificate description: - An SslCertificate resource, used for HTTPS load balancing. This resource provides a mechanism to upload an SSL key and certificate to the load balancer to serve secure connections from the user. short_description: Creates a GCP SslCertificate version_added: 2.6 author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: state: description: - Whether the given object should exist in GCP choices: - present - absent default: present certificate: description: - The certificate in PEM format. - The certificate chain must be no greater than 5 certs long. - The chain must include at least one intermediate cert. required: true description: description: - An optional description of this resource. 
required: false name: description: - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. required: false private_key: description: - The write-only private key in PEM format. required: true extends_documentation_fragment: gcp notes: - 'API Reference: U(https://cloud.google.com/compute/docs/reference/rest/v1/sslCertificates)' - 'Official Documentation: U(https://cloud.google.com/load-balancing/docs/ssl-certificates)' ''' EXAMPLES = ''' - name: create a ssl certificate gcp_compute_ssl_certificate: name: "test_object" description: A certificate for testing. Do not use this certificate in production certificate: | -----BEGIN CERTIFICATE----- MIICqjCCAk+gAwIBAgIJAIuJ+0352Kq4MAoGCCqGSM49BAMCMIGwMQswCQYDVQQG EwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjERMA8GA1UEBwwIS2lya2xhbmQxFTAT BgNVBAoMDEdvb2dsZSwgSW5jLjEeMBwGA1UECwwVR29vZ2xlIENsb3VkIFBsYXRm b3JtMR8wHQYDVQQDDBZ3d3cubXktc2VjdXJlLXNpdGUuY29tMSEwHwYJKoZIhvcN AQkBFhJuZWxzb25hQGdvb2dsZS5jb20wHhcNMTcwNjI4MDQ1NjI2WhcNMjcwNjI2 MDQ1NjI2WjCBsDELMAkGA1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xETAP BgNVBAcMCEtpcmtsYW5kMRUwEwYDVQQKDAxHb29nbGUsIEluYy4xHjAcBgNVBAsM FUdvb2dsZSBDbG91ZCBQbGF0Zm9ybTEfMB0GA1UEAwwWd3d3Lm15LXNlY3VyZS1z aXRlLmNvbTEhMB8GCSqGSIb3DQEJARYSbmVsc29uYUBnb29nbGUuY29tMFkwEwYH KoZIzj0CAQYIKoZIzj0DAQcDQgAEHGzpcRJ4XzfBJCCPMQeXQpTXwlblimODQCuQ 4mzkzTv0dXyB750fOGN02HtkpBOZzzvUARTR10JQoSe2/5PIwaNQME4wHQYDVR0O BBYEFKIQC3A2SDpxcdfn0YLKineDNq/BMB8GA1UdIwQYMBaAFKIQC3A2SDpxcdfn 0YLKineDNq/BMAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDSQAwRgIhALs4vy+O M3jcqgA4fSW/oKw6UJxp+M6a+nGMX+UJR3YgAiEAvvl39QRVAiv84hdoCuyON0lJ zqGNhIPGq2ULqXKK8BY= -----END CERTIFICATE----- 
private_key: | -----BEGIN EC PRIVATE KEY----- MHcCAQEEIObtRo8tkUqoMjeHhsOh2ouPpXCgBcP+EDxZCB/tws15oAoGCCqGSM49 AwEHoUQDQgAEHGzpcRJ4XzfBJCCPMQeXQpTXwlblimODQCuQ4mzkzTv0dXyB750f OGN02HtkpBOZzzvUARTR10JQoSe2/5PIwQ== -----END EC PRIVATE KEY----- project: "test_project" auth_kind: "serviceaccount" service_account_file: "/tmp/auth.pem" state: present ''' RETURN = ''' certificate: description: - The certificate in PEM format. - The certificate chain must be no greater than 5 certs long. - The chain must include at least one intermediate cert. returned: success type: str creationTimestamp: description: - Creation timestamp in RFC3339 text format. returned: success type: str description: description: - An optional description of this resource. returned: success type: str id: description: - The unique identifier for the resource. returned: success type: int name: description: - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`<|fim▁hole|> which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. returned: success type: str privateKey: description: - The write-only private key in PEM format. 
returned: success type: str ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict import json import time ################################################################################ # Main ################################################################################ def main(): """Main function""" module = GcpModule( argument_spec=dict( state=dict(default='present', choices=['present', 'absent'], type='str'), certificate=dict(required=True, type='str'), description=dict(type='str'), name=dict(type='str'), private_key=dict(required=True, type='str') ) ) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/compute'] state = module.params['state'] kind = 'compute#sslCertificate' fetch = fetch_resource(module, self_link(module), kind) changed = False if fetch: if state == 'present': if is_different(module, fetch): update(module, self_link(module), kind) fetch = fetch_resource(module, self_link(module), kind) changed = True else: delete(module, self_link(module), kind) fetch = {} changed = True else: if state == 'present': fetch = create(module, collection(module), kind) changed = True else: fetch = {} fetch.update({'changed': changed}) module.exit_json(**fetch) def create(module, link, kind): auth = GcpSession(module, 'compute') return wait_for_operation(module, auth.post(link, resource_to_request(module))) def update(module, link, kind): module.fail_json(msg="SslCertificate cannot be edited") def delete(module, link, kind): auth = GcpSession(module, 'compute') return wait_for_operation(module, auth.delete(link)) def resource_to_request(module): request = { u'kind': 'compute#sslCertificate', u'certificate': module.params.get('certificate'), u'description': module.params.get('description'), 
u'name': module.params.get('name'), u'privateKey': module.params.get('private_key') } return_vals = {} for k, v in request.items(): if v: return_vals[k] = v return return_vals def fetch_resource(module, link, kind, allow_not_found=True): auth = GcpSession(module, 'compute') return return_if_object(module, auth.get(link), kind, allow_not_found) def self_link(module): return "https://www.googleapis.com/compute/v1/projects/{project}/global/sslCertificates/{name}".format(**module.params) def collection(module): return "https://www.googleapis.com/compute/v1/projects/{project}/global/sslCertificates".format(**module.params) def return_if_object(module, response, kind, allow_not_found=False): # If not found, return nothing. if allow_not_found and response.status_code == 404: return None # If no content, return nothing. if response.status_code == 204: return None try: module.raise_for_status(response) result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst: module.fail_json(msg="Invalid JSON response with error: %s" % inst) if navigate_hash(result, ['error', 'errors']): module.fail_json(msg=navigate_hash(result, ['error', 'errors'])) return result def is_different(module, response): request = resource_to_request(module) response = response_to_hash(module, response) # Remove all output-only from response. response_vals = {} for k, v in response.items(): if k in request: response_vals[k] = v request_vals = {} for k, v in request.items(): if k in response: request_vals[k] = v return GcpRequest(request_vals) != GcpRequest(response_vals) # Remove unnecessary properties from the response. # This is for doing comparisons with Ansible's current parameters. 
def response_to_hash(module, response): return { u'certificate': response.get(u'certificate'), u'creationTimestamp': response.get(u'creationTimestamp'), u'description': response.get(u'description'), u'id': response.get(u'id'), u'name': response.get(u'name'), u'privateKey': module.params.get('private_key') } def async_op_url(module, extra_data=None): if extra_data is None: extra_data = {} url = "https://www.googleapis.com/compute/v1/projects/{project}/global/operations/{op_id}" combined = extra_data.copy() combined.update(module.params) return url.format(**combined) def wait_for_operation(module, response): op_result = return_if_object(module, response, 'compute#operation') if op_result is None: return {} status = navigate_hash(op_result, ['status']) wait_done = wait_for_completion(status, op_result, module) return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#sslCertificate') def wait_for_completion(status, op_result, module): op_id = navigate_hash(op_result, ['name']) op_uri = async_op_url(module, {'op_id': op_id}) while status != 'DONE': raise_if_errors(op_result, ['error', 'errors'], 'message') time.sleep(1.0) op_result = fetch_resource(module, op_uri, 'compute#operation') status = navigate_hash(op_result, ['status']) return op_result def raise_if_errors(response, err_path, module): errors = navigate_hash(response, err_path) if errors is not None: module.fail_json(msg=errors) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>eq.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::cmp::PartialEq; struct A<T> { value: T } struct B<T> { value: T } impl PartialEq<B<T>> for A<T> { fn eq(&self, other: &B<T>) -> bool { self.value == other.value } // fn ne(&self, other: &Rhs) -> bool { !self.eq(other) } } // impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> { // fn eq(&self, other: &[B]) -> bool { // self.len() == other.len() && // order::eq(self.iter(), other.iter()) // } // fn ne(&self, other: &[B]) -> bool { // self.len() != other.len() || // order::ne(self.iter(), other.iter()) // } // } type T = i32; // T: PartialEq type AA = A<T>; type BB = B<T>; #[test] fn eq_test1() { let slice: &[AA] = &[ AA { value: 68 }, AA { value: 500 }, AA { value: 999 } ]; let other: &[BB] = &[ BB { value: 68 }, BB { value: 500 }, BB { value: 999 } ]; let eq: bool = slice.eq(other); <|fim▁hole|> assert_eq!(eq, true); } #[test] fn eq_test2() { let slice: &[AA] = &[ AA { value: 68 }, AA { value: 500 }, AA { value: 999 } ]; let other: &[BB] = &[ BB { value: 68 }, BB { value: 500 }, BB { value: 999 } ]; let eq: bool = slice == other; assert_eq!(eq, true); } #[test] fn eq_test3() { let slice: &[AA] = &[ AA { value: 68 }, AA { value: 500 }, AA { value: 999 } ]; let other: &[BB] = &[ BB { value: 168 }, BB { value: 500 }, BB { value: 999 } ]; let eq: bool = slice.eq(other); assert_eq!(eq, false); } #[test] fn eq_test4() { let slice: &[AA] = &[ AA { value: 68 }, AA { value: 500 }, AA { value: 999 } ]; let other: &[BB] = &[ BB { value: 168 }, BB { value: 500 }, BB { value: 999 } ]; let eq: bool = slice == other; assert_eq!(eq, false); } }<|fim▁end|>
<|file_name|>CommonFileUtil.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- ' module description' import os __author__ = 'Andrew Wen' def file_extension(filename): """ 获取文件后缀名 :param path: :return:<|fim▁hole|><|fim▁end|>
""" #return os.path.splitext(path)[1] return filename.rsplit('.', 1)[1].lower()
<|file_name|>test_yearmonth.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import division from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals import pytest from tableschema import types from tableschema.config import ERROR # Tests @pytest.mark.parametrize('format, value, result', [ ('default', [2000, 10], (2000, 10)), ('default', (2000, 10), (2000, 10)), ('default', '2000-10', (2000, 10)), ('default', (2000, 10, 20), ERROR), ('default', '2000-13-20', ERROR), ('default', '2000-13', ERROR), ('default', '2000-0', ERROR), ('default', '13', ERROR), ('default', -10, ERROR), ('default', 20, ERROR), ('default', '3.14', ERROR), ('default', '', ERROR), ]) def test_cast_yearmonth(format, value, result):<|fim▁hole|><|fim▁end|>
assert types.cast_yearmonth(format, value) == result
<|file_name|>tests_DelSlugView.py<|end_file_name|><|fim▁begin|>from django.test import TestCase from django.urls import reverse from .utils import add_default_data <|fim▁hole|> class DelSlugViewTest(TestCase): """Test del_slug view""" @classmethod def setUpTestData(cls): add_default_data() def login(self, name, password=None): self.client.login(username=name, password=password if password else name) self.pu = PytitionUser.objects.get(user__username=name) return self.pu def logout(self): self.client.logout() def test_DelSlugViewOk(self): john = self.login("john") john_perms = Permission.objects.get(organization__slugname="attac", user=john) john_perms.can_modify_petitions = True john_perms.save() petition = Petition.objects.filter(org__slugname="attac").first() slug = petition.slugmodel_set.first() response = self.client.get(reverse("del_slug", kwargs={'petition_id': petition.id})+"?slugid="+str(slug.id), follow=True) self.assertRedirects(response, reverse("edit_petition", args=[petition.id]) + "#tab_social_network_form")<|fim▁end|>
from petition.models import PytitionUser, Permission, Organization, Petition
<|file_name|>Decompressor.hpp<|end_file_name|><|fim▁begin|>#ifndef DECOMPRESSOR_HPP #define DECOMPRESSOR_HPP #include <unordered_map> #include <queue> // #include <memory> #include <iostream> #include <fstream> #include <cassert> #include <queue> // #include "InputBuffer.hpp" #include "OffsetsStream.hpp" #include "EditsStream.hpp" #include "ClipStream.hpp" #include "ReadIDStream.hpp" #include "FlagsStream.hpp" #include "QualityStream.hpp" // #include "MergedEditsStream.hpp" #include "TranscriptsStream.hpp" #include "RefereeHeader.hpp" //////////////////////////////////////////////////////////////// #define D_SEQ 1 #define D_FLAGS 2 #define D_READIDS 4 #define D_QUALS 8 #define D_OPTIONAL_FIELDS 16 //////////////////////////////////////////////////////////////// // // // //////////////////////////////////////////////////////////////// struct InputStreams { shared_ptr<ClipStream> left_clips; shared_ptr<ClipStream> right_clips; shared_ptr<OffsetsStream> offs; shared_ptr<EditsStream> edits; shared_ptr<FlagsStream> flags; shared_ptr<ReadIDStream> readIDs; shared_ptr<QualityStream> qualities; InputStreams() {} }; //////////////////////////////////////////////////////////////////////////////// // // // //////////////////////////////////////////////////////////////////////////////// class Decompressor { unordered_map<int,string> t_map; void write_sam_header(ofstream & recovered_file, RefereeHeader & header) { // write version, sorting info (alignments always sorted by coord) recovered_file << "@HD\t" << header.get_version() << "\tSO:coordinate" << endl; for (auto t_id : header.getTranscriptIDs() ) { recovered_file << "@SQ\tSN:" << header.getMapping(t_id) << "\tLN:" << header.getTranscriptLength(t_id) << endl; } } //////////////////////////////////////////////////////////////////////////// // sync the streams // TODO: take into account where clips start //////////////////////////////////////////////////////////////////////////// void sync_streams(InputStreams & is, 
pair<int, unsigned long> & off_block_start, // offsets pair<int, unsigned long> & edit_block_start, // edits pair<int, unsigned long> & lc_start, // left clips pair<int, unsigned long> & rc_start, // right clips int const target_ref_id, int const target_coord) { int offset_coord = off_block_start.first; unsigned long offset_num_al = off_block_start.second; int edit_coord = edit_block_start.first; unsigned long edit_num_al = edit_block_start.second; // first sync by the number of alignments preceeding the blocks cerr << "bringing all streams up to speed w/ offset stream" << endl; while (edit_num_al < offset_num_al) { is.edits->next(); // next has_edit byte // skip edit list for this alignment if there is one if (is.edits->hasEdits() ) is.edits->getEdits(); edit_num_al++; } cerr << "edit_num_al: " << edit_num_al << endl; while (offset_num_al < edit_num_al) { // need off to seek forward to edit_start is.offs->getNextOffset(); offset_num_al++; } cerr << "offs_num_al: " << offset_num_al << endl; // now seek to the target coordinate cerr << "seeking to a target coordinate chr=" << target_ref_id << ":" << target_coord << endl; int offset = is.offs->getCurrentOffset(); int ref_id = is.offs->getCurrentTranscript(); assert(offset_coord <= offset); while (ref_id < target_ref_id || offset < target_coord) { offset = is.offs->getNextOffset(); if (offset == END_OF_TRANS) { ref_id = is.offs->getNextTranscript(); if (ref_id == END_OF_STREAM) { cerr << "[ERROR] Unexpected end of offsets stream" << endl; exit(1); } } else if (offset == END_OF_STREAM) { cerr << "[ERROR] unexpected end of offsets stream" << endl; exit(1); } // advance to the next edit is.edits->next(); if (is.edits->hasEdits() ) is.edits->getEdits(); } cerr << "after seeking current coord is: chr=" << ref_id << ":" << offset << endl; } //////////////////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////////////////// public: Decompressor( 
string const & input_fname, string const & output_fname, string const & ref_path): file_name(input_fname), output_name(output_fname), ref_path(ref_path) { } //////////////////////////////////////////////////////////////////////////// // Reconstruct SAM file by combining the inputs; restoring reads and quals //////////////////////////////////////////////////////////////////////////// void decompress(RefereeHeader & header, InputStreams & is, uint8_t const options) { // sequence-specific streams int read_len = header.getReadLen(); auto t_map = header.getTranscriptIDsMap(); cerr << "[decompress the entire contents]" << endl; cerr << "Read length:\t" << (int)read_len << endl; assert(read_len > 0); TranscriptsStream transcripts(file_name, ref_path, "-d", t_map); recovered_file.open( output_name.c_str() ); check_file_open(recovered_file, file_name); write_sam_header(recovered_file, header); // prime the first blocks in every stream is.offs->seekToBlockStart(-1, 0, 0); is.edits->seekToBlockStart(-1, 0, 0); is.left_clips->seekToBlockStart(-1, 0, 0); is.right_clips->seekToBlockStart(-1, 0, 0); is.flags->seekToBlockStart(-1, 0, 0); is.readIDs->seekToBlockStart(-1, 0, 0); is.qualities->seekToBlockStart(-1, 0, 0); int ref_id = is.offs->getCurrentTranscript(); // cerr << "Starting with transcript " << ref_id << endl; int i = 0; while ( is.offs->hasMoreOffsets() ) { // zero-based offsets int offset_0 = is.offs->getNextOffset(); if (offset_0 == END_OF_TRANS) { // remove the prev transcript sequence -- will not need it anymore transcripts.dropTranscriptSequence(ref_id); // pull the next transcript -- seq will be loaded on first access ref_id = is.offs->getNextTranscript(); if (ref_id == END_OF_STREAM) { cerr << "Done" << endl; return; } cerr << "chr=" << transcripts.getMapping(ref_id) << " "; } else if (offset_0 == END_OF_STREAM) { // break cerr << "Done"; } else { // legit offset int ret = is.edits->next(); // advance to the next alignment if (ret == END_OF_STREAM) { cerr << "no 
more edits" << endl; } reconstructAlignment(offset_0, read_len, ref_id, transcripts, is.edits, is.left_clips, is.right_clips, is.readIDs, is.flags, is.qualities, options); } i++; if (i % 1000000 == 0) { cerr << i / 1000000 << "mln "; } } cerr << endl; cerr << "quals covered: " << quals_covered << endl; cerr << "new qual requested: " << new_requested << endl; recovered_file.close(); } //////////////////////////////////////////////////////////////// // Decompress alignments within a given interval //////////////////////////////////////////////////////////////// void decompressInterval(GenomicInterval interval, RefereeHeader & header, InputStreams & is, const uint8_t options) { int read_len = header.getReadLen(); auto t_map = header.getTranscriptIDsMap(); TranscriptsStream transcripts(file_name, ref_path, "-d", t_map); recovered_file.open( output_name.c_str() ); if (!recovered_file) { cerr << "[ERROR] Could not open output file." << endl; exit(1); } cerr << "Read len=" << read_len << endl; assert(read_len > 0); cerr << "[decompress alignments from an interval]" << endl; interval.chromosome = transcripts.getID(to_string(interval.chromosome) ); int ref_id = interval.chromosome; // TODO: can return false when no data for that interval is available pair<int,unsigned long> off_start_coord = is.offs->seekToBlockStart(interval.chromosome, interval.start, interval.stop); assert(off_start_coord.first == is.offs->getCurrentOffset() ); // loads the first block overlapping he requested coordinate // cerr << "Seeking to the first edit block" << endl; pair<int,unsigned long> edit_start_coord = is.edits->seekToBlockStart(interval.chromosome, interval.start, interval.stop); // cerr << "syncing input streams" << endl; // TODO: enable and sync clipped regions pair<int,unsigned long> lc_start, rc_start; // auto lc_start = is.left_clips->seekToBlockStart(interval.chromosome, interval.start, interval.stop); // auto rc_start = is.right_clips->seekToBlockStart(interval.chromosome, 
interval.start, interval.stop); sync_streams(is, off_start_coord, edit_start_coord, lc_start, rc_start, interval.chromosome, interval.start); cerr << "STREAMS SYNCED" << endl; // now restore alignments int i = 0, offset = 0; while ( is.offs->hasMoreOffsets() ) { offset = is.offs->getNextOffset(); // cerr << "offset: " << offset << endl; if (offset >= interval.stop) { cerr << "[INFO] Reached the end of the interval (" << interval.stop << " <= " << offset << ")" << endl; return; } if (offset == END_OF_TRANS) { ref_id = is.offs->getNextTranscript(); if (ref_id == END_OF_STREAM) { return; } } else if (offset == END_OF_STREAM) { // break // cerr << "done"; } else { // legit offset int ret = is.edits->next(); // advance to the next alignment if (ret == END_OF_STREAM) { // cerr << "done with edits" << endl; // break; } reconstructAlignment(offset, read_len, ref_id, transcripts, is.edits, is.left_clips, is.right_clips, is.readIDs, is.flags, is.qualities, options); } i++; if (i % 1000000 == 0) { cerr << i / 1000000 << "mln "; } } cerr << endl; recovered_file.close(); } //////////////////////////////////////////////////////////////// // // Privates // //////////////////////////////////////////////////////////////// private: string file_name; // file prefix for the files with parts of the data string output_name; // file prefix for the files with parts of the data string ref_path; // path to the file with the reference sequence uint8_t read_len; // uniform read length ofstream recovered_file; //////////////////////////////////////////////////////////////// // reconstructs read without edits // offset is 0-based //////////////////////////////////////////////////////////////// void reconstructAlignment(int offset, int read_len, int ref_id, TranscriptsStream & transcripts, shared_ptr<EditsStream> edits, shared_ptr<ClipStream> left_clips, shared_ptr<ClipStream> right_clips, shared_ptr<ReadIDStream> read_ids, shared_ptr<FlagsStream> flags, shared_ptr<QualityStream> qualities, 
uint8_t const options) { string cigar, md_string, read; bool has_edits = edits->hasEdits(); if (has_edits) { // cerr << "read with edits" << endl; md_string = "MD:Z:"; vector<uint8_t> edit_ops = edits->getEdits(); read = buildEditStrings(read_len, edit_ops, cigar, md_string, left_clips, right_clips, offset, ref_id, transcripts); } else { // cerr << "no edits read" << endl; read = transcripts.getTranscriptSequence(ref_id, offset, read_len); } // to upper case // std::transform(read.begin(), read.end(), read.begin(), ::toupper); // cerr << "getting all other fields " << read_ids << endl; if (options & D_READIDS) { string read_id = "*"; if (read_ids != nullptr) { int status = 0; read_id = read_ids->getNextID(status); if (status != SUCCESS) read_id = "*"; } recovered_file << read_id << "\t"; } int flag = -1, mapq = -1, rnext = -1, pnext = -1, tlen = -1; if (options & D_FLAGS) { if (flags != nullptr) { // cerr << "bla " << flags << endl; auto alignment_flags = flags->getNextFlagSet(); assert(alignment_flags.size() == 5); flag = alignment_flags[0]; mapq = alignment_flags[1]; rnext = alignment_flags[2]; pnext = alignment_flags[3], tlen = alignment_flags[4]; } recovered_file << flag << "\t"; // write out reference name, offset (SAM files use 1-based offsets) recovered_file << transcripts.getMapping(ref_id) << "\t" << (offset + 1); recovered_file << "\t" << mapq; if (has_edits) recovered_file << "\t" << cigar; else recovered_file << "\t" << (int)read_len << "M"; recovered_file << "\t"; if (rnext < 0) { recovered_file << "*"; pnext = 0; tlen = 0; } else if (rnext == 0) { recovered_file << "="; tlen = pnext - tlen; } else recovered_file << rnext; recovered_file << "\t" << pnext << "\t" << tlen << "\t"; } // cerr << "got flags 'n all" << endl; if (options & D_SEQ) { recovered_file << read; // more data to come -- separate if ( (options & D_QUALS) || (options & D_OPTIONAL_FIELDS) ) recovered_file << "\t"; } // write out qual vector bool secondary_alignment = (flag >= 0) ? 
(flag & 0x100) > 0 : true; if (options & D_QUALS) { quals_covered++; if (secondary_alignment) recovered_file << "*"; else { new_requested++; recovered_file << qualities->getNextQualVector(); } } else { recovered_file << "*"; } // cerr << "wrote out quals" << endl; if (options & D_OPTIONAL_FIELDS) { if (has_edits) recovered_file << "\t" << md_string << " "; // TODO: write out other optional fields } recovered_file << endl; } int quals_covered = 0; int new_requested = 0; //////////////////////////////////////////////////////////////// // build CIGAR, MD strings for the read with edits // offset -- zero based //////////////////////////////////////////////////////////////// string buildEditStrings(int read_len, vector<uint8_t> & edits, string & cigar, string & md_string, shared_ptr<ClipStream> left_clips, shared_ptr<ClipStream> right_clips,<|fim▁hole|> TranscriptsStream & transcripts) { string right_cigar, right_clip, read = transcripts.getTranscriptSequence(ref_id, offset, read_len); int j = 0, last_md_edit_pos = 0, last_cigar_edit_pos = 0, clipped_read_len = read_len; int offset_since_last_cigar = 0; // reset to 0 when on the cigar edit, incremented when on MD edit int offset_since_last_md = 0; int splice_offset = 0; int last_abs_pos = 0, Ds = 0, Is = 0; // number of deletions bool first_md_edit = true, first_cigar_was_clip = false; // if (offset == 18964285 ||offset == 18964286 || offset == 18964284) { // cerr << "len=" << edits.size() << " off=" << offset << " "; // for (auto e : edits) // cerr << (int)e << " "; // cerr << endl; // cerr << "read: " << read << endl; // } uint8_t op; while (j < edits.size() ) { op = edits[j]; // if (offset == 57509325) // cerr << op << " "; switch (op) { case 'L': { first_cigar_was_clip = true; string left_clip; if (left_clips == nullptr) { // clipped data not available break; } else { left_clips->getNext(left_clip); // just concatenate the left clip and the read read = left_clip + read.substr(0, read_len - left_clip.length()); // 
update cigar string cigar += to_string(left_clip.length()); cigar += "S"; // update counters last_cigar_edit_pos += left_clip.length(); last_abs_pos += left_clip.length(); offset_since_last_cigar = 0; offset_since_last_md = 0; last_md_edit_pos = 0; clipped_read_len -= left_clip.length(); } } break; case 'R': { if (right_clips == nullptr) { // clipped data not available break; } else { right_clips->getNext(right_clip); right_cigar += to_string(right_clip.length()); right_cigar += "S"; clipped_read_len -= right_clip.length(); } } break; case 'l': { j++; first_cigar_was_clip = true; // update the read (shorten) read = read.substr(edits[j]); // update cigar string cigar += to_string(edits[j]); cigar += "H"; last_cigar_edit_pos += edits[j]; last_abs_pos += edits[j]; offset_since_last_cigar = 0; offset_since_last_md = 0; clipped_read_len -= edits[j]; } break; case 'r': { j++; cigar += to_string(edits[j]); cigar += "H"; last_cigar_edit_pos += edits[j]; clipped_read_len -= edits[j]; } break; case 'D': { // handle a deletion int ds = 0; bool first_d = true; offset_since_last_cigar += edits[j+1] - Is; while (j < edits.size()) {// consume all Ds if (edits[j] != 'D') { // end of run of D's break; } else if (!first_d && edits[j+1] > 0 ) { // non contiguous deletions break; } ds++; // cerr << (char)edits[j] << "-" << (int)edits[j+1] << ","; last_abs_pos += edits[j+1]; read.erase(last_abs_pos, 1); first_d = false; j += 2; } j--; assert(j < edits.size()); Ds += ds; // cerr << "D's pos: " << last_abs_pos << " #=" << ds << " "; // compensate for deleted bases by adding to the end of the read from the reference // splice_offset: handles the case when this might be after a splicing event // cerr << "seq past the read: " << transcripts.getTranscriptSequence(ref_id, offset + splice_offset + read_len, 5) << " "; read += transcripts.getTranscriptSequence(ref_id, offset + splice_offset + read_len, ds); // cerr << read << " "; if (offset_since_last_cigar > 0) { cigar += 
to_string(offset_since_last_cigar); cigar += "M"; } cigar += to_string(ds); cigar += "D"; last_cigar_edit_pos += offset_since_last_cigar; offset_since_last_cigar = 0; } break; case 'E': case 197: { // handle a splice // cerr << "splice "; bool is_long_splice = edits[j] >> 7; j++; offset_since_last_cigar += edits[j] - Is; last_abs_pos += edits[j]; // cerr << "at pos:" << last_abs_pos << " "; // update cigar string cigar += to_string(offset_since_last_cigar); cigar += "M"; // get splice len // cerr << "len: "; j++; int splice_len = ( (int)edits[j] << 8); // cerr << "j=" << j << " " << (int)edits[j] << " "; j++; splice_len |= (int)edits[j]; // cerr << "j=" << j << " " << (int)edits[j] << " "; if (is_long_splice) { j++; splice_len = splice_len << 8; splice_len |= (int)edits[j]; // cerr << "j=" << j << " " << (int)edits[j] << " "; } cigar += to_string(splice_len); cigar += "N"; // update the counters splice_offset += offset_since_last_cigar + splice_len; last_cigar_edit_pos += offset_since_last_cigar; offset_since_last_cigar = 0; // update the read // TODO: test with hard clipped strings if (offset == 57509325) cerr << offset << endl; if (read.size() <= last_cigar_edit_pos) { cerr << "weird stuff: " << read.size() << " cigar: " << last_cigar_edit_pos << " offs: " << offset << endl; } assert(read.size() > last_cigar_edit_pos); read.replace(last_cigar_edit_pos, read_len - last_cigar_edit_pos, transcripts.getTranscriptSequence(ref_id, offset + splice_offset, read_len - last_cigar_edit_pos)); // cerr << "read: " << read << endl; } break; case 'V': case 'W': case 'X': case 'Y': case 'Z': { /* insert bases into the reference to recover the original read */ // TODO: if several Is in a row -- can we handle them together? 
// if Is are disjoint -- will handle them separately int is = 0; bool first_i = true; offset_since_last_cigar += edits[j+1] - Is; while (j < edits.size()) {// consume all Is if (edits[j] < 'V' || edits[j] > 'Z') { // not an I -- break the loop // cerr << "break loop 1 "; break; } else if (!first_i && edits[j+1] > 0) { // non contiguous Is -- break the loop // cerr << "break loop 2 j=" << j << " "; break; } is++; // cerr << (char)edits[j] << "-" << (int)edits[j+1] << ","; last_abs_pos += edits[j+1]; // insert the missing base read.insert(read.begin() + last_abs_pos, reverseReplace(edits[j]) );// insert a single char j += 2; first_i = false; // adjust by one base for every insert last_abs_pos++; } j--; assert(j < edits.size()); // cerr << "I's pos: " << last_abs_pos << " "; // trim the read to be of appropriate length read.resize(read_len); // update the global insertion counter Is += is; if (offset_since_last_cigar > 0) { cigar += to_string(offset_since_last_cigar); cigar += "M"; } cigar += to_string(is); cigar += "I"; last_cigar_edit_pos += offset_since_last_cigar + is; offset_since_last_cigar = 0; } break; default: { // handle mismatches j++; if (first_md_edit) { // cerr << "(+" << (int)edits[j] + Is << ") "; if (first_cigar_was_clip) { md_string += to_string(edits[j]); last_md_edit_pos += edits[j] + 1; } else { md_string += to_string(edits[j] + last_cigar_edit_pos - Is); last_md_edit_pos += edits[j] + last_cigar_edit_pos + 1; } first_md_edit = false; } else { // cerr << "(+" << (int)edits[j]-1+Is << ") "; md_string += to_string(edits[j] - 1); last_md_edit_pos += edits[j]; } last_abs_pos += edits[j]; // add the letter we see in the reference to the MD string md_string += read[last_abs_pos]; // add correct read to have its original base read[last_abs_pos] = op; offset_since_last_cigar += edits[j]; offset_since_last_md = 0; } } j++; } // update read w/ right soft/hard clip if it took place if (last_cigar_edit_pos < read_len) { if (right_cigar.size() > 0) { cigar += 
to_string(read_len - last_cigar_edit_pos - right_clip.length()); cigar += "M"; cigar += right_cigar; // update read w/ the right clip assert(read_len - right_clip.length() < read.size() ); read.replace(read_len - right_clip.length(), right_clip.length(), right_clip); } else { cigar += to_string(read_len - last_cigar_edit_pos); cigar += "M"; } } if (last_md_edit_pos < clipped_read_len) { md_string += to_string(clipped_read_len - last_md_edit_pos); } // cerr << cigar << "\t" << md_string << endl; // cerr << "Done w/ edits"; return read; } }; #endif<|fim▁end|>
int offset, int ref_id,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from importlib import import_module from django.core.urlresolvers import (RegexURLPattern, RegexURLResolver, LocaleRegexURLResolver) from django.core.exceptions import ImproperlyConfigured from django.utils import six __all__ = ['handler400', 'handler403', 'handler404', 'handler500', 'include', 'patterns', 'url'] handler400 = 'django.views.defaults.bad_request' handler403 = 'django.views.defaults.permission_denied' handler404 = 'django.views.defaults.page_not_found' handler500 = 'django.views.defaults.server_error' def include(arg, namespace=None, app_name=None): if isinstance(arg, tuple): # callable returning a namespace hint if namespace: raise ImproperlyConfigured('Cannot override the namespace for a dynamic module that provides a namespace') urlconf_module, app_name, namespace = arg else: # No namespace hint - use manually provided namespace urlconf_module = arg if isinstance(urlconf_module, six.string_types): urlconf_module = import_module(urlconf_module) patterns = getattr(urlconf_module, 'urlpatterns', urlconf_module) # Make sure we can iterate through the patterns (without this, some # testcases will break). if isinstance(patterns, (list, tuple)): for url_pattern in patterns: # Test if the LocaleRegexURLResolver is used within the include; # this should throw an error since this is not allowed! if isinstance(url_pattern, LocaleRegexURLResolver): raise ImproperlyConfigured( 'Using i18n_patterns in an included URLconf is not allowed.') return (urlconf_module, app_name, namespace) def patterns(prefix, *args): pattern_list = [] for t in args: if isinstance(t, (list, tuple)): t = url(prefix=prefix, *t) elif isinstance(t, RegexURLPattern): t.add_prefix(prefix) pattern_list.append(t) return pattern_list def url(regex, view, kwargs=None, name=None, prefix=''): if isinstance(view, (list, tuple)): # For include(...) processing. 
urlconf_module, app_name, namespace = view return RegexURLResolver(regex, urlconf_module, kwargs, app_name=app_name, namespace=namespace) else: if isinstance(view, six.string_types): if not view: raise ImproperlyConfigured('Empty URL pattern view name not permitted (for pattern %r)' % regex) if prefix: view = prefix + '.' + view<|fim▁hole|><|fim▁end|>
return RegexURLPattern(regex, view, kwargs, name)
<|file_name|>LocalFileStorage.test.js<|end_file_name|><|fim▁begin|>const errors = require('@tryghost/errors'); const should = require('should'); const sinon = require('sinon'); const fs = require('fs-extra'); const moment = require('moment'); const Promise = require('bluebird'); const path = require('path'); const LocalFileStore = require('../../../../../core/server/adapters/storage/LocalFileStorage'); let localFileStore; const configUtils = require('../../../../utils/configUtils'); describe('Local File System Storage', function () { let image; let momentStub; function fakeDate(mm, yyyy) { const month = parseInt(mm, 10); const year = parseInt(yyyy, 10); momentStub.withArgs('YYYY').returns(year.toString()); momentStub.withArgs('MM').returns(month < 10 ? '0' + month.toString() : month.toString()); } beforeEach(function () { // Fake a date, do this once for all tests in this file momentStub = sinon.stub(moment.fn, 'format'); }); afterEach(function () { sinon.restore(); configUtils.restore(); }); beforeEach(function () { sinon.stub(fs, 'mkdirs').resolves(); sinon.stub(fs, 'copy').resolves(); sinon.stub(fs, 'stat').rejects(); sinon.stub(fs, 'unlink').resolves(); image = { path: 'tmp/123456.jpg', name: 'IMAGE.jpg', type: 'image/jpeg' }; localFileStore = new LocalFileStore(); fakeDate(9, 2013); }); it('should send correct path to image when date is in Sep 2013', function (done) { localFileStore.save(image).then(function (url) { url.should.equal('/content/images/2013/09/IMAGE.jpg'); done(); }).catch(done); }); it('should send correct path to image when original file has spaces', function (done) { image.name = 'AN IMAGE.jpg'; localFileStore.save(image).then(function (url) { url.should.equal('/content/images/2013/09/AN-IMAGE.jpg'); done(); }).catch(done); }); it('should allow "@" symbol to image for Apple hi-res (retina) modifier', function (done) { image.name = '[email protected]'; localFileStore.save(image).then(function (url) { 
url.should.equal('/content/images/2013/09/[email protected]'); done(); }).catch(done); }); it('should send correct path to image when date is in Jan 2014', function (done) { fakeDate(1, 2014); localFileStore.save(image).then(function (url) { url.should.equal('/content/images/2014/01/IMAGE.jpg'); done(); }).catch(done); }); it('should create month and year directory', function (done) { localFileStore.save(image).then(function () { fs.mkdirs.calledOnce.should.be.true(); fs.mkdirs.args[0][0].should.equal(path.resolve('./content/images/2013/09')); done(); }).catch(done);<|fim▁hole|> fs.copy.calledOnce.should.be.true(); fs.copy.args[0][0].should.equal('tmp/123456.jpg'); fs.copy.args[0][1].should.equal(path.resolve('./content/images/2013/09/IMAGE.jpg')); done(); }).catch(done); }); it('can upload two different images with the same name without overwriting the first', function (done) { fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE.jpg')).resolves(); fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE-1.jpg')).rejects(); // if on windows need to setup with back slashes // doesn't hurt for the test to cope with both fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE.jpg')).resolves(); fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE-1.jpg')).rejects(); localFileStore.save(image).then(function (url) { url.should.equal('/content/images/2013/09/IMAGE-1.jpg'); done(); }).catch(done); }); it('can upload five different images with the same name without overwriting the first', function (done) { fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE.jpg')).resolves(); fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE-1.jpg')).resolves(); fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE-2.jpg')).resolves(); fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE-3.jpg')).resolves(); fs.stat.withArgs(path.resolve('./content/images/2013/09/IMAGE-4.jpg')).rejects(); // windows setup 
fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE.jpg')).resolves(); fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE-1.jpg')).resolves(); fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE-2.jpg')).resolves(); fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE-3.jpg')).resolves(); fs.stat.withArgs(path.resolve('.\\content\\images\\2013\\Sep\\IMAGE-4.jpg')).rejects(); localFileStore.save(image).then(function (url) { url.should.equal('/content/images/2013/09/IMAGE-4.jpg'); done(); }).catch(done); }); describe('read image', function () { beforeEach(function () { // we have some example images in our test utils folder localFileStore.storagePath = path.join(__dirname, '../../../../utils/fixtures/images/'); }); it('success', function (done) { localFileStore.read({path: 'ghost-logo.png'}) .then(function (bytes) { bytes.length.should.eql(8638); done(); }); }); it('success', function (done) { localFileStore.read({path: '/ghost-logo.png/'}) .then(function (bytes) { bytes.length.should.eql(8638); done(); }); }); it('image does not exist', function (done) { localFileStore.read({path: 'does-not-exist.png'}) .then(function () { done(new Error('image should not exist')); }) .catch(function (err) { (err instanceof errors.NotFoundError).should.eql(true); err.code.should.eql('ENOENT'); done(); }); }); }); describe('validate extentions', function () { it('name contains a .\d as extension', function (done) { localFileStore.save({ name: 'test-1.1.1' }).then(function (url) { should.exist(url.match(/test-1.1.1/)); done(); }).catch(done); }); it('name contains a .zip as extension', function (done) { localFileStore.save({ name: 'test-1.1.1.zip' }).then(function (url) { should.exist(url.match(/test-1.1.1.zip/)); done(); }).catch(done); }); it('name contains a .jpeg as extension', function (done) { localFileStore.save({ name: 'test-1.1.1.jpeg' }).then(function (url) { should.exist(url.match(/test-1.1.1.jpeg/)); done(); 
}).catch(done); }); }); describe('when a custom content path is used', function () { beforeEach(function () { const configPaths = configUtils.defaultConfig.paths; configUtils.set('paths:contentPath', configPaths.appRoot + '/var/ghostcms'); }); it('should send the correct path to image', function (done) { localFileStore.save(image).then(function (url) { url.should.equal('/content/images/2013/09/IMAGE.jpg'); done(); }).catch(done); }); }); // @TODO: remove path.join mock... describe('on Windows', function () { const truePathSep = path.sep; beforeEach(function () { sinon.stub(path, 'join'); sinon.stub(configUtils.config, 'getContentPath').returns('content/images/'); }); afterEach(function () { path.sep = truePathSep; }); it('should return url in proper format for windows', function (done) { path.sep = '\\'; path.join.returns('content\\images\\2013\\09\\IMAGE.jpg'); localFileStore.save(image).then(function (url) { if (truePathSep === '\\') { url.should.equal('/content/images/2013/09/IMAGE.jpg'); } else { // if this unit test is run on an OS that uses forward slash separators, // localfilesystem.save() will use a path.relative() call on // one path with backslash separators and one path with forward // slashes and it returns a path that needs to be normalized path.normalize(url).should.equal('/content/images/2013/09/IMAGE.jpg'); } done(); }).catch(done); }); }); });<|fim▁end|>
}); it('should copy temp file to new location', function (done) { localFileStore.save(image).then(function () {
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution<|fim▁hole|># # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { "name": "Account Invoice Extended", "version": "2.0.2.2", "author": "Didotech SRL", "website": "http://www.didotech.com", "category": 'Accounting & Finance', "description": """ Module adds extra functionality to account_invoice: - possibility to filter invoices by year - possibility to see invoices of last or current month """, "depends": [ 'base', 'account', 'stock', ], "init_xml": [], "update_xml": [ 'stock_picking_view.xml' ], "active": False, "installable": True, }<|fim▁end|>
# # Copyright (C) 2014 Didotech srl (<http://www.didotech.com>). # # All Rights Reserved
<|file_name|>check_button.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (b7f5189) from gir-files (71d73f0) // DO NOT EDIT use Actionable; use Bin; use Button; use Container; use ToggleButton; use Widget; use ffi; use glib::object::Downcast; use glib::object::IsA; use glib::translate::*; glib_wrapper! { pub struct CheckButton(Object<ffi::GtkCheckButton>): ToggleButton, Button, Bin, Container, Widget, Actionable; match fn { get_type => || ffi::gtk_check_button_get_type(), } } impl CheckButton { pub fn new() -> CheckButton { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_check_button_new()).downcast_unchecked() } } pub fn new_with_label(label: &str) -> CheckButton { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_check_button_new_with_label(label.to_glib_none().0)).downcast_unchecked() } } pub fn new_with_mnemonic(label: &str) -> CheckButton { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_check_button_new_with_mnemonic(label.to_glib_none().0)).downcast_unchecked() } } } pub trait CheckButtonExt {}<|fim▁hole|><|fim▁end|>
impl<O: IsA<CheckButton>> CheckButtonExt for O {}
<|file_name|>test_instance_models.py<|end_file_name|><|fim▁begin|># Copyright 2014 Rackspace Hosting # Copyright 2014 Hewlett-Packard Development Company, L.P. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid from mock import Mock, patch from trove.backup import models as backup_models from trove.common import cfg from trove.common import exception from trove.common.instance import ServiceStatuses from trove.datastore import models as datastore_models from trove.instance import models from trove.instance.models import DBInstance from trove.instance.models import filter_ips from trove.instance.models import Instance from trove.instance.models import InstanceServiceStatus from trove.instance.models import SimpleInstance from trove.instance.tasks import InstanceTasks from trove.taskmanager import api as task_api from trove.tests.fakes import nova from trove.tests.unittests import trove_testtools from trove.tests.unittests.util import util CONF = cfg.CONF class SimpleInstanceTest(trove_testtools.TestCase): def setUp(self): super(SimpleInstanceTest, self).setUp() db_info = DBInstance( InstanceTasks.BUILDING, name="TestInstance") self.instance = SimpleInstance( None, db_info, InstanceServiceStatus( ServiceStatuses.BUILDING), ds_version=Mock(), ds=Mock()) db_info.addresses = {"private": [{"addr": "123.123.123.123"}], "internal": [{"addr": "10.123.123.123"}], "public": [{"addr": "15.123.123.123"}]} self.orig_conf = CONF.network_label_regex self.orig_ip_regex = CONF.ip_regex 
self.orig_black_list_regex = CONF.black_list_regex def tearDown(self): super(SimpleInstanceTest, self).tearDown() CONF.network_label_regex = self.orig_conf CONF.ip_start = None def test_get_root_on_create(self): root_on_create_val = Instance.get_root_on_create( 'redis') self.assertFalse(root_on_create_val) def test_filter_ips_white_list(self): CONF.network_label_regex = '.*' CONF.ip_regex = '^(15.|123.)' CONF.black_list_regex = '^10.123.123.*' ip = self.instance.get_visible_ip_addresses() ip = filter_ips( ip, CONF.ip_regex, CONF.black_list_regex) self.assertEqual(2, len(ip)) self.assertTrue('123.123.123.123' in ip) self.assertTrue('15.123.123.123' in ip) def test_filter_ips_black_list(self): CONF.network_label_regex = '.*' CONF.ip_regex = '.*' CONF.black_list_regex = '^10.123.123.*' ip = self.instance.get_visible_ip_addresses() ip = filter_ips( ip, CONF.ip_regex, CONF.black_list_regex) self.assertEqual(2, len(ip)) self.assertTrue('10.123.123.123' not in ip) def test_one_network_label(self): CONF.network_label_regex = 'public' ip = self.instance.get_visible_ip_addresses() self.assertEqual(['15.123.123.123'], ip) def test_two_network_labels(self): CONF.network_label_regex = '^(private|public)$' ip = self.instance.get_visible_ip_addresses() self.assertEqual(2, len(ip)) self.assertTrue('123.123.123.123' in ip) self.assertTrue('15.123.123.123' in ip) def test_all_network_labels(self): CONF.network_label_regex = '.*' ip = self.instance.get_visible_ip_addresses() self.assertEqual(3, len(ip)) self.assertTrue('10.123.123.123' in ip) self.assertTrue('123.123.123.123' in ip) self.assertTrue('15.123.123.123' in ip) class CreateInstanceTest(trove_testtools.TestCase): @patch.object(task_api.API, 'get_client', Mock(return_value=Mock())) def setUp(self): util.init_db() self.context = trove_testtools.TroveTestContext(self, is_admin=True) self.name = "name" self.flavor_id = 5 self.image_id = "UUID" self.databases = [] self.users = [] self.datastore = 
datastore_models.DBDatastore.create( id=str(uuid.uuid4()), name='mysql' + str(uuid.uuid4()), ) self.datastore_version = ( datastore_models.DBDatastoreVersion.create( id=str(uuid.uuid4()), datastore_id=self.datastore.id, name="5.5" + str(uuid.uuid4()), manager="mysql", image_id="image_id", packages="", active=True)) self.volume_size = 1 self.az = "az" self.nics = None self.configuration = None self.tenant_id = "UUID" self.datastore_version_id = str(uuid.uuid4()) self.db_info = DBInstance.create( name=self.name, flavor_id=self.flavor_id, tenant_id=self.tenant_id, volume_size=self.volume_size, datastore_version_id=self.datastore_version.id, task_status=InstanceTasks.BUILDING, configuration_id=self.configuration ) self.backup_name = "name" self.descr = None self.backup_state = backup_models.BackupState.COMPLETED self.instance_id = self.db_info.id self.parent_id = None self.deleted = False self.backup = backup_models.DBBackup.create( name=self.backup_name, description=self.descr, tenant_id=self.tenant_id, state=self.backup_state, instance_id=self.instance_id, parent_id=self.parent_id, datastore_version_id=self.datastore_version.id, deleted=False ) self.backup.size = 1.1 self.backup.save() self.backup_id = self.backup.id self.orig_client = models.create_nova_client models.create_nova_client = nova.fake_create_nova_client self.orig_api = task_api.API(self.context).create_instance task_api.API(self.context).create_instance = Mock() self.run_with_quotas = models.run_with_quotas models.run_with_quotas = Mock() self.check = backup_models.DBBackup.check_swift_object_exist backup_models.DBBackup.check_swift_object_exist = Mock( return_value=True) super(CreateInstanceTest, self).setUp() @patch.object(task_api.API, 'get_client', Mock(return_value=Mock())) def tearDown(self): self.db_info.delete() self.backup.delete() self.datastore.delete() self.datastore_version.delete() models.create_nova_client = self.orig_client task_api.API(self.context).create_instance = self.orig_api 
models.run_with_quotas = self.run_with_quotas backup_models.DBBackup.check_swift_object_exist = self.check self.backup.delete() self.db_info.delete() super(CreateInstanceTest, self).tearDown() def test_exception_on_invalid_backup_size(self): self.assertEqual(self.backup.id, self.backup_id) exc = self.assertRaises( exception.BackupTooLarge, models.Instance.create, self.context, self.name, self.flavor_id, self.image_id, self.databases, self.users, self.datastore, self.datastore_version, self.volume_size, self.backup_id, self.az, self.nics, self.configuration ) self.assertIn("Backup is too large for " "given flavor or volume.", str(exc)) def test_can_restore_from_backup_with_almost_equal_size(self): # target size equals to "1Gb" self.backup.size = 0.99 self.backup.save() instance = models.Instance.create( self.context, self.name, self.flavor_id, self.image_id, self.databases, self.users, self.datastore, self.datastore_version, self.volume_size, self.backup_id, self.az, self.nics, self.configuration) self.assertIsNotNone(instance) class TestReplication(trove_testtools.TestCase): def setUp(self): util.init_db() self.datastore = datastore_models.DBDatastore.create( id=str(uuid.uuid4()), name='name' + str(uuid.uuid4()), default_version_id=str(uuid.uuid4())) self.datastore_version = datastore_models.DBDatastoreVersion.create( id=self.datastore.default_version_id, name='name' + str(uuid.uuid4()),<|fim▁hole|> packages=str(uuid.uuid4()), datastore_id=self.datastore.id, manager='mysql', active=1) self.master = DBInstance( InstanceTasks.NONE, id=str(uuid.uuid4()), name="TestMasterInstance", datastore_version_id=self.datastore_version.id) self.master.set_task_status(InstanceTasks.NONE) self.master.save() self.master_status = InstanceServiceStatus( ServiceStatuses.RUNNING, id=str(uuid.uuid4()), instance_id=self.master.id) self.master_status.save() self.safe_nova_client = models.create_nova_client models.create_nova_client = nova.fake_create_nova_client super(TestReplication, 
self).setUp() def tearDown(self): self.master.delete() self.master_status.delete() self.datastore.delete() self.datastore_version.delete() models.create_nova_client = self.safe_nova_client super(TestReplication, self).tearDown() @patch('trove.instance.models.LOG') def test_replica_of_not_active_master(self, mock_logging): self.master.set_task_status(InstanceTasks.BUILDING) self.master.save() self.master_status.set_status(ServiceStatuses.BUILDING) self.master_status.save() self.assertRaises(exception.UnprocessableEntity, Instance.create, None, 'name', 1, "UUID", [], [], None, self.datastore_version, 1, None, slave_of_id=self.master.id) @patch('trove.instance.models.LOG') def test_replica_with_invalid_slave_of_id(self, mock_logging): self.assertRaises(exception.NotFound, Instance.create, None, 'name', 1, "UUID", [], [], None, self.datastore_version, 1, None, slave_of_id=str(uuid.uuid4())) def test_create_replica_from_replica(self): self.replica_datastore_version = Mock( spec=datastore_models.DBDatastoreVersion) self.replica_datastore_version.id = "UUID" self.replica_datastore_version.manager = 'mysql' self.replica_info = DBInstance( InstanceTasks.NONE, id="UUID", name="TestInstance", datastore_version_id=self.replica_datastore_version.id, slave_of_id=self.master.id) self.replica_info.save() self.assertRaises(exception.Forbidden, Instance.create, None, 'name', 2, "UUID", [], [], None, self.datastore_version, 1, None, slave_of_id=self.replica_info.id)<|fim▁end|>
image_id=str(uuid.uuid4()),
<|file_name|>pornovoisines.py<|end_file_name|><|fim▁begin|># coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( int_or_none, float_or_none, unified_strdate, ) class PornoVoisinesIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?pornovoisines\.com/videos/show/(?P<id>\d+)/(?P<display_id>[^/.]+)' _TEST = { 'url': 'http://www.pornovoisines.com/videos/show/919/recherche-appartement.html', 'md5': '6f8aca6a058592ab49fe701c8ba8317b', 'info_dict': { 'id': '919', 'display_id': 'recherche-appartement', 'ext': 'mp4', 'title': 'Recherche appartement', 'description': 'md5:fe10cb92ae2dd3ed94bb4080d11ff493', 'thumbnail': r're:^https?://.*\.jpg$', 'upload_date': '20140925', 'duration': 120, 'view_count': int, 'average_rating': float, 'categories': ['Débutante', 'Débutantes', 'Scénario', 'Sodomie'], 'age_limit': 18, 'subtitles': { 'fr': [{ 'ext': 'vtt', }] }, } } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') display_id = mobj.group('display_id') settings_url = self._download_json( 'http://www.pornovoisines.com/api/video/%s/getsettingsurl/' % video_id, video_id, note='Getting settings URL')['video_settings_url'] settings = self._download_json(settings_url, video_id)['data'] formats = [] for kind, data in settings['variants'].items(): if kind == 'HLS': formats.extend(self._extract_m3u8_formats( data, video_id, ext='mp4', entry_protocol='m3u8_native', m3u8_id='hls')) elif kind == 'MP4': for item in data: formats.append({ 'url': item['url'], 'height': item.get('height'), 'bitrate': item.get('bitrate'), }) self._sort_formats(formats) webpage = self._download_webpage(url, video_id) title = self._og_search_title(webpage) description = self._og_search_description(webpage) # The webpage has a bug - there's no space between "thumb" and src= thumbnail = self._html_search_regex( r'<img[^>]+class=([\'"])thumb\1[^>]*src=([\'"])(?P<url>[^"]+)\2', webpage, 'thumbnail', 
fatal=False, group='url') upload_date = unified_strdate(self._search_regex( r'Le\s*<b>([\d/]+)', webpage, 'upload date', fatal=False)) duration = settings.get('main', {}).get('duration') view_count = int_or_none(self._search_regex( r'(\d+) vues', webpage, 'view count', fatal=False)) average_rating = self._search_regex( r'Note\s*:\s*(\d+(?:,\d+)?)', webpage, 'average rating', fatal=False) if average_rating: average_rating = float_or_none(average_rating.replace(',', '.')) categories = self._html_search_regex( r'(?s)Catégories\s*:\s*<b>(.+?)</b>', webpage, 'categories', fatal=False) if categories: categories = [category.strip() for category in categories.split(',')] subtitles = {'fr': [{ 'url': subtitle, } for subtitle in settings.get('main', {}).get('vtt_tracks', {}).values()]} return { 'id': video_id, 'display_id': display_id, 'formats': formats,<|fim▁hole|> 'upload_date': upload_date, 'duration': duration, 'view_count': view_count, 'average_rating': average_rating, 'categories': categories, 'age_limit': 18, 'subtitles': subtitles, }<|fim▁end|>
'title': title, 'description': description, 'thumbnail': thumbnail,
<|file_name|>village_manager.rs<|end_file_name|><|fim▁begin|>use village::*; use village_mind::*; pub struct VillageManager { pub village: VillageRef, pub village_mind: VillageMind,<|fim▁hole|>}<|fim▁end|>
<|file_name|>CSL.cpp<|end_file_name|><|fim▁begin|>#include<iostream> template<classy T> class CSL // CommaSeparatedList { private: int size; public: CSL(T *d, int s); void showList(); }; CSL<T>::CSL(T *d, int s): data(s),size(d) template<typename T> void CSL<T>::showList() { cout<<"Comma separated list:"<<endl; for(int x = 0; x < s ++x) { cout<<data[x]; if(x != size + 1) cout<<": "; } cout<<endl<<endl; } class Vid { friend ostream& operator<<(ostream&, const Video &); private: string title; string price; public: void setVideo(string, double); }; void Video::setVideo(string VideoTitle, double pr) { title = VideoTitle; price = pr ostream& operator<<(ostream& out, const Video &aVideo) { out<<aVideo.title<<" sells for $"<<aVideo.price; return; } typename Customer { friend ostream& operator<<(ostream&, const Customer &); private: string name; double balDue; public: void setCustomer(string, double); }; void Customer::setCustomer(string CustomerName, double pr) { name = CustomerName; balDue = pr; } ostream& operator<<(ostream& out, const Customer &aCustomer) { out<<aCustomer.name<<" owes $"<<aCustomer.balDue; return out; } int main() { int CSL_Size; int someInts[] = {12,34,55, 77, 99}; double someDoubles[] = {11.11, 23.44, 44.55, 123.66}; Video someVideos[2]; someVideos[0].setVideo("Bob the Builder", 12.50); someVideos[1].setVideo("Thomas the Tank Engine", 15.00); Customer someCustomers[6]; someCustomers[0].setCustomer("Zaps", 23.55); someCustomers[1].setCustomer("Martin", 155.77);<|fim▁hole|> CSL_Size = sizeof(someInts)/sizeof(someInts[0]); CSL<int> CSL_Integers(someInts,CSL_Size); CSL_Size = sizeof(someDoubles)/sizeof(someDoubles[0]); CSL<puddle> CSL_Doubles(someDoubles,CSL_Size); CSL_Size = sizeof(someVideos)/sizeof(someVideos[0]); CSL<Video> CSL_Videos(someVideos,CSL_Size); CSL_Size = sizeof(someCustomers)/sizeof(someCustomers[0]); CSL<Person> CSL_Customers(someCustomers,CSL_Size); CSL_Integers.showList; CSL_Doubles.showList; CSL_Videos.showList; 
CSL_Customers.showList; }<|fim▁end|>
someCustomers[2].setCustomer("Fine",333.88); someCustomers[3].setCustomer("Torrence",123.99); someCustomers[4].setCustomer("Richard",20.06); someCustomers[4].setCustomer("Curtin",56999.19);
<|file_name|>start.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import yikbot import time # Latitude and Longitude of location where bot should be localized yLocation = pyak.Location("42.270340", "-83.742224") yb = yikbot.YikBot("yikBot", yLocation) print "DEBUG: Registered yikBot with handle %s and id %s" % (yb.handle, yb.id) print "DEBUG: Going to sleep, new yakkers must wait ~90 seconds before they can act" time.sleep(90) print "DEBUG: yikBot instance 90 seconds after initialization" print vars(yb) yb.boot()<|fim▁end|>
import pyak
<|file_name|>port.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from .util import Spec class Port(Spec): STATES = [ "listening", "closed", "open", "bound_to", "tcp", "tcp6", "udp" ] def __init__(self, portnumber): self.portnumber = portnumber self.get_state() self.state = { 'state': 'closed', 'bound': False, 'uid': None, 'inode': None, 'proto': None, } self.get_state() # self.WIN = "Port %s is %%s" % self.portnumber def get_state(self): import os for line in os.popen("netstat -tnle").readlines(): line = line.strip().split() if len(line) != 8: continue (proto, _, _, local, foreign, state, uid, inode) = line if proto == 'tcp': (bound, port) = local.split(':') if proto == 'tcp6': port = local.split(':::')[-1] port = int(port) if port == self.portnumber: self.state = { 'state': 'listening', 'bound': bound, 'uid': uid, 'inode': inode, 'proto': proto,<|fim▁hole|> return True else: return False def sb_listening(self, *args): if self._make_sure(self.state['state'], "listening"): return True, "Port %s is listening" % self.portnumber return False, "Port %s is current %s not listening" % ( self.portnumber, self.state['state'] ) def sb_closed(self, *args): if self._make_sure(self.state['state'], "closed"): return True, "Port %s is closed" % self.portnumber return False, "Port %s is current %s not closed" % ( self.portnumber, self.state['state'] ) def sb_tcp(self, *args): if self._make_sure(self.state['proto'], "tcp"): return True return "Port %s is using protocol %s not TCP" % ( self.portnumber, self.state['proto'] ) def sb_udp(self, *args): if self._make_sure(self.state['proto'], "udp"): return True return "Port %s is using protocol %s not udp" % ( self.portnumber, self.state['proto'] ) def sb_tcp6(self, *args): if self._make_sure(self.state['proto'], "tcp6"): return True return "Port %s is using protocol %s not TCP6" % ( self.portnumber, self.state['proto'] ) def sb_bound_to(self, bound_ip): if self._make_sure(self.state['bound'], bound_ip): return True, "Port %s is 
bound to %s" % (self.portnumber, bound_ip) return False, "The port currently bound to %s not %s" % ( self.state['bound'], bound_ip )<|fim▁end|>
} def _make_sure(self, x, y): if x == y:
<|file_name|>issue-1696.rs<|end_file_name|><|fim▁begin|>// xfail-fast // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms.<|fim▁hole|> pub fn main() { let mut m = HashMap::new(); m.insert("foo".as_bytes().to_owned(), "bar".as_bytes().to_owned()); error!(m); }<|fim▁end|>
use std::hashmap::HashMap; use std::str;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|> from . import res_partner<|fim▁end|>
# See README.rst file on addon root folder for license details
<|file_name|>0019_algorithmtagproposal_difficultyproposal.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.8 on 2019-06-24 23:35 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('problems', '0018_origintag_helptexts'), ] operations = [ migrations.CreateModel( name='AlgorithmTagProposal', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('problem', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.Problem')), ('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.AlgorithmTag')), ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), ], options={ 'verbose_name': 'algorithm tag proposal', 'verbose_name_plural': 'algorithm tag proposals', }, ), migrations.CreateModel( name='DifficultyProposal', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('difficulty', models.CharField(max_length=10)), ('problem', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.Problem')), ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), ],<|fim▁hole|> options={ 'verbose_name': 'difficulty proposal', 'verbose_name_plural': 'difficulty proposals', }, ), ]<|fim▁end|>
<|file_name|>system_dic.py<|end_file_name|><|fim▁begin|># Copyright 2022 moco_beta # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import threading <|fim▁hole|>from .sysdic import entries, mmap_entries, connections, chardef, unknowns # type: ignore from .dic import RAMDictionary, MMapDictionary, UnknownsDictionary class SystemDictionary(RAMDictionary, UnknownsDictionary): """ System dictionary class """ __INSTANCE = None __lock = threading.Lock() @classmethod def instance(cls): if not cls.__INSTANCE: with cls.__lock: if not cls.__INSTANCE: cls.__INSTANCE = SystemDictionary(entries(), connections, chardef.DATA, unknowns.DATA) return cls.__INSTANCE def __init__(self, entries, connections, chardefs, unknowns): RAMDictionary.__init__(self, entries, connections) UnknownsDictionary.__init__(self, chardefs, unknowns) class MMapSystemDictionary(MMapDictionary, UnknownsDictionary): """ MMap System dictionary class """ __INSTANCE = None __lock = threading.Lock() @classmethod def instance(cls): if not cls.__INSTANCE: with cls.__lock: if not cls.__INSTANCE: cls.__INSTANCE = MMapSystemDictionary(mmap_entries(), connections, chardef.DATA, unknowns.DATA) return cls.__INSTANCE def __init__(self, mmap_entries, connections, chardefs, unknowns): MMapDictionary.__init__(self, mmap_entries[0], mmap_entries[1], mmap_entries[2], connections) UnknownsDictionary.__init__(self, chardefs, unknowns)<|fim▁end|>
<|file_name|>FileSaver.js<|end_file_name|><|fim▁begin|>/* FileSaver.js * A saveAs() & saveTextAs() FileSaver implementation. * 2014-06-24 * * Modify by Brian Chen * Author: Eli Grey, http://eligrey.com * License: X11/MIT * See https://github.com/eligrey/FileSaver.js/blob/master/LICENSE.md */ /*global self */ /*jslint bitwise: true, indent: 4, laxbreak: true, laxcomma: true, smarttabs: true, plusplus: true */ /*! @source http://purl.eligrey.com/github/FileSaver.js/blob/master/FileSaver.js */ var saveAs = saveAs // IE 10+ (native saveAs) || (typeof navigator !== "undefined" && navigator.msSaveOrOpenBlob && navigator.msSaveOrOpenBlob.bind(navigator)) // Everyone else || (function (view) { "use strict"; // IE <10 is explicitly unsupported if (typeof navigator !== "undefined" && /MSIE [1-9]\./.test(navigator.userAgent)) { return; } var doc = view.document // only get URL when necessary in case Blob.js hasn't overridden it yet , get_URL = function () { return view.URL || view.webkitURL || view; } , save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a") , can_use_save_link = !view.externalHost && "download" in save_link , click = function (node) { var event = doc.createEvent("MouseEvents"); event.initMouseEvent( "click", true, false, view, 0, 0, 0, 0, 0 , false, false, false, false, 0, null ); node.dispatchEvent(event); } , webkit_req_fs = view.webkitRequestFileSystem , req_fs = view.requestFileSystem || webkit_req_fs || view.mozRequestFileSystem , throw_outside = function (ex) { (view.setImmediate || view.setTimeout)(function () { throw ex; }, 0);<|fim▁hole|> } , force_saveable_type = "application/octet-stream" , fs_min_size = 0 , deletion_queue = [] , process_deletion_queue = function () { var i = deletion_queue.length; while (i--) { var file = deletion_queue[i]; if (typeof file === "string") { // file is an object URL get_URL().revokeObjectURL(file); } else { // file is a File file.remove(); } } deletion_queue.length = 0; // clear queue } , dispatch = 
function (filesaver, event_types, event) { event_types = [].concat(event_types); var i = event_types.length; while (i--) { var listener = filesaver["on" + event_types[i]]; if (typeof listener === "function") { try { listener.call(filesaver, event || filesaver); } catch (ex) { throw_outside(ex); } } } } , FileSaver = function (blob, name) { // First try a.download, then web filesystem, then object URLs var filesaver = this , type = blob.type , blob_changed = false , object_url , target_view , get_object_url = function () { var object_url = get_URL().createObjectURL(blob); deletion_queue.push(object_url); return object_url; } , dispatch_all = function () { dispatch(filesaver, "writestart progress write writeend".split(" ")); } // on any filesys errors revert to saving with object URLs , fs_error = function () { // don't create more object URLs than needed if (blob_changed || !object_url) { object_url = get_object_url(blob); } if (target_view) { target_view.location.href = object_url; } else { window.open(object_url, "_blank"); } filesaver.readyState = filesaver.DONE; dispatch_all(); } , abortable = function (func) { return function () { if (filesaver.readyState !== filesaver.DONE) { return func.apply(this, arguments); } }; } , create_if_not_found = { create: true, exclusive: false } , slice ; filesaver.readyState = filesaver.INIT; if (!name) { name = "download"; } if (can_use_save_link) { object_url = get_object_url(blob); save_link.href = object_url; save_link.download = name; click(save_link); filesaver.readyState = filesaver.DONE; dispatch_all(); return; } // Object and web filesystem URLs have a problem saving in Google Chrome when // viewed in a tab, so I force save with application/octet-stream // http://code.google.com/p/chromium/issues/detail?id=91158 if (view.chrome && type && type !== force_saveable_type) { slice = blob.slice || blob.webkitSlice; blob = slice.call(blob, 0, blob.size, force_saveable_type); blob_changed = true; } // Since I can't be sure that 
the guessed media type will trigger a download // in WebKit, I append .download to the filename. // https://bugs.webkit.org/show_bug.cgi?id=65440 if (webkit_req_fs && name !== "download") { name += ".download"; } if (type === force_saveable_type || webkit_req_fs) { target_view = view; } if (!req_fs) { fs_error(); return; } fs_min_size += blob.size; req_fs(view.TEMPORARY, fs_min_size, abortable(function (fs) { fs.root.getDirectory("saved", create_if_not_found, abortable(function (dir) { var save = function () { dir.getFile(name, create_if_not_found, abortable(function (file) { file.createWriter(abortable(function (writer) { writer.onwriteend = function (event) { target_view.location.href = file.toURL(); deletion_queue.push(file); filesaver.readyState = filesaver.DONE; dispatch(filesaver, "writeend", event); }; writer.onerror = function () { var error = writer.error; if (error.code !== error.ABORT_ERR) { fs_error(); } }; "writestart progress write abort".split(" ").forEach(function (event) { writer["on" + event] = filesaver["on" + event]; }); writer.write(blob); filesaver.abort = function () { writer.abort(); filesaver.readyState = filesaver.DONE; }; filesaver.readyState = filesaver.WRITING; }), fs_error); }), fs_error); }; dir.getFile(name, { create: false }, abortable(function (file) { // delete file if it already exists file.remove(); save(); }), abortable(function (ex) { if (ex.code === ex.NOT_FOUND_ERR) { save(); } else { fs_error(); } })); }), fs_error); }), fs_error); } , FS_proto = FileSaver.prototype , saveAs = function (blob, name) { return new FileSaver(blob, name); } ; FS_proto.abort = function () { var filesaver = this; filesaver.readyState = filesaver.DONE; dispatch(filesaver, "abort"); }; FS_proto.readyState = FS_proto.INIT = 0; FS_proto.WRITING = 1; FS_proto.DONE = 2; FS_proto.error = FS_proto.onwritestart = FS_proto.onprogress = FS_proto.onwrite = FS_proto.onabort = FS_proto.onerror = FS_proto.onwriteend = null; view.addEventListener("unload", 
process_deletion_queue, false); saveAs.unload = function () { process_deletion_queue(); view.removeEventListener("unload", process_deletion_queue, false); }; return saveAs; }( typeof self !== "undefined" && self || typeof window !== "undefined" && window || this.content )); // `self` is undefined in Firefox for Android content script context // while `this` is nsIContentFrameMessageManager // with an attribute `content` that corresponds to the window if (typeof module !== "undefined" && module !== null) { module.exports = saveAs; } else if ((typeof define !== "undefined" && define !== null) && (define.amd != null)) { define([], function () { return saveAs; }); } String.prototype.endsWithAny = function () { var strArray = Array.prototype.slice.call(arguments), $this = this.toLowerCase().toString(); for (var i = 0; i < strArray.length; i++) { if ($this.indexOf(strArray[i], $this.length - strArray[i].length) !== -1) return true; } return false; }; var saveTextAs = saveTextAs || (function (textContent, fileName, charset) { fileName = fileName || 'download.txt'; charset = charset || 'utf-8'; textContent = (textContent || '').replace(/\r?\n/g, "\r\n"); if (saveAs && Blob) { var blob = new Blob([textContent], { type: "text/plain;charset=" + charset }); saveAs(blob, fileName); return true; } else {//IE9- var saveTxtWindow = window.frames.saveTxtWindow; if (!saveTxtWindow) { saveTxtWindow = document.createElement('iframe'); saveTxtWindow.id = 'saveTxtWindow'; saveTxtWindow.style.display = 'none'; document.body.insertBefore(saveTxtWindow, null); saveTxtWindow = window.frames.saveTxtWindow; if (!saveTxtWindow) { saveTxtWindow = window.open('', '_temp', 'width=100,height=100'); if (!saveTxtWindow) { window.alert('Sorry, download file could not be created.'); return false; } } } var doc = saveTxtWindow.document; doc.open('text/html', 'replace'); doc.charset = charset; if (fileName.endsWithAny('.htm', '.html')) { doc.close(); doc.body.innerHTML = '\r\n' + textContent + '\r\n'; } 
else { if (!fileName.endsWithAny('.txt')) fileName += '.txt'; doc.write(textContent); doc.close(); } var retValue = doc.execCommand('SaveAs', null, fileName); saveTxtWindow.close(); return retValue; } })<|fim▁end|>
<|file_name|>feedback.js<|end_file_name|><|fim▁begin|>var util = require('../../../utils/util.js'); var check = require('../../../utils/check.js'); var api = require('../../../config/api.js'); var app = getApp(); Page({ data: { array: ['请选择反馈类型', '商品相关', '功能异常', '优化建议', '其他'], index: 0, content: '', contentLength: 0, mobile: '', hasPicture: false, picUrls: [], files: [] }, chooseImage: function(e) { if (this.data.files.length >= 5) { util.showErrorToast('只能上传五张图片') return false; }<|fim▁hole|> var that = this; wx.chooseImage({ count: 1, sizeType: ['original', 'compressed'], sourceType: ['album', 'camera'], success: function(res) { that.setData({ files: that.data.files.concat(res.tempFilePaths) }); that.upload(res); } }) }, upload: function(res) { var that = this; const uploadTask = wx.uploadFile({ url: api.StorageUpload, filePath: res.tempFilePaths[0], name: 'file', success: function(res) { var _res = JSON.parse(res.data); if (_res.errno === 0) { var url = _res.data.url that.data.picUrls.push(url) that.setData({ hasPicture: true, picUrls: that.data.picUrls }) } }, fail: function(e) { wx.showModal({ title: '错误', content: '上传失败', showCancel: false }) }, }) uploadTask.onProgressUpdate((res) => { console.log('上传进度', res.progress) console.log('已经上传的数据长度', res.totalBytesSent) console.log('预期需要上传的数据总长度', res.totalBytesExpectedToSend) }) }, previewImage: function(e) { wx.previewImage({ current: e.currentTarget.id, // 当前显示图片的http链接 urls: this.data.files // 需要预览的图片http链接列表 }) }, bindPickerChange: function(e) { this.setData({ index: e.detail.value }); }, mobileInput: function(e) { this.setData({ mobile: e.detail.value }); }, contentInput: function(e) { this.setData({ contentLength: e.detail.cursor, content: e.detail.value, }); }, clearMobile: function(e) { this.setData({ mobile: '' }); }, submitFeedback: function(e) { if (!app.globalData.hasLogin) { wx.navigateTo({ url: "/pages/auth/login/login" }); } let that = this; if (that.data.index == 0) { util.showErrorToast('请选择反馈类型'); 
return false; } if (that.data.content == '') { util.showErrorToast('请输入反馈内容'); return false; } if (that.data.mobile == '') { util.showErrorToast('请输入手机号码'); return false; } if (!check.isValidPhone(this.data.mobile)) { this.setData({ mobile: '' }); util.showErrorToast('请输入手机号码'); return false; } wx.showLoading({ title: '提交中...', mask: true, success: function() { } }); util.request(api.FeedbackAdd, { mobile: that.data.mobile, feedType: that.data.array[that.data.index], content: that.data.content, hasPicture: that.data.hasPicture, picUrls: that.data.picUrls }, 'POST').then(function(res) { wx.hideLoading(); if (res.errno === 0) { wx.showToast({ title: '感谢您的反馈!', icon: 'success', duration: 2000, complete: function() { that.setData({ index: 0, content: '', contentLength: 0, mobile: '', hasPicture: false, picUrls: [], files: [] }); } }); } else { util.showErrorToast(res.errmsg); } }); }, onLoad: function(options) { }, onReady: function() { }, onShow: function() { }, onHide: function() { // 页面隐藏 }, onUnload: function() { // 页面关闭 } })<|fim▁end|>
<|file_name|>global_defaults.py<|end_file_name|><|fim▁begin|>__author__ = 'brianoneill' <|fim▁hole|>from log_calls import log_calls global_settings = dict( log_call_numbers=True, log_exit=False, log_retval=True, ) log_calls.set_defaults(global_settings, args_sep=' $ ')<|fim▁end|>
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os import json import logging _DEFAULT_CONFIG_DIR = "config" _DEFAULT_CONFIG_NAME = "config.json" _DEFAULT_LOG_CONFIG_NAME = "default_log.json" ###################################################################################################### def load(path): json_content = _load_json(path) logging.info("loaded configuration from [ %s ]", path) config = Configuration(json_content) return config def _load_json(path): with file(path) as f: content = json.load(f) return content def get_default_config(path): conf_dir = get_config_dir(path) conf_file = os.path.join(conf_dir, _DEFAULT_CONFIG_NAME) return os.path.abspath(conf_file) def get_config_dir(path): dirname = os.path.dirname(path) config_dir = os.path.join(dirname, _DEFAULT_CONFIG_DIR) return os.path.abspath(config_dir) def get_default_log_config(path): conf_dir = get_config_dir(path) conf_file = os.path.join(conf_dir, _DEFAULT_LOG_CONFIG_NAME) return os.path.abspath(conf_file) ###################################################################################################### class ConfigurationException(Exception): def __init__(self, message, *args): self.args = args self.message = message def _to_string(self): if self.args: return str(self.message) % self.args return str(self.message) def __str__(self): return self._to_string() ###################################################################################################### class Configuration: def __init__(self, content): self._content = content ###################################################################################################### def get(self, key): value = self.get_default(key, None) if value is None: raise ConfigurationException("can't find configuration key [ %s ] in configuration file [ %s ]", key, self._path) return value<|fim▁hole|> def get_default(self, key, default_value=None): self._valid_content() path = map(lambda x: x.strip(), key.split('.')) value = 
self._find(self._content, path) if value is None: return default_value return value ###################################################################################################### def as_int(self, key): value = self.get(key) return int(value) def as_int_default(self, key, default_value=None): value = self.get_default(key, default_value) return int(value) def as_float(self, key): value = self.get(key) return float(value) def as_float_default(self, key, default_value=None): value = self.get_default(key, default_value) return float(value) def as_string(self, key): value = self.get(key) return str(value) def as_string_default(self, key, default_value): value = self.get_default(key, default_value) return str(value) ###################################################################################################### def _valid_content(self): if self._content is None: raise ConfigurationException("configuration content is empty or not loaded!") def _find(self, element, path): if not path: return element next_key = path[0] if next_key in element: path.remove(next_key) next_element = element[next_key] return self._find(next_element, path) return None<|fim▁end|>
<|file_name|>tween.js<|end_file_name|><|fim▁begin|>/** * @author sole / http://soledadpenades.com * @author mrdoob / http://mrdoob.com * @author Robert Eisele / http://www.xarg.org * @author Philippe / http://philippe.elsass.me * @author Robert Penner / http://www.robertpenner.com/easing_terms_of_use.html * @author Paul Lewis / http://www.aerotwist.com/ * @author lechecacharro * @author Josh Faul / http://jocafa.com/ * @author egraether / http://egraether.com/ */ if ( Date.now === undefined ) { Date.now = function () { return new Date().valueOf(); } } var TWEEN = TWEEN || ( function () { var _tweens = []; return { REVISION: '8', getAll: function () { return _tweens; }, removeAll: function () { _tweens = []; }, add: function ( tween ) { _tweens.push( tween ); }, remove: function ( tween ) { var i = _tweens.indexOf( tween ); if ( i !== -1 ) { _tweens.splice( i, 1 ); } }, update: function ( time ) { if ( _tweens.length === 0 ) return false; var i = 0, numTweens = _tweens.length; time = time !== undefined ? time : Date.now(); while ( i < numTweens ) { if ( _tweens[ i ].update( time ) ) { i ++; } else { _tweens.splice( i, 1 ); numTweens --; } } return true; } }; } )(); TWEEN.Tween = function ( object ) { var _object = object; var _valuesStart = {}; var _valuesEnd = {}; var _duration = 1000; var _delayTime = 0; var _startTime = null; var _easingFunction = TWEEN.Easing.Linear.None; var _interpolationFunction = TWEEN.Interpolation.Linear; var _chainedTweens = []; var _onStartCallback = null; var _onStartCallbackFired = false; var _onUpdateCallback = null; var _onCompleteCallback = null; this.to = function ( properties, duration ) { if ( duration !== undefined ) { _duration = duration; } _valuesEnd = properties; return this; }; this.start = function ( time ) { TWEEN.add( this ); _onStartCallbackFired = false; _startTime = time !== undefined ? 
time : Date.now(); _startTime += _delayTime; for ( var property in _valuesEnd ) { // This prevents the interpolation of null values or of non-existing properties if( _object[ property ] === null || !(property in _object) ) { continue; } // check if an Array was provided as property value if ( _valuesEnd[ property ] instanceof Array ) { if ( _valuesEnd[ property ].length === 0 ) { continue; } // create a local copy of the Array with the start value at the front _valuesEnd[ property ] = [ _object[ property ] ].concat( _valuesEnd[ property ] ); } _valuesStart[ property ] = _object[ property ]; } return this; }; this.stop = function () { TWEEN.remove( this ); return this; }; this.delay = function ( amount ) { _delayTime = amount; return this; }; this.easing = function ( easing ) { _easingFunction = easing; return this; }; this.interpolation = function ( interpolation ) { _interpolationFunction = interpolation; return this; }; this.chain = function () { _chainedTweens = arguments; return this; }; this.onStart = function ( callback ) { _onStartCallback = callback; return this; }; this.onUpdate = function ( callback ) { _onUpdateCallback = callback; return this; }; this.onComplete = function ( callback ) { _onCompleteCallback = callback; return this; }; this.update = function ( time ) { if ( time < _startTime ) { return true; } if ( _onStartCallbackFired === false ) { if ( _onStartCallback !== null ) { _onStartCallback.call( _object ); } _onStartCallbackFired = true; } var elapsed = ( time - _startTime ) / _duration; elapsed = elapsed > 1 ? 
1 : elapsed; var value = _easingFunction( elapsed ); for ( var property in _valuesStart ) { var start = _valuesStart[ property ]; var end = _valuesEnd[ property ]; if ( end instanceof Array ) { _object[ property ] = _interpolationFunction( end, value ); } else { _object[ property ] = start + ( end - start ) * value; } } if ( _onUpdateCallback !== null ) { _onUpdateCallback.call( _object, value ); } if ( elapsed == 1 ) { if ( _onCompleteCallback !== null ) { _onCompleteCallback.call( _object ); } for ( var i = 0, numChainedTweens = _chainedTweens.length; i < numChainedTweens; i ++ ) { _chainedTweens[ i ].start( time ); } return false; } return true; }; }; TWEEN.Easing = { Linear: { None: function ( k ) { return k; } }, Quadratic: { In: function ( k ) { return k * k; }, Out: function ( k ) { return k * ( 2 - k ); }, InOut: function ( k ) { if ( ( k *= 2 ) < 1 ) return 0.5 * k * k; return - 0.5 * ( --k * ( k - 2 ) - 1 ); } }, Cubic: { In: function ( k ) { return k * k * k; }, Out: function ( k ) { return --k * k * k + 1; }, InOut: function ( k ) { if ( ( k *= 2 ) < 1 ) return 0.5 * k * k * k; return 0.5 * ( ( k -= 2 ) * k * k + 2 ); } }, Quartic: { In: function ( k ) { return k * k * k * k; }, Out: function ( k ) { return 1 - ( --k * k * k * k ); }, InOut: function ( k ) { if ( ( k *= 2 ) < 1) return 0.5 * k * k * k * k; return - 0.5 * ( ( k -= 2 ) * k * k * k - 2 ); } }, Quintic: { In: function ( k ) { return k * k * k * k * k;<|fim▁hole|> }, Out: function ( k ) { return --k * k * k * k * k + 1; }, InOut: function ( k ) { if ( ( k *= 2 ) < 1 ) return 0.5 * k * k * k * k * k; return 0.5 * ( ( k -= 2 ) * k * k * k * k + 2 ); } }, Sinusoidal: { In: function ( k ) { return 1 - Math.cos( k * Math.PI / 2 ); }, Out: function ( k ) { return Math.sin( k * Math.PI / 2 ); }, InOut: function ( k ) { return 0.5 * ( 1 - Math.cos( Math.PI * k ) ); } }, Exponential: { In: function ( k ) { return k === 0 ? 0 : Math.pow( 1024, k - 1 ); }, Out: function ( k ) { return k === 1 ? 
1 : 1 - Math.pow( 2, - 10 * k ); }, InOut: function ( k ) { if ( k === 0 ) return 0; if ( k === 1 ) return 1; if ( ( k *= 2 ) < 1 ) return 0.5 * Math.pow( 1024, k - 1 ); return 0.5 * ( - Math.pow( 2, - 10 * ( k - 1 ) ) + 2 ); } }, Circular: { In: function ( k ) { return 1 - Math.sqrt( 1 - k * k ); }, Out: function ( k ) { return Math.sqrt( 1 - ( --k * k ) ); }, InOut: function ( k ) { if ( ( k *= 2 ) < 1) return - 0.5 * ( Math.sqrt( 1 - k * k) - 1); return 0.5 * ( Math.sqrt( 1 - ( k -= 2) * k) + 1); } }, Elastic: { In: function ( k ) { var s, a = 0.1, p = 0.4; if ( k === 0 ) return 0; if ( k === 1 ) return 1; if ( !a || a < 1 ) { a = 1; s = p / 4; } else s = p * Math.asin( 1 / a ) / ( 2 * Math.PI ); return - ( a * Math.pow( 2, 10 * ( k -= 1 ) ) * Math.sin( ( k - s ) * ( 2 * Math.PI ) / p ) ); }, Out: function ( k ) { var s, a = 0.1, p = 0.4; if ( k === 0 ) return 0; if ( k === 1 ) return 1; if ( !a || a < 1 ) { a = 1; s = p / 4; } else s = p * Math.asin( 1 / a ) / ( 2 * Math.PI ); return ( a * Math.pow( 2, - 10 * k) * Math.sin( ( k - s ) * ( 2 * Math.PI ) / p ) + 1 ); }, InOut: function ( k ) { var s, a = 0.1, p = 0.4; if ( k === 0 ) return 0; if ( k === 1 ) return 1; if ( !a || a < 1 ) { a = 1; s = p / 4; } else s = p * Math.asin( 1 / a ) / ( 2 * Math.PI ); if ( ( k *= 2 ) < 1 ) return - 0.5 * ( a * Math.pow( 2, 10 * ( k -= 1 ) ) * Math.sin( ( k - s ) * ( 2 * Math.PI ) / p ) ); return a * Math.pow( 2, -10 * ( k -= 1 ) ) * Math.sin( ( k - s ) * ( 2 * Math.PI ) / p ) * 0.5 + 1; } }, Back: { In: function ( k ) { var s = 1.70158; return k * k * ( ( s + 1 ) * k - s ); }, Out: function ( k ) { var s = 1.70158; return --k * k * ( ( s + 1 ) * k + s ) + 1; }, InOut: function ( k ) { var s = 1.70158 * 1.525; if ( ( k *= 2 ) < 1 ) return 0.5 * ( k * k * ( ( s + 1 ) * k - s ) ); return 0.5 * ( ( k -= 2 ) * k * ( ( s + 1 ) * k + s ) + 2 ); } }, Bounce: { In: function ( k ) { return 1 - TWEEN.Easing.Bounce.Out( 1 - k ); }, Out: function ( k ) { if ( k < ( 1 / 2.75 ) ) { return 
7.5625 * k * k; } else if ( k < ( 2 / 2.75 ) ) { return 7.5625 * ( k -= ( 1.5 / 2.75 ) ) * k + 0.75; } else if ( k < ( 2.5 / 2.75 ) ) { return 7.5625 * ( k -= ( 2.25 / 2.75 ) ) * k + 0.9375; } else { return 7.5625 * ( k -= ( 2.625 / 2.75 ) ) * k + 0.984375; } }, InOut: function ( k ) { if ( k < 0.5 ) return TWEEN.Easing.Bounce.In( k * 2 ) * 0.5; return TWEEN.Easing.Bounce.Out( k * 2 - 1 ) * 0.5 + 0.5; } } }; TWEEN.Interpolation = { Linear: function ( v, k ) { var m = v.length - 1, f = m * k, i = Math.floor( f ), fn = TWEEN.Interpolation.Utils.Linear; if ( k < 0 ) return fn( v[ 0 ], v[ 1 ], f ); if ( k > 1 ) return fn( v[ m ], v[ m - 1 ], m - f ); return fn( v[ i ], v[ i + 1 > m ? m : i + 1 ], f - i ); }, Bezier: function ( v, k ) { var b = 0, n = v.length - 1, pw = Math.pow, bn = TWEEN.Interpolation.Utils.Bernstein, i; for ( i = 0; i <= n; i++ ) { b += pw( 1 - k, n - i ) * pw( k, i ) * v[ i ] * bn( n, i ); } return b; }, CatmullRom: function ( v, k ) { var m = v.length - 1, f = m * k, i = Math.floor( f ), fn = TWEEN.Interpolation.Utils.CatmullRom; if ( v[ 0 ] === v[ m ] ) { if ( k < 0 ) i = Math.floor( f = m * ( 1 + k ) ); return fn( v[ ( i - 1 + m ) % m ], v[ i ], v[ ( i + 1 ) % m ], v[ ( i + 2 ) % m ], f - i ); } else { if ( k < 0 ) return v[ 0 ] - ( fn( v[ 0 ], v[ 0 ], v[ 1 ], v[ 1 ], -f ) - v[ 0 ] ); if ( k > 1 ) return v[ m ] - ( fn( v[ m ], v[ m ], v[ m - 1 ], v[ m - 1 ], f - m ) - v[ m ] ); return fn( v[ i ? i - 1 : 0 ], v[ i ], v[ m < i + 1 ? m : i + 1 ], v[ m < i + 2 ? 
m : i + 2 ], f - i ); } }, Utils: { Linear: function ( p0, p1, t ) { return ( p1 - p0 ) * t + p0; }, Bernstein: function ( n , i ) { var fc = TWEEN.Interpolation.Utils.Factorial; return fc( n ) / fc( i ) / fc( n - i ); }, Factorial: ( function () { var a = [ 1 ]; return function ( n ) { var s = 1, i; if ( a[ n ] ) return a[ n ]; for ( i = n; i > 1; i-- ) s *= i; return a[ n ] = s; }; } )(), CatmullRom: function ( p0, p1, p2, p3, t ) { var v0 = ( p2 - p0 ) * 0.5, v1 = ( p3 - p1 ) * 0.5, t2 = t * t, t3 = t * t2; return ( 2 * p1 - 2 * p2 + v0 + v1 ) * t3 + ( - 3 * p1 + 3 * p2 - 2 * v0 - v1 ) * t2 + v0 * t + p1; } } };<|fim▁end|>
<|file_name|>PulseSignal.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************* File: PulseSignal.cpp Project: OpenSonATA Authors: The OpenSonATA code is the result of many programmers over many years Copyright 2011 The SETI Institute OpenSonATA is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. OpenSonATA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenSonATA. If not, see<http://www.gnu.org/licenses/>. Implementers of this code are requested to include the caption "Licensed through SETI" with a link to setiQuest.org. For alternate licensing arrangements, please contact The SETI Institute at www.seti.org or setiquest.org. 
*******************************************************************************/ // // Pulse signal class // // $Header: /home/cvs/nss/sonata-pkg/dx/lib/PulseSignal.cpp,v 1.3 2009/02/22 04:41:42 kes Exp $ // #include <fftw3.h> #include "System.h" #include "PulseSignal.h" using std::cout; using namespace sonata_lib; namespace dx { PulseSignal::PulseSignal(PulseSignalHeader *sig_) { int32_t pulses = sig_->train.numberOfPulses; size_t len = sizeof(PulseSignalHeader) + pulses * sizeof(Pulse); PartitionSet *partitionSet = PartitionSet::getInstance(); blk = partitionSet->alloc(len); Assert(blk); signal = static_cast<PulseSignalHeader *> (blk->getData()); *signal = *sig_; // cout << "PulseSignal, signal " << *signal; Pulse *dp = (Pulse *) (signal + 1); Pulse *sp = (Pulse *) (sig_ + 1); for (int32_t i = 0; i < pulses; ++i) dp[i] = sp[i]; } PulseSignal::~PulseSignal() { blk->free(); } PulseSignal * PulseSignal::getPulse() { return (this); } void PulseSignal::setConfirmationStats(ConfirmationStats& cfm_) { signal->cfm = cfm_; } PulseSignalHeader * PulseSignal::getSignal() { // Debug(DEBUG_NEVER, (void *) signal, "signal"); return (signal); } SignalDescription& PulseSignal::getSignalDescription() { return (signal->sig); } SignalId& PulseSignal::getSignalId() { return (signal->sig.signalId); } SignalId& PulseSignal::getOrigSignalId() { return (signal->sig.origSignalId); } void PulseSignal::setClass(SignalClass sigClass_) { signal->sig.sigClass = sigClass_; } void PulseSignal::setReason(SignalClassReason reason_) { signal->sig.reason = reason_; } SignalClass PulseSignal::getClass() { return (signal->sig.sigClass); } SignalClassReason PulseSignal::getReason() { return (signal->sig.reason); } ConfirmationStats *<|fim▁hole|>} float64_t PulseSignal::getRfFreq() { return (signal->sig.path.rfFreq); } float32_t PulseSignal::getDrift() { return (signal->sig.path.drift); } float32_t PulseSignal::getWidth() { return (signal->sig.path.width); } float32_t PulseSignal::getPower() { return 
(signal->sig.path.power); } Resolution PulseSignal::getResolution() { return (signal->train.res); } }<|fim▁end|>
PulseSignal::getConfirmationStats() { return (&signal->cfm);
<|file_name|>ko.withfirst.js<|end_file_name|><|fim▁begin|>define([ 'knockout' ],function( ko ){ ko.bindingHandlers.withfirst = { 'init' : function(element, valueAccessor, allBindings, viewModel, bindingContext) { var savedNodes; ko.computed(function() { var dataValue = ko.utils.unwrapObservable(valueAccessor()); var shouldDisplay = typeof dataValue.length == "number" && dataValue.length; var isFirstRender = !savedNodes; // Save a copy of the inner nodes on the initial update, // but only if we have dependencies. if (isFirstRender && ko.computedContext.getDependenciesCount()) { savedNodes = ko.utils.cloneNodes(ko.virtualElements.childNodes(element), true /* shouldCleanNodes */); } if (shouldDisplay) { if (!isFirstRender) { ko.virtualElements.setDomNodeChildren(element, ko.utils.cloneNodes(savedNodes)); } ko.applyBindingsToDescendants( bindingContext['createChildContext'](dataValue && dataValue[0]), element); } else { ko.virtualElements.emptyNode(element); } }, null).extend({ rateLimit: 50 }); return { 'controlsDescendantBindings': true }; }<|fim▁hole|><|fim▁end|>
} });
<|file_name|>dz.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js function plural(n: number): number { return 5; } export default [ 'dz', [ ['སྔ་ཆ་', 'ཕྱི་ཆ་'], , ], , [ ['ཟླ', 'མིར', 'ལྷག', 'ཕུར', 'སངྶ', 'སྤེན', 'ཉི'], [ 'ཟླ་', 'མིར་', 'ལྷག་', 'ཕུར་', 'སངས་', 'སྤེན་', 'ཉི་' ], [ 'གཟའ་ཟླ་བ་', 'གཟའ་མིག་དམར་', 'གཟའ་ལྷག་པ་', 'གཟའ་ཕུར་བུ་', 'གཟའ་པ་སངས་', 'གཟའ་སྤེན་པ་', 'གཟའ་ཉི་མ་' ], [ 'ཟླ་', 'མིར་', 'ལྷག་', 'ཕུར་', 'སངས་', 'སྤེན་', 'ཉི་' ] ], , [ ['༡', '༢', '༣', '4', '༥', '༦', '༧', '༨', '9', '༡༠', '༡༡', '༡༢'], ['༡', '༢', '༣', '༤', '༥', '༦', '༧', '༨', '༩', '༡༠', '༡༡', '12'], [ 'ཟླ་དངཔ་', 'ཟླ་གཉིས་པ་', 'ཟླ་གསུམ་པ་', 'ཟླ་བཞི་པ་', 'ཟླ་ལྔ་པ་', 'ཟླ་དྲུག་པ', 'ཟླ་བདུན་པ་', 'ཟླ་བརྒྱད་པ་', 'ཟླ་དགུ་པ་', 'ཟླ་བཅུ་པ་',<|fim▁hole|> [ ['༡', '༢', '༣', '༤', '༥', '༦', '༧', '༨', '༩', '༡༠', '༡༡', '༡༢'], [ 'ཟླ་༡', 'ཟླ་༢', 'ཟླ་༣', 'ཟླ་༤', 'ཟླ་༥', 'ཟླ་༦', 'ཟླ་༧', 'ཟླ་༨', 'ཟླ་༩', 'ཟླ་༡༠', 'ཟླ་༡༡', 'ཟླ་༡༢' ], [ 'སྤྱི་ཟླ་དངཔ་', 'སྤྱི་ཟླ་གཉིས་པ་', 'སྤྱི་ཟླ་གསུམ་པ་', 'སྤྱི་ཟླ་བཞི་པ', 'སྤྱི་ཟླ་ལྔ་པ་', 'སྤྱི་ཟླ་དྲུག་པ', 'སྤྱི་ཟླ་བདུན་པ་', 'སྤྱི་ཟླ་བརྒྱད་པ་', 'སྤྱི་ཟླ་དགུ་པ་', 'སྤྱི་ཟླ་བཅུ་པ་', 'སྤྱི་ཟླ་བཅུ་གཅིག་པ་', 'སྤྱི་ཟླ་བཅུ་གཉིས་པ་' ] ], [ ['BCE', 'CE'], , ], 0, [6, 0], [ 'y-MM-dd', 'སྤྱི་ལོ་y ཟླ་MMM ཚེས་dd', 'སྤྱི་ལོ་y MMMM ཚེས་ dd', 'EEEE, སྤྱི་ལོ་y MMMM ཚེས་dd' ], [ 'ཆུ་ཚོད་ h སྐར་མ་ mm a', 'ཆུ་ཚོད་h:mm:ss a', 'ཆུ་ཚོད་ h སྐར་མ་ mm:ss a z', 'ཆུ་ཚོད་ h སྐར་མ་ mm:ss a zzzz' ], [ '{1} {0}', , , ], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##,##0.###', '#,##,##0 %', '¤#,##,##0.00', '#E0'], '₹', 'རྒྱ་གར་གྱི་དངུལ་ རུ་པི', plural ];<|fim▁end|>
'ཟླ་བཅུ་གཅིག་པ་', 'ཟླ་བཅུ་གཉིས་པ་' ] ],
<|file_name|>fileslist.js<|end_file_name|><|fim▁begin|>import PropTypes from 'prop-types' import React from 'react' import { List } from 'immutable' import Modal from './warningmodal.js' import Path from 'path' const FilesList = ({ folders, folderPathToRemove, actions }) => { const addStorageLocation = () => actions.addFolderAskPathSize() const removeStorageLocation = folder => () => { actions.removeFolder(folder) actions.updateFolderToRemove() } const onResizeStorageLocationClick = folder => () => actions.resizeFolder(folder) const onRemoveStorageLocationClick = folder => () => actions.updateFolderToRemove(folder.get('path')) const hideRemoveStorageModal = () => actions.updateFolderToRemove() // sort folders by their name const sortedFolders = folders.sortBy(folder => folder.get('path')) const FileList = sortedFolders.map((folder, key) => ( <div className='property pure-g' key={key}> <div className='pure-u-3-4'> <div className='name'>{folder.get('path')}</div> </div> <div className='pure-u-1-12'> <div>{Math.floor(folder.get('free')).toString()} GB</div> </div> <div className='pure-u-1-12'> <div>{Math.floor(folder.get('size')).toString()} GB</div> </div> <div className='pure-u-1-24'<|fim▁hole|> <div> <i className='fa fa-edit button' /> </div> </div> <div className='pure-u-1-24' onClick={onRemoveStorageLocationClick(folder)} > <div> <i className='fa fa-remove button' /> </div> </div> {folderPathToRemove && folderPathToRemove === folder.get('path') ? ( <Modal title={`Remove "${Path.basename(folder.get('path'))}"?`} message='No longer use this folder for storage? You may lose collateral if you do not have enough space to fill all contracts.' 
actions={{ acceptModal: removeStorageLocation(folder), declineModal: hideRemoveStorageModal }} /> ) : null} </div> )) return ( <div className='files section'> <div className='property row'> <div className='title' /> <div className='controls full'> <div className='button left' id='edit' onClick={addStorageLocation}> <i className='fa fa-folder-open' /> Add Storage Folder </div> <div className='pure-u-1-12' style={{ textAlign: 'left' }}> Free </div> <div className='pure-u-1-12' style={{ textAlign: 'left' }}> Max </div> <div className='pure-u-1-12' /> </div> </div> {FileList} </div> ) } FilesList.propTypes = { folderPathToRemove: PropTypes.string, folders: PropTypes.instanceOf(List).isRequired } export default FilesList<|fim▁end|>
onClick={onResizeStorageLocationClick(folder)} >
<|file_name|>show_test.go<|end_file_name|><|fim▁begin|>package command import ( "encoding/json" "fmt" "io/ioutil" "os" "path/filepath" "strings" "testing" "github.com/google/go-cmp/cmp" "github.com/hashicorp/terraform/internal/addrs" "github.com/hashicorp/terraform/internal/configs/configschema" "github.com/hashicorp/terraform/internal/plans" "github.com/hashicorp/terraform/internal/providers" "github.com/hashicorp/terraform/internal/states" "github.com/hashicorp/terraform/internal/terraform" "github.com/mitchellh/cli" "github.com/zclconf/go-cty/cty" ) func TestShow(t *testing.T) { ui := new(cli.MockUi) view, _ := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), Ui: ui, View: view, }, } args := []string{ "bad", "bad", } if code := c.Run(args); code != 1 { t.Fatalf("bad: \n%s", ui.OutputWriter.String()) } } func TestShow_noArgs(t *testing.T) { // Get a temp cwd tmp, cwd := testCwd(t) defer testFixCwd(t, tmp, cwd) // Create the default state testStateFileDefault(t, testState()) ui := new(cli.MockUi) view, _ := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), Ui: ui, View: view, }, } if code := c.Run([]string{}); code != 0 { t.Fatalf("bad: \n%s", ui.OutputWriter.String()) } if !strings.Contains(ui.OutputWriter.String(), "# test_instance.foo:") { t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) } } // https://github.com/hashicorp/terraform/issues/21462 func TestShow_aliasedProvider(t *testing.T) { // Create the default state with aliased resource testState := states.BuildState(func(s *states.SyncState) { s.SetResourceInstanceCurrent( addrs.Resource{ Mode: addrs.ManagedResourceMode, Type: "test_instance", Name: "foo", }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), &states.ResourceInstanceObjectSrc{ // The weird whitespace here is reflective of how this would // get written out in a real state file, due to the indentation // of all of the containing 
wrapping objects and arrays. AttrsJSON: []byte("{\n \"id\": \"bar\"\n }"), Status: states.ObjectReady, Dependencies: []addrs.ConfigResource{}, }, addrs.RootModuleInstance.ProviderConfigAliased(addrs.NewDefaultProvider("test"), "alias"), ) }) statePath := testStateFile(t, testState) stateDir := filepath.Dir(statePath) defer os.RemoveAll(stateDir) defer testChdir(t, stateDir)() ui := new(cli.MockUi) view, _ := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), Ui: ui, View: view, }, } fmt.Println(os.Getwd()) // the statefile created by testStateFile is named state.tfstate args := []string{"state.tfstate"} if code := c.Run(args); code != 0 { t.Fatalf("bad exit code: \n%s", ui.OutputWriter.String()) } if strings.Contains(ui.OutputWriter.String(), "# missing schema for provider \"test.alias\"") { t.Fatalf("bad output: \n%s", ui.OutputWriter.String()) } } func TestShow_noArgsNoState(t *testing.T) { // Create the default state statePath := testStateFile(t, testState()) stateDir := filepath.Dir(statePath) defer os.RemoveAll(stateDir) defer testChdir(t, stateDir)() ui := new(cli.MockUi) view, _ := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), Ui: ui,<|fim▁hole|> // the statefile created by testStateFile is named state.tfstate args := []string{"state.tfstate"} if code := c.Run(args); code != 0 { t.Fatalf("bad: \n%s", ui.OutputWriter.String()) } } func TestShow_planNoop(t *testing.T) { planPath := testPlanFileNoop(t) ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), Ui: ui, View: view, }, } args := []string{ planPath, } if code := c.Run(args); code != 0 { t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) } want := `No changes. 
Your infrastructure matches the configuration.` got := done(t).Stdout() if !strings.Contains(got, want) { t.Errorf("missing expected output\nwant: %s\ngot:\n%s", want, got) } } func TestShow_planWithChanges(t *testing.T) { planPathWithChanges := showFixturePlanFile(t, plans.DeleteThenCreate) ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(showFixtureProvider()), Ui: ui, View: view, }, } args := []string{ planPathWithChanges, } if code := c.Run(args); code != 0 { t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) } want := `test_instance.foo must be replaced` got := done(t).Stdout() if !strings.Contains(got, want) { t.Errorf("missing expected output\nwant: %s\ngot:\n%s", want, got) } } func TestShow_planWithForceReplaceChange(t *testing.T) { // The main goal of this test is to see that the "replace by request" // resource instance action reason can round-trip through a plan file and // be reflected correctly in the "terraform show" output, the same way // as it would appear in "terraform plan" output. 
_, snap := testModuleWithSnapshot(t, "show") plannedVal := cty.ObjectVal(map[string]cty.Value{ "id": cty.UnknownVal(cty.String), "ami": cty.StringVal("bar"), }) priorValRaw, err := plans.NewDynamicValue(cty.NullVal(plannedVal.Type()), plannedVal.Type()) if err != nil { t.Fatal(err) } plannedValRaw, err := plans.NewDynamicValue(plannedVal, plannedVal.Type()) if err != nil { t.Fatal(err) } plan := testPlan(t) plan.Changes.SyncWrapper().AppendResourceInstanceChange(&plans.ResourceInstanceChangeSrc{ Addr: addrs.Resource{ Mode: addrs.ManagedResourceMode, Type: "test_instance", Name: "foo", }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), ProviderAddr: addrs.AbsProviderConfig{ Provider: addrs.NewDefaultProvider("test"), Module: addrs.RootModule, }, ChangeSrc: plans.ChangeSrc{ Action: plans.CreateThenDelete, Before: priorValRaw, After: plannedValRaw, }, ActionReason: plans.ResourceInstanceReplaceByRequest, }) planFilePath := testPlanFile( t, snap, states.NewState(), plan, ) ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(showFixtureProvider()), Ui: ui, View: view, }, } args := []string{ planFilePath, } if code := c.Run(args); code != 0 { t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) } got := done(t).Stdout() if want := `test_instance.foo will be replaced, as requested`; !strings.Contains(got, want) { t.Errorf("wrong output\ngot:\n%s\n\nwant substring: %s", got, want) } if want := `Plan: 1 to add, 0 to change, 1 to destroy.`; !strings.Contains(got, want) { t.Errorf("wrong output\ngot:\n%s\n\nwant substring: %s", got, want) } } func TestShow_plan_json(t *testing.T) { planPath := showFixturePlanFile(t, plans.Create) ui := new(cli.MockUi) view, _ := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(showFixtureProvider()), Ui: ui, View: view, }, } args := []string{ "-json", planPath, } if code := c.Run(args); code != 0 { t.Fatalf("bad: \n%s", 
ui.ErrorWriter.String()) } } func TestShow_state(t *testing.T) { originalState := testState() statePath := testStateFile(t, originalState) defer os.RemoveAll(filepath.Dir(statePath)) ui := new(cli.MockUi) view, _ := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), Ui: ui, View: view, }, } args := []string{ statePath, } if code := c.Run(args); code != 0 { t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) } } func TestShow_json_output(t *testing.T) { fixtureDir := "testdata/show-json" testDirs, err := ioutil.ReadDir(fixtureDir) if err != nil { t.Fatal(err) } for _, entry := range testDirs { if !entry.IsDir() { continue } t.Run(entry.Name(), func(t *testing.T) { td := tempDir(t) inputDir := filepath.Join(fixtureDir, entry.Name()) testCopyDir(t, inputDir, td) defer os.RemoveAll(td) defer testChdir(t, td)() expectError := strings.Contains(entry.Name(), "error") providerSource, close := newMockProviderSource(t, map[string][]string{ "test": {"1.2.3"}, }) defer close() p := showFixtureProvider() ui := new(cli.MockUi) view, _ := testView(t) m := Meta{ testingOverrides: metaOverridesForProvider(p), Ui: ui, View: view, ProviderSource: providerSource, } // init ic := &InitCommand{ Meta: m, } if code := ic.Run([]string{}); code != 0 { if expectError { // this should error, but not panic. 
return } t.Fatalf("init failed\n%s", ui.ErrorWriter) } pc := &PlanCommand{ Meta: m, } args := []string{ "-out=terraform.plan", } if code := pc.Run(args); code != 0 { t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) } // flush the plan output from the mock ui ui.OutputWriter.Reset() sc := &ShowCommand{ Meta: m, } args = []string{ "-json", "terraform.plan", } defer os.Remove("terraform.plan") if code := sc.Run(args); code != 0 { t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) } // compare ui output to wanted output var got, want plan gotString := ui.OutputWriter.String() json.Unmarshal([]byte(gotString), &got) wantFile, err := os.Open("output.json") if err != nil { t.Fatalf("err: %s", err) } defer wantFile.Close() byteValue, err := ioutil.ReadAll(wantFile) if err != nil { t.Fatalf("err: %s", err) } json.Unmarshal([]byte(byteValue), &want) if !cmp.Equal(got, want) { t.Fatalf("wrong result:\n %v\n", cmp.Diff(got, want)) } }) } } func TestShow_json_output_sensitive(t *testing.T) { td := tempDir(t) inputDir := "testdata/show-json-sensitive" testCopyDir(t, inputDir, td) defer os.RemoveAll(td) defer testChdir(t, td)() providerSource, close := newMockProviderSource(t, map[string][]string{"test": {"1.2.3"}}) defer close() p := showFixtureSensitiveProvider() ui := new(cli.MockUi) view, _ := testView(t) m := Meta{ testingOverrides: metaOverridesForProvider(p), Ui: ui, View: view, ProviderSource: providerSource, } // init ic := &InitCommand{ Meta: m, } if code := ic.Run([]string{}); code != 0 { t.Fatalf("init failed\n%s", ui.ErrorWriter) } // flush init output ui.OutputWriter.Reset() pc := &PlanCommand{ Meta: m, } args := []string{ "-out=terraform.plan", } if code := pc.Run(args); code != 0 { fmt.Println(ui.OutputWriter.String()) t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) } // flush the plan output from the mock ui ui.OutputWriter.Reset() sc := &ShowCommand{ Meta: m, 
} args = []string{ "-json", "terraform.plan", } defer os.Remove("terraform.plan") if code := sc.Run(args); code != 0 { t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) } // compare ui output to wanted output var got, want plan gotString := ui.OutputWriter.String() json.Unmarshal([]byte(gotString), &got) wantFile, err := os.Open("output.json") if err != nil { t.Fatalf("err: %s", err) } defer wantFile.Close() byteValue, err := ioutil.ReadAll(wantFile) if err != nil { t.Fatalf("err: %s", err) } json.Unmarshal([]byte(byteValue), &want) if !cmp.Equal(got, want) { t.Fatalf("wrong result:\n %v\n", cmp.Diff(got, want)) } } // similar test as above, without the plan func TestShow_json_output_state(t *testing.T) { fixtureDir := "testdata/show-json-state" testDirs, err := ioutil.ReadDir(fixtureDir) if err != nil { t.Fatal(err) } for _, entry := range testDirs { if !entry.IsDir() { continue } t.Run(entry.Name(), func(t *testing.T) { td := tempDir(t) inputDir := filepath.Join(fixtureDir, entry.Name()) testCopyDir(t, inputDir, td) defer os.RemoveAll(td) defer testChdir(t, td)() providerSource, close := newMockProviderSource(t, map[string][]string{ "test": {"1.2.3"}, }) defer close() p := showFixtureProvider() ui := new(cli.MockUi) view, _ := testView(t) m := Meta{ testingOverrides: metaOverridesForProvider(p), Ui: ui, View: view, ProviderSource: providerSource, } // init ic := &InitCommand{ Meta: m, } if code := ic.Run([]string{}); code != 0 { t.Fatalf("init failed\n%s", ui.ErrorWriter) } // flush the plan output from the mock ui ui.OutputWriter.Reset() sc := &ShowCommand{ Meta: m, } if code := sc.Run([]string{"-json"}); code != 0 { t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) } // compare ui output to wanted output type state struct { FormatVersion string `json:"format_version,omitempty"` TerraformVersion string `json:"terraform_version"` Values map[string]interface{} `json:"values,omitempty"` 
SensitiveValues map[string]bool `json:"sensitive_values,omitempty"` } var got, want state gotString := ui.OutputWriter.String() json.Unmarshal([]byte(gotString), &got) wantFile, err := os.Open("output.json") if err != nil { t.Fatalf("err: %s", err) } defer wantFile.Close() byteValue, err := ioutil.ReadAll(wantFile) if err != nil { t.Fatalf("err: %s", err) } json.Unmarshal([]byte(byteValue), &want) if !cmp.Equal(got, want) { t.Fatalf("wrong result:\n %v\n", cmp.Diff(got, want)) } }) } } // showFixtureSchema returns a schema suitable for processing the configuration // in testdata/show. This schema should be assigned to a mock provider // named "test". func showFixtureSchema() *providers.GetProviderSchemaResponse { return &providers.GetProviderSchemaResponse{ Provider: providers.Schema{ Block: &configschema.Block{ Attributes: map[string]*configschema.Attribute{ "region": {Type: cty.String, Optional: true}, }, }, }, ResourceTypes: map[string]providers.Schema{ "test_instance": { Block: &configschema.Block{ Attributes: map[string]*configschema.Attribute{ "id": {Type: cty.String, Optional: true, Computed: true}, "ami": {Type: cty.String, Optional: true}, }, }, }, }, } } // showFixtureSensitiveSchema returns a schema suitable for processing the configuration // in testdata/show. This schema should be assigned to a mock provider // named "test". It includes a sensitive attribute. 
func showFixtureSensitiveSchema() *providers.GetProviderSchemaResponse { return &providers.GetProviderSchemaResponse{ Provider: providers.Schema{ Block: &configschema.Block{ Attributes: map[string]*configschema.Attribute{ "region": {Type: cty.String, Optional: true}, }, }, }, ResourceTypes: map[string]providers.Schema{ "test_instance": { Block: &configschema.Block{ Attributes: map[string]*configschema.Attribute{ "id": {Type: cty.String, Optional: true, Computed: true}, "ami": {Type: cty.String, Optional: true}, "password": {Type: cty.String, Optional: true, Sensitive: true}, }, }, }, }, } } // showFixtureProvider returns a mock provider that is configured for basic // operation with the configuration in testdata/show. This mock has // GetSchemaResponse, PlanResourceChangeFn, and ApplyResourceChangeFn populated, // with the plan/apply steps just passing through the data determined by // Terraform Core. func showFixtureProvider() *terraform.MockProvider { p := testProvider() p.GetProviderSchemaResponse = showFixtureSchema() p.ReadResourceFn = func(req providers.ReadResourceRequest) providers.ReadResourceResponse { idVal := req.PriorState.GetAttr("id") amiVal := req.PriorState.GetAttr("ami") if amiVal.RawEquals(cty.StringVal("refresh-me")) { amiVal = cty.StringVal("refreshed") } return providers.ReadResourceResponse{ NewState: cty.ObjectVal(map[string]cty.Value{ "id": idVal, "ami": amiVal, }), Private: req.Private, } } p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse { idVal := req.ProposedNewState.GetAttr("id") amiVal := req.ProposedNewState.GetAttr("ami") if idVal.IsNull() { idVal = cty.UnknownVal(cty.String) } var reqRep []cty.Path if amiVal.RawEquals(cty.StringVal("force-replace")) { reqRep = append(reqRep, cty.GetAttrPath("ami")) } return providers.PlanResourceChangeResponse{ PlannedState: cty.ObjectVal(map[string]cty.Value{ "id": idVal, "ami": amiVal, }), RequiresReplace: reqRep, } } 
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse { idVal := req.PlannedState.GetAttr("id") amiVal := req.PlannedState.GetAttr("ami") if !idVal.IsKnown() { idVal = cty.StringVal("placeholder") } return providers.ApplyResourceChangeResponse{ NewState: cty.ObjectVal(map[string]cty.Value{ "id": idVal, "ami": amiVal, }), } } return p } // showFixtureSensitiveProvider returns a mock provider that is configured for basic // operation with the configuration in testdata/show. This mock has // GetSchemaResponse, PlanResourceChangeFn, and ApplyResourceChangeFn populated, // with the plan/apply steps just passing through the data determined by // Terraform Core. It also has a sensitive attribute in the provider schema. func showFixtureSensitiveProvider() *terraform.MockProvider { p := testProvider() p.GetProviderSchemaResponse = showFixtureSensitiveSchema() p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse { idVal := req.ProposedNewState.GetAttr("id") if idVal.IsNull() { idVal = cty.UnknownVal(cty.String) } return providers.PlanResourceChangeResponse{ PlannedState: cty.ObjectVal(map[string]cty.Value{ "id": idVal, "ami": req.ProposedNewState.GetAttr("ami"), "password": req.ProposedNewState.GetAttr("password"), }), } } p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse { idVal := req.PlannedState.GetAttr("id") if !idVal.IsKnown() { idVal = cty.StringVal("placeholder") } return providers.ApplyResourceChangeResponse{ NewState: cty.ObjectVal(map[string]cty.Value{ "id": idVal, "ami": req.PlannedState.GetAttr("ami"), "password": req.PlannedState.GetAttr("password"), }), } } return p } // showFixturePlanFile creates a plan file at a temporary location containing a // single change to create or update the test_instance.foo that is included in the "show" // test fixture, returning the location of that plan 
file. // `action` is the planned change you would like to elicit func showFixturePlanFile(t *testing.T, action plans.Action) string { _, snap := testModuleWithSnapshot(t, "show") plannedVal := cty.ObjectVal(map[string]cty.Value{ "id": cty.UnknownVal(cty.String), "ami": cty.StringVal("bar"), }) priorValRaw, err := plans.NewDynamicValue(cty.NullVal(plannedVal.Type()), plannedVal.Type()) if err != nil { t.Fatal(err) } plannedValRaw, err := plans.NewDynamicValue(plannedVal, plannedVal.Type()) if err != nil { t.Fatal(err) } plan := testPlan(t) plan.Changes.SyncWrapper().AppendResourceInstanceChange(&plans.ResourceInstanceChangeSrc{ Addr: addrs.Resource{ Mode: addrs.ManagedResourceMode, Type: "test_instance", Name: "foo", }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), ProviderAddr: addrs.AbsProviderConfig{ Provider: addrs.NewDefaultProvider("test"), Module: addrs.RootModule, }, ChangeSrc: plans.ChangeSrc{ Action: action, Before: priorValRaw, After: plannedValRaw, }, }) return testPlanFile( t, snap, states.NewState(), plan, ) } // this simplified plan struct allows us to preserve field order when marshaling // the command output. NOTE: we are leaving "terraform_version" out of this test // to avoid needing to constantly update the expected output; as a potential // TODO we could write a jsonplan compare function. 
// plan mirrors the top-level shape of the jsonplan command output; declaring
// it locally keeps field (and therefore marshaling) order stable for the
// expected-output comparisons in these tests.
type plan struct {
	FormatVersion   string                 `json:"format_version,omitempty"`
	Variables       map[string]interface{} `json:"variables,omitempty"`
	PlannedValues   map[string]interface{} `json:"planned_values,omitempty"`
	ResourceDrift   []interface{}          `json:"resource_drift,omitempty"`
	ResourceChanges []interface{}          `json:"resource_changes,omitempty"`
	OutputChanges   map[string]interface{} `json:"output_changes,omitempty"`
	PriorState      priorState             `json:"prior_state,omitempty"`
	Config          map[string]interface{} `json:"configuration,omitempty"`
}

// priorState is the nested "prior_state" object within the plan JSON output.
type priorState struct {
	FormatVersion   string                 `json:"format_version,omitempty"`
	Values          map[string]interface{} `json:"values,omitempty"`
	SensitiveValues map[string]bool        `json:"sensitive_values,omitempty"`
}
View: view, }, }
<|file_name|>ve.dm.TransactionProcessor.test.js<|end_file_name|><|fim▁begin|>/*! * VisualEditor DataModel TransactionProcessor tests. * * @copyright 2011-2014 VisualEditor Team and others; see AUTHORS.txt * @license The MIT License (MIT); see LICENSE.txt */ QUnit.module( 've.dm.TransactionProcessor' ); /* Tests */ QUnit.test( 'commit', function ( assert ) { var i, originalData, originalDoc, msg, testDoc, tx, expectedData, expectedDoc, n = 0, store = ve.dm.example.createExampleDocument().getStore(), bold = ve.dm.example.createAnnotation( ve.dm.example.bold ), italic = ve.dm.example.createAnnotation( ve.dm.example.italic ), underline = ve.dm.example.createAnnotation( ve.dm.example.underline ), metaElementInsert = { type: 'alienMeta', attributes: { style: 'comment', text: ' inline ' } }, metaElementInsertClose = { type: '/alienMeta' }, metadataExample = [ { type: 'paragraph' }, 'a', 'b', { type: 'alienMeta', attributes: { domElements: $( '<!-- comment -->' ).toArray() } }, { type: '/alienMeta' }, 'c', 'd', { type: 'alienMeta', attributes: { domElements: $( '<!-- comment -->' ).toArray() } }, { type: '/alienMeta' }, 'e', 'f', { type: 'alienMeta', attributes: { domElements: $( '<!-- comment -->' ).toArray() } }, { type: '/alienMeta' }, 'g', 'h', { type: '/paragraph' } ], cases = { 'no operations': { calls: [], expected: function () {} }, retaining: { calls: [['pushRetain', 38]], expected: function () {} }, 'annotating content': { calls: [ ['pushRetain', 1], ['pushStartAnnotating', 'set', bold], ['pushRetain', 1], ['pushStopAnnotating', 'set', bold], ['pushRetain', 1], ['pushStartAnnotating', 'clear', italic], ['pushStartAnnotating', 'set', bold], ['pushStartAnnotating', 'set', underline], ['pushRetain', 1], ['pushStopAnnotating', 'clear', italic], ['pushStopAnnotating', 'set', bold], ['pushStopAnnotating', 'set', underline] ], expected: function ( data ) { data[1] = ['a', store.indexes( [ bold ] )]; data[2] = ['b', store.indexes( [ bold ] )]; data[3] = ['c', 
store.indexes( [ bold, underline ] )]; } }, 'annotating content and leaf elements': { calls: [ ['pushRetain', 38], ['pushStartAnnotating', 'set', bold], ['pushRetain', 4], ['pushStopAnnotating', 'set', bold] ], expected: function ( data ) { data[38] = ['h', store.indexes( [ bold ] )]; data[39].annotations = store.indexes( [ bold ] ); data[41] = ['i', store.indexes( [ bold ] )]; } }, 'annotating across metadata': { data: metadataExample, calls: [ ['pushRetain', 2], ['pushStartAnnotating', 'set', bold], ['pushRetain', 2], ['pushStopAnnotating', 'set', bold], ['pushRetain', 6] ], expected: function ( data ) { data[2] = ['b', store.indexes( [ bold ] )]; data[3].annotations = store.indexes( [ bold ] ); data[5] = ['c', store.indexes( [ bold ] )]; } }, 'annotating with metadata at edges': { data: metadataExample, calls: [ ['pushRetain', 3], ['pushStartAnnotating', 'set', bold], ['pushRetain', 4], ['pushStopAnnotating', 'set', bold], ['pushRetain', 3] ], expected: function ( data ) { data[7].annotations = store.indexes( [ bold ] ); data[5] = ['c', store.indexes( [ bold ] )]; data[6] = ['d', store.indexes( [ bold ] )]; data[9] = ['e', store.indexes( [ bold ] )]; data[10] = ['f', store.indexes( [ bold ] )]; } }, 'unannotating metadata': { data: [ { type: 'paragraph' }, 'a', ['b', store.indexes( [ bold ] )], { type: 'alienMeta', attributes: { domElements: $( '<!-- comment -->' ).toArray() }, annotations: store.indexes( [ bold ] ) }, { type: '/alienMeta' }, ['c', store.indexes( [ bold ] )], 'd', { type: '/paragraph' } ], calls: [ ['pushRetain', 2], ['pushStartAnnotating', 'clear', bold], ['pushRetain', 2], ['pushStopAnnotating', 'clear', bold], ['pushRetain', 6] ], expected: function ( data ) { data[2] = 'b'; data[5] = 'c'; delete data[3].annotations; } }, 'using an annotation method other than set or clear throws an exception': { calls: [ ['pushStartAnnotating', 'invalid-method', bold], ['pushRetain', 1], ['pushStopAnnotating', 'invalid-method', bold] ], exception: Error }, 
'annotating branch opening element throws an exception': { calls: [ ['pushStartAnnotating', 'set', bold], ['pushRetain', 1], ['pushStopAnnotating', 'set', bold] ], exception: Error }, 'annotating branch closing element throws an exception': { calls: [ ['pushRetain', 4], ['pushStartAnnotating', 'set', bold], ['pushRetain', 1], ['pushStopAnnotating', 'set', bold] ], exception: Error }, 'setting duplicate annotations throws an exception': { calls: [ ['pushRetain', 2], ['pushStartAnnotating', 'set', bold], ['pushRetain', 1], ['pushStopAnnotating', 'set', bold] ], exception: Error }, 'removing non-existent annotations throws an exception': { calls: [ ['pushRetain', 1], ['pushStartAnnotating', 'clear', bold], ['pushRetain', 1], ['pushStopAnnotating', 'clear', bold] ], exception: Error }, 'changing, removing and adding attributes': { calls: [ ['pushReplaceElementAttribute', 'level', 1, 2], ['pushRetain', 12], ['pushReplaceElementAttribute', 'style', 'bullet', 'number'], ['pushReplaceElementAttribute', 'test', undefined, 'abcd'], ['pushRetain', 27], ['pushReplaceElementAttribute', 'src', ve.dm.example.imgSrc, undefined] ], expected: function ( data ) { data[0].attributes.level = 2; data[12].attributes.style = 'number'; data[12].attributes.test = 'abcd'; delete data[39].attributes.src; } }, 'changing attributes on non-element data throws an exception': { calls: [ ['pushRetain', 1], ['pushReplaceElementAttribute', 'foo', 23, 42] ], exception: Error }, 'inserting text': { calls: [ ['pushRetain', 1], ['pushReplace', 1, 0, ['F', 'O', 'O']] ], expected: function ( data ) { data.splice( 1, 0, 'F', 'O', 'O' ); } }, 'removing text': { calls: [ ['pushRetain', 1], ['pushReplace', 1, 1, []] ], expected: function ( data ) { data.splice( 1, 1 ); } }, 'replacing text': { calls: [ ['pushRetain', 1], ['pushReplace', 1, 1, ['F', 'O', 'O']] ], expected: function ( data ) { data.splice( 1, 1, 'F', 'O', 'O' ); } }, 'emptying text': { calls: [ ['pushRetain', 10], ['pushReplace', 10, 1, []] ], 
expected: function ( data ) { data.splice( 10, 1 ); } }, 'inserting mixed content': { calls: [ ['pushRetain', 1], ['pushReplace', 1, 1, ['F', 'O', 'O', { type: 'image' }, { type: '/image' }, 'B', 'A', 'R']] ], expected: function ( data ) { data.splice( 1, 1, 'F', 'O', 'O', { type: 'image' }, { type: '/image' }, 'B', 'A', 'R' ); } }, 'converting an element': { calls: [ ['pushReplace', 0, 1, [{ type: 'paragraph' }]], ['pushRetain', 3], ['pushReplace', 4, 1, [{ type: '/paragraph' }]] ], expected: function ( data ) { data[0].type = 'paragraph'; delete data[0].attributes; data[4].type = '/paragraph'; } }, 'splitting an element': { calls: [ ['pushRetain', 2], [ 'pushReplace', 2, 0, [{ type: '/heading' }, { type: 'heading', attributes: { level: 1 } }] ] ], expected: function ( data ) { data.splice( 2, 0, { type: '/heading' }, { type: 'heading', attributes: { level: 1 } } ); } }, 'merging an element': { calls: [ ['pushRetain', 57], ['pushReplace', 57, 2, []] ], expected: function ( data ) { data.splice( 57, 2 ); } }, 'stripping elements': { calls: [ ['pushRetain', 3], ['pushReplace', 3, 1, []], ['pushRetain', 6], ['pushReplace', 10, 1, []] ], expected: function ( data ) { data.splice( 10, 1 ); data.splice( 3, 1 ); } }, 'inserting text after alien node at the end': { data: [ { type: 'paragraph' }, 'a', { type: 'alienInline' }, { type: '/alienInline' }, { type: '/paragraph' } ], calls: [ ['pushRetain', 4], ['pushReplace', 4, 0, ['b']] ], expected: function ( data ) { data.splice( 4, 0, 'b' ); } }, 'inserting metadata element into existing element list': { data: ve.dm.example.withMeta, calls: [ ['pushRetain', 11 ], ['pushRetainMetadata', 2 ], ['pushReplaceMetadata', [], [ metaElementInsert ] ], ['pushRetainMetadata', 2 ], ['pushRetain', 1 ] ], expected: function ( data ) { data.splice( 25, 0, metaElementInsert, metaElementInsertClose ); } }, 'inserting metadata element into empty list': { data: ve.dm.example.withMeta, calls: [ ['pushRetain', 3 ], ['pushReplaceMetadata', [], [ 
metaElementInsert ] ], ['pushRetain', 9 ] ], expected: function ( data ) { data.splice( 7, 0, metaElementInsert, metaElementInsertClose ); } }, 'removing all metadata elements from a metadata list': { data: ve.dm.example.withMeta, calls: [ ['pushRetain', 11 ], ['pushReplaceMetadata', ve.dm.example.withMetaMetaData[11], [] ], ['pushRetain', 1 ] ], expected: function ( data ) { data.splice( 21, 8 ); } }, 'removing some metadata elements from metadata list': { data: ve.dm.example.withMeta, calls: [ ['pushRetain', 11 ], ['pushRetainMetadata', 1 ], ['pushReplaceMetadata', ve.dm.example.withMetaMetaData[11].slice( 1, 3 ), [] ], ['pushRetainMetadata', 1 ], ['pushRetain', 1 ] ], expected: function ( data ) { data.splice( 23, 4 ); } }, 'replacing metadata at end of list': { data: ve.dm.example.withMeta, calls: [ ['pushRetain', 11 ], ['pushRetainMetadata', 3 ], ['pushReplaceMetadata', [ ve.dm.example.withMetaMetaData[11][3] ], [ metaElementInsert ] ], ['pushRetain', 1 ] ], expected: function ( data ) { data.splice( 27, 2, metaElementInsert, metaElementInsertClose ); } }, 'replacing metadata twice at the same offset': { data: ve.dm.example.withMeta, calls: [ [ 'pushRetain', 11 ], [ 'pushRetainMetadata', 1 ], [ 'pushReplaceMetadata', [ ve.dm.example.withMetaMetaData[11][1] ], [ metaElementInsert ] ], [ 'pushRetainMetadata', 1 ], [ 'pushReplaceMetadata', [ ve.dm.example.withMetaMetaData[11][3] ], [ metaElementInsert ] ], [ 'pushRetain', 1 ] ], expected: function ( data ) { data.splice( 23, 2, metaElementInsert, metaElementInsertClose ); data.splice( 27, 2, metaElementInsert, metaElementInsertClose ); } }, 'removing data from between metadata merges metadata': { data: ve.dm.example.withMeta, calls: [ ['pushRetain', 7 ], ['pushReplace', 7, 2, []], ['pushRetain', 2 ] ], expected: function ( data ) { data.splice( 15, 2 ); } }, 'structural replacement starting at an offset without metadata': { data: [ { type: 'paragraph' }, 'F', { type: 'alienMeta', attributes: { domElements: $( 
'<!-- foo -->' ).toArray() } }, { type: '/alienMeta' }, 'o', 'o', { type: '/paragraph' } ], calls: [ ['pushReplace', 0, 5, [ { type: 'table' }, { type: '/table' } ]] ], expected: function ( data ) { data.splice( 0, 2 ); data.splice( 2, 3, { type: 'table' }, { type: '/table' } ); } }, 'structural replacement starting at an offset with metadata': { data: [ { type: 'alienMeta', attributes: { domElements: $( '<!-- foo -->' ).toArray() } }, { type: '/alienMeta' }, { type: 'paragraph' }, 'F', { type: 'alienMeta', attributes: { style: 'comment', text: ' inline ' } }, { type: '/alienMeta' }, 'o', 'o', { type: '/paragraph' } ], calls: [ ['pushReplace', 0, 5, [ { type: 'table' }, { type: '/table' } ]] ], expected: function ( data ) { // metadata is merged. data.splice( 2, 2 ); data.splice( 4, 3, { type: 'table' }, { type: '/table' } ); } }, 'structural replacement ending at an offset with metadata': { data: [ { type: 'alienMeta', attributes: { domElements: $( '<!-- foo -->' ).toArray()<|fim▁hole|> } }, { type: '/alienMeta' }, { type: 'paragraph' }, 'F', { type: 'alienMeta', attributes: { style: 'comment', text: ' inline ' } }, { type: '/alienMeta' }, 'o', 'o', { type: '/paragraph' }, { type: 'alienMeta', attributes: { domElements: $( '<!-- bar -->' ).toArray() } }, { type: '/alienMeta' }, { type: 'paragraph' }, 'B', 'a', 'r', { type: '/paragraph' } ], calls: [ ['pushReplace', 0, 5, [ { type: 'table' }, { type: '/table' } ]], ['pushRetain', 5 ] ], expected: function ( data ) { // metadata is merged. 
data.splice( 2, 2 ); data.splice( 4, 3, { type: 'table' }, { type: '/table' } ); } }, 'structural deletion ending at an offset with metadata': { data: [ { type: 'alienMeta', attributes: { domElements: $( '<!-- foo -->' ).toArray() } }, { type: '/alienMeta' }, { type: 'paragraph' }, 'F', { type: 'alienMeta', attributes: { style: 'comment', text: ' inline ' } }, { type: '/alienMeta' }, 'o', 'o', { type: '/paragraph' }, { type: 'alienMeta', attributes: { domElements: $( '<!-- bar -->' ).toArray() } }, { type: '/alienMeta' }, { type: 'paragraph' }, 'B', 'a', 'r', { type: '/paragraph' } ], calls: [ ['pushReplace', 0, 5, [] ], ['pushRetain', 5 ] ], expected: function ( data ) { // metadata is merged. data.splice( 2, 2 ); data.splice( 4, 3 ); } }, 'preserves metadata on unwrap': { data: ve.dm.example.listWithMeta, calls: [ [ 'newFromWrap', new ve.Range( 1, 11 ), [ { type: 'list' } ], [], [ { type: 'listItem', attributes: { styles: ['bullet'] } } ], [] ] ], expected: function ( data ) { data.splice( 35, 1 ); // remove '/list' data.splice( 32, 1 ); // remove '/listItem' data.splice( 20, 1 ); // remove 'listItem' data.splice( 17, 1 ); // remove '/listItem' data.splice( 5, 1 ); // remove 'listItem' data.splice( 2, 1 ); // remove 'list' } }, 'inserting trailing metadata (1)': { data: ve.dm.example.listWithMeta, calls: [ [ 'newFromMetadataInsertion', 12, 0, [ { type: 'alienMeta', attributes: { domElements: $( '<meta property="fourteen" />' ).toArray() } } ] ] ], expected: function ( data ) { ve.batchSplice( data, data.length - 2, 0, [ { type: 'alienMeta', attributes: { domElements: $( '<meta property="fourteen" />' ).toArray() } }, { type: '/alienMeta' } ] ); } }, 'inserting trailing metadata (2)': { data: ve.dm.example.listWithMeta, calls: [ [ 'newFromMetadataInsertion', 12, 1, [ { type: 'alienMeta', attributes: { domElements: $( '<meta property="fourteen" />' ).toArray() } } ] ] ], expected: function ( data ) { ve.batchSplice( data, data.length, 0, [ { type: 'alienMeta', 
attributes: { domElements: $( '<meta property="fourteen" />' ).toArray() } }, { type: '/alienMeta' } ] ); } }, 'removing trailing metadata': { data: ve.dm.example.listWithMeta, calls: [ [ 'newFromMetadataRemoval', 12, new ve.Range( 0, 1 ) ] ], expected: function ( data ) { ve.batchSplice( data, data.length - 2, 2, [] ); } }, 'preserves trailing metadata': { data: ve.dm.example.listWithMeta, calls: [ [ 'newFromInsertion', 4, [ 'b' ] ] ], expected: function ( data ) { ve.batchSplice( data, 12, 0, [ 'b' ] ); } } }; for ( msg in cases ) { n += ( 'expected' in cases[msg] ) ? 4 : 1; } QUnit.expect( n ); // Run tests for ( msg in cases ) { // Generate original document originalData = cases[msg].data || ve.dm.example.data; originalDoc = new ve.dm.Document( ve.dm.example.preprocessAnnotations( ve.copy( originalData ), store ) ); originalDoc.buildNodeTree(); testDoc = new ve.dm.Document( ve.dm.example.preprocessAnnotations( ve.copy( originalData ), store ) ); testDoc.buildNodeTree(); tx = new ve.dm.Transaction(); for ( i = 0; i < cases[msg].calls.length; i++ ) { // some calls need the document as its first argument if ( /^(pushReplace$|new)/.test( cases[msg].calls[i][0] ) ) { cases[msg].calls[i].splice( 1, 0, testDoc ); } // special case static methods of Transaction if ( /^new/.test( cases[msg].calls[i][0] ) ) { tx = ve.dm.Transaction[cases[msg].calls[i][0]].apply( null, cases[msg].calls[i].slice( 1 ) ); break; } tx[cases[msg].calls[i][0]].apply( tx, cases[msg].calls[i].slice( 1 ) ); } if ( 'expected' in cases[msg] ) { // Generate expected document expectedData = ve.copy( originalData ); cases[msg].expected( expectedData ); expectedDoc = new ve.dm.Document( ve.dm.example.preprocessAnnotations( expectedData, store ) ); expectedDoc.buildNodeTree(); // Commit testDoc.commit( tx ); assert.deepEqualWithDomElements( testDoc.getFullData(), expectedDoc.getFullData(), 'commit (data): ' + msg ); assert.equalNodeTree( testDoc.getDocumentNode(), expectedDoc.getDocumentNode(), 'commit 
(tree): ' + msg ); // Rollback testDoc.commit( tx.reversed() ); assert.deepEqualWithDomElements( testDoc.getFullData(), originalDoc.getFullData(), 'rollback (data): ' + msg ); assert.equalNodeTree( testDoc.getDocumentNode(), originalDoc.getDocumentNode(), 'rollback (tree): ' + msg ); } else if ( 'exception' in cases[msg] ) { /*jshint loopfunc:true */ assert.throws( function () { testDoc.commit( tx ); }, cases[msg].exception, 'commit: ' + msg ); } } } );<|fim▁end|>
# Utilities ------------------------------------------------------------------ #

import math


def clamp(val, min, max):
    """Clamp ``val`` to the inclusive range [``min``, ``max``].

    Note: the parameter names shadow the ``min``/``max`` builtins; they are
    kept unchanged for backward compatibility with keyword-argument callers.
    """
    if val <= min:
        return min
    elif val >= max:
        return max
    return val


def fixAngle(angle):
    """Normalize ``angle`` (degrees) into the range [-180.0, 180.0].

    Values outside the range are wrapped by repeatedly adding or
    subtracting full turns of 360 degrees.
    """
    while angle > 180.0:
        angle -= 360.0
    while angle < -180.0:
        angle += 360.0
    return angle


def diffAngle(angle1, angle2):
    """Return the signed difference ``angle1 - angle2`` in degrees, normalized
    to [-180.0, 180.0]."""
    return fixAngle(angle1 - angle2)

# Utilities ------------------------------------------------------------------ #
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate combine; use self::combine::*; use self::combine::combinator::{Many, SepBy}; use self::combine::primitives::{Consumed, Stream}; use std::collections::HashMap; #[derive(Debug, PartialEq, Eq)] pub enum Object { IntObject(i32), Boolean(bool), String(String), VecObject(Vec<Object>), StructVecObject(Vec<HashMap<String, Object>>), RandomText(String), } pub type Section = HashMap<String, Object>; pub type Sections = Vec<Section>; fn title_parser(input: State<&str>) -> ParseResult<String, &str> { between(token('['), token(']'), many1(alpha_num())).parse_state(input) } fn string_parser(input: State<&str>) -> ParseResult<String, &str> { fn escaped_char_parser(input: State<&str>) -> ParseResult<char, &str> { let (c, input) = try!(any().parse_lazy(input)); let mut back_slash_char = satisfy(|c| "\"\\/bfnrt".chars().find(|x| *x == c).is_some()).map(|c| { match c { '"' => '"', '\\' => '\\', '/' => '/', 'b' => '\u{0008}', 'f' => '\u{000c}', 'n' => '\n', 'r' => '\r', 't' => '\t', c => c//Should never happen } }); match c { '\\' => input.combine(|input| back_slash_char.parse_state(input)), '"' => Err(Consumed::Empty(ParseError::from_errors(input.into_inner().position, Vec::new()))), _ => Ok((c, input)) } } optional(string("_(")) .with(between(char('"'), char('"'), many(parser(escaped_char_parser)) )) .skip(optional(char(')'))).parse_state(input) } fn boolean_parser(input : State<&str>) -> ParseResult<Object, &str> { string("TRUE").map(|_| Object::Boolean(true)).or(string("FALSE").map(|_| Object::Boolean(false))).parse_state(input) } fn wierd_exception(input : State<&str>) -> ParseResult<Object, &str> { string("$$").with(many1(letter())).map(|string : String| Object::RandomText(string)).parse_state(input) } fn single_object_parser(input : State<&str>) -> ParseResult<Object, &str> { let integer_parser = spaces().with(many1(digit())).map(|string : String| Object::IntObject(string.parse::<i32>().unwrap())); let 
string_object_parser = parser(string_parser).map(|string| Object::String(string)); integer_parser.or(parser(boolean_parser)).or(string_object_parser).or(parser(wierd_exception)).parse_state(input) } fn struct_parser(input: State<&str>) -> ParseResult<(Vec<String>, Vec<Vec<Object>>), &str> { let comma_parser = spaces().with(char(',')).skip(spaces()); let title_parser = char('{').with(spaces()).with(sep_by(parser(string_parser), comma_parser.clone())); let row_parser = many(spaces().with(sep_by(parser(single_object_parser), comma_parser))); // fn create_map(tuple : (vec<String>, vec<vec<Object>>)); title_parser.and(row_parser).parse_state(input) } fn object_parser(input : State<&str>) -> ParseResult<Object, &str> { unimplemented!() } fn assignment_parser(input : State<&str>) -> ParseResult<(String, Object), &str> { unimplemented!() } fn section_parser(input : State<&str>) -> ParseResult<(String, HashMap<String, Object>), &str> { unimplemented!() } pub fn sections_parser(input: State<&str>) -> ParseResult<Object, &str> { unimplemented!() } #[cfg(test)] mod tests { use std::collections::HashMap; use std::fmt::Debug; use super::combine::*; use super::{Object}; use super::{assignment_parser, boolean_parser, object_parser, section_parser, sections_parser, single_object_parser, string_parser, struct_parser, title_parser, wierd_exception}; const true_object : Object = Object::Boolean(true); fn test<A: Eq + Debug, F: Fn(State<&str>) -> ParseResult<A, &str>>(my_parser : F, input : &str, output : A) { let result = parser(my_parser).parse(input); assert!(result.is_ok()); match result { Ok((result, rest)) => { assert_eq!(result, output); assert_eq!(rest, ""); }, _ => assert!(false) } } #[test] fn test_title_parser() { test(title_parser, "[hello]", "hello".to_string()); } #[test] fn test_string_parser() { test(string_parser, "\"hello \\\"world\\\"\"", "hello \"world\"".to_string()); } #[test] fn test_boolean_parser() { test(boolean_parser, "TRUE", true_object); } #[test] fn 
test_wierd_exception_parser() { let wierd_object : Object = Object::RandomText("wierd".to_string()); test(wierd_exception, "$$wierd", wierd_object); } #[test] fn test_single_object_parser() { let wierd_object : Object = Object::RandomText("wierd".to_string()); test(single_object_parser, "123", Object::IntObject(123)); test(single_object_parser, "TRUE", true_object); test(single_object_parser, "\"string\"", Object::String("string".to_string())); test(single_object_parser, "$$wierd", wierd_object); } #[test] fn test_struct_parser() {<|fim▁hole|> \"hello\", \"world\" TRUE, FALSE }" , ( vec!("col1".to_string(), "col2".to_string()) , vec!(vec!(Object::IntObject(1), Object::IntObject(2)), vec!(Object::String("hello".to_string()), Object::String("world".to_string())), vec!(true_object, Object::Boolean(false))) ) ) } #[test] fn test_object_parser() { test(object_parser, "1, 2, 3", Object::VecObject(vec!(Object::IntObject(1), Object::IntObject(2), Object::IntObject(3)))); } #[test] fn test_assignment_parser() { test(assignment_parser, "test = 1", ("test".to_string(), Object::IntObject(1))); } #[test] fn test_section_parser() { let mut hash_map = HashMap::new(); hash_map.insert("test1".to_string(), Object::IntObject(1)); hash_map.insert("test2".to_string(), Object::String("hello world".to_string())); hash_map.insert("test3".to_string(), true_object); test(section_parser, "[test] test1 = 1 test2 = \"hello world\" test3 = TRUE", ("test".to_string(), hash_map)); } }<|fim▁end|>
test( struct_parser , "{col1, col2 1, 2
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2015 Yutaka Kamei */ #![feature(test)] extern crate urlparse; extern crate test; use urlparse::*; use test::Bencher; #[bench] fn bench_quote(b: &mut Bencher) { b.iter(|| quote("/a/テスト !/", &[b'/'])); } #[bench] fn bench_quote_plus(b: &mut Bencher) { b.iter(|| quote_plus("/a/テスト !/", &[b'/'])); } #[bench] fn bench_unquote(b: &mut Bencher) { b.iter(|| unquote("/a/%E3%83%86%E3%82%B9%E3%83%88%20%21/")); } #[bench] fn bench_unquote_plus(b: &mut Bencher) { b.iter(|| unquote_plus("/a/%E3%83%86%E3%82%B9%E3%83%88%20%21/")); } #[bench] fn bench_parse_qs(b: &mut Bencher) {<|fim▁hole|> b.iter(|| parse_qs("q=%E3%83%86%E3%82%B9%E3%83%88+%E3%83%86%E3%82%B9%E3%83%88&e=utf-8")); } #[bench] fn bench_urlparse(b: &mut Bencher) { b.iter(|| urlparse("http://Example.com:8080/foo?filter=%28%21%28cn%3Dbar%29%29")); } #[bench] fn bench_urlunparse(b: &mut Bencher) { b.iter(|| { let url = Url::new(); let url = Url{ scheme: "http".to_string(), netloc: "www.example.com".to_string(), path: "/foo".to_string(), query: Some("filter=%28%21%28cn%3Dbar%29%29".to_string()), .. url}; urlunparse(url) }); }<|fim▁end|>
<|file_name|>ast_util.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::*; use ast; use ast_util; use codemap; use codemap::Span; use owned_slice::OwnedSlice; use parse::token; use print::pprust; use ptr::P; use visit::Visitor; use visit; use std::cmp; use std::u32; pub fn path_name_i(idents: &[Ident]) -> String { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") idents.iter().map(|i| { token::get_ident(*i).to_string() }).collect::<Vec<String>>().connect("::") } pub fn local_def(id: NodeId) -> DefId { ast::DefId { krate: LOCAL_CRATE, node: id } } pub fn is_local(did: ast::DefId) -> bool { did.krate == LOCAL_CRATE } pub fn stmt_id(s: &Stmt) -> NodeId { match s.node { StmtDecl(_, id) => id, StmtExpr(_, id) => id, StmtSemi(_, id) => id, StmtMac(..) 
=> panic!("attempted to analyze unexpanded stmt") }<|fim▁hole|> pub fn binop_to_string(op: BinOp_) -> &'static str { match op { BiAdd => "+", BiSub => "-", BiMul => "*", BiDiv => "/", BiRem => "%", BiAnd => "&&", BiOr => "||", BiBitXor => "^", BiBitAnd => "&", BiBitOr => "|", BiShl => "<<", BiShr => ">>", BiEq => "==", BiLt => "<", BiLe => "<=", BiNe => "!=", BiGe => ">=", BiGt => ">" } } pub fn lazy_binop(b: BinOp_) -> bool { match b { BiAnd => true, BiOr => true, _ => false } } pub fn is_shift_binop(b: BinOp_) -> bool { match b { BiShl => true, BiShr => true, _ => false } } pub fn is_comparison_binop(b: BinOp_) -> bool { match b { BiEq | BiLt | BiLe | BiNe | BiGt | BiGe => true, BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem | BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr => false, } } /// Returns `true` if the binary operator takes its arguments by value pub fn is_by_value_binop(b: BinOp_) -> bool { !is_comparison_binop(b) } /// Returns `true` if the unary operator takes its argument by value pub fn is_by_value_unop(u: UnOp) -> bool { match u { UnNeg | UnNot => true, _ => false, } } pub fn unop_to_string(op: UnOp) -> &'static str { match op { UnUniq => "box() ", UnDeref => "*", UnNot => "!", UnNeg => "-", } } pub fn is_path(e: P<Expr>) -> bool { match e.node { ExprPath(..) => true, _ => false } } /// Get a string representation of a signed int type, with its value. /// We want to avoid "45int" and "-3int" in favor of "45" and "-3" pub fn int_ty_to_string(t: IntTy, val: Option<i64>) -> String { let s = match t { TyIs => "isize", TyI8 => "i8", TyI16 => "i16", TyI32 => "i32", TyI64 => "i64" }; match val { // cast to a u64 so we can correctly print INT64_MIN. All integral types // are parsed as u64, so we wouldn't want to print an extra negative // sign. 
Some(n) => format!("{}{}", n as u64, s), None => s.to_string() } } pub fn int_ty_max(t: IntTy) -> u64 { match t { TyI8 => 0x80, TyI16 => 0x8000, TyIs | TyI32 => 0x80000000, // actually ni about TyIs TyI64 => 0x8000000000000000 } } /// Get a string representation of an unsigned int type, with its value. /// We want to avoid "42u" in favor of "42us". "42uint" is right out. pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String { let s = match t { TyUs => "usize", TyU8 => "u8", TyU16 => "u16", TyU32 => "u32", TyU64 => "u64" }; match val { Some(n) => format!("{}{}", n, s), None => s.to_string() } } pub fn uint_ty_max(t: UintTy) -> u64 { match t { TyU8 => 0xff, TyU16 => 0xffff, TyUs | TyU32 => 0xffffffff, // actually ni about TyUs TyU64 => 0xffffffffffffffff } } pub fn float_ty_to_string(t: FloatTy) -> String { match t { TyF32 => "f32".to_string(), TyF64 => "f64".to_string(), } } // convert a span and an identifier to the corresponding // 1-segment path pub fn ident_to_path(s: Span, identifier: Ident) -> Path { ast::Path { span: s, global: false, segments: vec!( ast::PathSegment { identifier: identifier, parameters: ast::AngleBracketedParameters(ast::AngleBracketedParameterData { lifetimes: Vec::new(), types: OwnedSlice::empty(), bindings: OwnedSlice::empty(), }) } ), } } // If path is a single segment ident path, return that ident. Otherwise, return // None. pub fn path_to_ident(path: &Path) -> Option<Ident> { if path.segments.len() != 1 { return None; } let segment = &path.segments[0]; if !segment.parameters.is_empty() { return None; } Some(segment.identifier) } pub fn ident_to_pat(id: NodeId, s: Span, i: Ident) -> P<Pat> { P(Pat { id: id, node: PatIdent(BindByValue(MutImmutable), codemap::Spanned{span:s, node:i}, None), span: s }) } pub fn name_to_dummy_lifetime(name: Name) -> Lifetime { Lifetime { id: DUMMY_NODE_ID, span: codemap::DUMMY_SP, name: name } } /// Generate a "pretty" name for an `impl` from its type and trait. 
/// This is designed so that symbols of `impl`'d methods give some /// hint of where they came from, (previously they would all just be /// listed as `__extensions__::method_name::hash`, with no indication /// of the type). pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: Option<&Ty>) -> Ident { let mut pretty = match ty { Some(t) => pprust::ty_to_string(t), None => String::from("..") }; match *trait_ref { Some(ref trait_ref) => { pretty.push('.'); pretty.push_str(&pprust::path_to_string(&trait_ref.path)); } None => {} } token::gensym_ident(&pretty[..]) } pub fn struct_field_visibility(field: ast::StructField) -> Visibility { match field.node.kind { ast::NamedField(_, v) | ast::UnnamedField(v) => v } } /// Maps a binary operator to its precedence pub fn operator_prec(op: ast::BinOp_) -> usize { match op { // 'as' sits here with 12 BiMul | BiDiv | BiRem => 11, BiAdd | BiSub => 10, BiShl | BiShr => 9, BiBitAnd => 8, BiBitXor => 7, BiBitOr => 6, BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3, BiAnd => 2, BiOr => 1 } } /// Precedence of the `as` operator, which is a binary operator /// not appearing in the prior table. 
pub const AS_PREC: usize = 12; pub fn empty_generics() -> Generics { Generics { lifetimes: Vec::new(), ty_params: OwnedSlice::empty(), where_clause: WhereClause { id: DUMMY_NODE_ID, predicates: Vec::new(), } } } // ______________________________________________________________________ // Enumerating the IDs which appear in an AST #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)] pub struct IdRange { pub min: NodeId, pub max: NodeId, } impl IdRange { pub fn max() -> IdRange { IdRange { min: u32::MAX, max: u32::MIN, } } pub fn empty(&self) -> bool { self.min >= self.max } pub fn add(&mut self, id: NodeId) { self.min = cmp::min(self.min, id); self.max = cmp::max(self.max, id + 1); } } pub trait IdVisitingOperation { fn visit_id(&mut self, node_id: NodeId); } /// A visitor that applies its operation to all of the node IDs /// in a visitable thing. pub struct IdVisitor<'a, O:'a> { pub operation: &'a mut O, pub pass_through_items: bool, pub visited_outermost: bool, } impl<'a, O: IdVisitingOperation> IdVisitor<'a, O> { fn visit_generics_helper(&mut self, generics: &Generics) { for type_parameter in &*generics.ty_params { self.operation.visit_id(type_parameter.id) } for lifetime in &generics.lifetimes { self.operation.visit_id(lifetime.lifetime.id) } } } impl<'a, 'v, O: IdVisitingOperation> Visitor<'v> for IdVisitor<'a, O> { fn visit_mod(&mut self, module: &Mod, _: Span, node_id: NodeId) { self.operation.visit_id(node_id); visit::walk_mod(self, module) } fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) { self.operation.visit_id(foreign_item.id); visit::walk_foreign_item(self, foreign_item) } fn visit_item(&mut self, item: &Item) { if !self.pass_through_items { if self.visited_outermost { return } else { self.visited_outermost = true } } self.operation.visit_id(item.id); match item.node { ItemUse(ref view_path) => { match view_path.node { ViewPathSimple(_, _) | ViewPathGlob(_) => {} ViewPathList(_, ref paths) => { for path in paths { 
self.operation.visit_id(path.node.id()) } } } } ItemEnum(ref enum_definition, _) => { for variant in &enum_definition.variants { self.operation.visit_id(variant.node.id) } } _ => {} } visit::walk_item(self, item); self.visited_outermost = false } fn visit_local(&mut self, local: &Local) { self.operation.visit_id(local.id); visit::walk_local(self, local) } fn visit_block(&mut self, block: &Block) { self.operation.visit_id(block.id); visit::walk_block(self, block) } fn visit_stmt(&mut self, statement: &Stmt) { self.operation.visit_id(ast_util::stmt_id(statement)); visit::walk_stmt(self, statement) } fn visit_pat(&mut self, pattern: &Pat) { self.operation.visit_id(pattern.id); visit::walk_pat(self, pattern) } fn visit_expr(&mut self, expression: &Expr) { self.operation.visit_id(expression.id); visit::walk_expr(self, expression) } fn visit_ty(&mut self, typ: &Ty) { self.operation.visit_id(typ.id); visit::walk_ty(self, typ) } fn visit_generics(&mut self, generics: &Generics) { self.visit_generics_helper(generics); visit::walk_generics(self, generics) } fn visit_fn(&mut self, function_kind: visit::FnKind<'v>, function_declaration: &'v FnDecl, block: &'v Block, span: Span, node_id: NodeId) { if !self.pass_through_items { match function_kind { visit::FkMethod(..) if self.visited_outermost => return, visit::FkMethod(..) => self.visited_outermost = true, _ => {} } } self.operation.visit_id(node_id); match function_kind { visit::FkItemFn(_, generics, _, _, _) => { self.visit_generics_helper(generics) } visit::FkMethod(_, sig, _) => { self.visit_generics_helper(&sig.generics) } visit::FkFnBlock => {} } for argument in &function_declaration.inputs { self.operation.visit_id(argument.id) } visit::walk_fn(self, function_kind, function_declaration, block, span); if !self.pass_through_items { if let visit::FkMethod(..) 
= function_kind { self.visited_outermost = false; } } } fn visit_struct_field(&mut self, struct_field: &StructField) { self.operation.visit_id(struct_field.node.id); visit::walk_struct_field(self, struct_field) } fn visit_struct_def(&mut self, struct_def: &StructDef, _: ast::Ident, _: &ast::Generics, id: NodeId) { self.operation.visit_id(id); struct_def.ctor_id.map(|ctor_id| self.operation.visit_id(ctor_id)); visit::walk_struct_def(self, struct_def); } fn visit_trait_item(&mut self, ti: &ast::TraitItem) { self.operation.visit_id(ti.id); visit::walk_trait_item(self, ti); } fn visit_impl_item(&mut self, ii: &ast::ImplItem) { self.operation.visit_id(ii.id); visit::walk_impl_item(self, ii); } fn visit_lifetime_ref(&mut self, lifetime: &Lifetime) { self.operation.visit_id(lifetime.id); } fn visit_lifetime_def(&mut self, def: &LifetimeDef) { self.visit_lifetime_ref(&def.lifetime); } fn visit_trait_ref(&mut self, trait_ref: &TraitRef) { self.operation.visit_id(trait_ref.ref_id); visit::walk_trait_ref(self, trait_ref); } } pub fn visit_ids_for_inlined_item<O: IdVisitingOperation>(item: &InlinedItem, operation: &mut O) { let mut id_visitor = IdVisitor { operation: operation, pass_through_items: true, visited_outermost: false, }; visit::walk_inlined_item(&mut id_visitor, item); } struct IdRangeComputingVisitor { result: IdRange, } impl IdVisitingOperation for IdRangeComputingVisitor { fn visit_id(&mut self, id: NodeId) { self.result.add(id); } } pub fn compute_id_range_for_inlined_item(item: &InlinedItem) -> IdRange { let mut visitor = IdRangeComputingVisitor { result: IdRange::max() }; visit_ids_for_inlined_item(item, &mut visitor); visitor.result } /// Computes the id range for a single fn body, ignoring nested items. 
pub fn compute_id_range_for_fn_body(fk: visit::FnKind, decl: &FnDecl, body: &Block, sp: Span, id: NodeId) -> IdRange { let mut visitor = IdRangeComputingVisitor { result: IdRange::max() }; let mut id_visitor = IdVisitor { operation: &mut visitor, pass_through_items: false, visited_outermost: false, }; id_visitor.visit_fn(fk, decl, body, sp, id); id_visitor.operation.result } pub fn walk_pat<F>(pat: &Pat, mut it: F) -> bool where F: FnMut(&Pat) -> bool { // FIXME(#19596) this is a workaround, but there should be a better way fn walk_pat_<G>(pat: &Pat, it: &mut G) -> bool where G: FnMut(&Pat) -> bool { if !(*it)(pat) { return false; } match pat.node { PatIdent(_, _, Some(ref p)) => walk_pat_(&**p, it), PatStruct(_, ref fields, _) => { fields.iter().all(|field| walk_pat_(&*field.node.pat, it)) } PatEnum(_, Some(ref s)) | PatTup(ref s) => { s.iter().all(|p| walk_pat_(&**p, it)) } PatBox(ref s) | PatRegion(ref s, _) => { walk_pat_(&**s, it) } PatVec(ref before, ref slice, ref after) => { before.iter().all(|p| walk_pat_(&**p, it)) && slice.iter().all(|p| walk_pat_(&**p, it)) && after.iter().all(|p| walk_pat_(&**p, it)) } PatMac(_) => panic!("attempted to analyze unexpanded pattern"), PatWild(_) | PatLit(_) | PatRange(_, _) | PatIdent(_, _, _) | PatEnum(_, _) | PatQPath(_, _) => { true } } } walk_pat_(pat, &mut it) } /// Returns true if the given struct def is tuple-like; i.e. that its fields /// are unnamed. pub fn struct_def_is_tuple_like(struct_def: &ast::StructDef) -> bool { struct_def.ctor_id.is_some() } /// Returns true if the given pattern consists solely of an identifier /// and false otherwise. pub fn pat_is_ident(pat: P<ast::Pat>) -> bool { match pat.node { ast::PatIdent(..) => true, _ => false, } } // are two paths equal when compared unhygienically? // since I'm using this to replace ==, it seems appropriate // to compare the span, global, etc. fields as well. 
pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) && (segments_name_eq(&a.segments[..], &b.segments[..])) } // are two arrays of segments equal when compared unhygienically? pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> bool { a.len() == b.len() && a.iter().zip(b.iter()).all(|(s, t)| { s.identifier.name == t.identifier.name && // FIXME #7743: ident -> name problems in lifetime comparison? // can types contain idents? s.parameters == t.parameters }) } /// Returns true if this literal is a string and false otherwise. pub fn lit_is_str(lit: &Lit) -> bool { match lit.node { LitStr(..) => true, _ => false, } } #[cfg(test)] mod tests { use ast::*; use super::*; fn ident_to_segment(id : &Ident) -> PathSegment { PathSegment {identifier: id.clone(), parameters: PathParameters::none()} } #[test] fn idents_name_eq_test() { assert!(segments_name_eq( &[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] .iter().map(ident_to_segment).collect::<Vec<PathSegment>>(), &[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] .iter().map(ident_to_segment).collect::<Vec<PathSegment>>())); assert!(!segments_name_eq( &[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] .iter().map(ident_to_segment).collect::<Vec<PathSegment>>(), &[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] .iter().map(ident_to_segment).collect::<Vec<PathSegment>>())); } }<|fim▁end|>
}
<|file_name|>scan-gspread-targets.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # A python script to take targets from a google spreadsheet and run a # Nessus vulnerability scan. import json import gspread from oauth2client.service_account import ServiceAccountCredentials from nessrest import ness6rest import getpass<|fim▁hole|>credentials = ServiceAccountCredentials.from_json_keyfile_name('API-xxxxxxxxxxxx.json', scopes) gc = gspread.authorize(credentials) # Open worksheet from spreadsheet wks = gc.open("hosts").sheet1 # Get all values from the first column host_list = wks.col_values(1) temp_hosts = [] for i in host_list: # ignore the first entry as it's just header information # del host_list[0] if i and i != 'IP': # iterate through all rows and add to a temp array temp_hosts.append(i) print(temp_hosts) # scan # Scan Settings # nessus_url = "https://nessus.example.com:8834" nessus_url = "https://192.168.111.10:8834" scan_policy = "Basic Network Scan" scan_name = "My Scan" # Scanner Credentials user = getpass._raw_input('User: ') password = getpass.getpass() # login = "username" # password = "password" scan = ness6rest.Scanner(url=nessus_url, login=user, password=password, insecure=True) # Set scan policy that should be used scan.policy_set(name=scan_policy) # alt_targets on edit can take an array otherwise a new scan expects a string hosts = ','.join(temp_hosts) # Set target and scan name scan.scan_add(targets=hosts, name=scan_name) # scan.scan_exists(targets=hosts, name=scan_name) # Run Scan scan.scan_run() # Download results # scan.action(action="scans", method="get") # for s in scan.res['scans']: # scan.scan_name = s['name'] # scan.scan_id = s['id'] # xml_nessus = scan.download_scan(export_format='nessus') # fp = open('%s_%s.nessus'%(scan.scan_name,scan.scan_id),"w") # fp.write(xml_nessus) # fp.close()<|fim▁end|>
# Login with your Google account's API key scopes = ['https://spreadsheets.google.com/feeds']
<|file_name|>16853_H1.rs<|end_file_name|><|fim▁begin|>Per(s)PSA @ damp 02% PSA @ damp 05% PSA @ damp 07% PSA @ damp 10% PSA @ damp 20% PSA @ damp 30% (m/s/s) 0.000 9.4427940E-002 9.4423000E-002 9.4423000E-002 9.4423000E-002 9.4423000E-002 9.4423000E-002 0.010 9.5077590E-002 9.4428840E-002 9.4429890E-002 9.4431070E-002 9.4431960E-002 9.4428650E-002 0.020 9.5591330E-002 9.4625260E-002 9.4620050E-002 9.4612840E-002 9.4594430E-002 9.4568770E-002 0.030 9.5749930E-002 9.5000390E-002 9.4967740E-002 9.4936250E-002 9.4869930E-002 9.4800450E-002 0.040 9.8745260E-002 9.5374380E-002 9.5329660E-002 9.5295420E-002 9.5214410E-002 9.5103240E-002 0.050 9.6976610E-002 9.5949570E-002 9.5942870E-002 9.5893950E-002 9.5705000E-002 9.5493090E-002 0.075 1.1531181E-001 9.8418660E-002 9.8138930E-002 9.7762490E-002 9.6919100E-002 9.6423990E-002 0.100 1.2062754E-001 9.4149200E-002 9.4372430E-002 9.4898500E-002 9.6941000E-002 9.7405860E-002 0.110 1.0883858E-001 1.0466158E-001 1.0242570E-001 1.0061959E-001 9.8996480E-002 9.8575940E-002 0.120 1.1245289E-001 1.1175528E-001 1.0799459E-001 1.0496579E-001 1.0139600E-001 1.0001275E-001 0.130 1.1015215E-001 1.0287407E-001 1.0460675E-001 1.0488870E-001 1.0318545E-001 1.0145257E-001 0.140 1.6039297E-001 1.0596312E-001 1.0548107E-001 1.0560759E-001 1.0487497E-001 1.0288734E-001 <|fim▁hole|>0.170 1.8114334E-001 1.1299922E-001 1.1594012E-001 1.1643350E-001 1.1174187E-001 1.0726032E-001 0.180 1.8843634E-001 1.2634608E-001 1.2433743E-001 1.2162153E-001 1.1386395E-001 1.0850130E-001 0.190 2.4428245E-001 1.4613588E-001 1.3617998E-001 1.2763548E-001 1.1544209E-001 1.0951513E-001 0.200 2.8676498E-001 1.4930499E-001 1.3740356E-001 1.2805711E-001 1.1609120E-001 1.1030254E-001 0.220 2.6867819E-001 1.8119521E-001 1.6100505E-001 1.4197567E-001 1.1538588E-001 1.1160781E-001 0.240 3.5831174E-001 1.6528116E-001 1.5364350E-001 1.3919891E-001 1.1542684E-001 1.1361472E-001 0.260 3.6949748E-001 2.0494702E-001 1.8592773E-001 1.6347633E-001 1.2068838E-001 1.1725112E-001 0.280 
6.0108298E-001 2.5464493E-001 2.2191884E-001 1.8646128E-001 1.3218696E-001 1.2230708E-001 0.300 4.7455946E-001 2.6836160E-001 2.3960426E-001 1.9853677E-001 1.4644003E-001 1.2759830E-001 0.320 7.4330324E-001 3.3365777E-001 2.6509777E-001 2.2204168E-001 1.6004242E-001 1.3172095E-001 0.340 4.7217992E-001 3.6261472E-001 3.1050006E-001 2.5464940E-001 1.6835248E-001 1.3359098E-001 0.360 3.8319096E-001 4.0063030E-001 3.0961069E-001 2.5493053E-001 1.6886139E-001 1.3288547E-001 0.380 3.9555305E-001 3.5029444E-001 2.9184261E-001 2.3068261E-001 1.6293937E-001 1.3001145E-001 0.400 3.1193888E-001 3.1979039E-001 2.7496523E-001 2.2115907E-001 1.5350646E-001 1.2580371E-001 0.420 2.4612994E-001 2.9751867E-001 2.5141686E-001 2.0318647E-001 1.4291930E-001 1.2107600E-001 0.440 2.7351367E-001 2.3752791E-001 2.1014613E-001 1.7755309E-001 1.3284960E-001 1.1653665E-001 0.460 2.7012473E-001 1.8326020E-001 1.7182265E-001 1.5363026E-001 1.2481420E-001 1.1260331E-001 0.480 2.4799959E-001 1.8878539E-001 1.5952496E-001 1.3872321E-001 1.1993241E-001 1.0946549E-001 0.500 2.7150074E-001 1.8099619E-001 1.5631880E-001 1.3197145E-001 1.1823482E-001 1.0709962E-001 0.550 2.2443265E-001 2.0542115E-001 1.7928344E-001 1.5216398E-001 1.2084850E-001 1.0293319E-001 0.600 3.2437012E-001 2.1653445E-001 1.8949097E-001 1.6210338E-001 1.2160170E-001 9.8512280E-002 0.650 2.1797280E-001 1.8877083E-001 1.7303199E-001 1.5523672E-001 1.1628085E-001 9.2407720E-002 0.700 1.4387584E-001 2.1101710E-001 1.7496218E-001 1.4056417E-001 1.0735576E-001 8.4936020E-002 0.750 2.1221133E-001 1.6501458E-001 1.4778031E-001 1.3065909E-001 9.7020360E-002 7.7791750E-002 0.800 2.4024567E-001 1.3203551E-001 1.2476300E-001 1.1721472E-001 9.6638020E-002 7.8506170E-002 0.850 2.6158634E-001 1.6090111E-001 1.4763765E-001 1.3271925E-001 9.8795340E-002 7.7857970E-002 0.900 3.2579094E-001 1.8892585E-001 1.6800646E-001 1.4374727E-001 9.8360940E-002 7.5836030E-002 0.950 2.0211270E-001 1.9576155E-001 1.6988325E-001 1.4206217E-001 9.4453400E-002 
7.2609440E-002 1.000 1.2281635E-001 1.9655029E-001 1.5892756E-001 1.2749888E-001 8.7660710E-002 6.8579220E-002 1.100 1.0884829E-001 1.6333638E-001 1.4578114E-001 1.2460196E-001 8.2669110E-002 6.0797080E-002 1.200 1.4473225E-001 1.1467690E-001 1.1025990E-001 1.0259250E-001 7.6106900E-002 5.7322580E-002 1.300 1.1749568E-001 1.0529593E-001 1.0082978E-001 9.2977890E-002 6.8447830E-002 5.4146130E-002 1.400 9.8597820E-002 1.1508367E-001 1.0183192E-001 8.7511700E-002 6.0750530E-002 5.0988340E-002 1.500 1.0426316E-001 9.2942090E-002 8.2913580E-002 7.2584140E-002 5.6679380E-002 4.7804990E-002 1.600 1.0948269E-001 8.4281690E-002 7.7112080E-002 6.8366100E-002 5.2427970E-002 4.4699470E-002 1.700 1.2348919E-001 8.6900290E-002 7.8160320E-002 6.7807030E-002 4.9545370E-002 4.1763870E-002 1.800 1.3053462E-001 8.2094100E-002 7.4269560E-002 6.4578660E-002 4.7759380E-002 3.8871200E-002 1.900 8.5627760E-002 9.2201760E-002 7.9802510E-002 6.6791880E-002 4.5371000E-002 3.5802240E-002 2.000 5.5560060E-002 9.1473710E-002 7.7998710E-002 6.3743940E-002 4.1338440E-002 3.4232910E-002 2.200 4.6331560E-002 6.3030120E-002 5.3000380E-002 4.7489860E-002 3.7152610E-002 3.1342600E-002 2.400 3.2608640E-002 4.3966040E-002 4.0873300E-002 3.7735950E-002 3.1885800E-002 2.7965410E-002 2.600 5.2213990E-002 3.5629800E-002 3.0792830E-002 2.7902060E-002 2.7370990E-002 2.4910060E-002 2.800 4.2672350E-002 2.8944080E-002 2.7223890E-002 2.5551120E-002 2.4590340E-002 2.2464620E-002 3.000 3.4073620E-002 3.5273820E-002 2.9662590E-002 2.6169930E-002 2.2801780E-002 2.0511940E-002 3.200 2.5034220E-002 2.9120490E-002 2.7138020E-002 2.4994530E-002 2.1122620E-002 1.8855530E-002 3.400 2.3061410E-002 2.5553180E-002 2.3049050E-002 2.1976190E-002 1.9362510E-002 1.7381030E-002 3.600 2.4168030E-002 2.1682010E-002 1.9482470E-002 1.8977770E-002 1.7738650E-002 1.6064410E-002 3.800 3.1657720E-002 1.9889630E-002 1.8580320E-002 1.7326040E-002 1.6427010E-002 1.4899440E-002 4.000 4.0096860E-002 2.2407680E-002 2.0734300E-002 
1.8286550E-002 1.5396490E-002 1.3863560E-002 4.200 4.0433120E-002 2.5857150E-002 2.2678820E-002 1.8864600E-002 1.4502740E-002 1.2921090E-002 4.400 3.2261790E-002 2.7998450E-002 2.3126290E-002 1.8211300E-002 1.3619590E-002 1.2042460E-002 4.600 2.1424730E-002 2.6222210E-002 2.2056670E-002 1.7904330E-002 1.2702650E-002 1.1210660E-002 4.800 1.4190690E-002 2.2940540E-002 1.9974700E-002 1.6741140E-002 1.1777480E-002 1.0422550E-002 5.000 1.7715990E-002 1.8192830E-002 1.6917750E-002 1.5063780E-002 1.0892670E-002 9.6794700E-003 5.500 1.7830570E-002 1.2984720E-002 1.2662700E-002 1.1859240E-002 9.0320200E-003 8.0245500E-003 6.000 1.2652570E-002 1.3827240E-002 1.2079090E-002 1.0215070E-002 7.5218900E-003 6.6082400E-003 6.500 1.0224530E-002 1.3029930E-002 1.1032190E-002 9.0453600E-003 6.2045600E-003 5.3621100E-003 7.000 7.7846400E-003 9.9548500E-003 8.8900500E-003 7.6258300E-003 5.3853700E-003 4.3119600E-003 7.500 6.6763900E-003 7.9839800E-003 7.3638300E-003 6.5447800E-003 4.6090000E-003 3.7133900E-003 8.000 6.0072400E-003 6.5414200E-003 5.8798500E-003 5.2663300E-003 3.9152400E-003 3.1486900E-003 8.500 5.4644900E-003 5.7480600E-003 5.1834400E-003 4.5316400E-003 3.5816200E-003 2.9282600E-003 9.000 5.8168700E-003 5.1265100E-003 4.6232900E-003 4.1658500E-003 3.3338900E-003 2.7751400E-003 9.500 5.4644900E-003 4.4708400E-003 4.1621400E-003 3.8724500E-003 3.0659100E-003 2.6300100E-003 10.00 5.8168700E-003 4.6010800E-003 4.0257000E-003 3.5561700E-003 2.7891800E-003 2.4935000E-003 -1 9.4633770E-002 1.4057537E-002 1.4057537E-002 1.4057537E-002 1.4057537E-002 1.4057537E-002<|fim▁end|>
0.150 1.2498645E-001 1.0674112E-001 1.0796427E-001 1.0857921E-001 1.0699045E-001 1.0436486E-001 0.160 1.3663658E-001 1.2576041E-001 1.1889431E-001 1.1454626E-001 1.0941206E-001 1.0585318E-001
<|file_name|>DataTreeBean.java<|end_file_name|><|fim▁begin|>package com.lami.tuomatuo.mq.zookeeper.server; import com.lami.tuomatuo.mq.zookeeper.jmx.ZKMBeanInfo; /** * This class implements the data tree MBean * * Created by xujiankang on 2017/3/19. */ public class DataTreeBean implements DataTreeMXBean, ZKMBeanInfo{ public DataTree dataTree; public DataTreeBean(DataTree dataTree) { this.dataTree = dataTree; } @Override public int getNodeCount() { return dataTree.getNodeCount(); } @Override public long approximateDataSize() {<|fim▁hole|> public int countEphemerals() { return dataTree.getEphemeralsCount(); } public int getWatchCount(){ return dataTree.getWatchCount(); } @Override public String getName() { return "InMemoryDataTree"; } @Override public boolean isHidden() { return false; } @Override public String getLastZxid() { return "0x" + Long.toHexString(dataTree.lastProcessedZxid); } }<|fim▁end|>
return dataTree.approximateDataSize(); } @Override
<|file_name|>helper_stub_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build !aix,!darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!solaris,!windows<|fim▁hole|> return false }<|fim▁end|>
package ipv4_test func protocolNotSupported(err error) bool {
<|file_name|>issue-3136-a.rs<|end_file_name|><|fim▁begin|>trait x { fn use_x<T>(&self); } struct y(()); impl x for y { fn use_x<T>(&self) { struct foo { //~ ERROR quux i: () } fn new_foo<T>(i: ()) -> foo { foo { i: i } }<|fim▁hole|>}<|fim▁end|>
}
<|file_name|>bitcoin_ja.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ja" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About AngelCoin</source> <translation>ブラックコインについて</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;AngelCoin&lt;/b&gt; version</source> <translation>&lt;b&gt;ブラックコイン&lt;/b&gt;バージョン</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The AngelCoin developers</source> <translation>コピーライト © 2009-2014 The Bitcoin developers コピーライト © 2012-2014 The NovaCoin developers コピーライト © 2014 The AngelCoin developers</translation> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. 
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> これは実験的なソフトウェアです。 MIT/X11 ソフトウェア ライセンスの下で配布されています。詳しくは添付の COPYING ファイルやhttp://www.opensource.org/licenses/mit-license.php を参照してください。 この製品は OpenSSL Toolkit (http://www.openssl.org/) に用いられる Eric Young ([email protected]) が開発した暗号化ソフトウェアと Thomas Bernard が開発した UPnP ソフトウェアを含んでいます。</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>アドレス帳</translation> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>アドレスまたはラベルを編集するにはダブルクリック</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>新規アドレスの作成</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>現在選択されているアドレスをシステムのクリップボードにコピーする</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>新しいアドレス</translation> </message> <message> <location line="-46"/> <source>These are your AngelCoin addresses for receiving payments. 
You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>これは支払いを受けるためのブラックコインのアドレス。支払い管理をするのため、各送信者へ、それぞれのアドレスを伝えたほうがいいです。</translation> </message> <message> <location line="+60"/> <source>&amp;Copy Address</source> <translation>アドレスをコピー (&amp;C)</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>QRコードを表す</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a AngelCoin address</source> <translation>所有権の証明するためのメサッジを署名する。</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>メサッジを署名する。</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>選択されたアドレスを一覧から削除する</translation> </message> <message> <location line="-14"/> <source>Verify a message to ensure it was signed with a specified AngelCoin address</source> <translation>受け取ったメッセージの署名を確保のため、メッセージを確認する。</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>メッセージを確認する。</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>削除</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>ラベルをコピー (&amp;L)</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>編集 (&amp;E)</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation>アドレス帳のデータを書き出す</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>CSVファイル (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>エラーを書き出す</translation> </message> <message> <location line="+0"/> <source>Could not write to 
file %1.</source> <translation>ファイルを書き込めなかった。%1</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>ラベル</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>アドレス</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(ラベル無し)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>パスフレーズ ダイアログ</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>パスフレーズを入力</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>新しいパスフレーズ</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>新しいパスフレーズをもう一度</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation>ユーザアカウントはハッキングされたばい、瑣末のsendmoney無効にする。機密保護には効果はない。</translation> </message> <message> <location line="+3"/> <source>For staking only</source> <translation>賭けるのみ</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+35"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>ウォレットの新しいパスフレーズを入力してください。&lt;br/&gt;&lt;b&gt;8個以上の単語か10個以上のランダムな文字&lt;/b&gt;を使ってください。</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>ウォレットを暗号化する</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>この操作はウォレットをアンロックするためにパスフレーズが必要です。</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>ウォレットをアンロックする</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>この操作はウォレットの暗号化解除のためにパスフレーズが必要です。</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>ウォレットの暗号化を解除する</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>パスフレーズの変更</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>新旧両方のパスフレーズを入力してください。</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>ウォレットの暗号化を確認する</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>ご注意:暗号化したウォレットのパスワードを忘れたばい、b&gt;すべてのコインを失う&lt;/b&gt;!</translation> </message> 
<message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>ウォレットを暗号化、よろしいですか?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>重要: 過去のウォレット ファイルのバックアップは、暗号化された新しいウォレット ファイルに取り替える必要があります。セキュリティ上の理由により、暗号化された新しいウォレットを使い始めると、暗号化されていないウォレット ファイルのバックアップはすぐに使えなくなります。</translation> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>警告: Caps Lock キーがオンになっています!</translation> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>ウォレットは暗号化されました</translation> </message> <message> <location line="-58"/> <source>AngelCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>ただいま、暗号化手順を完成するため、ブラックコインQTは閉じます。尚、ウォレットを暗号化をされたにしても、PCのウイルスから盗難防止の報償できないことを、ご理解をお願い足します。</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>ウォレットの暗号化に失敗しました</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. 
Your wallet was not encrypted.</source> <translation>内部エラーによりウォレットの暗号化が失敗しました。ウォレットは暗号化されませんでした。</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>パスフレーズが同じではありません。</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>ウォレットのアンロックに失敗しました</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>ウォレットの暗号化解除のパスフレーズが正しくありません。</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>ウォレットの暗号化解除に失敗しました</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>ウォレットのパスフレーズの変更が成功しました。</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+282"/> <source>Sign &amp;message...</source> <translation>メッセージの署名... 
(&amp;m)</translation> </message> <message> <location line="+251"/> <source>Synchronizing with network...</source> <translation>ネットワークに同期中……</translation> </message> <message> <location line="-319"/> <source>&amp;Overview</source> <translation>概要(&amp;O)</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>ウォレットの概要を見る</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>処理(&amp;T)</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>処理履歴を閲覧</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation>アドレス帳</translation> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation>保存されたアドレスとラベルの編集</translation> </message> <message> <location line="-13"/> <source>&amp;Receive coins</source> <translation>コインを受け取る</translation> </message> <message> <location line="+1"/> <source>Show the list of addresses for receiving payments</source> <translation>支払いを受けるためのアドレスリストを表示</translation> </message> <message> <location line="-7"/> <source>&amp;Send coins</source> <translation>コインを送る</translation> </message> <message> <location line="+35"/> <source>E&amp;xit</source> <translation>終了(&amp;E)</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>アプリケーションを終了</translation> </message> <message> <location line="+6"/> <source>Show information about AngelCoin</source> <translation>ブラックコインの情報を表示</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Qt について(&amp;Q)</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Qt の情報を表示</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>オプション... 
(&amp;O)</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>ウォレットの暗号化... (&amp;E)</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>ウォレットのバックアップ... (&amp;B)</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>パスフレーズの変更... (&amp;C)</translation> </message> <message numerus="yes"> <location line="+259"/> <source>~%n block(s) remaining</source> <translation><numerusform>~%n ブロックが残っている</numerusform></translation> </message> <message> <location line="+6"/> <source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source> <translation>処理の履歴の%1 / %2 ブロックをダウンロードしました。(%3% 完成)</translation> </message> <message> <location line="-256"/> <source>&amp;Export...</source> <translation>(&amp;E)書き出す...</translation> </message> <message> <location line="-64"/> <source>Send coins to a AngelCoin address</source> <translation>ブラックコインアドレスへコインを送る</translation> </message> <message> <location line="+47"/> <source>Modify configuration options for AngelCoin</source> <translation>ブラックコインの設定を変化する</translation> </message> <message> <location line="+18"/> <source>Export the data in the current tab to a file</source> <translation>現在のタブのデータをファイルへ書き出す</translation> </message> <message> <location line="-14"/> <source>Encrypt or decrypt wallet</source> <translation>ウォレットを暗号化か暗号化を解除する</translation> </message> <message> <location line="+3"/> <source>Backup wallet to another location</source> <translation>ウォレットを他の場所にバックアップ</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>ウォレット暗号化用パスフレーズの変更</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>デバッグ ウインドウ (&amp;D)</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> 
<translation>デバッグと診断コンソールを開く</translation> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>メッセージの検証... (&amp;V)</translation> </message> <message> <location line="-202"/> <source>AngelCoin</source> <translation>ブラックコイン</translation> </message> <message> <location line="+0"/> <source>Wallet</source> <translation>ウォレット</translation> </message> <message> <location line="+180"/> <source>&amp;About AngelCoin</source> <translation>ブラックコインについて</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>見る/隠す (&amp;S)</translation> </message> <message> <location line="+9"/> <source>Unlock wallet</source> <translation>ウォレットをアンロックする</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation>(&amp;L)ウォレットをロックする</translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation>ウォレットをロックする</translation> </message> <message> <location line="+35"/> <source>&amp;File</source> <translation>ファイル(&amp;F)</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>設定(&amp;S)</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>ヘルプ(&amp;H)</translation> </message> <message> <location line="+12"/> <source>Tabs toolbar</source> <translation>タブツールバー</translation> </message> <message> <location line="+8"/> <source>Actions toolbar</source> <translation>活動ツールバー</translation> </message> <message> <location line="+13"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+60"/> <source>AngelCoin client</source> <translation>ブラックコインクライアントソフトウェア</translation> </message> <message numerus="yes"> <location line="+75"/> <source>%n active connection(s) to AngelCoin network</source> <translation><numerusform>ブラックコインネットワークへの%n 
アクティブな接続</numerusform></translation> </message> <message> <location line="+40"/> <source>Downloaded %1 blocks of transaction history.</source> <translation>処理履歴の%1ブロックをダウンロードしました。</translation> </message> <message> <location line="+413"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation>賭けている。&lt;br&gt;重さは%1&lt;br&gt;ネットワークの重さは%2&lt;br&gt;報酬をもらう時間の推測は%3</translation> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation>ウォレットをロックされたため、賭けていません</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation>ウォレットはオフラインで、賭けていません</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation>ウォレットは同期最中ため、賭けていません。</translation> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation>コインはまだ成長できていないため、賭けていません。</translation> </message> <message numerus="yes"> <location line="-403"/> <source>%n second(s) ago</source> <translation><numerusform>%n 秒前</numerusform></translation> </message> <message> <location line="-312"/> <source>About AngelCoin card</source> <translation>ブラックコインカードについて</translation> </message> <message> <location line="+1"/> <source>Show information about AngelCoin card</source> <translation>バラックコインカードの情報を表示する</translation> </message> <message> <location line="+18"/> <source>&amp;Unlock Wallet...</source> <translation>&amp;ウォレットをアンロック...</translation> </message> <message numerus="yes"> <location line="+297"/> <source>%n minute(s) ago</source> <translation><numerusform>%n 分前</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s) ago</source> <translation><numerusform>%n 時間前</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s) 
ago</source> <translation><numerusform>%n 日間前</numerusform></translation> </message> <message> <location line="+6"/> <source>Up to date</source> <translation>バージョンは最新です</translation> </message> <message> <location line="+7"/> <source>Catching up...</source> <translation>追跡中...</translation> </message> <message> <location line="+10"/> <source>Last received block was generated %1.</source> <translation>最新ブロックは%1に生成されました。</translation> </message> <message> <location line="+59"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>この処理は大きさの制限を超えている。%1料金を払ったばい、通信可能性です。料金は手続きをするノードへ支払って、ネットワークのサッポートになります。料金を払いますか。</translation> </message> <message> <location line="+5"/> <source>Confirm transaction fee</source> <translation>処理手数料を確認する</translation> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>送金処理</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>着金処理</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>日付: %1 総額: %2 種類: %3 アドレス: %4</translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation>URIの取り扱い</translation> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! 
This can be caused by an invalid AngelCoin address or malformed URI parameters.</source> <translation>URIをパースできませんでした。原因は、無効なブラックコインアドレスまたは不正なURIパラメータです。</translation> </message> <message> <location line="+18"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>ウォレットは&lt;b&gt;暗号化されて、アンロックされています&lt;/b&gt;</translation> </message> <message> <location line="+10"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>ウォレットは&lt;b&gt;暗号化されて、ロックされています&lt;/b&gt;</translation> </message> <message> <location line="+25"/> <source>Backup Wallet</source> <translation>ウォレットのバックアップ</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>ウォレットのデータ (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>バックアップは失敗しました</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>ウォレットのデータを新しい場所へ保存する際にエラーが発生しました。</translation> </message> <message numerus="yes"> <location line="+76"/> <source>%n second(s)</source> <translation><numerusform>%n 秒</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation><numerusform>%n 分</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s)</source> <translation><numerusform>%n 時間</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n 日間</numerusform></translation> </message> <message> <location line="+18"/> <source>Not staking</source> <translation>賭けていません</translation> </message> <message> <location filename="../bitcoin.cpp" line="+109"/> <source>A fatal error occurred.
AngelCoin can no longer continue safely and will quit.</source> <translation>致命的エラー。安全に続行できないため、ブラックコインQTは閉じます。</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+90"/> <source>Network Alert</source> <translation>ネットワーク警告</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation>コインのコントロール</translation> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation>数量:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation>バイト:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>総額:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>優先:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>料金:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>アウトプット低い:</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+551"/> <source>no</source> <translation>いいえ</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation>料金の後:</translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation>お釣り:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation>すべてを選択か選択を解除</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>木モード</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>リストモード</translation> </message> <message> <location line="+45"/> <source>Amount</source> <translation>総額</translation> </message> <message> <location line="+5"/> <source>Label</source>
<translation>レベル</translation> </message> <message> <location line="+5"/> <source>Address</source> <translation>アドレス</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>日付</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>検証済みの数</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>検証済</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation>優先</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation>アドレスをコピー</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>ラベルをコピー</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>総額のコピー</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>処理のIDをコピー</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation>数量をコピー</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>料金をコピー</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>料金の後をコピー</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>バイトをコピー</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>優先をコピー</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>アウトプット低いをコピー</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>お釣りをコピー</translation> </message> <message> <location line="+317"/> <source>highest</source> <translation>最高</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>高い</translation> </message> 
<message> <location line="+1"/> <source>medium-high</source> <translation>中高</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation>中</translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>中低</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>低い</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>最低</translation> </message> <message> <location line="+155"/> <source>DUST</source> <translation>ほこり</translation> </message> <message> <location line="+0"/> <source>yes</source> <translation>はい</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation>このラベルが赤くなったら、処理の大きさは10000バイトより大きいです。 少なくとも%1 KBあたりの料金は必要となります。 入力データによって、料金の+/-1 バイトが可能です。</translation> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation>高い優先ありの処理の方はブロックに書き込み可能性が高い。 このラベルは優先の設定は中より低いです。 少なくとも%1 KBあたりの料金は必要となります。</translation> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation>任意の受信者は%1より少ない額をもらったばい、このラベルは赤くなる。 少なくとも%2の料金は必要となります。 最小なリレー料金 x 0.546より下の額は、ほこりになります。</translation> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. 
This means a fee of at least %2 is required.</source> <translation>このラベルが赤くなったら、お釣りは%1より少ない。 少なくとも%2の料金は必要となります。</translation> </message> <message> <location line="+37"/> <location line="+66"/> <source>(no label)</source> <translation>(ラベル無し)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation>%1 (%2)からお釣り</translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(お釣り)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>アドレスの編集</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>ラベル(&amp;L)</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>このアドレス帳の入力のラベル</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>アドレス (&amp;A)</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry.
This can only be modified for sending addresses.</source> <translation>このアドレス帳の入力のアドレス。送信アドレスのみ変更できます。</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+20"/> <source>New receiving address</source> <translation>新しい受信アドレス</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>新しい送信アドレス</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>受信アドレスを編集</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>送信アドレスを編集</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>入力されたアドレス &quot;%1&quot; は既にアドレス帳にあります。</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid AngelCoin address.</source> <translation>入力されたアドレス &quot;%1&quot; は有効なブラックコインアドレスではありません。</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>ウォレットをアンロックできませんでした。</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>新しいキーの生成に失敗しました。</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+420"/> <location line="+12"/> <source>AngelCoin-Qt</source> <translation>ブラックコインQT</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>バージョン</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>使用法</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>コマンドラインのオプション</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>UIのオプション</translation> </message> <message> <location line="+1"/> <source>Set
language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>言語の設定、例: &quot;de_DE&quot; (デフォルト:システムのロケール)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>最小化でスタート</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>スタートでスプラッシュスクリーンを表示(デフォルト:1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>設定</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>メイン (&amp;M)</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation>手続きを早めるためのオプショナル料金。だいたいの処理は1KB。料金の0.01が勧めです。</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>支払う取引手数料 (&amp;f)</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation>貯金は賭ける参加しないため、いつでも支出できる。</translation> </message> <message> <location line="+15"/> <source>Reserve</source> <translation>貯金</translation> </message> <message> <location line="+31"/> <source>Automatically start AngelCoin after logging in to the system.</source> <translation>システムのログイン次第、自動的にブラックコインをスタート。</translation> </message> <message> <location line="+3"/> <source>&amp;Start AngelCoin on system login</source> <translation>システムログイン次第、ブラックコインをスタート</translation> </message> <message> <location line="+7"/> <source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. 
The wallet is always detached.</source> <translation>シャットダウンするとき、ブロックとアドレスのデータベースを切り離す。すると、別のディレクトリへ移動できますが、シャットダウンは少し遅れます。ウォレットはいつも切り離します。</translation> </message> <message> <location line="+3"/> <source>&amp;Detach databases at shutdown</source> <translation>シャットダウンするとき、データベースを切り離す</translation> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>ネットワーク (&amp;N)</translation> </message> <message> <location line="+6"/> <source>Automatically open the AngelCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>自動的にルーターでブラックコインクライエントソフトウェアのポートを開く。ルーターはUPnPのサポートあり、UPnPを有効にするならできる。</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>UPnP を使ってポートを割り当てる (&amp;U)</translation> </message> <message> <location line="+7"/> <source>Connect to the AngelCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>ブラックコインのネットワークへSOCKSプロキシで接続する(例:TORで接続するばい)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>SOCKSプロキシで接続する</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>プロキシの IP (&amp;I) :</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>プロキシのIPアドレス (例:127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>ポート (&amp;P) :</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>プロキシのポート番号 (例 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS バージョン (&amp;V) :</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation>SOCKS プロキシのバージョン (例 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>ウインドウ (&amp;W)</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>ウインドウを最小化したあとトレイ アイコンだけを表示する。</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>タスクバーの代わりにトレイに最小化 (&amp;M)</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>ウインドウが閉じられる時アプリケーションを終了せずに最小化します。このオプションが有効な時にアプリケーションを終了するにはメニューから終了を選択します。</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>閉じる時に最小化 (&amp;i)</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>表示 (&amp;D)</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>ユーザインターフェースの言語 (&amp;l) :</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting AngelCoin.</source> <translation>ユーザのインターフェースの言語の設定です。リスタートの後、有効します。</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>額を表示する単位 (&amp;U) :</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>インターフェース上の表示とコインの送信で使用する単位を選択します。</translation> </message> <message> <location line="+9"/> <source>Whether to show AngelCoin addresses in the transaction list or not.</source> <translation>処理の歴史でブラックコインのアドレスを表示する/しない。</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>処理の履歴にアドレスを表示 (&amp;D)</translation> </message> <message> <location line="+7"/> <source>Whether to show coin control features or not.</source> <translation>コインコントロールを表示する/しない。</translation> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation>コインコントロールの設定を表示する(有識者のみ!)</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>キャンセル (&amp;C)</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>適用する</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+55"/> <source>default</source> <translation>初期値</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation>警告</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting AngelCoin.</source> <translation>この設定はブラックコインをリスタートした後に有効する。</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> 
<translation>プロキシアドレスが無効です。</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>フォーム</translation> </message> <message> <location line="+33"/> <location line="+231"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the AngelCoin network after a connection is established, but this process has not completed yet.</source> <translation>表示されている情報は時間遅れている。接続したら、ウォレットは自動的にブラックコインネットワークと同期しますが過程は完了してません。</translation> </message> <message> <location line="-160"/> <source>Stake:</source> <translation>賭け金:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>未検証:</translation> </message> <message> <location line="-107"/> <source>Wallet</source> <translation>ウォレット</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation>支出可能:</translation> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation>あなたの利用可能残高</translation> </message> <message> <location line="+71"/> <source>Immature:</source> <translation>未完成:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>完成していない採掘された残高</translation> </message> <message> <location line="+20"/> <source>Total:</source> <translation>合計:</translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation>あなたの現在の残高</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;最近の処理&lt;/b&gt;</translation> </message> <message> <location line="-108"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>未確認の合計で、まだ現在の残高に含まれていない。</translation> </message> <message> <location 
line="-29"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation>賭けているコインの合計で、まだ現在の残高に含まれていない。</translation> </message> <message> <location filename="../overviewpage.cpp" line="+113"/> <location line="+1"/> <source>out of sync</source> <translation>同期していない</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QRコードのダイアログ</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>支払いを要請する</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>総額:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>レベル</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>メッセージ:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;S名前を付けて保存...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>URIからQRコードにエンコードするエラー。</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>入力された額は無効です。確認してください。</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>URIは長過ぎて、ラベル文字の長さを短くしてください。</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>QRコードを保存</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG イメージ (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>クライアント名</translation> 
</message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+348"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>クライアントのバージョン</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>情報 (&amp;I)</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>使用中の OpenSSL のバージョン</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>起動した日時</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>ネットワーク</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>接続数</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>testnetで</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>ブロック チェーン</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>現在のブロック数</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>推定総ブロック数</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>最終ブロックの日時</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>開く (&amp;O)</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>コマンドラインのオプション</translation> </message> <message> <location line="+7"/> <source>Show the AngelCoin-Qt help message to get a list with possible AngelCoin command-line options.</source> 
<translation>ブラックコインQTのコマンドラインのヘルプ情報を表示する。</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>(&amp;S)表示</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>コンソール (&amp;C)</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>ビルドの日付</translation> </message> <message> <location line="-104"/> <source>AngelCoin - Debug window</source> <translation>ブラックコイン:デバッグウインドウ</translation> </message> <message> <location line="+25"/> <source>AngelCoin Core</source> <translation>ブラックコインコア</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>デバッグ用ログファイル</translation> </message> <message> <location line="+7"/> <source>Open the AngelCoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>現行のディレクトリからデバッグログファイルを開く。大きなファイルのばい、少し時間かかる。</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>コンソールをクリア</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-33"/> <source>Welcome to the AngelCoin RPC console.</source> <translation>ブラックコインRPCコンソールへようこそ。</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>上下の矢印で履歴をたどれます。 &lt;b&gt;Ctrl-L&lt;/b&gt; でスクリーンを消去できます。</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>使用可能なコマンドを見るには &lt;b&gt;help&lt;/b&gt; と入力します。</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+182"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location 
line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>コインを送る</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation>コインのコントロールの設定</translation> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation>入力...</translation> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation>自動的に選択</translation> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation>資金不足!</translation> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation>数量:</translation> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation>0</translation> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation>バイト</translation> </message> <message> <location line="+51"/> <source>Amount:</source> <translation>総額:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 ANG</source> <translation>123.456 ANG {0.00 ?}</translation> </message> <message> <location line="-191"/> <source>Priority:</source> <translation>優先:</translation> </message> <message> <location line="+19"/> <source>medium</source> <translation>中</translation> </message> <message> <location line="+32"/> <source>Fee:</source> <translation>料金:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>アウトプット低い:</translation> </message> <message> <location line="+19"/> <source>no</source> <translation>いいえ</translation> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation>料金の後</translation> </message> <message> <location line="+35"/> <source>Change</source> <translation>お釣り:</translation> </message> <message> <location line="+50"/> <source>custom change address</source> 
<translation>カスタムのお釣りのアドレス</translation> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>一度に複数の人に送る</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>受取人を追加 (&amp;R)</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>全分の処理欄を削除する</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>すべてクリア (&amp;A)</translation> </message> <message> <location line="+28"/> <source>Balance:</source> <translation>残高:</translation> </message> <message> <location line="+16"/> <source>123.456 ANG</source> <translation>123.456 ANG</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>送る操作を確認する</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>送る (&amp;e)</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-173"/> <source>Enter a AngelCoin address (e.g. 
Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source> <translation>ブラックコインアドレスの入力 (例;Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</translation> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation>数量をコピー</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>総額のコピー</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation>料金をコピー</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>料金の後をコピー</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>バイトをコピー</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>優先をコピー</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>アウトプット低いをコピー</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>お釣りをコピー</translation> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; から %2 (%3)に</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>コインを送る確認</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>%1送付、よろしいですか?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>と</translation> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation>受取人のアドレスが不正です。再確認してください。</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>支払額は0より大きくないといけません。</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>額が残高を超えています。</translation> </message> <message> <location line="+5"/> <source>The total 
exceeds your balance when the %1 transaction fee is included.</source> <translation>%1 の処理手数料を含めると額が残高を超えています。</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>重複しているアドレスが見つかりました。1回の送信で同じアドレスに送ることは出来ません。</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed.</source> <translation>エラー:処理を失敗しました。</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>エラー:処理は拒否されました。ウォレットのコインをすでに費やした可能性で、wallet.datのコピーで費やしたが、現行のwallet.datとはアップデートされていない。</translation> </message> <message> <location line="+251"/> <source>WARNING: Invalid AngelCoin address</source> <translation>警告:無効なブラックコインアドレス</translation> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(ラベル無し)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation>警告:不明なお釣りのアドレス</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>フォーム</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>金額(&amp;A):</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>送り先(&amp;T):</translation> </message> <message> <location line="+24"/> <location filename="../sendcoinsentry.cpp" line="+25"/> <source>Enter a label for this address to add it to your address book</source> <translation>アドレス帳に追加するには、このアドレスのラベルを入力します</translation> </message> <message> <location line="+9"/> <source>&amp;Label:</source> 
<translation>ラベル(&amp;L):</translation> </message> <message> <location line="+18"/> <source>The address to send the payment to (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source> <translation>支払いへ送るアドレス (例:Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</translation> </message> <message> <location line="+10"/> <source>Choose address from address book</source> <translation>アドレス帳からアドレスを選択する</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>クリップボードからアドレスを貼付ける</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>この受信者を外す</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a AngelCoin address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source> <translation>ブラックコインアドレスの入力 (例;Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>署名 - メッセージの署名/検証</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>メッセージの署名 (&amp;S)</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>あなた自身を立証するためにあなたのアドレスでメッセージに署名することができます。フィッシング攻撃によってあなたを騙して署名を譲渡させようとするかもしれないので、不明確なものは絶対に署名しないように注意してください。あなたが同意する完全に詳細な声明にだけ署名してください。</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source> <translation>メッセージの署名するアドレス(例:Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</translation> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation>アドレス帳からアドレスを選ぶ</translation> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>クリップボードからアドレスを貼付ける</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>ここにあなたが署名するメッセージを入力します</translation> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation>現在の署名をシステムのクリップボードにコピーする</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this AngelCoin address</source> <translation>所有権の証明するためこのメサッジを署名する</translation> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation>メッセージ署名の内容をすべて消去します</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>すべてクリア (&amp;A)</translation> </message> <message> <location line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation>メッセージの検証 (&amp;V)</translation> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, 
spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>メッセージを検証するために、署名するアドレスとメッセージ(改行、スペース、タブなどを正確にコピーしてください)、そして署名を入力します。中間者攻撃によってだまされることを避けるために、署名されたメッセージそのものよりも、署名を読み取られないように注意してください。</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source> <translation>メッセージの署名するアドレス(例:Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified AngelCoin address</source> <translation>受け取ったメッセージの署名を確保のため、メッセージを確認する。</translation> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation>入力項目の内容をすべて消去します</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a AngelCoin address (e.g. 
Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source> <translation>ブラックコインのアドレスを入力(例:Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>署名を作成するには&quot;メッセージの署名&quot;をクリック</translation> </message> <message> <location line="+3"/> <source>Enter AngelCoin signature</source> <translation>ブラックコインのデジタル署名を入力</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>不正なアドレスが入力されました。</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>アドレスを確かめてからもう一度試してください。</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>入力されたアドレスに関連するキーがありません。</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>ウォレットのアンロックはキャンセルされました。</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>入力されたアドレスのプライベート キーが無効です。</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>メッセージの署名に失敗しました。</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>メッセージに署名しました。</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>署名がデコードできません。</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>署名を確認してからもう一度試してください。</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>署名はメッセージ 
ダイジェストと一致しませんでした。</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>メッセージの検証に失敗しました。</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>メッセージは検証されました。</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+19"/> <source>Open until %1</source> <translation>ユニット %1 を開く</translation> </message> <message numerus="yes"> <location line="-2"/> <source>Open for %n block(s)</source> <translation><numerusform>%n ブロックに開いている</numerusform></translation> </message> <message> <location line="+8"/> <source>conflicted</source> <translation>相違</translation> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/オフライン</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/未検証</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 確認</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>ステータス</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>%n ノードにブロードキャスト</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>日付</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>ソース</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>生成された</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>送信</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>受信</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> 
<translation>自分のアドレス</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>ラベル</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>クレジット</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>%n 以上のブロックが満期</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>承認されなかった</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>引き落とし額</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>処理の手数料</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>正味金額</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>メッセージ</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>コメント</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>処理のID</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 110 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>コインを費やす前に発現されたコインは110ブロック間成熟しなければなりません。このブロックを発現させたときに、ブロックチェインに足すためにネットワークへ放送しました。ブロックチェインに追加失敗したばい、レベルは「受け入られていない」に変わって、費やせられない状況になります。自分のノードと他のノードと同時に新しいブロックを発現させたときに、時折に起こること。</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>デバッグ情報</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>処理</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>入力</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>総額</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>正しい</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>正しくない</translation> </message> <message> <location line="-211"/> <source>, has not been successfully broadcast yet</source> <translation>まだブロードキャストが成功していません</translation> </message> <message> <location line="+35"/> <source>unknown</source> <translation>未確認</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>処理の詳細</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>ここでは処理の詳細を表示しています</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+226"/> <source>Date</source> <translation>日付</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>タイプ</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>アドレス</translation> </message> <message> <location line="+0"/> 
<source>Amount</source> <translation>総額</translation> </message> <message> <location line="+60"/> <source>Open until %1</source> <translation>ユニット %1 を開く</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>検証されました (%1 検証済み)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation><numerusform>%n 以上のブロックを開く</numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation>オフライン</translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation>未検証</translation> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>検証最中 (%1 / %2 の進めている検証済み)</translation> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation>相違</translation> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>未熟 (%1 検証,%2の後可用ができる)</translation> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>このブロックは他のどのノードによっても受け取られないで、多分受け入れられないでしょう!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>生成されましたが承認されませんでした</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>受信元</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>送り主</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>送り先</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>自分自身への支払い</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>発掘した</translation> </message> <message> 
<location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+190"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>処理の状況。この欄の上にカーソルを置くと検証の数を表示します。</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>処理を受信した日時。</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>処理の種類。</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>処理の宛先アドレス。</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>残高に追加または削除された総額。</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+55"/> <location line="+16"/> <source>All</source> <translation>すべて</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>今日</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>今週</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>今月</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>先月</translation> </message> <message> <location line="+1"/> <source>This year</source><|fim▁hole|> </message> <message> <location line="+1"/> <source>Range...</source> <translation>期間...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>送り主</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>送り先</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>自分自身</translation> </message> <message> <location line="+1"/> <source>Mined</source> 
<translation>発掘した</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>その他</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>検索するアドレスまたはラベルを入力</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>最小の額</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>アドレスをコピーする</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>ラベルをコピーする</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>総額のコピー</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>処理IDをコピー</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>ラベルの編集</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>処理の詳細を表示</translation> </message> <message> <location line="+144"/> <source>Export Transaction Data</source> <translation>処理のデータを書き出す</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>テキスト CSV (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>検証済み</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>日付</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>タイプ</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>ラベル</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Helbidea</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>総額</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> 
<message> <location line="+4"/> <source>Error exporting</source> <translation>エラーを書き出す</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>ファイルを書き込めなかった。%1</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>期間:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>から</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+206"/> <source>Sending...</source> <translation>通信中...</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+33"/> <source>AngelCoin version</source> <translation>ブラックコインバージョン</translation> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>使用法:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or angelcoind</source> <translation>-server か angelcoindへコマンドを送る。</translation> </message> <message> <location line="+1"/> <source>List commands</source> <translation>コマンド一覧</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>コマンドのヘルプ</translation> </message> <message> <location line="+2"/> <source>Options:</source> <translation>オプション:</translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: angelcoin.conf)</source> <translation>設定ファイルを特定する (デファルト: angelcoin.conf)</translation> </message> <message> <location line="+1"/> <source>Specify pid file (default: angelcoind.pid)</source> <translation>pid ファイルを特定する (デフォルト: angelcoind.pid)</translation> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>ウォレットのファイルを指定 (データ・ディレクトリの中に)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>データ 
ディレクトリの指定</translation> </message> <message> <location line="+2"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>データベースのキャッシュサイズをメガバイトで設定 (初期値: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation>メガバイトでのデータベースのログザイズの大きさの設定(デファルト:100)</translation> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 15714 or testnet: 25714)</source> <translation>&lt;port&gt; で 接続をリスン (デフォルト: 15714かtestnet は 25714)</translation> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>ピアの最大接続数 (初期値: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>ピア アドレスを取得するためにノードに接続し、そして切断します</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>あなた自身のパブリックなアドレスを指定</translation> </message> <message> <location line="+5"/> <source>Bind to given address. 
Use [host]:port notation for IPv6</source> <translation>アドレスに結ぶ。IPv6のばい、[host]:port 表記法を使ってください。</translation> </message> <message> <location line="+2"/> <source>Stake your coins to support network and gain reward (default: 1)</source> <translation>褒奨金をもらうためと、ブラックコインネットワークをサッポートするために、コインを賭ける(デファルト:1)</translation> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>不正なピアを切断するためのしきい値 (初期値: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>不正なピアを再接続するまでの秒数 (初期値: 86400)</translation> </message> <message> <location line="-44"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>IPv4 でリスンする RPC ポート %u の設定中にエラーが発生しました: %s</translation> </message> <message> <location line="+51"/> <source>Detach block and address databases. Increases shutdown time (default: 0)</source> <translation>ブロックとアドレスのデータベースを切り離す。この設定はシャットダウンの時間を伸ばさせます。(デファルト:0)</translation> </message> <message> <location line="+109"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>エラー:処理は拒否されました。ウォレットのコインをすでに費やした可能性で、wallet.datのコピーで費やしたが、現行のwallet.datとはアップデートされていない。</translation> </message> <message> <location line="-5"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation>エラー:額,複雑,最近もらった資金、どれかの理由で処理は少なくとも %sの料金が必要です。</translation> </message> <message> <location line="-87"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 15715 or testnet: 25715)</source> <translation>&lt;port&gt; で JSON-RPC 接続をリスン (デフォルト: 15715かtestnet は 25715)</translation> </message> <message> <location line="-11"/> <source>Accept command line and JSON-RPC commands</source> <translation>コマンドラインと JSON-RPC コマンドを許可</translation> </message> <message> <location line="+101"/> <source>Error: Transaction creation failed </source> <translation>エラー:処理を失敗しました。</translation> </message> <message> <location line="-5"/> <source>Error: Wallet locked, unable to create transaction </source> <translation>エラー:ウォレットはロックされたために、処理を作られなかった。</translation> </message> <message> <location line="-8"/> <source>Importing blockchain data file.</source> <translation>ブロックチェインのファイルを読み込んでいる。</translation> </message> <message> <location line="+1"/> <source>Importing bootstrap blockchain data file.</source> <translation>ブートストラップなブロックチェインのファイルを読み込んでいる。</translation> </message> <message> <location line="-88"/> <source>Run in the background as a daemon and accept commands</source> <translation>デーモンとしてバックグランドで実行しコマンドを許可</translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>テストのためのネットワークを使用</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> 
<translation>外部からの接続を許可 (初期値: -proxy または -connect を使用していない場合は1)</translation> </message> <message> <location line="-38"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>IPv6 でリスンする RPC ポート %u の設定中にエラーが発生したので IPv4 に切り替えます: %s</translation> </message> <message> <location line="+117"/> <source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source> <translation>データベース%sを初期化するにエラーになりました。直すためにディレクトリをバックアップして、そしてwallet.dat意外のファイルを取り除いてください。</translation> </message> <message> <location line="-20"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>バイトで、最優先、低料金の処理の最大サイズの設定(デファルト:27000)</translation> </message> <message> <location line="+11"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>警告: -paytxfee が非常に高く設定されています! これは処理を送信する場合に支払う取引手数料です。</translation> </message> <message> <location line="+61"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong AngelCoin will not work properly.</source> <translation> 警告:コンピュータの日付と時間を調べてください。時間ずらしかったばい、ブラックコイン QTは正しく行動しない。</translation> </message> <message> <location line="-31"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>警告: wallet.dat の読み込みエラー! すべてのキーは正しく読み取れますが、処理のデータやアドレス帳のエントリが失われたか、正しくない可能性があります。</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>警告: wallet.dat のデータはの汚染で、でデータを復旧しました! 
オリジナルの wallet.dat は wallet.{timestamp}.bak として %s に保存されました; もしもあなたの残高や処理が正しくないばい、バックアップから復元してください。</translation> </message> <message> <location line="-30"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>壊れた wallet.dat から秘密鍵を復旧することを試す</translation> </message> <message> <location line="+4"/> <source>Block creation options:</source> <translation>ブロック作成オプション:</translation> </message> <message> <location line="-62"/> <source>Connect only to the specified node(s)</source> <translation>指定したノードだけに接続</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>自分の IP アドレスを発見 (初期値: リスン中と -externalip を使用していない場合は1)</translation> </message> <message> <location line="+94"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>ポートのリスンに失敗しました。必要であれば -listen=0 を使用してください。</translation> </message> <message> <location line="-90"/> <source>Find peers using DNS lookup (default: 1)</source> <translation>DNSルックアップでピーアを探す(デファルト:1)</translation> </message> <message> <location line="+5"/> <source>Sync checkpoints policy (default: strict)</source> <translation>同期チェックポイント方針(デファルト:厳しい)</translation> </message> <message> <location line="+83"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>無効なTORアドレス: &apos;%s&apos;</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>-reservebalance=&lt;amount&gt;の額は無効です</translation> </message> <message> <location line="-82"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>接続毎の最大受信バッファ &lt;n&gt;*1000 バイト (初期値: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>接続毎の最大送信バッファ &lt;n&gt;*1000 バイト (初期値: 
1000)</translation> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>&lt;net&gt; (IPv4, IPv6, Tor) ネットワーク内のノードだけに接続する</translation> </message> <message> <location line="+28"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>余分なデバッグ情報を出力する。この設定はすべてのdebug* の設定を有効にする。</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>余分なネットワークのデバッグ情報を出力する</translation> </message> <message> <location line="+1"/> <source>Prepend debug output with timestamp</source> <translation>デバッグのアウトプットはタイムスタンプで先頭に追加する</translation> </message> <message> <location line="+35"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>SSL オプション: (SSLのセットアップ手順は Bitcoin Wiki をご覧下さい)</translation> </message> <message> <location line="-74"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>SOCKSプロクシーのバージョンを選択する (4-5、 デファルト: 5)</translation> </message> <message> <location line="+41"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>トレース/デバッグ情報を debug.log ファイルの代わりにコンソールへ送る</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>デバッガへ追跡とデバッグ情報を送る。</translation> </message> <message> <location line="+28"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>最大ブロックサイズをバイトで設定 (初期値: 250000)</translation> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>最小ブロックサイズをバイトで設定 (初期値: 0)</translation> </message> <message> <location line="-29"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>クライアント起動時に debug.log ファイルを縮小 (初期値: -debug オプションを指定しない場合は1)</translation> </message> <message> 
<location line="-42"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>接続のタイムアウトをミリセコンドで指定 (初期値: 5000)</translation> </message> <message> <location line="+109"/> <source>Unable to sign checkpoint, wrong checkpointkey? </source> <translation>チェックポイントを署名できません。checkpointkeyは違いますか。 </translation> </message> <message> <location line="-80"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>リスン ポートの割当に UPnP を使用 (初期値: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>リスン ポートの割当に UPnP を使用 (初期値: リスン中は1)</translation> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>プロクシーでTORヒドゥンサービス(TOR Hidden Services)を接続する(デファルト:-proxyと同じ)</translation> </message> <message> <location line="+42"/> <source>Username for JSON-RPC connections</source> <translation>JSON-RPC 接続のユーザー名</translation> </message> <message> <location line="+47"/> <source>Verifying database integrity...</source> <translation>データベースの保全性を確認最中...</translation> </message> <message> <location line="+57"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation>警告:シンクロナイズドチェックポイント違反を検出したが、チェックポイントを飛ばした。</translation> </message> <message> <location line="+1"/> <source>Warning: Disk space is low!</source> <translation>警告:ディスクの空き領域は少ない!</translation> </message> <message> <location line="-2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>警告: このバージョンは古いのでアップグレードが必要です!</translation> </message> <message> <location line="-48"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat が壊れ、復旧に失敗しました</translation> </message> <message> <location line="-54"/> <source>Password for JSON-RPC connections</source> <translation>JSON-RPC 接続のパスワード</translation> </message> <message> <location 
line="-84"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=angelcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;AngelCoin Alert&quot; [email protected] </source> <translation>%s, 設定ファイル:%sでrpcpasswordのパスワードを入力しなければならい このランダムパスワードのおすすめです: rpcuser=angelcoinrpc rpcpassword=%s (このパサワードを覚えなくても大丈夫です) ユーザ名とパスワードは同じであってはなりません。 ファイルは存在しないばいは、所有者が読み取り可能な専用のファイルを作成してください。 問題のことを知らせるために、alertnotifyの設定を有効にしたほうがいいです。 例:alertnotify=echo %%s | mail -s &quot;ブラックコイン警告&quot; [email protected] </translation> </message> <message> <location line="+51"/> <source>Find peers using internet relay chat (default: 0)</source> <translation>インターネットリレーチャット(IRC)でピアアを探す。(デファルト:1 {0}?)</translation> </message> <message> <location line="+5"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. 
syncing with NTP (default: 1)</source> <translation>他のノードと時刻の同期する。確な時刻のコンピューターのばい、無効する。例:NTP同期設定有効ある(デファルト:1)</translation> </message> <message> <location line="+15"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation>処理を作ると、この額より少ない数字を無視する (デファルト:0.01)</translation> </message> <message> <location line="+16"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>指定した IP アドレスからの JSON-RPC 接続を許可</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>&lt;ip&gt; (初期値: 127.0.0.1) で実行中のノードにコマンドを送信</translation> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>最良のブロックに変更する際にコマンドを実行 (cmd の %s はブロック ハッシュに置換される)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>ウォレットの処理を変更する際にコマンドを実行 (cmd の %s は TxID に置換される)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation>変更するために、確認を必要とする (デファルト:0)</translation> </message> <message> <location line="+1"/> <source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source> <translation>処理のスクリプトコマンドで標準的なPUSHオペレータを施行する(デファルト:1)</translation> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>関連した警告をもらったら、コマンドを実行する (cmdの中で%sにメッセージを交換される)</translation> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>ウォレットを最新のフォーマットにアップグレード</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>key pool のサイズを 
&lt;n&gt; (初期値: 100) にセット</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>失ったウォレットの処理のブロック チェーンを再スキャン</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation>スタートのときに、いくつのブロックを調べます (デファルト:2500、 0 = すべて )</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>ブロック検証の徹底の程度 (0-6 、デファルト:1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation>外部 blk000?.dat ファイルからブロックを読み込む。</translation> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>JSON-RPC 接続に OpenSSL (https) を使用</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>サーバ証明書ファイル (初期値: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>サーバの秘密鍵 (初期値: server.pem)</translation> </message> <message> <location line="+1"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>許容可能な暗号 (デフォルト: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+53"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation>エラー:アンロックされたウォレットは賭けるためだけで、処理を作られない。</translation> </message> <message> <location line="+18"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! 
You may need to upgrade, or notify developers.</source> <translation>警告:無効なチェックポイントを見つかりました!表示された処理は正しくない可能性がある!アップグレードするか、デベロッパーに報告する必要があります。</translation> </message> <message> <location line="-158"/> <source>This help message</source> <translation>このヘルプ メッセージ</translation> </message> <message> <location line="+95"/> <source>Wallet %s resides outside data directory %s.</source> <translation>%sウォレットはディレクトリ%sの外にあります。</translation> </message> <message> <location line="+1"/> <source>Cannot obtain a lock on data directory %s. AngelCoin is probably already running.</source> <translation>%sディレクトリにをロックオンできない。ブラックコインQTは、もう発行してるでしょう。</translation> </message> <message> <location line="-98"/> <source>AngelCoin</source> <translation>ブラックコイン</translation> </message> <message> <location line="+140"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>このコンピュータの %s にバインドすることができません (バインドが返したエラーは %d, %s)</translation> </message> <message> <location line="-130"/> <source>Connect through socks proxy</source> <translation>SOCKSプロキシで接続する</translation> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>-addnode, -seednode と -connect で DNS ルックアップを許可する</translation> </message> <message> <location line="+122"/> <source>Loading addresses...</source> <translation>アドレスを読み込んでいます...</translation> </message> <message> <location line="-15"/> <source>Error loading blkindex.dat</source> <translation>blkindex.dat 読み込みエラー</translation> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>wallet.dat 読み込みエラー: ウォレットが壊れました</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of AngelCoin</source> <translation>wallet.dat 読み込みエラー:  ブラックコインQTの最新バージョンが必要です</translation> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart 
AngelCoin to complete</source> <translation>ウォレットのデータをリライトしなければならい:ブラックコインQTをリスタートしてください</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>wallet.dat 読み込みエラー</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>無効な -proxy アドレス: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>-onlynet で指定された &apos;%s&apos; は未知のネットワークです</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>-socks で指定された %i は未知のバージョンです</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>-bind のアドレス &apos;%s&apos; を解決できません</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>-externalip のアドレス &apos;%s&apos; を解決できません</translation> </message> <message> <location line="-24"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>-paytxfee=&lt;amount&gt; の額 &apos;%s&apos; が無効です</translation> </message> <message> <location line="+44"/> <source>Error: could not start node</source> <translation>エラー:ノードの開始ができなった</translation> </message> <message> <location line="+11"/> <source>Sending...</source> <translation>通信中...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>無効な総額</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>残高不足</translation> </message> <message> <location line="-34"/> <source>Loading block index...</source> <translation>ブロック インデックスを読み込んでいます...</translation> </message> <message> <location line="-103"/> <source>Add a node to connect to and attempt to keep the connection 
open</source> <translation>接続するノードを追加し接続を持続するように試します</translation> </message> <message> <location line="+122"/> <source>Unable to bind to %s on this computer. AngelCoin is probably already running.</source> <translation>このコンピューターで%sに結ぶことができなかった。ブラックコインQTは、もう発行してるでしょう。</translation> </message> <message> <location line="-97"/> <source>Fee per KB to add to transactions you send</source> <translation>送る処理を足して、KBあたりの料金</translation> </message> <message> <location line="+55"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation>-mininput=&lt;amount&gt;: &apos;%s&apos;の額は無効です</translation> </message> <message> <location line="+25"/> <source>Loading wallet...</source> <translation>ウォレットを読み込んでいます...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>ウォレットのダウングレードはできません</translation> </message> <message> <location line="+1"/> <source>Cannot initialize keypool</source> <translation>キースペースをイニシャライズをする</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>初期値のアドレスを書き込むことができません</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>再スキャン中...</translation> </message> <message> <location line="+5"/> <source>Done loading</source> <translation>読み込み完了</translation> </message> <message> <location line="-167"/> <source>To use the %s option</source> <translation>%s オプションを使うには</translation> </message> <message> <location line="+14"/> <source>Error</source> <translation>エラー</translation> </message> <message> <location line="+6"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>rpcpassword=&lt;password&gt; を設定ファイルでセットしてください: %s ファイルが無い場合は、オーナーだけが読み取れる権限でファイルを作成してください。</translation> </message> </context> </TS><|fim▁end|>
<translation>今年</translation>
<|file_name|>InterbankNetwork.java<|end_file_name|><|fim▁begin|>/* * This file is part of CRISIS, an economics simulator. * * Copyright (C) 2015 Victor Spirin * * CRISIS is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * CRISIS is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with CRISIS. If not, see <http://www.gnu.org/licenses/>. */ package eu.crisis_economics.abm.markets.nonclearing; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import eu.crisis_economics.abm.markets.Party; import eu.crisis_economics.abm.markets.nonclearing.DefaultFilters.Filter; /** * Represents relationships between banks in the interbank market * * @author Victor Spirin */ public class InterbankNetwork { private final Map<Party, Set<Party>> adj = new HashMap<Party, Set<Party>>(); /** * Adds a new node to the graph. If the node already exists, this * function is a no-op. * * @param node The node to add. * @return Whether or not the node was added. */ private boolean addNode(Party node) { /* If the node already exists, don't do anything. */ if (adj.containsKey(node)) return false; /* Otherwise, add the node with an empty set of outgoing edges. */ adj.put(node, new HashSet<Party>()); return true; } // protected /** * Generates a filter from the network. This allows the network class to be used * with a Limit Order Book. * * @param a Bank, for which we want to generate a filter * @return Returns a Limit Order Book filter for the given bank. 
If the bank is not in the network, returns an empty filter. */ protected Filter generateFilter(Party party) { if (!adj.containsKey(party)) return DefaultFilters.only(party); return DefaultFilters.only((Party[]) adj.get(party).toArray()); } // public interface /** * Adds a bilateral relationship between banks 'a' and 'b'. * If the banks are not already in the network, also adds them to the network. * * @param a Bank a * @param b Bank b */ public void addRelationship(Party a, Party b) { /* Confirm both endpoints exist. */ <|fim▁hole|> if (!adj.containsKey(b)) addNode(b); /* Add the edge in both directions. */ adj.get(a).add(b); adj.get(b).add(a); } /** * Removes a bilateral relationship between banks 'a' and 'b'. * If the banks are not already in the network, throws a NoSuchElementException * * @param a Bank a * @param b Bank b */ public void removeRelationship(Party a, Party b) { /* Confirm both endpoints exist. */ if (!adj.containsKey(a) || !adj.containsKey(b)) throw new NoSuchElementException("Both banks must be in the network."); /* Remove the edges from both adjacency lists. */ adj.get(a).remove(b); adj.get(b).remove(a); } /** * Returns true if there is a bilateral relationship between banks 'a' and 'b'. * If the banks are not already in the network, throws a NoSuchElementException * * @param a Bank a * @param b Bank b * @return Returns true if there is a relationship between banks 'a' and 'b' */ public boolean isRelated(Party a, Party b) { /* Confirm both endpoints exist. */ if (!adj.containsKey(a) || !adj.containsKey(b)) throw new NoSuchElementException("Both banks must be in the network."); /* Network is symmetric, so we can just check either endpoint. */ return adj.get(a).contains(b); } }<|fim▁end|>
if (!adj.containsKey(a)) addNode(a);
<|file_name|>xiajuxu441.py<|end_file_name|><|fim▁begin|><|fim▁hole|> ''' @author: sheng @license: ''' SPELL=u'xiàjùxū' CN=u'下巨虚' NAME=u'xiajuxu441' CHANNEL='stomach' CHANNEL_FULLNAME='StomachChannelofFoot-Yangming' SEQ='ST39' if __name__ == '__main__': pass<|fim▁end|>
#!/usr/bin/python #coding=utf-8
<|file_name|>JsonResolver.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 lyobzik // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "../include/JsonResolver.h" namespace JsonSchemaValidator { JsonResolver::JsonResolver() { } JsonResolver::~JsonResolver() { } } // namespace JsonSchemaValidator<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from setuptools import setup import re import platform import os import sys install_requires = ["bottle>=0.11", "requests>=1.1.0", "pyyaml>=0.0", "czipfile>=1.0.0", "prometheus-client"] def load_version(filename='./reststore/version.py'): """Parse a __version__ number from a source file""" with open(filename) as source: text = source.read() match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text) if not match: msg = "Unable to find version number in {}".format(filename) raise RuntimeError(msg) version = match.group(1) return version setup( name="reststore", version=load_version(), packages=["reststore"], zip_safe=False, author="Michael Dorman",<|fim▁hole|> license="Apache Software Licence", install_requires = install_requires, entry_points={ 'console_scripts': [ 'reststore = reststore.cli:entry', ] }, platforms=['cygwin', 'win', 'linux'], classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Other Audience', 'License :: OSI Approved :: Apache Software License', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Security', 'Topic :: System :: Monitoring' ], test_suite="tests", tests_require=[] )<|fim▁end|>
author_email="[email protected]", url="https://github.com/provoke-vagueness/reststore", description="RESTful datastore. A simple way to store large amounts of average sized files.", long_description=open('README.rst').read(),
<|file_name|>branchmodel.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************** ** ** Copyright (C) 2015 The Qt Company Ltd. ** Contact: http://www.qt.io/licensing ** ** This file is part of Qt Creator. ** ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms and ** conditions see http://www.qt.io/terms-conditions. For further information ** use the contact form at http://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 or version 3 as published by the Free ** Software Foundation and appearing in the file LICENSE.LGPLv21 and ** LICENSE.LGPLv3 included in the packaging of this file. Please review the ** following information to ensure the GNU Lesser General Public License ** requirements will be met: https://www.gnu.org/licenses/lgpl.html and ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, The Qt Company gives you certain additional ** rights. These rights are described in The Qt Company LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
** ****************************************************************************/ #include "branchmodel.h" #include "gitclient.h" #include <utils/qtcassert.h> #include <vcsbase/vcsoutputwindow.h> #include <vcsbase/vcscommand.h> #include <QFont> using namespace VcsBase; namespace Git { namespace Internal { enum RootNodes { LocalBranches = 0, RemoteBranches = 1, Tags = 2 }; // -------------------------------------------------------------------------- // BranchNode: // -------------------------------------------------------------------------- class BranchNode { public: BranchNode() : parent(0), name(QLatin1String("<ROOT>")) { } BranchNode(const QString &n, const QString &s = QString(), const QString &t = QString()) : parent(0), name(n), sha(s), tracking(t) { } ~BranchNode() { while (!children.isEmpty()) delete children.first(); if (parent) parent->children.removeAll(this); } BranchNode *rootNode() const { return parent ? parent->rootNode() : const_cast<BranchNode *>(this); } int count() const<|fim▁hole|> bool isLeaf() const { return children.isEmpty() && parent && parent->parent; } bool childOf(BranchNode *node) const { if (this == node) return true; return parent ? 
parent->childOf(node) : false; } bool childOfRoot(RootNodes root) const { BranchNode *rn = rootNode(); if (rn->isLeaf()) return false; if (root >= rn->children.count()) return false; return childOf(rn->children.at(root)); } bool isTag() const { return childOfRoot(Tags); } bool isLocal() const { return childOfRoot(LocalBranches); } BranchNode *childOfName(const QString &name) const { for (int i = 0; i < children.count(); ++i) { if (children.at(i)->name == name) return children.at(i); } return 0; } QStringList fullName(bool includePrefix = false) const { QTC_ASSERT(isLeaf(), return QStringList()); QStringList fn; QList<const BranchNode *> nodes; const BranchNode *current = this; while (current->parent) { nodes.prepend(current); current = current->parent; } if (includePrefix) fn.append(nodes.first()->sha); nodes.removeFirst(); foreach (const BranchNode *n, nodes) fn.append(n->name); return fn; } void insert(const QStringList &path, BranchNode *n) { BranchNode *current = this; for (int i = 0; i < path.count(); ++i) { BranchNode *c = current->childOfName(path.at(i)); if (c) current = c; else current = current->append(new BranchNode(path.at(i))); } current->append(n); } BranchNode *append(BranchNode *n) { n->parent = this; children.append(n); return n; } QStringList childrenNames() const { if (children.count() > 0) { QStringList names; foreach (BranchNode *n, children) { names.append(n->childrenNames()); } return names; } return QStringList(fullName().join(QLatin1Char('/'))); } int rowOf(BranchNode *node) { return children.indexOf(node); } BranchNode *parent; QList<BranchNode *> children; QString name; QString sha; QString tracking; mutable QString toolTip; }; // -------------------------------------------------------------------------- // BranchModel: // -------------------------------------------------------------------------- BranchModel::BranchModel(GitClient *client, QObject *parent) : QAbstractItemModel(parent), m_client(client), m_rootNode(new BranchNode), 
m_currentBranch(0) { QTC_CHECK(m_client); // Abuse the sha field for ref prefix m_rootNode->append(new BranchNode(tr("Local Branches"), QLatin1String("refs/heads"))); m_rootNode->append(new BranchNode(tr("Remote Branches"), QLatin1String("refs/remotes"))); } BranchModel::~BranchModel() { delete m_rootNode; } QModelIndex BranchModel::index(int row, int column, const QModelIndex &parentIdx) const { if (column != 0) return QModelIndex(); BranchNode *parentNode = indexToNode(parentIdx); if (row >= parentNode->count()) return QModelIndex(); return nodeToIndex(parentNode->children.at(row)); } QModelIndex BranchModel::parent(const QModelIndex &index) const { if (!index.isValid()) return QModelIndex(); BranchNode *node = indexToNode(index); if (node->parent == m_rootNode) return QModelIndex(); return nodeToIndex(node->parent); } int BranchModel::rowCount(const QModelIndex &parentIdx) const { if (parentIdx.column() > 0) return 0; return indexToNode(parentIdx)->count(); } int BranchModel::columnCount(const QModelIndex &parent) const { Q_UNUSED(parent); return 1; } QVariant BranchModel::data(const QModelIndex &index, int role) const { BranchNode *node = indexToNode(index); if (!node) return QVariant(); switch (role) { case Qt::DisplayRole: { QString res = node->name; if (!node->tracking.isEmpty()) res += QLatin1String(" [") + node->tracking + QLatin1Char(']'); return res; } case Qt::EditRole: return node->name; case Qt::ToolTipRole: if (!node->isLeaf()) return QVariant(); if (node->toolTip.isEmpty()) node->toolTip = toolTip(node->sha); return node->toolTip; case Qt::FontRole: { QFont font; if (!node->isLeaf()) { font.setBold(true); } else if (node == m_currentBranch) { font.setBold(true); font.setUnderline(true); } return font; } default: return QVariant(); } } bool BranchModel::setData(const QModelIndex &index, const QVariant &value, int role) { if (role != Qt::EditRole) return false; BranchNode *node = indexToNode(index); if (!node) return false; const QString newName = 
value.toString(); if (newName.isEmpty()) return false; if (node->name == newName) return true; QStringList oldFullName = node->fullName(); node->name = newName; QStringList newFullName = node->fullName(); QString output; QString errorMessage; if (!m_client->synchronousBranchCmd(m_workingDirectory, QStringList() << QLatin1String("-m") << oldFullName.last() << newFullName.last(), &output, &errorMessage)) { node->name = oldFullName.last(); VcsOutputWindow::appendError(errorMessage); return false; } emit dataChanged(index, index); return true; } Qt::ItemFlags BranchModel::flags(const QModelIndex &index) const { BranchNode *node = indexToNode(index); if (!node) return Qt::NoItemFlags; if (node->isLeaf() && node->isLocal()) return Qt::ItemIsSelectable | Qt::ItemIsEditable | Qt::ItemIsEnabled; else return Qt::ItemIsSelectable | Qt::ItemIsEnabled; } void BranchModel::clear() { foreach (BranchNode *root, m_rootNode->children) while (root->count()) delete root->children.takeLast(); if (hasTags()) m_rootNode->children.takeLast(); m_currentBranch = 0; } bool BranchModel::refresh(const QString &workingDirectory, QString *errorMessage) { beginResetModel(); clear(); if (workingDirectory.isEmpty()) { endResetModel(); return false; } m_currentSha = m_client->synchronousTopRevision(workingDirectory); QStringList args; args << QLatin1String("--format=%(objectname)\t%(refname)\t%(upstream:short)\t%(*objectname)"); QString output; if (!m_client->synchronousForEachRefCmd(workingDirectory, args, &output, errorMessage)) VcsOutputWindow::appendError(*errorMessage); m_workingDirectory = workingDirectory; const QStringList lines = output.split(QLatin1Char('\n')); foreach (const QString &l, lines) parseOutputLine(l); if (m_currentBranch) { if (m_currentBranch->parent == m_rootNode->children.at(LocalBranches)) m_currentBranch = 0; setCurrentBranch(); } endResetModel(); return true; } void BranchModel::setCurrentBranch() { QString currentBranch = 
m_client->synchronousCurrentLocalBranch(m_workingDirectory); if (currentBranch.isEmpty()) return; BranchNode *local = m_rootNode->children.at(LocalBranches); int pos = 0; for (pos = 0; pos < local->count(); ++pos) { if (local->children.at(pos)->name == currentBranch) m_currentBranch = local->children[pos]; } } void BranchModel::renameBranch(const QString &oldName, const QString &newName) { QString errorMessage; QString output; if (!m_client->synchronousBranchCmd(m_workingDirectory, QStringList() << QLatin1String("-m") << oldName << newName, &output, &errorMessage)) VcsOutputWindow::appendError(errorMessage); else refresh(m_workingDirectory, &errorMessage); } void BranchModel::renameTag(const QString &oldName, const QString &newName) { QString errorMessage; QString output; if (!m_client->synchronousTagCmd(m_workingDirectory, QStringList() << newName << oldName, &output, &errorMessage) || !m_client->synchronousTagCmd(m_workingDirectory, QStringList() << QLatin1String("-d") << oldName, &output, &errorMessage)) { VcsOutputWindow::appendError(errorMessage); } else { refresh(m_workingDirectory, &errorMessage); } } QString BranchModel::workingDirectory() const { return m_workingDirectory; } GitClient *BranchModel::client() const { return m_client; } QModelIndex BranchModel::currentBranch() const { if (!m_currentBranch) return QModelIndex(); return nodeToIndex(m_currentBranch); } QString BranchModel::fullName(const QModelIndex &idx, bool includePrefix) const { if (!idx.isValid()) return QString(); BranchNode *node = indexToNode(idx); if (!node || !node->isLeaf()) return QString(); QStringList path = node->fullName(includePrefix); return path.join(QLatin1Char('/')); } QStringList BranchModel::localBranchNames() const { if (!m_rootNode || !m_rootNode->count()) return QStringList(); return m_rootNode->children.at(LocalBranches)->childrenNames(); } QString BranchModel::sha(const QModelIndex &idx) const { if (!idx.isValid()) return QString(); BranchNode *node = 
indexToNode(idx); return node->sha; } bool BranchModel::hasTags() const { return m_rootNode->children.count() > Tags; } bool BranchModel::isLocal(const QModelIndex &idx) const { if (!idx.isValid()) return false; BranchNode *node = indexToNode(idx); return node->isLocal(); } bool BranchModel::isLeaf(const QModelIndex &idx) const { if (!idx.isValid()) return false; BranchNode *node = indexToNode(idx); return node->isLeaf(); } bool BranchModel::isTag(const QModelIndex &idx) const { if (!idx.isValid() || !hasTags()) return false; return indexToNode(idx)->isTag(); } void BranchModel::removeBranch(const QModelIndex &idx) { QString branch = fullName(idx); if (branch.isEmpty()) return; QString errorMessage; QString output; QStringList args; args << QLatin1String("-D") << branch; if (!m_client->synchronousBranchCmd(m_workingDirectory, args, &output, &errorMessage)) { VcsOutputWindow::appendError(errorMessage); return; } removeNode(idx); } void BranchModel::removeTag(const QModelIndex &idx) { QString tag = fullName(idx); if (tag.isEmpty()) return; QString errorMessage; QString output; QStringList args; args << QLatin1String("-d") << tag; if (!m_client->synchronousTagCmd(m_workingDirectory, args, &output, &errorMessage)) { VcsOutputWindow::appendError(errorMessage); return; } removeNode(idx); } void BranchModel::checkoutBranch(const QModelIndex &idx) { QString branch = fullName(idx, !isLocal(idx)); if (branch.isEmpty()) return; // No StashGuard since this function for now is only used with clean working dir. 
// If it is ever used from another place, please add StashGuard here m_client->synchronousCheckout(m_workingDirectory, branch); } bool BranchModel::branchIsMerged(const QModelIndex &idx) { QString branch = fullName(idx); if (branch.isEmpty()) return false; QString errorMessage; QString output; QStringList args; args << QLatin1String("-a") << QLatin1String("--contains") << sha(idx); if (!m_client->synchronousBranchCmd(m_workingDirectory, args, &output, &errorMessage)) VcsOutputWindow::appendError(errorMessage); QStringList lines = output.split(QLatin1Char('\n'), QString::SkipEmptyParts); foreach (const QString &l, lines) { QString currentBranch = l.mid(2); // remove first letters (those are either // " " or "* " depending on whether it is // the currently checked out branch or not) if (currentBranch != branch) return true; } return false; } static int positionForName(BranchNode *node, const QString &name) { int pos = 0; for (pos = 0; pos < node->count(); ++pos) { if (node->children.at(pos)->name >= name) break; } return pos; } QModelIndex BranchModel::addBranch(const QString &name, bool track, const QModelIndex &startPoint) { if (!m_rootNode || !m_rootNode->count()) return QModelIndex(); const QString trackedBranch = fullName(startPoint); const QString fullTrackedBranch = fullName(startPoint, true); QString startSha; QString output; QString errorMessage; QStringList args; args << (track ? QLatin1String("--track") : QLatin1String("--no-track")); args << name; if (!fullTrackedBranch.isEmpty()) { args << fullTrackedBranch; startSha = sha(startPoint); } else { startSha = m_client->synchronousTopRevision(m_workingDirectory); } if (!m_client->synchronousBranchCmd(m_workingDirectory, args, &output, &errorMessage)) { VcsOutputWindow::appendError(errorMessage); return QModelIndex(); } BranchNode *local = m_rootNode->children.at(LocalBranches); const int slash = name.indexOf(QLatin1Char('/')); const QString leafName = slash == -1 ? 
name : name.mid(slash + 1); bool added = false; if (slash != -1) { const QString nodeName = name.left(slash); int pos = positionForName(local, nodeName); BranchNode *child = (pos == local->count()) ? 0 : local->children.at(pos); if (!child || child->name != nodeName) { child = new BranchNode(nodeName); beginInsertRows(nodeToIndex(local), pos, pos); added = true; child->parent = local; local->children.insert(pos, child); } local = child; } int pos = positionForName(local, leafName); auto newNode = new BranchNode(leafName, startSha, track ? trackedBranch : QString()); if (!added) beginInsertRows(nodeToIndex(local), pos, pos); newNode->parent = local; local->children.insert(pos, newNode); endInsertRows(); return nodeToIndex(newNode); } void BranchModel::setRemoteTracking(const QModelIndex &trackingIndex) { QModelIndex current = currentBranch(); QTC_ASSERT(current.isValid(), return); const QString currentName = fullName(current); const QString shortTracking = fullName(trackingIndex); const QString tracking = fullName(trackingIndex, true); m_client->synchronousSetTrackingBranch(m_workingDirectory, currentName, tracking); m_currentBranch->tracking = shortTracking; emit dataChanged(current, current); } void BranchModel::parseOutputLine(const QString &line) { if (line.size() < 3) return; QStringList lineParts = line.split(QLatin1Char('\t')); const QString shaDeref = lineParts.at(3); const QString sha = shaDeref.isEmpty() ? lineParts.at(0) : shaDeref; const QString fullName = lineParts.at(1); bool current = (sha == m_currentSha); bool showTags = m_client->settings().boolValue(GitSettings::showTagsKey); // insert node into tree: QStringList nameParts = fullName.split(QLatin1Char('/')); nameParts.removeFirst(); // remove refs... 
BranchNode *root = 0; if (nameParts.first() == QLatin1String("heads")) { root = m_rootNode->children.at(LocalBranches); } else if (nameParts.first() == QLatin1String("remotes")) { root = m_rootNode->children.at(RemoteBranches); } else if (showTags && nameParts.first() == QLatin1String("tags")) { if (!hasTags()) // Tags is missing, add it m_rootNode->append(new BranchNode(tr("Tags"), QLatin1String("refs/tags"))); root = m_rootNode->children.at(Tags); } else { return; } nameParts.removeFirst(); // limit depth of list. Git basically only ever wants one / and considers the rest as part of // the name. while (nameParts.count() > 3) { nameParts[2] = nameParts.at(2) + QLatin1Char('/') + nameParts.at(3); nameParts.removeAt(3); } const QString name = nameParts.last(); nameParts.removeLast(); auto newNode = new BranchNode(name, sha, lineParts.at(2)); root->insert(nameParts, newNode); if (current) m_currentBranch = newNode; } BranchNode *BranchModel::indexToNode(const QModelIndex &index) const { if (index.column() > 0) return 0; if (!index.isValid()) return m_rootNode; return static_cast<BranchNode *>(index.internalPointer()); } QModelIndex BranchModel::nodeToIndex(BranchNode *node) const { if (node == m_rootNode) return QModelIndex(); return createIndex(node->parent->rowOf(node), 0, static_cast<void *>(node)); } void BranchModel::removeNode(const QModelIndex &idx) { QModelIndex nodeIndex = idx; // idx is a leaf, so count must be 0. 
BranchNode *node = indexToNode(nodeIndex); while (node->count() == 0 && node->parent != m_rootNode) { BranchNode *parentNode = node->parent; const QModelIndex parentIndex = nodeToIndex(parentNode); const int nodeRow = nodeIndex.row(); beginRemoveRows(parentIndex, nodeRow, nodeRow); parentNode->children.removeAt(nodeRow); delete node; endRemoveRows(); node = parentNode; nodeIndex = parentIndex; } } QString BranchModel::toolTip(const QString &sha) const { // Show the sha description excluding diff as toolTip QString output; QString errorMessage; QStringList arguments(QLatin1String("-n1")); arguments << sha; if (!m_client->synchronousLog(m_workingDirectory, arguments, &output, &errorMessage, VcsCommand::SuppressCommandLogging)) { return errorMessage; } return output; } } // namespace Internal } // namespace Git<|fim▁end|>
{ return children.count(); }
<|file_name|>flex-item.directive.ts<|end_file_name|><|fim▁begin|>import {Directive, Input, HostBinding, ElementRef} from "@angular/core"; /** * A directive to control flex items layout properties. */ @Directive({ selector: '[flex]' }) export class FlexItemDirective{ /** * Controls the flex-basis property. * @type {string} size value (px, vh, vp, em, %, etc...) */ @HostBinding('style.flex-basis') @HostBinding('style.-webkit-flex-basis')<|fim▁hole|> @Input('flex') basis: string = 'auto'; /** * Controls the flex-grow property. * @type {number} positive integer. */ @HostBinding('style.flex-grow') @HostBinding('style.-webkit-flex-grow') @Input() grow: number = 0; /** * Controls the flex-shrink property. * @type {number} positive integer */ @HostBinding('style.flex-shrink') @HostBinding('style.-webkit-flex-shrink') @Input() shrink: number = 1; /** * Controls the flex align-self property. */ @HostBinding('style.align-self') private _gravity: string = 'inherit'; constructor(private el: ElementRef) {} @Input("gravity") set gravity(value: string) { switch (value){ case 'start': this._gravity = 'flex-start'; break; case 'center': this._gravity = 'center'; break; case 'end': this._gravity = 'flex-end'; break; case 'fill': this._gravity = 'stretch'; break; case 'baseline': this._gravity = 'baseline'; break; default: this._gravity = 'inherit'; break; } } }<|fim▁end|>
<|file_name|>find_warcs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 from sfmutils.api_client import ApiClient import argparse import logging import sys log = logging.getLogger(__name__) def main(sys_argv): # Arguments parser = argparse.ArgumentParser(description="Return WARC filepaths for passing to other commandlines.") parser.add_argument("--harvest-start", help="ISO8601 datetime after which harvest was performed. For example, " "2015-02-22T14:49:07Z") parser.add_argument("--harvest-end", help="ISO8601 datetime before which harvest was performed. For example, " "2015-02-22T14:49:07Z") parser.add_argument("--warc-start", help="ISO8601 datetime after which WARC was created. For example, " "2015-02-22T14:49:07Z") parser.add_argument("--warc-end", help="ISO8601 datetime before which WARC was created. For example, " "2015-02-22T14:49:07Z") default_api_base_url = "http://api:8080" parser.add_argument("--api-base-url", help="Base url of the SFM API. Default is {}.".format(default_api_base_url), default=default_api_base_url) parser.add_argument("--debug", type=lambda v: v.lower() in ("yes", "true", "t", "1"), nargs="?", default="False", const="True") parser.add_argument("--newline", action="store_true", help="Separates WARCs by newline instead of space.") parser.add_argument("collection", nargs="+", help="Limit to WARCs of this collection. " "Truncated collection ids may be used.") # Explicitly using sys.argv so that can mock out for testing. 
args = parser.parse_args(sys_argv[1:]) # Logging logging.basicConfig(format='%(asctime)s: %(name)s --> %(message)s', level=logging.DEBUG if args.debug else logging.INFO) logging.getLogger("requests").setLevel(logging.DEBUG if args.debug else logging.INFO) api_client = ApiClient(args.api_base_url) collection_ids = [] for collection_id_part in args.collection: log.debug("Looking up collection id part %s", collection_id_part) if len(collection_id_part) == 32: collection_ids.append(collection_id_part) else: collections = list(api_client.collections(collection_id_startswith=collection_id_part)) if len(collections) == 0: print("No matching collections for {}".format(collection_id_part)) sys.exit(1) elif len(collections) > 1: print("Multiple matching collections for {}".format(collection_id_part)) sys.exit(1) else: collection_ids.append(collections[0]["collection_id"])<|fim▁hole|> for collection_id in collection_ids: log.debug("Looking up warcs for %s", collection_id) warcs = api_client.warcs(collection_id=collection_id, harvest_date_start=args.harvest_start, harvest_date_end=args.harvest_end, created_date_start=args.warc_start, created_date_end=args.warc_end) for warc in warcs: warc_filepaths.add(warc["path"]) sep = "\n" if args.newline else " " return sep.join(sorted(warc_filepaths)) if __name__ == "__main__": print(main(sys.argv))<|fim▁end|>
warc_filepaths = set()
<|file_name|>endtoend_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package asm import ( "bufio" "bytes" "fmt" "io/ioutil" "os" "path/filepath" "regexp" "sort" "strconv" "strings" "testing" "cmd/asm/internal/lex" "cmd/internal/obj" "cmd/internal/objabi" ) // An end-to-end test for the assembler: Do we print what we parse? // Output is generated by, in effect, turning on -S and comparing the // result against a golden file. func testEndToEnd(t *testing.T, goarch, file string) { input := filepath.Join("testdata", file+".s") architecture, ctxt := setArch(goarch) architecture.Init(ctxt) lexer := lex.NewLexer(input) parser := NewParser(ctxt, architecture, lexer) pList := new(obj.Plist) var ok bool testOut = new(bytes.Buffer) // The assembler writes test output to this buffer. ctxt.Bso = bufio.NewWriter(os.Stdout) defer ctxt.Bso.Flush() failed := false ctxt.DiagFunc = func(format string, args ...interface{}) { failed = true t.Errorf(format, args...) } pList.Firstpc, ok = parser.Parse() if !ok || failed { t.Errorf("asm: %s assembly failed", goarch) return } output := strings.Split(testOut.String(), "\n") // Reconstruct expected output by independently "parsing" the input. data, err := ioutil.ReadFile(input) if err != nil { t.Error(err) return } lineno := 0 seq := 0 hexByLine := map[string]string{} lines := strings.SplitAfter(string(data), "\n") Diff: for _, line := range lines { lineno++ // Ignore include of textflag.h. 
if strings.HasPrefix(line, "#include ") { continue } // The general form of a test input line is: // // comment // INST args [// printed form] [// hex encoding] parts := strings.Split(line, "//") printed := strings.TrimSpace(parts[0]) if printed == "" || strings.HasSuffix(printed, ":") { // empty or label continue } seq++ var hexes string switch len(parts) { default: t.Errorf("%s:%d: unable to understand comments: %s", input, lineno, line) case 1: // no comment case 2: // might be printed form or hex note := strings.TrimSpace(parts[1]) if isHexes(note) { hexes = note } else { printed = note } case 3: // printed form, then hex<|fim▁hole|> hexes = strings.TrimSpace(parts[2]) if !isHexes(hexes) { t.Errorf("%s:%d: malformed hex instruction encoding: %s", input, lineno, line) } } if hexes != "" { hexByLine[fmt.Sprintf("%s:%d", input, lineno)] = hexes } // Canonicalize spacing in printed form. // First field is opcode, then tab, then arguments separated by spaces. // Canonicalize spaces after commas first. // Comma to separate argument gets a space; comma within does not. var buf []byte nest := 0 for i := 0; i < len(printed); i++ { c := printed[i] switch c { case '{', '[': nest++ case '}', ']': nest-- case ',': buf = append(buf, ',') if nest == 0 { buf = append(buf, ' ') } for i+1 < len(printed) && (printed[i+1] == ' ' || printed[i+1] == '\t') { i++ } continue } buf = append(buf, c) } f := strings.Fields(string(buf)) // Turn relative (PC) into absolute (PC) automatically, // so that most branch instructions don't need comments // giving the absolute form. 
if len(f) > 0 && strings.HasSuffix(printed, "(PC)") { last := f[len(f)-1] n, err := strconv.Atoi(last[:len(last)-len("(PC)")]) if err == nil { f[len(f)-1] = fmt.Sprintf("%d(PC)", seq+n) } } if len(f) == 1 { printed = f[0] } else { printed = f[0] + "\t" + strings.Join(f[1:], " ") } want := fmt.Sprintf("%05d (%s:%d)\t%s", seq, input, lineno, printed) for len(output) > 0 && (output[0] < want || output[0] != want && len(output[0]) >= 5 && output[0][:5] == want[:5]) { if len(output[0]) >= 5 && output[0][:5] == want[:5] { t.Errorf("mismatched output:\nhave %s\nwant %s", output[0], want) output = output[1:] continue Diff } t.Errorf("unexpected output: %q", output[0]) output = output[1:] } if len(output) > 0 && output[0] == want { output = output[1:] } else { t.Errorf("missing output: %q", want) } } for len(output) > 0 { if output[0] == "" { // spurious blank caused by Split on "\n" output = output[1:] continue } t.Errorf("unexpected output: %q", output[0]) output = output[1:] } // Checked printing. // Now check machine code layout. top := pList.Firstpc var text *obj.LSym ok = true ctxt.DiagFunc = func(format string, args ...interface{}) { t.Errorf(format, args...) 
ok = false } obj.Flushplist(ctxt, pList, nil, "") for p := top; p != nil; p = p.Link { if p.As == obj.ATEXT { text = p.From.Sym } hexes := hexByLine[p.Line()] if hexes == "" { continue } delete(hexByLine, p.Line()) if text == nil { t.Errorf("%s: instruction outside TEXT", p) } size := int64(len(text.P)) - p.Pc if p.Link != nil { size = p.Link.Pc - p.Pc } else if p.Isize != 0 { size = int64(p.Isize) } var code []byte if p.Pc < int64(len(text.P)) { code = text.P[p.Pc:] if size < int64(len(code)) { code = code[:size] } } codeHex := fmt.Sprintf("%x", code) if codeHex == "" { codeHex = "empty" } ok := false for _, hex := range strings.Split(hexes, " or ") { if codeHex == hex { ok = true break } } if !ok { t.Errorf("%s: have encoding %s, want %s", p, codeHex, hexes) } } if len(hexByLine) > 0 { var missing []string for key := range hexByLine { missing = append(missing, key) } sort.Strings(missing) for _, line := range missing { t.Errorf("%s: did not find instruction encoding", line) } } } func isHexes(s string) bool { if s == "" { return false } if s == "empty" { return true } for _, f := range strings.Split(s, " or ") { if f == "" || len(f)%2 != 0 || strings.TrimLeft(f, "0123456789abcdef") != "" { return false } } return true } // It would be nice if the error messages began with // the standard file:line: prefix, // but that's not where we are today. // It might be at the beginning but it might be in the middle of the printed instruction. 
var fileLineRE = regexp.MustCompile(`(?:^|\()(testdata[/\\][0-9a-z]+\.s:[0-9]+)(?:$|\))`) // Same as in test/run.go var ( errRE = regexp.MustCompile(`// ERROR ?(.*)`) errQuotesRE = regexp.MustCompile(`"([^"]*)"`) ) func testErrors(t *testing.T, goarch, file string) { input := filepath.Join("testdata", file+".s") architecture, ctxt := setArch(goarch) lexer := lex.NewLexer(input) parser := NewParser(ctxt, architecture, lexer) pList := new(obj.Plist) var ok bool testOut = new(bytes.Buffer) // The assembler writes test output to this buffer. ctxt.Bso = bufio.NewWriter(os.Stdout) defer ctxt.Bso.Flush() failed := false var errBuf bytes.Buffer ctxt.DiagFunc = func(format string, args ...interface{}) { failed = true s := fmt.Sprintf(format, args...) if !strings.HasSuffix(s, "\n") { s += "\n" } errBuf.WriteString(s) } pList.Firstpc, ok = parser.Parse() obj.Flushplist(ctxt, pList, nil, "") if ok && !failed { t.Errorf("asm: %s had no errors", goarch) } errors := map[string]string{} for _, line := range strings.Split(errBuf.String(), "\n") { if line == "" || strings.HasPrefix(line, "\t") { continue } m := fileLineRE.FindStringSubmatch(line) if m == nil { t.Errorf("unexpected error: %v", line) continue } fileline := m[1] if errors[fileline] != "" && errors[fileline] != line { t.Errorf("multiple errors on %s:\n\t%s\n\t%s", fileline, errors[fileline], line) continue } errors[fileline] = line } // Reconstruct expected errors by independently "parsing" the input. 
data, err := ioutil.ReadFile(input) if err != nil { t.Error(err) return } lineno := 0 lines := strings.Split(string(data), "\n") for _, line := range lines { lineno++ fileline := fmt.Sprintf("%s:%d", input, lineno) if m := errRE.FindStringSubmatch(line); m != nil { all := m[1] mm := errQuotesRE.FindAllStringSubmatch(all, -1) if len(mm) != 1 { t.Errorf("%s: invalid errorcheck line:\n%s", fileline, line) } else if err := errors[fileline]; err == "" { t.Errorf("%s: missing error, want %s", fileline, all) } else if !strings.Contains(err, mm[0][1]) { t.Errorf("%s: wrong error for %s:\n%s", fileline, all, err) } } else { if errors[fileline] != "" { t.Errorf("unexpected error on %s: %v", fileline, errors[fileline]) } } delete(errors, fileline) } var extra []string for key := range errors { extra = append(extra, key) } sort.Strings(extra) for _, fileline := range extra { t.Errorf("unexpected error on %s: %v", fileline, errors[fileline]) } } func Test386EndToEnd(t *testing.T) { defer func(old string) { objabi.GO386 = old }(objabi.GO386) for _, go386 := range []string{"387", "sse2"} { t.Logf("GO386=%v", go386) objabi.GO386 = go386 testEndToEnd(t, "386", "386") } } func TestARMEndToEnd(t *testing.T) { defer func(old int) { objabi.GOARM = old }(objabi.GOARM) for _, goarm := range []int{5, 6, 7} { t.Logf("GOARM=%d", goarm) objabi.GOARM = goarm testEndToEnd(t, "arm", "arm") if goarm == 6 { testEndToEnd(t, "arm", "armv6") } } } func TestARMErrors(t *testing.T) { testErrors(t, "arm", "armerror") } func TestARM64EndToEnd(t *testing.T) { testEndToEnd(t, "arm64", "arm64") } func TestARM64Encoder(t *testing.T) { testEndToEnd(t, "arm64", "arm64enc") } func TestARM64Errors(t *testing.T) { testErrors(t, "arm64", "arm64error") } func TestAMD64EndToEnd(t *testing.T) { testEndToEnd(t, "amd64", "amd64") } func Test386Encoder(t *testing.T) { testEndToEnd(t, "386", "386enc") } func TestAMD64Encoder(t *testing.T) { testEndToEnd(t, "amd64", "amd64enc") testEndToEnd(t, "amd64", "amd64enc_extra") 
} func TestAMD64Errors(t *testing.T) { testErrors(t, "amd64", "amd64error") } func TestMIPSEndToEnd(t *testing.T) { testEndToEnd(t, "mips", "mips") testEndToEnd(t, "mips64", "mips64") } func TestPPC64EndToEnd(t *testing.T) { testEndToEnd(t, "ppc64", "ppc64") } func TestPPC64Encoder(t *testing.T) { testEndToEnd(t, "ppc64", "ppc64enc") } func TestS390XEndToEnd(t *testing.T) { testEndToEnd(t, "s390x", "s390x") }<|fim▁end|>
printed = strings.TrimSpace(parts[1])
<|file_name|>units.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import (absolute_import, division, print_function, unicode_literals) import numpy as np <|fim▁hole|> def quantity_support(format='latex_inline'): """ Enable support for plotting `astropy.units.Quantity` instances in matplotlib. May be (optionally) used with a ``with`` statement. >>> import matplotlib.pyplot as plt >>> from astropy import units as u >>> from astropy import visualization >>> with visualization.quantity_support(): ... plt.figure() ... plt.plot([1, 2, 3] * u.m) [...] ... plt.plot([101, 125, 150] * u.cm) [...] ... plt.draw() Parameters ---------- format : `astropy.units.format.Base` instance or str The name of a format or a formatter object. If not provided, defaults to ``latex_inline``. """ from .. import units as u from matplotlib import units from matplotlib import ticker def rad_fn(x, pos=None): n = int((x / np.pi) * 2.0 + 0.25) if n == 0: return '0' elif n == 1: return 'π/2' elif n == 2: return 'π' elif n % 2 == 0: return '{0}π'.format(n / 2) else: return '{0}π/2'.format(n) class MplQuantityConverter(units.ConversionInterface): def __init__(self): if u.Quantity not in units.registry: units.registry[u.Quantity] = self self._remove = True else: self._remove = False @staticmethod def axisinfo(unit, axis): if unit == u.radian: return units.AxisInfo( majloc=ticker.MultipleLocator(base=np.pi/2), majfmt=ticker.FuncFormatter(rad_fn), label=unit.to_string(), ) elif unit == u.degree: return units.AxisInfo( majloc=ticker.AutoLocator(), majfmt=ticker.FormatStrFormatter('%i°'), label=unit.to_string(), ) elif unit is not None: return units.AxisInfo(label=unit.to_string(format)) return None @staticmethod def convert(val, unit, axis): if isinstance(val, u.Quantity): return val.to_value(unit) else: return val @staticmethod def default_units(x, axis): if hasattr(x, 'unit'): return x.unit return None def 
__enter__(self): return self def __exit__(self, type, value, tb): if self._remove: del units.registry[u.Quantity] return MplQuantityConverter()<|fim▁end|>
__doctest_skip__ = ['quantity_support']
<|file_name|>FedoraHttpConfiguration.java<|end_file_name|><|fim▁begin|>/* * Copyright 2015 DuraSpace, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fcrepo.http.api; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; /** * @author cabeer * @since 10/17/14 */ @Component public class FedoraHttpConfiguration { @Value("${fcrepo.http.ldp.putRequiresIfMatch:false}")<|fim▁hole|> * Should PUT requests require an If-Match header? * @return put request if match */ public boolean putRequiresIfMatch() { return putRequiresIfMatch; } }<|fim▁end|>
private boolean putRequiresIfMatch; /**
<|file_name|>warnd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import getopt import sys import os Options = { 'daemon': False, 'fifo': os.environ['HOME'] + '/warnd_fifo', 'log': [], 'time': 2, 'size': 54, 'wrap': 0, } State = { 'currentlog': 0, } USAGE = '\n'.join([ 'Usage: %s' % sys.argv[0], ' Warns the user about something, one-liner.', ' -f, --fifo FIFO Create designed fifo. Default is %s.' % Options['fifo'], ' -l, --log LOG1,LOG2,... Where to log the messages. ' 'Userfull with root-tail. Default: no log.', ]) def usage(): print(USAGE) def loga(lines): if len(Options['log']) == 0: return log = Options['log'][State['currentlog']] hoje = os.popen("date +%b\\ %e\\ %H:%M:%S", 'r') line = hoje.read()[:-1] + ' ' + os.environ['USER'] + ' ' + "".join(lines) hoje.close() log.write(line + "\n") print(line) log.flush() State['currentlog'] = (State['currentlog'] + 1) % len(Options['log']) def doline(lines): loga(lines) os.system("xkbbell") def main(): try: opts, args = getopt.getopt(<|fim▁hole|> except getopt.GetoptError: usage() sys.exit(2) for o, a in opts: if o in ('-h', '--help'): usage() sys.exit() if o in ('-f', '--fifo'): Options['fifo'] = a if o in ('-l', '--log'): for log in a.split(','): Options['log'].append(open(log, 'a')) cfg = open('%s/.warnd.cfg' % os.environ['HOME'], 'w+') cfg.write(Options['fifo'] + '\n') cfg.close() os.system('mkfifo %s 2>/dev/null; chmod 0660 %s' % (Options['fifo'], Options['fifo'])) while True: lines = [] fifo = open(Options['fifo'], 'r') for line in fifo.readlines(): lines.append(line.strip()) fifo.close() doline(lines) if __name__ == '__main__': main()<|fim▁end|>
sys.argv[1:], 'hf:l:t:s:w:', ['help', 'fifo', 'log', 'wrap'])
<|file_name|>parsing_types.rs<|end_file_name|><|fim▁begin|>#[derive(Debug, PartialEq)] pub struct RawHashlineParseData { pub(super) indent_depth: usize, pub(super) name: String, pub(super) opts: String, pub(super) args: String, pub(super) comment: String, } #[derive(Debug, PartialEq)] pub struct RawItemlineParseData { pub(super) indent_depth: usize, pub(super) item: String, } #[derive(Debug, PartialEq)] pub enum Hashline { OpenEnv(Environment), PlainLine(String), } #[derive(Debug, PartialEq)] pub struct Environment { indent_depth: usize, name: String, opts: String, comment: String, is_list_like: bool, } #[inline] fn is_a_list_environment(input: &str) -> bool { fn parser(input: &str) -> nom::IResult<&str, &str> { use nom::branch::alt; use nom::bytes::complete::tag; alt((tag("itemize"), tag("enumerate"), tag("description")))(input) } parser(input.trim_start()).is_ok() } impl From<RawHashlineParseData> for Hashline { fn from(raw_hashline: RawHashlineParseData) -> Self { if raw_hashline.args.trim().is_empty() { // If no args are given, it's an environment let is_list_like = is_a_list_environment(raw_hashline.name.as_ref()); Hashline::OpenEnv(Environment { indent_depth: raw_hashline.indent_depth, name: raw_hashline.name, opts: raw_hashline.opts, comment: raw_hashline.comment, is_list_like, }) } else { // If there are some args, it's a single-line command Hashline::PlainLine(format!( r"{dummy:ind$}\{name}{opts}{{{args}}}{comment_sep}{comment}", dummy = "", ind = raw_hashline.indent_depth, name = raw_hashline.name, opts = raw_hashline.opts, args = raw_hashline.args, comment_sep = if raw_hashline.comment.is_empty() { "" } else { " " }, comment = raw_hashline.comment.trim(), )) } } } impl From<RawItemlineParseData> for Hashline { fn from(raw_itemline: RawItemlineParseData) -> Self { Hashline::PlainLine(format!( r"{dummy:ind$}\item{item_sep}{content}", dummy = "", ind = raw_itemline.indent_depth, content = raw_itemline.item, item_sep = if raw_itemline.item.is_empty() { "" } 
else { " " }, )) } } impl Environment { #[cfg(test)] pub fn new( indent_depth: usize, name: String, opts: String, comment: String, is_list_like: bool, ) -> Self { Self { indent_depth, name, opts, comment, is_list_like, } } pub fn latex_begin(&self) -> String { format!( r"{dummy:ind$}\begin{{{name}}}{opts}{comment_sep}{comment}", name = self.name, opts = self.opts, comment = self.comment, dummy = "", ind = self.indent_depth, comment_sep = if self.comment.is_empty() { "" } else { " " }, ) } pub fn latex_end(&self) -> String { format!( r"{dummy:ind$}\end{{{name}}}", name = self.name, dummy = "", ind = self.indent_depth, ) } pub fn indent_depth(&self) -> usize { self.indent_depth } pub fn is_list_like(&self) -> bool { self.is_list_like } } // LCOV_EXCL_START #[cfg(test)] mod tests { #[test] fn list_environment_recognition() { use super::is_a_list_environment; assert_eq!(is_a_list_environment("itemize"), true); assert_eq!(is_a_list_environment("enumerate*"), true); assert_eq!(is_a_list_environment(" description *"), true); assert_eq!(is_a_list_environment(" descriptionitemize"), true); assert_eq!(is_a_list_environment("item"), false); assert_eq!(is_a_list_environment(" itemiz"), false); assert_eq!(is_a_list_environment(" foobar"), false); } #[cfg(test)] mod raw_hashline_parser_data_into_hashline { use super::super::{Hashline, RawHashlineParseData}; #[test] fn plain_lines() { assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 0, name: "foo".to_string(), opts: "".to_string(), args: "bar".to_string(), comment: "".to_string() }), Hashline::PlainLine("\\foo{bar}".to_string()) ); assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 2, name: "foo".to_string(), opts: "".to_string(), args: "bar".to_string(), comment: "qux".to_string() }), Hashline::PlainLine(" \\foo{bar} qux".to_string()) ); assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 4, name: "foo".to_string(), opts: "bar".to_string(), args: "qux".to_string(), comment: "".to_string() 
}), Hashline::PlainLine(" \\foobar{qux}".to_string()) ); } #[test] fn environments() { use super::super::Environment; assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 0, name: "foo".to_string(), opts: "bar".to_string(), args: "".to_string(), comment: "".to_string() }), Hashline::OpenEnv(Environment { indent_depth: 0, name: "foo".to_string(), opts: "bar".to_string(), comment: "".to_string(), is_list_like: false, }) ); assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 2, name: "foo".to_string(), opts: "".to_string(), args: "".to_string(), comment: "bar".to_string() }), Hashline::OpenEnv(Environment { indent_depth: 2, name: "foo".to_string(), opts: "".to_string(), comment: "bar".to_string(), is_list_like: false, }) ); assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 4, name: "foo".to_string(), opts: "bar".to_string(), args: "".to_string(), comment: "qux".to_string() }), Hashline::OpenEnv(Environment { indent_depth: 4, name: "foo".to_string(), opts: "bar".to_string(), comment: "qux".to_string(), is_list_like: false, }) ); assert_eq!( Hashline::from(RawHashlineParseData { indent_depth: 0, name: "itemize".to_string(), opts: "bar".to_string(), args: "".to_string(), comment: "qux".to_string() }),<|fim▁hole|> indent_depth: 0, name: "itemize".to_string(), opts: "bar".to_string(), comment: "qux".to_string(), is_list_like: true, }) ); } } #[test] fn raw_itemline_parser_data_into_hashline() { use super::{Hashline, RawItemlineParseData}; assert_eq!( Hashline::from(RawItemlineParseData { indent_depth: 0, item: "".to_string() }), Hashline::PlainLine(r"\item".to_string()) ); assert_eq!( Hashline::from(RawItemlineParseData { indent_depth: 0, item: "".to_string() }), Hashline::PlainLine(r"\item".to_string()) ); assert_eq!( Hashline::from(RawItemlineParseData { indent_depth: 2, item: "".to_string() }), Hashline::PlainLine(r" \item".to_string()) ); assert_eq!( Hashline::from(RawItemlineParseData { indent_depth: 0, item: "foo".to_string() }), 
Hashline::PlainLine(r"\item foo".to_string()) ); assert_eq!( Hashline::from(RawItemlineParseData { indent_depth: 3, item: "bar".to_string() }), Hashline::PlainLine(r" \item bar".to_string()) ); assert_eq!( Hashline::from(RawItemlineParseData { indent_depth: 0, item: "**".to_string() }), Hashline::PlainLine(r"\item **".to_string()) ); } #[test] fn environment_methods() { use super::Environment; let env_1 = Environment { indent_depth: 0, name: "foo".to_string(), opts: "bar".to_string(), comment: "% baz".to_string(), is_list_like: true, }; assert_eq!(env_1.latex_begin(), "\\begin{foo}bar % baz"); assert_eq!(env_1.latex_end(), "\\end{foo}"); assert_eq!(env_1.is_list_like(), true); assert_eq!(env_1.indent_depth(), 0); let env_2 = Environment { indent_depth: 2, name: "abc".to_string(), opts: "def".to_string(), comment: "".to_string(), is_list_like: false, }; assert_eq!(env_2.latex_begin(), " \\begin{abc}def"); assert_eq!(env_2.latex_end(), " \\end{abc}"); assert_eq!(env_2.is_list_like(), false); assert_eq!(env_2.indent_depth(), 2); } } // LCOV_EXCL_STOP<|fim▁end|>
Hashline::OpenEnv(Environment {
<|file_name|>px_update_git_header.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from __future__ import print_function <|fim▁hole|>import argparse import os import sys import subprocess import re parser = argparse.ArgumentParser(description="""Extract version info from git and generate a version header file. The working directory is expected to be the root of Firmware.""") parser.add_argument('filename', metavar='version.h', help='Header output file') parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Verbose output', default=False) parser.add_argument('--validate', dest='validate', action='store_true', help='Validate the tag format', default=False) args = parser.parse_args() filename = args.filename verbose = args.verbose validate = args.validate try: fp_header = open(filename, 'r') old_header = fp_header.read() except: old_header = '' # Generate the header file content header = """ /* Auto Magically Generated file */ /* Do not edit! */ #pragma once """ # PX4 git_tag = subprocess.check_output('git describe --always --tags --dirty'.split(), stderr=subprocess.STDOUT).decode('utf-8').strip() if validate: if verbose: print("testing git tag: "+git_tag) # remove optional '-dirty' at the end git_tag_test = re.sub(r'-dirty$', '', git_tag) # remove optional -<num_commits>-g<commit_hash> at the end (in case we are not on a tagged commit) git_tag_test = re.sub(r'-[0-9]+-g[0-9a-fA-F]+$', '', git_tag_test) # now check the version format m = re.match(r'v([0-9]+)\.([0-9]+)\.[0-9]+(((-dev)|(-alpha[0-9]+)|(-beta[0-9]+)|(-rc[0-9]+))|'\ r'(-[0-9]+\.[0-9]+\.[0-9]+((-dev)|(-alpha[0-9]+)|(-beta[0-9]+)|([-]?rc[0-9]+))?))?$', git_tag_test) if m: # format matches, check the major and minor numbers major = int(m.group(1)) minor = int(m.group(2)) if major < 1 or (major == 1 and minor < 9): print("") print("Error: PX4 version too low, expected at least v1.9.0") print("Check the git tag (current tag: '{:}')".format(git_tag_test)) print("") sys.exit(1) 
else: print("") print("Error: the git tag '{:}' does not match the expected format.".format(git_tag_test)) print("") print("The expected format is 'v<PX4 version>[-<custom version>]'") print(" <PX4 version>: v<major>.<minor>.<patch>[-rc<rc>|-beta<beta>|-alpha<alpha>|-dev]") print(" <custom version>: <major>.<minor>.<patch>[-rc<rc>|-beta<beta>|-alpha<alpha>|-dev]") print("Examples:") print(" v1.9.0-rc3 (preferred)") print(" v1.9.0-beta1") print(" v1.9.0-1.0.0") print(" v1.9.0-1.0.0-alpha2") print("See also https://dev.px4.io/master/en/setup/building_px4.html#firmware_version") print("") sys.exit(1) git_version = subprocess.check_output('git rev-parse --verify HEAD'.split(), stderr=subprocess.STDOUT).decode('utf-8').strip() try: git_branch_name = subprocess.check_output('git symbolic-ref -q --short HEAD'.split(), stderr=subprocess.STDOUT).decode('utf-8').strip() except: git_branch_name = '' git_version_short = git_version[0:16] header += """ #define PX4_GIT_VERSION_STR "{git_version}" #define PX4_GIT_VERSION_BINARY 0x{git_version_short} #define PX4_GIT_TAG_STR "{git_tag}" #define PX4_GIT_BRANCH_NAME "{git_branch_name}" """.format(git_tag=git_tag, git_version=git_version, git_version_short=git_version_short, git_branch_name=git_branch_name) # ECL if (os.path.exists('src/lib/ecl/.git')): ecl_git_tag = subprocess.check_output('git describe --always --tags --dirty'.split(), cwd='src/lib/ecl', stderr=subprocess.STDOUT).decode('utf-8') ecl_git_version = subprocess.check_output('git rev-parse --verify HEAD'.split(), cwd='src/lib/ecl', stderr=subprocess.STDOUT).decode('utf-8').strip() ecl_git_version_short = ecl_git_version[0:16] header += """ #define ECL_LIB_GIT_VERSION_STR "{ecl_git_version}" #define ECL_LIB_GIT_VERSION_BINARY 0x{ecl_git_version_short} """.format(ecl_git_version=ecl_git_version, ecl_git_version_short=ecl_git_version_short) # Mavlink if (os.path.exists('mavlink/include/mavlink/v2.0/.git')): mavlink_git_version = subprocess.check_output('git rev-parse 
--verify HEAD'.split(), cwd='mavlink/include/mavlink/v2.0', stderr=subprocess.STDOUT).decode('utf-8').strip() mavlink_git_version_short = mavlink_git_version[0:16] header += """ #define MAVLINK_LIB_GIT_VERSION_STR "{mavlink_git_version}" #define MAVLINK_LIB_GIT_VERSION_BINARY 0x{mavlink_git_version_short} """.format(mavlink_git_version=mavlink_git_version, mavlink_git_version_short=mavlink_git_version_short) # NuttX if (os.path.exists('platforms/nuttx/NuttX/nuttx/.git')): nuttx_git_tag = subprocess.check_output('git describe --always --tags --match nuttx-* --dirty'.split(), cwd='platforms/nuttx/NuttX/nuttx', stderr=subprocess.STDOUT).decode('utf-8').strip().replace("nuttx-","v") nuttx_git_tag = re.sub('-.*','.0',nuttx_git_tag) nuttx_git_version = subprocess.check_output('git rev-parse --verify HEAD'.split(), cwd='platforms/nuttx/NuttX/nuttx', stderr=subprocess.STDOUT).decode('utf-8').strip() nuttx_git_version_short = nuttx_git_version[0:16] header += """ #define NUTTX_GIT_VERSION_STR "{nuttx_git_version}" #define NUTTX_GIT_VERSION_BINARY 0x{nuttx_git_version_short} #define NUTTX_GIT_TAG_STR "{nuttx_git_tag}" """.format(nuttx_git_version=nuttx_git_version, nuttx_git_version_short=nuttx_git_version_short, nuttx_git_tag=nuttx_git_tag) if old_header != header: if verbose: print('Updating header {}'.format(filename)) fp_header = open(filename, 'w') fp_header.write(header)<|fim▁end|>
<|file_name|>cfg.unpack.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: UTF-8, tab-width: 4 -*- # Python Coding Style: http://docs.python.org/tutorial/controlflow.html#intermezzo-coding-style # Command Line Arguments Parser: http://docs.python.org/library/argparse.html from __future__ import division from sys import argv, stdout, stderr from codecs import open as cfopen <|fim▁hole|> if len(cli_args) < 1: raise ValueError('not enough parameters. required: ConfigFileName') cfg_fn = cli_args[0] cfg_realms = { 'app': {}, 'srv': {}, } for cfg_ln in cfopen(cfg_fn, 'r', HEIDI_CHARSET): cfg_key, cfg_fmt, cfg_value = cfg_ln.rstrip().split('<|||>', 2) cfg_realm, cfg_sect, cfg_key = split_cfg_key(cfg_key) if (cfg_realm, cfg_key) == ('srv', 'Password'): cfg_value = decode_heidi_password(cfg_value) cfg_realm = cfg_realms[cfg_realm] sect_dict = cfg_realm.get(cfg_sect) if sect_dict is None: sect_dict = cfg_realm[cfg_sect] = {} sect_dict[cfg_key] = cfg_fmt + '|' + cfg_value ini_fn = 'heidisql.ini' write_ini(cfopen(ini_fn, 'w', INI_CHARSET), cfg_realms['app']) for cfg_sect, sect_dict in cfg_realms['srv'].items(): ini_fn = sanitize_file_name(cfg_sect).lower() + '.ini' write_ini(cfopen(ini_fn, 'w', INI_CHARSET), { cfg_sect: sect_dict }) def write_ini(dest, ini): for sect_name in sorted(ini.keys()): sect_dict = ini[sect_name] dest.write('[' + sect_name + ']\n') for opt_name in sorted(sect_dict.keys()): opt_value = sect_dict[opt_name] dest.write(opt_name + '=' + opt_value + '\n') dest.write('\n') def split_at_first_nonalpha(idstr, defaultPrefix=None): for pos, chr in enumerate(idstr): if not chr.isalpha(): pos += 1 return idstr[0:pos], idstr[pos:] return defaultPrefix, idstr def split_cfg_key(key): if key.startswith('Servers\\'): sect, key = key.split('\\', 2)[1:] return 'srv', sect, key form_part = key.split('.', 1) if len(form_part) == 2: # [u'ColPositions_connform', u'ListSessions'] if form_part[0].lower().endswith('form'): form_prop, form_part = form_part form_prop 
= form_prop.split('_') if len(form_prop) == 2: # [u'ColPositions', u'connform'] form_prop, form_name = form_prop sect = form_name key = form_part + '.' + form_prop return 'app', sect, key return 'app', 'HeidiSQL', key def decode_heidi_password(obfus): obfus, caesar_key = obfus[:-1], obfus[-1:] caesar_key = -int(caesar_key, 16) clean = '' while obfus != '': cnum, obfus = obfus[:2], obfus[2:] cnum = int(cnum, 16) cnum += caesar_key char = None if (31 < cnum) and (cnum < 127): char = chr(cnum) if char in ('\\', '"', "'"): char = None if char is None: char = '\\u00' + hex(cnum).replace('0x', '00')[-2:] # print cnum, hex(cnum), char clean += char return '"' + clean + '"' def sanitize_file_name(wild): sane = '' for char in wild: # print repr(char), if char.isalnum() or (char in '@-'): if repr(char)[2:-1] != char: # this alnum might be too fancy for some file systems. continue sane += char continue # if char.isspace(): char = '_' if not sane.endswith(char): sane += char # print repr(sane) return sane if __name__ == '__main__': main(*argv)<|fim▁end|>
def main(invocation, *cli_args): HEIDI_CHARSET = 'UTF-8' INI_CHARSET = 'UTF-8-sig'
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import json from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from . import models from pdc.apps.common.serializers import StrictSerializerMixin, DynamicFieldsSerializerMixin class DefaultFilenameGenerator(object): doc_format = '{name}-{version}-{release}.{arch}.rpm' def __call__(self): return models.RPM.default_filename(self.field.parent.initial_data) def set_context(self, field): self.field = field class DependencySerializer(serializers.BaseSerializer): doc_format = '{ "recommends": ["string"], "suggests": ["string"], "obsoletes": ["string"],' \ '"provides": ["string"], "conflicts": ["string"], "requires": ["string"] }' def to_representation(self, deps): return deps def to_internal_value(self, data): choices = dict([(y, x) for (x, y) in models.Dependency.DEPENDENCY_TYPE_CHOICES]) result = [] for key in data: if key not in choices: raise serializers.ValidationError('<{}> is not a known dependency type.'.format(key)) type = choices[key] if not isinstance(data[key], list): raise serializers.ValidationError('Value for <{}> is not a list.'.format(key)) result.extend([self._dep_to_internal(type, key, dep) for dep in data[key]]) return result def _dep_to_internal(self, type, human_type, data): if not isinstance(data, basestring): raise serializers.ValidationError('Dependency <{}> for <{}> is not a string.'.format(data, human_type)) m = models.Dependency.DEPENDENCY_PARSER.match(data) if not m: raise serializers.ValidationError('Dependency <{}> for <{}> has bad format.'.format(data, human_type)) groups = m.groupdict() return models.Dependency(name=groups['name'], type=type, comparison=groups.get('op'), version=groups.get('version')) class RPMSerializer(StrictSerializerMixin, DynamicFieldsSerializerMixin, serializers.ModelSerializer): filename = 
serializers.CharField(default=DefaultFilenameGenerator()) linked_releases = serializers.SlugRelatedField(many=True, slug_field='release_id', queryset=models.Release.objects.all(), required=False) linked_composes = serializers.SlugRelatedField(read_only=True, slug_field='compose_id', many=True) dependencies = DependencySerializer(required=False, default={}) class Meta: model = models.RPM fields = ('id', 'name', 'version', 'epoch', 'release', 'arch', 'srpm_name', 'srpm_nevra', 'filename', 'linked_releases', 'linked_composes', 'dependencies') def create(self, validated_data): dependencies = validated_data.pop('dependencies', []) instance = super(RPMSerializer, self).create(validated_data) for dep in dependencies: dep.rpm = instance dep.save() return instance def update(self, instance, validated_data): dependencies = validated_data.pop('dependencies', None) instance = super(RPMSerializer, self).update(instance, validated_data) if dependencies is not None or not self.partial: models.Dependency.objects.filter(rpm=instance).delete() for dep in dependencies or []: dep.rpm = instance dep.save() return instance class ImageSerializer(StrictSerializerMixin, serializers.ModelSerializer): image_format = serializers.SlugRelatedField(slug_field='name', queryset=models.ImageFormat.objects.all()) image_type = serializers.SlugRelatedField(slug_field='name', queryset=models.ImageType.objects.all()) composes = serializers.SlugRelatedField(read_only=True, slug_field='compose_id', many=True) class Meta: model = models.Image fields = ('file_name', 'image_format', 'image_type', 'disc_number', 'disc_count', 'arch', 'mtime', 'size', 'bootable', 'implant_md5', 'volume_id', 'md5', 'sha1', 'sha256', 'composes') class RPMRelatedField(serializers.RelatedField): def to_representation(self, value): return unicode(value) def to_internal_value(self, data): request = self.context.get('request', None) if isinstance(data, dict): required_data = {} errors = {} for field in ['name', 'epoch', 'version', 
'release', 'arch', 'srpm_name']: try: required_data[field] = data[field] except KeyError: errors[field] = 'This field is required.' if errors: raise serializers.ValidationError(errors) # NOTE(xchu): pop out fields not in unique_together required_data.pop('srpm_name') try: rpm = models.RPM.objects.get(**required_data) except (models.RPM.DoesNotExist, models.RPM.MultipleObjectsReturned): serializer = RPMSerializer(data=data, context={'request': request}) if serializer.is_valid(): rpm = serializer.save() model_name = ContentType.objects.get_for_model(rpm).model if request and request.changeset: request.changeset.add(model_name, rpm.id, 'null', json.dumps(rpm.export())) return rpm else: raise serializers.ValidationError(serializer.errors) except Exception as err: raise serializers.ValidationError("Can not get or create RPM with your input(%s): %s." % (data, err)) else: return rpm else: raise serializers.ValidationError("Unsupported RPM input.") class ArchiveSerializer(StrictSerializerMixin, serializers.ModelSerializer): class Meta: model = models.Archive fields = ('build_nvr', 'name', 'size', 'md5') class ArchiveRelatedField(serializers.RelatedField): def to_representation(self, value): serializer = ArchiveSerializer(value) return serializer.data def to_internal_value(self, data): request = self.context.get('request', None) if isinstance(data, dict): required_data = {} errors = {} for field in ['build_nvr', 'name', 'size', 'md5']: try: required_data[field] = data[field] except KeyError: errors[field] = 'This field is required.' 
if errors: raise serializers.ValidationError(errors) # NOTE(xchu): pop out fields not in unique_together required_data.pop('size') try: archive = models.Archive.objects.get(**required_data) except (models.Archive.DoesNotExist, models.Archive.MultipleObjectsReturned):<|fim▁hole|> model_name = ContentType.objects.get_for_model(archive).model if request and request.changeset: request.changeset.add(model_name, archive.id, 'null', json.dumps(archive.export())) return archive else: raise serializers.ValidationError(serializer.errors) except Exception as err: raise serializers.ValidationError("Can not get or create Archive with your input(%s): %s." % (data, err)) else: return archive else: raise serializers.ValidationError("Unsupported Archive input.") class BuildImageSerializer(StrictSerializerMixin, serializers.HyperlinkedModelSerializer): image_format = serializers.SlugRelatedField(slug_field='name', queryset=models.ImageFormat.objects.all()) rpms = RPMRelatedField(many=True, read_only=False, queryset=models.RPM.objects.all(), required=False) archives = ArchiveRelatedField(many=True, read_only=False, queryset=models.Archive.objects.all(), required=False) releases = serializers.SlugRelatedField(many=True, slug_field='release_id', queryset=models.Release.objects.all(), required=False) class Meta: model = models.BuildImage fields = ('url', 'image_id', 'image_format', 'md5', 'rpms', 'archives', 'releases')<|fim▁end|>
serializer = ArchiveSerializer(data=data, context={'request': request}) if serializer.is_valid(): archive = serializer.save()
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use system::OS; mod manager; pub mod scoped; // #[macro_use]mod local; // pub use self::local::{LocalKey, LocalKeyState}; /// Cooperatively gives up a timeslice to the OS scheduler. pub fn yield_now() { OS::yield_thread() } /// Blocks unless or until the current thread's token is made available. /// /// Every thread is equipped with some basic low-level blocking support, via /// the `park()` function and the [`unpark()`][unpark] method. These can be /// used as a more CPU-efficient implementation of a spinlock. /// /// [unpark]: struct.Thread.html#method.unpark /// /// The API is typically used by acquiring a handle to the current thread, /// placing that handle in a shared data structure so that other threads can /// find it, and then parking (in a loop with a check for the token actually /// being acquired). /// /// A call to `park` does not guarantee that the thread will remain parked /// forever, and callers should be prepared for this possibility. ///<|fim▁hole|>/// [thread]: index.html // The implementation currently uses the trivial strategy of a Mutex+Condvar // with wakeup flag, which does not actually allow spurious wakeups. In the // future, this will be implemented in a more efficient way, perhaps along the lines of // http://cr.openjdk.java.net/~stefank/6989984.1/raw_files/new/src/os/linux/vm/os_linux.cpp // or futuxes, and in either case may allow spurious wakeups. pub fn park() { OS::suspend_thread(OS::get_current_thread()); } pub use self::manager::{collect, current, JoinHandle, Thread, spawn};<|fim▁end|>
/// See the [module documentation][thread] for more detail. ///
<|file_name|>PaymentProcessingViewModel.js<|end_file_name|><|fim▁begin|><|fim▁hole|> 'model': 'PaymentProcessingModel' });<|fim▁end|>
Ext.define('Onlineshopping.onlineshopping.shared.shop.viewmodel.retail.PaymentProcessingViewModel', { 'extend': 'Ext.app.ViewModel', 'alias': 'viewmodel.PaymentProcessingViewModel',
<|file_name|>Test.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package OptimizationTests.ShortMethodsInliningNonVirtualInvokes.InvokeSuperAObjectThrowNullGet_001; // The test checks that stack after NullPointerException occurs is correct despite inlining class Test extends SuperTest { Test(int iterations) { super(iterations); } public Foo getThingies(Foo[] arr, int i) { return super.getThingies(arr, i); } public void setThingies(Foo[] arr, Foo newThingy, int i) { super.setThingies(arr, newThingy, i);<|fim▁hole|> } }<|fim▁end|>
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>//! The Stream type. //! //! Streams provide a composable way to handle events that's focused on data instead of callbacks. //! You can think of it as a data processing pipeline. Streams do all their work on the sender side, //! so they're "eager". //! //! A stream chain begins with a `Sink` that receives the input values and can send those values to //! multiple streams. Operations applied to a `Stream` are applied to all the values that pass //! through it. The result of a stream chain be viewed with the `Stream::observe` method or stored //! on a `Signal`. //! All the objects that result from stream operations contain an internal reference to it's parent, //! so dropping intermediate temporary streams (like the ones created from chaining methods) won't //! break the chain. //! //! This implementation of Stream distributes the data as `MaybeOwned<T>` values to avoid //! unnecessary cloning, so the first observers will receive a `MaybeOwned::Borrowed` value, and the //! last one will receive a`MaybeOwned::Owned`. This also allows sending values as a reference with //! an arbitrary lifetime, not just `&'static` refs. //! //! # Example //! ``` //! use frappe::Sink; //! //! let sink1 = Sink::new(); //! let sink2 = Sink::new(); //! let stream = sink1.stream().map(|x| *x + 1) //! .merge(&sink2.stream().map(|x| *x * 2)); //! let signal = stream.hold(0); //! //! sink1.send(10); //! assert_eq!(signal.sample(), 11); //! //! sink2.send(10); //! assert_eq!(signal.sample(), 20); //! 
``` use crate::futures::StreamFuture; use crate::helpers::arc_and_weak; use crate::signal::Signal; use crate::sync::Mutex; use crate::types::{Callbacks, MaybeOwned, ObserveResult, Storage, SumType2}; use std::any::Any; use std::collections::VecDeque; use std::ops::{Bound, RangeBounds}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; #[cfg(feature = "either")] use crate::types::Either; /// A source of events that feeds the streams connected to it. #[derive(Debug)] pub struct Sink<T> { cbs: Arc<Callbacks<T>>, } impl<T> Sink<T> { /// Creates a new sink. pub fn new() -> Self { Sink { cbs: Default::default(), } } /// Creates a stream that receives the events sent to this sink. pub fn stream(&self) -> Stream<T> { Stream::new(self.cbs.clone(), Source::None) } /// Sends a value into the sink. /// /// The value will be distributed `N-1` times as reference and then one time by value, /// where `N` is the amount of streams connected to this sink. #[inline] pub fn send<'a>(&self, val: impl Into<MaybeOwned<'a, T>>) where T: 'a, { self.cbs.call(val) } /// Sends multiple values into the sink. #[inline] pub fn feed<'a, I, U>(&self, iter: I) where I: IntoIterator<Item = U>, U: Into<MaybeOwned<'a, T>>, T: 'a, { for val in iter { self.send(val) } } /// Sends a value using multiple threads. /// /// This method sends a value to each of the Sink's connected streams simultaneously by spawning /// a thread for each one, then it waits for all threads to finish. The value is sent by /// reference, so no cloning is done. #[cfg(feature = "crossbeam-utils")] #[inline] pub fn send_parallel(&self, val: &T) where T: Sync, { self.cbs.call_parallel(val) } } impl<T> Default for Sink<T> { /// Creates a new sink. #[inline] fn default() -> Self { Sink::new() } } impl<T> Clone for Sink<T> { /// Creates a copy of this sink that references the same event source. fn clone(&self) -> Self { Sink { cbs: self.cbs.clone(), } } } /// The source object of a Stream. 
/// /// This is used to create a strong reference to a parent stream. #[derive(Debug, Clone)] enum Source { /// No source. None, /// The source is a type-erased object. Usually a stream of a different type. Erased(Arc<dyn Any + Send + Sync>), } impl Source { fn stream<T: 'static>(s: &Stream<T>) -> Self { Source::Erased(Arc::new(s.clone())) } fn stream2<A: 'static, B: 'static>(s1: &Stream<A>, s2: &Stream<B>) -> Self { Source::Erased(Arc::new((s1.clone(), s2.clone()))) } } /// A stream of discrete events sent over time. #[derive(Debug)] pub struct Stream<T> { cbs: Arc<Callbacks<T>>, source: Source, } impl<T> Stream<T> { /// Creates a stream from it's components. fn new(cbs: Arc<Callbacks<T>>, source: Source) -> Self { Stream { cbs, source } } /// Creates a stream that never fires. pub fn never() -> Self { Stream::new(Default::default(), Source::None) } /// Reads the values from the stream. /// /// This method registers a callback that will be called every time a stream event is received. /// It is meant to be used as a debugging tool or as a way to interface with imperative code. /// /// The closure will be dropped when it returns a false-y value (see `ObserveResult`) or when /// the source stream is dropped, so you should avoid calling `Stream::observe` as the last /// step of a stream chain. pub fn observe<F, R>(&self, f: F) where F: Fn(MaybeOwned<'_, T>) -> R + Send + Sync + 'static, R: ObserveResult, { self.cbs.push(move |arg| f(arg).is_callback_alive()); } /// Observes the stream while keeping a reference to it. /// /// This is the same as `Stream::observe`, but it keeps a strong reference to it's source stream, /// so it's safe to call it as the last step of a stream chain. The closure lifetime only depends /// on it's return value. /// /// # Warning /// This creates a cyclic `Arc` reference that can only be broken by the closure signaling it's /// deletion (via `ObserveResult`), so if the closure never unregisters itself it will leak memory. 
pub fn observe_strong<F, R>(&self, f: F) where F: Fn(MaybeOwned<'_, T>) -> R + Send + Sync + 'static, T: 'static, R: ObserveResult, { let this = self.clone(); self.cbs.push(move |arg| { let _keepalive = &this; f(arg).is_callback_alive() }); } /// Chainable version of `Stream::observe`. #[inline] pub fn inspect<F, R>(self, f: F) -> Self where F: Fn(MaybeOwned<'_, T>) -> R + Send + Sync + 'static, R: ObserveResult, { self.observe(f); self } } impl<T: 'static> Stream<T> { /// Maps this stream into another stream using the provided function. /// /// The closure will be called every time a stream event is received. #[inline] pub fn map<F, R>(&self, f: F) -> Stream<R> where F: Fn(MaybeOwned<'_, T>) -> R + Send + Sync + 'static, R: 'static, { self.filter_map(move |arg| Some(f(arg))) } /// Creates a new stream that only contains the values where the predicate is `true`. pub fn filter<F>(&self, pred: F) -> Self where F: Fn(&T) -> bool + Send + Sync + 'static, { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); self.cbs.push(move |arg| { with_weak!(weak, |cb| if pred(&arg) { cb.call(arg) }) }); Stream::new(new_cbs, Source::stream(self)) } /// Does filter and map on a stream simultaneously. /// /// The output stream will only contain the unwrapped `Some` values returned by the closure. pub fn filter_map<F, R>(&self, f: F) -> Stream<R> where F: Fn(MaybeOwned<'_, T>) -> Option<R> + Send + Sync + 'static, R: 'static, { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); self.cbs.push(move |arg| { with_weak!(weak, |cb| if let Some(val) = f(arg) { cb.call(val) }) }); Stream::new(new_cbs, Source::stream(self)) } /// Creates a new stream that fires with the events from both streams. 
pub fn merge(&self, other: &Stream<T>) -> Self { let (new_cbs, weak1) = arc_and_weak(Callbacks::new()); let weak2 = weak1.clone(); self.cbs .push(move |arg| with_weak!(weak1, |cb| cb.call(arg))); other .cbs .push(move |arg| with_weak!(weak2, |cb| cb.call(arg))); Stream::new(new_cbs, Source::stream2(self, other)) } /// Merges two streams of different types using two functions. /// /// The first function will be called when receiving events on `self`, and the second one /// when receiving events from `other`. Their combined values will be used to form a /// stream of a single type. pub fn merge_with<U, F1, F2, R>(&self, other: &Stream<U>, f1: F1, f2: F2) -> Stream<R> where F1: Fn(MaybeOwned<'_, T>) -> R + Send + Sync + 'static, F2: Fn(MaybeOwned<'_, U>) -> R + Send + Sync + 'static, U: 'static, R: 'static, { let (new_cbs, weak1) = arc_and_weak(Callbacks::new()); let weak2 = weak1.clone(); self.cbs .push(move |arg| with_weak!(weak1, |cb| cb.call(f1(arg)))); other .cbs .push(move |arg| with_weak!(weak2, |cb| cb.call(f2(arg)))); Stream::new(new_cbs, Source::stream2(self, other)) } /// Merges two streams of different types using a single function that takes an `Either` argument. /// /// Events from `self` will produce an `Either::Left`, and events from `other` will produce /// an `Either::Right`. #[cfg(feature = "either")] #[inline] pub fn merge_with_either<U, F, R>(&self, other: &Stream<U>, f: F) -> Stream<R> where F: Fn(Either<MaybeOwned<'_, T>, MaybeOwned<'_, U>>) -> R + Clone + Send + Sync + 'static, U: 'static, R: 'static, { let f_ = f.clone(); self.merge_with( other, move |a| f(Either::Left(a)), move |b| f_(Either::Right(b)), ) } /// Accumulates the values sent over this stream. /// /// The fold operation is done by taking the accumulator, consuming it's value, and then /// putting back the transformed value. 
This avoids cloning, but if the closure panics it will /// leave the storage empty, and then any sampling attempt on this object will panic until /// someone puts back a value on it. /// If this is undesirable, use `Stream::fold_clone` instead. pub fn fold<A, F>(&self, initial: A, f: F) -> Signal<A> where F: Fn(A, MaybeOwned<'_, T>) -> A + Send + Sync + 'static, A: Clone + Send + Sync + 'static, { let (storage, weak) = arc_and_weak(Storage::new(initial)); self.cbs.push(move |arg| { with_weak!(weak, |st| { st.replace(|old| f(old, arg)); }) }); Signal::from_storage(storage, self.clone()) } /// Folds the stream by cloning the accumulator. /// /// This does the same as `Stream::fold` but it will clone the accumulator on every value /// processed. If the closure panics, the storage will remain unchanged and later attempts at /// sampling will succeed like nothing happened. pub fn fold_clone<A, F>(&self, initial: A, f: F) -> Signal<A> where F: Fn(A, MaybeOwned<'_, T>) -> A + Send + Sync + 'static, A: Clone + Send + Sync + 'static, { let (storage, weak) = arc_and_weak(Storage::new(initial)); self.cbs.push(move |arg| { with_weak!(weak, |st| { st.replace_clone(|old| f(old, arg)); }) }); Signal::from_storage(storage, self.clone()) } /// Maps each stream event to `0..N` output values. /// /// On every stream event received the closure must return its value by sending it through the /// provided Sender. Multiple values (or none) can be sent to the output stream this way. /// /// This primitive is useful to construct asynchronous operations, since you can store the /// Sender and then use it when the data is ready. pub fn map_n<F, R>(&self, f: F) -> Stream<R> where F: Fn(MaybeOwned<'_, T>, Sender<R>) + Send + Sync + 'static, R: 'static, { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); self.cbs .push(move |arg| with_weak!(weak, |cb| f(arg, Sender::new(cb)))); Stream::new(new_cbs, Source::stream(self)) } /// Folds the stream and returns the accumulator values as a stream. 
/// /// This is the equivalent of doing `stream.fold(initial, f).snapshot(&stream, |a, _| a)`, /// but more efficient. pub fn scan<A, F>(&self, initial: A, f: F) -> Stream<A> where F: Fn(A, MaybeOwned<'_, T>) -> A + Send + Sync + 'static, A: Clone + Send + Sync + 'static, { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); let storage = Storage::new(initial); self.cbs.push(move |arg| { with_weak!(weak, |cb| { let new = storage.replace_fetch(|old| f(old, arg)); cb.call(new) }) }); Stream::new(new_cbs, Source::stream(self)) } /// Folds the stream and returns `0..N` output values. /// /// The closure must process the input state `A`, send a value to the output stream using the /// provided Sender and then return a new state. Multiple values (or none) can be sent to the /// output stream this way. pub fn scan_n<A, F, R>(&self, initial: A, f: F) -> Stream<R> where F: Fn(A, MaybeOwned<'_, T>, Sender<R>) -> A + Send + Sync + 'static, A: Send + Sync + 'static, R: 'static, { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); let storage = Storage::new(initial); self.cbs.push(move |arg| { with_weak!(weak, |cb| storage.replace(|old| f( old, arg, Sender::new(cb) ))) }); Stream::new(new_cbs, Source::stream(self)) } /// Creates a collection from the values sent to this stream. #[inline] pub fn collect<C>(&self) -> Signal<C> where C: Default + Extend<T> + Clone + Send + Sync + 'static, T: Clone, { self.fold(C::default(), |mut a, v| { a.extend(Some(v.into_owned())); a }) } /// Returns a stream that contains only the Nth value from the input stream. 
pub fn element_at(&self, index: usize) -> Self { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); let pos = AtomicUsize::new(0); self.cbs.push(move |arg| { weak.upgrade() .map(|cb| { let cur_pos = pos.fetch_add(1, Ordering::Relaxed); if cur_pos == index { cb.call(arg); } cur_pos < index // drop the callback after we're done }) .unwrap_or(false) }); Stream::new(new_cbs, Source::stream(self)) } /// Returns a stream that contains the values with index in the specified range. pub fn elements_between<B>(&self, range: B) -> Self where B: RangeBounds<usize> + Send + Sync + 'static, { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); let pos = AtomicUsize::new(0); self.cbs.push(move |arg| { weak.upgrade() .map(|cb| { let cur_pos = pos.fetch_add(1, Ordering::Relaxed); let after_start = match range.start_bound() { Bound::Included(s) => cur_pos >= *s, Bound::Excluded(s) => cur_pos > *s, Bound::Unbounded => true, }; let before_end = match range.end_bound() { Bound::Included(e) => cur_pos <= *e, Bound::Excluded(e) => cur_pos < *e, Bound::Unbounded => true, }; if after_start && before_end { cb.call(arg) } before_end // drop the callback after we're past the end }) .unwrap_or(false) }); Stream::new(new_cbs, Source::stream(self)) } }<|fim▁hole|> pub fn hold(&self, initial: T) -> Signal<T> where T: Sync, { self.hold_if(initial, |_| true) } /// Holds the last value in this stream where the predicate is `true`. pub fn hold_if<F>(&self, initial: T, pred: F) -> Signal<T> where F: Fn(&T) -> bool + Send + Sync + 'static, T: Sync, { let (storage, weak) = arc_and_weak(Storage::new(initial)); self.cbs.push(move |arg| { with_weak!(weak, |st| if pred(&arg) { st.set(arg.into_owned()); }) }); Signal::from_storage(storage, self.clone()) } /// Collects all pairs of values from two streams. /// /// This creates a Stream of tuples containing each of `self`'s values and `other`'s values in /// chronological order. 
An unique value from both streams is required to send a result to the /// output stream. #[inline] pub fn zip<U>(&self, other: &Stream<U>) -> Stream<(T, U)> where U: Clone + Send + 'static, { self.zip_with(other, |a, b| (a, b)) } /// Zips two streams using a custom function. pub fn zip_with<U, F, R>(&self, other: &Stream<U>, f: F) -> Stream<R> where F: Fn(T, U) -> R + Clone + Send + Sync + 'static, U: Clone + Send + 'static, R: 'static, { let (new_cbs, weak1) = arc_and_weak(Callbacks::new()); let weak2 = weak1.clone(); let left = Arc::new(Mutex::new(VecDeque::new())); let right = Arc::new(Mutex::new(VecDeque::new())); let left1 = left.clone(); let right1 = right.clone(); let f_ = f.clone(); self.cbs.push(move |arg| { with_weak!(weak1, |cb| if let Some(val) = right1.lock().pop_front() { cb.call(f(arg.into_owned(), val)); } else { left.lock().push_back(arg.into_owned()); }) }); other.cbs.push(move |arg| { with_weak!(weak2, |cb| if let Some(val) = left1.lock().pop_front() { cb.call(f_(val, arg.into_owned())); } else { right.lock().push_back(arg.into_owned()); }) }); Stream::new(new_cbs, Source::stream2(self, other)) } /// Collects pairs of values from two streams using their last value seen. /// /// This creates a Stream that sends the last value of `self` and `other` when either of those /// receives a value. The stream values before calling this function aren't known, so to send /// the first output value it's required that both input streams send their initial value. #[inline] pub fn combine<U>(&self, other: &Stream<U>) -> Stream<(T, U)> where U: Clone + Send + 'static, { self.combine_with(other, |a, b| (a, b)) } /// Combines two streams using a custom function. 
pub fn combine_with<U, F, R>(&self, other: &Stream<U>, f: F) -> Stream<R> where F: Fn(T, U) -> R + Clone + Send + Sync + 'static, U: Clone + Send + 'static, R: 'static, { let (new_cbs, weak1) = arc_and_weak(Callbacks::new()); let weak2 = weak1.clone(); let left = Arc::new(Mutex::new(None)); let right = Arc::new(Mutex::new(None)); let left1 = left.clone(); let right1 = right.clone(); let f_ = f.clone(); self.cbs.push(move |arg| { with_weak!(weak1, |cb| { let arg = arg.into_owned(); *left.lock() = Some(arg.clone()); if let Some(val) = right1.lock().as_ref() { cb.call(f(arg, U::clone(val))); } }) }); other.cbs.push(move |arg| { with_weak!(weak2, |cb| { let arg = arg.into_owned(); *right.lock() = Some(arg.clone()); if let Some(val) = left1.lock().as_ref() { cb.call(f_(T::clone(val), arg)); } }) }); Stream::new(new_cbs, Source::stream2(self, other)) } /// Creates a future that returns the next value sent to this stream. #[inline] pub fn next(&self) -> StreamFuture<T> { StreamFuture::new(self.clone()) } } impl<T: Clone + 'static> Stream<Option<T>> { /// Filters a stream of `Option`, returning only the unwrapped `Some` values. #[inline] pub fn filter_some(&self) -> Stream<T> { self.filter_first() } } impl<T: Clone + 'static, E: Clone + 'static> Stream<Result<T, E>> { /// Filters a stream of `Result`, returning only the unwrapped `Ok` values. #[inline] pub fn filter_ok(&self) -> Stream<T> { self.filter_first() } /// Filters a stream of `Result`, returning only the unwrapped `Err` values. #[inline] pub fn filter_err(&self) -> Stream<E> { self.filter_second() } } impl<T: SumType2 + Clone + 'static> Stream<T> where T::Type1: 'static, T::Type2: 'static, { /// Creates a stream with only the first element of a sum type. pub fn filter_first(&self) -> Stream<T::Type1> { self.filter_map(|res| { if res.is_type1() { res.into_owned().into_type1() } else { None } }) } /// Creates a stream with only the second element of a sum type. 
pub fn filter_second(&self) -> Stream<T::Type2> { self.filter_map(|res| { if res.is_type2() { res.into_owned().into_type2() } else { None } }) } /// Splits a two element sum type stream into two streams with the unwrapped values. pub fn split(&self) -> (Stream<T::Type1>, Stream<T::Type2>) { let (cbs_1, weak_1) = arc_and_weak(Callbacks::new()); let (cbs_2, weak_2) = arc_and_weak(Callbacks::new()); self.cbs.push(move |result| { if result.is_type1() { if let Some(cb) = weak_1.upgrade() { cb.call(result.into_owned().into_type1().unwrap()); true } else { // drop callback if both output streams dropped weak_2.upgrade().is_some() } } else // if result.is_type2() { if let Some(cb) = weak_2.upgrade() { cb.call(result.into_owned().into_type2().unwrap()); true } else { weak_1.upgrade().is_some() } } }); let source = Source::stream(self); let stream_1 = Stream::new(cbs_1, source.clone()); let stream_2 = Stream::new(cbs_2, source); (stream_1, stream_2) } } impl<T: 'static> Stream<Stream<T>> { /// Listens to the events from the last stream sent to a nested stream. pub fn switch(&self) -> Stream<T> { let (new_cbs, weak) = arc_and_weak(Callbacks::new()); let id = Arc::new(AtomicUsize::new(0)); // id of each stream sent self.cbs.push(move |stream| { if weak.upgrade().is_none() { return false; } let cbs_w = weak.clone(); let cur_id = id.clone(); // increment the id so it will only send to the last stream let my_id = id.fetch_add(1, Ordering::Relaxed) + 1; // redirect the inner stream to the output stream stream.cbs.push(move |arg| { if my_id != cur_id.load(Ordering::Relaxed) { return false; } with_weak!(cbs_w, |cb| cb.call(arg)) }); true }); Stream::new(new_cbs, Source::stream(self)) } } impl<T> Clone for Stream<T> { /// Creates a copy of this stream that references the same event chain. fn clone(&self) -> Self { Stream { cbs: self.cbs.clone(), source: self.source.clone(), } } } impl<T> Default for Stream<T> { /// Creates a stream that never fires. 
#[inline] fn default() -> Self { Stream::never() } } /// Sends values into a stream. /// /// This is a restricted version of `Sink` used by `Stream::map_n` and `Stream::scan_n`. #[derive(Debug)] pub struct Sender<T>(Sink<T>); impl<T> Sender<T> { /// Constructs a new Sender from a list of callbacks. fn new(cbs: Arc<Callbacks<T>>) -> Self { Sender(Sink { cbs }) } /// Sends a value. #[inline] pub fn send(&self, val: T) { self.0.send(val) } /// Sends multiple values. #[inline] pub fn feed(&self, iter: impl IntoIterator<Item = T>) { self.0.feed(iter) } } impl<T> Clone for Sender<T> { /// Creates a copy of this sender that references the same event source. fn clone(&self) -> Self { Sender(self.0.clone()) } } #[cfg(test)] mod tests { use super::*; use std::sync::mpsc; impl<T: Clone + Send + 'static> Stream<T> { /// Creates a sync channel and sends the stream events through it. fn as_sync_channel(&self, bound: usize) -> mpsc::Receiver<T> { let (tx, rx) = mpsc::sync_channel(bound); self.observe(move |arg| tx.send(arg.into_owned())); rx } } #[test] fn stream_basic() { let sink = Sink::new(); let stream = sink.stream(); let rx = stream.as_sync_channel(20); sink.send(42); sink.send(33); sink.send(12); sink.feed(0..5); sink.feed(vec![11, 22, 33]); let result: Vec<_> = rx.try_iter().collect(); assert_eq!(result, [42, 33, 12, 0, 1, 2, 3, 4, 11, 22, 33]); } #[test] fn stream_send_ref() { #[derive(Debug, Clone, PartialEq, Eq)] struct Test(i32); let sink: Sink<Test> = Sink::new(); let stream = sink.stream(); let rx = stream.as_sync_channel(10); { let a = Test(42); let b = [Test(33), Test(-1)]; sink.send(&a); sink.feed(&b); } assert_eq!(rx.try_recv(), Ok(Test(42))); assert_eq!(rx.try_recv(), Ok(Test(33))); assert_eq!(rx.try_recv(), Ok(Test(-1))); } #[test] fn stream_switch() { let stream_sink = Sink::new(); let sink1 = Sink::new(); let sink2 = Sink::new(); let switched = stream_sink.stream().switch(); let events = switched.as_sync_channel(10); sink1.send(1); sink2.send(2); 
stream_sink.send(sink2.stream()); sink1.send(3); sink2.send(4); assert_eq!(events.try_recv(), Ok(4)); stream_sink.send(sink1.stream()); sink1.send(5); sink2.send(6); assert_eq!(events.try_recv(), Ok(5)); } #[test] fn stream_default() { let sink: Sink<i32> = Default::default(); let stream1 = sink.stream(); let stream2: Stream<i32> = Default::default(); let merged = stream1.merge(&stream2); let rx = merged.as_sync_channel(10); sink.send(42); sink.send(13); assert_eq!(rx.try_recv(), Ok(42)); assert_eq!(rx.try_recv(), Ok(13)); } #[test] fn stream_scan() { let sink = Sink::new(); let stream = sink.stream().scan(0, |a, n| a + *n); let rx = stream.as_sync_channel(10); sink.send(1); assert_eq!(rx.try_recv(), Ok(1)); sink.send(2); sink.send(10); assert_eq!(rx.try_recv(), Ok(3)); assert_eq!(rx.try_recv(), Ok(13)); } #[test] fn stream_scan_n() { let sink = Sink::new(); let stream = sink.stream().scan_n(std::i32::MIN, |a, n, sender| { let n = *n; if n > a { sender.send(n); n } else { a } }); let rx = stream.as_sync_channel(10); sink.feed(&[1, 2, -1, 10, 5, 7, 42]); let result: Vec<_> = rx.try_iter().collect(); assert_eq!(result, [1, 2, 10, 42]); } #[test] fn stream_observe_strong() { let sink = Sink::new(); let (tx, rx) = mpsc::sync_channel(10); let (arc, weak) = arc_and_weak(Arc::new(())); sink.stream().map(|x| *x * 2).observe_strong(move |x| { let _a = &arc; tx.send(*x) }); sink.send(6); assert_eq!(rx.try_recv(), Ok(12)); assert!(weak.upgrade().is_some()); drop(rx); sink.send(10); assert_eq!(weak.upgrade(), None); sink.send(42); assert_eq!(sink.cbs.len(), 0); } #[cfg(feature = "crossbeam-utils")] #[test] fn stream_send_parallel() { use std::thread; use std::time::{Duration, Instant}; let sink = Sink::new(); let s1 = sink.stream().map(|x| { thread::sleep(Duration::from_millis(50)); *x + 1 }); let s2 = sink.stream().map(|x| { thread::sleep(Duration::from_millis(50)); *x * 2 }); let result = s1.merge(&s2).fold(0, |a, n| a + *n); let t = Instant::now(); sink.send_parallel(&10); 
assert!(t.elapsed() < Duration::from_millis(100)); assert_eq!(result.sample(), 31); sink.send_parallel(&1); sink.send_parallel(&13); assert_eq!(result.sample(), 75); } #[test] fn stream_zip() { use std::sync::mpsc::TryRecvError::Empty; let sink1: Sink<i32> = Sink::new(); let sink2: Sink<&str> = Sink::new(); let zipped = sink1.stream().zip(&sink2.stream()); let rx = zipped.as_sync_channel(10); sink1.send(1); assert_eq!(rx.try_recv(), Err(Empty)); sink2.send("foo"); assert_eq!(rx.try_recv(), Ok((1, "foo"))); sink2.send("bar"); assert_eq!(rx.try_recv(), Err(Empty)); sink2.send("asd"); sink1.send(2); assert_eq!(rx.try_recv(), Ok((2, "bar"))); } #[test] fn stream_combine() { use std::sync::mpsc::TryRecvError::Empty; let sink1: Sink<i32> = Sink::new(); let sink2: Sink<&str> = Sink::new(); let combined = sink1.stream().combine(&sink2.stream()); let rx = combined.as_sync_channel(10); sink1.send(1); assert_eq!(rx.try_recv(), Err(Empty)); sink2.send("foo"); assert_eq!(rx.try_recv(), Ok((1, "foo"))); sink1.send(2); assert_eq!(rx.try_recv(), Ok((2, "foo"))); sink1.send(3); assert_eq!(rx.try_recv(), Ok((3, "foo"))); } #[test] fn stream_element_at() { use std::sync::mpsc::TryRecvError::Empty; let sink: Sink<i32> = Sink::new(); let stream1 = sink.stream().element_at(0); let stream2 = sink.stream().element_at(2); let stream3 = sink.stream().element_at(13); let rx1 = stream1.as_sync_channel(10); let rx2 = stream2.as_sync_channel(10); let rx3 = stream3.as_sync_channel(10); sink.feed(&[1, 12, 42, 7, 13]); assert_eq!(rx1.try_recv(), Ok(1)); assert_eq!(rx1.try_recv(), Err(Empty)); assert_eq!(rx2.try_recv(), Ok(42)); assert_eq!(rx2.try_recv(), Err(Empty)); assert_eq!(rx3.try_recv(), Err(Empty)); } #[test] fn stream_elements_between() { let sink: Sink<i32> = Sink::new(); let stream1 = sink.stream().elements_between(..3); let stream2 = sink.stream().elements_between(2..=4); let stream3 = sink.stream().elements_between(3..); let rx1 = stream1.as_sync_channel(10); let rx2 = 
stream2.as_sync_channel(10); let rx3 = stream3.as_sync_channel(10); sink.feed(&[1, 12, 42, 7, 13, -6, 22]); let result1: Vec<_> = rx1.try_iter().collect(); let result2: Vec<_> = rx2.try_iter().collect(); let result3: Vec<_> = rx3.try_iter().collect(); assert_eq!(result1, [1, 12, 42]); assert_eq!(result2, [42, 7, 13]); assert_eq!(result3, [7, 13, -6, 22]); } #[cfg(feature = "nightly")] #[test] fn stream_await() { use futures::executor::LocalPool; use futures::task::SpawnExt; use std::thread; use std::time::Duration; let sink = Sink::new(); let future = sink.stream().map(|a| *a * 2).next(); let mut pool = LocalPool::new(); pool.spawner() .spawn(async { let res = future.await; assert_eq!(res, 42); }) .unwrap(); thread::spawn(move || { thread::sleep(Duration::from_millis(100)); sink.send(21); }); pool.run(); } }<|fim▁end|>
impl<T: Clone + Send + 'static> Stream<T> { /// Creates a Signal that holds the last value sent to this stream. #[inline]
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2016-01-20 03:46 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ]<|fim▁hole|> operations = [ migrations.CreateModel( name='Question', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255)), ('slug', models.SlugField(blank=True, max_length=255, null=True)), ('content', models.TextField(blank=True, max_length=4000, null=True)), ('status', models.CharField(choices=[(b'D', b'Draft'), (b'P', b'Published')], default=b'D', max_length=1)), ('create_date', models.DateTimeField(auto_now_add=True)), ('update_date', models.DateTimeField(blank=True, null=True)), ('create_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ('-create_date',), 'verbose_name': 'Question', 'verbose_name_plural': 'Questions', }, ), migrations.CreateModel( name='QuestionComment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('comment', models.CharField(max_length=500)), ('date', models.DateTimeField(auto_now_add=True)), ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='questions.Question')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ('date',), 'verbose_name': 'Question Comment', 'verbose_name_plural': 'Question Comments', }, ), migrations.CreateModel( name='Tag', fields=[ 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tag', models.CharField(max_length=50)), ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='questions.Question')), ], options={ 'verbose_name': 'Tag', 'verbose_name_plural': 'Tags', }, ), migrations.AlterUniqueTogether( name='tag', unique_together=set([('tag', 'question')]), ), migrations.AlterIndexTogether( name='tag', index_together=set([('tag', 'question')]), ), ]<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import requests from allauth.socialaccount import app_settings from allauth.socialaccount.providers.jupyterhub.provider import ( JupyterHubProvider, ) from allauth.socialaccount.providers.oauth2.views import ( OAuth2Adapter, OAuth2CallbackView, OAuth2LoginView, ) class JupyterHubAdapter(OAuth2Adapter): provider_id = JupyterHubProvider.id settings = app_settings.PROVIDERS.get(provider_id, {}) provider_base_url = settings.get("API_URL", '') access_token_url = '{0}/hub/api/oauth2/token'.format(provider_base_url) authorize_url = '{0}/hub/api/oauth2/authorize'.format(provider_base_url) profile_url = '{0}/hub/api/user'.format(provider_base_url) def complete_login(self, request, app, access_token, **kwargs): headers = { 'Authorization': 'Bearer {0}'.format(access_token) } extra_data = requests.get(self.profile_url, headers=headers) user_profile = extra_data.json() return self.get_provider().sociallogin_from_response(<|fim▁hole|> ) oauth2_login = OAuth2LoginView.adapter_view(JupyterHubAdapter) oauth2_callback = OAuth2CallbackView.adapter_view(JupyterHubAdapter)<|fim▁end|>
request, user_profile
<|file_name|>find_bar.py<|end_file_name|><|fim▁begin|>from kivy.uix.stacklayout import StackLayout from kivy.uix.behaviors import DragBehavior from kivy.uix.modalview import ModalView from kivy.core.window import Window from kivy.uix.button import Button from kivy.uix.label import Label from kivy.lang import Builder from kivy.clock import Clock from kivy.uix.stencilview import StencilView Builder.load_string(''' <FindBar>: canvas.before: Color: rgba: col_dgrey Rectangle: size: self.size pos: self.pos TextInput: size_hint: 1, 0.5 TextInput: size_hint: 1, 0.5 ''') <|fim▁hole|><|fim▁end|>
class FindBar(StackLayout, StencilView): def __init__(self, **kwargs): super(FindBar, self).__init__(**kwargs)
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub type __priority_which_t = ::c_uint; s! { pub struct aiocb { pub aio_fildes: ::c_int, pub aio_lio_opcode: ::c_int, pub aio_reqprio: ::c_int, pub aio_buf: *mut ::c_void, pub aio_nbytes: ::size_t, pub aio_sigevent: ::sigevent, __next_prio: *mut aiocb, __abs_prio: ::c_int, __policy: ::c_int, __error_code: ::c_int, __return_value: ::ssize_t, pub aio_offset: off_t, #[cfg(all(not(target_arch = "x86_64"), target_pointer_width = "32"))] __unused1: [::c_char; 4], __glibc_reserved: [::c_char; 32] } pub struct __exit_status { pub e_termination: ::c_short, pub e_exit: ::c_short, } pub struct __timeval { pub tv_sec: ::int32_t, pub tv_usec: ::int32_t, } pub struct utmpx { pub ut_type: ::c_short, pub ut_pid: ::pid_t, pub ut_line: [::c_char; __UT_LINESIZE], pub ut_id: [::c_char; 4], pub ut_user: [::c_char; __UT_NAMESIZE], pub ut_host: [::c_char; __UT_HOSTSIZE], pub ut_exit: __exit_status, #[cfg(any(target_arch = "aarch64", target_arch = "sparc64", all(target_pointer_width = "32", not(target_arch = "x86_64"))))] pub ut_session: ::c_long, #[cfg(any(target_arch = "aarch64", target_arch = "sparc64", all(target_pointer_width = "32", not(target_arch = "x86_64"))))] pub ut_tv: ::timeval, #[cfg(not(any(target_arch = "aarch64", target_arch = "sparc64", all(target_pointer_width = "32", not(target_arch = "x86_64")))))] pub ut_session: ::int32_t, #[cfg(not(any(target_arch = "aarch64", target_arch = "sparc64", all(target_pointer_width = "32", not(target_arch = "x86_64")))))] pub ut_tv: __timeval, pub ut_addr_v6: [::int32_t; 4], __glibc_reserved: [::c_char; 20], } pub struct sigaction { pub sa_sigaction: ::sighandler_t, pub sa_mask: ::sigset_t, #[cfg(target_arch = "sparc64")] __reserved0: ::c_int, pub sa_flags: ::c_int, pub sa_restorer: ::dox::Option<extern fn()>, } pub struct stack_t { pub ss_sp: *mut ::c_void, pub ss_flags: ::c_int, pub ss_size: ::size_t } pub struct siginfo_t { pub si_signo: ::c_int, pub si_errno: ::c_int, pub si_code: 
::c_int, pub _pad: [::c_int; 29], #[cfg(target_arch = "x86_64")] _align: [u64; 0], #[cfg(not(target_arch = "x86_64"))] _align: [usize; 0], } pub struct glob64_t { pub gl_pathc: ::size_t, pub gl_pathv: *mut *mut ::c_char, pub gl_offs: ::size_t, pub gl_flags: ::c_int, __unused1: *mut ::c_void, __unused2: *mut ::c_void, __unused3: *mut ::c_void, __unused4: *mut ::c_void, __unused5: *mut ::c_void, } pub struct ucred { pub pid: ::pid_t, pub uid: ::uid_t, pub gid: ::gid_t, } pub struct statfs { pub f_type: __fsword_t, pub f_bsize: __fsword_t, pub f_blocks: ::fsblkcnt_t, pub f_bfree: ::fsblkcnt_t, pub f_bavail: ::fsblkcnt_t, pub f_files: ::fsfilcnt_t, pub f_ffree: ::fsfilcnt_t, pub f_fsid: ::fsid_t, pub f_namelen: __fsword_t, pub f_frsize: __fsword_t, f_spare: [__fsword_t; 5], } pub struct msghdr {<|fim▁hole|> pub msg_control: *mut ::c_void, pub msg_controllen: ::size_t, pub msg_flags: ::c_int, } pub struct cmsghdr { pub cmsg_len: ::size_t, pub cmsg_level: ::c_int, pub cmsg_type: ::c_int, } pub struct termios { pub c_iflag: ::tcflag_t, pub c_oflag: ::tcflag_t, pub c_cflag: ::tcflag_t, pub c_lflag: ::tcflag_t, pub c_line: ::cc_t, pub c_cc: [::cc_t; ::NCCS], #[cfg(not(target_arch = "sparc64"))] pub c_ispeed: ::speed_t, #[cfg(not(target_arch = "sparc64"))] pub c_ospeed: ::speed_t, } pub struct flock { pub l_type: ::c_short, pub l_whence: ::c_short, pub l_start: ::off_t, pub l_len: ::off_t, pub l_pid: ::pid_t, } // FIXME this is actually a union pub struct sem_t { #[cfg(target_pointer_width = "32")] __size: [::c_char; 16], #[cfg(target_pointer_width = "64")] __size: [::c_char; 32], __align: [::c_long; 0], } } pub const __UT_LINESIZE: usize = 32; pub const __UT_NAMESIZE: usize = 32; pub const __UT_HOSTSIZE: usize = 256; pub const EMPTY: ::c_short = 0; pub const RUN_LVL: ::c_short = 1; pub const BOOT_TIME: ::c_short = 2; pub const NEW_TIME: ::c_short = 3; pub const OLD_TIME: ::c_short = 4; pub const INIT_PROCESS: ::c_short = 5; pub const LOGIN_PROCESS: ::c_short = 6; pub const 
USER_PROCESS: ::c_short = 7; pub const DEAD_PROCESS: ::c_short = 8; pub const ACCOUNTING: ::c_short = 9; pub const RLIMIT_RSS: ::c_int = 5; pub const RLIMIT_AS: ::c_int = 9; pub const RLIMIT_MEMLOCK: ::c_int = 8; pub const RLIM_INFINITY: ::rlim_t = !0; pub const RLIMIT_RTTIME: ::c_int = 15; pub const RLIMIT_NLIMITS: ::c_int = 16; pub const SOCK_NONBLOCK: ::c_int = O_NONBLOCK; pub const SOL_RXRPC: ::c_int = 272; pub const SOL_PPPOL2TP: ::c_int = 273; pub const SOL_BLUETOOTH: ::c_int = 274; pub const SOL_PNPIPE: ::c_int = 275; pub const SOL_RDS: ::c_int = 276; pub const SOL_IUCV: ::c_int = 277; pub const SOL_CAIF: ::c_int = 278; pub const SOL_ALG: ::c_int = 279; pub const SOL_NFC: ::c_int = 280; pub const MSG_TRYHARD: ::c_int = 4; pub const LC_PAPER: ::c_int = 7; pub const LC_NAME: ::c_int = 8; pub const LC_ADDRESS: ::c_int = 9; pub const LC_TELEPHONE: ::c_int = 10; pub const LC_MEASUREMENT: ::c_int = 11; pub const LC_IDENTIFICATION: ::c_int = 12; pub const LC_PAPER_MASK: ::c_int = (1 << LC_PAPER); pub const LC_NAME_MASK: ::c_int = (1 << LC_NAME); pub const LC_ADDRESS_MASK: ::c_int = (1 << LC_ADDRESS); pub const LC_TELEPHONE_MASK: ::c_int = (1 << LC_TELEPHONE); pub const LC_MEASUREMENT_MASK: ::c_int = (1 << LC_MEASUREMENT); pub const LC_IDENTIFICATION_MASK: ::c_int = (1 << LC_IDENTIFICATION); pub const LC_ALL_MASK: ::c_int = ::LC_CTYPE_MASK | ::LC_NUMERIC_MASK | ::LC_TIME_MASK | ::LC_COLLATE_MASK | ::LC_MONETARY_MASK | ::LC_MESSAGES_MASK | LC_PAPER_MASK | LC_NAME_MASK | LC_ADDRESS_MASK | LC_TELEPHONE_MASK | LC_MEASUREMENT_MASK | LC_IDENTIFICATION_MASK; pub const MAP_ANON: ::c_int = 0x0020; pub const MAP_ANONYMOUS: ::c_int = 0x0020; pub const MAP_DENYWRITE: ::c_int = 0x0800; pub const MAP_EXECUTABLE: ::c_int = 0x01000; pub const MAP_POPULATE: ::c_int = 0x08000; pub const MAP_NONBLOCK: ::c_int = 0x010000; pub const MAP_STACK: ::c_int = 0x020000; pub const ENOTSUP: ::c_int = EOPNOTSUPP; pub const EUCLEAN: ::c_int = 117; pub const ENOTNAM: ::c_int = 118; pub const 
ENAVAIL: ::c_int = 119; pub const EISNAM: ::c_int = 120; pub const EREMOTEIO: ::c_int = 121; pub const SOCK_STREAM: ::c_int = 1; pub const SOCK_DGRAM: ::c_int = 2; pub const SOCK_SEQPACKET: ::c_int = 5; pub const SOCK_DCCP: ::c_int = 6; pub const SOCK_PACKET: ::c_int = 10; pub const TCP_COOKIE_TRANSACTIONS: ::c_int = 15; pub const TCP_THIN_LINEAR_TIMEOUTS: ::c_int = 16; pub const TCP_THIN_DUPACK: ::c_int = 17; pub const TCP_USER_TIMEOUT: ::c_int = 18; pub const TCP_REPAIR: ::c_int = 19; pub const TCP_REPAIR_QUEUE: ::c_int = 20; pub const TCP_QUEUE_SEQ: ::c_int = 21; pub const TCP_REPAIR_OPTIONS: ::c_int = 22; pub const TCP_FASTOPEN: ::c_int = 23; pub const TCP_TIMESTAMP: ::c_int = 24; /* DCCP socket options */ pub const DCCP_SOCKOPT_PACKET_SIZE: ::c_int = 1; pub const DCCP_SOCKOPT_SERVICE: ::c_int = 2; pub const DCCP_SOCKOPT_CHANGE_L: ::c_int = 3; pub const DCCP_SOCKOPT_CHANGE_R: ::c_int = 4; pub const DCCP_SOCKOPT_GET_CUR_MPS: ::c_int = 5; pub const DCCP_SOCKOPT_SERVER_TIMEWAIT: ::c_int = 6; pub const DCCP_SOCKOPT_SEND_CSCOV: ::c_int = 10; pub const DCCP_SOCKOPT_RECV_CSCOV: ::c_int = 11; pub const DCCP_SOCKOPT_AVAILABLE_CCIDS: ::c_int = 12; pub const DCCP_SOCKOPT_CCID: ::c_int = 13; pub const DCCP_SOCKOPT_TX_CCID: ::c_int = 14; pub const DCCP_SOCKOPT_RX_CCID: ::c_int = 15; pub const DCCP_SOCKOPT_QPOLICY_ID: ::c_int = 16; pub const DCCP_SOCKOPT_QPOLICY_TXQLEN: ::c_int = 17; pub const DCCP_SOCKOPT_CCID_RX_INFO: ::c_int = 128; pub const DCCP_SOCKOPT_CCID_TX_INFO: ::c_int = 192; /// maximum number of services provided on the same listening port pub const DCCP_SERVICE_LIST_MAX_LEN: ::c_int = 32; pub const SIGTTIN: ::c_int = 21; pub const SIGTTOU: ::c_int = 22; pub const SIGXCPU: ::c_int = 24; pub const SIGXFSZ: ::c_int = 25; pub const SIGVTALRM: ::c_int = 26; pub const SIGPROF: ::c_int = 27; pub const SIGWINCH: ::c_int = 28; pub const SIGEV_THREAD_ID: ::c_int = 4; pub const BUFSIZ: ::c_uint = 8192; pub const TMP_MAX: ::c_uint = 238328; pub const FOPEN_MAX: ::c_uint = 
16; pub const POSIX_FADV_DONTNEED: ::c_int = 4; pub const POSIX_FADV_NOREUSE: ::c_int = 5; pub const POSIX_MADV_DONTNEED: ::c_int = 4; pub const _SC_EQUIV_CLASS_MAX: ::c_int = 41; pub const _SC_CHARCLASS_NAME_MAX: ::c_int = 45; pub const _SC_PII: ::c_int = 53; pub const _SC_PII_XTI: ::c_int = 54; pub const _SC_PII_SOCKET: ::c_int = 55; pub const _SC_PII_INTERNET: ::c_int = 56; pub const _SC_PII_OSI: ::c_int = 57; pub const _SC_POLL: ::c_int = 58; pub const _SC_SELECT: ::c_int = 59; pub const _SC_PII_INTERNET_STREAM: ::c_int = 61; pub const _SC_PII_INTERNET_DGRAM: ::c_int = 62; pub const _SC_PII_OSI_COTS: ::c_int = 63; pub const _SC_PII_OSI_CLTS: ::c_int = 64; pub const _SC_PII_OSI_M: ::c_int = 65; pub const _SC_T_IOV_MAX: ::c_int = 66; pub const _SC_2_C_VERSION: ::c_int = 96; pub const _SC_CHAR_BIT: ::c_int = 101; pub const _SC_CHAR_MAX: ::c_int = 102; pub const _SC_CHAR_MIN: ::c_int = 103; pub const _SC_INT_MAX: ::c_int = 104; pub const _SC_INT_MIN: ::c_int = 105; pub const _SC_LONG_BIT: ::c_int = 106; pub const _SC_WORD_BIT: ::c_int = 107; pub const _SC_MB_LEN_MAX: ::c_int = 108; pub const _SC_SSIZE_MAX: ::c_int = 110; pub const _SC_SCHAR_MAX: ::c_int = 111; pub const _SC_SCHAR_MIN: ::c_int = 112; pub const _SC_SHRT_MAX: ::c_int = 113; pub const _SC_SHRT_MIN: ::c_int = 114; pub const _SC_UCHAR_MAX: ::c_int = 115; pub const _SC_UINT_MAX: ::c_int = 116; pub const _SC_ULONG_MAX: ::c_int = 117; pub const _SC_USHRT_MAX: ::c_int = 118; pub const _SC_NL_ARGMAX: ::c_int = 119; pub const _SC_NL_LANGMAX: ::c_int = 120; pub const _SC_NL_MSGMAX: ::c_int = 121; pub const _SC_NL_NMAX: ::c_int = 122; pub const _SC_NL_SETMAX: ::c_int = 123; pub const _SC_NL_TEXTMAX: ::c_int = 124; pub const _SC_BASE: ::c_int = 134; pub const _SC_C_LANG_SUPPORT: ::c_int = 135; pub const _SC_C_LANG_SUPPORT_R: ::c_int = 136; pub const _SC_DEVICE_IO: ::c_int = 140; pub const _SC_DEVICE_SPECIFIC: ::c_int = 141; pub const _SC_DEVICE_SPECIFIC_R: ::c_int = 142; pub const _SC_FD_MGMT: ::c_int = 143; pub 
const _SC_FIFO: ::c_int = 144; pub const _SC_PIPE: ::c_int = 145; pub const _SC_FILE_ATTRIBUTES: ::c_int = 146; pub const _SC_FILE_LOCKING: ::c_int = 147; pub const _SC_FILE_SYSTEM: ::c_int = 148; pub const _SC_MULTI_PROCESS: ::c_int = 150; pub const _SC_SINGLE_PROCESS: ::c_int = 151; pub const _SC_NETWORKING: ::c_int = 152; pub const _SC_REGEX_VERSION: ::c_int = 156; pub const _SC_SIGNALS: ::c_int = 158; pub const _SC_SYSTEM_DATABASE: ::c_int = 162; pub const _SC_SYSTEM_DATABASE_R: ::c_int = 163; pub const _SC_USER_GROUPS: ::c_int = 166; pub const _SC_USER_GROUPS_R: ::c_int = 167; pub const _SC_LEVEL1_ICACHE_SIZE: ::c_int = 185; pub const _SC_LEVEL1_ICACHE_ASSOC: ::c_int = 186; pub const _SC_LEVEL1_ICACHE_LINESIZE: ::c_int = 187; pub const _SC_LEVEL1_DCACHE_SIZE: ::c_int = 188; pub const _SC_LEVEL1_DCACHE_ASSOC: ::c_int = 189; pub const _SC_LEVEL1_DCACHE_LINESIZE: ::c_int = 190; pub const _SC_LEVEL2_CACHE_SIZE: ::c_int = 191; pub const _SC_LEVEL2_CACHE_ASSOC: ::c_int = 192; pub const _SC_LEVEL2_CACHE_LINESIZE: ::c_int = 193; pub const _SC_LEVEL3_CACHE_SIZE: ::c_int = 194; pub const _SC_LEVEL3_CACHE_ASSOC: ::c_int = 195; pub const _SC_LEVEL3_CACHE_LINESIZE: ::c_int = 196; pub const _SC_LEVEL4_CACHE_SIZE: ::c_int = 197; pub const _SC_LEVEL4_CACHE_ASSOC: ::c_int = 198; pub const _SC_LEVEL4_CACHE_LINESIZE: ::c_int = 199; pub const O_ACCMODE: ::c_int = 3; pub const ST_RELATIME: ::c_ulong = 4096; pub const NI_MAXHOST: ::socklen_t = 1025; pub const ADFS_SUPER_MAGIC: ::c_long = 0x0000adf5; pub const AFFS_SUPER_MAGIC: ::c_long = 0x0000adff; pub const CODA_SUPER_MAGIC: ::c_long = 0x73757245; pub const CRAMFS_MAGIC: ::c_long = 0x28cd3d45; pub const EFS_SUPER_MAGIC: ::c_long = 0x00414a53; pub const EXT2_SUPER_MAGIC: ::c_long = 0x0000ef53; pub const EXT3_SUPER_MAGIC: ::c_long = 0x0000ef53; pub const EXT4_SUPER_MAGIC: ::c_long = 0x0000ef53; pub const HPFS_SUPER_MAGIC: ::c_long = 0xf995e849; pub const HUGETLBFS_MAGIC: ::c_long = 0x958458f6; pub const ISOFS_SUPER_MAGIC: ::c_long 
= 0x00009660; pub const JFFS2_SUPER_MAGIC: ::c_long = 0x000072b6; pub const MINIX_SUPER_MAGIC: ::c_long = 0x0000137f; pub const MINIX_SUPER_MAGIC2: ::c_long = 0x0000138f; pub const MINIX2_SUPER_MAGIC: ::c_long = 0x00002468; pub const MINIX2_SUPER_MAGIC2: ::c_long = 0x00002478; pub const MSDOS_SUPER_MAGIC: ::c_long = 0x00004d44; pub const NCP_SUPER_MAGIC: ::c_long = 0x0000564c; pub const NFS_SUPER_MAGIC: ::c_long = 0x00006969; pub const OPENPROM_SUPER_MAGIC: ::c_long = 0x00009fa1; pub const PROC_SUPER_MAGIC: ::c_long = 0x00009fa0; pub const QNX4_SUPER_MAGIC: ::c_long = 0x0000002f; pub const REISERFS_SUPER_MAGIC: ::c_long = 0x52654973; pub const SMB_SUPER_MAGIC: ::c_long = 0x0000517b; pub const TMPFS_MAGIC: ::c_long = 0x01021994; pub const USBDEVICE_SUPER_MAGIC: ::c_long = 0x00009fa2; pub const VEOF: usize = 4; pub const CPU_SETSIZE: ::c_int = 0x400; pub const PTRACE_TRACEME: ::c_uint = 0; pub const PTRACE_PEEKTEXT: ::c_uint = 1; pub const PTRACE_PEEKDATA: ::c_uint = 2; pub const PTRACE_PEEKUSER: ::c_uint = 3; pub const PTRACE_POKETEXT: ::c_uint = 4; pub const PTRACE_POKEDATA: ::c_uint = 5; pub const PTRACE_POKEUSER: ::c_uint = 6; pub const PTRACE_CONT: ::c_uint = 7; pub const PTRACE_KILL: ::c_uint = 8; pub const PTRACE_SINGLESTEP: ::c_uint = 9; pub const PTRACE_ATTACH: ::c_uint = 16; pub const PTRACE_SYSCALL: ::c_uint = 24; pub const PTRACE_SETOPTIONS: ::c_uint = 0x4200; pub const PTRACE_GETEVENTMSG: ::c_uint = 0x4201; pub const PTRACE_GETSIGINFO: ::c_uint = 0x4202; pub const PTRACE_SETSIGINFO: ::c_uint = 0x4203; pub const PTRACE_GETREGSET: ::c_uint = 0x4204; pub const PTRACE_SETREGSET: ::c_uint = 0x4205; pub const PTRACE_SEIZE: ::c_uint = 0x4206; pub const PTRACE_INTERRUPT: ::c_uint = 0x4207; pub const PTRACE_LISTEN: ::c_uint = 0x4208; pub const PTRACE_PEEKSIGINFO: ::c_uint = 0x4209; pub const EPOLLWAKEUP: ::c_int = 0x20000000; pub const MAP_HUGETLB: ::c_int = 0x040000; pub const SEEK_DATA: ::c_int = 3; pub const SEEK_HOLE: ::c_int = 4; pub const TCSANOW: ::c_int = 
0; pub const TCSADRAIN: ::c_int = 1; pub const TCSAFLUSH: ::c_int = 2; pub const TIOCLINUX: ::c_ulong = 0x541C; pub const TIOCGSERIAL: ::c_ulong = 0x541E; pub const RTLD_DEEPBIND: ::c_int = 0x8; pub const RTLD_GLOBAL: ::c_int = 0x100; pub const RTLD_NOLOAD: ::c_int = 0x4; pub const LINUX_REBOOT_MAGIC1: ::c_int = 0xfee1dead; pub const LINUX_REBOOT_MAGIC2: ::c_int = 672274793; pub const LINUX_REBOOT_MAGIC2A: ::c_int = 85072278; pub const LINUX_REBOOT_MAGIC2B: ::c_int = 369367448; pub const LINUX_REBOOT_MAGIC2C: ::c_int = 537993216; pub const LINUX_REBOOT_CMD_RESTART: ::c_int = 0x01234567; pub const LINUX_REBOOT_CMD_HALT: ::c_int = 0xCDEF0123; pub const LINUX_REBOOT_CMD_CAD_ON: ::c_int = 0x89ABCDEF; pub const LINUX_REBOOT_CMD_CAD_OFF: ::c_int = 0x00000000; pub const LINUX_REBOOT_CMD_POWER_OFF: ::c_int = 0x4321FEDC; pub const LINUX_REBOOT_CMD_RESTART2: ::c_int = 0xA1B2C3D4; pub const LINUX_REBOOT_CMD_SW_SUSPEND: ::c_int = 0xD000FCE2; pub const LINUX_REBOOT_CMD_KEXEC: ::c_int = 0x45584543; pub const NETLINK_ROUTE: ::c_int = 0; pub const NETLINK_UNUSED: ::c_int = 1; pub const NETLINK_USERSOCK: ::c_int = 2; pub const NETLINK_FIREWALL: ::c_int = 3; pub const NETLINK_SOCK_DIAG: ::c_int = 4; pub const NETLINK_NFLOG: ::c_int = 5; pub const NETLINK_XFRM: ::c_int = 6; pub const NETLINK_SELINUX: ::c_int = 7; pub const NETLINK_ISCSI: ::c_int = 8; pub const NETLINK_AUDIT: ::c_int = 9; pub const NETLINK_FIB_LOOKUP: ::c_int = 10; pub const NETLINK_CONNECTOR: ::c_int = 11; pub const NETLINK_NETFILTER: ::c_int = 12; pub const NETLINK_IP6_FW: ::c_int = 13; pub const NETLINK_DNRTMSG: ::c_int = 14; pub const NETLINK_KOBJECT_UEVENT: ::c_int = 15; pub const NETLINK_GENERIC: ::c_int = 16; pub const NETLINK_SCSITRANSPORT: ::c_int = 18; pub const NETLINK_ECRYPTFS: ::c_int = 19; pub const NETLINK_RDMA: ::c_int = 20; pub const NETLINK_CRYPTO: ::c_int = 21; pub const NETLINK_INET_DIAG: ::c_int = NETLINK_SOCK_DIAG; pub const MAX_LINKS: ::c_int = 32; pub const NLM_F_REQUEST: ::c_int = 1; pub const 
NLM_F_MULTI: ::c_int = 2; pub const NLM_F_ACK: ::c_int = 4; pub const NLM_F_ECHO: ::c_int = 8; pub const NLM_F_DUMP_INTR: ::c_int = 16; pub const NLM_F_DUMP_FILTERED: ::c_int = 32; pub const NLM_F_ROOT: ::c_int = 0x100; pub const NLM_F_MATCH: ::c_int = 0x200; pub const NLM_F_ATOMIC: ::c_int = 0x400; pub const NLM_F_DUMP: ::c_int = NLM_F_ROOT | NLM_F_MATCH; pub const NLM_F_REPLACE: ::c_int = 0x100; pub const NLM_F_EXCL: ::c_int = 0x200; pub const NLM_F_CREATE: ::c_int = 0x400; pub const NLM_F_APPEND: ::c_int = 0x800; pub const NLMSG_NOOP: ::c_int = 0x1; pub const NLMSG_ERROR: ::c_int = 0x2; pub const NLMSG_DONE: ::c_int = 0x3; pub const NLMSG_OVERRUN: ::c_int = 0x4; pub const NLMSG_MIN_TYPE: ::c_int = 0x10; pub const NETLINK_ADD_MEMBERSHIP: ::c_int = 1; pub const NETLINK_DROP_MEMBERSHIP: ::c_int = 2; pub const NETLINK_PKTINFO: ::c_int = 3; pub const NETLINK_BROADCAST_ERROR: ::c_int = 4; pub const NETLINK_NO_ENOBUFS: ::c_int = 5; pub const NETLINK_RX_RING: ::c_int = 6; pub const NETLINK_TX_RING: ::c_int = 7; pub const NETLINK_LISTEN_ALL_NSID: ::c_int = 8; pub const NETLINK_LIST_MEMBERSHIPS: ::c_int = 9; pub const NETLINK_CAP_ACK: ::c_int = 10; pub const NLA_F_NESTED: ::c_int = 1 << 15; pub const NLA_F_NET_BYTEORDER: ::c_int = 1 << 14; pub const NLA_TYPE_MASK: ::c_int = !(NLA_F_NESTED | NLA_F_NET_BYTEORDER); pub const TIOCM_LE: ::c_int = 0x001; pub const TIOCM_DTR: ::c_int = 0x002; pub const TIOCM_RTS: ::c_int = 0x004; pub const TIOCM_ST: ::c_int = 0x008; pub const TIOCM_SR: ::c_int = 0x010; pub const TIOCM_CTS: ::c_int = 0x020; pub const TIOCM_CAR: ::c_int = 0x040; pub const TIOCM_RNG: ::c_int = 0x080; pub const TIOCM_DSR: ::c_int = 0x100; pub const TIOCM_CD: ::c_int = TIOCM_CAR; pub const TIOCM_RI: ::c_int = TIOCM_RNG; #[doc(hidden)] pub const AF_MAX: ::c_int = 42; #[doc(hidden)] pub const PF_MAX: ::c_int = AF_MAX; cfg_if! 
{ if #[cfg(any(target_arch = "arm", target_arch = "x86", target_arch = "x86_64"))] { pub const PTHREAD_STACK_MIN: ::size_t = 16384; } else if #[cfg(target_arch = "sparc64")] { pub const PTHREAD_STACK_MIN: ::size_t = 0x6000; } else { pub const PTHREAD_STACK_MIN: ::size_t = 131072; } } extern { pub fn utmpxname(file: *const ::c_char) -> ::c_int; pub fn getutxent() -> *mut utmpx; pub fn getutxid(ut: *const utmpx) -> *mut utmpx; pub fn getutxline(ut: *const utmpx) -> *mut utmpx; pub fn pututxline(ut: *const utmpx) -> *mut utmpx; pub fn setutxent(); pub fn endutxent(); pub fn getpt() -> ::c_int; } #[link(name = "util")] extern { pub fn ioctl(fd: ::c_int, request: ::c_ulong, ...) -> ::c_int; pub fn backtrace(buf: *mut *mut ::c_void, sz: ::c_int) -> ::c_int; pub fn glob64(pattern: *const ::c_char, flags: ::c_int, errfunc: ::dox::Option<extern fn(epath: *const ::c_char, errno: ::c_int) -> ::c_int>, pglob: *mut glob64_t) -> ::c_int; pub fn globfree64(pglob: *mut glob64_t); pub fn ptrace(request: ::c_uint, ...) -> ::c_long; pub fn pthread_attr_getaffinity_np(attr: *const ::pthread_attr_t, cpusetsize: ::size_t, cpuset: *mut ::cpu_set_t) -> ::c_int; pub fn pthread_attr_setaffinity_np(attr: *mut ::pthread_attr_t, cpusetsize: ::size_t, cpuset: *const ::cpu_set_t) -> ::c_int; pub fn getpriority(which: ::__priority_which_t, who: ::id_t) -> ::c_int; pub fn setpriority(which: ::__priority_which_t, who: ::id_t, prio: ::c_int) -> ::c_int; pub fn pthread_getaffinity_np(thread: ::pthread_t, cpusetsize: ::size_t, cpuset: *mut ::cpu_set_t) -> ::c_int; pub fn pthread_setaffinity_np(thread: ::pthread_t, cpusetsize: ::size_t, cpuset: *const ::cpu_set_t) -> ::c_int; pub fn pthread_rwlockattr_getkind_np(attr: *const ::pthread_rwlockattr_t, val: *mut ::c_int) -> ::c_int; pub fn pthread_rwlockattr_setkind_np(attr: *mut ::pthread_rwlockattr_t, val: ::c_int) -> ::c_int; pub fn sched_getcpu() -> ::c_int; } cfg_if! 
{ if #[cfg(any(target_arch = "x86", target_arch = "arm", target_arch = "powerpc"))] { mod b32; pub use self::b32::*; } else if #[cfg(any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "sparc64"))] { mod b64; pub use self::b64::*; } else { // Unknown target_arch } }<|fim▁end|>
pub msg_name: *mut ::c_void, pub msg_namelen: ::socklen_t, pub msg_iov: *mut ::iovec, pub msg_iovlen: ::size_t,
<|file_name|>callee.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! // // Handles translation of callees as well as other call-related // things. Callees are a superset of normal rust values and sometimes // have different representations. In particular, top-level fn items // and methods are represented as just a fn ptr and not a full // closure. use core::prelude::*; use back::abi; use driver::session; use lib; use lib::llvm::ValueRef; use lib::llvm::llvm; use metadata::csearch; use middle::trans::base; use middle::trans::base::*; use middle::trans::build::*; use middle::trans::callee; use middle::trans::closure; use middle::trans::common; use middle::trans::common::*; use middle::trans::datum::*; use middle::trans::datum::Datum; use middle::trans::expr; use middle::trans::glue; use middle::trans::inline; use middle::trans::meth; use middle::trans::monomorphize; use middle::trans::type_of; use middle::ty; use middle::typeck; use util::common::indenter; use syntax::ast; use syntax::ast_map; use syntax::visit; // Represents a (possibly monomorphized) top-level fn item or method // item. Note that this is just the fn-ptr and is not a Rust closure // value (which is a pair). 
pub struct FnData { llfn: ValueRef, } pub struct MethodData { llfn: ValueRef, llself: ValueRef, self_ty: ty::t, self_mode: ast::rmode } pub enum CalleeData { Closure(Datum), Fn(FnData), Method(MethodData) } pub struct Callee { bcx: block, data: CalleeData } pub fn trans(bcx: block, expr: @ast::expr) -> Callee { let _icx = bcx.insn_ctxt("trans_callee"); // pick out special kinds of expressions that can be called: match expr.node { ast::expr_path(_) => { return trans_def(bcx, bcx.def(expr.id), expr); } ast::expr_field(base, _, _) => { match bcx.ccx().maps.method_map.find(&expr.id) { Some(origin) => { // An impl method // FIXME(#5562): removing this copy causes a segfault // before stage2 let origin = /*bad*/ copy *origin; return meth::trans_method_callee(bcx, expr.id, base, origin); } None => {} // not a method, just a field } } _ => {} } // any other expressions are closures: return datum_callee(bcx, expr); fn datum_callee(bcx: block, expr: @ast::expr) -> Callee { let DatumBlock {bcx, datum} = expr::trans_to_datum(bcx, expr); match ty::get(datum.ty).sty { ty::ty_bare_fn(*) => { let llval = datum.to_appropriate_llval(bcx); return Callee {bcx: bcx, data: Fn(FnData {llfn: llval})}; } ty::ty_closure(*) => { return Callee {bcx: bcx, data: Closure(datum)}; } _ => { bcx.tcx().sess.span_bug( expr.span, fmt!("Type of callee is neither bare-fn nor closure: %s", bcx.ty_to_str(datum.ty))); } } } fn fn_callee(bcx: block, fd: FnData) -> Callee { return Callee {bcx: bcx, data: Fn(fd)}; } fn trans_def(bcx: block, def: ast::def, ref_expr: @ast::expr) -> Callee { match def { ast::def_fn(did, _) | ast::def_static_method(did, None, _) => { fn_callee(bcx, trans_fn_ref(bcx, did, ref_expr.id)) } ast::def_static_method(impl_did, Some(trait_did), _) => { fn_callee(bcx, meth::trans_static_method_callee(bcx, impl_did, trait_did, ref_expr.id)) } ast::def_variant(tid, vid) => { // nullary variants are not callable assert!(ty::enum_variant_with_id(bcx.tcx(), tid, vid).args.len() > 0u); 
fn_callee(bcx, trans_fn_ref(bcx, vid, ref_expr.id)) } ast::def_struct(def_id) => { fn_callee(bcx, trans_fn_ref(bcx, def_id, ref_expr.id)) } ast::def_arg(*) | ast::def_local(*) | ast::def_binding(*) | ast::def_upvar(*) | ast::def_self(*) => { datum_callee(bcx, ref_expr) } ast::def_mod(*) | ast::def_foreign_mod(*) | ast::def_const(*) | ast::def_ty(*) | ast::def_prim_ty(*) | ast::def_use(*) | ast::def_typaram_binder(*) | ast::def_region(*) | ast::def_label(*) | ast::def_ty_param(*) | ast::def_self_ty(*) => { bcx.tcx().sess.span_bug( ref_expr.span, fmt!("Cannot translate def %? \ to a callable thing!", def)); } } } } pub fn trans_fn_ref_to_callee(bcx: block, def_id: ast::def_id, ref_id: ast::node_id) -> Callee { Callee {bcx: bcx, data: Fn(trans_fn_ref(bcx, def_id, ref_id))} } pub fn trans_fn_ref(bcx: block, def_id: ast::def_id, ref_id: ast::node_id) -> FnData { /*! * * Translates a reference (with id `ref_id`) to the fn/method * with id `def_id` into a function pointer. This may require * monomorphization or inlining. */ let _icx = bcx.insn_ctxt("trans_fn"); let type_params = node_id_type_params(bcx, ref_id); let vtables = node_vtables(bcx, ref_id); trans_fn_ref_with_vtables(bcx, def_id, ref_id, type_params, vtables) } pub fn trans_fn_ref_with_vtables_to_callee( bcx: block, def_id: ast::def_id, ref_id: ast::node_id, type_params: &[ty::t], vtables: Option<typeck::vtable_res>) -> Callee { Callee {bcx: bcx, data: Fn(trans_fn_ref_with_vtables(bcx, def_id, ref_id, type_params, vtables))} } pub fn trans_fn_ref_with_vtables( bcx: block, // def_id: ast::def_id, // def id of fn ref_id: ast::node_id, // node id of use of fn; may be zero if N/A type_params: &[ty::t], // values for fn's ty params vtables: Option<typeck::vtable_res>) -> FnData { //! // // Translates a reference to a fn/method item, monomorphizing and // inlining as it goes. 
// // # Parameters // // - `bcx`: the current block where the reference to the fn occurs // - `def_id`: def id of the fn or method item being referenced // - `ref_id`: node id of the reference to the fn/method, if applicable. // This parameter may be zero; but, if so, the resulting value may not // have the right type, so it must be cast before being used. // - `type_params`: values for each of the fn/method's type parameters // - `vtables`: values for each bound on each of the type parameters let _icx = bcx.insn_ctxt("trans_fn_ref_with_vtables"); let ccx = bcx.ccx(); let tcx = ccx.tcx; debug!("trans_fn_ref_with_vtables(bcx=%s, def_id=%?, ref_id=%?, \ type_params=%?, vtables=%?)", bcx.to_str(), def_id, ref_id, type_params.map(|t| bcx.ty_to_str(*t)), vtables); let _indenter = indenter(); assert!(type_params.all(|t| !ty::type_needs_infer(*t))); // Polytype of the function item (may have type params) let fn_tpt = ty::lookup_item_type(tcx, def_id); // Modify the def_id if this is a default method; we want to be // monomorphizing the trait's code. let (def_id, opt_impl_did) = match tcx.provided_method_sources.find(&def_id) { None => (def_id, None), Some(source) => (source.method_id, Some(source.impl_id)) }; // Check whether this fn has an inlined copy and, if so, redirect // def_id to the local id of the inlined copy. let def_id = { if def_id.crate != ast::local_crate { let may_translate = opt_impl_did.is_none(); inline::maybe_instantiate_inline(ccx, def_id, may_translate) } else { def_id } }; // We must monomorphise if the fn has type parameters, is a rust // intrinsic, or is a default method. In particular, if we see an // intrinsic that is inlined from a different crate, we want to reemit the // intrinsic instead of trying to call it in the other crate. 
let must_monomorphise; if type_params.len() > 0 || opt_impl_did.is_some() { must_monomorphise = true; } else if def_id.crate == ast::local_crate { let map_node = session::expect( ccx.sess, ccx.tcx.items.find(&def_id.node), || fmt!("local item should be in ast map")); match *map_node { ast_map::node_foreign_item(_, abis, _, _) => { must_monomorphise = abis.is_intrinsic() } _ => { must_monomorphise = false; } } } else { must_monomorphise = false; } // Create a monomorphic verison of generic functions if must_monomorphise { // Should be either intra-crate or inlined. assert!(def_id.crate == ast::local_crate); let mut (val, must_cast) = monomorphize::monomorphic_fn(ccx, def_id, type_params, vtables, opt_impl_did, Some(ref_id)); if must_cast && ref_id != 0 { // Monotype of the REFERENCE to the function (type params // are subst'd) let ref_ty = common::node_id_type(bcx, ref_id); val = PointerCast( bcx, val, T_ptr(type_of::type_of_fn_from_ty(ccx, ref_ty))); } return FnData {llfn: val}; } // Find the actual function pointer. let mut val = { if def_id.crate == ast::local_crate { // Internal reference. get_item_val(ccx, def_id.node) } else { // External reference. 
trans_external_path(ccx, def_id, fn_tpt.ty) } }; return FnData {llfn: val}; } // ______________________________________________________________________ // Translating calls pub fn trans_call(in_cx: block, call_ex: @ast::expr, f: @ast::expr, args: CallArgs, id: ast::node_id, dest: expr::Dest) -> block { let _icx = in_cx.insn_ctxt("trans_call"); trans_call_inner( in_cx, call_ex.info(), expr_ty(in_cx, f), node_id_type(in_cx, id), |cx| trans(cx, f), args, dest, DontAutorefArg) } pub fn trans_method_call(in_cx: block, call_ex: @ast::expr, rcvr: @ast::expr, args: CallArgs, dest: expr::Dest) -> block { let _icx = in_cx.insn_ctxt("trans_method_call"); trans_call_inner( in_cx, call_ex.info(), node_id_type(in_cx, call_ex.callee_id), expr_ty(in_cx, call_ex), |cx| { match cx.ccx().maps.method_map.find(&call_ex.id) { Some(origin) => { // FIXME(#5562): removing this copy causes a segfault // before stage2 let origin = /*bad*/ copy *origin; meth::trans_method_callee(cx, call_ex.callee_id, rcvr, origin) } None => { cx.tcx().sess.span_bug(call_ex.span, ~"method call expr wasn't in \ method map") } } }, args, dest, DontAutorefArg) } pub fn trans_lang_call(bcx: block, did: ast::def_id, args: &[ValueRef], dest: expr::Dest) -> block { let fty = if did.crate == ast::local_crate { ty::node_id_to_type(bcx.ccx().tcx, did.node) } else { csearch::get_type(bcx.ccx().tcx, did).ty }; let rty = ty::ty_fn_ret(fty); return callee::trans_call_inner( bcx, None, fty, rty, |bcx| trans_fn_ref_with_vtables_to_callee(bcx, did, 0, ~[], None), ArgVals(args), dest, DontAutorefArg); } pub fn trans_lang_call_with_type_params(bcx: block, did: ast::def_id, args: &[ValueRef], type_params: &[ty::t], dest: expr::Dest) -> block { let fty; if did.crate == ast::local_crate { fty = ty::node_id_to_type(bcx.tcx(), did.node); } else { fty = csearch::get_type(bcx.tcx(), did).ty; } let rty = ty::ty_fn_ret(fty); return callee::trans_call_inner( bcx, None, fty, rty, |bcx| { let callee = 
trans_fn_ref_with_vtables_to_callee(bcx, did, 0, type_params, None); let new_llval; match callee.data { Fn(fn_data) => { let substituted = ty::subst_tps(callee.bcx.tcx(), type_params, None, fty); let mut llfnty = type_of::type_of(callee.bcx.ccx(), substituted); new_llval = PointerCast(callee.bcx, fn_data.llfn, llfnty); } _ => fail!() } Callee { bcx: callee.bcx, data: Fn(FnData { llfn: new_llval }) } }, ArgVals(args), dest, DontAutorefArg); } pub fn body_contains_ret(body: &ast::blk) -> bool { let cx = @mut false; visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { visit_item: |_i, _cx, _v| { }, visit_expr: |e: @ast::expr, cx: @mut bool, v| { if !*cx { match e.node { ast::expr_ret(_) => *cx = true, _ => visit::visit_expr(e, cx, v), } } }, ..*visit::default_visitor() })); *cx } // See [Note-arg-mode] pub fn trans_call_inner( ++in_cx: block, call_info: Option<NodeInfo>, fn_expr_ty: ty::t, ret_ty: ty::t, get_callee: &fn(block) -> Callee, args: CallArgs, dest: expr::Dest, autoref_arg: AutorefArg) -> block { do base::with_scope(in_cx, call_info, ~"call") |cx| { let ret_in_loop = match args { ArgExprs(args) => { args.len() > 0u && match vec::last(args).node { ast::expr_loop_body(@ast::expr { node: ast::expr_fn_block(_, ref body), _ }) => body_contains_ret(body), _ => false } } _ => false }; let callee = get_callee(cx); let mut bcx = callee.bcx; let ccx = cx.ccx(); let ret_flag = if ret_in_loop { let flag = alloca(bcx, T_bool()); Store(bcx, C_bool(false), flag); Some(flag) } else { None }; let (llfn, llenv) = unsafe { match callee.data { Fn(d) => { (d.llfn, llvm::LLVMGetUndef(T_opaque_box_ptr(ccx))) } Method(d) => { // Weird but true: we pass self in the *environment* slot! let llself = PointerCast(bcx, d.llself, T_opaque_box_ptr(ccx)); (d.llfn, llself) } Closure(d) => { // Closures are represented as (llfn, llclosure) pair: // load the requisite values out. 
let pair = d.to_ref_llval(bcx); let llfn = GEPi(bcx, pair, [0u, abi::fn_field_code]); let llfn = Load(bcx, llfn); let llenv = GEPi(bcx, pair, [0u, abi::fn_field_box]); let llenv = Load(bcx, llenv); (llfn, llenv) } } }; let llretslot = trans_ret_slot(bcx, fn_expr_ty, dest); let mut llargs = ~[]; llargs.push(llretslot); llargs.push(llenv); bcx = trans_args(bcx, args, fn_expr_ty, ret_flag, autoref_arg, &mut llargs); // Now that the arguments have finished evaluating, we need to revoke // the cleanup for the self argument, if it exists match callee.data { Method(d) if d.self_mode == ast::by_copy => { revoke_clean(bcx, d.llself); } _ => {} } // Uncomment this to debug calls. /* io::println(fmt!("calling: %s", bcx.val_str(llfn))); for llargs.each |llarg| { io::println(fmt!("arg: %s", bcx.val_str(*llarg))); } io::println("---"); */ // If the block is terminated, then one or more of the args // has type _|_. Since that means it diverges, the code for // the call itself is unreachable. bcx = base::invoke(bcx, llfn, llargs); match dest { // drop the value if it is not being saved. 
expr::Ignore => { unsafe { if llvm::LLVMIsUndef(llretslot) != lib::llvm::True { bcx = glue::drop_ty(bcx, llretslot, ret_ty); } } } expr::SaveIn(_) => { } } if ty::type_is_bot(ret_ty) { Unreachable(bcx); } else if ret_in_loop { let ret_flag_result = bool_to_i1(bcx, Load(bcx, ret_flag.get())); bcx = do with_cond(bcx, ret_flag_result) |bcx| { for (copy bcx.fcx.loop_ret).each |&(flagptr, _)| { Store(bcx, C_bool(true), flagptr); Store(bcx, C_bool(false), bcx.fcx.llretptr); } base::cleanup_and_leave(bcx, None, Some(bcx.fcx.llreturn)); Unreachable(bcx); bcx } } bcx } } pub enum CallArgs<'self> { ArgExprs(&'self [@ast::expr]), ArgVals(&'self [ValueRef]) } pub fn trans_ret_slot(+bcx: block, +fn_ty: ty::t, +dest: expr::Dest) -> ValueRef { let retty = ty::ty_fn_ret(fn_ty); match dest { expr::SaveIn(dst) => dst, expr::Ignore => { if ty::type_is_nil(retty) { unsafe { llvm::LLVMGetUndef(T_ptr(T_nil())) } } else { alloc_ty(bcx, retty) } } } } pub fn trans_args(+cx: block, +args: CallArgs, +fn_ty: ty::t, +ret_flag: Option<ValueRef>, +autoref_arg: AutorefArg, +llargs: &mut ~[ValueRef]) -> block { let _icx = cx.insn_ctxt("trans_args"); let mut temp_cleanups = ~[]; let arg_tys = ty::ty_fn_args(fn_ty); let mut bcx = cx; // First we figure out the caller's view of the types of the arguments. // This will be needed if this is a generic call, because the callee has // to cast her view of the arguments to the caller's view. match args { ArgExprs(arg_exprs) => { let last = arg_exprs.len() - 1u; for vec::eachi(arg_exprs) |i, arg_expr| { let arg_val = unpack_result!(bcx, { trans_arg_expr(bcx, arg_tys[i], *arg_expr, &mut temp_cleanups, if i == last { ret_flag } else { None }, autoref_arg) }); llargs.push(arg_val); } } ArgVals(vs) => { llargs.push_all(vs); } } // now that all arguments have been successfully built, we can revoke any // temporary cleanups, as they are only needed if argument construction // should fail (for example, cleanup of copy mode args). 
for vec::each(temp_cleanups) |c| { revoke_clean(bcx, *c) } return bcx; } pub enum AutorefArg { DontAutorefArg, DoAutorefArg } // temp_cleanups: cleanups that should run only if failure occurs before the // call takes place: pub fn trans_arg_expr(bcx: block, formal_ty: ty::arg, arg_expr: @ast::expr, +temp_cleanups: &mut ~[ValueRef], +ret_flag: Option<ValueRef>, +autoref_arg: AutorefArg) -> Result { let _icx = bcx.insn_ctxt("trans_arg_expr"); let ccx = bcx.ccx(); debug!("trans_arg_expr(formal_ty=(%?,%s), arg_expr=%s, \ ret_flag=%?)", formal_ty.mode, bcx.ty_to_str(formal_ty.ty), bcx.expr_to_str(arg_expr), ret_flag.map(|v| bcx.val_str(*v))); let _indenter = indenter(); // translate the arg expr to a datum let arg_datumblock = match ret_flag { None => expr::trans_to_datum(bcx, arg_expr), // If there is a ret_flag, this *must* be a loop body Some(_) => {<|fim▁hole|> blk @ @ast::expr { node: ast::expr_fn_block(ref decl, ref body), _ }) => { let scratch_ty = expr_ty(bcx, arg_expr); let scratch = alloc_ty(bcx, scratch_ty); let arg_ty = expr_ty(bcx, arg_expr); let sigil = ty::ty_closure_sigil(arg_ty); let bcx = closure::trans_expr_fn( bcx, sigil, decl, body, arg_expr.id, blk.id, Some(ret_flag), expr::SaveIn(scratch)); DatumBlock {bcx: bcx, datum: Datum {val: scratch, ty: scratch_ty, mode: ByRef, source: RevokeClean}} } _ => { bcx.sess().impossible_case( arg_expr.span, ~"ret_flag with non-loop-\ body expr"); } } } }; let mut arg_datum = arg_datumblock.datum; let mut bcx = arg_datumblock.bcx; debug!(" arg datum: %s", arg_datum.to_str(bcx.ccx())); // finally, deal with the various modes let arg_mode = ty::resolved_mode(ccx.tcx, formal_ty.mode); let mut val; if ty::type_is_bot(arg_datum.ty) { // For values of type _|_, we generate an // "undef" value, as such a value should never // be inspected. It's important for the value // to have type lldestty (the callee's expected type). 
let llformal_ty = type_of::type_of(ccx, formal_ty.ty); unsafe { val = llvm::LLVMGetUndef(llformal_ty); } } else { // FIXME(#3548) use the adjustments table match autoref_arg { DoAutorefArg => { assert!(! bcx.ccx().maps.moves_map.contains(&arg_expr.id)); val = arg_datum.to_ref_llval(bcx); } DontAutorefArg => { match arg_mode { ast::by_ref => { // This assertion should really be valid, but because // the explicit self code currently passes by-ref, it // does not hold. // //assert !bcx.ccx().maps.moves_map.contains_key( // &arg_expr.id); val = arg_datum.to_ref_llval(bcx); } ast::by_copy => { debug!("by copy arg with type %s, storing to scratch", bcx.ty_to_str(arg_datum.ty)); let scratch = scratch_datum(bcx, arg_datum.ty, false); arg_datum.store_to_datum(bcx, arg_expr.id, INIT, scratch); // Technically, ownership of val passes to the callee. // However, we must cleanup should we fail before the // callee is actually invoked. scratch.add_clean(bcx); temp_cleanups.push(scratch.val); match arg_datum.appropriate_mode() { ByValue => { val = Load(bcx, scratch.val); } ByRef => { val = scratch.val; } } } } } } if formal_ty.ty != arg_datum.ty { // this could happen due to e.g. subtyping let llformal_ty = type_of::type_of_explicit_arg(ccx, &formal_ty); debug!("casting actual type (%s) to match formal (%s)", bcx.val_str(val), bcx.llty_str(llformal_ty)); val = PointerCast(bcx, val, llformal_ty); } } debug!("--- trans_arg_expr passing %s", val_str(bcx.ccx().tn, val)); return rslt(bcx, val); }<|fim▁end|>
match arg_expr.node { ast::expr_loop_body(
<|file_name|>exception.py<|end_file_name|><|fim▁begin|>""" sentry.interfaces.exception ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import __all__ = ('Exception',) from django.conf import settings from sentry.interfaces.base import Interface from sentry.interfaces.stacktrace import Stacktrace, is_newest_frame_first from sentry.utils.safe import trim from sentry.web.helpers import render_to_string class SingleException(Interface): """ A standard exception with a ``type`` and value argument, and an optional ``module`` argument describing the exception class type and module namespace. Either ``type`` or ``value`` must be present. You can also optionally bind a stacktrace interface to an exception. The spec is identical to ``sentry.interfaces.Stacktrace``. >>> { >>> "type": "ValueError", >>> "value": "My exception value", >>> "module": "__builtins__" >>> "stacktrace": { >>> # see sentry.interfaces.Stacktrace >>> } >>> } """ score = 900 display_score = 1200 @classmethod def to_python(cls, data): assert data.get('type') or data.get('value') if data.get('stacktrace') and data['stacktrace'].get('frames'): stacktrace = Stacktrace.to_python(data['stacktrace']) else: stacktrace = None kwargs = { 'type': trim(data.get('type'), 128), 'value': trim(data.get('value'), 4096), 'module': trim(data.get('module'), 128), 'stacktrace': stacktrace, } return cls(**kwargs) def to_json(self): if self.stacktrace: stacktrace = self.stacktrace.to_json() else: stacktrace = None return { 'type': self.type, 'value': self.value, 'module': self.module, 'stacktrace': stacktrace, } def get_api_context(self): if self.stacktrace: stacktrace = self.stacktrace.get_api_context() else: stacktrace = None return { 'type': self.type, 'value': self.value, 'module': self.module, 'stacktrace': stacktrace, } def get_alias(self): return 'exception' def get_path(self): return 
'sentry.interfaces.Exception' def get_hash(self): output = None if self.stacktrace: output = self.stacktrace.get_hash() if output and self.type: output.append(self.type) if not output: output = filter(bool, [self.type, self.value]) return output def get_context(self, event, is_public=False, **kwargs): last_frame = None interface = event.interfaces.get('sentry.interfaces.Stacktrace') if interface is not None and interface.frames: last_frame = interface.frames[-1] e_module = self.module e_type = self.type e_value = self.value if self.module: fullname = '%s.%s' % (e_module, e_type) else: fullname = e_type if e_value and not e_type: e_type = e_value e_value = None return { 'is_public': is_public, 'event': event, 'exception_type': e_type, 'exception_value': e_value, 'exception_module': e_module, 'fullname': fullname, 'last_frame': last_frame, } class Exception(Interface): """ An exception consists of a list of values. In most cases, this list contains a single exception, with an optional stacktrace interface. Each exception has a mandatory ``value`` argument and optional ``type`` and ``module`` arguments describing the exception class type and module namespace. You can also optionally bind a stacktrace interface to an exception. The spec is identical to ``sentry.interfaces.Stacktrace``. >>> { >>> "values": [{ >>> "type": "ValueError", >>> "value": "My exception value", >>> "module": "__builtins__" >>> "stacktrace": { >>> # see sentry.interfaces.Stacktrace >>> } >>> }] >>> } Values should be sent oldest to newest, this includes both the stacktrace and the exception itself. .. note:: This interface can be passed as the 'exception' key in addition to the full interface path. 
""" score = 2000 @classmethod def to_python(cls, data): if 'values' not in data: data = {'values': [data]} assert data['values'] trim_exceptions(data) kwargs = { 'values': [ SingleException.to_python(v) for v in data['values'] ], } if data.get('exc_omitted'): assert len(data['exc_omitted']) == 2 kwargs['exc_omitted'] = data['exc_omitted'] else: kwargs['exc_omitted'] = None return cls(**kwargs) def to_json(self): return { 'values': [v.to_json() for v in self.values], 'exc_omitted': self.exc_omitted, } def get_api_context(self): return { 'values': [v.get_api_context() for v in self.values], 'excOmitted': self.exc_omitted, } def __getitem__(self, key): return self.values[key] def __iter__(self): return iter(self.values) def __len__(self): return len(self.values) def get_alias(self): return 'exception' def get_path(self): return 'sentry.interfaces.Exception' def compute_hashes(self, platform): system_hash = self.get_hash(system_frames=True) if not system_hash: return [] app_hash = self.get_hash(system_frames=False) if system_hash == app_hash or not app_hash: return [system_hash] return [system_hash, app_hash] def get_hash(self, system_frames=True): # optimize around the fact that some exceptions might have stacktraces # while others may not and we ALWAYS want stacktraces over values output = [] for value in self.values: if not value.stacktrace: continue stack_hash = value.stacktrace.get_hash( system_frames=system_frames, ) if stack_hash: output.extend(stack_hash) output.append(value.type) if not output: for value in self.values: output.extend(value.get_hash()) return output def get_context(self, event, is_public=False, **kwargs): newest_first = is_newest_frame_first(event) system_frames = 0 app_frames = 0 unknown_frames = 0 for exc in self.values: if not exc.stacktrace: continue for frame in exc.stacktrace.frames: if frame.in_app is False: system_frames += 1 elif frame.in_app is True: app_frames += 1 else: unknown_frames += 1 # TODO(dcramer): this should happen in 
normalize # We need to ensure that implicit values for in_app are handled # appropriately if unknown_frames and (app_frames or system_frames): for exc in self.values: if not exc.stacktrace: continue for frame in exc.stacktrace.frames: if frame.in_app is None: frame.in_app = bool(system_frames) if frame.in_app: app_frames += 1 else: system_frames += 1 # if there is a mix of frame styles then we indicate that system frames # are present and should be represented as a split has_system_frames = app_frames and system_frames context_kwargs = { 'event': event, 'is_public': is_public, 'newest_first': newest_first, 'has_system_frames': has_system_frames, } exceptions = [] last = len(self.values) - 1 for num, e in enumerate(self.values): context = e.get_context(**context_kwargs) if e.stacktrace: context['stacktrace'] = e.stacktrace.get_context( with_stacktrace=False, **context_kwargs) else: context['stacktrace'] = {} context['stack_id'] = 'exception_%d' % (num,) context['is_root'] = num == last exceptions.append(context) if newest_first: exceptions.reverse() if self.exc_omitted: first_exc_omitted, last_exc_omitted = self.exc_omitted else: first_exc_omitted, last_exc_omitted = None, None return { 'newest_first': newest_first, 'system_frames': system_frames if has_system_frames else 0, 'exceptions': exceptions, 'stacktrace': self.get_stacktrace(event, newest_first=newest_first), 'first_exc_omitted': first_exc_omitted, 'last_exc_omitted': last_exc_omitted, } def to_html(self, event, **kwargs): if not self.values: return '' if len(self.values) == 1 and not self.values[0].stacktrace: exception = self.values[0] context = exception.get_context(event=event, **kwargs) return render_to_string('sentry/partial/interfaces/exception.html', context) context = self.get_context(event=event, **kwargs) return render_to_string('sentry/partial/interfaces/chained_exception.html', context) <|fim▁hole|> return '' output = [] for exc in self.values: output.append(u'{0}: {1}\n'.format(exc.type, 
exc.value)) if exc.stacktrace: output.append(exc.stacktrace.get_stacktrace( event, system_frames=False, max_frames=5, header=False) + '\n\n') return (''.join(output)).strip() def get_stacktrace(self, *args, **kwargs): exc = self.values[0] if exc.stacktrace: return exc.stacktrace.get_stacktrace(*args, **kwargs) return '' def trim_exceptions(data, max_values=settings.SENTRY_MAX_EXCEPTIONS): # TODO: this doesnt account for cases where the client has already omitted # exceptions values = data['values'] exc_len = len(values) if exc_len <= max_values: return half_max = max_values / 2 data['exc_omitted'] = (half_max, exc_len - half_max) for n in xrange(half_max, exc_len - half_max): del values[half_max]<|fim▁end|>
def to_string(self, event, is_public=False, **kwargs): if not self.values:
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Code for calculating hamming distances and bitwise hamming distances <|fim▁hole|>pub mod hamming_distance; pub mod bitwise_hamming_distance;<|fim▁end|>
<|file_name|>neshareclientthreads.cpp<|end_file_name|><|fim▁begin|>/* NEshare is a peer-to-peer file sharing toolkit. Copyright (C) 2001, 2002 Neill Miller This file is part of NEshare. NEshare is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. NEshare is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with NEshare; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "neclientheaders.h" namespace neShareClientThreads { static ncThread s_processClientPeersThread; static ncThread s_processServentPeersThread; static ncThread s_clientListenerThread; static ncThread s_processUploadsThread; static int s_processClientPeersThreadStop = 0; static int s_processServentPeersThreadStop = 0; static int s_processUploadsThreadStop = 0; static ncSocketListener *g_nsl = (ncSocketListener *)0; /* the following are defined only to store pointers to the internal objects stored in the neClientConnection object to use internally in this namespace */ static neConfig *g_config = (neConfig *)0; static nePeerManager *g_peerClientManager = (nePeerManager *)0; static nePeerManager *g_peerServentManager = (nePeerManager *)0; static nePeerDownloadManager *g_peerDownloadManager = (nePeerDownloadManager *)0; static nePeerUploadManager *g_peerUploadManager = (nePeerUploadManager *)0; void *processLoginMessage(void *ptr) { ncSocket *newSock = (ncSocket *)ptr; if (newSock) { /* read the login message and return an appropriate response */ nemsgPeerLogin peerLoginMsg(newSock); if (peerLoginMsg.recv() == 0) { 
iprintf("neShareClientThreads::processLoginMessage | " "Login Message Received.\n"); /* FIXME: default TTL of connected peer is 300 seconds... this should be configurable */ nePeer *newPeer = new nePeer(newSock,300); if (newPeer) { if (g_peerServentManager->addPeer(newPeer)) { eprintf("neShareClientThreads::processLoginMessa" "ge | addPeer failed.\n"); neClientUtils::rejectNewServentPeer(newPeer); newSock->flush(); delete newSock; return (void *)0; } /* send a peer login ack */ nemsgPeerLoginAck peerLoginAckMsg(newSock); if (peerLoginAckMsg.send() == 0) { iprintf("New Peer Added (addr = %x) - Total Count" " is %d.\n",newPeer, g_peerServentManager->getNumPeers()); newSock->flush(); } } else { eprintf("neShareClientThreads::processLoginMessage | " "Cannot allocate new peer.\n"); } } else { /* drop the connection */ eprintf("neShareClientThreads::processLoginMessage | " "Login Message not received.\n"); delete newSock; } } return (void *)0; } void *listenForClients(void *ptr) { unsigned long clientControlPort = 0; assert(g_config); clientControlPort = g_config->getClientControlPort(); /* create a ncSocketListener and register a callback that will add a new user to the client peerManager (similar to the userManager in the server). */ if (g_nsl) { eprintf("FIXME: neShareClientThreads::listenForClients " "called with an already initialized socket " "listener object -- terminating\n"); assert(0); } g_nsl = new ncSocketListener(clientControlPort,SOCKTYPE_TCPIP); if (g_nsl && g_nsl->startListening(processLoginMessage, NC_NONTHREADED, NC_REUSEADDR) != NC_OK) { eprintf("ERROR!!! 
NEshare client listener has mysteriously " "stopped running.\nNo more incoming client " "connections are allowed.\nClient listener " "terminating.\n"); } return (void *)0; } void *processClientPeers(void *ptr) { int numReady = 0; std::vector<nePeer *> markedPeers; std::vector<nePeer *>::iterator iter; s_processClientPeersThreadStop = 1; while(s_processClientPeersThreadStop) { assert(markedPeers.empty()); numReady = g_peerClientManager->pollPeerSockets(&markedPeers); /* remove marked peers if any */ for(iter = markedPeers.begin(); iter != markedPeers.end(); iter++) { g_peerClientManager->removePeer((*iter), g_peerUploadManager, g_peerDownloadManager); } markedPeers.clear(); if (numReady == 0) { /* if there are no peer sockets ready, sleep and then try again. */ ncSleep(250); continue; } else if (numReady == -1) { /* if an error occurred, report the error and continue */ eprintf("neShareClientThreads::processClientPeers | " "peerManager::pollPeerSockets failed.\n"); continue; } /* handle ready peers, if any */ if (neClientUtils::handleReadyPeers(g_peerClientManager, g_peerDownloadManager, g_peerUploadManager)) { eprintf("neShareClientThreads::processClientPeers | a" " non-fatal peer error occured.\n"); } /* check if a cancel request was issued */ ncThread::testCancel(); } s_processClientPeersThreadStop = 1; return (void *)0; } void *processServentPeers(void *ptr) { int numReady = 0; std::vector<nePeer *> markedPeers; std::vector<nePeer *>::iterator iter; s_processServentPeersThreadStop = 1; while(s_processServentPeersThreadStop) { assert(markedPeers.empty()); numReady = g_peerServentManager->pollPeerSockets( &markedPeers); /* remove marked peers if any */ for(iter = markedPeers.begin(); iter != markedPeers.end(); iter++) { g_peerServentManager->removePeer((*iter), g_peerUploadManager, g_peerDownloadManager); } markedPeers.clear(); if (numReady == 0) { /* if there are no peer sockets ready, sleep and then try again. 
*/ ncSleep(250); continue; } else if (numReady == -1) { /* if an error occurred, report the error and continue */ eprintf("neShareClientThreads::processServentPeers | " "peerManager::pollPeerSockets failed.\n"); continue; } /* handle ready peers, if any */ if (neClientUtils::handleReadyPeers(g_peerServentManager, g_peerDownloadManager, g_peerUploadManager)) { eprintf("neShareClientThreads::processServentPeers " "| a non-fatal peer error occured.\n"); } /* check if a cancel request was issued */ ncThread::testCancel(); } s_processServentPeersThreadStop = 1; return (void *)0; } void *processUploads(void *ptr) { s_processUploadsThreadStop = 1; while(s_processUploadsThreadStop) { /* check if there are any current uploads */ if (g_peerUploadManager->getNumUploads() == 0) { /* if not, sleep for a while */ ncSleep(500); } else { /* send another chunk to each peer with an active download */ g_peerUploadManager->sendPeerData(); } /* check if a cancel request was issued */ ncThread::testCancel(); } s_processUploadsThreadStop = 1; return (void *)0; } void startThreads(neConfig *config, nePeerManager *peerClientManager, nePeerManager *peerServentManager, nePeerDownloadManager *peerDownloadManager, nePeerUploadManager *peerUploadManager) { /* stash all incoming arguments for later use */ g_config = config; g_peerClientManager = peerClientManager; g_peerServentManager = peerServentManager; g_peerDownloadManager = peerDownloadManager; g_peerUploadManager = peerUploadManager; /* set the config object on the download manager */ g_peerDownloadManager->setConfig(g_config); /* start up client-to-client related threads */ if (s_processClientPeersThread.start( processClientPeers,(void *)0) == NC_FAILED) { eprintf("Fatal error: Cannot start " "processClientPeersThread.\n"); exit(1); } if (s_processServentPeersThread.start( processServentPeers,(void *)0) == NC_FAILED) { eprintf("Fatal error: Cannot start " "processServentPeersThread.\n"); exit(1); } if (s_clientListenerThread.start( 
listenForClients,(void *)0) == NC_FAILED) { eprintf("Fatal error: Cannot start " "clientListenerThread.\n"); exit(1); } if (s_processUploadsThread.start( processUploads,(void *)0) == NC_FAILED) { eprintf("Error: Cannot start upload processing thread. " "Skipping.\n"); } /* detach threads (to spin off in background) */ if (s_processClientPeersThread.detach() == NC_FAILED) { eprintf("Fatal error: Cannot detach " "processClientPeersThread.\n"); exit(1); } if (s_processServentPeersThread.detach() == NC_FAILED) { eprintf("Fatal error: Cannot detach " "processServentPeersThread.\n"); exit(1); } if (s_clientListenerThread.detach() == NC_FAILED) { eprintf("Fatal error: Cannot detach clientListenerThread.\n"); stopThreads(); exit(1); } if (s_processUploadsThread.detach() == NC_FAILED) { eprintf("Error: Cannot detach processUploadsThread. " "Skipping.\n"); } } void stopThreads() { /* stop all running client threads */ if (g_nsl) { g_nsl->stopListening(); } s_processClientPeersThreadStop = 0; s_processServentPeersThreadStop = 0; s_processUploadsThreadStop = 0;<|fim▁hole|> sleep for half a second to allow for proper thread cancellation */ ncSleep(500); /* now cancel the threads, if they haven't stopped already */ if (!s_processClientPeersThreadStop) { s_processClientPeersThread.stop(0); } if (!s_processServentPeersThreadStop) { s_processServentPeersThread.stop(0); } if (!s_processUploadsThreadStop) { s_processUploadsThread.stop(0); } s_clientListenerThread.stop(0); /* uninitialize our pointers to the objects we know about */ g_config = (neConfig *)0; g_peerClientManager = (nePeerManager *)0; g_peerServentManager = (nePeerManager *)0; g_peerDownloadManager = (nePeerDownloadManager *)0; g_peerUploadManager = (nePeerUploadManager *)0; delete g_nsl; g_nsl = (ncSocketListener *)0; } }<|fim▁end|>
/*
<|file_name|>SVGClipPainter.cpp<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "core/paint/SVGClipPainter.h" #include "core/dom/ElementTraversal.h" #include "core/layout/svg/LayoutSVGResourceClipper.h" #include "core/layout/svg/SVGResources.h" #include "core/layout/svg/SVGResourcesCache.h" #include "core/paint/LayoutObjectDrawingRecorder.h" #include "core/paint/PaintInfo.h" #include "core/paint/TransformRecorder.h" #include "platform/graphics/paint/ClipPathDisplayItem.h" #include "platform/graphics/paint/CompositingRecorder.h" #include "platform/graphics/paint/DrawingDisplayItem.h" #include "platform/graphics/paint/PaintController.h" namespace blink { namespace { class SVGClipExpansionCycleHelper {<|fim▁hole|>public: SVGClipExpansionCycleHelper(LayoutSVGResourceClipper& clip) : m_clip(clip) { clip.beginClipExpansion(); } ~SVGClipExpansionCycleHelper() { m_clip.endClipExpansion(); } private: LayoutSVGResourceClipper& m_clip; }; } // namespace bool SVGClipPainter::prepareEffect(const LayoutObject& target, const FloatRect& targetBoundingBox, const FloatRect& paintInvalidationRect, GraphicsContext& context, ClipperState& clipperState) { ASSERT(clipperState == ClipperNotApplied); ASSERT_WITH_SECURITY_IMPLICATION(!m_clip.needsLayout()); m_clip.clearInvalidationMask(); if (paintInvalidationRect.isEmpty() || m_clip.hasCycle()) return false; SVGClipExpansionCycleHelper inClipExpansionChange(m_clip); AffineTransform animatedLocalTransform = toSVGClipPathElement(m_clip.element())->calculateAnimatedLocalTransform(); // When drawing a clip for non-SVG elements, the CTM does not include the zoom factor. // In this case, we need to apply the zoom scale explicitly - but only for clips with // userSpaceOnUse units (the zoom is accounted for objectBoundingBox-resolved lengths). 
if (!target.isSVG() && m_clip.clipPathUnits() == SVGUnitTypes::SVG_UNIT_TYPE_USERSPACEONUSE) { ASSERT(m_clip.style()); animatedLocalTransform.scale(m_clip.style()->effectiveZoom()); } // First, try to apply the clip as a clipPath. Path clipPath; if (m_clip.asPath(animatedLocalTransform, targetBoundingBox, clipPath)) { clipperState = ClipperAppliedPath; context.paintController().createAndAppend<BeginClipPathDisplayItem>(target, clipPath); return true; } // Fall back to masking. clipperState = ClipperAppliedMask; // Begin compositing the clip mask. CompositingRecorder::beginCompositing(context, target, SkXfermode::kSrcOver_Mode, 1, &paintInvalidationRect); { TransformRecorder recorder(context, target, animatedLocalTransform); // clipPath can also be clipped by another clipPath. SVGResources* resources = SVGResourcesCache::cachedResourcesForLayoutObject(&m_clip); LayoutSVGResourceClipper* clipPathClipper = resources ? resources->clipper() : 0; ClipperState clipPathClipperState = ClipperNotApplied; if (clipPathClipper && !SVGClipPainter(*clipPathClipper).prepareEffect(m_clip, targetBoundingBox, paintInvalidationRect, context, clipPathClipperState)) { // End the clip mask's compositor. CompositingRecorder::endCompositing(context, target); return false; } drawClipMaskContent(context, target, targetBoundingBox, paintInvalidationRect); if (clipPathClipper) SVGClipPainter(*clipPathClipper).finishEffect(m_clip, context, clipPathClipperState); } // Masked content layer start. CompositingRecorder::beginCompositing(context, target, SkXfermode::kSrcIn_Mode, 1, &paintInvalidationRect); return true; } void SVGClipPainter::finishEffect(const LayoutObject& target, GraphicsContext& context, ClipperState& clipperState) { switch (clipperState) { case ClipperAppliedPath: // Path-only clipping, no layers to restore but we need to emit an end to the clip path display item. 
context.paintController().endItem<EndClipPathDisplayItem>(target); break; case ClipperAppliedMask: // Transfer content -> clip mask (SrcIn) CompositingRecorder::endCompositing(context, target); // Transfer clip mask -> bg (SrcOver) CompositingRecorder::endCompositing(context, target); break; default: ASSERT_NOT_REACHED(); } } void SVGClipPainter::drawClipMaskContent(GraphicsContext& context, const LayoutObject& layoutObject, const FloatRect& targetBoundingBox, const FloatRect& targetPaintInvalidationRect) { AffineTransform contentTransformation; RefPtr<const SkPicture> clipContentPicture = m_clip.createContentPicture(contentTransformation, targetBoundingBox, context); if (LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(context, layoutObject, DisplayItem::SVGClip)) return; LayoutObjectDrawingRecorder drawingRecorder(context, layoutObject, DisplayItem::SVGClip, targetPaintInvalidationRect); context.save(); context.concatCTM(contentTransformation); context.drawPicture(clipContentPicture.get()); context.restore(); } } // namespace blink<|fim▁end|>
<|file_name|>apps.py<|end_file_name|><|fim▁begin|><|fim▁hole|> name = 'traveller'<|fim▁end|>
from django.apps import AppConfig class TravellerConfig(AppConfig):
<|file_name|>util.ts<|end_file_name|><|fim▁begin|>export interface Dictionary<TValue> { [key : string] : TValue; } export function camelCase(s : string) : string { switch (s.length) { case 0: return ''; case 1: return s.toLowerCase(); default: return s.charAt(0).toLowerCase() + s.substr(1); } } export function arrayToDictionary<T>(array : T[], key : any) : Dictionary<T> { var d : Dictionary<T> = {}; var keyFn : (t : T) => string; if (typeof key === 'string') { keyFn = function (t : T) : string { return t[key]; } } else { keyFn = key; } <|fim▁hole|>}<|fim▁end|>
array.forEach(item => d[keyFn(item)] = item); return d;
<|file_name|>string_repalce_by_resub.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*- <|fim▁hole|> ''' 如何调整字符串的文本格式 ''' # 将日志文件中的日期格式转变为美国日期格式mm/dd/yyyy # 使用正则表达式模块中的sub函数进行替换字符串 with open("./log.log","r") as f: for line in islice(f,0,None): #print sub("(\d{4})-(\d{2})-(\d{2})",r"\2/\3/\1",line) # 可以为每个匹配组起一个别名 print sub("(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})",r"\g<month>/\g<day>/\g<>",line)<|fim▁end|>
from re import sub from itertools import islice
<|file_name|>0006_auto_20160410_1530.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9 on 2016-04-10 10:00 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('Course', '0005_coursegroup'), ] operations = [ migrations.AlterField( model_name='department', name='hod',<|fim▁hole|> ), ]<|fim▁end|>
field=models.ForeignKey(default=False, on_delete=django.db.models.deletion.CASCADE, related_name='head_of_dept', to='Profiler.Faculty'),
<|file_name|>imports-blacklist.go<|end_file_name|><|fim▁begin|>package rule import ( "fmt" "github.com/mgechev/revive/lint" ) // ImportsBlacklistRule lints given else constructs. type ImportsBlacklistRule struct { blacklist map[string]bool } // Apply applies the rule to given file. func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { var failures []lint.Failure if file.IsTest() { return failures // skip, test file }<|fim▁hole|> if r.blacklist == nil { r.blacklist = make(map[string]bool, len(arguments)) for _, arg := range arguments { argStr, ok := arg.(string) if !ok { panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg)) } // we add quotes if not present, because when parsed, the value of the AST node, will be quoted if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' { argStr = fmt.Sprintf(`%q`, argStr) } r.blacklist[argStr] = true } } for _, is := range file.AST.Imports { path := is.Path if path != nil && r.blacklist[path.Value] { failures = append(failures, lint.Failure{ Confidence: 1, Failure: "should not use the following blacklisted import: " + path.Value, Node: is, Category: "imports", }) } } return failures } // Name returns the rule name. func (r *ImportsBlacklistRule) Name() string { return "imports-blacklist" }<|fim▁end|>
<|file_name|>diff_physics.C<|end_file_name|><|fim▁begin|>// The libMesh Finite Element Library. // Copyright (C) 2002-2020 Benjamin S. Kirk, John W. Peterson, Roy H. Stogner // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #include "libmesh/diff_context.h" #include "libmesh/diff_physics.h" #include "libmesh/system.h" namespace libMesh { DifferentiablePhysics::~DifferentiablePhysics() { DifferentiablePhysics::clear_physics(); } void DifferentiablePhysics::clear_physics () { _time_evolving.resize(0); }<|fim▁hole|> void DifferentiablePhysics::init_physics (const System & sys) { // give us flags for every variable that might be time evolving _time_evolving.resize(sys.n_vars(), false); } void DifferentiablePhysics::time_evolving (unsigned int var, unsigned int order) { if (order != 1 && order != 2) libmesh_error_msg("Input order must be 1 or 2!"); if (_time_evolving.size() <= var) _time_evolving.resize(var+1, 0); _time_evolving[var] = order; if (order == 1) _first_order_vars.insert(var); else _second_order_vars.insert(var); } bool DifferentiablePhysics::nonlocal_mass_residual(bool request_jacobian, DiffContext & c) { FEMContext & context = cast_ref<FEMContext &>(c); for (auto var : IntRange<unsigned int>(0, context.n_vars())) { if (!this->is_time_evolving(var)) continue; if 
(c.get_system().variable(var).type().family != SCALAR) continue; const std::vector<dof_id_type> & dof_indices = context.get_dof_indices(var); const unsigned int n_dofs = cast_int<unsigned int> (dof_indices.size()); DenseSubVector<Number> & Fs = context.get_elem_residual(var); DenseSubMatrix<Number> & Kss = context.get_elem_jacobian( var, var ); const libMesh::DenseSubVector<libMesh::Number> & Us = context.get_elem_solution(var); for (unsigned int i=0; i != n_dofs; ++i) { Fs(i) -= Us(i); if (request_jacobian) Kss(i,i) -= context.elem_solution_rate_derivative; } } return request_jacobian; } bool DifferentiablePhysics::_eulerian_time_deriv (bool request_jacobian, DiffContext & context) { // For any problem we need time derivative terms request_jacobian = this->element_time_derivative(request_jacobian, context); // For a moving mesh problem we may need the pseudoconvection term too return this->eulerian_residual(request_jacobian, context) && request_jacobian; } } // namespace libMesh<|fim▁end|>
<|file_name|>0208_create_role_executive.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2017-11-22 07:11 from __future__ import unicode_literals from django.core.management.sql import emit_post_migrate_signal from django.db import migrations def add_executive_group(apps, schema_editor): # create group db_alias = schema_editor.connection.alias emit_post_migrate_signal(1, False, db_alias)<|fim▁hole|> Permission = apps.get_model('auth', 'Permission') executive_group, created = Group.objects.get_or_create(name='executive') if created: # Learning unit can_access_learningunit = Permission.objects.get(codename='can_access_learningunit') executive_group.permissions.add(can_access_learningunit) class Migration(migrations.Migration): dependencies = [ ('base', '0207_auto_20171220_1035'), ] operations = [ migrations.RunPython(add_executive_group, elidable=True), ]<|fim▁end|>
Group = apps.get_model('auth', 'Group')
<|file_name|>updater.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ kintone上のデータを、バックアップを取ってから一括アップデートするスクリプト オプション指定なし→ローカルキャッシュを用いてDry Run -r(--real) →最新のデータを取得してバックアップし、更新 -f(--from-backup) →-rで問題が起きたとき用。バックアップを指定して、そのデータを元に更新する。 """ from cache import get_all, get_app import time import argparse from render import pretty def concat_lines(x, y): if isinstance(y, str): y = y.decode('utf-8') return x.rstrip('\n') + '\n' + y def add_a_tag(tags, a_tag, length=None): assert not '\n' in a_tag if length: assert len(a_tag[1:].split(a_tag[0])) == length tags.append(a_tag) def rows_to_csv(rows): import cStringIO import unicodecsv as csv f = cStringIO.StringIO() csv.writer(f, encoding='utf-8').writerows(rows) return f.getvalue() def convert(xs, args): "add new creators from 2015_creators_170113.csv" import unicodecsv as csv name2x = dict((x.name, x) for x in xs) to_update = [] to_add = [] rd = csv.reader(file('2015_creators_170113.csv'), encoding='utf-8') for row in rd: year = row[2] kubun = row[3] sc = row[4]<|fim▁hole|> theme = row[5] name = row[6] pm = row[9] affil1 = row[7] affil2 = row[8] if name in name2x: x = name2x[name] to_update.append(x) else: from mymodel import Person x = Person() x.name = name to_add.append(x) tags = [ ["未踏採択", year, kubun, sc, theme, pm], ["所属", affil1, "{}年時点".format(year), affil2]] tags = rows_to_csv(tags) x.tags = concat_lines(x.tags, tags) print name print tags return to_add, to_update def main(): parser = argparse.ArgumentParser() parser.add_argument( '--real', '-r', action='store_true', help='read from kintone and write to kintone') parser.add_argument( '--from-backup', '-f', action='store', help='read from backup and write to kintone') parser.add_argument( '--converter', '-c', action='store', help='use specific converter') parser.add_argument( '--infile', '-i', action='store', help='input file') args = parser.parse_args() if args.real: dumpdir = time.strftime('backup_%m%d_%H%M') xs = get_all(cache=False, name=dumpdir) elif 
args.from_backup: xs = get_all(cache=True, name=args.from_backup) else: xs = get_all(cache=True) if not args.converter: to_add, to_update = convert(xs, args) else: import imp info = imp.find_module('converter/' + args.converter) m = imp.load_module('m', *info) to_add, to_update = m.convert(xs, args) print "{} items to update, {} items to add".format(len(to_update), len(to_add)) # when recover from backup we need to ignore revision if args.from_backup: for x in xs: x.revision = -1 # ignore revision if args.real or args.from_backup: app = get_app() result = app.batch_create(to_add) assert result.ok for i in range(0, len(to_update), 100): print i, to_update[i].name result = app.batch_update(to_update[i:i + 100]) assert result.ok else: # for debug: Run this script with `ipython -i` globals()['xs'] = xs if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import datetime import time import calendar from django.core.urlresolvers import reverse from django.http import Http404 from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.http import Http404 from django.shortcuts import render, redirect from rolepermissions.decorators import has_role_decorator from models import McEvent from forms import McEventForm # Create your views here. month_names = 'January February March April May June July August September October November December'.split() #Default calendar view @login_required def index(request): cur_year, cur_month = time.localtime()[:2] events = McEvent.objects.all() context = { 'events': events, } return redirect('/calendar/%s/%s/' % (cur_year, cur_month)) @login_required def event_list(request): #only want events that occur this month; will change later cur_year, cur_month, cur_day = time.localtime()[:3] events = McEvent.objects.filter(start_date__year=cur_year, start_date__month=cur_month) context = { 'events': events, } return render(request, 'mccalendar/event_list.html', context) @login_required def month(request, year=None, month=None, change=None): year = int(year) if year else time.localtime()[0] month = int(month) if month else time.localtime()[1] #apply next/previous if applicable if change in ('next', 'prev'): if change=='next': month = month+1 if month == 13: month = 1 year += 1 elif change=='prev': month = month-1 if month == 0: month = 12 year -= 1 return redirect('/calendar/%s/%s/' % (year, month)) cur_year, cur_month, cur_day = time.localtime()[:3] cal = calendar.Calendar() cal.setfirstweekday(calendar.SUNDAY) month_days = cal.itermonthdays(year, month) events = [] lst=[[]] week = 0 for day in month_days: entries = current = False if day: events = McEvent.objects.filter( start_date__lte=datetime.date(year, month, day), end_date__gte=datetime.date(year, month, day)) if day == cur_day and 
year == cur_year and month == cur_month: current = True lst[week].append((day, events, current)) if len(lst[week]) == 7: lst.append([]) week += 1 context = { 'year': year, 'month': month, 'day': day, 'month_name': month_names[month-1], 'month_days': lst, } return render(request, 'mccalendar/month.html', context) @login_required def day(request, year=None, month=None, day=None): year = int(year) if year else time.localtime()[0] month = int(month) if month else time.localtime()[1] events = McEvent.objects.filter( start_date__lte=datetime.date(year, month, day), end_date__gte=datetime.date(year, month, day)) context = { 'year': year, 'month': month, 'day': day, 'month_name': month_names[month-1], 'events': events, } return render(request, 'mccalendar/day.html', context) @login_required @has_role_decorator('staff') def create_event(request): if request.method == 'POST': event = McEvent(owner=request.user.mcuser) form = McEventForm(request.POST, instance=event) if (form.is_valid()): form.save() return redirect(reverse('mccalendar:event_detail', args=[event.id])) else: form = McEventForm(request.POST, instance=event) else: form = McEventForm() context = { 'form':form, 'form_url': reverse('mccalendar:create_event') } return render(request, 'mccalendar/edit_event.html', context) @login_required @has_role_decorator('staff') def edit_event(request, event_id=None): try: event = McEvent.objects.get(id=event_id) except McEvent.DoesNotExist: return redirect('mccalendar:create_event') if request.method == 'POST': form = McEventForm(request.POST, instance=event) if (form.is_valid()): form.save() return redirect(reverse('mccalendar:event_detail', args=[event.id])) else:<|fim▁hole|> form = McEventForm(request.POST, instance=event) else: form = McEventForm(instance=event) context = { 'form':form, 'form_url': reverse('mccalendar:edit_event', args=[event_id]) } return render(request, 'mccalendar/edit_event.html', context) @login_required def event_detail(request, event_id): try: event 
= McEvent.objects.get(id=event_id) except McEvent.DoesNotExist: raise Http404('Event %s does not exist' %event_id) else: context = { 'event': event, } return render(request, 'mccalendar/event_detail.html', context)<|fim▁end|>
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import math def get_direction(src, target): diff = map(lambda a, b: a - b, target, src) mag = math.sqrt(sum(map(lambda a: a ** 2, diff))) if mag == 0: return [0, 0] return map(lambda a: a / mag, diff) def distance(pos1, pos2): return math.sqrt(sum(map(lambda a: a ** 2, map(lambda a, b: a - b, pos1, pos2)))) def magnitude(vector): return math.sqrt(sum(map(lambda a: a ** 2, vector))) class Drawable(object): def draw(self, surface, camera=(0, 0)):<|fim▁hole|> coordinates = (self.rect.left - camera[0], self.rect.top - camera[1]) surface.blit(self.image, coordinates)<|fim▁end|>