prompt      large_string   lengths 70 – 991k
completion  large_string   lengths 0 – 1.02k
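The two columns above describe a fill-in-the-middle (FIM) code dataset: each row pairs a prompt (a source file wrapped in <|file_name|>…<|end_file_name|> and <|fim▁begin|>…<|fim▁end|> tokens, with a <|fim▁hole|> gap somewhere inside) with the completion that fills the gap. As a minimal sketch of how such rows could be inspected, assuming they are published as a Hugging Face dataset (the path "user/fim-code-pairs" below is a hypothetical placeholder):

    # Minimal sketch: stream prompt/completion rows of a FIM dataset.
    # The dataset path "user/fim-code-pairs" is a hypothetical placeholder.
    from datasets import load_dataset

    ds = load_dataset("user/fim-code-pairs", split="train", streaming=True)

    for i, row in enumerate(ds):
        print(f"--- row {i} ---")
        print("prompt length:    ", len(row["prompt"]))      # 70 .. ~991k chars
        print("completion length:", len(row["completion"]))  # 0 .. ~1.02k chars
        if i == 2:
            break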
<|file_name|>WebStream.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012-2014 Netherlands eScience Center. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * You may obtain a copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For the full license, see: LICENSE.txt (located in the root folder of this distribution). * --- */ // source: package nl.esciencecenter.ptk.web; /** * Interface for Managed HTTP Streams. */ public interface WebStream { public boolean autoClose(); //public boolean isChunked(); }<|fim▁end|>
* * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS,
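Each row reads back into a complete file by splicing the completion into the gap: the <|fim▁hole|> marker in the prompt above is exactly where the completion row belongs, the "Unless required by applicable law" boilerplate that precedes the WITHOUT WARRANTIES clause. A minimal sketch of that reassembly (only the control-token names are taken from the rows themselves; the helper is illustrative):

    # Rebuild a source file from one FIM row: splice the completion into the
    # hole, then strip the file-name wrapper and the begin/end control tokens.
    import re

    HOLE = "<|fim\u2581hole|>"  # "\u2581" is the LOWER ONE EIGHTH BLOCK used in the markers

    def reassemble(prompt: str, completion: str) -> str:
        body = prompt.replace(HOLE, completion, 1)
        body = re.sub(r"<\|file_name\|>.*?<\|end_file_name\|>", "", body)
        return body.replace("<|fim\u2581begin|>", "").replace("<|fim\u2581end|>", "")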
<|file_name|>LogsResource.java<|end_file_name|><|fim▁begin|>package com.falcon.cms.web.rest; import com.falcon.cms.web.rest.vm.LoggerVM; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.LoggerContext; import com.codahale.metrics.annotation.Timed; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.*; import java.util.List; import java.util.stream.Collectors;<|fim▁hole|>/** * Controller for viewing and managing Log Level at runtime. */ @RestController @RequestMapping("/management") public class LogsResource { @GetMapping("/logs") @Timed public List<LoggerVM> getList() { LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory(); return context.getLoggerList() .stream() .map(LoggerVM::new) .collect(Collectors.toList()); } @PutMapping("/logs") @ResponseStatus(HttpStatus.NO_CONTENT) @Timed public void changeLevel(@RequestBody LoggerVM jsonLogger) { LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory(); context.getLogger(jsonLogger.getName()).setLevel(Level.valueOf(jsonLogger.getLevel())); } }<|fim▁end|>
<|file_name|>organization.rs<|end_file_name|><|fim▁begin|>//! WARNING: This file is generated, derived from table bazaar.organization, DO NOT EDIT use chrono::datetime::DateTime; use chrono::offset::utc::UTC; use gen::column; use gen::schema; use gen::table; use rustc_serialize::json::Json; use rustc_serialize::json::ToJson; use rustorm::dao::Dao; use rustorm::dao::IsDao; use rustorm::dao::Type; use rustorm::dao::Value; use rustorm::query::Operand; use rustorm::table::Column; use rustorm::table::Foreign; use rustorm::table::IsTable; use rustorm::table::Table; use uuid::Uuid; #[derive(RustcEncodable)] #[derive(Debug, Clone)] pub struct Organization { /// primary /// default: 'uuid_generate_v4()' /// not nullable /// --inherited-- /// db data type: uuid pub organization_id: Uuid, /// db data type: uuid pub address_id: Option<Uuid>, /// db data type: character varying pub landmark: Option<String>, /// db data type: uuid pub parent_organization_id: Option<Uuid>, /// default: 'true' /// not nullable /// --inherited-- /// db data type: boolean pub active: bool, /// --inherited-- /// db data type: uuid pub client_id: Option<Uuid>, /// default: 'now()' /// not nullable /// --inherited-- /// db data type: timestamp with time zone pub created: DateTime<UTC>, /// --inherited-- /// db data type: uuid pub created_by: Option<Uuid>, /// --inherited-- /// db data type: character varying pub description: Option<String>, /// --inherited-- /// db data type: text pub help: Option<String>, /// --inherited-- /// db data type: character varying pub name: Option<String>, /// --inherited-- /// db data type: double precision pub priority: Option<f64>, /// default: 'now()' /// not nullable /// --inherited-- /// db data type: timestamp with time zone pub updated: DateTime<UTC>, /// --inherited-- /// db data type: uuid pub updated_by: Option<Uuid>, /// has one, self referential pub parent: Option<Box<Organization>>, /// has many pub organization: Vec<Organization>, } impl IsDao for Organization { fn from_dao(dao: &Dao) -> Self { Organization { organization_id: dao.get(column::organization_id), client_id: dao.get_opt(column::client_id), created: dao.get(column::created), created_by: dao.get_opt(column::created_by), updated: dao.get(column::updated), updated_by: dao.get_opt(column::updated_by), priority: dao.get_opt(column::priority), name: dao.get_opt(column::name), description: dao.get_opt(column::description), help: dao.get_opt(column::help), active: dao.get(column::active), parent_organization_id: dao.get_opt(column::parent_organization_id), address_id: dao.get_opt(column::address_id), landmark: dao.get_opt(column::landmark), parent: None, organization: vec![], } } fn to_dao(&self) -> Dao { let mut dao = Dao::new(); dao.set(column::organization_id, &self.organization_id); match self.client_id { Some(ref _value) => dao.set(column::client_id, _value), None => dao.set_null(column::client_id) } dao.set(column::created, &self.created); match self.created_by { Some(ref _value) => dao.set(column::created_by, _value), None => dao.set_null(column::created_by) } dao.set(column::updated, &self.updated); match self.updated_by { Some(ref _value) => dao.set(column::updated_by, _value), None => dao.set_null(column::updated_by) } match self.priority { Some(ref _value) => dao.set(column::priority, _value), None => dao.set_null(column::priority) } match self.name { Some(ref _value) => dao.set(column::name, _value), None => dao.set_null(column::name) } match self.description { Some(ref _value) => dao.set(column::description, _value), 
None => dao.set_null(column::description) } match self.help { Some(ref _value) => dao.set(column::help, _value), None => dao.set_null(column::help) } dao.set(column::active, &self.active); match self.parent_organization_id { Some(ref _value) => dao.set(column::parent_organization_id, _value), None => dao.set_null(column::parent_organization_id) } match self.address_id { Some(ref _value) => dao.set(column::address_id, _value), None => dao.set_null(column::address_id) } match self.landmark { Some(ref _value) => dao.set(column::landmark, _value), None => dao.set_null(column::landmark) } dao } } impl ToJson for Organization { fn to_json(&self) -> Json { self.to_dao().to_json() } } impl Default for Organization { fn default() -> Self { Organization{ organization_id: Default::default(), client_id: Default::default(), created: UTC::now(), created_by: Default::default(), updated: UTC::now(), updated_by: Default::default(), priority: Default::default(), name: Default::default(), description: Default::default(), help: Default::default(), active: Default::default(), parent_organization_id: Default::default(), address_id: Default::default(), landmark: Default::default(), parent: Default::default(), organization: Default::default(), } } } impl IsTable for Organization { fn table() -> Table { Table { schema: Some(schema::bazaar.to_owned()), name: table::organization.to_owned(), parent_table: Some(table::record.to_owned()), sub_table: vec![], comment: None, columns: vec![ organization_id(), client_id(), created(), created_by(), updated(), updated_by(), priority(), name(), description(), help(), active(), parent_organization_id(), address_id(), landmark(), ], is_view: false, } } } // Generated columns for easier development of dynamic queries without sacrificing wrong spelling of column names pub fn organization_id()->Column{ Column { table: Some(table::organization.to_owned()), name: column::organization_id.to_owned(), data_type: Type::Uuid, db_data_type: "uuid".to_owned(), is_primary: true, is_unique: false, not_null: true, is_inherited: true, default: Some(Operand::Value(Value::String("'uuid_generate_v4()'".to_owned()))), comment: None, foreign: None, } } pub fn client_id()->Column{ Column { table: Some(table::organization.to_owned()), name: column::client_id.to_owned(), data_type: Type::Uuid, db_data_type: "uuid".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true,<|fim▁hole|> foreign: None, } } pub fn created()->Column{ Column { table: Some(table::organization.to_owned()), name: column::created.to_owned(), data_type: Type::DateTime, db_data_type: "timestamp with time zone".to_owned(), is_primary: false, is_unique: false, not_null: true, is_inherited: true, default: Some(Operand::Value(Value::String("'now()'".to_owned()))), comment: None, foreign: None, } } pub fn created_by()->Column{ Column { table: Some(table::organization.to_owned()), name: column::created_by.to_owned(), data_type: Type::Uuid, db_data_type: "uuid".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true, default: None, comment: None, foreign: None, } } pub fn updated()->Column{ Column { table: Some(table::organization.to_owned()), name: column::updated.to_owned(), data_type: Type::DateTime, db_data_type: "timestamp with time zone".to_owned(), is_primary: false, is_unique: false, not_null: true, is_inherited: true, default: Some(Operand::Value(Value::String("'now()'".to_owned()))), comment: None, foreign: None, } } pub fn updated_by()->Column{ Column { table: 
Some(table::organization.to_owned()), name: column::updated_by.to_owned(), data_type: Type::Uuid, db_data_type: "uuid".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true, default: None, comment: None, foreign: None, } } pub fn priority()->Column{ Column { table: Some(table::organization.to_owned()), name: column::priority.to_owned(), data_type: Type::F64, db_data_type: "double precision".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true, default: None, comment: None, foreign: None, } } pub fn name()->Column{ Column { table: Some(table::organization.to_owned()), name: column::name.to_owned(), data_type: Type::String, db_data_type: "character varying".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true, default: None, comment: None, foreign: None, } } pub fn description()->Column{ Column { table: Some(table::organization.to_owned()), name: column::description.to_owned(), data_type: Type::String, db_data_type: "character varying".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true, default: None, comment: None, foreign: None, } } pub fn help()->Column{ Column { table: Some(table::organization.to_owned()), name: column::help.to_owned(), data_type: Type::String, db_data_type: "text".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: true, default: None, comment: None, foreign: None, } } pub fn active()->Column{ Column { table: Some(table::organization.to_owned()), name: column::active.to_owned(), data_type: Type::Bool, db_data_type: "boolean".to_owned(), is_primary: false, is_unique: false, not_null: true, is_inherited: true, default: Some(Operand::Value(Value::String("'true'".to_owned()))), comment: None, foreign: None, } } pub fn parent_organization_id()->Column{ Column { table: Some(table::organization.to_owned()), name: column::parent_organization_id.to_owned(), data_type: Type::Uuid, db_data_type: "uuid".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: false, default: None, comment: None, foreign: Some( Foreign { schema: Some("bazaar".to_owned()), table: "organization".to_owned(), column: "organization_id".to_owned(), }), } } pub fn address_id()->Column{ Column { table: Some(table::organization.to_owned()), name: column::address_id.to_owned(), data_type: Type::Uuid, db_data_type: "uuid".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: false, default: None, comment: None, foreign: None, } } pub fn landmark()->Column{ Column { table: Some(table::organization.to_owned()), name: column::landmark.to_owned(), data_type: Type::String, db_data_type: "character varying".to_owned(), is_primary: false, is_unique: false, not_null: false, is_inherited: false, default: None, comment: None, foreign: None, } }<|fim▁end|>
default: None, comment: None,
<|file_name|>model.rs<|end_file_name|><|fim▁begin|>use std::borrow::Cow; use std::env; use std::mem; use std::process::Command; use std::rc::Rc; use std::sync::Arc; use std::time::{Duration, Instant}; use chrono::Duration as TimerDuration; use defer_drop::DeferDrop; use regex::Regex; use timer::{Guard as TimerGuard, Timer}; use tuikit::prelude::{Event as TermEvent, *}; use crate::engine::factory::{AndOrEngineFactory, ExactOrFuzzyEngineFactory, RegexEngineFactory}; use crate::event::{Event, EventHandler, EventReceiver, EventSender}; use crate::global::current_run_num; use crate::header::Header; use crate::input::parse_action_arg; use crate::item::{parse_criteria, ItemPool, MatchedItem, RankBuilder, RankCriteria}; use crate::matcher::{Matcher, MatcherControl}; use crate::options::SkimOptions; use crate::output::SkimOutput; use crate::previewer::Previewer; use crate::query::Query; use crate::reader::{Reader, ReaderControl}; use crate::selection::Selection; use crate::spinlock::SpinLock; use crate::theme::ColorTheme; use crate::util::{depends_on_items, inject_command, margin_string_to_size, parse_margin, InjectContext}; use crate::{FuzzyAlgorithm, MatchEngineFactory, MatchRange, SkimItem}; use std::cmp::max; const REFRESH_DURATION: i64 = 100; const SPINNER_DURATION: u32 = 200; // const SPINNERS: [char; 8] = ['-', '\\', '|', '/', '-', '\\', '|', '/']; const SPINNERS_INLINE: [char; 2] = ['-', '<']; const SPINNERS_UNICODE: [char; 10] = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];<|fim▁hole|> lazy_static! { static ref RE_FIELDS: Regex = Regex::new(r"\\?(\{-?[0-9.,q]*?})").unwrap(); static ref RE_PREVIEW_OFFSET: Regex = Regex::new(r"^\+([0-9]+|\{-?[0-9]+\})(-[0-9]+|-/[1-9][0-9]*)?$").unwrap(); static ref DEFAULT_CRITERION: Vec<RankCriteria> = vec![RankCriteria::Score, RankCriteria::Begin, RankCriteria::End,]; } pub struct Model { reader: Reader, query: Query, selection: Selection, num_options: usize, select1: bool, exit0: bool, sync: bool, use_regex: bool, regex_matcher: Matcher, matcher: Matcher, term: Arc<Term>, item_pool: Arc<DeferDrop<ItemPool>>, rx: EventReceiver, tx: EventSender, fuzzy_algorithm: FuzzyAlgorithm, reader_timer: Instant, matcher_timer: Instant, reader_control: Option<ReaderControl>, matcher_control: Option<MatcherControl>, header: Header, preview_hidden: bool, previewer: Option<Previewer>, preview_direction: Direction, preview_size: Size, margin_top: Size, margin_right: Size, margin_bottom: Size, margin_left: Size, layout: String, delimiter: Regex, inline_info: bool, no_clear_if_empty: bool, theme: Arc<ColorTheme>, // timer thread for scheduled events timer: Timer, hb_timer_guard: Option<TimerGuard>, // for AppendAndSelect action rank_builder: Arc<RankBuilder>, } impl Model { pub fn new(rx: EventReceiver, tx: EventSender, reader: Reader, term: Arc<Term>, options: &SkimOptions) -> Self { let default_command = match env::var("SKIM_DEFAULT_COMMAND").as_ref().map(String::as_ref) { Ok("") | Err(_) => "find .".to_owned(), Ok(val) => val.to_owned(), }; let theme = Arc::new(ColorTheme::init_from_options(options)); let query = Query::from_options(&options) .replace_base_cmd_if_not_set(&default_command) .theme(theme.clone()) .build(); let criterion = if let Some(ref tie_breaker) = options.tiebreak { tie_breaker.split(',').filter_map(parse_criteria).collect() } else { DEFAULT_CRITERION.clone() }; let rank_builder = Arc::new(RankBuilder::new(criterion)); let selection = Selection::with_options(options).theme(theme.clone()); let regex_engine: Rc<dyn MatchEngineFactory> = 
Rc::new(RegexEngineFactory::builder().rank_builder(rank_builder.clone()).build()); let regex_matcher = Matcher::builder(regex_engine).build(); let matcher = if let Some(engine_factory) = options.engine_factory.as_ref() { // use provided engine Matcher::builder(engine_factory.clone()).case(options.case).build() } else { let fuzzy_engine_factory: Rc<dyn MatchEngineFactory> = Rc::new(AndOrEngineFactory::new( ExactOrFuzzyEngineFactory::builder() .exact_mode(options.exact) .rank_builder(rank_builder.clone()) .build(), )); Matcher::builder(fuzzy_engine_factory).case(options.case).build() }; let item_pool = Arc::new(DeferDrop::new(ItemPool::new().lines_to_reserve(options.header_lines))); let header = Header::empty() .with_options(options) .item_pool(item_pool.clone()) .theme(theme.clone()); let margins = options .margin .map(parse_margin) .expect("option margin is should be specified (by default)"); let (margin_top, margin_right, margin_bottom, margin_left) = margins; let mut ret = Model { reader, query, selection, num_options: 0, select1: false, exit0: false, sync: false, use_regex: options.regex, regex_matcher, matcher, term, item_pool, rx, tx, reader_timer: Instant::now(), matcher_timer: Instant::now(), reader_control: None, matcher_control: None, fuzzy_algorithm: FuzzyAlgorithm::default(), header, preview_hidden: true, previewer: None, preview_direction: Direction::Right, preview_size: Size::Default, margin_top, margin_right, margin_bottom, margin_left, layout: "default".to_string(), delimiter: Regex::new(DELIMITER_STR).unwrap(), inline_info: false, no_clear_if_empty: false, theme, timer: Timer::new(), hb_timer_guard: None, rank_builder, }; ret.parse_options(options); ret } fn parse_options(&mut self, options: &SkimOptions) { if let Some(delimiter) = options.delimiter { self.delimiter = Regex::new(delimiter).unwrap_or_else(|_| Regex::new(DELIMITER_STR).unwrap()); } self.layout = options.layout.to_string(); if options.inline_info { self.inline_info = true; } if options.regex { self.use_regex = true; } self.fuzzy_algorithm = options.algorithm; // preview related let (preview_direction, preview_size, preview_wrap, preview_shown) = options .preview_window .map(Self::parse_preview) .expect("option 'preview-window' should be set (by default)"); self.preview_direction = preview_direction; self.preview_size = preview_size; self.preview_hidden = !preview_shown; if let Some(preview_cmd) = options.preview { let tx = Arc::new(SpinLock::new(self.tx.clone())); self.previewer = Some( Previewer::new(Some(preview_cmd.to_string()), move || { let _ = tx.lock().send((Key::Null, Event::EvHeartBeat)); }) .wrap(preview_wrap) .delimiter(self.delimiter.clone()) .preview_offset( options .preview_window .map(Self::parse_preview_offset) .unwrap_or_else(|| "".to_string()), ), ); } self.select1 = options.select1; self.exit0 = options.exit0; self.sync = options.sync; self.no_clear_if_empty = options.no_clear_if_empty; } // -> (direction, size, wrap, shown) fn parse_preview(preview_option: &str) -> (Direction, Size, bool, bool) { let options = preview_option.split(':').collect::<Vec<&str>>(); let mut direction = Direction::Right; let mut shown = true; let mut wrap = false; let mut size = Size::Percent(50); for option in options { // mistake if option.is_empty() { continue; } let first_char = option.chars().next().unwrap_or('A'); // raw string if first_char.is_digit(10) { size = margin_string_to_size(option); } else { match option.to_uppercase().as_str() { "UP" => direction = Direction::Up, "DOWN" => direction = 
Direction::Down, "LEFT" => direction = Direction::Left, "RIGHT" => direction = Direction::Right, "HIDDEN" => shown = false, "WRAP" => wrap = true, _ => {} } } } (direction, size, wrap, shown) } // -> string fn parse_preview_offset(preview_window: &str) -> String { for token in preview_window.split(':') { if RE_PREVIEW_OFFSET.is_match(token) { return token.to_string(); } } "".to_string() } fn act_heart_beat(&mut self, env: &mut ModelEnv) { // save the processed items let matcher_stopped = self .matcher_control .as_ref() .map(|ctrl| ctrl.stopped()) .unwrap_or(false); if matcher_stopped { let reader_stopped = self.reader_control.as_ref().map(ReaderControl::is_done).unwrap_or(true); let ctrl = self.matcher_control.take().unwrap(); let lock = ctrl.into_items(); let mut items = lock.lock(); let matched = mem::replace(&mut *items, Vec::new()); match env.clear_selection { ClearStrategy::DontClear => {} ClearStrategy::Clear => { self.selection.clear(); env.clear_selection = ClearStrategy::DontClear; } ClearStrategy::ClearIfNotNull => { if (!self.no_clear_if_empty && reader_stopped) || !matched.is_empty() { self.selection.clear(); env.clear_selection = ClearStrategy::DontClear; } } }; self.num_options += matched.len(); self.selection.append_sorted_items(matched); } let items_consumed = self.item_pool.num_not_taken() == 0; let reader_stopped = self.reader_control.as_ref().map(|c| c.is_done()).unwrap_or(true); let processed = reader_stopped && items_consumed; // run matcher if matcher had been stopped and reader had new items. if !processed && self.matcher_control.is_none() { self.restart_matcher(); } // send next heart beat if matcher is still running or there are items not been processed. if self.matcher_control.is_some() || !processed { let tx = self.tx.clone(); let hb_timer_guard = self.timer .schedule_with_delay(TimerDuration::milliseconds(REFRESH_DURATION), move || { let _ = tx.send((Key::Null, Event::EvHeartBeat)); }); self.hb_timer_guard.replace(hb_timer_guard); } } fn act_rotate_mode(&mut self, env: &mut ModelEnv) { self.use_regex = !self.use_regex; // restart matcher if let Some(ctrl) = self.matcher_control.take() { ctrl.kill(); } env.clear_selection = ClearStrategy::Clear; self.item_pool.reset(); self.num_options = 0; self.restart_matcher(); } fn handle_select1_or_exit0(&mut self) { if !self.select1 && !self.exit0 && !self.sync { return; } let items_consumed = self.item_pool.num_not_taken() == 0; let reader_stopped = self.reader_control.as_ref().map(|c| c.is_done()).unwrap_or(true); let matcher_stopped = self.matcher_control.as_ref().map(|ctrl| ctrl.stopped()).unwrap_or(true); let processed = reader_stopped && items_consumed && matcher_stopped; let num_matched = self.selection.get_num_options(); if processed { if num_matched == 1 && self.select1 { debug!("select-1 triggered, accept"); let _ = self.tx.send((Key::Null, Event::EvActAccept(None))); } else if num_matched == 0 && self.exit0 { debug!("exit-0 triggered, accept"); let _ = self.tx.send((Key::Null, Event::EvActAbort)); } else { // no longer need need to handle select-1, exit-1, sync, etc. 
self.select1 = false; self.exit0 = false; self.sync = false; let _ = self.term.restart(); } } } fn on_cmd_query_change(&mut self, env: &mut ModelEnv) { // stop matcher if let Some(ctrl) = self.reader_control.take() { ctrl.kill(); } if let Some(ctrl) = self.matcher_control.take() { ctrl.kill(); } env.clear_selection = ClearStrategy::ClearIfNotNull; self.item_pool.clear(); self.num_options = 0; // restart reader self.reader_control.replace(self.reader.run(&env.cmd)); self.restart_matcher(); self.reader_timer = Instant::now(); } fn on_query_change(&mut self, env: &mut ModelEnv) { // restart matcher if let Some(ctrl) = self.matcher_control.take() { ctrl.kill(); } env.clear_selection = ClearStrategy::Clear; self.item_pool.reset(); self.num_options = 0; self.restart_matcher(); } fn act_execute(&mut self, cmd: &str) { let item = self.selection.get_current_item(); if depends_on_items(cmd) && item.is_none() { debug!("act_execute: command refers to items and there is no item for now"); debug!("command to execute: [{}]", cmd); return; } let _ = self.term.pause(); self.act_execute_silent(cmd); let _ = self.term.restart(); } fn act_execute_silent(&mut self, cmd: &str) { let current_index = self.selection.get_current_item_idx(); let current_item = self.selection.get_current_item(); if depends_on_items(cmd) && current_item.is_none() { debug!("act_execute_silent: command refers to items and there is no item for now"); debug!("command to execute: [{}]", cmd); return; } let current_selection = current_item .as_ref() .map(|item| item.output()) .unwrap_or_else(|| Cow::Borrowed("")); let query = self.query.get_fz_query(); let cmd_query = self.query.get_cmd_query(); let (indices, selections) = self.selection.get_selected_indices_and_items(); let tmp: Vec<Cow<str>> = selections.iter().map(|item| item.text()).collect(); let selected_texts: Vec<&str> = tmp.iter().map(|cow| cow.as_ref()).collect(); let context = InjectContext { current_index, delimiter: &self.delimiter, current_selection: &current_selection, selections: &selected_texts, indices: &indices, query: &query, cmd_query: &cmd_query, }; let cmd = inject_command(cmd, context).to_string(); let shell = env::var("SHELL").unwrap_or_else(|_| "sh".to_string()); let _ = Command::new(shell).arg("-c").arg(cmd).status(); } #[allow(clippy::trivial_regex)] fn act_append_and_select(&mut self, env: &mut ModelEnv) { let query = self.query.get_fz_query(); if query.is_empty() { return; } let item_len = query.len(); let item: Arc<dyn SkimItem> = Arc::new(query); let new_len = self.item_pool.append(vec![item.clone()]); let item_idx = (max(new_len, 1) - 1) as u32; let matched_item = MatchedItem { item, rank: self.rank_builder.build_rank(0, 0, 0, item_len), matched_range: Some(MatchRange::ByteRange(0, 0)), item_idx, }; self.selection.act_select_matched(current_run_num(), matched_item); self.act_heart_beat(env); } pub fn start(&mut self) -> Option<SkimOutput> { let mut env = ModelEnv { cmd: self.query.get_cmd(), query: self.query.get_fz_query(), cmd_query: self.query.get_cmd_query(), in_query_mode: self.query.in_query_mode(), clear_selection: ClearStrategy::DontClear, }; self.reader_control = Some(self.reader.run(&env.cmd)); // In the event loop, there might need let mut next_event = Some((Key::Null, Event::EvHeartBeat)); loop { let (key, ev) = next_event.take().or_else(|| self.rx.recv().ok())?; debug!("handle event: {:?}", ev); match ev { Event::EvHeartBeat => { // consume following HeartBeat event next_event = self.consume_additional_event(&Event::EvHeartBeat); 
self.act_heart_beat(&mut env); self.handle_select1_or_exit0(); } Event::EvActIfNonMatched(ref arg_str) => { let matched = self.num_options + self.matcher_control.as_ref().map(|c| c.get_num_matched()).unwrap_or(0); if matched == 0 { next_event = parse_action_arg(arg_str).map(|ev| (key, ev)); continue; } } Event::EvActIfQueryEmpty(ref arg_str) => { if env.query.is_empty() { next_event = parse_action_arg(arg_str).map(|ev| (key, ev)); continue; } } Event::EvActIfQueryNotEmpty(ref arg_str) => { if !env.query.is_empty() { next_event = parse_action_arg(arg_str).map(|ev| (key, ev)); continue; } } Event::EvActTogglePreview => { self.preview_hidden = !self.preview_hidden; } Event::EvActRotateMode => { self.act_rotate_mode(&mut env); } Event::EvActAccept(accept_key) => { if let Some(ctrl) = self.reader_control.take() { ctrl.kill(); } if let Some(ctrl) = self.matcher_control.take() { ctrl.kill(); } return Some(SkimOutput { is_abort: false, final_event: Event::EvActAccept(accept_key), final_key: key, query: self.query.get_fz_query(), cmd: self.query.get_cmd_query(), selected_items: self.selection.get_selected_indices_and_items().1, }); } Event::EvActAbort => { if let Some(ctrl) = self.reader_control.take() { ctrl.kill(); } if let Some(ctrl) = self.matcher_control.take() { ctrl.kill(); } return Some(SkimOutput { is_abort: true, final_event: ev.clone(), final_key: key, query: self.query.get_fz_query(), cmd: self.query.get_cmd_query(), selected_items: self.selection.get_selected_indices_and_items().1, }); } Event::EvActDeleteCharEOF => { if env.in_query_mode && env.query.is_empty() || !env.in_query_mode && env.cmd_query.is_empty() { next_event = Some((key, Event::EvActAbort)); continue; } } Event::EvActExecute(ref cmd) => { self.act_execute(cmd); } Event::EvActExecuteSilent(ref cmd) => { self.act_execute_silent(cmd); } Event::EvActAppendAndSelect => { self.act_append_and_select(&mut env); } Event::EvInputKey(key) => { // dispatch key(normally the mouse keys) to sub-widgets self.do_with_widget(|root| { let (width, height) = self.term.term_size().unwrap(); let rect = Rectangle { top: 0, left: 0, width, height, }; let messages = root.on_event(TermEvent::Key(key), rect); for message in messages { let _ = self.tx.send((key, message)); } }) } Event::EvActRefreshCmd => { self.on_cmd_query_change(&mut env); } Event::EvActRefreshPreview => { self.draw_preview(&env, true); } _ => {} } // dispatch events to sub-components self.header.handle(&ev); self.query.handle(&ev); env.cmd_query = self.query.get_cmd_query(); let new_query = self.query.get_fz_query(); let new_cmd = self.query.get_cmd(); // re-run reader & matcher if needed; if new_cmd != env.cmd { env.cmd = new_cmd; self.on_cmd_query_change(&mut env); } else if new_query != env.query { env.query = new_query; self.on_query_change(&mut env); } self.selection.handle(&ev); if let Some(previewer) = self.previewer.as_mut() { previewer.handle(&ev); } self.draw_preview(&env, false); let _ = self.do_with_widget(|root| self.term.draw(&root)); let _ = self.term.present(); } } fn draw_preview(&mut self, env: &ModelEnv, force: bool) { if self.preview_hidden { return; } // re-draw let item_index = self.selection.get_current_item_idx(); let item = self.selection.get_current_item(); if let Some(previewer) = self.previewer.as_mut() { let selections = &self.selection; let get_selected_items = || selections.get_selected_indices_and_items(); previewer.on_item_change( item_index, item, env.query.to_string(), env.cmd_query.to_string(), 
selections.get_num_of_selected_exclude_current(), get_selected_items, force, ); } } fn consume_additional_event(&self, target_event: &Event) -> Option<(Key, Event)> { // consume additional HeartBeat event let mut rx_try_iter = self.rx.try_iter().peekable(); while let Some((_key, ev)) = rx_try_iter.peek() { if *ev == *target_event { let _ = rx_try_iter.next(); } else { break; } } // once the event is peeked, it is removed from the pipe, thus need to be saved. rx_try_iter.next() } fn restart_matcher(&mut self) { self.matcher_timer = Instant::now(); let query = self.query.get_fz_query(); // kill existing matcher if exits if let Some(ctrl) = self.matcher_control.take() { ctrl.kill(); } // if there are new items, move them to item pool let processed = self.reader_control.as_ref().map(|c| c.is_done()).unwrap_or(true); if !processed { // take out new items and put them into items let new_items = self.reader_control.as_ref().map(|c| c.take()).unwrap(); let _ = self.item_pool.append(new_items); }; // send heart beat (so that heartbeat/refresh is triggered) let _ = self.tx.send((Key::Null, Event::EvHeartBeat)); let matcher = if self.use_regex { &self.regex_matcher } else { &self.matcher }; let tx = self.tx.clone(); let new_matcher_control = matcher.run(&query, self.item_pool.clone(), move |_| { // notify refresh immediately let _ = tx.send((Key::Null, Event::EvHeartBeat)); }); self.matcher_control.replace(new_matcher_control); } /// construct the widget tree fn do_with_widget<R, F>(&'_ self, action: F) -> R where F: Fn(Box<dyn Widget<Event> + '_>) -> R, { let total = self.item_pool.len(); let matcher_mode = if self.use_regex { "RE".to_string() } else { "".to_string() }; let matched = self.num_options + self.matcher_control.as_ref().map(|c| c.get_num_matched()).unwrap_or(0); let matcher_running = self.item_pool.num_not_taken() != 0 || matched != self.num_options; let processed = self .matcher_control .as_ref() .map(|c| c.get_num_processed()) .unwrap_or(total); let status = Status { total, matched, processed, matcher_running, multi_selection: self.selection.is_multi_selection(), selected: self.selection.get_num_selected(), current_item_idx: self.selection.get_current_item_idx(), hscroll_offset: self.selection.get_hscroll_offset(), reading: !self.reader_control.as_ref().map(|c| c.is_done()).unwrap_or(true), time_since_read: self.reader_timer.elapsed(), time_since_match: self.matcher_timer.elapsed(), matcher_mode, theme: self.theme.clone(), inline_info: self.inline_info, }; let status_inline = status.clone(); let win_selection = Win::new(&self.selection); let win_query = Win::new(&self.query) .basis(if self.inline_info { 0 } else { 1 }) .grow(0) .shrink(0); let win_status = Win::new(status) .basis(if self.inline_info { 0 } else { 1 }) .grow(0) .shrink(0); let win_header = Win::new(&self.header).grow(0).shrink(0); let win_query_status = HSplit::default() .basis(if self.inline_info { 1 } else { 0 }) .grow(0) .shrink(0) .split(Win::new(&self.query).grow(0).shrink(0)) .split(Win::new(status_inline).grow(1).shrink(0)); let layout = &self.layout as &str; let win_main = match layout { "reverse" => VSplit::default() .split(win_query_status) .split(win_query) .split(win_status) .split(win_header) .split(win_selection), "reverse-list" => VSplit::default() .split(win_selection) .split(win_header) .split(win_status) .split(win_query) .split(win_query_status), _ => VSplit::default() .split(win_selection) .split(win_header) .split(win_status) .split(win_query) .split(win_query_status), }; let screen: Box<dyn 
Widget<Event>> = if !self.preview_hidden && self.previewer.is_some() { let previewer = self.previewer.as_ref().unwrap(); let win = Win::new(previewer) .basis(self.preview_size) .grow(0) .shrink(0) .border_attr(self.theme.border()); let win_preview = match self.preview_direction { Direction::Up => win.border_bottom(true), Direction::Right => win.border_left(true), Direction::Down => win.border_top(true), Direction::Left => win.border_right(true), }; match self.preview_direction { Direction::Up => Box::new(VSplit::default().split(win_preview).split(win_main)), Direction::Right => Box::new(HSplit::default().split(win_main).split(win_preview)), Direction::Down => Box::new(VSplit::default().split(win_main).split(win_preview)), Direction::Left => Box::new(HSplit::default().split(win_preview).split(win_main)), } } else { Box::new(win_main) }; let root = Win::new(screen) .margin_top(self.margin_top) .margin_right(self.margin_right) .margin_bottom(self.margin_bottom) .margin_left(self.margin_left); action(Box::new(root)) } } struct ModelEnv { pub cmd: String, pub query: String, pub cmd_query: String, pub clear_selection: ClearStrategy, pub in_query_mode: bool, } #[derive(Clone)] struct Status { total: usize, matched: usize, processed: usize, matcher_running: bool, multi_selection: bool, selected: usize, current_item_idx: usize, hscroll_offset: i64, reading: bool, time_since_read: Duration, time_since_match: Duration, matcher_mode: String, theme: Arc<ColorTheme>, inline_info: bool, } #[allow(unused_assignments)] impl Draw for Status { fn draw(&self, canvas: &mut dyn Canvas) -> DrawResult<()> { // example: // /--num_matched/num_read /-- current_item_index // [| 869580/869580 0.] // `-spinner `-- still matching // example(inline): // /--num_matched/num_read /-- current_item_index // [> - 549334/549334 0.] 
// `-spinner `-- still matching canvas.clear()?; let (screen_width, _) = canvas.size()?; let info_attr = self.theme.info(); let info_attr_bold = Attr { effect: Effect::BOLD, ..self.theme.info() }; let a_while_since_read = self.time_since_read > Duration::from_millis(50); let a_while_since_match = self.time_since_match > Duration::from_millis(50); let mut col = 0; let spinner_set: &[char] = if self.inline_info { &SPINNERS_INLINE } else { &SPINNERS_UNICODE }; if self.inline_info { col += canvas.put_char_with_attr(0, col, ' ', info_attr)?; } // draw the spinner if self.reading && a_while_since_read { let mills = (self.time_since_read.as_secs() * 1000) as u32 + self.time_since_read.subsec_millis(); let index = (mills / SPINNER_DURATION) % (spinner_set.len() as u32); let ch = spinner_set[index as usize]; col += canvas.put_char_with_attr(0, col, ch, self.theme.spinner())?; } else if self.inline_info { col += canvas.put_char_with_attr(0, col, '<', self.theme.prompt())?; } else { col += canvas.put_char_with_attr(0, col, ' ', self.theme.prompt())?; } // display matched/total number col += canvas.print_with_attr(0, col, format!(" {}/{}", self.matched, self.total).as_ref(), info_attr)?; // display the matcher mode if !self.matcher_mode.is_empty() { col += canvas.print_with_attr(0, col, format!("/{}", &self.matcher_mode).as_ref(), info_attr)?; } // display the percentage of the number of processed items if self.matcher_running && a_while_since_match { col += canvas.print_with_attr( 0, col, format!(" ({}%) ", self.processed * 100 / self.total).as_ref(), info_attr, )?; } // selected number if self.multi_selection && self.selected > 0 { col += canvas.print_with_attr(0, col, format!(" [{}]", self.selected).as_ref(), info_attr_bold)?; } // item cursor let line_num_str = format!( " {}/{}{}", self.current_item_idx, self.hscroll_offset, if self.matcher_running { '.' } else { ' ' } ); canvas.print_with_attr(0, screen_width - line_num_str.len(), &line_num_str, info_attr_bold)?; Ok(()) } } impl Widget<Event> for Status {} #[derive(PartialEq, Eq, Clone, Debug, Copy)] enum Direction { Up, Down, Left, Right, } #[derive(PartialEq, Eq, Clone, Debug, Copy)] enum ClearStrategy { DontClear, Clear, ClearIfNotNull, }<|fim▁end|>
const DELIMITER_STR: &str = r"[\t\n ]+";
<|file_name|>tempconv.js<|end_file_name|><|fim▁begin|>if (Meteor.isClient) { Session.setDefault('degF',-40); Session.setDefault('degC',-40); Template.temperatureBoxes.helpers({ degF: function(){ return Session.get('degF'); }, degC: function(){<|fim▁hole|> } }); Template.temperatureBoxes.events({ 'keyup #c': function(e){ if(e.which === 13) { var degC = e.target.value; var degF = Math.round(degC*9/5 + 32); Session.set('degF',degF); } }, 'keyup #f': function(e){ if(e.which === 13) { var degF = e.target.value; var degC = Math.round((degF-32)* 5/9); Session.set('degC',degC); } } }); } if (Meteor.isServer) { Meteor.startup(function () { // code to run on server at startup }); }<|fim▁end|>
return Session.get('degC');
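The two keyup handlers above are inverses up to rounding, and the -40 defaults sit exactly where the Fahrenheit and Celsius scales cross. A quick check of the same arithmetic, ported to Python purely for illustration:

    # The conversion arithmetic from the two keyup handlers.
    def c_to_f(c): return round(c * 9 / 5 + 32)
    def f_to_c(f): return round((f - 32) * 5 / 9)

    assert c_to_f(-40) == -40 and f_to_c(-40) == -40  # scales cross at -40
    assert c_to_f(100) == 212 and f_to_c(32) == 0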
<|file_name|>main.js<|end_file_name|><|fim▁begin|>function divReplaceWith(selector, url) { $.get(url, function(response) {<|fim▁hole|>}<|fim▁end|>
$(selector).html(response); })
<|file_name|>explicit-fail-msg.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern:wooooo fn main() { let mut a = 1; if 1 == 1 { a = 2; } fail!(~"woooo" + "o"); }<|fim▁end|>
// file at the top-level directory of this distribution and at
<|file_name|>0011_auto_20160716_1025.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9 on 2016-07-16 10:25 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('testCon', '0010_auto_20160713_1534'), ] operations = [ migrations.AddField( model_name='account_teacher', name='teacherName', field=models.CharField(default='TeacherName', max_length=20), preserve_default=False, ),<|fim▁hole|> migrations.AddField( model_name='classinfo', name='classCode', field=models.CharField(default=0, max_length=10), preserve_default=False, ), migrations.AddField( model_name='classinfo', name='username', field=models.CharField(default=0, max_length=20), preserve_default=False, ), migrations.AlterField( model_name='account_teacher', name='username', field=models.CharField(max_length=20, unique=True), ), ]<|fim▁end|>
<|file_name|>module.js<|end_file_name|><|fim▁begin|>angular .module('eventApp', [ 'ngResource', 'ui.bootstrap', 'ui.select', 'ui.bootstrap.datetimepicker', 'global', 'messagingApp', 'datasetApp' ]) .constant('HEK_URL', 'http://www.lmsal.com/hek/her') .constant('HEK_QUERY_PARAMS', { 'cosec': 2, // ask for json 'cmd': 'search', // search command 'type': 'column', 'event_coordsys': 'helioprojective', //always specify wide coordinates, otherwise does not work 'x1': '-5000', 'x2': '5000', 'y1': '-5000', 'y2': '5000', 'return': 'event_type,event_starttime,event_endtime,kb_archivid,gs_thumburl,frm_name,frm_identifier', // limit the returned fields 'result_limit': 10, // limit the number of results 'event_type': '**', // override to only select some event types 'event_starttime': new Date(Date.UTC(1975, 9, 1)).toISOString(), // The first HEK event is in september 1975 'event_endtime': new Date().toISOString() }) .constant('HEK_GET_PARAMS', { 'cosec': 2, // ask for json 'cmd': 'export-voevent' // search command }) .constant('EVENT_TYPES', { AR : 'Active Region', CE : 'CME', CD : 'Coronal Dimming', CH : 'Coronal Hole', CW : 'Coronal Wave', FI : 'Filament', FE : 'Filament Eruption',<|fim▁hole|> M_FL : 'Flare (M1+)', X_FL : 'Flare (X1+)', LP : 'Loop', OS : 'Oscillation', SS : 'Sunspot', EF : 'Emerging Flux', CJ : 'Coronal Jet', PG : 'Plage', OT : 'Other', NR : 'Nothing Reported', SG : 'Sigmoid', SP : 'Spray Surge', CR : 'Coronal Rain', CC : 'Coronal Cavity', ER : 'Eruption', TO : 'Topological Object' });<|fim▁end|>
FA : 'Filament Activation', FL : 'Flare', C_FL : 'Flare (C1+)',
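The constants in this row fully parameterize a HEK search: HEK_QUERY_PARAMS is sent to HEK_URL, with event_type and the time window meant to be overridden per query, and 'cosec': 2 requesting JSON. A hedged sketch of the equivalent request, assuming the service accepts a plain GET with URL-encoded parameters (which is how an Angular resource would issue it):

    # Hedged sketch: the HEK search described by the Angular constants,
    # assuming a plain GET with URL-encoded parameters returns JSON.
    import requests

    HEK_URL = "http://www.lmsal.com/hek/her"
    params = {
        "cosec": 2,                 # ask for JSON
        "cmd": "search",
        "type": "column",
        "event_coordsys": "helioprojective",
        "x1": "-5000", "x2": "5000", "y1": "-5000", "y2": "5000",
        "result_limit": 10,
        "event_type": "FL",         # override: flares only
        "event_starttime": "2014-01-01T00:00:00",
        "event_endtime": "2014-01-02T00:00:00",
    }
    events = requests.get(HEK_URL, params=params).json()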
<|file_name|>test_core.py<|end_file_name|><|fim▁begin|># coding: utf-8 from __future__ import absolute_import from django.apps import apps from django.conf import settings from sentry.models import Organization, Project, ProjectKey, Team, User from sentry.receivers.core import create_default_projects, DEFAULT_SENTRY_PROJECT_ID from sentry.testutils import TestCase class CreateDefaultProjectsTest(TestCase): def test_simple(self): user, _ = User.objects.get_or_create(is_superuser=True, defaults={"username": "test"}) Organization.objects.all().delete() Team.objects.filter(slug="sentry").delete() Project.objects.filter(id=settings.SENTRY_PROJECT).delete() config = apps.get_app_config("sentry") create_default_projects(config) project = Project.objects.get(id=settings.SENTRY_PROJECT)<|fim▁hole|> assert team.slug == "sentry" pk = ProjectKey.objects.get(project=project) assert not pk.roles.api assert pk.roles.store # ensure that we dont hit an error here create_default_projects(config) def test_without_user(self): User.objects.filter(is_superuser=True).delete() Team.objects.filter(slug="sentry").delete() Project.objects.filter(id=settings.SENTRY_PROJECT).delete() config = apps.get_app_config("sentry") create_default_projects(config) project = Project.objects.get(id=settings.SENTRY_PROJECT) assert project.public is False assert project.name == "Internal" assert project.slug == "internal" team = project.teams.first() assert team.slug == "sentry" pk = ProjectKey.objects.get(project=project) assert not pk.roles.api assert pk.roles.store # ensure that we dont hit an error here create_default_projects(config) def test_no_sentry_project(self): with self.settings(SENTRY_PROJECT=None): User.objects.filter(is_superuser=True).delete() Team.objects.filter(slug="sentry").delete() Project.objects.filter(id=DEFAULT_SENTRY_PROJECT_ID).delete() config = apps.get_app_config("sentry") create_default_projects(config) project = Project.objects.get(id=DEFAULT_SENTRY_PROJECT_ID) assert project.public is False assert project.name == "Internal" assert project.slug == "internal" team = project.teams.first() assert team.slug == "sentry" pk = ProjectKey.objects.get(project=project) assert not pk.roles.api assert pk.roles.store # ensure that we dont hit an error here create_default_projects(config)<|fim▁end|>
assert project.public is False assert project.name == "Internal" assert project.slug == "internal" team = project.teams.first()
<|file_name|>index.module.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../typings/bundle.d.ts" /> /// <reference path="index.route.ts" /> /// <reference path="index.config.ts" /> /// <reference path="index.run.ts" /> /// <reference path="main/main.controller.ts" /> /// <reference path="./components/navbar/navbar.directive.ts" /> /// <reference path="./components/malarkey/malarkey.directive.ts" /> /// <reference path="./components/webDevTec/webDevTec.service.ts" /> /// <reference path="./components/githubContributor/githubContributor.service.ts" /> declare var malarkey: any; declare var toastr: Toastr; declare var moment: moment.MomentStatic; module kidori { 'use strict'; angular.module('kidori', ['ngAnimate', 'ngResource', 'ngRoute', 'mgcrea.ngStrap']) .constant('malarkey', malarkey) .constant('toastr', toastr) .constant('moment', moment) .config(Config) .config(RouterConfig) .run(RunBlock) .service('githubContributor', GithubContributor)<|fim▁hole|>}<|fim▁end|>
.service('webDevTec', WebDevTecService) .controller('MainController', MainController) .directive('acmeNavbar', acmeNavbar) .directive('acmeMalarkey', acmeMalarkey);
<|file_name|>txsizes.go<|end_file_name|><|fim▁begin|>package spvwallet // Copyright (c) 2016 The btcsuite developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. /* Copied here from a btcd internal package*/ import ( "github.com/btcsuite/btcd/wire" ) // Worst case script and input/output size estimates. const ( // RedeemP2PKHSigScriptSize is the worst case (largest) serialize size // of a transaction input script that redeems a compressed P2PKH output. // It is calculated as: // // - OP_DATA_73 // - 72 bytes DER signature + 1 byte sighash // - OP_DATA_33 // - 33 bytes serialized compressed pubkey RedeemP2PKHSigScriptSize = 1 + 73 + 1 + 33 // RedeemP2SHMultisigSigScriptSize is the worst case (largest) serialize size // of a transaction input script that redeems a 2 of 3 P2SH multisig output with compressed keys. // It is calculated as: // // - OP_0 // - OP_DATA_72 // - 72 bytes DER signature // - OP_DATA_72 // - 72 bytes DER signature // - OP_PUSHDATA // - OP_2 // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP3 // - OP_CHECKMULTISIG RedeemP2SH2of3MultisigSigScriptSize = 1 + 1 + 72 + 1 + 72 + 1 + 1 + 1 + 33 + 1 + 33 + 1 + 33 + 1 + 1 // RedeemP2SH1of2MultisigSigScriptSize is the worst case (largest) serialize size // of a transaction input script that redeems a 1 of 2 P2SH multisig output with compressed keys. // It is calculated as: // // - OP_0 // - OP_DATA_72 // - 72 bytes DER signature // - OP_PUSHDATA // - OP_1 // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP2 // - OP_CHECKMULTISIG RedeemP2SH1of2MultisigSigScriptSize = 1 + 1 + 72 + 1 + 1 + 1 + 33 + 1 + 33 + 1 + 1 // RedeemP2SHMultisigTimelock1SigScriptSize is the worst case (largest) serialize size // of a transaction input script that redeems a compressed P2SH timelocked multisig using the timeout. // It is calculated as: // // - OP_DATA_72 // - 72 bytes DER signature // - OP_0 // - OP_PUSHDATA // - OP_IF // - OP_2 // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP3 // - OP_CHECKMULTISIG // - OP_ELSE // - OP_PUSHDATA // - 2 byte block height // - OP_CHECKSEQUENCEVERIFY // - OP_DROP // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_CHECKSIG // - OP_ENDIF RedeemP2SHMultisigTimelock1SigScriptSize = 1 + 72 + 1 + 1 + 1 + 1 + 1 + 33 + 1 + 33 + 1 + 33 + 1 + 1 + 1 + 1 + 2 + 1 + 1 + 1 + 33 + 1 + 1 // RedeemP2SHMultisigTimelock2SigScriptSize is the worst case (largest) serialize size // of a transaction input script that redeems a compressed P2SH timelocked multisig without using the timeout. 
// It is calculated as: // // - OP_0 // - OP_DATA_72 // - 72 bytes DER signature // - OP_DATA_72 // - 72 bytes DER signature // - OP_1 // - OP_PUSHDATA // - OP_IF // - OP_2 // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP3 // - OP_CHECKMULTISIG // - OP_ELSE // - OP_PUSHDATA // - 2 byte block height // - OP_CHECKSEQUENCEVERIFY // - OP_DROP // - OP_DATA_33 // - 33 bytes serialized compressed pubkey // - OP_CHECKSIG // - OP_ENDIF RedeemP2SHMultisigTimelock2SigScriptSize = 1 + 1 + 72 + +1 + 72 + 1 + 1 + 1 + 1 + 1 + 33 + 1 + 33 + 1 + 33 + 1 + 1 + 1 + 1 + 2 + 1 + 1 + 1 + 33 + 1 + 1 // P2PKHPkScriptSize is the size of a transaction output script that // pays to a compressed pubkey hash. It is calculated as: // // - OP_DUP // - OP_HASH160 // - OP_DATA_20 // - 20 bytes pubkey hash // - OP_EQUALVERIFY // - OP_CHECKSIG P2PKHPkScriptSize = 1 + 1 + 1 + 20 + 1 + 1 // RedeemP2PKHInputSize is the worst case (largest) serialize size of a // transaction input redeeming a compressed P2PKH output. It is // calculated as: // // - 32 bytes previous tx // - 4 bytes output index // - 1 byte script len // - signature script // - 4 bytes sequence RedeemP2PKHInputSize = 32 + 4 + 1 + RedeemP2PKHSigScriptSize + 4 // RedeemP2SH2of3MultisigInputSize is the worst case (largest) serialize size of a // transaction input redeeming a compressed P2SH 2 of 3 multisig output. It is // calculated as: // // - 32 bytes previous tx // - 4 bytes output index // - 1 byte script len // - 4 bytes sequence /// - witness discounted signature script RedeemP2SH2of3MultisigInputSize = 32 + 4 + 1 + 4 + (RedeemP2SH2of3MultisigSigScriptSize / 4) // RedeemP2SH1of2MultisigInputSize is the worst case (largest) serialize size of a // transaction input redeeming a compressed P2SH 2 of 3 multisig output. It is // calculated as: // // - 32 bytes previous tx // - 4 bytes output index // - 1 byte script len // - 4 bytes sequence /// - witness discounted signature script RedeemP2SH1of2MultisigInputSize = 32 + 4 + 1 + 4 + (RedeemP2SH1of2MultisigSigScriptSize / 4) // RedeemP2SHMultisigTimelock1InputSize is the worst case (largest) serialize size of a // transaction input redeeming a compressed p2sh timelocked multig output with using the timeout. It is // calculated as: // // - 32 bytes previous tx // - 4 bytes output index // - 1 byte script len // - 4 bytes sequence /// - witness discounted signature script RedeemP2SHMultisigTimelock1InputSize = 32 + 4 + 1 + 4 + (RedeemP2SHMultisigTimelock1SigScriptSize / 4) // RedeemP2SHMultisigTimelock2InputSize is the worst case (largest) serialize size of a // transaction input redeeming a compressed P2SH timelocked multisig output without using the timeout. It is // calculated as: // // - 32 bytes previous tx // - 4 bytes output index // - 1 byte script len // - 4 bytes sequence /// - witness discounted signature script RedeemP2SHMultisigTimelock2InputSize = 32 + 4 + 1 + 4 + (RedeemP2SHMultisigTimelock2SigScriptSize / 4) // P2PKHOutputSize is the serialize size of a transaction output with a // P2PKH output script. 
It is calculated as: // // - 8 bytes output value // - 1 byte compact int encoding value 25 // - 25 bytes P2PKH output script P2PKHOutputSize = 8 + 1 + P2PKHPkScriptSize ) type InputType int const ( P2PKH InputType = iota P2SH_1of2_Multisig P2SH_2of3_Multisig P2SH_Multisig_Timelock_1Sig P2SH_Multisig_Timelock_2Sigs ) // EstimateSerializeSize returns a worst case serialize size estimate for a // signed transaction that spends inputCount number of compressed P2PKH outputs // and contains each transaction output from txOuts. The estimated size is // incremented for an additional P2PKH change output if addChangeOutput is true. func EstimateSerializeSize(inputCount int, txOuts []*wire.TxOut, addChangeOutput bool, inputType InputType) int { changeSize := 0 outputCount := len(txOuts) if addChangeOutput { changeSize = P2PKHOutputSize outputCount++ } var redeemScriptSize int switch inputType { case P2PKH: redeemScriptSize = RedeemP2PKHInputSize case P2SH_1of2_Multisig: redeemScriptSize = RedeemP2SH1of2MultisigInputSize case P2SH_2of3_Multisig: redeemScriptSize = RedeemP2SH2of3MultisigInputSize case P2SH_Multisig_Timelock_1Sig: redeemScriptSize = RedeemP2SHMultisigTimelock1InputSize case P2SH_Multisig_Timelock_2Sigs: redeemScriptSize = RedeemP2SHMultisigTimelock2InputSize } // 10 additional bytes are for version, locktime, and segwit flags return 10 + wire.VarIntSerializeSize(uint64(inputCount)) + wire.VarIntSerializeSize(uint64(outputCount)) + inputCount*redeemScriptSize + SumOutputSerializeSizes(txOuts) +<|fim▁hole|>} // SumOutputSerializeSizes sums up the serialized size of the supplied outputs. func SumOutputSerializeSizes(outputs []*wire.TxOut) (serializeSize int) { for _, txOut := range outputs { serializeSize += txOut.SerializeSize() } return serializeSize }<|fim▁end|>
changeSize
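Because every constant above is a plain sum, the estimate is easy to check by hand: the worst-case P2PKH signature script is 1 + 73 + 1 + 33 = 108 bytes, so a redeeming input is 32 + 4 + 1 + 108 + 4 = 149 bytes, and a P2PKH output is 8 + 1 + 25 = 34 bytes. Below is a small mirror of EstimateSerializeSize for the all-P2PKH case, in Python purely for illustration; it assumes 1-byte varints (fewer than 253 inputs and outputs) and that every output is P2PKH:

    # Mirror of the Go arithmetic for an all-P2PKH transaction.
    REDEEM_P2PKH_SIG_SCRIPT = 1 + 73 + 1 + 33                      # 108
    REDEEM_P2PKH_INPUT = 32 + 4 + 1 + REDEEM_P2PKH_SIG_SCRIPT + 4  # 149
    P2PKH_OUTPUT = 8 + 1 + (1 + 1 + 1 + 20 + 1 + 1)                # 34

    def estimate_p2pkh_size(n_in: int, n_out: int, add_change: bool) -> int:
        outs = n_out + (1 if add_change else 0)
        assert n_in < 253 and outs < 253       # 1-byte varints only, for this sketch
        return (10                             # version, locktime, segwit flags
                + 1 + 1                        # input/output count varints
                + n_in * REDEEM_P2PKH_INPUT
                + outs * P2PKH_OUTPUT)

    print(estimate_p2pkh_size(1, 1, True))     # 229 = 10 + 2 + 149 + 2 * 34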
<|file_name|>de.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- { ' (late)': ' (verspätet)', '!=': '!=', '!langcode!': 'de', '!langname!': 'Deutsch (DE)', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '""Update" ist ein optionaler Ausdruck wie "Feld1 = \'newvalue". JOIN Ergebnisse können nicht aktualisiert oder gelöscht werden', '%(nrows)s records found': '%(nrows)s Einträge gefunden', '%d.%m.%Y': '%d.%m.%Y', '%s %%(shop)': '%s %%(shop)', '%s %%(shop[0])': '%s %%(shop[0])', '%s %%{quark[0]}': '%s %%{quark[0]}', '%s %%{row} deleted': '%s %%{row} gelöscht', '%s %%{row} updated': '%s %%{row} aktualisiert', '%s %%{shop[0]}': '%s %%{shop[0]}', '%s %%{shop}': '%s %%{shop}', '%s selected': '%s ausgewählt', '%Y-%m-%d': '%d.%m.%Y', '%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S', '+ And': '+ Und', '+ Or': '+ Oder', '<': '<', '<=': '<=', '=': '=', '>': '>', '>=': '>=', '?': '?', '@markmin\x01**Hello World**': '**Hallo Welt**', '@markmin\x01An error occured, please [[reload %s]] the page': 'Ein Fehler ist aufgetreten, bitte [[laden %s]] Sie die Seite neu', 'A file ({filename}) was uploaded for task {task} by {firstname} {lastname} with the hash (SHA256) {hash}.': 'Eine Datei ({filename}) für die Aufgabe {task} mit dem Hash (SHA256) {hash} wurde von {firstname} {lastname} hochgeladen.', 'About': 'Über', 'Access Control': 'Zugangskontrolle', 'Add Record': 'Eintrag hinzufügen', 'Add record to database': 'Eintrag in Datenbank hinzufügen', 'Add this to the search as an AND term': 'Add this to the search as an AND term', 'Add this to the search as an OR term': 'Add this to the search as an OR term', 'admin': 'admin', 'Administrative Interface': 'Administrationsoberfläche', 'Administrator view: Task of all users are shown!': 'Administrator-Ansicht: Es werden die Aufgaben aller Benutzer angezeigt!', 'After the file was uploaded, the chosen teacher will be informed by email. The uploader also gets an email with the hash (SHA256) of the uploaded file.': 'Nach Betätigung des Upload-Buttons wird die Datei übermittelt und die ausgewählte Lehrkraft per Mail darüber informiert. 
Der Uploader bekommt ebenfalls eine Benachrichtigungsmail mit der Hash (SHA256) der hochgeladenen Datei.', 'Ajax Recipes': 'Ajax Rezepte', 'appadmin is disabled because insecure channel': 'Appadmin ist deaktiviert, wegen der Benutzung eines unsicheren Kanals', 'Apply changes': 'Änderungen übernehmen', 'Are you sure you want to delete this object?': 'Sind Sie sich sicher, dass Sie dieses Objekt löschen wollen?', 'AttendingClass': 'Klasse', 'Available Databases and Tables': 'Verfügbare Datenbanken und Tabellen', 'Back': 'Zurück', 'Buy this book': 'Dieses Buch kaufen', "Buy web2py's book": "Buy web2py's book", 'cache': 'cache', 'Cache': 'Cache', 'Cache Cleared': 'Cache geleert', 'Cache Keys': 'Cache Schlüssel', 'Cannot be empty': 'Darf nicht leer sein', 'Change Password': 'Passwort ändern', 'Change password': 'Passwort ändern', 'Check to delete': 'Auswählen um zu löschen', 'Choose a file to be uploaded!': 'Wählen Sie eine Datei aus, die hochgeladen werden soll!', 'Choose a task:': 'Wählen Sie eine Aufgabe aus:', 'choose one': 'Wählen Sie einen aus', 'Class': 'Klasse', 'Clear': 'Löschen', 'Clear CACHE?': 'CACHE löschen?', 'Clear DISK': 'DISK löschen', 'Clear RAM': 'RAM löschen', 'Click on the link %(link)s to reset your password': 'Click on the link %(link)s to reset your password', 'Client IP': 'Client IP', 'Close': 'Schließen', 'Collect uploaded files': 'Hochgeladene Dateien einsammeln', 'Comma-separated export including columns not shown; fields from other tables are exported as raw values for faster export': 'Comma-separated export including columns not shown; fields from other tables are exported as raw values for faster export', 'Comma-separated export of visible columns. Fields from other tables are exported as they appear on-screen but this may be slow for many rows': 'Comma-separated export of visible columns. 
Fields from other tables are exported as they appear on-screen but this may be slow for many rows', 'Community': 'Community', 'Components and Plugins': 'Komponenten und Plugins', 'Config.ini': 'Config.ini', 'Confirm Password': 'Passwort bestätigen', 'contains': 'contains', 'Controller': 'Controller', 'Copyright': 'Copyright', 'Created By': 'Erstellt von', 'Created On': 'Erstellt am', 'CSV': 'CSV', 'CSV (hidden cols)': 'CSV (versteckte Spalten)', 'Current request': 'Derzeitiger Request', 'Current response': 'Derzeitige Response', 'Current session': 'Derzeitige Session', 'customize me!': 'Pass mich an!', 'data uploaded': 'Datei hochgeladen', 'Database': 'Datenbank', 'Database %s select': 'Datenbank %s ausgewählt', 'Database Administration (appadmin)': 'Datenbankadministration (appadmin)', 'db': 'db', 'DB Model': 'Muster-DB', 'Delete': 'Löschen', 'Delete:': 'Löschen:', 'Demo': 'Demo', 'Deployment Recipes': 'Entwicklungsrezepte', 'Description': 'Beschreibung', 'design': 'Design', 'Design': 'Design', 'DISK': 'DISK', 'Disk Cache Keys': 'Disk Cache Keys', 'Disk Cleared': 'Disk gelöscht', 'Documentation': 'Dokumentation', "Don't know what to do?": 'Wissen Sie nicht weiter?', 'done!': 'Fertig!', 'Download': 'Download', 'Download all uploaded files...': 'Alle hochgeladenen Dateien herunterladen...', 'Download files': 'Dateien herunterladen', 'Duedate': 'Fälligkeit', 'DueDate': 'Fälligkeit', 'E-mail': 'Email', 'Edit': 'Bearbeiten', 'Edit current record': 'Diesen Eintrag editieren', 'Email and SMS': 'Email und SMS', 'Email sent': 'Email wurde versandt', 'Enter a valid email address': 'Geben Sie eine gültige Email-Adresse ein', 'Enter a value': 'Geben Sie einen Wert ein', 'Enter an integer between %(min)g and %(max)g': 'Eine Zahl zwischen %(min)g und %(max)g eingeben', 'enter an integer between %(min)g and %(max)g': 'eine Zahl zwischen %(min)g und %(max)g eingeben', 'enter date and time as %(format)s': 'ein Datum und eine Uhrzeit als %(format)s eingeben', 'Enter date as %(format)s': 'Geben Sie ein Datum mit dem Format %(format)s an', 'Enter from %(min)g to %(max)g characters': 'Geben Sie zwischen %(min)g und %(max)g Zeichen ein', 'Errors': 'Fehlermeldungen', 'Errors in form, please check it out.': 'Bitte überprüfen Sie das Formular, denn es enthält Fehler.', 'export as csv file': 'als csv Datei exportieren', 'Export:': 'Export:', 'FAQ': 'FAQ', 'file': 'file', 'file ## download': 'Datei ## herunterladen', 'File size is to large!': 'Datei ist zu groß!', 'File successfully uploaded': 'Datei erfolgreich hochgeladen',<|fim▁hole|>'First Name': 'Vorname', 'FirstName': 'FirstName', 'Forgot username?': 'Benutzernamen vergessen?', 'Forms and Validators': 'Forms und Validators', 'Free Applications': 'Kostenlose Anwendungen', 'Given task and teacher does not match!': 'Angegebene Aufgabe und Lehrkraft passen nicht zusammen!', 'Graph Model': 'Muster-Graph', 'Group %(group_id)s created': 'Gruppe %(group_id)s erstellt', 'Group ID': 'Gruppen ID', 'Group uniquely assigned to user %(id)s': 'Gruppe eindeutigem Benutzer %(id)s zugewiesen', 'Groups': 'Gruppen', 'Hash': 'Hash', 'Hello World': 'Hallo Welt', 'Hello World ## Kommentar': 'Hallo Welt ', 'Hello World## Kommentar': 'Hallo Welt', 'Help': 'Hilfe', 'Helping web2py': 'web2py helfen', 'Home': 'Startseite', 'Hosted on': 'Hosted on', 'How did you get here?': 'Wie sind Sie hier her gelangt?', 'HTML': 'HTML', 'HTML export of visible columns': 'Sichtbare Spalten nach HTML exportieren', 'Id': 'Id', 'import': 'Importieren', 'Import/Export': 'Importieren/Exportieren', 'in': 
'in', 'Insufficient privileges': 'Keine ausreichenden Rechte', 'Internal State': 'Innerer Zustand', 'Introduction': 'Einführung', 'Invalid email': 'Ungültige Email', 'Invalid login': 'Ungültiger Login', 'Invalid Query': 'Ungültige Query', 'invalid request': 'Ungültiger Request', 'Is Active': 'Ist aktiv', 'JSON': 'JSON', 'JSON export of visible columns': 'Sichtbare Spalten nach JSON exportieren', 'Key': 'Schlüssel', 'Klasse': 'Klasse', 'Last name': 'Nachname', 'Last Name': 'Nachname', 'LastName': 'Nachname', 'Layout': 'Layout', 'Layout Plugins': 'Layout Plugins', 'Layouts': 'Layouts', 'Live Chat': 'Live Chat', 'Log In': 'Einloggen', 'Log Out': 'Ausloggen', 'Logged in': 'Eingeloggt', 'Logged out': 'Ausgeloggt', 'Login': 'Einloggen', 'Login to UpLoad': 'In UpLoad einloggen', 'Logout': 'Ausloggen', 'Lost Password': 'Passwort vergessen', 'Lost password?': 'Passwort vergessen?', 'Mail': 'Mail', 'Manage %(action)s': '%(action)s verwalten', 'Manage Access Control': 'Zugangskontrolle verwalten', 'Manage Cache': 'Cache verwalten', 'Manage tasks': 'Aufgaben verwalten', 'Manage teachers': 'Lehrkräfte verwalten', 'Memberships': 'Mitgliedschaften', 'Menu Model': 'Menü-Muster', 'Modified By': 'Verändert von', 'Modified On': 'Verändert am', 'My Sites': 'Meine Seiten', 'Nachname': 'Nachname', 'Name': 'Name', 'New password': 'Neues Passwort', 'New Record': 'Neuer Eintrag', 'new record inserted': 'neuer Eintrag hinzugefügt', 'New Search': 'Neue Suche', 'next %s rows': 'nächste %s Reihen', 'No databases in this application': 'Keine Datenbank in dieser Anwendung', 'No hash for upload given.': 'Kein Upload mit angegebenem Hash gefunden', 'No records found': 'Keine Einträge gefunden', 'No task number given.': 'Es wurde keine Aufgabe gewählt.', 'Not Authorized': 'Zugriff verboten', 'not authorized': 'Zugriff verboten', 'not in': 'not in', 'Object or table name': 'Objekt- oder Tabellenname', 'Old password': 'Altes Passwort', 'Online book': 'Online book', 'Online examples': 'Online Beispiele', 'OpenForSubmission': 'Upload freigeschaltet', 'or import from csv file': 'oder von csv Datei importieren', 'Origin': 'Ursprung', 'Other Plugins': 'Andere Plugins', 'Other Recipes': 'Andere Rezepte', 'Overview': 'Überblick', 'Password': 'Passwort', 'Password changed': 'Passwort wurde geändert', "Password fields don't match": 'Passwortfelder sind nicht gleich', 'Password reset': 'Passwort wurde zurückgesetzt', 'Permission': 'Zugriffsrecht', 'Permissions': 'Zugriffsrechte', 'please input your password again': 'Bitte geben Sie Ihr Passwort erneut ein', 'Plugins': 'Plugins', 'Powered by': 'Unterstützt von', 'Preface': 'Allgemeines', 'previous %s rows': 'vorherige %s Reihen', 'Profile': 'Profil', 'Profile updated': 'Profil aktualisiert', 'pygraphviz library not found': 'pygraphviz Bibliothek wurde nicht gefunden', 'Python': 'Python', 'Query:': 'Query:', 'Quick Examples': 'Kurze Beispiele', 'RAM': 'RAM', 'RAM Cache Keys': 'RAM Cache Keys', 'Ram Cleared': 'Ram Cleared', 'Recipes': 'Rezepte', 'Record': 'Eintrag', 'record does not exist': 'Eintrag existiert nicht', 'Record ID': 'ID des Eintrags', 'Record id': 'id des Eintrags', 'Register': 'Register', 'Registration identifier': 'Registrierungsbezeichnung', 'Registration key': 'Registrierungsschlüssel', 'Registration successful': 'Registrierung erfolgreich', 'Remember me (for 30 days)': 'Eingeloggt bleiben (30 Tage lang)', 'Request reset password': 'Request reset password', 'Request Reset Password': 'Request Reset Password', 'Reset Password': 'Reset Password', 'Reset Password key': 
'Passwortschlüssel zurücksetzen', 'Role': 'Rolle', 'Roles': 'Rollen', 'Rows in Table': 'Tabellenreihen', 'Rows selected': 'Reihen ausgewählt', 'Save model as...': 'Speichere Vorlage als...', 'Search': 'Suche', 'Semantic': 'Semantik', 'SendMessages': 'Sende Nachrichten', 'Services': 'Dienste', 'Sign Up': 'Sign Up', 'Size of cache:': 'Cachegröße:', 'Spreadsheet-optimised export of tab-separated content including hidden columns. May be slow': 'Spreadsheet-optimised export of tab-separated content including hidden columns. May be slow', 'Spreadsheet-optimised export of tab-separated content, visible columns only. May be slow.': 'Spreadsheet-optimised export of tab-separated content, visible columns only. May be slow.', 'Start building a new search': 'Start building a new search', 'StartDate': 'Beginn', 'starts with': 'starts with', 'state': 'Status', 'Statistics': 'Statistik', 'Stylesheet': 'Stylesheet', 'Submission for given task no yet allowed!': 'Upload für die gegebene Aufgabe ist noch nicht erlaubt!', 'submit': 'Abschicken', 'Submit': 'Abschicken', 'SubmittedOnTime': 'Pünktlich abgegeben', 'Submittedontime': 'Pünktlich abgegeben', 'Success!': 'Erfolg!', 'Support': 'Support', 'Table': 'Tabelle', 'Task': 'Aufgabe', 'Task is currently not open for submission.': 'Für diese Aufgabe sind im Moment keine Uploads erlaubt.', 'Teacher': 'Lehrkraft', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'Die "query" ist eine Bedingung wie "db.tabelle1.feld1==\'wert\'". So etwas wie "db.tabelle1.feld1==db.tabelle2.feld2" resultiert in einem SQL JOIN.', 'The Core': 'Der Core', 'The output of the file is a dictionary that was rendered by the view %s': 'Die Ausgabe der Datei ist ein "dictionary", welches vom "view" %s gerendert wurde', 'The Views': 'Die Views', 'This App': 'Diese App', 'This email already has an account': 'Zu dieser Email-Adresse gibt es bereits einen Benutzer.', 'This exact file has been uploaded already by a user.': 'Exakt diese Datei wurde bereits von einem Benutzer hochgeladen.', 'This page only works with JavaScript.': 'Diese Seite funktioniert nur mit JavaScript.', 'Time in Cache (h:m:s)': 'Zeit im Cache (h:m:s)', 'Timestamp': 'Zeitstempel', 'Token': 'Token', 'Traceback': 'Traceback', 'TSV (Spreadsheets)': 'TSV (Tabellenkalkulation)', 'TSV (Spreadsheets, hidden cols)': 'TSV (Tabellenkalkulation, versteckte Spalten)', 'Twitter': 'Twitter', 'unable to parse csv file': 'csv Datei konnte nicht geparst werden', 'Update:': 'Update:', 'Upload file': 'Datei hochladen', 'UpLoad@BBS is used to upload presentations, project documentation and tests. To upload a file you have to fill out the form with information about the uploader, the teacher and the task for which you want to upload a file. Then you can choose a file to be uploaded. The maximum file size is 5MiB.': 'UpLoad@BBS dient dem einfachen Abgeben von Projektarbeiten, Präsentationen und Klassenarbeiten. Um eine Datei hochzuladen, müssen zunächst Informationen zum Absender, der Lehrkraft und der Aufgabe angegeben werden. Anschließend ist die abzugebende Datei auszuwählen. Die maximal erlaubte Dateigröße beträgt 5MiB.', 'UploadedFile': 'Hochgeladene Datei', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Benutze (...)&(...) für AND, (...)|(...) für OR, und ~(...) 
für NOT, um komplexere Queries zu erstellen.', 'User': 'Benutzer', 'User %(id)s Logged-in': 'Benutzer %(id)s hat sich eingeloggt', 'User %(id)s Logged-out': 'Benutzer %(id)s hat sich ausgeloggt', 'User %(id)s Registered': 'Benutzer %(id)s hat sich registriert', 'User ID': 'Benutzer ID', 'Users': 'Benutzer', 'value already in database or empty': 'Wert ist bereits in der Datenbank oder leer', 'Value not in database': 'Ausgewählter Wert nicht in Datenbank enthalten', 'Verify Password': 'Passwort überprüfen', 'Videos': 'Videos', 'View': 'Ansicht', 'View uploaded file': 'Hochgeladene Datei ansehen', 'View uploaded files': 'Hochgeladene Dateien ansehen', 'View uploads': 'Hochgeladene Datei ansehen', 'Welcome': 'Willkommen', 'Welcome to web2py!': 'Willkommen bei web2py!', 'Which called the function %s located in the file %s': 'Welche die Funktion %s in der Datei %s aufrief', 'Working...': 'Arbeite...', 'Wrong token for given task.': 'Ungültiger Token für gewählte Aufgabe eingegeben.', 'Wrong token given!': 'Ungültiger Token eingegeben!', 'XML': 'XML', 'XML export of columns shown': 'XML export of columns shown', 'You already created a Task with the same name. Please delete the old task or rename this one.': 'Sie haben bereits eine Aufgabe mit diesem Namen erstellt. Bitte löschen Sie die alte Aufgabe oder geben Sie einen anderen Namen an.', 'You already uploaded a file for this task!': 'Sie haben bereits eine Datei für diese Aufgabe hochgeladen!', 'You are successfully running web2py': 'web2py wird erfolgreich ausgeführt', 'You can modify this application and adapt it to your needs': 'Sie können diese Anwendung verändern und Ihren Bedürfnissen anpassen', 'You can only create tasks for yourself.': 'Sie dürfen nur eigene Aufgaben erstellen.', 'You visited the url %s': 'Sie haben die URL %s besucht', 'Your file ({filename}) with the hash (SHA256) {hash} has been successfully uploaded.': 'Ihre Datei ({filename}) mit dem Hash (SHA256) {hash} wurde erfolgreich hochgeladen.', }<|fim▁end|>
'File successfully uploaded!': 'Datei erfolgreich hochgeladen!', 'File to be uploaded': 'Hochzuladende Datei', 'File uploaded for task {task}': 'Datei hochgeladen für Aufgabe {task}', 'First name': 'Vorname',
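
The record above is a web2py languages/de.py translation dictionary for the UpLoad@BBS application. As a rough illustration of how web2py consumes such a dict, here is a minimal, hypothetical stand-in for the T() lookup; the real gluon.languages translator additionally handles lazy strings and '##' comment stripping, neither of which is reproduced here. The entries are copied from the file above.

# -*- coding: utf-8 -*-
# Minimal, hypothetical stand-in for web2py's T() lookup over a language dict.
de = {
    'Change password': 'Passwort ändern',
    'Logged in': 'Eingeloggt',
    'next %s rows': 'nächste %s Reihen',
}

def T(message, symbols=None, translations=de):
    """Return the translation for message, falling back to message itself."""
    text = translations.get(message, message)
    # web2py messages use printf-style placeholders such as %s and %(link)s
    return text % symbols if symbols is not None else text

print(T('Change password'))      # -> Passwort ändern
print(T('next %s rows', (25,)))  # -> nächste 25 Reihen
print(T('Unknown message'))      # -> Unknown message (fallback)

Untranslated entries in the file (e.g. 'Sign Up': 'Sign Up') simply map to themselves, so the fallback branch only matters for keys missing from the dict entirely.
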
<|file_name|>test_notifications.py<|end_file_name|><|fim▁begin|>import collections import datetime import mock import pytz from babel import dates, Locale from schema import Schema, And, Use, Or from modularodm import Q from modularodm.exceptions import NoResultsFound from nose.tools import * # noqa PEP8 asserts from framework.auth import Auth from framework.auth.core import User from framework.auth.signals import contributor_removed from framework.auth.signals import node_deleted from framework.guid.model import Guid from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications from website.notifications import constants from website.notifications.model import NotificationDigest from website.notifications.model import NotificationSubscription from website.notifications import emails from website.notifications import utils from website.project.model import Node, Comment from website import mails from website.util import api_url_for from website.util import web_url_for from tests import factories from tests.base import capture_signals from tests.base import OsfTestCase class TestNotificationsModels(OsfTestCase): def setUp(self): super(TestNotificationsModels, self).setUp() # Create project with component self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.parent = factories.ProjectFactory(creator=self.user) self.node = factories.NodeFactory(creator=self.user, parent=self.parent) def test_has_permission_on_children(self): non_admin_user = factories.UserFactory() parent = factories.ProjectFactory() parent.add_contributor(contributor=non_admin_user, permissions=['read']) parent.save() node = factories.NodeFactory(parent=parent, category='project') sub_component = factories.NodeFactory(parent=node) sub_component.add_contributor(contributor=non_admin_user) sub_component.save() sub_component2 = factories.NodeFactory(parent=node) assert_true( node.has_permission_on_children(non_admin_user, 'read') ) def test_check_user_has_permission_excludes_deleted_components(self): non_admin_user = factories.UserFactory() parent = factories.ProjectFactory() parent.add_contributor(contributor=non_admin_user, permissions=['read']) parent.save() node = factories.NodeFactory(parent=parent, category='project') sub_component = factories.NodeFactory(parent=node) sub_component.add_contributor(contributor=non_admin_user) sub_component.is_deleted = True sub_component.save() sub_component2 = factories.NodeFactory(parent=node) assert_false( node.has_permission_on_children(non_admin_user,'read') ) def test_check_user_does_not_have_permission_on_private_node_child(self): non_admin_user = factories.UserFactory() parent = factories.ProjectFactory() parent.add_contributor(contributor=non_admin_user, permissions=['read']) parent.save() node = factories.NodeFactory(parent=parent, category='project') sub_component = factories.NodeFactory(parent=node) assert_false( node.has_permission_on_children(non_admin_user,'read') ) def test_check_user_child_node_permissions_false_if_no_children(self): non_admin_user = factories.UserFactory() parent = factories.ProjectFactory() parent.add_contributor(contributor=non_admin_user, permissions=['read']) parent.save() node = factories.NodeFactory(parent=parent, category='project') assert_false( node.has_permission_on_children(non_admin_user,'read') ) def test_check_admin_has_permissions_on_private_component(self): parent = factories.ProjectFactory() node = factories.NodeFactory(parent=parent, category='project') 
sub_component = factories.NodeFactory(parent=node) assert_true( node.has_permission_on_children(parent.creator,'read') ) def test_check_user_private_node_child_permissions_excludes_pointers(self): user = factories.UserFactory() parent = factories.ProjectFactory() pointed = factories.ProjectFactory(contributor=user) parent.add_pointer(pointed, Auth(parent.creator)) parent.save() assert_false( parent.has_permission_on_children(user,'read') ) class TestSubscriptionView(OsfTestCase): def setUp(self): super(TestSubscriptionView, self).setUp() self.node = factories.NodeFactory() self.user = self.node.creator def test_create_new_subscription(self): payload = { 'id': self.node._id, 'event': 'comments', 'notification_type': 'email_transactional' } url = api_url_for('configure_subscription') self.app.post_json(url, payload, auth=self.node.creator.auth) # check that subscription was created event_id = self.node._id + '_' + 'comments' s = NotificationSubscription.find_one(Q('_id', 'eq', event_id)) # check that user was added to notification_type field assert_equal(payload['id'], s.owner._id) assert_equal(payload['event'], s.event_name) assert_in(self.node.creator, getattr(s, payload['notification_type'])) # change subscription new_payload = { 'id': self.node._id, 'event': 'comments', 'notification_type': 'email_digest' } url = api_url_for('configure_subscription') self.app.post_json(url, new_payload, auth=self.node.creator.auth) s.reload() assert_false(self.node.creator in getattr(s, payload['notification_type'])) assert_in(self.node.creator, getattr(s, new_payload['notification_type'])) def test_adopt_parent_subscription_default(self): payload = { 'id': self.node._id, 'event': 'comments', 'notification_type': 'adopt_parent' } url = api_url_for('configure_subscription') self.app.post_json(url, payload, auth=self.node.creator.auth) event_id = self.node._id + '_' + 'comments' # confirm subscription was not created with assert_raises(NoResultsFound): NotificationSubscription.find_one(Q('_id', 'eq', event_id)) def test_change_subscription_to_adopt_parent_subscription_removes_user(self): payload = { 'id': self.node._id, 'event': 'comments', 'notification_type': 'email_transactional' } url = api_url_for('configure_subscription') self.app.post_json(url, payload, auth=self.node.creator.auth) # check that subscription was created event_id = self.node._id + '_' + 'comments' s = NotificationSubscription.find_one(Q('_id', 'eq', event_id)) # change subscription to adopt_parent new_payload = { 'id': self.node._id, 'event': 'comments', 'notification_type': 'adopt_parent' } url = api_url_for('configure_subscription') self.app.post_json(url, new_payload, auth=self.node.creator.auth) s.reload() # assert that user is removed from the subscription entirely for n in constants.NOTIFICATION_TYPES: assert_false(self.node.creator in getattr(s, n)) class TestRemoveContributor(OsfTestCase): def setUp(self): super(TestRemoveContributor, self).setUp() self.project = factories.ProjectFactory() self.contributor = factories.UserFactory() self.project.add_contributor(contributor=self.contributor, permissions=['read']) self.project.save() self.subscription = factories.NotificationSubscriptionFactory( _id=self.project._id + '_comments', owner=self.project ) self.subscription.save() self.subscription.email_transactional.append(self.contributor) self.subscription.email_transactional.append(self.project.creator) self.subscription.save() self.node = factories.NodeFactory(parent=self.project) 
self.node.add_contributor(contributor=self.project.creator, permissions=['read', 'write', 'admin']) self.node.save() self.node_subscription = factories.NotificationSubscriptionFactory( _id=self.node._id + '_comments', owner=self.node ) self.node_subscription.save() self.node_subscription.email_transactional.append(self.project.creator) self.node_subscription.email_transactional.append(self.node.creator) self.node_subscription.save() def test_removed_non_admin_contributor_is_removed_from_subscriptions(self): assert_in(self.contributor, self.subscription.email_transactional) self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) assert_not_in(self.contributor, self.project.contributors) assert_not_in(self.contributor, self.subscription.email_transactional) def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self): assert_in(self.node.creator, self.node_subscription.email_transactional) self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator)) assert_not_in(self.node.creator, self.node.contributors) assert_not_in(self.node.creator, self.node_subscription.email_transactional) def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self): # Admin on parent project is removed as a contributor on a component. Check # that admin is not removed from component subscriptions, as the admin # now has read-only access. assert_in(self.project.creator, self.node_subscription.email_transactional) self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator)) assert_not_in(self.project.creator, self.node.contributors) assert_in(self.project.creator, self.node_subscription.email_transactional) def test_remove_contributor_signal_called_when_contributor_is_removed(self): with capture_signals() as mock_signals: self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) assert_equal(mock_signals.signals_sent(), set([contributor_removed])) class TestRemoveNodeSignal(OsfTestCase): def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): project = factories.ProjectFactory() subscription = factories.NotificationSubscriptionFactory( _id=project._id + '_comments', owner=project ) subscription.save() subscription.email_transactional.append(project.creator) subscription.save() s = getattr(project.creator, 'email_transactional', []) assert_equal(len(s), 1) with capture_signals() as mock_signals: project.remove_node(auth=Auth(project.creator)) assert_true(project.is_deleted) assert_equal(mock_signals.signals_sent(), set([node_deleted])) s = getattr(project.creator, 'email_transactional', []) assert_equal(len(s), 0) with assert_raises(NoResultsFound): NotificationSubscription.find_one(Q('owner', 'eq', project)) def list_or_dict(data): # Generator only returns lists or dicts from list or dict if isinstance(data, dict): for key in data: if isinstance(data[key], dict) or isinstance(data[key], list): yield data[key] elif isinstance(data, list): for item in data: if isinstance(item, dict) or isinstance(item, list): yield item def has(data, sub_data): # Recursive approach to look for a subset of data in data. 
# WARNING: Don't use on huge structures # :param data: Data structure # :param sub_data: subset being checked for # :return: True or False try: (item for item in data if item == sub_data).next() return True except StopIteration: lists_and_dicts = list_or_dict(data) for item in lists_and_dicts: if has(item, sub_data): return True return False def subscription_schema(project, structure, level=0): # builds a schema from a list of nodes and events # :param project: validation type # :param structure: list of nodes (another list) and events # :return: schema sub_list = [] for item in list_or_dict(structure): sub_list.append(subscription_schema(project, item, level=level+1)) sub_list.append(event_schema(level)) node_schema = { 'node': { 'id': Use(type(project._id), error="node_id{}".format(level)), 'title': Use(type(project.title), error="node_title{}".format(level)), 'url': Use(type(project.url), error="node_{}".format(level)) }, 'kind': And(str, Use(lambda s: s in ('node', 'folder'), error="kind didn't match node or folder {}".format(level))), 'nodeType': Use(lambda s: s in ('project', 'component'), error='nodeType not project or component'), 'category': Use(lambda s: s in Node.CATEGORY_MAP, error='category not in Node.CATEGORY_MAP'), 'permissions': { 'view': Use(lambda s: s in (True, False), error='view permissions is not True/False') }, 'children': sub_list } if level == 0: return Schema([node_schema]) return node_schema def event_schema(level=None): return { 'event': { 'title': And(Use(str, error="event_title{} not a string".format(level)), Use(lambda s: s in constants.NOTIFICATION_TYPES, error="event_title{} not in list".format(level))), 'description': And(Use(str, error="event_desc{} not a string".format(level)), Use(lambda s: s in constants.NODE_SUBSCRIPTIONS_AVAILABLE, error="event_desc{} not in list".format(level))), 'notificationType': And(str, Or('adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)), 'parent_notification_type': Or(None, 'adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES) }, 'kind': 'event', 'children': And(list, lambda l: len(l) == 0) } class TestNotificationUtils(OsfTestCase): def setUp(self): super(TestNotificationUtils, self).setUp() self.user = factories.UserFactory() self.project = factories.ProjectFactory(creator=self.user) self.project_subscription = factories.NotificationSubscriptionFactory( _id=self.project._id + '_' + 'comments', owner=self.project, event_name='comments' ) self.project_subscription.save() self.project_subscription.email_transactional.append(self.user) self.project_subscription.save() self.node = factories.NodeFactory(parent=self.project, creator=self.user) self.node_subscription = factories.NotificationSubscriptionFactory( _id=self.node._id + '_' + 'comments', owner=self.node, event_name='comments' ) self.node_subscription.save() self.node_subscription.email_transactional.append(self.user) self.node_subscription.save() self.user_subscription = factories.NotificationSubscriptionFactory( _id=self.user._id + '_' + 'comment_replies', owner=self.user, event_name='comment_replies' ) self.user_subscription.save() self.user_subscription.email_transactional.append(self.user) self.user_subscription.save() def test_to_subscription_key(self): key = utils.to_subscription_key('xyz', 'comments') assert_equal(key, 'xyz_comments') def test_from_subscription_key(self): parsed_key = utils.from_subscription_key('xyz_comment_replies') assert_equal(parsed_key, { 'uid': 'xyz', 'event': 'comment_replies' }) def 
test_get_all_user_subscriptions(self): user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)] assert_in(self.project_subscription, user_subscriptions) assert_in(self.node_subscription, user_subscriptions) assert_in(self.user_subscription, user_subscriptions) assert_equal(len(user_subscriptions), 3) def test_get_all_node_subscriptions_given_user_subscriptions(self): user_subscriptions = utils.get_all_user_subscriptions(self.user) node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node, user_subscriptions=user_subscriptions)] assert_equal(node_subscriptions, [self.node_subscription]) def test_get_all_node_subscriptions_given_user_and_node(self): node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)] assert_equal(node_subscriptions, [self.node_subscription]) def test_get_configured_project_ids_does_not_return_user_or_node_ids(self): configured_ids = utils.get_configured_projects(self.user) # No duplicates! assert_equal(len(configured_ids), 1) assert_in(self.project._id, configured_ids) assert_not_in(self.node._id, configured_ids) assert_not_in(self.user._id, configured_ids) def test_get_configured_project_ids_excludes_deleted_projects(self): project = factories.ProjectFactory() subscription = factories.NotificationSubscriptionFactory( _id=project._id + '_' + 'comments', owner=project ) subscription.save() subscription.email_transactional.append(self.user) subscription.save() project.is_deleted = True project.save() assert_not_in(project._id, utils.get_configured_projects(self.user)) def test_get_configured_project_ids_excludes_node_with_project_category(self): node = factories.NodeFactory(parent=self.project, category='project') node_subscription = factories.NotificationSubscriptionFactory( _id=node._id + '_' + 'comments', owner=node, event_name='comments' ) node_subscription.save() node_subscription.email_transactional.append(self.user) node_subscription.save() assert_not_in(node._id, utils.get_configured_projects(self.user)) def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self): private_project = factories.ProjectFactory() node = factories.NodeFactory(parent=private_project) node_subscription = factories.NotificationSubscriptionFactory( _id=node._id + '_comments', owner=node, event_name='comments' ) node_subscription.email_transactional.append(node.creator) node_subscription.save() configured_project_ids = utils.get_configured_projects(node.creator) assert_in(private_project._id, configured_project_ids) def test_get_configured_project_ids_excludes_private_projects_if_no_subscriptions_on_node(self): private_project = factories.ProjectFactory() node = factories.NodeFactory(parent=private_project) configured_project_ids = utils.get_configured_projects(node.creator) assert_not_in(private_project._id, configured_project_ids) def test_get_parent_notification_type(self): nt = utils.get_parent_notification_type(self.node, 'comments', self.user) assert_equal(nt, 'email_transactional') def test_get_parent_notification_type_no_parent_subscriptions(self): node = factories.NodeFactory() nt = utils.get_parent_notification_type(node._id, 'comments', self.user) assert_equal(nt, None) def test_get_parent_notification_type_no_parent(self): project = factories.ProjectFactory() nt = utils.get_parent_notification_type(project._id, 'comments', self.user) assert_equal(nt, None) def test_get_parent_notification_type_handles_user_id(self): nt = 
utils.get_parent_notification_type(self.user._id, 'comments', self.user) assert_equal(nt, None) def test_format_data_project_settings(self): data = utils.format_data(self.user, [self.project._id]) parent_event = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'email_transactional', 'parent_notification_type': None }, 'kind': 'event', 'children': [] } child_event = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'email_transactional', 'parent_notification_type': 'email_transactional' }, 'kind': 'event', 'children': [] } expected_new = [['event'], 'event'] schema = subscription_schema(self.project, expected_new) assert schema.validate(data) assert has(data, parent_event) assert has(data, child_event) def test_format_data_node_settings(self): data = utils.format_data(self.user, [self.node._id]) event = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'email_transactional', 'parent_notification_type': 'email_transactional' }, 'kind': 'event', 'children': [] } schema = subscription_schema(self.project, ['event']) assert schema.validate(data) assert has(data, event) def test_format_includes_admin_view_only_component_subscriptions(self): # Test private components in which parent project admins are not contributors still appear in their # notifications settings. node = factories.NodeFactory(parent=self.project) data = utils.format_data(self.user, [self.project._id]) event = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'adopt_parent', 'parent_notification_type': 'email_transactional' }, 'kind': 'event', 'children': [], } schema = subscription_schema(self.project, ['event', ['event'], ['event']]) assert schema.validate(data) assert has(data, event) def test_format_data_excludes_pointers(self): project = factories.ProjectFactory() subscription = factories.NotificationSubscriptionFactory( _id=project._id + '_comments', owner=project, event_name='comments' ) subscription.email_transactional.append(project.creator) subscription.save() pointed = factories.ProjectFactory() project.add_pointer(pointed, Auth(project.creator)) project.save() configured_project_ids = utils.get_configured_projects(project.creator) data = utils.format_data(project.creator, configured_project_ids) event = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'email_transactional', 'parent_notification_type': None }, 'kind': 'event', 'children': [], } schema = subscription_schema(self.project, ['event']) assert schema.validate(data) assert has(data, event) def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self): private_project = factories.ProjectFactory() node = factories.NodeFactory(parent=private_project) node_subscription = factories.NotificationSubscriptionFactory( _id=node._id + '_comments', owner=node, event_name='comments' ) node_subscription.email_transactional.append(node.creator) node_subscription.save() configured_project_ids = utils.get_configured_projects(node.creator) data = utils.format_data(node.creator, configured_project_ids) event = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'email_transactional', 'parent_notification_type': None }, 'kind': 
'event', 'children': [], } schema = subscription_schema(self.project, ['event', ['event']]) assert schema.validate(data) assert has(data, event) def test_format_user_subscriptions(self): data = utils.format_user_subscriptions(self.user) expected = [{ 'event': { 'title': 'comment_replies', 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['comment_replies'], 'notificationType': 'email_transactional', 'parent_notification_type': None }, 'kind': 'event', 'children': [], }] assert_equal(data, expected) def test_format_data_user_settings(self): data = utils.format_user_and_project_subscriptions(self.user) expected = [ { 'node': { 'id': self.user._id, 'title': 'User Notifications' }, 'kind': 'heading', 'children': utils.format_user_subscriptions(self.user) }, { 'node': { 'id': '', 'title': 'Project Notifications' }, 'kind': 'heading', 'children': utils.format_data(self.user, utils.get_configured_projects(self.user)) }] assert_equal(data, expected) def test_serialize_user_level_event(self): user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)] user_subscription = None for subscription in user_subscriptions: if 'comment_replies' in getattr(subscription, 'event_name'): user_subscription = subscription data = utils.serialize_event(self.user, event_description='comment_replies', subscription=user_subscription) expected = { 'event': { 'title': 'comment_replies', 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['comment_replies'], 'notificationType': 'email_transactional', 'parent_notification_type': None }, 'kind': 'event', 'children': [] } assert_equal(data, expected) def test_serialize_node_level_event(self): node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)] data = utils.serialize_event(user=self.user, event_description='comments', subscription=node_subscriptions[0], node=self.node) expected = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'email_transactional', 'parent_notification_type': 'email_transactional' }, 'kind': 'event', 'children': [], } assert_equal(data, expected) def test_serialize_node_level_event_that_adopts_parent_settings(self): user = factories.UserFactory() self.project.add_contributor(contributor=user, permissions=['read']) self.project.save() self.project_subscription.email_transactional.append(user) self.project_subscription.save() self.node.add_contributor(contributor=user, permissions=['read']) self.node.save() node_subscriptions = [x for x in utils.get_all_node_subscriptions(user, self.node)] data = utils.serialize_event(user=user, event_description='comments', subscription=node_subscriptions, node=self.node) expected = { 'event': { 'title': 'comments', 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], 'notificationType': 'adopt_parent', 'parent_notification_type': 'email_transactional' }, 'kind': 'event', 'children': [], } assert_equal(data, expected) class TestNotificationsDict(OsfTestCase): def test_notifications_dict_add_message_returns_proper_format(self): d = utils.NotificationsDict() message = { 'message': 'Freddie commented on your project', 'timestamp': datetime.datetime.utcnow().replace(tzinfo=pytz.utc) } message2 = { 'message': 'Mercury commented on your component', 'timestamp': datetime.datetime.utcnow().replace(tzinfo=pytz.utc) } d.add_message(['project'], message) d.add_message(['project', 'node'], message2) expected = { 'messages': [], 'children': collections.defaultdict( utils.NotificationsDict, { 
'project': { 'messages': [message], 'children': collections.defaultdict(utils.NotificationsDict, { 'node': { 'messages': [message2], 'children': collections.defaultdict(utils.NotificationsDict, {}) } }) } } )} assert_equal(d, expected) class TestCompileSubscriptions(OsfTestCase): def setUp(self): super(TestCompileSubscriptions, self).setUp() self.user_1 = factories.UserFactory() self.user_2 = factories.UserFactory() self.user_3 = factories.UserFactory() self.user_4 = factories.UserFactory() # Base project + 1 project shared with 3 + 1 project shared with 2 self.base_project = factories.ProjectFactory(is_public=False, creator=self.user_1) self.shared_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) self.private_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) # Adding contributors for node in [self.base_project, self.shared_node, self.private_node]: node.add_contributor(self.user_2, permissions='admin') self.base_project.add_contributor(self.user_3, permissions='write') self.shared_node.add_contributor(self.user_3, permissions='write') # Setting basic subscriptions self.base_sub = factories.NotificationSubscriptionFactory( _id=self.base_project._id + '_file_updated', owner=self.base_project, event_name='file_updated' ) self.base_sub.save() self.shared_sub = factories.NotificationSubscriptionFactory( _id=self.shared_node._id + '_file_updated', owner=self.shared_node, event_name='file_updated' ) self.shared_sub.save() self.private_sub = factories.NotificationSubscriptionFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() def test_no_subscription(self): node = factories.NodeFactory() result = emails.compile_subscriptions(node, 'file_updated') assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result) def test_no_subscribers(self): node = factories.NodeFactory() node_sub = factories.NotificationSubscriptionFactory( _id=node._id + '_file_updated', owner=node, event_name='file_updated' ) node_sub.save() result = emails.compile_subscriptions(node, 'file_updated') assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result) def test_creator_subbed_parent(self): # Basic sub check self.base_sub.email_transactional.append(self.user_1) self.base_sub.save() result = emails.compile_subscriptions(self.base_project, 'file_updated') assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result) def test_creator_subbed_to_parent_from_child(self): # checks the parent sub is the one to appear without a child sub self.base_sub.email_transactional.append(self.user_1) self.base_sub.save() result = emails.compile_subscriptions(self.shared_node, 'file_updated') assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result) def test_creator_subbed_to_both_from_child(self): # checks that only one sub is in the list. 
self.base_sub.email_transactional.append(self.user_1) self.base_sub.save() self.shared_sub.email_transactional.append(self.user_1) self.shared_sub.save() result = emails.compile_subscriptions(self.shared_node, 'file_updated') assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result) def test_creator_diff_subs_to_both_from_child(self): # Check that the child node sub overrides the parent node sub self.base_sub.email_transactional.append(self.user_1) self.base_sub.save() self.shared_sub.none.append(self.user_1) self.shared_sub.save() result = emails.compile_subscriptions(self.shared_node, 'file_updated') assert_equal({'email_transactional': [], 'none': [self.user_1._id], 'email_digest': []}, result) def test_user_wo_permission_on_child_node_not_listed(self): # Tests to see if a user without permission gets an Email about a node they cannot see. self.base_sub.email_transactional.append(self.user_3) self.base_sub.save() result = emails.compile_subscriptions(self.private_node, 'file_updated') assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result) def test_several_nodes_deep(self): self.base_sub.email_transactional.append(self.user_1) self.base_sub.save() node2 = factories.NodeFactory(parent=self.shared_node) node3 = factories.NodeFactory(parent=node2) node4 = factories.NodeFactory(parent=node3) node5 = factories.NodeFactory(parent=node4) subs = emails.compile_subscriptions(node5, 'file_updated') assert_equal(subs, {'email_transactional': [self.user_1._id], 'email_digest': [], 'none': []}) def test_several_nodes_deep_precedence(self): self.base_sub.email_transactional.append(self.user_1) self.base_sub.save() node2 = factories.NodeFactory(parent=self.shared_node) node3 = factories.NodeFactory(parent=node2) node4 = factories.NodeFactory(parent=node3) node4_subscription = factories.NotificationSubscriptionFactory( _id=node4._id + '_file_updated', owner=node4, event_name='file_updated' ) node4_subscription.save() node4_subscription.email_digest.append(self.user_1) node4_subscription.save() node5 = factories.NodeFactory(parent=node4) subs = emails.compile_subscriptions(node5, 'file_updated') assert_equal(subs, {'email_transactional': [], 'email_digest': [self.user_1._id], 'none': []}) class TestMoveSubscription(OsfTestCase): def setUp(self): super(TestMoveSubscription, self).setUp() self.blank = {key: [] for key in constants.NOTIFICATION_TYPES} # For use where it is blank. 
self.user_1 = factories.AuthUserFactory() self.auth = Auth(user=self.user_1) self.user_2 = factories.AuthUserFactory() self.user_3 = factories.AuthUserFactory() self.user_4 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1) self.sub = factories.NotificationSubscriptionFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.email_transactional.extend([self.user_1]) self.sub.save() self.file_sub = factories.NotificationSubscriptionFactory( _id=self.project._id + '_xyz42_file_updated', owner=self.project, event_name='xyz42_file_updated' ) self.file_sub.save() def test_separate_users(self): self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth) self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.private_node.save() subbed, removed = utils.separate_users( self.private_node, [self.user_2._id, self.user_3._id, self.user_4._id] ) assert_equal([self.user_2._id, self.user_3._id], subbed) assert_equal([self.user_4._id], removed) def test_event_subs_same(self): self.file_sub.email_transactional.extend([self.user_2, self.user_3, self.user_4]) self.file_sub.save() self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth) self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.private_node.save() results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []}, results) def test_event_nodes_same(self): self.file_sub.email_transactional.extend([self.user_2, self.user_3, self.user_4]) self.file_sub.save() self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth) self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.private_node.save() results = utils.users_to_remove('xyz42_file_updated', self.project, self.project) assert_equal({'email_transactional': [], 'email_digest': [], 'none': []}, results) def test_move_sub(self): # Tests old sub is replaced with new sub. utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) assert_equal('abc42_file_updated', self.file_sub.event_name) assert_equal(self.private_node, self.file_sub.owner) assert_equal(self.private_node._id + '_abc42_file_updated', self.file_sub._id) def test_move_sub_with_none(self): # Attempt to reproduce an error that is seen when moving files self.project.add_contributor(self.user_2, permissions=['write', 'read'], auth=self.auth) self.project.save() self.file_sub.none.append(self.user_2) self.file_sub.save() results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) assert_equal({'email_transactional': [], 'email_digest': [], 'none': [self.user_2._id]}, results) def test_remove_one_user(self): # One user doesn't have permissions on the node the sub is moved to. Should be listed. 
self.file_sub.email_transactional.extend([self.user_2, self.user_3, self.user_4]) self.file_sub.save() self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth) self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.private_node.save() results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []}, results) def test_remove_one_user_warn_another(self): # Two users do not have permissions on new node, but one has a project sub. Both should be listed. self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth) self.private_node.save() self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.project.save() self.sub.email_digest.append(self.user_3) self.sub.save() self.file_sub.email_transactional.extend([self.user_2, self.user_4]) results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [self.user_3._id], 'none': []}, results) assert_in(self.user_3, self.sub.email_digest) # Is not removed from the project subscription. def test_warn_user(self): # One user with a project sub does not have permission on new node. User should be listed. self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth) self.private_node.save() self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.project.save() self.sub.email_digest.append(self.user_3) self.sub.save() self.file_sub.email_transactional.extend([self.user_2]) results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) assert_equal({'email_transactional': [], 'email_digest': [self.user_3._id], 'none': []}, results) assert_in(self.user_3, self.sub.email_digest) # Is not removed from the project subscription. 
def test_user_node_subbed_and_not_removed(self): self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.project.save() self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth) self.private_node.save()<|fim▁hole|> assert_equal([], self.file_sub.email_digest) class TestSendEmails(OsfTestCase): def setUp(self): super(TestSendEmails, self).setUp() self.user = factories.AuthUserFactory() self.project = factories.ProjectFactory() self.project_subscription = factories.NotificationSubscriptionFactory( _id=self.project._id + '_' + 'comments', owner=self.project, event_name='comments' ) self.project_subscription.save() self.project_subscription.email_transactional.append(self.project.creator) self.project_subscription.save() self.node = factories.NodeFactory(parent=self.project) self.node_subscription = factories.NotificationSubscriptionFactory( _id=self.node._id + '_comments', owner=self.node, event_name='comments' ) self.node_subscription.save() self.user_subscription = factories.NotificationSubscriptionFactory( _id=self.user._id + '_' + 'comment_replies', owner=self.user, event_name='comment_replies', email_transactional=[self.user._id] ) @mock.patch('website.notifications.emails.store_emails') def test_notify_no_subscription(self, mock_store): node = factories.NodeFactory() emails.notify('comments', user=self.user, node=node, timestamp=datetime.datetime.utcnow()) assert_false(mock_store.called) @mock.patch('website.notifications.emails.store_emails') def test_notify_no_subscribers(self, mock_store): node = factories.NodeFactory() node_subscription = factories.NotificationSubscriptionFactory( _id=node._id + '_comments', owner=node, event_name='comments' ) node_subscription.save() emails.notify('comments', user=self.user, node=node, timestamp=datetime.datetime.utcnow()) assert_false(mock_store.called) @mock.patch('website.notifications.emails.store_emails') def test_notify_sends_with_correct_args(self, mock_store): time_now = datetime.datetime.utcnow() emails.notify('comments', user=self.user, node=self.node, timestamp=time_now) assert_true(mock_store.called) mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', self.user, self.node, time_now) @mock.patch('website.notifications.emails.store_emails') def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store): node = factories.NodeFactory() user = factories.UserFactory() node_subscription = factories.NotificationSubscriptionFactory( _id=node._id + '_comments', owner=node, event_name='comments' ) node_subscription.save() node_subscription.none.append(user) node_subscription.save() sent = emails.notify('comments', user=user, node=node, timestamp=datetime.datetime.utcnow()) assert_false(mock_store.called) assert_equal(sent, []) @mock.patch('website.notifications.emails.store_emails') def test_notify_sends_comment_reply_event_if_comment_is_direct_reply(self, mock_store): time_now = datetime.datetime.utcnow() emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, target_user=self.project.creator) mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comment_replies', self.user, self.node, time_now, target_user=self.project.creator) @mock.patch('website.notifications.emails.store_emails') def test_notify_sends_comment_reply_when_target_user_is_subscribed_via_user_settings(self, mock_store): time_now = datetime.datetime.utcnow() emails.notify('comment_replies', 
user=self.project.creator, node=self.node, timestamp=time_now, target_user=self.user) mock_store.assert_called_with([self.user._id], 'email_transactional', 'comment_replies', self.project.creator, self.node, time_now, target_user=self.user) @mock.patch('website.notifications.emails.store_emails') def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply(self, mock_store): user = factories.UserFactory() time_now = datetime.datetime.utcnow() emails.notify('comments', user=user, node=self.node, timestamp=time_now, target_user=user) mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, self.node, time_now, target_user=user) @mock.patch('website.mails.send_mail') @mock.patch('website.notifications.emails.store_emails') def test_notify_does_not_send_comment_if_they_reply_to_their_own_comment(self, mock_store, mock_send_mail): time_now = datetime.datetime.utcnow() emails.notify('comments', user=self.project.creator, node=self.project, timestamp=time_now, target_user=self.project.creator) assert_false(mock_store.called) assert_false(mock_send_mail.called) @mock.patch('website.notifications.emails.store_emails') def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply_on_component(self, mock_store): # Test that comment replies on components that are not direct replies to the subscriber use the # "comments" email template. user = factories.UserFactory() time_now = datetime.datetime.utcnow() emails.notify('comments', user, self.node, time_now, target_user=user) mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, self.node, time_now, target_user=user) def test_check_node_node_none(self): subs = emails.check_node(None, 'comments') assert_equal(subs, {'email_transactional': [], 'email_digest': [], 'none': []}) def test_check_node_one(self): subs = emails.check_node(self.project, 'comments') assert_equal(subs, {'email_transactional': [self.project.creator._id], 'email_digest': [], 'none': []}) @mock.patch('website.project.views.comment.notify') def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify): # user subscribed to comment replies user = factories.UserFactory() user_subscription = factories.NotificationSubscriptionFactory( _id=user._id + '_comments', owner=user, event_name='comment_replies' ) user_subscription.email_transactional.append(user) user_subscription.save() # user is not subscribed to project comment notifications project = factories.ProjectFactory() # user comments on project target = factories.CommentFactory(node=project, user=user) content = 'hammer to fall' # reply to user (note: notify is called from Comment.create) reply = Comment.create( auth=Auth(project.creator), user=project.creator, node=project, content=content, target=Guid.load(target._id), is_public=True, ) assert_true(mock_notify.called) assert_equal(mock_notify.call_count, 2) def test_get_settings_url_for_node(self): url = emails.get_settings_url(self.project._id, self.user) assert_equal(url, self.project.absolute_url + 'settings/') def test_get_settings_url_for_user(self): url = emails.get_settings_url(self.user._id, self.user) assert_equal(url, web_url_for('user_notifications', _absolute=True)) def test_get_node_lineage(self): node_lineage = emails.get_node_lineage(self.node) assert_equal(node_lineage, [self.project._id, self.node._id]) def test_localize_timestamp(self): timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) self.user.timezone = 
'America/New_York' self.user.locale = 'en_US' self.user.save() tz = dates.get_timezone(self.user.timezone) locale = Locale(self.user.locale) formatted_date = dates.format_date(timestamp, format='full', locale=locale) formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date) assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime) def test_localize_timestamp_empty_timezone(self): timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) self.user.timezone = '' self.user.locale = 'en_US' self.user.save() tz = dates.get_timezone('Etc/UTC') locale = Locale(self.user.locale) formatted_date = dates.format_date(timestamp, format='full', locale=locale) formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date) assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime) def test_localize_timestamp_empty_locale(self): timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) self.user.timezone = 'America/New_York' self.user.locale = '' self.user.save() tz = dates.get_timezone(self.user.timezone) locale = Locale('en') formatted_date = dates.format_date(timestamp, format='full', locale=locale) formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date) assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime) def test_localize_timestamp_handles_unicode(self): timestamp = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) self.user.timezone = 'Europe/Moscow' self.user.locale = 'ru_RU' self.user.save() tz = dates.get_timezone(self.user.timezone) locale = Locale(self.user.locale) formatted_date = dates.format_date(timestamp, format='full', locale=locale) formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date) assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime) class TestSendDigest(OsfTestCase): def setUp(self): super(TestSendDigest, self).setUp() self.user_1 = factories.UserFactory() self.user_2 = factories.UserFactory() self.project = factories.ProjectFactory() self.timestamp = datetime.datetime.utcnow() def test_group_notifications_by_user_transactional(self): send_type = 'email_transactional' d = factories.NotificationDigestFactory( user_id=self.user_1._id, send_type=send_type, timestamp=self.timestamp, message='Hello', node_lineage=[self.project._id] ) d.save() d2 = factories.NotificationDigestFactory( user_id=self.user_2._id, send_type=send_type, timestamp=self.timestamp, message='Hello', node_lineage=[self.project._id] ) d2.save() d3 = factories.NotificationDigestFactory( user_id=self.user_2._id, send_type='email_digest', timestamp=self.timestamp, message='Hello, but this should not appear (this is a digest)', node_lineage=[self.project._id] ) d3.save() user_groups = get_users_emails(send_type) expected = [ { u'user_id': self.user_1._id, u'info': [{ u'message': u'Hello', u'node_lineage': [unicode(self.project._id)], u'_id': d._id }] }, { u'user_id': self.user_2._id, u'info': [{ u'message': u'Hello', u'node_lineage': [unicode(self.project._id)], u'_id': d2._id }] } ] assert_equal(len(user_groups), 2) 
assert_equal(user_groups, expected) digest_ids = [d._id, d2._id, d3._id] remove_notifications(email_notification_ids=digest_ids) def test_group_notifications_by_user_digest(self): send_type = 'email_digest' d = factories.NotificationDigestFactory( user_id=self.user_1._id, send_type=send_type, timestamp=self.timestamp, message='Hello', node_lineage=[self.project._id] ) d.save() d2 = factories.NotificationDigestFactory( user_id=self.user_2._id, send_type=send_type, timestamp=self.timestamp, message='Hello', node_lineage=[self.project._id] ) d2.save() d3 = factories.NotificationDigestFactory( user_id=self.user_2._id, send_type='email_transactional', timestamp=self.timestamp, message='Hello, but this should not appear (this is transactional)', node_lineage=[self.project._id] ) d3.save() user_groups = get_users_emails(send_type) expected = [ { u'user_id': self.user_1._id, u'info': [{ u'message': u'Hello', u'node_lineage': [unicode(self.project._id)], u'_id': d._id }] }, { u'user_id': self.user_2._id, u'info': [{ u'message': u'Hello', u'node_lineage': [unicode(self.project._id)], u'_id': d2._id }] } ] assert_equal(len(user_groups), 2) assert_equal(user_groups, expected) digest_ids = [d._id, d2._id, d3._id] remove_notifications(email_notification_ids=digest_ids) @mock.patch('website.mails.send_mail') def test_send_users_email_called_with_correct_args(self, mock_send_mail): send_type = 'email_transactional' d = factories.NotificationDigestFactory( user_id=factories.UserFactory()._id, send_type=send_type, timestamp=datetime.datetime.utcnow(), message='Hello', node_lineage=[factories.ProjectFactory()._id] ) d.save() user_groups = get_users_emails(send_type) send_users_email(send_type) assert_true(mock_send_mail.called) assert_equals(mock_send_mail.call_count, len(user_groups)) last_user_index = len(user_groups) - 1 user = User.load(user_groups[last_user_index]['user_id']) email_notification_ids = [message['_id'] for message in user_groups[last_user_index]['info']] args, kwargs = mock_send_mail.call_args assert_equal(kwargs['to_addr'], user.username) assert_equal(kwargs['mimetype'], 'html') assert_equal(kwargs['mail'], mails.DIGEST) assert_equal(kwargs['name'], user.fullname) message = group_by_node(user_groups[last_user_index]['info']) assert_equal(kwargs['message'], message) assert_equal(kwargs['callback'], remove_notifications(email_notification_ids=email_notification_ids)) def test_remove_sent_digest_notifications(self): d = factories.NotificationDigestFactory( user_id=factories.UserFactory()._id, timestamp=datetime.datetime.utcnow(), message='Hello', node_lineage=[factories.ProjectFactory()._id] ) digest_id = d._id remove_notifications(email_notification_ids=[digest_id]) with assert_raises(NoResultsFound): NotificationDigest.find_one(Q('_id', 'eq', digest_id))<|fim▁end|>
self.sub.email_digest.append(self.user_3) self.sub.save() utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
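Each record above pairs a prompt — a source file wrapped in <|fim▁begin|> … <|fim▁end|> markers with a single <|fim▁hole|> cut out of it — with the completion that belongs at the hole. Below is a minimal sketch of how such a pair can be spliced back into the original file; the marker strings come from the records themselves, while the function name and variables are illustrative only:

BEGIN = "<|fim▁begin|>"
HOLE = "<|fim▁hole|>"
END = "<|fim▁end|>"

def splice(prompt, completion):
    # Drop the file-name header and everything before the begin marker.
    body = prompt.split(BEGIN, 1)[1]
    # Keep only the text up to the end marker.
    body = body.split(END, 1)[0]
    # The hole marker splits the file into the prefix and suffix around the gap.
    prefix, suffix = body.split(HOLE, 1)
    return prefix + completion + suffix

Applied to any prompt/completion pair in these records, splice() yields the file as it looked before the middle was removed.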
<|file_name|>kiezkassen.py<|end_file_name|><|fim▁begin|>"""Kiezkassen proposal.""" from adhocracy_core.resources import add_resource_type_to_registry from adhocracy_core.resources import process from adhocracy_core.resources import proposal from adhocracy_core.sheets.geo import IPoint from adhocracy_core.sheets.geo import ILocationReference from adhocracy_core.sheets.image import IImageReference import adhocracy_meinberlin.sheets.kiezkassen class IProposalVersion(proposal.IProposalVersion): """Kiezkassen proposal version.""" proposal_version_meta = proposal.proposal_version_meta._replace( iresource=IProposalVersion, )._add(extended_sheets=(adhocracy_meinberlin.sheets.kiezkassen.IProposal, IPoint)) class IProposal(proposal.IProposal): """Kiezkassen proposal versions pool."""<|fim▁hole|> element_types=(IProposalVersion,), item_type=IProposalVersion, ) class IProcess(process.IProcess): """Kiezkassen participation process.""" process_meta = process.process_meta._replace( content_name='KiezkassenProcess', iresource=IProcess, element_types=(IProposal, ), is_implicit_addable=True, extended_sheets=( ILocationReference, IImageReference, ), default_workflow='kiezkassen', ) def includeme(config): """Add resource type to content.""" add_resource_type_to_registry(proposal_meta, config) add_resource_type_to_registry(proposal_version_meta, config) add_resource_type_to_registry(process_meta, config)<|fim▁end|>
proposal_meta = proposal.proposal_meta._replace( iresource=IProposal,
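The completion above leans on the namedtuple _replace idiom that the adhocracy resource metadata uses: each *_meta object is an immutable tuple, and subtypes derive their metadata by overriding selected fields. A small self-contained sketch of that pattern — the ResourceMeta fields here are invented for illustration, not the real adhocracy_core fields:

from collections import namedtuple

ResourceMeta = namedtuple("ResourceMeta", "iresource content_name element_types")

base_meta = ResourceMeta(iresource=None, content_name="Base", element_types=())

# _replace returns a new tuple with the given fields overridden, leaving the
# base metadata untouched -- the same move as proposal_meta._replace(...) above.
process_meta = base_meta._replace(content_name="KiezkassenProcess")

print(base_meta.content_name)     # Base
print(process_meta.content_name)  # KiezkassenProcess

Because the tuples are immutable, every subtype gets its own metadata object and the shared defaults can never be mutated in place.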
<|file_name|>numeral.js<|end_file_name|><|fim▁begin|>// numeral.js // version : 1.4.8 // author : Adam Draper // license : MIT // http://adamwdraper.github.com/Numeral-js/ (function () { /************************************ Constants ************************************/ var numeral, VERSION = '1.4.8', // internal storage for language config files languages = {}, currentLanguage = 'en', zeroFormat = null, // check for nodeJS hasModule = (typeof module !== 'undefined' && module.exports); /************************************ Constructors ************************************/ // Numeral prototype object function Numeral (number) { this._n = number; } /** * Implementation of toFixed() that treats floats more like decimals * * Fixes binary rounding issues (eg. (0.615).toFixed(2) === '0.61') that present * problems for accounting- and finance-related software. */ function toFixed (value, precision, optionals) { var power = Math.pow(10, precision), output; // Multiply up by precision, round accurately, then divide and use native toFixed(): output = (Math.round(value * power) / power).toFixed(precision); if (optionals) { var optionalsRegExp = new RegExp('0{1,' + optionals + '}$'); output = output.replace(optionalsRegExp, ''); } return output; } /************************************ Formatting ************************************/ // determine what type of formatting we need to do function formatNumeral (n, format) { var output; // figure out what kind of format we are dealing with if (format.indexOf('$') > -1) { // currency!!!!! output = formatCurrency(n, format); } else if (format.indexOf('%') > -1) { // percentage output = formatPercentage(n, format); } else if (format.indexOf(':') > -1) { // time output = formatTime(n, format); } else { // plain ol' numbers or bytes output = formatNumber(n, format); } // return string return output; } // revert to number function unformatNumeral (n, string) { if (string.indexOf(':') > -1) { n._n = unformatTime(string); } else { if (string === zeroFormat) { n._n = 0; } else { var stringOriginal = string; if (languages[currentLanguage].delimiters.decimal !== '.') { string = string.replace(/\./g,'').replace(languages[currentLanguage].delimiters.decimal, '.'); } // see if abbreviations are there so that we can multiply to the correct number var thousandRegExp = new RegExp(languages[currentLanguage].abbreviations.thousand + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$'), millionRegExp = new RegExp(languages[currentLanguage].abbreviations.million + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$'), billionRegExp = new RegExp(languages[currentLanguage].abbreviations.billion + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$'), trillionRegExp = new RegExp(languages[currentLanguage].abbreviations.trillion + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$'); // see if bytes are there so that we can multiply to the correct number var prefixes = ['KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'], bytesMultiplier = false; for (var power = 0; power <= prefixes.length; power++) { bytesMultiplier = (string.indexOf(prefixes[power]) > -1) ? Math.pow(1024, power + 1) : false; if (bytesMultiplier) { break; } } // do some math to create our number n._n = ((bytesMultiplier) ? bytesMultiplier : 1) * ((stringOriginal.match(thousandRegExp)) ? Math.pow(10, 3) : 1) * ((stringOriginal.match(millionRegExp)) ? Math.pow(10, 6) : 1) * ((stringOriginal.match(billionRegExp)) ? 
Math.pow(10, 9) : 1) * ((stringOriginal.match(trillionRegExp)) ? Math.pow(10, 12) : 1) * ((string.indexOf('%') > -1) ? 0.01 : 1) * Number(((string.indexOf('(') > -1) ? '-' : '') + string.replace(/[^0-9\.-]+/g, '')); // round if we are talking about bytes n._n = (bytesMultiplier) ? Math.ceil(n._n) : n._n; } } return n._n; } function formatCurrency (n, format) { var prependSymbol = (format.indexOf('$') <= 1) ? true : false; // remove $ for the moment var space = ''; // check for space before or after currency if (format.indexOf(' $') > -1) { space = ' '; format = format.replace(' $', ''); } else if (format.indexOf('$ ') > -1) { space = ' '; format = format.replace('$ ', ''); } else { format = format.replace('$', ''); } // format the number var output = formatNumeral(n, format); // position the symbol if (prependSymbol) { if (output.indexOf('(') > -1 || output.indexOf('-') > -1) { output = output.split(''); output.splice(1, 0, languages[currentLanguage].currency.symbol + space); output = output.join(''); } else { output = languages[currentLanguage].currency.symbol + space + output; } } else { if (output.indexOf(')') > -1) { output = output.split(''); output.splice(-1, 0, space + languages[currentLanguage].currency.symbol); output = output.join(''); } else { output = output + space + languages[currentLanguage].currency.symbol; } } return output; } function formatPercentage (n, format) { var space = ''; // check for space before % if (format.indexOf(' %') > -1) { space = ' '; format = format.replace(' %', ''); } else { format = format.replace('%', ''); } n._n = n._n * 100; var output = formatNumeral(n, format); if (output.indexOf(')') > -1 ) { output = output.split(''); output.splice(-1, 0, space + '%'); output = output.join(''); } else { output = output + space + '%'; }<|fim▁hole|> var hours = Math.floor(n._n/60/60), minutes = Math.floor((n._n - (hours * 60 * 60))/60), seconds = Math.round(n._n - (hours * 60 * 60) - (minutes * 60)); return hours + ':' + ((minutes < 10) ? '0' + minutes : minutes) + ':' + ((seconds < 10) ? 
'0' + seconds : seconds); } function unformatTime (string) { var timeArray = string.split(':'), seconds = 0; // turn hours and minutes into seconds and add them all up if (timeArray.length === 3) { // hours seconds = seconds + (Number(timeArray[0]) * 60 * 60); // minutes seconds = seconds + (Number(timeArray[1]) * 60); // seconds seconds = seconds + Number(timeArray[2]); } else if (timeArray.length === 2) { // minutes seconds = seconds + (Number(timeArray[0]) * 60); // seconds seconds = seconds + Number(timeArray[1]); } return Number(seconds); } function formatNumber (n, format) { var negP = false, optDec = false, abbr = '', bytes = '', ord = '', abs = Math.abs(n._n); // check if number is zero and a custom zero format has been set if (n._n === 0 && zeroFormat !== null) { return zeroFormat; } else { // see if we should use parentheses for negative number if (format.indexOf('(') > -1) { negP = true; format = format.slice(1, -1); } // see if abbreviation is wanted if (format.indexOf('a') > -1) { // check for space before abbreviation if (format.indexOf(' a') > -1) { abbr = ' '; format = format.replace(' a', ''); } else { format = format.replace('a', ''); } if (abs >= Math.pow(10, 12)) { // trillion abbr = abbr + languages[currentLanguage].abbreviations.trillion; n._n = n._n / Math.pow(10, 12); } else if (abs < Math.pow(10, 12) && abs >= Math.pow(10, 9)) { // billion abbr = abbr + languages[currentLanguage].abbreviations.billion; n._n = n._n / Math.pow(10, 9); } else if (abs < Math.pow(10, 9) && abs >= Math.pow(10, 6)) { // million abbr = abbr + languages[currentLanguage].abbreviations.million; n._n = n._n / Math.pow(10, 6); } else if (abs < Math.pow(10, 6) && abs >= Math.pow(10, 3)) { // thousand abbr = abbr + languages[currentLanguage].abbreviations.thousand; n._n = n._n / Math.pow(10, 3); } } // see if we are formatting bytes if (format.indexOf('b') > -1) { // check for space before if (format.indexOf(' b') > -1) { bytes = ' '; format = format.replace(' b', ''); } else { format = format.replace('b', ''); } var prefixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'], min, max; for (var power = 0; power <= prefixes.length; power++) { min = Math.pow(1024, power); max = Math.pow(1024, power+1); if (n._n >= min && n._n < max) { bytes = bytes + prefixes[power]; if (min > 0) { n._n = n._n / min; } break; } } } // see if ordinal is wanted if (format.indexOf('o') > -1) { // check for space before if (format.indexOf(' o') > -1) { ord = ' '; format = format.replace(' o', ''); } else { format = format.replace('o', ''); } ord = ord + languages[currentLanguage].ordinal(n._n); } if (format.indexOf('[.]') > -1) { optDec = true; format = format.replace('[.]', '.'); } var w = n._n.toString().split('.')[0], precision = format.split('.')[1], thousands = format.indexOf(','), d = '', neg = false; if (precision) { if (precision.indexOf('[') > -1) { precision = precision.replace(']', ''); precision = precision.split('['); d = toFixed(n._n, (precision[0].length + precision[1].length), precision[1].length); } else { d = toFixed(n._n, precision.length); } w = d.split('.')[0]; if (d.split('.')[1].length) { d = languages[currentLanguage].delimiters.decimal + d.split('.')[1]; } else { d = ''; } if (optDec && Number(d.slice(1)) === 0) { d = ''; } } else { w = toFixed(n._n, null); } // format number if (w.indexOf('-') > -1) { w = w.slice(1); neg = true; } if (thousands > -1) { w = w.toString().replace(/(\d)(?=(\d{3})+(?!\d))/g, '$1' + languages[currentLanguage].delimiters.thousands); } if (format.indexOf('.')
=== 0) { w = ''; } return ((negP && neg) ? '(' : '') + ((!negP && neg) ? '-' : '') + w + d + ((ord) ? ord : '') + ((abbr) ? abbr : '') + ((bytes) ? bytes : '') + ((negP && neg) ? ')' : ''); } } /************************************ Top Level Functions ************************************/ numeral = function (input) { if (numeral.isNumeral(input)) { input = input.value(); } else if (!Number(input)) { input = 0; } return new Numeral(Number(input)); }; // version number numeral.version = VERSION; // compare numeral object numeral.isNumeral = function (obj) { return obj instanceof Numeral; }; // This function will load languages and then set the global language. If // no arguments are passed in, it will simply return the current global // language key. numeral.language = function (key, values) { if (!key) { return currentLanguage; } if (key && !values) { if(!languages[key]) { throw new Error('Unknown language : ' + key); } currentLanguage = key; } if (values || !languages[key]) { loadLanguage(key, values); } return numeral; }; numeral.language('en', { delimiters: { thousands: ',', decimal: '.' }, abbreviations: { thousand: 'k', million: 'm', billion: 'b', trillion: 't' }, ordinal: function (number) { var b = number % 10; return (~~ (number % 100 / 10) === 1) ? 'th' : (b === 1) ? 'st' : (b === 2) ? 'nd' : (b === 3) ? 'rd' : 'th'; }, currency: { symbol: '$' } }); numeral.zeroFormat = function (format) { if (typeof(format) === 'string') { zeroFormat = format; } else { zeroFormat = null; } }; /************************************ Helpers ************************************/ function loadLanguage(key, values) { languages[key] = values; } /************************************ Numeral Prototype ************************************/ numeral.fn = Numeral.prototype = { clone : function () { return numeral(this); }, format : function (inputString) { return formatNumeral(this, inputString ? inputString : numeral.defaultFormat); }, unformat : function (inputString) { return unformatNumeral(this, inputString ? inputString : numeral.defaultFormat); }, value : function () { return this._n; }, valueOf : function () { return this._n; }, set : function (value) { this._n = Number(value); return this; }, add : function (value) { this._n = this._n + Number(value); return this; }, subtract : function (value) { this._n = this._n - Number(value); return this; }, multiply : function (value) { this._n = this._n * Number(value); return this; }, divide : function (value) { this._n = this._n / Number(value); return this; }, difference : function (value) { var difference = this._n - Number(value); if (difference < 0) { difference = -difference; } return difference; } }; /************************************ Exposing Numeral ************************************/ // CommonJS module is defined if (hasModule) { module.exports = numeral; } /*global ender:false */ if (typeof ender === 'undefined') { // here, `this` means `window` in the browser, or `global` on the server // add `numeral` as a global object via a string identifier, // for Closure Compiler 'advanced' mode this['numeral'] = numeral; } /*global define:false */ if (typeof define === 'function' && define.amd) { define([], function () { return numeral; }); } }).call(this);<|fim▁end|>
return output; } function formatTime (n, format) {
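The formatTime/unformatTime pair in the example above converts a number of seconds to an H:MM:SS string and back. A rough Python equivalent of that round trip — not part of numeral.js, just an illustration of the arithmetic, with invented function names:

def format_time(total_seconds):
    hours, rem = divmod(int(total_seconds), 3600)
    minutes, seconds = divmod(rem, 60)
    return "%d:%02d:%02d" % (hours, minutes, seconds)

def unformat_time(text):
    seconds = 0
    for part in text.split(":"):   # handles both M:SS and H:MM:SS inputs
        seconds = seconds * 60 + int(part)
    return seconds

assert unformat_time(format_time(25 * 3600 + 90)) == 25 * 3600 + 90

Folding over the split parts (seconds = seconds * 60 + part) gives the same result as the explicit two- and three-field branches the JavaScript version spells out.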
<|file_name|>permission_bubble_media_access_handler.cc<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/media/webrtc/permission_bubble_media_access_handler.h" #include <memory> #include <utility> #include "base/bind.h" #include "base/callback_helpers.h" #include "base/metrics/field_trial.h" #include "base/task/post_task.h" #include "build/build_config.h" #include "chrome/browser/media/webrtc/media_capture_devices_dispatcher.h" #include "chrome/browser/media/webrtc/media_stream_capture_indicator.h" #include "chrome/browser/media/webrtc/media_stream_device_permissions.h" #include "chrome/browser/permissions/permission_manager_factory.h" #include "chrome/browser/profiles/profile.h" #include "chrome/common/pref_names.h" #include "components/content_settings/browser/tab_specific_content_settings.h" #include "components/content_settings/core/browser/host_content_settings_map.h" #include "components/permissions/permission_manager.h" #include "components/permissions/permission_result.h" #include "components/pref_registry/pref_registry_syncable.h" #include "components/prefs/pref_service.h" #include "components/webrtc/media_stream_devices_controller.h" #include "content/public/browser/browser_task_traits.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/notification_service.h" #include "content/public/browser/notification_types.h" #include "content/public/browser/web_contents.h" #if defined(OS_ANDROID) #include <vector> #include "chrome/browser/flags/android/chrome_feature_list.h" #include "chrome/browser/media/webrtc/screen_capture_infobar_delegate_android.h" #include "components/permissions/permission_uma_util.h" #include "components/permissions/permission_util.h" #endif // defined(OS_ANDROID) #if defined(OS_MACOSX) #include "base/metrics/histogram_macros.h" #include "chrome/browser/content_settings/chrome_content_settings_utils.h" #include "chrome/browser/media/webrtc/system_media_capture_permissions_mac.h" #include "chrome/browser/media/webrtc/system_media_capture_permissions_stats_mac.h" #endif using content::BrowserThread; using RepeatingMediaResponseCallback = base::RepeatingCallback<void(const blink::MediaStreamDevices& devices, blink::mojom::MediaStreamRequestResult result, std::unique_ptr<content::MediaStreamUI> ui)>; #if defined(OS_MACOSX) using system_media_permissions::SystemPermission; #endif namespace { void UpdateTabSpecificContentSettings( content::WebContents* web_contents, const content::MediaStreamRequest& request, ContentSetting audio_setting, ContentSetting video_setting) { if (!web_contents) return; auto* content_settings = content_settings::TabSpecificContentSettings::FromWebContents( web_contents); if (!content_settings) return; content_settings::TabSpecificContentSettings::MicrophoneCameraState microphone_camera_state = content_settings::TabSpecificContentSettings:: MICROPHONE_CAMERA_NOT_ACCESSED; std::string selected_audio_device; std::string selected_video_device; std::string requested_audio_device = request.requested_audio_device_id; std::string requested_video_device = request.requested_video_device_id; // TODO(raymes): Why do we use the defaults here for the selected devices? // Shouldn't we just use the devices that were actually selected? 
Profile* profile = Profile::FromBrowserContext(web_contents->GetBrowserContext()); if (audio_setting != CONTENT_SETTING_DEFAULT) { selected_audio_device = requested_audio_device.empty()<|fim▁hole|> ? profile->GetPrefs()->GetString(prefs::kDefaultAudioCaptureDevice) : requested_audio_device; microphone_camera_state |= content_settings::TabSpecificContentSettings::MICROPHONE_ACCESSED | (audio_setting == CONTENT_SETTING_ALLOW ? 0 : content_settings::TabSpecificContentSettings:: MICROPHONE_BLOCKED); } if (video_setting != CONTENT_SETTING_DEFAULT) { selected_video_device = requested_video_device.empty() ? profile->GetPrefs()->GetString(prefs::kDefaultVideoCaptureDevice) : requested_video_device; microphone_camera_state |= content_settings::TabSpecificContentSettings::CAMERA_ACCESSED | (video_setting == CONTENT_SETTING_ALLOW ? 0 : content_settings::TabSpecificContentSettings::CAMERA_BLOCKED); } content_settings->OnMediaStreamPermissionSet( PermissionManagerFactory::GetForProfile(profile)->GetCanonicalOrigin( ContentSettingsType::MEDIASTREAM_CAMERA, request.security_origin, web_contents->GetLastCommittedURL()), microphone_camera_state, selected_audio_device, selected_video_device, requested_audio_device, requested_video_device); } } // namespace struct PermissionBubbleMediaAccessHandler::PendingAccessRequest { PendingAccessRequest(const content::MediaStreamRequest& request, RepeatingMediaResponseCallback callback) : request(request), callback(callback) {} ~PendingAccessRequest() {} // TODO(gbillock): make the MediaStreamDevicesController owned by // this object when we're using bubbles. content::MediaStreamRequest request; RepeatingMediaResponseCallback callback; }; PermissionBubbleMediaAccessHandler::PermissionBubbleMediaAccessHandler() { // PermissionBubbleMediaAccessHandler should be created on UI thread. // Otherwise, it will not receive // content::NOTIFICATION_WEB_CONTENTS_DESTROYED, and that will result in // possible use after free. DCHECK_CURRENTLY_ON(BrowserThread::UI); notifications_registrar_.Add(this, content::NOTIFICATION_WEB_CONTENTS_DESTROYED, content::NotificationService::AllSources()); } PermissionBubbleMediaAccessHandler::~PermissionBubbleMediaAccessHandler() {} bool PermissionBubbleMediaAccessHandler::SupportsStreamType( content::WebContents* web_contents, const blink::mojom::MediaStreamType type, const extensions::Extension* extension) { #if defined(OS_ANDROID) return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE || type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE || type == blink::mojom::MediaStreamType::GUM_DESKTOP_VIDEO_CAPTURE || type == blink::mojom::MediaStreamType::DISPLAY_VIDEO_CAPTURE; #else return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE || type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE; #endif } bool PermissionBubbleMediaAccessHandler::CheckMediaAccessPermission( content::RenderFrameHost* render_frame_host, const GURL& security_origin, blink::mojom::MediaStreamType type, const extensions::Extension* extension) { content::WebContents* web_contents = content::WebContents::FromRenderFrameHost(render_frame_host); Profile* profile = Profile::FromBrowserContext(web_contents->GetBrowserContext()); ContentSettingsType content_settings_type = type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE ? 
ContentSettingsType::MEDIASTREAM_MIC : ContentSettingsType::MEDIASTREAM_CAMERA; DCHECK(!security_origin.is_empty()); GURL embedding_origin = web_contents->GetLastCommittedURL().GetOrigin(); permissions::PermissionManager* permission_manager = PermissionManagerFactory::GetForProfile(profile); return permission_manager ->GetPermissionStatusForFrame(content_settings_type, render_frame_host, security_origin) .content_setting == CONTENT_SETTING_ALLOW; } void PermissionBubbleMediaAccessHandler::HandleRequest( content::WebContents* web_contents, const content::MediaStreamRequest& request, content::MediaResponseCallback callback, const extensions::Extension* extension) { DCHECK_CURRENTLY_ON(BrowserThread::UI); #if defined(OS_ANDROID) if (blink::IsScreenCaptureMediaType(request.video_type) && !base::FeatureList::IsEnabled( chrome::android::kUserMediaScreenCapturing)) { // If screen capturing isn't enabled on Android, we'll use "invalid state" // as result, same as on desktop. std::move(callback).Run( blink::MediaStreamDevices(), blink::mojom::MediaStreamRequestResult::INVALID_STATE, nullptr); return; } #endif // defined(OS_ANDROID) RequestsMap& requests_map = pending_requests_[web_contents]; requests_map.emplace( next_request_id_++, PendingAccessRequest( request, base::AdaptCallbackForRepeating(std::move(callback)))); // If this is the only request then show the infobar. if (requests_map.size() == 1) ProcessQueuedAccessRequest(web_contents); } void PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest( content::WebContents* web_contents) { DCHECK_CURRENTLY_ON(BrowserThread::UI); auto it = pending_requests_.find(web_contents); if (it == pending_requests_.end() || it->second.empty()) { // Don't do anything if the tab was closed. return; } DCHECK(!it->second.empty()); const int request_id = it->second.begin()->first; const content::MediaStreamRequest& request = it->second.begin()->second.request; #if defined(OS_ANDROID) if (blink::IsScreenCaptureMediaType(request.video_type)) { ScreenCaptureInfoBarDelegateAndroid::Create( web_contents, request, base::BindOnce( &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse, base::Unretained(this), web_contents, request_id)); return; } #endif webrtc::MediaStreamDevicesController::RequestPermissions( request, MediaCaptureDevicesDispatcher::GetInstance(), base::BindOnce( &PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse, base::Unretained(this), web_contents, request_id, request)); } void PermissionBubbleMediaAccessHandler::UpdateMediaRequestState( int render_process_id, int render_frame_id, int page_request_id, blink::mojom::MediaStreamType stream_type, content::MediaRequestState state) { DCHECK_CURRENTLY_ON(BrowserThread::UI); if (state != content::MEDIA_REQUEST_STATE_CLOSING) return; bool found = false; for (auto requests_it = pending_requests_.begin(); requests_it != pending_requests_.end(); ++requests_it) { RequestsMap& requests_map = requests_it->second; for (RequestsMap::iterator it = requests_map.begin(); it != requests_map.end(); ++it) { if (it->second.request.render_process_id == render_process_id && it->second.request.render_frame_id == render_frame_id && it->second.request.page_request_id == page_request_id) { requests_map.erase(it); found = true; break; } } if (found) break; } } // static void PermissionBubbleMediaAccessHandler::RegisterProfilePrefs( user_prefs::PrefRegistrySyncable* prefs) { prefs->RegisterBooleanPref(prefs::kVideoCaptureAllowed, true); prefs->RegisterBooleanPref(prefs::kAudioCaptureAllowed, true); 
prefs->RegisterListPref(prefs::kVideoCaptureAllowedUrls); prefs->RegisterListPref(prefs::kAudioCaptureAllowedUrls); } void PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse( content::WebContents* web_contents, int request_id, content::MediaStreamRequest request, const blink::MediaStreamDevices& devices, blink::mojom::MediaStreamRequestResult result, bool blocked_by_feature_policy, ContentSetting audio_setting, ContentSetting video_setting) { if (pending_requests_.find(web_contents) == pending_requests_.end()) { // WebContents has been destroyed. Don't need to do anything. return; } // If the kill switch is on, or the request was blocked because of feature // policy, we don't update the tab context. if (result != blink::mojom::MediaStreamRequestResult::KILL_SWITCH_ON && !blocked_by_feature_policy) { UpdateTabSpecificContentSettings(web_contents, request, audio_setting, video_setting); } std::unique_ptr<content::MediaStreamUI> ui; if (!devices.empty()) { ui = MediaCaptureDevicesDispatcher::GetInstance() ->GetMediaStreamCaptureIndicator() ->RegisterMediaStream(web_contents, devices); } OnAccessRequestResponse(web_contents, request_id, devices, result, std::move(ui)); } void PermissionBubbleMediaAccessHandler::OnAccessRequestResponse( content::WebContents* web_contents, int request_id, const blink::MediaStreamDevices& devices, blink::mojom::MediaStreamRequestResult result, std::unique_ptr<content::MediaStreamUI> ui) { DCHECK_CURRENTLY_ON(BrowserThread::UI); auto request_maps_it = pending_requests_.find(web_contents); if (request_maps_it == pending_requests_.end()) { // WebContents has been destroyed. Don't need to do anything. return; } RequestsMap& requests_map(request_maps_it->second); if (requests_map.empty()) return; auto request_it = requests_map.find(request_id); DCHECK(request_it != requests_map.end()); if (request_it == requests_map.end()) return; blink::mojom::MediaStreamRequestResult final_result = result; #if defined(OS_MACOSX) // If the request was approved, ask for system permissions if needed, and run // this function again when done. if (result == blink::mojom::MediaStreamRequestResult::OK) { const content::MediaStreamRequest& request = request_it->second.request; if (request.audio_type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE) { const SystemPermission system_audio_permission = system_media_permissions::CheckSystemAudioCapturePermission(); UMA_HISTOGRAM_ENUMERATION( "Media.Audio.Capture.Mac.MicSystemPermission.UserMedia", system_audio_permission); if (system_audio_permission == SystemPermission::kNotDetermined) { // Using WeakPtr since callback can come at any time and we might be // destroyed.
system_media_permissions::RequestSystemAudioCapturePermisson( base::BindOnce( &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse, weak_factory_.GetWeakPtr(), web_contents, request_id, devices, result, std::move(ui)), {content::BrowserThread::UI}); return; } else if (system_audio_permission == SystemPermission::kRestricted || system_audio_permission == SystemPermission::kDenied) { content_settings::UpdateLocationBarUiForWebContents(web_contents); final_result = blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED; system_media_permissions::SystemAudioCapturePermissionBlocked(); } else { DCHECK_EQ(system_audio_permission, SystemPermission::kAllowed); content_settings::UpdateLocationBarUiForWebContents(web_contents); } } if (request.video_type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE) { const SystemPermission system_video_permission = system_media_permissions::CheckSystemVideoCapturePermission(); UMA_HISTOGRAM_ENUMERATION( "Media.Video.Capture.Mac.CameraSystemPermission.UserMedia", system_video_permission); if (system_video_permission == SystemPermission::kNotDetermined) { // Using WeakPtr since callback can come at any time and we might be // destroyed. system_media_permissions::RequestSystemVideoCapturePermisson( base::BindOnce( &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse, weak_factory_.GetWeakPtr(), web_contents, request_id, devices, result, std::move(ui)), {content::BrowserThread::UI}); return; } else if (system_video_permission == SystemPermission::kRestricted || system_video_permission == SystemPermission::kDenied) { content_settings::UpdateLocationBarUiForWebContents(web_contents); final_result = blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED; system_media_permissions::SystemVideoCapturePermissionBlocked(); } else { DCHECK_EQ(system_video_permission, SystemPermission::kAllowed); content_settings::UpdateLocationBarUiForWebContents(web_contents); } } } #endif // defined(OS_MACOSX) RepeatingMediaResponseCallback callback = std::move(request_it->second.callback); requests_map.erase(request_it); if (!requests_map.empty()) { // Post a task to process next queued request. It has to be done // asynchronously to make sure that calling infobar is not destroyed until // after this function returns. base::PostTask( FROM_HERE, {BrowserThread::UI}, base::BindOnce( &PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest, base::Unretained(this), web_contents)); } std::move(callback).Run(devices, final_result, std::move(ui)); } void PermissionBubbleMediaAccessHandler::Observe( int type, const content::NotificationSource& source, const content::NotificationDetails& details) { DCHECK_CURRENTLY_ON(BrowserThread::UI); DCHECK_EQ(content::NOTIFICATION_WEB_CONTENTS_DESTROYED, type); pending_requests_.erase(content::Source<content::WebContents>(source).ptr()); }<|fim▁end|>
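The handler above keeps a queue of pending media requests per WebContents, prompts only for the head of each queue, and moves on once a response arrives. A compact Python sketch of that bookkeeping; the class, method, and argument names are invented stand-ins for the Chromium types:

from collections import defaultdict, deque

class AccessHandler:
    def __init__(self):
        # One FIFO of (request, callback) pairs per page, like pending_requests_.
        self._pending = defaultdict(deque)

    def handle_request(self, page, request, callback):
        queue = self._pending[page]
        queue.append((request, callback))
        if len(queue) == 1:          # only prompt when nothing else is in flight
            self._process_head(page)

    def _process_head(self, page):
        request, _ = self._pending[page][0]
        print("prompting for", request)   # stands in for showing the bubble

    def on_response(self, page, result):
        queue = self._pending[page]
        _, callback = queue.popleft()
        callback(result)
        if queue:                    # the next queued request gets its turn
            self._process_head(page)

handler = AccessHandler()
handler.handle_request("tab-1", "camera", lambda ok: print("granted:", ok))
handler.on_response("tab-1", True)

Serializing the prompts this way is what lets the real handler show a single permission bubble per tab at a time.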
<|file_name|>rpc_blockchain.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test RPCs related to blockchainstate. Test the following RPCs: - getblockchaininfo - gettxoutsetinfo - getdifficulty - getbestblockhash - getblockhash - getblockheader - getchaintxstats - getnetworkhashps - verifychain Tests correspond to code in rpc/blockchain.cpp. """ from decimal import Decimal import http.client import subprocess from test_framework.test_framework import SyscoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_greater_than_or_equal, assert_raises, assert_raises_rpc_error, assert_is_hex_string, assert_is_hash_string, ) from test_framework.blocktools import ( create_block, create_coinbase, TIME_GENESIS_BLOCK, ) from test_framework.messages import ( msg_block, ) from test_framework.mininode import ( P2PInterface, ) class BlockchainTest(SyscoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.supports_cli = False def run_test(self): self.mine_chain() self.restart_node(0, extra_args=['-stopatheight=207', '-prune=1']) # Set extra args with pruning after rescan is complete self._test_getblockchaininfo() self._test_getchaintxstats() self._test_gettxoutsetinfo() self._test_getblockheader() self._test_getdifficulty() self._test_getnetworkhashps() self._test_stopatheight() self._test_waitforblockheight() assert self.nodes[0].verifychain(4, 0) def mine_chain(self): self.log.info('Create some old blocks') address = self.nodes[0].get_deterministic_priv_key().address for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600): # ten-minute steps from genesis block time self.nodes[0].setmocktime(t) self.nodes[0].generatetoaddress(1, address) assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200) def _test_getblockchaininfo(self): self.log.info("Test getblockchaininfo") keys = [ 'bestblockhash', 'blocks', 'chain', 'chainwork', 'difficulty', 'headers', 'initialblockdownload', 'mediantime', 'pruned', 'size_on_disk', 'softforks', 'verificationprogress', 'warnings', ] res = self.nodes[0].getblockchaininfo() # result should have these additional pruning keys if manual pruning is enabled assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys)) # size_on_disk should be > 0 assert_greater_than(res['size_on_disk'], 0) # pruneheight should be greater or equal to 0 assert_greater_than_or_equal(res['pruneheight'], 0) # check other pruning fields given that prune=1 assert res['pruned'] assert not res['automatic_pruning'] self.restart_node(0, ['-stopatheight=207']) res = self.nodes[0].getblockchaininfo() # should have exact keys assert_equal(sorted(res.keys()), keys) self.restart_node(0, ['-stopatheight=207', '-prune=550']) res = self.nodes[0].getblockchaininfo() # result should have these additional pruning keys if prune=550 assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys)) # check related fields assert res['pruned'] assert_equal(res['pruneheight'], 0) assert res['automatic_pruning'] assert_equal(res['prune_target_size'], 576716800) assert_greater_than(res['size_on_disk'], 0) assert_equal(res['softforks'], { 'bip34': {'type': 'buried', 'active': False, 'height': 500}, 'bip66': {'type': 'buried', 'active': False, 'height': 1251}, 'bip65': 
{'type': 'buried', 'active': False, 'height': 1351}, 'csv': {'type': 'buried', 'active': False, 'height': 432}, 'segwit': {'type': 'buried', 'active': True, 'height': 0}, 'testdummy': { 'type': 'bip9', 'bip9': { 'status': 'started', 'bit': 28, 'start_time': 0, 'timeout': 0x7fffffffffffffff, # testdummy does not have a timeout so is set to the max int64 value 'since': 144, 'statistics': { 'period': 144, 'threshold': 108, 'elapsed': 57, 'count': 57, 'possible': True,<|fim▁hole|> def _test_getchaintxstats(self): self.log.info("Test getchaintxstats") # Test `getchaintxstats` invalid extra parameters assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0) # Test `getchaintxstats` invalid `nblocks` assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '') assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1) assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount()) # Test `getchaintxstats` invalid `blockhash` assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0) assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0') assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000') assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000') blockhash = self.nodes[0].getblockhash(200) self.nodes[0].invalidateblock(blockhash) assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash) self.nodes[0].reconsiderblock(blockhash) chaintxstats = self.nodes[0].getchaintxstats(nblocks=1) # 200 txs plus genesis tx assert_equal(chaintxstats['txcount'], 201) # tx rate should be 1 per 10 minutes, or 1/600 # we have to round because of binary math assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1)) b1_hash = self.nodes[0].getblockhash(1) b1 = self.nodes[0].getblock(b1_hash) b200_hash = self.nodes[0].getblockhash(200) b200 = self.nodes[0].getblock(b200_hash) time_diff = b200['mediantime'] - b1['mediantime'] chaintxstats = self.nodes[0].getchaintxstats() assert_equal(chaintxstats['time'], b200['time']) assert_equal(chaintxstats['txcount'], 201) assert_equal(chaintxstats['window_final_block_hash'], b200_hash) assert_equal(chaintxstats['window_final_block_height'], 200) assert_equal(chaintxstats['window_block_count'], 199) assert_equal(chaintxstats['window_tx_count'], 199) assert_equal(chaintxstats['window_interval'], time_diff) assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199)) chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash) assert_equal(chaintxstats['time'], b1['time']) assert_equal(chaintxstats['txcount'], 2) assert_equal(chaintxstats['window_final_block_hash'], b1_hash) assert_equal(chaintxstats['window_final_block_height'], 1) assert_equal(chaintxstats['window_block_count'], 0) assert 'window_tx_count' not in chaintxstats assert 'window_interval' not in chaintxstats assert 'txrate' not in chaintxstats def _test_gettxoutsetinfo(self): node = 
self.nodes[0] res = node.gettxoutsetinfo() assert_equal(res['total_amount'], Decimal('8725.00000000')) assert_equal(res['transactions'], 200) assert_equal(res['height'], 200) assert_equal(res['txouts'], 200) assert_equal(res['bogosize'], 15000), assert_equal(res['bestblock'], node.getblockhash(200)) size = res['disk_size'] assert size > 6400 assert size < 64000 assert_equal(len(res['bestblock']), 64) assert_equal(len(res['hash_serialized_2']), 64) self.log.info("Test that gettxoutsetinfo() works for blockchain with just the genesis block") b1hash = node.getblockhash(1) node.invalidateblock(b1hash) res2 = node.gettxoutsetinfo() assert_equal(res2['transactions'], 0) assert_equal(res2['total_amount'], Decimal('0')) assert_equal(res2['height'], 0) assert_equal(res2['txouts'], 0) assert_equal(res2['bogosize'], 0), assert_equal(res2['bestblock'], node.getblockhash(0)) assert_equal(len(res2['hash_serialized_2']), 64) self.log.info("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block") node.reconsiderblock(b1hash) res3 = node.gettxoutsetinfo() # The field 'disk_size' is non-deterministic and can thus not be # compared between res and res3. Everything else should be the same. del res['disk_size'], res3['disk_size'] assert_equal(res, res3) def _test_getblockheader(self): node = self.nodes[0] assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense") assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844") assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844") besthash = node.getbestblockhash() secondbesthash = node.getblockhash(199) header = node.getblockheader(blockhash=besthash) assert_equal(header['hash'], besthash) assert_equal(header['height'], 200) assert_equal(header['confirmations'], 1) assert_equal(header['previousblockhash'], secondbesthash) assert_is_hex_string(header['chainwork']) assert_equal(header['nTx'], 1) assert_is_hash_string(header['hash']) assert_is_hash_string(header['previousblockhash']) assert_is_hash_string(header['merkleroot']) assert_is_hash_string(header['bits'], length=None) assert isinstance(header['time'], int) assert isinstance(header['mediantime'], int) assert isinstance(header['nonce'], int) assert isinstance(header['version'], int) assert isinstance(int(header['versionHex'], 16), int) assert isinstance(header['difficulty'], Decimal) def _test_getdifficulty(self): difficulty = self.nodes[0].getdifficulty() # 1 hash in 2 should be valid, so difficulty should be 1/2**31 # binary => decimal => binary math is why we do this check assert abs(difficulty * 2**31 - 1) < 0.0001 def _test_getnetworkhashps(self): hashes_per_second = self.nodes[0].getnetworkhashps() # This should be 2 hashes every 10 minutes or 1/300 assert abs(hashes_per_second * 300 - 1) < 0.0001 def _test_stopatheight(self): assert_equal(self.nodes[0].getblockcount(), 200) self.nodes[0].generatetoaddress(6, self.nodes[0].get_deterministic_priv_key().address) assert_equal(self.nodes[0].getblockcount(), 206) self.log.debug('Node should not stop at this height') assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3)) try: self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) except (ConnectionError, 
http.client.BadStatusLine): pass # The node already shut down before response self.log.debug('Node should stop at this height...') self.nodes[0].wait_until_stopped() self.start_node(0) assert_equal(self.nodes[0].getblockcount(), 207) def _test_waitforblockheight(self): self.log.info("Test waitforblockheight") node = self.nodes[0] node.add_p2p_connection(P2PInterface()) current_height = node.getblock(node.getbestblockhash())['height'] # Create a fork somewhere below our current height, invalidate the tip # of that fork, and then ensure that waitforblockheight still # works as expected. # # (Previously this was broken based on setting # `rpc/blockchain.cpp:latestblock` incorrectly.) # b20hash = node.getblockhash(20) b20 = node.getblock(b20hash) def solve_and_send_block(prevhash, height, time): b = create_block(prevhash, create_coinbase(height), time) b.solve() node.p2p.send_message(msg_block(b)) node.p2p.sync_with_ping() return b b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1) b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1) node.invalidateblock(b22f.hash) def assert_waitforheight(height, timeout=2): assert_equal( node.waitforblockheight(height=height, timeout=timeout)['height'], current_height) assert_waitforheight(0) assert_waitforheight(current_height - 1) assert_waitforheight(current_height) assert_waitforheight(current_height + 1) if __name__ == '__main__': BlockchainTest().main()<|fim▁end|>
}, }, 'active': False} })
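The getchaintxstats assertions in the test above check that txrate is the window's transaction count divided by its time span in seconds. The same arithmetic, restated as a small sketch (the function and variable names are ours, not RPC fields):

from decimal import Decimal

def tx_rate(window_tx_count, window_interval):
    # Transactions per second over the stats window.
    return Decimal(window_tx_count) / Decimal(window_interval)

# One block every 600 seconds with one tx per block gives a rate of 1/600,
# which is why the test multiplies by the interval and rounds before comparing.
assert round(tx_rate(199, 199 * 600) * 600, 10) == Decimal(1)

Rounding to ten places sidesteps the binary/decimal representation noise the test's own comments mention.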
<|file_name|>new-box.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn f(x: Box<int>) { let y: &int = &*x; println!("{}", *x); println!("{}", *y); } trait Trait { fn printme(&self); } struct Struct; impl Trait for Struct { fn printme(&self) { println!("hello world!"); } } fn g(x: Box<Trait>) { x.printme(); let y: &Trait = &*x; y.printme();<|fim▁hole|> fn main() { f(box 1234); g(box Struct as Box<Trait>); }<|fim▁end|>
}
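The Rust example above calls printme through a Box<Trait>, so the concrete method is chosen at runtime. A loose Python analogue of that dynamic dispatch through an abstract base class — it mirrors the call structure only, not Rust's ownership or boxing semantics:

from abc import ABC, abstractmethod

class Trait(ABC):
    @abstractmethod
    def printme(self):
        ...

class Struct(Trait):
    def printme(self):
        print("hello world!")

def g(x):
    # x can be any Trait implementor; the call resolves at runtime,
    # like the trait-object call in the Rust snippet.
    x.printme()

g(Struct())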
<|file_name|>persistent-object-attribute-presenter.ts<|end_file_name|><|fim▁begin|>namespace Vidyano.WebComponents { "use strict"; @WebComponent.register({ properties: { attribute: Object, noLabel: { type: Boolean, reflectToAttribute: true, value: false }, editing: { type: Boolean, reflectToAttribute: true, computed: "_computeEditing(attribute.parent.isEditing, nonEdit)" }, nonEdit: { type: Boolean, reflectToAttribute: true, value: false, observer: "_nonEditChanged" },<|fim▁hole|> reflectToAttribute: true, computed: "_computeRequired(attribute, attribute.isRequired, attribute.value)" }, disabled: { type: Boolean, reflectToAttribute: true, value: false, observer: "_disabledChanged" }, readOnly: { type: Boolean, reflectToAttribute: true, computed: "_computeReadOnly(attribute.isReadOnly, attribute.parent.isFrozen, disabled)" }, bulkEdit: { type: Boolean, reflectToAttribute: true, computed: "attribute.parent.isBulkEdit" }, loading: { type: Boolean, reflectToAttribute: true, readOnly: true, value: true, observer: "_loadingChanged" }, height: { type: Number, reflectToAttribute: true }, hidden: { type: Boolean, reflectToAttribute: true, computed: "_isHidden(attribute.isVisible)" }, hasError: { type: Boolean, reflectToAttribute: true, computed: "_computeHasError(attribute.validationError)" } }, hostAttributes: { "tabindex": "-1" }, listeners: { "focus": "_onFocus" }, observers: [ "_attributeChanged(attribute, isAttached)" ], forwardObservers: [ "attribute.parent.isEditing", "attribute.parent.isFrozen", "attribute.isRequired", "attribute.isReadOnly", "attribute.isVisible", "attribute.value", "attribute.isValueChanged", "attribute.validationError", "attribute.parent.isBulkEdit" ] }) export class PersistentObjectAttributePresenter extends WebComponent implements IConfigurable { private static _attributeImports: { [key: string]: Promise<any>; } = { "AsDetail": undefined, "BinaryFile": undefined, "Boolean": undefined, "ComboBox": undefined, "CommonMark": undefined, "DateTime": undefined, "DropDown": undefined, "FlagsEnum": undefined, "Image": undefined, "KeyValueList": undefined, "MultiLineString": undefined, "MultiString": undefined, "Numeric": undefined, "Password": undefined, "Reference": undefined, "String": undefined, "TranslatedString": undefined, "User": undefined }; private _renderedAttribute: Vidyano.PersistentObjectAttribute; private _renderedAttributeElement: Vidyano.WebComponents.Attributes.PersistentObjectAttribute; private _customTemplate: PolymerTemplate; private _focusQueued: boolean; readonly loading: boolean; private _setLoading: (loading: boolean) => void; attribute: Vidyano.PersistentObjectAttribute; nonEdit: boolean; noLabel: boolean; height: number; disabled: boolean; readOnly: boolean; attached() { if (!this._customTemplate) this._customTemplate = <PolymerTemplate><any>Polymer.dom(this).querySelector("template[is='dom-template']"); super.attached(); } queueFocus() { const activeElement = document.activeElement; this.focus(); if (activeElement !== document.activeElement) this._focusQueued = true; } private _attributeChanged(attribute: Vidyano.PersistentObjectAttribute, isAttached: boolean) { if (this._renderedAttribute) { Polymer.dom(this.$["content"]).children.forEach(c => Polymer.dom(this.$["content"]).removeChild(c)); this._renderedAttributeElement = this._renderedAttribute = null; } if (attribute && isAttached) { this._setLoading(true); if (!this.getAttribute("height")) this.height = this.app.configuration.getAttributeConfig(attribute).calculateHeight(attribute); let 
attributeType: string; if (Vidyano.Service.isNumericType(attribute.type)) attributeType = "Numeric"; else if (Vidyano.Service.isDateTimeType(attribute.type)) attributeType = "DateTime"; else if (attribute.parent.isBulkEdit && (attribute.type === "YesNo" || attribute.type === "Boolean")) attributeType = "NullableBoolean"; else attributeType = attribute.type; if (Vidyano.WebComponents.PersistentObjectAttributePresenter._attributeImports[attributeType] !== undefined) { this._renderAttribute(attribute, attributeType); return; } const typeImport = this._getAttributeTypeImportInfo(attributeType); if (!typeImport) { Vidyano.WebComponents.PersistentObjectAttributePresenter._attributeImports[attributeType] = Promise.resolve(false); this._renderAttribute(attribute, attributeType); return; } let synonymResolvers: ((result: {}) => void)[]; if (typeImport.synonyms) { synonymResolvers = []; typeImport.synonyms.forEach(s => Vidyano.WebComponents.PersistentObjectAttributePresenter._attributeImports[s] = new Promise(resolve => { synonymResolvers.push(resolve); })); } Vidyano.WebComponents.PersistentObjectAttributePresenter._attributeImports[attributeType] = new Promise(async (resolve) => { try { await this.importHref(this.resolveUrl("../Attributes/" + typeImport.filename)); if (synonymResolvers) synonymResolvers.forEach(resolver => resolver(true)); this._renderAttribute(attribute, attributeType); resolve(true); } catch (err) { Vidyano.WebComponents.PersistentObjectAttributePresenter._attributeImports[attributeType] = Promise.resolve(false); this._setLoading(false); resolve(false); } }); } } private _getAttributeTypeImportInfo(type: string): { filename: string; synonyms?: string[]; } { let synonyms: string[]; for (const key in Vidyano.WebComponents.Attributes.PersistentObjectAttribute.typeSynonyms) { const typeSynonyms = Vidyano.WebComponents.Attributes.PersistentObjectAttribute.typeSynonyms[key]; if (key === type) synonyms = typeSynonyms; else if (typeSynonyms.indexOf(type) >= 0) { type = key; synonyms = typeSynonyms; } } if (type === "AsDetail") return { filename: "PersistentObjectAttributeAsDetail/persistent-object-attribute-as-detail.html", synonyms: synonyms }; else if (type === "BinaryFile") return { filename: "PersistentObjectAttributeBinaryFile/persistent-object-attribute-binary-file.html", synonyms: synonyms }; else if (type === "Boolean" || type === "NullableBoolean") return { filename: "PersistentObjectAttributeBoolean/persistent-object-attribute-boolean.html", synonyms: synonyms }; else if (type === "ComboBox") return { filename: "PersistentObjectAttributeComboBox/persistent-object-attribute-combo-box.html", synonyms: synonyms }; else if (type === "CommonMark") return { filename: "PersistentObjectAttributeCommonMark/persistent-object-attribute-common-mark.html", synonyms: synonyms }; else if (type === "DateTime") return { filename: "PersistentObjectAttributeDateTime/persistent-object-attribute-date-time.html", synonyms: synonyms }; else if (type === "DropDown") return { filename: "PersistentObjectAttributeDropDown/persistent-object-attribute-drop-down.html", synonyms: synonyms }; else if (type === "FlagsEnum") return { filename: "PersistentObjectAttributeFlagsEnum/persistent-object-attribute-flags-enum.html", synonyms: synonyms }; else if (type === "Image") return { filename: "PersistentObjectAttributeImage/persistent-object-attribute-image.html", synonyms: synonyms }; else if (type === "KeyValueList") return { filename: 
"PersistentObjectAttributeKeyValueList/persistent-object-attribute-key-value-list.html", synonyms: synonyms }; else if (type === "MultiLineString") return { filename: "PersistentObjectAttributeMultiLineString/persistent-object-attribute-multi-line-string.html", synonyms: synonyms }; else if (type === "MultiString") return { filename: "PersistentObjectAttributeMultiString/persistent-object-attribute-multi-string.html", synonyms: synonyms }; else if (type === "Numeric") return { filename: "PersistentObjectAttributeNumeric/persistent-object-attribute-numeric.html", synonyms: synonyms }; else if (type === "Password") return { filename: "PersistentObjectAttributePassword/persistent-object-attribute-password.html", synonyms: synonyms }; else if (type === "Reference") return { filename: "PersistentObjectAttributeReference/persistent-object-attribute-reference.html", synonyms: synonyms }; else if (type === "String") return { filename: "PersistentObjectAttributeString/persistent-object-attribute-string.html", synonyms: synonyms }; else if (type === "TranslatedString") return { filename: "PersistentObjectAttributeTranslatedString/persistent-object-attribute-translated-string.html", synonyms: synonyms }; else if (type === "User") return { filename: "PersistentObjectAttributeUser/persistent-object-attribute-user.html", synonyms: synonyms }; return null; } private async _renderAttribute(attribute: Vidyano.PersistentObjectAttribute, attributeType: string) { await Vidyano.WebComponents.PersistentObjectAttributePresenter._attributeImports[attributeType]; if (!this.isAttached || attribute !== this.attribute || this._renderedAttribute === attribute) return; let focusTarget: HTMLElement; try { if (this._customTemplate) Polymer.dom(focusTarget = this.$["content"]).appendChild(this._customTemplate.stamp({ attribute: attribute }).root); else { const config = <PersistentObjectAttributeConfig>this.app.configuration.getAttributeConfig(attribute); this.noLabel = this.noLabel || (config && !!config.noLabel); if (!!config && config.hasTemplate) Polymer.dom(this.$["content"]).appendChild(config.stamp(attribute, config.as || "attribute")); else { this._renderedAttributeElement = <WebComponents.Attributes.PersistentObjectAttribute>new (Vidyano.WebComponents.Attributes["PersistentObjectAttribute" + attributeType] || Vidyano.WebComponents.Attributes.PersistentObjectAttributeString)(); this._renderedAttributeElement.classList.add("attribute"); this._renderedAttributeElement.attribute = attribute; this._renderedAttributeElement.nonEdit = this.nonEdit; this._renderedAttributeElement.disabled = this.disabled; Polymer.dom(this.$["content"]).appendChild(focusTarget = this._renderedAttributeElement); } } this._renderedAttribute = attribute; } finally { this._setLoading(false); if (this._focusQueued) { Polymer.dom(focusTarget).flush(); const activeElement = document.activeElement; let retry = 0; const interval = setInterval(() => { if (++retry > 20 || document.activeElement !== activeElement) return clearInterval(interval); focusTarget.focus(); }, 25); this._focusQueued = false; } } } private _computeEditing(isEditing: boolean, nonEdit: boolean): boolean { return !nonEdit && isEditing; } private _nonEditChanged(nonEdit: boolean) { if (this._renderedAttributeElement) this._renderedAttributeElement.nonEdit = nonEdit; } private _disabledChanged(disabled: boolean) { if (!this._renderedAttributeElement) return; this._renderedAttributeElement.disabled = disabled; } private _computeRequired(attribute: Vidyano.PersistentObjectAttribute, 
required: boolean, value: any): boolean { return required && (value == null || (attribute && attribute.rules && attribute.rules.contains("NotEmpty") && value === "")); } private _computeReadOnly(isReadOnly: boolean, isFrozen: boolean, disabled: boolean): boolean { return isReadOnly || disabled || isFrozen; } private _computeHasError(validationError: string): boolean { return !StringEx.isNullOrEmpty(validationError); } private _isHidden(isVisible: boolean): boolean { return !isVisible; } private _onFocus() { const target = <HTMLElement>this._renderedAttributeElement || this._getFocusableElement(); if (!target) return; target.focus(); } private _loadingChanged(loading: boolean) { if (loading) this.fire("attribute-loading", { attribute: this.attribute }, { bubbles: true }); else { Polymer.dom(this).flush(); this.fire("attribute-loaded", { attribute: this.attribute }, { bubbles: true }); } } _viConfigure(actions: IConfigurableAction[]) { if (this.attribute.parent.isSystem) return; actions.push({ label: `Attribute: ${this.attribute.name}`, icon: "viConfigure", action: () => { this.app.changePath(`Management/PersistentObject.1456569d-e02b-44b3-9d1a-a1e417061c77/${this.attribute.id}`); } }); } } }<|fim▁end|>
required: { type: Boolean,
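The presenter above caches one import promise per attribute type in _attributeImports and pre-resolves synonym types against the same import, so each attribute module is fetched at most once. The caching idea, reduced to a Python sketch; the loader callable and type names are placeholders, and a boolean stands in for the promise:

_imports = {}  # attribute type -> True (loaded) or False (failed)

def ensure_loaded(attr_type, synonyms=(), loader=None):
    if attr_type in _imports:           # already loaded, or known to have failed
        return _imports[attr_type]
    ok = bool(loader(attr_type)) if loader else False
    _imports[attr_type] = ok
    for name in synonyms:               # synonyms piggyback on the one real import
        _imports.setdefault(name, ok)
    return ok

Recording failures as well as successes is the same design choice the TypeScript makes: a type that failed to import is never retried in a loop.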
<|file_name|>UserAuthenticationManager.java<|end_file_name|><|fim▁begin|>package fi.rivermouth.talous.auth; import java.util.ArrayList; import java.util.List; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.authority.SimpleGrantedAuthority; import fi.rivermouth.talous.domain.User; public class UserAuthenticationManager implements AuthenticationManager { <|fim▁hole|> grantedAuths.add(new SimpleGrantedAuthority(User.ROLE)); return new UsernamePasswordAuthenticationToken(authentication.getName(), authentication.getCredentials(), grantedAuths); } }<|fim▁end|>
@Override public Authentication authenticate(Authentication authentication) { List<GrantedAuthority> grantedAuths = new ArrayList<GrantedAuthority>();
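The completion above grants every authenticated caller the single User.ROLE authority and wraps the principal and credentials in a new token. The same flow in a small Python sketch — the token is just a dict here, and like the Java version the sketch performs no actual credential check, which real code would add:

USER_ROLE = "ROLE_USER"  # stand-in for the User.ROLE constant in the Java example

def authenticate(name, credentials):
    granted = [USER_ROLE]  # every caller receives the one fixed role
    return {"name": name, "credentials": credentials, "authorities": granted}

token = authenticate("alice", "secret")
assert token["authorities"] == [USER_ROLE]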
<|file_name|>location.py<|end_file_name|><|fim▁begin|>################################################################################ ## ## ## This file is a part of TADEK. ## ## ## ## TADEK - Test Automation in a Distributed Environment ## ## (http://tadek.comarch.com) ## ## ## ## Copyright (C) 2011 Comarch S.A. ## ## All rights reserved. ## ## ## ## TADEK is free software for non-commercial purposes. For commercial ones ## ## we offer a commercial license. Please check http://tadek.comarch.com for ## ## details or write to [email protected] ## ## ## ## You can redistribute it and/or modify it under the terms of the ## ## GNU General Public License as published by the Free Software Foundation, ## ## either version 3 of the License, or (at your option) any later version. ## ## ## ## TADEK is distributed in the hope that it will be useful, ## ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## ## GNU General Public License for more details. ## ## ## ## You should have received a copy of the GNU General Public License ## ## along with TADEK bundled with this file in the file LICENSE. ## ## If not, see http://www.gnu.org/licenses/. ## ## ## ## Please notice that Contributor Agreement applies to any contribution ## ## you make to TADEK. The Agreement must be completed, signed and sent ## ## to Comarch before any contribution is made. You should have received ## ## a copy of Contribution Agreement along with TADEK bundled with this file ## ## in the file CONTRIBUTION_AGREEMENT.pdf or see http://tadek.comarch.com ## ## or write to [email protected] ## ## ## ################################################################################ import os import sys from tadek import models from tadek import teststeps from tadek import testcases from tadek import testsuites from tadek.core import locale from tadek.core.structs import ErrorBox _DIRS_MAP = { "models": models, "teststeps": teststeps, "testcases": testcases, "testsuites": testsuites } _LOCALE_DIR = "locale" class NameConflictError(Exception): ''' Raised when a name conflict module takes place inside some. ''' def __init__(self, module, name): Exception.__init__(self, '.'.join([module.__name__, name])) def add(path, enabled=True): ''' Adds a location of models and test cases specified by the path. :param path: A path to a location directory :type path: string :param enabled: True if an added location should be enabled, False otherwise :type enabled: boolean ''' path = os.path.abspath(path) if path in _cache: return None _cache[path] = enabled if enabled: return enable(path) return [] def remove(path): ''' Removes a location of models and test cases specified by the path. :param path: A path to a location directory :type path: string ''' path = os.path.abspath(path) if path not in _cache: return disable(path) del _cache[path] def get(enabled=None): ''' Gets a list of all locations. ''' if enabled is None: return _cache.keys() elif enabled: return [path for path in _cache if _cache[path]] else: return [path for path in _cache if not _cache[path]] def enable(path): ''' Enables a location of models and test cases specified by the path. 
:param path: A path to a location directory
    :type path: string
    '''
    path = os.path.abspath(path)
    if path not in _cache:
        return None
    _cache[path] = True
    errors = []
    for dirname, module in _DIRS_MAP.iteritems():
        errors.extend(_addModuleDir(module, os.path.join(path, dirname)))
    # Add a corresponding locale
    locale.add(os.path.join(path, _LOCALE_DIR))
    if errors:
        disable(path)
    return errors

def disable(path):
    '''
    Disables a location of models and test cases specified by the path.

    :param path: A path to a location directory
    :type path: string
    '''
    path = os.path.abspath(path)
    for dirname, module in _DIRS_MAP.iteritems():
        _removeModuleDir(module, os.path.join(path, dirname))
    # Remove a corresponding locale
    locale.remove(os.path.join(path, _LOCALE_DIR))
    _cache[path] = False

def clear():
    '''
    Clears imported modules from all locations.
    '''
    for module in _DIRS_MAP.itervalues():
        _clearModule(module)

# A locations cache
_cache = {}

# Location directories oriented functions:

def getModels():
    '''
    Gets a dictionary containing all currently available models modules.

    :return: A dictionary with models modules
    :rtype: dictionary
    '''
    content = _getModuleContent(models)
    content.pop("__init__", None)
    return content

def getSteps():
    '''
    Gets a dictionary containing all currently available root test steps modules.

    :return: A dictionary with test steps modules
    :rtype: dictionary
    '''
    content = _getModuleContent(teststeps)
    content.pop("__init__", None)
    return content

def getCases():
    '''
    Gets a dictionary containing all currently available root test cases modules.

    :return: A dictionary with test cases modules
    :rtype: dictionary
    '''
    content = _getModuleContent(testcases)
    content.pop("__init__", None)
    return content

def getSuites():
    '''
    Gets a dictionary containing all currently available root test suites modules.

    :return: A dictionary with test suites modules
    :rtype: dictionary
    '''
    content = _getModuleContent(testsuites)
    content.pop("__init__", None)
    return content

_MODULE_EXTS = (".py", ".pyc", ".pyo")

def _getDirContent(dir, package=None):
    '''
    Gets content of the given directory.
    '''
    content = {}
    for file in sorted(os.listdir(dir)):
        name = None
        path = os.path.join(dir, file)
        if os.path.isfile(path):
            name, ext = os.path.splitext(file)
            if ext not in _MODULE_EXTS or (package and name == "__init__"):
                continue
            name = '.'.join([package, name]) if package else name
        elif os.path.isdir(path):
            pkg = False
            for ext in _MODULE_EXTS:
                if os.path.exists(os.path.join(path, "__init__" + ext)):
                    pkg = True
                    break
            if not pkg:
                continue
            name = '.'.join([package, file]) if package else file
            content.update(_getDirContent(path, name))
            path = os.path.join(path, "__init__" + ext)
        if name and name not in content:
            content[name] = path
    return content

<|fim▁hole|>
    '''
    Gets content of the given module from the specified directory.
    '''
    content = {}
    for path in module.__path__:
        for name, path in _getDirContent(path).iteritems():
            if name not in content:
                content[name] = path
    return content

def _addModuleDir(module, path):
    '''
    Adds a directory of the given path to the specified module object.
    '''
    errors = []
    if not os.path.isdir(path) or path is module.__path__:
        return errors
    content = _getModuleContent(module)
    for name in _getDirContent(path):
        try:
            if name in content:
                raise NameConflictError(module, name)
        except NameConflictError:
            errors.append(ErrorBox(name=name, path=path))
    if not errors:
        module.__path__.append(path)
    return errors

def _clearModule(module, path=None):
    '''
    Clears the imported module.
''' patterns = [] if not path: patterns.append(module.__name__ + '.') elif path in module.__path__: for name in _getDirContent(path): patterns.append('.'.join([module.__name__, name])) for name in sys.modules.keys(): for pattern in patterns: if pattern in name: del sys.modules[name] break def _removeModuleDir(module, path): ''' Removes a directory of the given path from the specified module object. ''' if path not in module.__path__ or path == module.__path__[0]: return _clearModule(module, path) module.__path__.remove(path)<|fim▁end|>
def _getModuleContent(module):
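A minimal usage sketch for the location registry defined in the record above. The module path (tadek.core.location) and the API names (add, getModels, disable, ErrorBox) come from the record itself; the ./mysuite directory is a hypothetical location laid out with the models/, teststeps/, testcases/ and testsuites/ sub-directories the loader expects.

# Hypothetical driver for the location registry; Python 2, like the record.
from tadek.core import location

errors = location.add("./mysuite")     # registers the path and enables it
if errors:
    # _addModuleDir() reports name clashes as ErrorBox instances
    for error in errors:
        print(error)
else:
    # module-name -> file-path mapping collected by _getDirContent()
    print(sorted(location.getModels().keys()))
    location.disable("./mysuite")      # unloads the modules and locale data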
<|file_name|>log.go<|end_file_name|><|fim▁begin|>// Package flog implements logging utilities for flamingo package flog import "github.com/Sirupsen/logrus" type Parameter interface { Convert() map[string]interface{} } type Fields struct { Event string Error error } func (f Fields) Convert() map[string]interface{} { fields := map[string]interface{}{} if f.Event != "" { fields["event"] = f.Event } if f.Error != nil { fields["error"] = f.Error } return fields } type Details map[string]interface{} func (d Details) Convert() map[string]interface{} { return d } type DebugFields map[string]interface{} func (d DebugFields) Convert() map[string]interface{} { // TODO(tmrts): Handle debug information return map[string]interface{}{} } func transform(params []Parameter) logrus.Fields { logrusFields := logrus.Fields{} for _, p := range params { fieldMap := p.Convert() for k, v := range fieldMap { logrusFields[k] = v } } return logrusFields } func Debug(msg string, params ...Parameter) { f := transform(params) logrus.WithFields(f).Debug(msg) } func Info(msg string, params ...Parameter) { f := transform(params) logrus.WithFields(f).Info(msg) } func Warn(msg string, params ...Parameter) { f := transform(params) logrus.WithFields(f).Warning(msg) } func Error(msg string, params ...Parameter) { f := transform(params) logrus.WithFields(f).Error(msg)<|fim▁hole|> func Fatal(msg string, params ...Parameter) { f := transform(params) logrus.WithFields(f).Fatal(msg) } func Panic(msg string, params ...Parameter) { f := transform(params) logrus.WithFields(f).Panic(msg) }<|fim▁end|>
}
<|file_name|>fn-simple.rs<|end_file_name|><|fim▁begin|>// rustfmt-normalize_comments: true fn simple(/*pre-comment on a function!?*/ i: i32/*yes, it's possible! */ ,response: NoWay /* hose */) { fn op(x: Typ, key : &[u8], upd : Box<Fn(Option<&memcache::Item>) -> (memcache::Status, Result<memcache::Item, Option<String>>)>) -> MapResult {} "cool"} fn weird_comment(/* /*/ double level */ comment */ x: Hello /*/*/* tripple, even */*/*/, // Does this work? y: World ) { simple(/* does this preserve comments now? */ 42, NoWay) } fn generic<T>(arg: T) -> &SomeType where T: Fn(// First arg A, // Second argument B, C, D, /* pre comment */ E /* last comment */) -> &SomeType { arg(a, b, c, d, e) <|fim▁hole|>fn foo() -> ! {} pub fn http_fetch_async(listener:Box< AsyncCORSResponseListener+Send >, script_chan: Box<ScriptChan+Send>) { } fn some_func<T:Box<Trait+Bound>>(val:T){} fn zzzzzzzzzzzzzzzzzzzz<Type, NodeType> (selff: Type, mut handle: node::Handle<IdRef<'id, Node<K, V>>, Type, NodeType>) -> SearchStack<'a, K, V, Type, NodeType>{ } unsafe fn generic_call(cx: *mut JSContext, argc: libc::c_uint, vp: *mut JSVal, is_lenient: bool, call: unsafe extern fn(*const JSJitInfo, *mut JSContext, HandleObject, *mut libc::c_void, u32, *mut JSVal) -> u8) { let f: fn ( _ , _ ) -> _ = panic!() ; } pub fn start_export_thread<C: CryptoSchemee + 'static>(database: &Database, crypto_scheme: &C, block_size: usize, source_path: &Path) -> BonzoResult<mpsc::Consumer<'static, FileInstruction>> {} pub fn waltz(cwd: &Path) -> CliAssert { { { formatted_comment = rewrite_comment(comment, block_style, width, offset, formatting_fig); } } } // #2003 mod foo { fn __bindgen_test_layout_i_open0_c_open1_char_a_open2_char_close2_close1_close0_instantiation() { foo(); } }<|fim▁end|>
}
<|file_name|>wxdash.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.7 from __future__ import print_function # -*- coding: utf-8 -*- import wx import threading import lcm import random import Forseti import configurator BLUE = (24, 25, 141) GOLD = (241, 169, 50) class TeamPanel(wx.Panel): def __init__(self, remote, letter, number, name, colour, *args, **kwargs): super(TeamPanel, self).__init__(*args, **kwargs) self.remote = remote self.InitUI(letter, number, name, colour) def InitUI(self, letter, number, name, colour=None): if colour is not None: self.SetBackgroundColour(colour) dc = wx.ScreenDC() self.num_ctrl = wx.TextCtrl(self, size=(dc.GetCharWidth() * 2, dc.GetCharHeight())) self.num_ctrl.AppendText(str(number)) self.get_button = wx.Button(self, label='Get', size=(dc.GetCharWidth() * 2, dc.GetCharHeight())) self.get_button.Bind(wx.EVT_BUTTON, self.do_get_name) self.name_ctrl = wx.TextCtrl(self, size=(dc.GetCharWidth() * 16, dc.GetCharHeight())) self.name_ctrl.AppendText(name) name_num_box = wx.BoxSizer(wx.HORIZONTAL) name_num_box.Add(wx.StaticText(self, label=letter, size=(dc.GetCharWidth() * 0.6, dc.GetCharHeight()))) name_num_box.Add(self.num_ctrl) name_num_box.Add(self.get_button) name_num_box.Add(self.name_ctrl) #button_box = wx.BoxSizer(wx.HORIZONTAL) #button_box.Add(wx.Button(self, label='Reset')) #button_box.Add(wx.Button(self, label='Configure')) #button_box.Add(wx.Button(self, label='Disable')) self.vbox = wx.BoxSizer(wx.VERTICAL) self.vbox.Add(name_num_box, flag=wx.CENTER) #vbox.Add(button_box, flag=wx.CENTER) self.SetSizer(self.vbox) self.Show(True) def do_get_name(self, event): self.name = configurator.get_team_name(self.number) @property def name(self): return self.name_ctrl.GetValue() @name.setter def name(self, val): self.name_ctrl.SetValue(val) @property def number(self): try: return int(self.num_ctrl.GetValue()) except ValueError: return 0 @number.setter def number(self, val): self.num_ctrl.SetValue(str(val)) class MatchControl(wx.Panel): def __init__(self, remote, *args, **kwargs): super(MatchControl, self).__init__(*args, **kwargs) self.remote = remote self.InitUI() def InitUI(self): vbox = wx.BoxSizer(wx.VERTICAL) dc = wx.ScreenDC() match_number = wx.BoxSizer(wx.HORIZONTAL) match_number.Add(wx.StaticText(self, label='Match #'.format(1))) self.match_num_ctrl = wx.TextCtrl(self, size=(dc.GetCharWidth() * 2, dc.GetCharHeight())) match_number.Add(self.match_num_ctrl) vbox.Add(match_number, flag=wx.CENTER) teamSizer = wx.GridSizer(3, 2) self.team_panels = [ TeamPanel(self.remote, 'A', 0, 'Unknown Team', BLUE, self), TeamPanel(self.remote, 'C', 0, 'Unknown Team', GOLD, self), TeamPanel(self.remote, 'B', 0, 'Unknown Team', BLUE, self), TeamPanel(self.remote, 'D', 0, 'Unknown Team', GOLD, self), ] teamSizer.AddMany( [wx.StaticText(self, label='Blue Team'), wx.StaticText(self, label='Gold Team')] + [(panel, 0) for panel in self.team_panels]) vbox.Add(teamSizer, flag=wx.CENTER) buttons = wx.BoxSizer(wx.HORIZONTAL) self.init_button = wx.Button(self, label='Init') self.init_button.Bind(wx.EVT_BUTTON, self.do_init) self.go_button = wx.Button(self, label='GO!') self.go_button.Bind(wx.EVT_BUTTON, self.do_go) self.pause_button = wx.Button(self, label='Pause') self.pause_button.Bind(wx.EVT_BUTTON, self.do_pause) #self.save_button = wx.Button(self, label='Save') #self.save_button.Bind(wx.EVT_BUTTON, self.do_save) self.time_text = wx.StaticText(self, label='0:00') self.stage_text = wx.StaticText(self, label='Unknown') self.remote.time_text = self.time_text 
#buttons.Add(self.save_button, flag=wx.LEFT) buttons.Add(self.init_button) buttons.Add(self.go_button) buttons.Add(self.pause_button) buttons.Add(self.time_text) buttons.Add(self.stage_text) vbox.Add(buttons, flag=wx.CENTER) self.SetSizer(vbox) self.Show(True) def do_go(self, e): self.remote.do_go() def do_pause(self, e): self.remote.do_pause() def do_save(self, e): self.remote.do_save(self.get_match()) def do_init(self, e): self.remote.do_init(self.get_match()) def _set_match_panel(self, match, team_idx, panel_idx): match.team_numbers[team_idx] = self.team_panels[panel_idx].number match.team_names[team_idx] = self.team_panels[panel_idx].name def _set_panel_match(self, match, team_idx, panel_idx): self.team_panels[panel_idx].number = match.team_numbers[team_idx] self.team_panels[panel_idx].name = match.team_names[team_idx] def get_match(self): match = Forseti.Match() self._set_match_panel(match, 0, 0) self._set_match_panel(match, 1, 2) self._set_match_panel(match, 2, 1) self._set_match_panel(match, 3, 3) try: match.match_number = int(self.match_num_ctrl.GetValue()) except ValueError: match.match_number = random.getrandbits(31) return match def set_match(self, match): self._set_panel_match(match, 0, 0) self._set_panel_match(match, 1, 2) self._set_panel_match(match, 2, 1) self._set_panel_match(match, 3, 3) self.match_num_ctrl.SetValue(str(match.match_number)) def set_time(self, match): self.time_text.SetLabel(format_time(match.game_time_so_far)) self.stage_text.SetLabel(match.stage_name) class ScheduleControl(wx.Panel): def __init__(self, remote, match_control, *args, **kwargs): self.remote = remote super(ScheduleControl, self).__init__(*args, **kwargs) self.InitUI() self.remote.match_list_box = self.match_list self.match_control = match_control def InitUI(self): self.match_list = wx.ListBox(self) self.match_list.Bind(wx.EVT_LISTBOX, self.choose_match) hbox = wx.BoxSizer(wx.HORIZONTAL) self.load_button = wx.Button(self, label='Load All') self.load_button.Bind(wx.EVT_BUTTON, self.do_load) hbox.Add(self.load_button) self.clear_first = wx.CheckBox(self, label='Clear first') self.clear_first.SetValue(True) hbox.Add(self.clear_first) vbox = wx.BoxSizer(wx.VERTICAL) vbox.Add(self.match_list, 1, wx.EXPAND) vbox.Add(hbox) self.SetSizer(vbox) self.Show(True) def do_load(self, e): self.remote.do_load(self.clear_first.GetValue()) def choose_match(self, event): self.match_control.set_match(event.GetClientData())<|fim▁hole|>class MainWindow(wx.Frame): def __init__(self, remote, *args, **kwargs): super(MainWindow, self).__init__(*args, **kwargs) self.remote = remote self.InitUI() def InitUI(self): menubar = wx.MenuBar() fileMenu = wx.Menu() fitem = fileMenu.Append(wx.ID_EXIT, 'Quit', 'Quit application') menubar.Append(fileMenu, '&File') self.SetMenuBar(menubar) match_control = MatchControl(self.remote, self) schedule_control = ScheduleControl(self.remote, match_control, self) self.remote.match_control = match_control vbox = wx.BoxSizer(wx.VERTICAL) vbox.Add(match_control, 0, wx.ALIGN_CENTER | wx.ALIGN_TOP, 8) vbox.Add(schedule_control, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 8) self.Bind(wx.EVT_MENU, self.OnQuit, fitem) self.SetSize((800, 600)) self.SetSizer(vbox) self.SetTitle('Forseti Dashboard') self.Centre() self.Show(True) def OnQuit(self, e): self.Close() def format_match(match): print(match.match_number) print(match.team_names) print(match.team_numbers) return '{}: {} ({}) & {} ({}) vs. 
{} ({}) & {} ({})'.format( match.match_number, match.team_names[0], match.team_numbers[0], match.team_names[1], match.team_numbers[1], match.team_names[2], match.team_numbers[2], match.team_names[3], match.team_numbers[3], ) class Remote(object): def __init__(self): self.lc = lcm.LCM('udpm://239.255.76.67:7667?ttl=1') self.lc.subscribe('Schedule/Schedule', self.handle_schedule) self.lc.subscribe('Timer/Time', self.handle_time) self.match_list_box = None self.match_control = None self.thread = threading.Thread(target=self._loop) self.thread.daemon = True def start(self): self.thread.start() def _loop(self): while True: try: self.lc.handle() except Exception as ex: print('Got exception while handling lcm message', ex) def handle_schedule(self, channel, data): msg = Forseti.Schedule.decode(data) for i in range(msg.num_matches): self.match_list_box.Insert(format_match(msg.matches[i]), i, msg.matches[i]) def handle_time(self, channel, data): msg = Forseti.Time.decode(data) #wx.CallAfter(self.time_text.SetLabel, format_time(msg.game_time_so_far)) wx.CallAfter(self.match_control.set_time, msg) def do_load(self, clear_first): if clear_first: self.match_list_box.Clear() msg = Forseti.ScheduleLoadCommand() msg.clear_first = clear_first print('Requesting load') self.lc.publish('Schedule/Load', msg.encode()) def do_save(self, match): self.lc.publish('Match/Save', match.encode()) def do_init(self, match): self.lc.publish('Match/Init', match.encode()) def do_time_ctrl(self, command): msg = Forseti.TimeControl() msg.command_name = command self.lc.publish('Timer/Control', msg.encode()) def do_go(self): self.do_time_ctrl('start') def do_pause(self): self.do_time_ctrl('pause') def format_time(seconds): return '{}:{:02}'.format(seconds // 60, seconds % 60) def main(): app = wx.App() remote = Remote() MainWindow(remote, None) remote.start() remote.do_load(False) app.MainLoop() if __name__ == '__main__': main()<|fim▁end|>
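The wxdash.py record above closes with the pure helper format_time; since it has no wx or LCM dependencies, its behaviour can be checked in isolation. The function body below is copied from the record; the assertions are illustrative only.

# format_time as defined in the wxdash.py record: integer seconds -> M:SS.
def format_time(seconds):
    return '{}:{:02}'.format(seconds // 60, seconds % 60)

assert format_time(125) == '2:05'   # zero-padded seconds field
assert format_time(59) == '0:59'
assert format_time(0) == '0:00'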
<|file_name|>encodable.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

/*!

The compiler code necessary to implement the #[deriving(Encodable)]
(and Decodable, in decodable.rs) extension.  The idea here is that
type-defining items may be tagged with #[deriving(Encodable, Decodable)].

For example, a type like:

    #[deriving(Encodable, Decodable)]
    struct Node {id: uint}

would generate two implementations like:

impl<S:extra::serialize::Encoder> Encodable<S> for Node {
    fn encode(&self, s: &S) {
        s.emit_struct("Node", 1, || {
<|fim▁hole|>
        })
    }
}

impl<D:Decoder> Decodable for node_id {
    fn decode(d: &D) -> Node {
        d.read_struct("Node", 1, || {
            Node {
                id: d.read_field(~"x", 0, || decode(d))
            }
        })
    }
}

Other interesting scenarios are when the item has type parameters or
references other non-built-in types.  A type definition like:

    #[deriving(Encodable, Decodable)]
    struct spanned<T> {node: T, span: Span}

would yield functions like:

    impl<
        S: Encoder,
        T: Encodable<S>
    > spanned<T>: Encodable<S> {
        fn encode<S:Encoder>(s: &S) {
            s.emit_rec(|| {
                s.emit_field("node", 0, || self.node.encode(s));
                s.emit_field("span", 1, || self.span.encode(s));
            })
        }
    }

    impl<
        D: Decoder,
        T: Decodable<D>
    > spanned<T>: Decodable<D> {
        fn decode(d: &D) -> spanned<T> {
            d.read_rec(|| {
                {
                    node: d.read_field(~"node", 0, || decode(d)),
                    span: d.read_field(~"span", 1, || decode(d)),
                }
            })
        }
    }

*/

use ast::{MetaItem, Item, Expr, MutImmutable, MutMutable};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;

pub fn expand_deriving_encodable(cx: &ExtCtxt,
                                 span: Span,
                                 mitem: @MetaItem,
                                 in_items: ~[@Item]) -> ~[@Item] {
    let trait_def = TraitDef {
        cx: cx, span: span,

        path: Path::new_(~["extra", "serialize", "Encodable"], None,
                         ~[~Literal(Path::new_local("__E"))], true),
        additional_bounds: ~[],
        generics: LifetimeBounds {
            lifetimes: ~[],
            bounds: ~[("__E", ~[Path::new(~["extra", "serialize", "Encoder"])])],
        },
        methods: ~[
            MethodDef {
                name: "encode",
                generics: LifetimeBounds::empty(),
                explicit_self: Some(Some(Borrowed(None, MutImmutable))),
                args: ~[Ptr(~Literal(Path::new_local("__E")),
                            Borrowed(None, MutMutable))],
                ret_ty: nil_ty(),
                inline: false,
                const_nonmatching: true,
                combine_substructure: encodable_substructure,
            },
        ]
    };

    trait_def.expand(mitem, in_items)
}

fn encodable_substructure(cx: &ExtCtxt, span: Span,
                          substr: &Substructure) -> @Expr {
    let encoder = substr.nonself_args[0];
    // throw an underscore in front to suppress unused variable warnings
    let blkarg = cx.ident_of("_e");
    let blkencoder = cx.expr_ident(span, blkarg);
    let encode = cx.ident_of("encode");

    return match *substr.fields {
        Struct(ref fields) => {
            let emit_struct_field = cx.ident_of("emit_struct_field");
            let mut stmts = ~[];
            for (i, f) in fields.iter().enumerate() {
                let name = match f.name {
                    Some(id) => cx.str_of(id),
                    None => format!("_field{}", i).to_managed()
                };
                let enc = cx.expr_method_call(span, f.self_, encode, ~[blkencoder]);
                let lambda = cx.lambda_expr_1(span, enc, blkarg);
                let call = cx.expr_method_call(span, blkencoder,
                                               emit_struct_field,
                                               ~[cx.expr_str(span, name),
                                                 cx.expr_uint(span, i),
lambda]); stmts.push(cx.stmt_expr(call)); } let blk = cx.lambda_stmts_1(span, stmts, blkarg); cx.expr_method_call(span, encoder, cx.ident_of("emit_struct"), ~[cx.expr_str(span, cx.str_of(substr.type_ident)), cx.expr_uint(span, fields.len()), blk]) } EnumMatching(idx, variant, ref fields) => { // We're not generating an AST that the borrow checker is expecting, // so we need to generate a unique local variable to take the // mutable loan out on, otherwise we get conflicts which don't // actually exist. let me = cx.stmt_let(span, false, blkarg, encoder); let encoder = cx.expr_ident(span, blkarg); let emit_variant_arg = cx.ident_of("emit_enum_variant_arg"); let mut stmts = ~[]; for (i, f) in fields.iter().enumerate() { let enc = cx.expr_method_call(span, f.self_, encode, ~[blkencoder]); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder, emit_variant_arg, ~[cx.expr_uint(span, i), lambda]); stmts.push(cx.stmt_expr(call)); } let blk = cx.lambda_stmts_1(span, stmts, blkarg); let name = cx.expr_str(span, cx.str_of(variant.node.name)); let call = cx.expr_method_call(span, blkencoder, cx.ident_of("emit_enum_variant"), ~[name, cx.expr_uint(span, idx), cx.expr_uint(span, fields.len()), blk]); let blk = cx.lambda_expr_1(span, call, blkarg); let ret = cx.expr_method_call(span, encoder, cx.ident_of("emit_enum"), ~[cx.expr_str(span, cx.str_of(substr.type_ident)), blk]); cx.expr_block(cx.block(span, ~[me], Some(ret))) } _ => cx.bug("expected Struct or EnumMatching in deriving(Encodable)") }; }<|fim▁end|>
s.emit_field("id", 0, || s.emit_uint(self.id))
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import * as React from "react"; const style = { "padding": "0 2% 0", "fontSize": "100px", "color": "white", "textShadow": "white 0 0 9px", };<|fim▁hole|><|fim▁end|>
export default function L() { return <div style={style}>L</div>; }
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>use std::error::{FromError, Error}; use std::fmt; use std::io::IoError; use std::io::process::{ProcessOutput, ProcessExit, ExitStatus, ExitSignal}; use std::str; use semver; use rustc_serialize::json; use curl; use toml::Error as TomlError; use url; use git2; pub type CargoResult<T> = Result<T, Box<CargoError>>; // ============================================================================= // CargoError trait<|fim▁hole|>pub trait CargoError: Error { fn is_human(&self) -> bool { false } } impl fmt::String for Box<CargoError> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "{}", self.description())); Ok(()) } } impl fmt::Show for Box<CargoError> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::String::fmt(self, f) } } impl Error for Box<CargoError> { fn description(&self) -> &str { (**self).description() } fn detail(&self) -> Option<String> { (**self).detail() } fn cause(&self) -> Option<&Error> { (**self).cause() } } impl CargoError for Box<CargoError> { fn is_human(&self) -> bool { (**self).is_human() } } // ============================================================================= // Chaining errors pub trait ChainError<T> { fn chain_error<E, F>(self, callback: F) -> CargoResult<T> where E: CargoError, F: FnOnce() -> E; } struct ChainedError<E> { error: E, cause: Box<Error>, } impl<'a, T, F> ChainError<T> for F where F: FnOnce() -> CargoResult<T> { fn chain_error<E, C>(self, callback: C) -> CargoResult<T> where E: CargoError, C: FnOnce() -> E { self().chain_error(callback) } } impl<T, E: Error> ChainError<T> for Result<T, E> { fn chain_error<E2, C>(self, callback: C) -> CargoResult<T> where E2: CargoError, C: FnOnce() -> E2 { self.map_err(move |err| { Box::new(ChainedError { error: callback(), cause: Box::new(err), }) as Box<CargoError> }) } } impl<T> ChainError<T> for Option<T> { fn chain_error<E, C>(self, callback: C) -> CargoResult<T> where E: CargoError, C: FnOnce() -> E { match self { Some(t) => Ok(t), None => Err(Box::new(callback()) as Box<CargoError>), } } } impl<E: Error> Error for ChainedError<E> { fn description(&self) -> &str { self.error.description() } fn detail(&self) -> Option<String> { self.error.detail() } fn cause(&self) -> Option<&Error> { Some(&*self.cause) } } impl<E: CargoError> CargoError for ChainedError<E> { fn is_human(&self) -> bool { self.error.is_human() } } // ============================================================================= // Process errors pub struct ProcessError { pub desc: String, pub exit: Option<ProcessExit>, pub output: Option<ProcessOutput>, cause: Option<IoError>, } impl Error for ProcessError { fn description(&self) -> &str { self.desc.as_slice() } fn detail(&self) -> Option<String> { None } fn cause(&self) -> Option<&Error> { self.cause.as_ref().map(|s| s as &Error) } } impl fmt::String for ProcessError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::String::fmt(&self.desc, f) } } impl fmt::Show for ProcessError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::String::fmt(self, f) } } // ============================================================================= // Concrete errors struct ConcreteCargoError { description: String, detail: Option<String>, cause: Option<Box<Error>>, is_human: bool, } impl fmt::String for ConcreteCargoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description) } } impl fmt::Show for ConcreteCargoError { fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result { fmt::String::fmt(self, f) } } impl Error for ConcreteCargoError { fn description(&self) -> &str { self.description.as_slice() } fn detail(&self) -> Option<String> { self.detail.clone() } fn cause(&self) -> Option<&Error> { self.cause.as_ref().map(|c| &**c) } } impl CargoError for ConcreteCargoError { fn is_human(&self) -> bool { self.is_human } } // ============================================================================= // Human errors pub struct Human<E>(pub E); impl<E: Error> Error for Human<E> { fn description(&self) -> &str { self.0.description() } fn detail(&self) -> Option<String> { self.0.detail() } fn cause(&self) -> Option<&Error> { self.0.cause() } } impl<E: Error> CargoError for Human<E> { fn is_human(&self) -> bool { true } } // ============================================================================= // CLI errors pub type CliResult<T> = Result<T, CliError>; #[derive(Show)] pub struct CliError { pub error: Box<CargoError>, pub unknown: bool, pub exit_code: u32 } impl Error for CliError { fn description(&self) -> &str { self.error.description() } fn detail(&self) -> Option<String> { self.error.detail() } fn cause(&self) -> Option<&Error> { self.error.cause() } } impl CliError { pub fn new<S: Str>(error: S, code: u32) -> CliError { let error = human(error.as_slice().to_string()); CliError::from_boxed(error, code) } pub fn from_error<E: CargoError + 'static>(error: E, code: u32) -> CliError { let error = Box::new(error) as Box<CargoError>; CliError::from_boxed(error, code) } pub fn from_boxed(error: Box<CargoError>, code: u32) -> CliError { let human = error.is_human(); CliError { error: error, exit_code: code, unknown: !human } } } // ============================================================================= // various impls macro_rules! from_error { ($($p:ty,)*) => ( $(impl FromError<$p> for Box<CargoError> { fn from_error(t: $p) -> Box<CargoError> { Box::new(t) } })* ) } from_error! { semver::ReqParseError, IoError, ProcessError, git2::Error, json::DecoderError, curl::ErrCode, CliError, TomlError, url::ParseError, } impl<E: Error> FromError<Human<E>> for Box<CargoError> { fn from_error(t: Human<E>) -> Box<CargoError> { Box::new(t) } } impl CargoError for semver::ReqParseError {} impl CargoError for IoError {} impl CargoError for git2::Error {} impl CargoError for json::DecoderError {} impl CargoError for curl::ErrCode {} impl CargoError for ProcessError {} impl CargoError for CliError {} impl CargoError for TomlError {} impl CargoError for url::ParseError {} // ============================================================================= // Construction helpers pub fn process_error<S: Str>(msg: S, cause: Option<IoError>, status: Option<&ProcessExit>, output: Option<&ProcessOutput>) -> ProcessError { let exit = match status { Some(&ExitStatus(i)) | Some(&ExitSignal(i)) => i.to_string(), None => "never executed".to_string(), }; let mut desc = format!("{} (status={})", msg.as_slice(), exit); if let Some(out) = output { match str::from_utf8(out.output.as_slice()) { Ok(s) if s.trim().len() > 0 => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) => {} } match str::from_utf8(out.error.as_slice()) { Ok(s) if s.trim().len() > 0 => { desc.push_str("\n--- stderr\n"); desc.push_str(s); } Ok(..) | Err(..) 
=> {} } } ProcessError { desc: desc, exit: status.map(|a| a.clone()), output: output.map(|a| a.clone()), cause: cause, } } pub fn internal_error<S1: Str, S2: Str>(error: S1, detail: S2) -> Box<CargoError> { Box::new(ConcreteCargoError { description: error.as_slice().to_string(), detail: Some(detail.as_slice().to_string()), cause: None, is_human: false }) } pub fn internal<S: fmt::String>(error: S) -> Box<CargoError> { Box::new(ConcreteCargoError { description: error.to_string(), detail: None, cause: None, is_human: false }) } pub fn human<S: fmt::String>(error: S) -> Box<CargoError> { Box::new(ConcreteCargoError { description: error.to_string(), detail: None, cause: None, is_human: true }) } pub fn caused_human<S: fmt::String, E: Error>(error: S, cause: E) -> Box<CargoError> { Box::new(ConcreteCargoError { description: error.to_string(), detail: None, cause: Some(Box::new(cause) as Box<Error>), is_human: true }) }<|fim▁end|>
<|file_name|>data_source.py<|end_file_name|><|fim▁begin|>from django.db import models from .common_info import CommonInfo from django.utils import timezone from django.urls import reverse from django.core.exceptions import ValidationError from django.utils.translation import ugettext_lazy as _ from django.core.validators import URLValidator def validate_nonzero(value): if value == 0: raise ValidationError(<|fim▁hole|>class DataSource(CommonInfo): """A parent container for DataGroup objects""" STATE_CHOICES = ( ("AT", "Awaiting Triage"), ("IP", "In Progress"), ("CO", "Complete"), ("ST", "Stale"), ) PRIORITY_CHOICES = (("HI", "High"), ("MD", "Medium"), ("LO", "Low")) title = models.CharField(max_length=50) url = models.CharField(max_length=150, blank=True, validators=[URLValidator()]) estimated_records = models.PositiveIntegerField( default=47, validators=[validate_nonzero], help_text="Estimated number of documents that the data source will eventually contain.", ) state = models.CharField(max_length=2, choices=STATE_CHOICES, default="AT") description = models.TextField(blank=True) priority = models.CharField(max_length=2, choices=PRIORITY_CHOICES, default="HI") def __str__(self): return self.title def get_absolute_url(self): return reverse("data_source_edit", kwargs={"pk": self.pk})<|fim▁end|>
_("Quantity {} is not allowed".format(value)), params={"value": value} )
<|file_name|>device_service_service_broker.py<|end_file_name|><|fim▁begin|>from ..broker import Broker class DeviceServiceServiceBroker(Broker): controller = "device_service_services" def show(self, **kwargs): """Shows the details for the specified device service service. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of device service service methods. The listed methods will be called on each device service service returned and included in the output. Available methods are: parent_device_service, child_device_service, data_source, device. :type methods: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: parent_device_service, child_device_service, data_source, device. :type include: Array of String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return device_service_service: The device service service identified by the specified DeviceServiceServiceID. :rtype device_service_service: DeviceServiceService """ return self.api_request(self._get_method_fullname("show"), kwargs) def index(self, **kwargs): """Lists the available device service services. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient. **Inputs** | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceID: The internal NetMRI identifier for the device to which belongs this services. :type DeviceID: Array of Integer | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results. :type DeviceGroupID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param timestamp: The data returned will represent the device service services as of this date and time. If omitted, the result will indicate the most recently collected data. :type timestamp: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of device service service methods. The listed methods will be called on each device service service returned and included in the output. Available methods are: parent_device_service, child_device_service, data_source, device. :type methods: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param include: A list of associated object types to include in the output. 
The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: parent_device_service, child_device_service, data_source, device. :type include: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` DeviceServiceServiceID :param sort: The data field(s) to use for sorting the output. Default is DeviceServiceServiceID. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each DeviceServiceService. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return device_service_services: An array of the DeviceServiceService objects that match the specified input criteria. :rtype device_service_services: Array of DeviceServiceService """ return self.api_list_request(self._get_method_fullname("index"), kwargs) def search(self, **kwargs): """Lists the available device service services matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below. 
**Inputs** | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param ChildDeviceServiceID: The internal NetMRI identifier of the child service (the used service). :type ChildDeviceServiceID: Array of Integer | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. :type DataSourceID: Array of Integer | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceID: The internal NetMRI identifier for the device to which belongs this services. :type DeviceID: Array of Integer | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Array of Integer | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param ParentDeviceServiceID: The internal NetMRI identifier of the parent service (the user). :type ParentDeviceServiceID: Array of Integer | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvChangedCols: The fields that changed between this revision of the record and the previous revision. :type SvsvChangedCols: Array of String | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvEndTime: The ending effective time of this record, or empty if still in effect. :type SvsvEndTime: Array of DateTime | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvFirstSeenTime: The timestamp of when NetMRI saw for the first time this relationship. :type SvsvFirstSeenTime: Array of DateTime | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvProvisionData: Internal data - do not modify, may change without warning. :type SvsvProvisionData: Array of String | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvStartTime: The starting effective time of this record. :type SvsvStartTime: Array of DateTime | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvTimestamp: The date and time this record was collected or calculated. :type SvsvTimestamp: Array of DateTime | ``api version min:`` 2.6 | ``api version max:`` None | ``required:`` False | ``default:`` None :param SvsvUsage: An indicator of the kind of relationship. One of : child, protID, srcPrtID, dstPrtID, protDstID. The regular indicator is 'child'. :type SvsvUsage: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results. :type DeviceGroupID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param timestamp: The data returned will represent the device service services as of this date and time. If omitted, the result will indicate the most recently collected data. :type timestamp: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of device service service methods. 
The listed methods will be called on each device service service returned and included in the output. Available methods are: parent_device_service, child_device_service, data_source, device. :type methods: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: parent_device_service, child_device_service, data_source, device. :type include: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` DeviceServiceServiceID :param sort: The data field(s) to use for sorting the output. Default is DeviceServiceServiceID. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each DeviceServiceService. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param query: This value will be matched against device service services, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. 
The attributes searched are: ChildDeviceServiceID, DataSourceID, DeviceID, DeviceServiceServiceID, ParentDeviceServiceID, SvsvChangedCols, SvsvEndTime, SvsvFirstSeenTime, SvsvProvisionData, SvsvStartTime, SvsvTimestamp, SvsvUsage.
            :type query: String

            |  ``api version min:`` 2.3
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not associated with database filtering.
            :type xml_filter: String

            **Outputs**

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :return device_service_services: An array of the DeviceServiceService objects that match the specified input criteria.
            :rtype device_service_services: Array of DeviceServiceService

            """

        return self.api_list_request(self._get_method_fullname("search"), kwargs)

    def find(self, **kwargs):
        """Lists the available device service services matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: ChildDeviceServiceID, DataSourceID, DeviceID, DeviceServiceServiceID, ParentDeviceServiceID, SvsvChangedCols, SvsvEndTime, SvsvFirstSeenTime, SvsvProvisionData, SvsvStartTime, SvsvTimestamp, SvsvUsage.

            **Inputs**

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :param op_ChildDeviceServiceID: The operator to apply to the field ChildDeviceServiceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ChildDeviceServiceID: The internal NetMRI identifier of the child service (the used service). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
            :type op_ChildDeviceServiceID: String

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :param val_f_ChildDeviceServiceID: If op_ChildDeviceServiceID is specified, the field named in this input will be compared to the value in ChildDeviceServiceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ChildDeviceServiceID must be specified if op_ChildDeviceServiceID is specified.
            :type val_f_ChildDeviceServiceID: String

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :param val_c_ChildDeviceServiceID: If op_ChildDeviceServiceID is specified, this value will be compared to the value in ChildDeviceServiceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ChildDeviceServiceID must be specified if op_ChildDeviceServiceID is specified.
            :type val_c_ChildDeviceServiceID: String

            |  ``api version min:`` None
            |  ``api version max:`` None
            |  ``required:`` False
            |  ``default:`` None

            :param op_DataSourceID: The operator to apply to the field DataSourceID.
Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_DataSourceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified. :type val_f_DataSourceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified. :type val_c_DataSourceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device to which belongs this services. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_DeviceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified. :type val_f_DeviceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified. :type val_c_DeviceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_DeviceServiceServiceID: The operator to apply to the field DeviceServiceServiceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_DeviceServiceServiceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_DeviceServiceServiceID: If op_DeviceServiceServiceID is specified, the field named in this input will be compared to the value in DeviceServiceServiceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceServiceServiceID must be specified if op_DeviceServiceServiceID is specified. :type val_f_DeviceServiceServiceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_DeviceServiceServiceID: If op_DeviceServiceServiceID is specified, this value will be compared to the value in DeviceServiceServiceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceServiceServiceID must be specified if op_DeviceServiceServiceID is specified. :type val_c_DeviceServiceServiceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_ParentDeviceServiceID: The operator to apply to the field ParentDeviceServiceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ParentDeviceServiceID: The internal NetMRI identifier of the parent service (the user). For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_ParentDeviceServiceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_ParentDeviceServiceID: If op_ParentDeviceServiceID is specified, the field named in this input will be compared to the value in ParentDeviceServiceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ParentDeviceServiceID must be specified if op_ParentDeviceServiceID is specified. :type val_f_ParentDeviceServiceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_ParentDeviceServiceID: If op_ParentDeviceServiceID is specified, this value will be compared to the value in ParentDeviceServiceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ParentDeviceServiceID must be specified if op_ParentDeviceServiceID is specified. :type val_c_ParentDeviceServiceID: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SvsvChangedCols: The operator to apply to the field SvsvChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvChangedCols: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvChangedCols: If op_SvsvChangedCols is specified, the field named in this input will be compared to the value in SvsvChangedCols using the specified operator. 
That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvChangedCols must be specified if op_SvsvChangedCols is specified. :type val_f_SvsvChangedCols: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvChangedCols: If op_SvsvChangedCols is specified, this value will be compared to the value in SvsvChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SvsvChangedCols must be specified if op_SvsvChangedCols is specified. :type val_c_SvsvChangedCols: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SvsvEndTime: The operator to apply to the field SvsvEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvEndTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvEndTime: If op_SvsvEndTime is specified, the field named in this input will be compared to the value in SvsvEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvEndTime must be specified if op_SvsvEndTime is specified. :type val_f_SvsvEndTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvEndTime: If op_SvsvEndTime is specified, this value will be compared to the value in SvsvEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SvsvEndTime must be specified if op_SvsvEndTime is specified. :type val_c_SvsvEndTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SvsvFirstSeenTime: The operator to apply to the field SvsvFirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvFirstSeenTime: The timestamp of when NetMRI saw for the first time this relationship. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvFirstSeenTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvFirstSeenTime: If op_SvsvFirstSeenTime is specified, the field named in this input will be compared to the value in SvsvFirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvFirstSeenTime must be specified if op_SvsvFirstSeenTime is specified. :type val_f_SvsvFirstSeenTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvFirstSeenTime: If op_SvsvFirstSeenTime is specified, this value will be compared to the value in SvsvFirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. 
Either this field or val_f_SvsvFirstSeenTime must be specified if op_SvsvFirstSeenTime is specified. :type val_c_SvsvFirstSeenTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SvsvProvisionData: The operator to apply to the field SvsvProvisionData. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvProvisionData: Internal data - do not modify, may change without warning. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvProvisionData: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvProvisionData: If op_SvsvProvisionData is specified, the field named in this input will be compared to the value in SvsvProvisionData using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvProvisionData must be specified if op_SvsvProvisionData is specified. :type val_f_SvsvProvisionData: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvProvisionData: If op_SvsvProvisionData is specified, this value will be compared to the value in SvsvProvisionData using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SvsvProvisionData must be specified if op_SvsvProvisionData is specified. :type val_c_SvsvProvisionData: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SvsvStartTime: The operator to apply to the field SvsvStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvStartTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvStartTime: If op_SvsvStartTime is specified, the field named in this input will be compared to the value in SvsvStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvStartTime must be specified if op_SvsvStartTime is specified. :type val_f_SvsvStartTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvStartTime: If op_SvsvStartTime is specified, this value will be compared to the value in SvsvStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SvsvStartTime must be specified if op_SvsvStartTime is specified. :type val_c_SvsvStartTime: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_SvsvTimestamp: The operator to apply to the field SvsvTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvTimestamp: The date and time this record was collected or calculated. 
For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvTimestamp: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvTimestamp: If op_SvsvTimestamp is specified, the field named in this input will be compared to the value in SvsvTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvTimestamp must be specified if op_SvsvTimestamp is specified. :type val_f_SvsvTimestamp: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvTimestamp: If op_SvsvTimestamp is specified, this value will be compared to the value in SvsvTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SvsvTimestamp must be specified if op_SvsvTimestamp is specified.<|fim▁hole|> | ``required:`` False | ``default:`` None :param op_SvsvUsage: The operator to apply to the field SvsvUsage. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SvsvUsage: An indicator of the kind of relationship. One of : child, protID, srcPrtID, dstPrtID, protDstID. The regular indicator is 'child'. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_SvsvUsage: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_SvsvUsage: If op_SvsvUsage is specified, the field named in this input will be compared to the value in SvsvUsage using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SvsvUsage must be specified if op_SvsvUsage is specified. :type val_f_SvsvUsage: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_SvsvUsage: If op_SvsvUsage is specified, this value will be compared to the value in SvsvUsage using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SvsvUsage must be specified if op_SvsvUsage is specified. :type val_c_SvsvUsage: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results. :type DeviceGroupID: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param timestamp: The data returned will represent the device service services as of this date and time. If omitted, the result will indicate the most recently collected data. :type timestamp: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param methods: A list of device service service methods. The listed methods will be called on each device service service returned and included in the output. Available methods are: parent_device_service, child_device_service, data_source, device. 
:type methods: Array of String

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` None

             :param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: parent_device_service, child_device_service, data_source, device.
             :type include: Array of String

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` 0

             :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
             :type start: Integer

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` 1000

             :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
             :type limit: Integer

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` DeviceServiceServiceID

             :param sort: The data field(s) to use for sorting the output. Default is DeviceServiceServiceID. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData.
             :type sort: Array of String

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` asc

             :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
             :type dir: Array of String

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` None

             :param select: The list of attributes to return for each DeviceServiceService. Valid values are DeviceServiceServiceID, DeviceID, DataSourceID, ParentDeviceServiceID, ChildDeviceServiceID, SvsvFirstSeenTime, SvsvStartTime, SvsvEndTime, SvsvTimestamp, SvsvChangedCols, SvsvUsage, SvsvProvisionData. If empty or omitted, all attributes will be returned.
             :type select: Array

             | ``api version min:`` 2.8
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` None

             :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
             :type goto_field: String

             | ``api version min:`` 2.8
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` None

             :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
             :type goto_value: String

             | ``api version min:`` 2.3
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` None

             :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not paired with database-level filtering.
             :type xml_filter: String

             **Outputs**

             | ``api version min:`` None
             | ``api version max:`` None
             | ``required:`` False
             | ``default:`` None

             :return device_service_services: An array of the DeviceServiceService objects that match the specified input criteria.
:rtype device_service_services: Array of DeviceServiceService """ return self.api_list_request(self._get_method_fullname("find"), kwargs) def parent_device_service(self, **kwargs): """The parent service object of this relationship. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return : The parent service object of this relationship. :rtype : DeviceService """ return self.api_request(self._get_method_fullname("parent_device_service"), kwargs) def child_device_service(self, **kwargs): """The child service object of this relationship. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return : The child service object of this relationship. :rtype : DeviceService """ return self.api_request(self._get_method_fullname("child_device_service"), kwargs) def data_source(self, **kwargs): """The collector NetMRI that collected this data record. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return : The collector NetMRI that collected this data record. :rtype : DataSource """ return self.api_request(self._get_method_fullname("data_source"), kwargs) def device(self, **kwargs): """The device from which this data was collected. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param DeviceServiceServiceID: The internal NetMRI identifier of this usage relationship between service objects. :type DeviceServiceServiceID: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return : The device from which this data was collected. :rtype : Device """ return self.api_request(self._get_method_fullname("device"), kwargs)<|fim▁end|>
:type val_c_SvsvTimestamp: String | ``api version min:`` None | ``api version max:`` None
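
The op_/val_f_/val_c_ triplets documented in this record pair an operator with either a field-to-field comparison (val_f_) or a literal comparison (val_c_). The following is a minimal, hedged sketch of how such a filtered find call is typically issued from Python; the client class, the get_broker helper, and the host and credentials are illustrative assumptions, not values taken from the record above.

# Hypothetical usage sketch; host, credentials, and broker wiring are
# illustrative assumptions, not part of this record.
from infoblox_netmri.client import InfobloxNetMRI

client = InfobloxNetMRI(host="netmri.example.com",
                        username="admin",
                        password="secret")
broker = client.get_broker("DeviceServiceService")

# Constant comparison: val_c_* supplies the literal compared via op_*.
rows = broker.find(op_SvsvUsage="=",
                   val_c_SvsvUsage="child",
                   op_SvsvTimestamp=">=",
                   val_c_SvsvTimestamp="2015-01-01 00:00:00",
                   sort=["DeviceServiceServiceID"],
                   dir=["asc"],
                   limit=100)

# Field-to-field comparison: val_f_* names another field instead of a literal.
same_time = broker.find(op_SvsvStartTime="=",
                        val_f_SvsvStartTime="SvsvFirstSeenTime")
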
<|file_name|>Book.java<|end_file_name|><|fim▁begin|>package cvut.fit.borrowsystem.domain.entity; import org.springframework.data.mongodb.core.mapping.Document; /** * Created by Jakub Tuček on 03/06/16. */ @Document(collection = "item") public class Book extends Item { private int isbn; public Book() { } public Book(String itemName, int count, int isbn) { super(itemName, count); this.isbn = isbn; } <|fim▁hole|> public int getIsbn() { return isbn; } public void setIsbn(int isbn) { this.isbn = isbn; } }<|fim▁end|>
<|file_name|>NotificationsRegistrationsAnonymousClient.ts<|end_file_name|><|fim▁begin|>/* globals module */ /** * @module notificationsRegistrationsAnonymousClient * @description Notifications Registrations Anonymous Client provides an easy way to consume Notifications REST API end-points. In order to obtain needed routes `notificationsRegistrationsAnonymousClient` uses `notificationsRegistrationsAnonymousRoute`. */ import { injectable, inject } from "inversify"; import { ApiClient, IHttpResponse, httpTYPES } from '../../httpApi'; import { IQueryModel, IGetRequestOptions, IOptions } from '../../common/contracts';; import { NotificationsRegistrationsAnonymousBatchClient, NotificationsRegistrationsAnonymousRoute, TYPES as notificationsTypes } from './'; import { IAnonymousRegistration } from './contracts'; @injectable() export class NotificationsRegistrationsAnonymousClient { get routeDefinition(): NotificationsRegistrationsAnonymousRoute { return this.notificationsRegistrationsAnonymousRoute; } get batch(): NotificationsRegistrationsAnonymousBatchClient { return this.notificationsRegistrationsAnonymousBatchClient; } constructor( @inject(notificationsTypes.NotificationsRegistrationsAnonymousRoute) protected notificationsRegistrationsAnonymousRoute: NotificationsRegistrationsAnonymousRoute, @inject(notificationsTypes.NotificationsRegistrationsAnonymousBatchClient) protected notificationsRegistrationsAnonymousBatchClient: NotificationsRegistrationsAnonymousBatchClient, @inject(httpTYPES.ApiClient) protected apiClient: ApiClient ) { } /** * Returns a promise that is resolved once the create anonymous registration action has been performed; this action creates a new anonymous registration resource. * @method * @param data An AnonymousRegistration object that needs to be inserted into the system. * @returns A promise that is resolved once the create anonymous registration action has been performed. * @example notificationsRegistrationsAnonymousClient.create({ provider: '<provider-name>', providerdata: <provider-data>, expirationData: '<expiration-date>' }) .then(function (data) { // perform success action here }, function (response, status, headers, config) { // perform error handling here }); */ create(data: IAnonymousRegistration): PromiseLike<IHttpResponse<IAnonymousRegistration>> { return this.apiClient.post<IAnonymousRegistration>(this.routeDefinition.create(), this.routeDefinition.createParams(data)); } /** * Returns a promise that is resolved once the find action has been performed. Success response returns a list of anonymous registration resources matching the given criteria. * @method * @param options Query resource options object. * @returns A promise that is resolved once the find action has been performed. * @example notificationsRegistrationsAnonymousClient.find({ pageNumber : 1, pageSize : 10, orderBy : '<field>', orderDirection : '<asc|desc>', search : '<search-phrase>', providers: '<provider-name1>,<provider-name2>', embed: '<embed>' }) .then(function (collection) { // perform success action here }, function (response, status, headers, config) { // perform error handling here }); */ find(options?: IOptions): PromiseLike<IHttpResponse<IQueryModel<IAnonymousRegistration>>> { return this.apiClient.get<IQueryModel<IAnonymousRegistration>>(this.routeDefinition.find(options)); } /** * Returns a promise that is resolved once the get action has been performed. Success response returns the specified anonymous registration resource. 
* @method
     * @param id The registration identifier which uniquely identifies the AnonymousRegistration resource that needs to be retrieved.
     * @param options Query resource options object.
<|fim▁hole|>
                    }, function (response, status, headers, config) {
                        // perform error handling here
                    });
    */
    get(id: string, options?: IGetRequestOptions): PromiseLike<IHttpResponse<IAnonymousRegistration>> {
        return this.apiClient.get(this.notificationsRegistrationsAnonymousRoute.get(id, options));
    }

    /**
     * Returns a promise that is resolved once the remove anonymous registration action has been performed. This action will remove an anonymous registration resource from the system if successfully completed. This route uses HAL enabled objects to obtain routes and therefore it doesn't apply the `notificationsRegistrationsAnonymousRoute` route template. Here is an example of how a route can be obtained from HAL enabled objects:
     * ```
     * let params = modelMapper.removeParams(registration);
     * var uri = params['model'].links('delete').href;
     * ```
     * @method
     * @param data An object used to delete the specified AnonymousRegistration resource.
     * @returns A promise that is resolved once the remove anonymous registration action has been performed.
     * @example // registration is a resource previously fetched using get action.
                notificationsRegistrationsAnonymousClient.remove(registration)
                    .then(function (data) {
                        // perform success action here
                    }, function (response, status, headers, config) {
                        // perform error handling here
                    });
     */
    remove(data: IAnonymousRegistration): PromiseLike<IHttpResponse<void>> {
        return this.apiClient.delete<void>(this.routeDefinition.delete(data));
    }

    /**
     * Returns a promise that is resolved once the update anonymous registration action has been performed; this action updates an anonymous registration resource. This route uses HAL enabled objects to obtain routes and therefore it doesn't apply the `notificationsRegistrationsAnonymousRoute` route template. Here is an example of how a route can be obtained from HAL enabled objects:
     * ```
     * let params = modelMapper.updateParams(registration);
     * var uri = params['model'].links('put').href;
     * ```
     * @method
     * @param data An object used to update the specified AnonymousRegistration resource.
     * @returns A promise that is resolved once the update anonymous registration action has been performed.
     * @example // registration is a resource previously fetched using get action.
                registration.provider = '<provider-name>';
                notificationsRegistrationsAnonymousClient.update(registration)
                    .then(function (data) {
                        // perform success action here
                    }, function (response, status, headers, config) {
                        // perform error handling here
                    });
     */
    update(data: IAnonymousRegistration): PromiseLike<IHttpResponse<void>> {
        return this.apiClient.put<void>(this.routeDefinition.update(data), this.routeDefinition.updateParams(data));
    }
}

/**
 * @overview
 ***Notes:**
 - Refer to the [REST API documentation](https://github.com/Baasic/baasic-rest-api/wiki) for detailed information about available Baasic REST API end-points.
 - All end-point objects are transformed by the associated route service.
*/<|fim▁end|>
* @returns A promise that is resolved once the get action has been performed. * @example notificationsRegistrationsAnonymousClient.get('<registration-id>') .then(function (data) { // perform success action here
<|file_name|>zh.js<|end_file_name|><|fim▁begin|>// The translations in this file are added by default. 'use strict'; module.exports = { counterpart: { names: require('date-names/en'),<|fim▁hole|> default: '%a, %e %b %Y', long: '%A, %B %o, %Y', short: '%b %e', }, time: { default: '%H:%M', long: '%H:%M:%S %z', short: '%H:%M', }, datetime: { default: '%a, %e %b %Y %H:%M', long: '%A, %B %o, %Y %H:%M:%S %z', short: '%e %b %H:%M', }, }, }, };<|fim▁end|>
pluralize: require('pluralizers/en'), formats: { date: {
<|file_name|>app.component.spec.ts<|end_file_name|><|fim▁begin|>import { NO_ERRORS_SCHEMA } from '@angular/core';<|fim▁hole|>import { inject, async, TestBed, ComponentFixture } from '@angular/core/testing'; // Load the implementations that should be tested import { AppComponent } from './app.component'; import { AppState } from './app.service'; describe(`App`, () => { });<|fim▁end|>
<|file_name|>_codeblock.py<|end_file_name|><|fim▁begin|>class CodeBlock: """Code fragment for the readable format. """ def __init__(self, head, codes):<|fim▁hole|> self._head = '' if head == '' else head + ' ' self._codes = codes def _to_str_list(self, indent_width=0): codes = [] codes.append(' ' * indent_width + self._head + '{') for code in self._codes: next_indent_width = indent_width + 2 if isinstance(code, str): codes.append(' ' * next_indent_width + code) elif isinstance(code, CodeBlock): codes += code._to_str_list(indent_width=next_indent_width) else: assert False codes.append(' ' * indent_width + '}') return codes def __str__(self): """Emit CUDA program like the following format. <<head>> { <<begin codes>> ...; <<end codes>> } """ return '\n'.join(self._to_str_list())<|fim▁end|>
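
The CodeBlock class in the record above nests plain string lines and child blocks, rendering them with two-space indentation via _to_str_list. The following short sketch exercises that visible logic; the CUDA-style snippet is only an illustration of the "readable format" the docstring mentions, not part of the record.

# Usage sketch of the CodeBlock class defined above.
inner = CodeBlock('if (i < n)', ['x[i] = 0.0f;'])
outer = CodeBlock('__global__ void zero(float* x, int n)',
                  ['int i = threadIdx.x;', inner])
print(outer)
# Prints:
# __global__ void zero(float* x, int n) {
#   int i = threadIdx.x;
#   if (i < n) {
#     x[i] = 0.0f;
#   }
# }
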
<|file_name|>errors.js<|end_file_name|><|fim▁begin|>class NotFoundError extends Error { <|fim▁hole|> if (Error.hasOwnProperty('captureStackTrace')) { Error.captureStackTrace(this, this.constructor); } else { Object.defineProperty(this, 'stack', { value : (new Error()).stack }); } Object.defineProperty(this, 'message', { value : message }); } get name() { return this.constructor.name; } } export default { NotFoundError }<|fim▁end|>
constructor(message) { super();
<|file_name|>MMCLEVVersionedItemComponent.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright (c) 2013-2015 UAH Space Research Group. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * MICOBS SRG Team - Initial API and implementation ******************************************************************************/ package es.uah.aut.srg.micobs.mclev.library.mclevlibrary; /** * A representation of an MCLEV Library versioned item corresponding to the * model of a regular component. * * <p> * The following features are supported: * <ul> * <li>{@link es.uah.aut.srg.micobs.mclev.library.mclevlibrary.MMCLEVVersionedItemComponent#getSwPackageURI <em>Sw Package URI</em>}</li> * <li>{@link es.uah.aut.srg.micobs.mclev.library.mclevlibrary.MMCLEVVersionedItemComponent#getSwPackageVersion <em>Sw Package Version</em>}</li> * </ul> * </p> * * @see es.uah.aut.srg.micobs.mclev.library.mclevlibrary.mclevlibraryPackage#getMMCLEVVersionedItemComponent() * @model * @generated */ public interface MMCLEVVersionedItemComponent extends MMCLEVPackageVersionedItem { /** * Returns the URI of the MESP software package that stores the * implementation of the component or <code>null</code> if no software * package is defined for the component. * @return the URI of the attached MESP software package or * <code>null</code> if no software package is defined for the component. * @see #setSwPackageURI(String) * @see es.uah.aut.srg.micobs.mclev.library.mclevlibrary.mclevlibraryPackage#getMMCLEVVersionedItemComponent_SwPackageURI() * @model * @generated */ String getSwPackageURI(); /** * Sets the URI of the MESP software package that stores the * implementation of the component. * @param value the new URI of the attached MESP software package. * @see #getSwPackageURI() * @generated */ void setSwPackageURI(String value); /** * Returns the version of the MESP software package that stores the * implementation of the component or <code>null</code> if no software * package is defined for the component. * @return the version of the attached MESP software package or * <code>null</code> if no software package is defined for the component. * @see #setSwPackageVersion(String) * @see es.uah.aut.srg.micobs.mclev.library.mclevlibrary.mclevlibraryPackage#getMMCLEVVersionedItemComponent_SwPackageVersion() * @model * @generated */ String getSwPackageVersion(); /** * Sets the version of the MESP software package that stores the * implementation of the component. * @param value the new version of the attached MESP software package. * @see #getSwPackageVersion() * @generated */ void setSwPackageVersion(String value); /** * Returns the value of the '<em><b>Sw Interface URI</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Sw Interface URI</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --><|fim▁hole|> * @model * @generated */ String getSwInterfaceURI(); /** * Sets the value of the '{@link es.uah.aut.srg.micobs.mclev.library.mclevlibrary.MMCLEVVersionedItemComponent#getSwInterfaceURI <em>Sw Interface URI</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Sw Interface URI</em>' attribute. 
* @see #getSwInterfaceURI() * @generated */ void setSwInterfaceURI(String value); /** * Returns the value of the '<em><b>Sw Interface Version</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Sw Interface Version</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Sw Interface Version</em>' attribute. * @see #setSwInterfaceVersion(String) * @see es.uah.aut.srg.micobs.mclev.library.mclevlibrary.mclevlibraryPackage#getMMCLEVVersionedItemComponent_SwInterfaceVersion() * @model * @generated */ String getSwInterfaceVersion(); /** * Sets the value of the '{@link es.uah.aut.srg.micobs.mclev.library.mclevlibrary.MMCLEVVersionedItemComponent#getSwInterfaceVersion <em>Sw Interface Version</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Sw Interface Version</em>' attribute. * @see #getSwInterfaceVersion() * @generated */ void setSwInterfaceVersion(String value); }<|fim▁end|>
* @return the value of the '<em>Sw Interface URI</em>' attribute. * @see #setSwInterfaceURI(String) * @see es.uah.aut.srg.micobs.mclev.library.mclevlibrary.mclevlibraryPackage#getMMCLEVVersionedItemComponent_SwInterfaceURI()
<|file_name|>testRobotAfma6.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************** * * ViSP, open source Visual Servoing Platform software.<|fim▁hole|> * This software is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * See the file LICENSE.txt at the root directory of this source * distribution for additional information about the GNU GPL. * * For using ViSP with software that can not be combined with the GNU * GPL, please contact Inria about acquiring a ViSP Professional * Edition License. * * See http://visp.inria.fr for more information. * * This software was developed at: * Inria Rennes - Bretagne Atlantique * Campus Universitaire de Beaulieu * 35042 Rennes Cedex * France * * If you have questions regarding the use of this file, please contact * Inria at [email protected] * * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. * * Description: * Test for Afma 6 dof robot. * * Authors: * Fabien Spindler * *****************************************************************************/ /*! \example testRobotAfma6.cpp Example of a real robot control, the Afma6 robot (cartesian robot, with 6 degrees of freedom). */ #include <visp3/core/vpCameraParameters.h> #include <visp3/core/vpDebug.h> #include <visp3/robot/vpRobotAfma6.h> #include <iostream> #ifdef VISP_HAVE_AFMA6 int main() { try { std::cout << "a test for vpRobotAfma6 class..." << std::endl; vpRobotAfma6 afma6; vpCameraParameters cam; std::cout << "-- Default settings for Afma6 ---" << std::endl; std::cout << afma6 << std::endl; afma6.getCameraParameters(cam, 640, 480); std::cout << cam << std::endl; std::cout << "-- Settings associated to the CCMOP tool without distortion ---" << std::endl; afma6.init(vpAfma6::TOOL_CCMOP); std::cout << afma6 << std::endl; afma6.getCameraParameters(cam, 640, 480); std::cout << cam << std::endl; std::cout << "-- Settings associated to CCMOP tool with distortion ------" << std::endl; afma6.init(vpAfma6::TOOL_CCMOP, vpCameraParameters::perspectiveProjWithDistortion); std::cout << afma6 << std::endl; afma6.getCameraParameters(cam, 640, 480); std::cout << cam << std::endl; std::cout << "-- Settings associated to the gripper tool without distortion ---" << std::endl; afma6.init(vpAfma6::TOOL_GRIPPER); std::cout << afma6 << std::endl; afma6.getCameraParameters(cam, 640, 480); std::cout << cam << std::endl; std::cout << "-- Settings associated to gripper tool with distortion ------" << std::endl; afma6.init(vpAfma6::TOOL_GRIPPER, vpCameraParameters::perspectiveProjWithDistortion); std::cout << afma6 << std::endl; afma6.getCameraParameters(cam, 640, 480); std::cout << cam << std::endl; } catch (const vpException &e) { std::cout << "Catch an exception: " << e << std::endl; } return 0; } #else int main() { std::cout << "The real Afma6 robot controller is not available." << std::endl; return 0; } #endif<|fim▁end|>
* Copyright (C) 2005 - 2019 by Inria. All rights reserved. *
<|file_name|>CT_TablePartStyle_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Baliance. All rights reserved.<|fim▁hole|>// DO NOT EDIT: generated by gooxml ECMA-376 generator // // Use of this source code is governed by the terms of the Affero GNU General // Public License version 3.0 as published by the Free Software Foundation and // appearing in the file LICENSE included in the packaging of this file. A // commercial license can be purchased by contacting [email protected]. package dml_test import ( "encoding/xml" "testing" "baliance.com/gooxml/schema/soo/dml" ) func TestCT_TablePartStyleConstructor(t *testing.T) { v := dml.NewCT_TablePartStyle() if v == nil { t.Errorf("dml.NewCT_TablePartStyle must return a non-nil value") } if err := v.Validate(); err != nil { t.Errorf("newly constructed dml.CT_TablePartStyle should validate: %s", err) } } func TestCT_TablePartStyleMarshalUnmarshal(t *testing.T) { v := dml.NewCT_TablePartStyle() buf, _ := xml.Marshal(v) v2 := dml.NewCT_TablePartStyle() xml.Unmarshal(buf, v2) }<|fim▁end|>
//
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # """ Django settings for pdc project. Generated by 'django-admin startproject' using Django 1.8.1. For more information on this file, see<|fim▁hole|>https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os import sys BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '3hm)=^*sowhxr%m)%_u3mk+!ncy=c)147xbevej%l_lcdogu#+' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False ALLOWED_HOSTS = [] ITEMS_PER_PAGE = 50 # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'rest_framework.authtoken', 'corsheaders', 'pdc.apps.auth', 'pdc.apps.common', 'pdc.apps.compose', 'pdc.apps.package', 'pdc.apps.release', 'pdc.apps.repository', 'pdc.apps.contact', 'pdc.apps.component', 'pdc.apps.changeset', 'pdc.apps.utils', 'pdc.apps.bindings', 'pdc.apps.usage', 'pdc.apps.osbs', 'mptt', ) AUTH_USER_MODEL = 'kerb_auth.User' REST_FRAMEWORK = { 'DEFAULT_AUTHENTICATION_CLASSES': ( 'pdc.apps.auth.authentication.TokenAuthenticationWithChangeSet', 'rest_framework.authentication.SessionAuthentication', ), 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.DjangoModelPermissions' ], 'DEFAULT_METADATA_CLASS': 'contrib.bulk_operations.metadata.BulkMetadata', 'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',), 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', 'pdc.apps.common.renderers.ReadOnlyBrowsableAPIRenderer', ), 'EXCEPTION_HANDLER': 'pdc.apps.common.handlers.exception_handler', 'DEFAULT_PAGINATION_CLASS': 'pdc.apps.common.pagination.AutoDetectedPageNumberPagination', 'NON_FIELD_ERRORS_KEY': 'detail', } MIDDLEWARE_CLASSES = ( 'corsheaders.middleware.CorsMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'pdc.apps.auth.middleware.RemoteUserMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', 'kobo.django.menu.middleware.MenuMiddleware', 'pdc.apps.usage.middleware.UsageMiddleware', 'pdc.apps.changeset.middleware.ChangesetMiddleware', 'pdc.apps.utils.middleware.MessagingMiddleware', ) AUTHENTICATION_BACKENDS = ( 'pdc.apps.auth.backends.KerberosUserBackend', #'pdc.apps.auth.backends.AuthMellonUserBackend', 'django.contrib.auth.backends.ModelBackend', ) LOGIN_URL = '/auth/krb5login' LOGIN_REDIRECT_URL = '/' ROOT_URLCONF = 'pdc.urls' import kobo ROOT_MENUCONF = "pdc.menu" TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(BASE_DIR, "pdc/templates"), os.path.join(os.path.dirname(kobo.__file__), "hub", "templates"), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 
'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'kobo.django.menu.context_processors.menu_context_processor',
            ],
        },
    },
]

WSGI_APPLICATION = 'pdc.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = '/usr/share/pdc/static'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "pdc/static"),
    "/usr/share/patternfly1/resources",
)

REST_API_URL = 'rest_api/'
REST_API_VERSION = 'v1'
REST_API_PAGE_SIZE = 20
REST_API_PAGE_SIZE_QUERY_PARAM = 'page_size'
REST_API_MAX_PAGE_SIZE = 100

API_HELP_TEMPLATE = "api/help.html"

DIST_GIT_WEB_ROOT_URL = "http://pkgs.example.com/cgit/"
DIST_GIT_RPM_PATH = 'rpms/'
DIST_GIT_REPO_FORMAT = DIST_GIT_WEB_ROOT_URL + DIST_GIT_RPM_PATH + "%s"
DIST_GIT_BRANCH_FORMAT = "?h=%s"

# ldap settings
LDAP_URI = "ldap://ldap.example.com:389"
LDAP_USERS_DN = "ou=users,dc=example,dc=com"
LDAP_GROUPS_DN = "ou=groups,dc=example,dc=com"
LDAP_CACHE_HOURS = 24

#
# CORS settings
#

# The requests can come from any origin (hostname). If this is undesirable, use
# settings_local.py module, set this to False and either set
# CORS_ORIGIN_WHITELIST to a tuple of hostnames that are allowed to contact the
# API, or set CORS_ORIGIN_REGEX_WHITELIST, which again is a tuple of regular
# expressions.
CORS_ORIGIN_ALLOW_ALL = True

# Only the REST API can be accessed. If settings_local.py overrides
# REST_API_URL, make sure to update this setting as well.
CORS_URLS_REGEX = '^/%s.*$' % REST_API_URL

# We want HTML/JS clients to be able to use Kerberos authentication.
CORS_ALLOW_CREDENTIALS = True

# Allow default headers from django-cors-headers package as well as
# PDC-Change-Comment custom header.
CORS_ALLOW_HEADERS = (
    'x-requested-with',
    'content-type',
    'accept',
    'origin',
    'authorization',
    'x-csrftoken',
    'pdc-change-comment',
)

# mock kerberos login for debugging
DEBUG_USER = None

BROWSABLE_DOCUMENT_MACROS = {
    # needs to be rewritten with the real host name on deployment.
    'HOST_NAME': 'http://localhost:8000',
    # make consistent with rest api root.
    'API_PATH': '%s%s' % (REST_API_URL, REST_API_VERSION),
}

EMPTY_PATCH_ERROR_RESPONSE = {
    'detail': 'Partial update with no changes does not make much sense.',
    'hint': ' '.join(['Please make sure the URL includes the trailing slash.',
                      'Some software may automatically redirect you to the',
                      'correct URL but not forward the request body.'])
}

INTERNAL_SERVER_ERROR_RESPONSE = {
    'detail': 'The server encountered an internal error or misconfiguration and was unable to complete your request.'
}

# Messaging Bus Config
MESSAGE_BUS = {
    # MLP: Messaging Library Package
    # e.g. `fedmsg` for fedmsg or `kombu` for AMQP and other transports that
    # `kombu` supports. `stomp` for STOMP support.
'MLP': '', # # `fedmsg` config example: # # fedmsg's config is managed by `fedmsg` package, so normally here just need to set the # # 'MLP' to 'fedmsg' # 'MLP': 'fedmsg', # # # `kombu` config example: # 'MLP': 'kombu', # 'URL': 'amqp://guest:[email protected]:5672//', # 'EXCHANGE': { # 'name': 'pdc', # 'type': 'topic', # 'durable': False # }, # 'OPTIONS': { # # Set these two items to config `kombu` to use ssl. # 'login_method': 'EXTERNAL', # 'ssl': { # 'ca_certs': '', # 'keyfile': '', # 'certfile': '', # 'cert_reqs': ssl.CERT_REQUIRED, # } # } # # # `stomp` config items: # 'MLP': 'stomp', # 'HOST_AND_PORTS': [ # ('stomp.example1.com', 61613), # ('stomp.example2.com', 61613), # ('stomp.example3.com', 61613), # ], # 'TOPIC': 'pdc', # 'CERT_FILE': '', # 'KEY_FILE': '', } # ======== Email configuration ========= # Email addresses who would like to receive email ADMINS = (('PDC Dev', '[email protected]'),) # Email SMTP HOST configuration EMAIL_HOST = 'smtp.example.com' # Email sender's address SERVER_EMAIL = '[email protected]' EMAIL_SUBJECT_PREFIX = '[PDC]' LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(process)d [%(filename)s -- %(module)s.%(funcName)s:%(lineno)d] [%(levelname)s]- %(message)s' }, }, 'handlers': { 'stderr': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'verbose', 'stream': sys.stderr }, 'stdout': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'verbose', 'stream': sys.stdout }, 'watchedfile': { 'level': 'INFO', 'class': 'logging.handlers.WatchedFileHandler', 'formatter': 'verbose', 'filename': '/var/log/pdc/server.log', 'delay': True, }, # Send a warning email if we want it. 'mail_admins': { 'level': 'ERROR', 'class': 'django.utils.log.AdminEmailHandler', 'include_html': True, } }, 'loggers': { 'pdc': { 'handlers': ['stderr'], 'level': 'INFO', }, 'django.request': { 'handlers': ['stderr'], 'level': 'ERROR', 'propagate': False, } } } # Attempts to import server specific settings. # Note that all server specific settings should go to 'settings_local.py' try: from settings_local import * # noqa except ImportError: pass if 'pdc.apps.bindings' in INSTALLED_APPS: WITH_BINDINGS = True else: WITH_BINDINGS = False<|fim▁end|>
https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see
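
The settings module above repeatedly defers deployment-specific values to a settings_local.py override (the CORS comments, the database, REST_API_URL, and the final try/import block). Below is a hedged sketch of what such an override could look like; every value is an illustrative placeholder and none of it is taken from the record.

# Hypothetical settings_local.py for a deployment; all values are placeholders.
ALLOWED_HOSTS = ['pdc.example.com']

# Replace the development SQLite database with a real server.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'pdc',
        'USER': 'pdc',
        'PASSWORD': 'change-me',
        'HOST': 'db.example.com',
        'PORT': '5432',
    }
}

# Lock CORS down instead of allowing every origin, as the comments suggest.
CORS_ORIGIN_ALLOW_ALL = False
CORS_ORIGIN_WHITELIST = ('pdc.example.com',)
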
<|file_name|>mtadv.py<|end_file_name|><|fim▁begin|>import sys

from ..pakbase import Package


class Mt3dAdv(Package):
    """
    MT3DMS Advection Package Class.

    Parameters
    ----------
    model : model object
        The model object (of type :class:`flopy.mt3d.mt.Mt3dms`) to which
        this package will be added.
    mixelm : int
        MIXELM is an integer flag for the advection solution option.
        MIXELM = 0, the standard finite-difference method with upstream or
        central-in-space weighting, depending on the value of NADVFD;
        = 1, the forward-tracking method of characteristics (MOC);
        = 2, the backward-tracking modified method of characteristics (MMOC);
        = 3, the hybrid method of characteristics (HMOC) with MOC or MMOC
        automatically and dynamically selected;
        = -1, the third-order TVD scheme (ULTIMATE).
    percel : float
        PERCEL is the Courant number (i.e., the number of cells, or a
        fraction of a cell) advection will be allowed in any direction in one
        transport step.
        For implicit finite-difference or particle-tracking-based schemes,
        there is no limit on PERCEL, but for accuracy reasons, it is
        generally not set much greater than one. Note, however, that the
        PERCEL limit is checked over the entire model grid. Thus, even if
        PERCEL > 1, advection may not be more than one cell's length at most
        model locations.
        For the explicit finite-difference or the third-order TVD scheme,
        PERCEL is also a stability constraint which must not exceed one and
        will be automatically reset to one if a value greater than one is
        specified.
    mxpart : int
        MXPART is the maximum total number of moving particles allowed and
        is used only when MIXELM = 1 or 3.
    nadvfd : int
        NADVFD is an integer flag indicating which weighting scheme should
        be used; it is needed only when the advection term is solved using
        the implicit finite-difference method.
        NADVFD = 0 or 1, upstream weighting (default);
        = 2, central-in-space weighting.
    itrack : int
        ITRACK is a flag indicating which particle-tracking algorithm is
        selected for the Eulerian-Lagrangian methods.
        ITRACK = 1, the first-order Euler algorithm is used.
        = 2, the fourth-order Runge-Kutta algorithm is used; this option is
        computationally demanding and may be needed only when PERCEL is set
        greater than one.
        = 3, the hybrid first- and fourth-order algorithm is used; the
        Runge-Kutta algorithm is used in sink/source cells and the cells
        next to sinks/sources while the Euler algorithm is used elsewhere.
    wd : float
        WD is a concentration weighting factor between 0.5 and 1. It is used
        for operator splitting in the particle-tracking-based methods. The
        value of 0.5 is generally adequate. The value of WD may be adjusted
        to achieve better mass balance. Generally, it can be increased
        toward 1.0 as advection becomes more dominant.
    dceps : float
        DCEPS is a small Relative Cell Concentration Gradient below which
        advective transport is considered negligible.
    nplane : int
        NPLANE is a flag indicating whether the random or fixed pattern is
        selected for initial placement of moving particles.
        If NPLANE = 0, the random pattern is selected for initial placement.
        Particles are distributed randomly in both the horizontal and
        vertical directions by calling a random number generator
        (Figure 18b). This option is usually preferred and leads to smaller
        mass balance discrepancy in nonuniform or diverging/converging flow
        fields.
        If NPLANE > 0, the fixed pattern is selected for initial placement.
        The value of NPLANE serves as the number of vertical 'planes' on
        which initial particles are placed within each cell block
        (Figure 18a).
        The fixed pattern may work better than the random pattern only in
        relatively uniform flow fields. For two-dimensional simulations in
        plan view, set NPLANE = 1. For cross sectional or three-dimensional
        simulations, NPLANE = 2 is normally adequate. Increase NPLANE if
        more resolution in the vertical direction is desired.
    npl : int
        NPL is the number of initial particles per cell to be placed at
        cells where the Relative Cell Concentration Gradient is less than or
        equal to DCEPS. Generally, NPL can be set to zero since advection is
        considered insignificant when the Relative Cell Concentration
        Gradient is less than or equal to DCEPS. Setting NPL equal to NPH
        causes a uniform number of particles to be placed in every cell over
        the entire grid (i.e., the uniform approach).
    nph : int
        NPH is the number of initial particles per cell to be placed at
        cells where the Relative Cell Concentration Gradient is greater than
        DCEPS. The selection of NPH depends on the nature of the flow field
        and also the computer memory limitation. Generally, a smaller number
        should be used in relatively uniform flow fields and a larger number
        should be used in relatively nonuniform flow fields. However, values
        exceeding 16 in two-dimensional simulation or 32 in
        three-dimensional simulation are rarely necessary. If the random
        pattern is chosen, NPH particles are randomly distributed within the
        cell block. If the fixed pattern is chosen, NPH is divided by NPLANE
        to yield the number of particles to be placed per vertical plane,
        which is rounded to one of the values shown in Figure 30.
    npmin : int
        NPMIN is the minimum number of particles allowed per cell. If the
        number of particles in a cell at the end of a transport step is
        fewer than NPMIN, new particles are inserted into that cell to
        maintain a sufficient number of particles. NPMIN can be set to zero
        in relatively uniform flow fields and to a number greater than zero
        in diverging/converging flow fields. Generally, a value between zero
        and four is adequate.
    npmax : int
        NPMAX is the maximum number of particles allowed per cell. If the
        number of particles in a cell exceeds NPMAX, all particles are
        removed from that cell and replaced by a new set of particles equal
        to NPH to maintain mass balance. Generally, NPMAX can be set to
        approximately two times NPH.
    interp : int
        INTERP is a flag indicating the concentration interpolation method
        for use in the MMOC scheme. Currently, only linear interpolation is
        implemented.
    nlsink : int
        NLSINK is a flag indicating whether the random or fixed pattern is
        selected for initial placement of particles to approximate sink
        cells in the MMOC scheme. The convention is the same as that for
        NPLANE. It is generally adequate to set NLSINK equivalent to NPLANE.
    npsink : int
        NPSINK is the number of particles used to approximate sink cells in
        the MMOC scheme. The convention is the same as that for NPH. It is
        generally adequate to set NPSINK equivalent to NPH.
    dchmoc : float
        DCHMOC is the critical Relative Concentration Gradient for
        controlling the selective use of either MOC or MMOC in the HMOC
        solution scheme.
        The MOC solution is selected at cells where the Relative
        Concentration Gradient is greater than DCHMOC.
        The MMOC solution is selected at cells where the Relative
        Concentration Gradient is less than or equal to DCHMOC.
    extension : string
        Filename extension (default is 'adv')
    unitnumber : int
        File unit number (default is None).
    filenames : str or list of str
        Filenames to use for the package.
If filenames=None the package name will be created using the model name and package extension. If a single string is passed the package will be set to the string. Default is None. Attributes ---------- Methods ------- See Also -------- Notes ----- Examples -------- >>> import flopy >>> m = flopy.mt3d.Mt3dms() >>> adv = flopy.mt3d.Mt3dAdv(m) """ def __init__(self, model, mixelm=3, percel=0.75, mxpart=800000, nadvfd=1, itrack=3, wd=0.5, dceps=1e-5, nplane=2, npl=10, nph=40, npmin=5, npmax=80, nlsink=0, npsink=15, dchmoc=0.0001, extension='adv', unitnumber=None, filenames=None): if unitnumber is None: unitnumber = Mt3dAdv.defaultunit() elif unitnumber == 0: unitnumber = Mt3dAdv.reservedunit() # set filenames if filenames is None: filenames = [None] elif isinstance(filenames, str): filenames = [filenames] # Fill namefile items name = [Mt3dAdv.ftype()] units = [unitnumber] extra = [''] # set package name fname = [filenames[0]] # Call ancestor's init to set self.parent, extension, name and unit number Package.__init__(self, model, extension=extension, name=name, unit_number=units, extra=extra, filenames=fname) self.mixelm = mixelm self.percel = percel self.mxpart = mxpart self.nadvfd = nadvfd self.mixelm = mixelm self.itrack = itrack self.wd = wd self.dceps = dceps self.nplane = nplane self.npl = npl self.nph = nph self. npmin = npmin self.npmax = npmax self.interp = 1 # Command-line 'interp' might once be needed if MT3DMS is updated to include other interpolation method self.nlsink = nlsink self.npsink = npsink self.dchmoc = dchmoc self.parent.add_package(self) return def write_file(self): """ Write the package file Returns ------- None """ f_adv = open(self.fn_path, 'w') f_adv.write('%10i%10f%10i%10i\n' % (self.mixelm, self.percel, self.mxpart, self.nadvfd)) if (self.mixelm > 0): f_adv.write('%10i%10f\n' % (self.itrack, self.wd)) if ((self.mixelm == 1) or (self.mixelm == 3)): f_adv.write('%10.4e%10i%10i%10i%10i%10i\n' % (self.dceps, self.nplane, self.npl, self.nph, self. npmin, self.npmax)) if ((self.mixelm == 2) or (self.mixelm == 3)): f_adv.write('%10i%10i%10i\n' % (self.interp, self.nlsink, self.npsink)) if (self.mixelm == 3): f_adv.write('%10f\n' % (self.dchmoc)) f_adv.close() return @staticmethod def load(f, model, ext_unit_dict=None): """ Load an existing package. Parameters ---------- f : filename or file handle File to load. model : model object The model object (of type :class:`flopy.mt3d.mt.Mt3dms`) to which this package will be added. ext_unit_dict : dictionary, optional If the arrays in the file are specified using EXTERNAL, or older style array control records, then `f` should be a file handle. In this case ext_unit_dict is required, which can be constructed using the function :class:`flopy.utils.mfreadnam.parsenamefile`. Returns ------- adv : Mt3dAdv object Mt3dAdv object. 
Examples -------- >>> import flopy >>> mt = flopy.mt3d.Mt3dms() >>> adv = flopy.mt3d.Mt3dAdv.load('test.adv', m) """ if model.verbose: sys.stdout.write('loading adv package file...\n') # Open file, if necessary if not hasattr(f, 'read'): filename = f f = open(filename, 'r') # Dataset 0 -- comment line while True: line = f.readline() if line[0] != '#': break # Item B1: MIXELM, PERCEL, MXPART, NADVFD - line already read above if model.verbose: print(' loading MIXELM, PERCEL, MXPART, NADVFD...') mixelm = int(line[0:10]) percel = float(line[10:20]) mxpart = 0 if mixelm == 1 or mixelm == 3: if len(line[20:30].strip()) > 0: mxpart = int(line[20:30]) nadvfd = 0 <|fim▁hole|> print(' MIXELM {}'.format(mixelm)) print(' PERCEL {}'.format(nadvfd)) print(' MXPART {}'.format(mxpart)) print(' NADVFD {}'.format(nadvfd)) # Item B2: ITRACK WD itrack = None wd = None if mixelm == 1 or mixelm == 2 or mixelm == 3: if model.verbose: print(' loading ITRACK, WD...') line = f.readline() itrack = int(line[0:10]) wd = float(line[10:20]) if model.verbose: print(' ITRACK {}'.format(itrack)) print(' WD {}'.format(wd)) # Item B3: DCEPS, NPLANE, NPL, NPH, NPMIN, NPMAX dceps = None nplane = None npl = None nph = None npmin = None npmax = None if mixelm == 1 or mixelm == 3: if model.verbose: print(' loading DCEPS, NPLANE, NPL, NPH, NPMIN, NPMAX...') line = f.readline() dceps = float(line[0:10]) nplane = int(line[10:20]) npl = int(line[20:30]) nph = int(line[30:40]) npmin = int(line[40:50]) npmax = int(line[50:60]) if model.verbose: print(' DCEPS {}'.format(dceps)) print(' NPLANE {}'.format(nplane)) print(' NPL {}'.format(npl)) print(' NPH {}'.format(nph)) print(' NPMIN {}'.format(npmin)) print(' NPMAX {}'.format(npmax)) # Item B4: INTERP, NLSINK, NPSINK interp = None nlsink = None npsink = None if mixelm == 2 or mixelm == 3: if model.verbose: print(' loading INTERP, NLSINK, NPSINK...') line = f.readline() interp = int(line[0:10]) nlsink = int(line[10:20]) npsink = int(line[20:30]) if model.verbose: print(' INTERP {}'.format(interp)) print(' NLSINK {}'.format(nlsink)) print(' NPSINK {}'.format(npsink)) # Item B5: DCHMOC dchmoc = None if mixelm == 3: if model.verbose: print(' loading DCHMOC...') line = f.readline() dchmoc = float(line[0:10]) if model.verbose: print(' DCHMOC {}'.format(dchmoc)) # set package unit number unitnumber = None filenames = [None] if ext_unit_dict is not None: unitnumber, filenames[0] = \ model.get_ext_dict_attr(ext_unit_dict, filetype=Mt3dAdv.ftype()) # Construct and return adv package adv = Mt3dAdv(model, mixelm=mixelm, percel=percel, mxpart=mxpart, nadvfd=nadvfd, itrack=itrack, wd=wd, dceps=dceps, nplane=nplane, npl=npl, nph=nph, npmin=npmin, npmax=npmax, nlsink=nlsink, npsink=npsink, dchmoc=dchmoc, unitnumber=unitnumber, filenames=filenames) return adv @staticmethod def ftype(): return 'ADV' @staticmethod def defaultunit(): return 32 @staticmethod def reservedunit(): return 2<|fim▁end|>
if mixelm == 0: if len(line[30:40].strip()) > 0: nadvfd = int(line[30:40]) if model.verbose:
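
Following the docstring's own Examples section, here is a short sketch that builds a transport model, attaches the advection package with the hybrid HMOC scheme, writes the fixed-format ADV file, and loads it back. The model name 'demo' is an illustrative assumption; the calls mirror the write_file and load methods shown in the record above.

# Sketch based on the class above; 'demo' is an illustrative model name.
import flopy

mt = flopy.mt3d.Mt3dms(modelname='demo')
# HMOC (mixelm=3) with the default Courant number and particle controls.
adv = flopy.mt3d.Mt3dAdv(mt, mixelm=3, percel=0.75, nph=40, npmax=80)
adv.write_file()   # emits the B1-B5 records produced by write_file() above

# Round-trip through the loader shown in the same class.
adv2 = flopy.mt3d.Mt3dAdv.load('demo.adv', mt)
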
<|file_name|>plugins.rs<|end_file_name|><|fim▁begin|>use crate::{ config::{self}, prelude::*, }; use neon::{prelude::*, result::Throw}; use std::str::FromStr; use stencila::{ config::Config, tokio::sync::MutexGuard, }; use plugins::{self, Plugin, PluginInstallation, Plugins, PLUGINS}; /// Lock the global plugins store pub fn lock(cx: &mut FunctionContext) -> NeonResult<MutexGuard<'static, Plugins>> { match PLUGINS.try_lock() { Ok(guard) => Ok(guard), Err(error) => cx.throw_error(format!( "When attempting to lock plugins: {}", error.to_string() )), } } /// Get plugin schema pub fn schema(cx: FunctionContext) -> JsResult<JsString> { let schema = Plugin::schema(); to_json_or_throw(cx, schema) } /// List plugins pub fn list(mut cx: FunctionContext) -> JsResult<JsString> { let aliases = &config::lock(&mut cx)?.plugins.aliases; let plugins = &*lock(&mut cx)?; to_json(cx, plugins.list_plugins(aliases)) } /// Install a plugin pub fn install(mut cx: FunctionContext) -> JsResult<JsString> { let spec = &cx.argument::<JsString>(0)?.value(&mut cx); let config = &config::lock(&mut cx)?; let installs = &installations(&mut cx, 1, config)?; let aliases = &config.plugins.aliases; let plugins = &mut *lock(&mut cx)?; match RUNTIME.block_on(async { Plugin::install(spec, installs, aliases, plugins, None).await })<|fim▁hole|>} /// Uninstall a plugin pub fn uninstall(mut cx: FunctionContext) -> JsResult<JsString> { let alias = &cx.argument::<JsString>(0)?.value(&mut cx); let aliases = &config::lock(&mut cx)?.plugins.aliases; let plugins = &mut *lock(&mut cx)?; match Plugin::uninstall(alias, aliases, plugins) { Ok(_) => to_json(cx, plugins.list_plugins(aliases)), Err(error) => cx.throw_error(error.to_string()), } } /// Upgrade a plugin pub fn upgrade(mut cx: FunctionContext) -> JsResult<JsString> { let spec = &cx.argument::<JsString>(0)?.value(&mut cx); let config = &config::lock(&mut cx)?; let installs = &config.plugins.installations; let aliases = &config.plugins.aliases; let plugins = &mut *lock(&mut cx)?; match RUNTIME.block_on(async { Plugin::upgrade(spec, installs, aliases, plugins).await }) { Ok(_) => to_json(cx, plugins.list_plugins(aliases)), Err(error) => cx.throw_error(error.to_string()), } } /// Refresh plugins pub fn refresh(mut cx: FunctionContext) -> JsResult<JsString> { let arg = cx.argument::<JsArray>(0)?.to_vec(&mut cx)?; let list = arg .iter() .map(|item| { item.to_string(&mut cx) .expect("Unable to convert to string") .value(&mut cx) }) .collect(); let config = &config::lock(&mut cx)?; let aliases = &config.plugins.aliases; let plugins = &mut *lock(&mut cx)?; match RUNTIME.block_on(async { Plugin::refresh_list(list, aliases, plugins).await }) { Ok(_) => to_json(cx, plugins.list_plugins(aliases)), Err(error) => cx.throw_error(error.to_string()), } } /// Get the `installations` argument, falling back to the array in `config.plugins.installations` pub fn installations( cx: &mut FunctionContext, position: i32, config: &Config, ) -> Result<Vec<PluginInstallation>, Throw> { let arg = cx.argument::<JsArray>(position)?.to_vec(cx)?; if arg.is_empty() { Ok(config.plugins.installations.clone()) } else { let mut installations = Vec::new(); for value in arg { let str = value.to_string(cx)?.value(cx); let installation = match plugins::PluginInstallation::from_str(&str) { Ok(value) => value, Err(error) => return cx.throw_error(error.to_string()), }; installations.push(installation) } Ok(installations) } }<|fim▁end|>
{ Ok(_) => to_json(cx, plugins.list_plugins(aliases)), Err(error) => cx.throw_error(error.to_string()), }
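// Editor's note: illustrative sketch, not part of the FIM record above. All
// of these neon handlers funnel a Rust `Result` into either a JSON string or
// a thrown JS error; `to_json`/`to_json_or_throw` come from the crate
// prelude and their bodies are not shown here, so the helper below is only
// an assumed shape of that pattern.
fn to_json_or_throw_sketch<'a, T: serde::Serialize>(
    cx: &mut FunctionContext<'a>,
    value: &T,
) -> JsResult<'a, JsString> {
    match serde_json::to_string(value) {
        // Serialize succeeded: hand the JSON back to JS as a string
        Ok(json) => Ok(cx.string(json)),
        // Serialize failed: surface the error as a JS exception
        Err(error) => cx.throw_error(error.to_string()),
    }
}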
<|file_name|>mark.go<|end_file_name|><|fim▁begin|>/* Copyright (c) 2016 VMware, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package storage import ( "flag" "fmt" "golang.org/x/net/context" "github.com/vmware/govmomi/govc/cli" "github.com/vmware/govmomi/govc/flags" "github.com/vmware/govmomi/object" "github.com/vmware/govmomi/vim25/mo" "github.com/vmware/govmomi/vim25/types" ) type mark struct { *flags.HostSystemFlag ssd *bool local *bool } func init() { cli.Register("host.storage.mark", &mark{}) } func (cmd *mark) Register(ctx context.Context, f *flag.FlagSet) { cmd.HostSystemFlag, ctx = flags.NewHostSystemFlag(ctx) cmd.HostSystemFlag.Register(ctx, f) f.Var(flags.NewOptionalBool(&cmd.ssd), "ssd", "Mark as SSD") f.Var(flags.NewOptionalBool(&cmd.local), "local", "Mark as local") } func (cmd *mark) Process(ctx context.Context) error { if err := cmd.HostSystemFlag.Process(ctx); err != nil { return err } return nil }<|fim▁hole|> func (cmd *mark) Description() string { return `Mark device at DEVICE_PATH.` } func (cmd *mark) Mark(ctx context.Context, ss *object.HostStorageSystem, uuid string) error { var err error var task *object.Task if cmd.ssd != nil { if *cmd.ssd { task, err = ss.MarkAsSsd(ctx, uuid) } else { task, err = ss.MarkAsNonSsd(ctx, uuid) } if err != nil { return err } err = task.Wait(ctx) if err != nil { return err } } if cmd.local != nil { if *cmd.local { task, err = ss.MarkAsLocal(ctx, uuid) } else { task, err = ss.MarkAsNonLocal(ctx, uuid) } if err != nil { return err } err = task.Wait(ctx) if err != nil { return err } } return nil } func (cmd *mark) Run(ctx context.Context, f *flag.FlagSet) error { if f.NArg() != 1 { return fmt.Errorf("specify device path") } path := f.Args()[0] host, err := cmd.HostSystem() if err != nil { return err } ss, err := host.ConfigManager().StorageSystem(ctx) if err != nil { return err } var hss mo.HostStorageSystem err = ss.Properties(ctx, ss.Reference(), nil, &hss) if err != nil { return nil } for _, e := range hss.StorageDeviceInfo.ScsiLun { disk, ok := e.(*types.HostScsiDisk) if !ok { continue } if disk.DevicePath == path { return cmd.Mark(ctx, ss, disk.Uuid) } } return fmt.Errorf("%s not found", path) }<|fim▁end|>
func (cmd *mark) Usage() string { return "DEVICE_PATH" }
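// Editor's note: illustrative usage sketch, not part of the FIM record
// above. Combining the registered name "host.storage.mark", the optional
// -ssd/-local flags and the "DEVICE_PATH" usage string, an invocation would
// plausibly look like this (the device path is hypothetical):
//
//   govc host.storage.mark -ssd=true /vmfs/devices/disks/naa.600508b1001c...
//
// Because each flag is an *optional* bool, leaving one unset means "do not
// touch that attribute": Mark() only issues the corresponding MarkAs* task
// when the pointer is non-nil.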
<|file_name|>date-parser.test.js<|end_file_name|><|fim▁begin|>import DateParser from '../date-parser.js'; import ParsedInfo from '../../parsed-info'; import moment from 'moment'; Date.now = jest.fn(() => 1527130897000) test('Parses 12 Jan', () => { const dateParser = new DateParser(); dateParser.parse('12 Jan', ParsedInfo); const { value, startIndex, endIndex } = ParsedInfo.dateParser; expect({ value: value.unix(), startIndex, endIndex }) .toEqual({ value: 1515695400, startIndex: 0, endIndex: 6 }); }); test('Parses 22 May', () => { const dateParser = new DateParser(); dateParser.parse('22 May', ParsedInfo); const { value, startIndex, endIndex } = ParsedInfo.dateParser; expect({ value: value.unix(), startIndex, endIndex })<|fim▁hole|><|fim▁end|>
.toEqual({ value: 1526927400, startIndex: 0, endIndex: 6 }); });
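// Editor's note: illustrative sketch, not part of the FIM record above. The
// suite pins Date.now to a fixed instant in late May 2018, so bare dates are
// resolved against that "today". A variant that avoids hand-computed unix
// stamps (API shape taken from the tests above) could assert on the moment
// value itself:
test('Parses 1 Mar without a hardcoded stamp', () => {
  const dateParser = new DateParser();
  dateParser.parse('1 Mar', ParsedInfo);
  const { value } = ParsedInfo.dateParser;
  expect(value.format('DD MMM')).toBe('01 Mar');
});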
<|file_name|>auth.js<|end_file_name|><|fim▁begin|>import Immutable from 'immutable'; import * as ActionType from '../../actions/auth/auth'; const defaultState = Immutable.fromJS({ loggedIn: false, }); function authReducer(state = defaultState, action) { const { loggedIn, } = action; <|fim▁hole|> return state.merge(Immutable.fromJS({ loggedIn })); default: return state; } } export default authReducer;<|fim▁end|>
switch (action.type) { case ActionType.VERIFIED_LOGIN:
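// Editor's note: illustrative sketch, not part of the FIM record above; it
// only exercises the reducer contract: unknown actions fall through to the
// default state, and VERIFIED_LOGIN merges `loggedIn` into the Immutable map.
//
//   const initial = authReducer(undefined, { type: '@@INIT' });
//   initial.get('loggedIn'); // false
//   const next = authReducer(initial, {
//     type: ActionType.VERIFIED_LOGIN,
//     loggedIn: true,
//   });
//   next.get('loggedIn'); // true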
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Erkan Ozgur Yilmaz
#
# This module is part of oyProjectManager and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
"""
Database Module
===============

This is where all the magic happens.

.. versionadded:: 0.2.0
   SQLite3 Database:

   To hold the information about all the data created
   :class:`~oyProjectManager.models.project.Project`\ s,
   :class:`~oyProjectManager.models.sequence.Sequence`\ s,
   :class:`~oyProjectManager.models.shot.Shot`\ s,
   :class:`~oyProjectManager.models.asset.Asset`\ s and
   :class:`~oyProjectManager.models.version.VersionType`\ s , there is a
   ".metadata.db" file in the repository root. This SQLite3 database has all
   the information about everything.

   With this new extension it is much faster to query any data needed.

<|fim▁hole|>Querying data is very simple and fun. To get any kind of data from the
database, just call the ``db.setup()`` and then use ``db.query`` to get the
data.

For a simple example, let's get all the shots for a Sequence called
"TEST_SEQ" in the "TEST_PROJECT"::

  from oyProjectManager import db
  from oyProjectManager import Project, Sequence, Shot

  # setup the database session
  db.setup()

  all_shots = Shot.query().join(Sequence).\
      filter(Sequence.project.name=="TEST_PROJECT").\
      filter(Shot.sequence.name=="TEST_SEQ").all()

that's it.

"""

import os
import logging
import sqlalchemy
import oyProjectManager
from oyProjectManager.db.declarative import Base

# SQLAlchemy database engine
engine = None

# SQLAlchemy session manager
session = None
query = None

# SQLAlchemy metadata
metadata = None

database_url = None

# create a logger
logger = logging.getLogger(__name__)
#logger.setLevel(logging.WARNING)
logger.setLevel(logging.DEBUG)

def setup(database_url_in=None):
    """Utility function that helps to connect the system to the given
    database.

    Returns the created session

    :param database_url_in: The database address, default is None. If the
      database_url is skipped or given as None, the default database url
      from the :mod:`oyProjectManager.config` will be used. This is good,
      just call ``db.setup()`` and then use ``db.session`` and ``db.query``
      to get the data.
:returns: sqlalchemy.orm.session """ global engine global session global query global metadata global database_url # create engine # TODO: create tests for this if database_url_in is None: logger.debug("using the default database_url from the config file") # use the default database conf = oyProjectManager.conf database_url_in = conf.database_url # expand user and env variables if any # TODO: because the dialect part and the address part are now coming from # from one source, it is not possible to expand any variables in the path, # try to use SQLAlchemy to separate the dialect and the address part and # expand any data and then merge it again #database_url_in = os.path.expanduser( # os.path.expandvars( # os.path.expandvars( # database_url_in # ) # ) #) while "$" in database_url_in or "~" in database_url_in: database_url_in = os.path.expanduser( os.path.expandvars( database_url_in ) ) database_url = database_url_in logger.debug("setting up database in %s" % database_url) engine = sqlalchemy.create_engine(database_url, echo=False) # create the tables metadata = Base.metadata metadata.create_all(engine) # create the Session class Session = sqlalchemy.orm.sessionmaker(bind=engine) # create and save session object to session session = Session() query = session.query # initialize the db __init_db__() # TODO: create a test to check if the returned session is session return session def __init_db__(): """initializes the just setup database It adds: - Users - VersionTypes to the database. """ logger.debug("db is newly created, initializing the db") global query global session # get the users from the config from oyProjectManager import conf # ------------------------------------------------------ # create the users from oyProjectManager.models.auth import User # get all users from db users_from_db = query(User).all() for user_data in conf.users_data: name = user_data.get("name") initials = user_data.get("initials") email = user_data.get("email") user_from_config = User(name, initials, email) if user_from_config not in users_from_db: session.add(user_from_config) # ------------------------------------------------------ # add the VersionTypes from oyProjectManager.models.version import VersionType version_types_from_db = query(VersionType).all() for version_type in conf.version_types: version_type_from_conf = VersionType(**version_type) if version_type_from_conf not in version_types_from_db: session.add(version_type_from_conf) session.commit() logger.debug("finished initialization of the db")<|fim▁end|>
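# Editor's note: illustrative sketch, not part of the FIM record above; it
# restates the query pattern documented in the module docstring (the
# sequence name is hypothetical).
#
#   from oyProjectManager import db
#   from oyProjectManager import Sequence, Shot
#
#   db.setup()  # no URL given, so conf.database_url is used
#   shots = db.query(Shot).join(Sequence)\
#       .filter(Sequence.name == "TEST_SEQ").all()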
<|file_name|>Pagination.tsx<|end_file_name|><|fim▁begin|>import Link from "next/link";
import { FunctionComponent } from "react";

import { PagedCollection } from "types/Collection";

interface Props {
  collection: PagedCollection<any>;
}

const Pagination: FunctionComponent<Props> = ({ collection }) => {
  const view = collection && collection['hydra:view'];
  if (!view) return null;

  const {
    'hydra:first': first,
    'hydra:previous': previous,
    'hydra:next': next,
    'hydra:last': last
  } = view;

  return (
    <nav aria-label="Page navigation">
      <Link href={first ? first : '#'}>
        <a className={`btn btn-primary${previous ? '' : ' disabled'}`}>
          <span aria-hidden="true">&lArr;</span> First
        </a>
      </Link>
      <Link href={previous ? previous : '#'}>
        <a className={`btn btn-primary${previous ? '' : ' disabled'}`}>
          <span aria-hidden="true">&larr;</span> Previous
        </a>
      </Link>
      <Link href={next ? next : '#'}>
        <a className={`btn btn-primary${next ? '' : ' disabled'}`}>
          Next <span aria-hidden="true">&rarr;</span>
        </a>
      </Link>
      <Link href={last ? last : '#'}>
        <a className={`btn btn-primary${next ? '' : ' disabled'}`}>
          Last <span aria-hidden="true">&rArr;</span>
        </a>
      </Link>
    </nav>
  );
};<|fim▁hole|>
<|file_name|>JointPDFTest.cc<|end_file_name|><|fim▁begin|>/* This file is part of MAUS: http://micewww.pp.rl.ac.uk:8080/projects/maus * * MAUS is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * MAUS is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with MAUS. If not, see <http://www.gnu.org/licenses/>. * */ #include <stdlib.h> #include "src/common_cpp/Recon/Bayes/JointPDF.hh" #include "src/common_cpp/Recon/Bayes/PDF.hh" #include "gtest/gtest.h" namespace MAUS { <|fim▁hole|> protected: JointPDFTest() {} virtual ~JointPDFTest() {} virtual void SetUp() { // Sets up a JointPDF object for testing. std::string lname("JointPDF"); std::string pname("prob_station3"); _shift_min = -8.; _shift_max = 8.; _bin_width = 0.01; _jointPDF = new JointPDF(lname, _bin_width, _shift_min, _shift_max); double sigma = 1.5; // mm int number_of_tosses = 100000; _jointPDF->Build("gaussian", sigma, number_of_tosses); } virtual void TearDown() { delete _jointPDF; } JointPDF *_jointPDF; // err can be made smaller by defining a smaller _bin_width // That will increase the test time, so I'm keeping things // large. static constexpr double err = 0.01; double _shift_min; double _shift_max; double _bin_width; }; TEST_F(JointPDFTest, test_binning) { // Bin numbers run from 0 to nbins. // (bin 0 and nbins+1 are underflow and overflow) // Must assert that bin 0 and bin nbins have their centres // falling on the extremes we defined for our PDF. // Must also check that the middle bin corresponds to 0. int n_bins = _jointPDF->n_bins(); // centre of bin 1 double bin_1_centre = _jointPDF->GetJointPDF()->GetXaxis()->GetBinCenter(1); EXPECT_NEAR(_shift_min, bin_1_centre, 1e-6); // centre of middle bin (x=0). double middle_bin_centre = _jointPDF->GetJointPDF()->GetXaxis()->GetBinCenter(n_bins/2+1); EXPECT_NEAR(0., middle_bin_centre, 1e-6); // centre of bin nbins double bin_n_bins_centre = _jointPDF->GetJointPDF()->GetXaxis()->GetBinCenter(n_bins); EXPECT_NEAR(_shift_max, bin_n_bins_centre, 1e-6); } TEST_F(JointPDFTest, test_mean) { double expected_mean = 0; TH1D *likelihood = reinterpret_cast<TH1D*> ((_jointPDF->GetLikelihood(expected_mean)).Clone("likelihood")); double mean = likelihood->GetMean(); EXPECT_NEAR(expected_mean, mean, err); } TEST_F(JointPDFTest, test_posterior) { // Build a posterior. double new_shift = 1.2; TH1D *likelihood = reinterpret_cast<TH1D*> (_jointPDF->GetLikelihood(new_shift).Clone("likelihood")); EXPECT_NEAR(new_shift, likelihood->GetMean(), err); std::string pname("prob_station"); PDF *pdf = new PDF(pname, _bin_width, _shift_min, _shift_max); // Assert the prior is flat. EXPECT_NEAR(0., pdf->GetMean(), err); pdf->ComputeNewPosterior(*likelihood); // Check if the posterior is correct. EXPECT_NEAR(new_shift, pdf->GetMean(), err); delete pdf; } } // ~namespace MAUS<|fim▁end|>
class JointPDFTest : public ::testing::Test {
<|file_name|>test_window.py<|end_file_name|><|fim▁begin|># # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import inspect from pyspark import pandas as ps from pyspark.pandas.exceptions import PandasNotImplementedError from pyspark.pandas.missing.window import ( MissingPandasLikeExpanding, MissingPandasLikeRolling, MissingPandasLikeExpandingGroupby, MissingPandasLikeRollingGroupby, ) from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils class ExpandingRollingTest(PandasOnSparkTestCase, TestUtils): def test_missing(self): psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5, 6, 7, 8, 9]}) # Expanding functions missing_functions = inspect.getmembers(MissingPandasLikeExpanding, inspect.isfunction) unsupported_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function" ] for name in unsupported_functions: with self.assertRaisesRegex(<|fim▁hole|> getattr(psdf.expanding(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.a.expanding(1), name)() # Series deprecated_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function" ] for name in deprecated_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.expanding(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.a.expanding(1), name)() # Series # Rolling functions missing_functions = inspect.getmembers(MissingPandasLikeRolling, inspect.isfunction) unsupported_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function" ] for name in unsupported_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*not implemented( yet\\.|\\. 
.+)".format(name), ): getattr(psdf.a.rolling(1), name)() # Series deprecated_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function" ] for name in deprecated_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.a.rolling(1), name)() # Series # Expanding properties missing_properties = inspect.getmembers( MissingPandasLikeExpanding, lambda o: isinstance(o, property) ) unsupported_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "unsupported_property" ] for name in unsupported_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.expanding(1), name) # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.a.expanding(1), name) # Series deprecated_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "deprecated_property" ] for name in deprecated_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.expanding(1), name) # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.a.expanding(1), name) # Series # Rolling properties missing_properties = inspect.getmembers( MissingPandasLikeRolling, lambda o: isinstance(o, property) ) unsupported_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "unsupported_property" ] for name in unsupported_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.a.rolling(1), name)() # Series deprecated_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "deprecated_property" ] for name in deprecated_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.a.rolling(1), name)() # Series def test_missing_groupby(self): psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5, 6, 7, 8, 9]}) # Expanding functions missing_functions = inspect.getmembers( MissingPandasLikeExpandingGroupby, inspect.isfunction ) unsupported_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function" ] for name in unsupported_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.groupby("a").expanding(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*not implemented( yet\\.|\\. 
.+)".format(name), ): getattr(psdf.a.groupby(psdf.a).expanding(1), name)() # Series deprecated_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function" ] for name in deprecated_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.groupby("a").expanding(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.a.groupby(psdf.a).expanding(1), name)() # Series # Rolling functions missing_functions = inspect.getmembers(MissingPandasLikeRollingGroupby, inspect.isfunction) unsupported_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function" ] for name in unsupported_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.groupby("a").rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.a.groupby(psdf.a).rolling(1), name)() # Series deprecated_functions = [ name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function" ] for name in deprecated_functions: with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.a.rolling(1), name)() # Series # Expanding properties missing_properties = inspect.getmembers( MissingPandasLikeExpandingGroupby, lambda o: isinstance(o, property) ) unsupported_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "unsupported_property" ] for name in unsupported_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.groupby("a").expanding(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.a.groupby(psdf.a).expanding(1), name)() # Series deprecated_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "deprecated_property" ] for name in deprecated_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.expanding(1), name) # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name) ): getattr(psdf.a.expanding(1), name) # Series # Rolling properties missing_properties = inspect.getmembers( MissingPandasLikeRollingGroupby, lambda o: isinstance(o, property) ) unsupported_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "unsupported_property" ] for name in unsupported_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name), ): getattr(psdf.groupby("a").rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*not implemented( yet\\.|\\. 
.+)".format(name), ): getattr(psdf.a.groupby(psdf.a).rolling(1), name)() # Series deprecated_properties = [ name for (name, type_) in missing_properties if type_.fget.__name__ == "deprecated_property" ] for name in deprecated_properties: with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.rolling(1), name)() # Frame with self.assertRaisesRegex( PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name) ): getattr(psdf.a.rolling(1), name)() # Series if __name__ == "__main__": import unittest from pyspark.pandas.tests.test_window import * # noqa: F401 try: import xmlrunner # type: ignore[import] testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) except ImportError: testRunner = None unittest.main(testRunner=testRunner, verbosity=2)<|fim▁end|>
PandasNotImplementedError, "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name), ):
<|file_name|>sql_rewrite.cc<|end_file_name|><|fim▁begin|>/* Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ /* In here, we rewrite queries (to obfuscate passwords etc.) that need it before we log them. Stored procedures may also rewrite their statements (to show the actual values of their variables etc.). There is currently no scenario where a statement can be eligible for both rewrites. (see sp_instr.cc) Special consideration will need to be taken if this assertion is changed. We also do not intersect with query cache at this time, as QC only caches SELECTs (which we don't rewrite). If and when QC becomes more general, it should probably cache the rewritten query along with the user-submitted one. (see sql_parse.cc) */ #include "auth_common.h" // append_user #include "sql_parse.h" // get_current_user #include "sql_show.h" // append_identifier #include "sp_head.h" // struct set_var_base #include "rpl_slave.h" // SLAVE_SQL, SLAVE_IO /** Append a key/value pair to a string, with an optional preceeding comma. For numeric values. @param str The string to append to @param comma Prepend a comma? @param txt C-string, must end in a space @param len strlen(txt) @param val numeric value @param cond only append if this evaluates to true @retval false if any subsequent key/value pair would be the first */ bool append_int(String *str, bool comma, const char *txt, size_t len, long val, int cond) { if (cond) { String numbuf(42); if (comma) str->append(STRING_WITH_LEN(", ")); str->append(txt,len); numbuf.set((longlong)val,&my_charset_bin); str->append(numbuf); return true; } return comma; } /** Append a key/value pair to a string if the value is non-NULL, with an optional preceeding comma. @param str The string to append to @param comma Prepend a comma? @param key C-string: the key, must be non-NULL @param val C-string: the value<|fim▁hole|> @retval false if any subsequent key/value pair would be the first */ bool append_str(String *str, bool comma, const char *key, const char *val) { if (val) { if (comma) str->append(STRING_WITH_LEN(", ")); str->append(key); str->append(STRING_WITH_LEN(" '")); str->append(val); str->append(STRING_WITH_LEN("'")); return true; } return comma; } /** Rewrite a GRANT statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. 
*/ static void mysql_rewrite_grant(THD *thd, String *rlb) { LEX *lex= thd->lex; TABLE_LIST *first_table= (TABLE_LIST*) lex->select_lex->table_list.first; bool comma= FALSE, comma_inner; String cols(1024); int c; rlb->append(STRING_WITH_LEN("GRANT ")); if (lex->all_privileges) rlb->append(STRING_WITH_LEN("ALL PRIVILEGES")); else { ulong priv; for (c= 0, priv= SELECT_ACL; priv <= GLOBAL_ACLS; c++, priv <<= 1) { if (priv == GRANT_ACL) continue; comma_inner= FALSE; if (lex->columns.elements) // show columns, if any { class LEX_COLUMN *column; List_iterator <LEX_COLUMN> column_iter(lex->columns); cols.length(0); cols.append(STRING_WITH_LEN(" (")); /* If the statement was GRANT SELECT(f2), INSERT(f3), UPDATE(f1,f3, f2), our list cols will contain the order f2, f3, f1, and thus that's the order we'll recreate the privilege: UPDATE (f2, f3, f1) */ while ((column= column_iter++)) { if (column->rights & priv) { if (comma_inner) cols.append(STRING_WITH_LEN(", ")); else comma_inner= TRUE; cols.append(column->column.ptr(),column->column.length()); } } cols.append(STRING_WITH_LEN(")")); } if (comma_inner || (lex->grant & priv)) // show privilege name { if (comma) rlb->append(STRING_WITH_LEN(", ")); else comma= TRUE; rlb->append(command_array[c],command_lengths[c]); if (!(lex->grant & priv)) // general outranks specific rlb->append(cols); } } if (!comma) // no privs, default to USAGE rlb->append(STRING_WITH_LEN("USAGE")); } rlb->append(STRING_WITH_LEN(" ON ")); switch(lex->type) { case TYPE_ENUM_PROCEDURE: rlb->append(STRING_WITH_LEN("PROCEDURE ")); break; case TYPE_ENUM_FUNCTION: rlb->append(STRING_WITH_LEN("FUNCTION ")); break; default: break; } if (first_table) { append_identifier(thd, rlb, first_table->db, strlen(first_table->db)); rlb->append(STRING_WITH_LEN(".")); append_identifier(thd, rlb, first_table->table_name, strlen(first_table->table_name)); } else { if (lex->current_select()->db) append_identifier(thd, rlb, lex->current_select()->db, strlen(lex->current_select()->db)); else rlb->append("*"); rlb->append(STRING_WITH_LEN(".*")); } rlb->append(STRING_WITH_LEN(" TO ")); { LEX_USER *user_name, *tmp_user_name; List_iterator <LEX_USER> user_list(lex->users_list); bool comma= FALSE; while ((tmp_user_name= user_list++)) { if ((user_name= get_current_user(thd, tmp_user_name))) { append_user(thd, rlb, user_name, comma, true); comma= TRUE; } } } if (lex->ssl_type != SSL_TYPE_NOT_SPECIFIED) { rlb->append(STRING_WITH_LEN(" REQUIRE")); switch (lex->ssl_type) { case SSL_TYPE_SPECIFIED: if (lex->x509_subject) { rlb->append(STRING_WITH_LEN(" SUBJECT '")); rlb->append(lex->x509_subject); rlb->append(STRING_WITH_LEN("'")); } if (lex->x509_issuer) { rlb->append(STRING_WITH_LEN(" ISSUER '")); rlb->append(lex->x509_issuer); rlb->append(STRING_WITH_LEN("'")); } if (lex->ssl_cipher) { rlb->append(STRING_WITH_LEN(" CIPHER '")); rlb->append(lex->ssl_cipher); rlb->append(STRING_WITH_LEN("'")); } break; case SSL_TYPE_X509: rlb->append(STRING_WITH_LEN(" X509")); break; case SSL_TYPE_ANY: rlb->append(STRING_WITH_LEN(" SSL")); break; case SSL_TYPE_NOT_SPECIFIED: /* fall-thru */ case SSL_TYPE_NONE: rlb->append(STRING_WITH_LEN(" NONE")); break; } } if (lex->mqh.specified_limits || (lex->grant & GRANT_ACL)) { rlb->append(STRING_WITH_LEN(" WITH")); if (lex->grant & GRANT_ACL) rlb->append(STRING_WITH_LEN(" GRANT OPTION")); append_int(rlb, false, STRING_WITH_LEN(" MAX_QUERIES_PER_HOUR "), lex->mqh.questions, lex->mqh.specified_limits & USER_RESOURCES::QUERIES_PER_HOUR); append_int(rlb, false, STRING_WITH_LEN(" 
MAX_UPDATES_PER_HOUR "), lex->mqh.updates, lex->mqh.specified_limits & USER_RESOURCES::UPDATES_PER_HOUR); append_int(rlb, false, STRING_WITH_LEN(" MAX_CONNECTIONS_PER_HOUR "), lex->mqh.conn_per_hour, lex->mqh.specified_limits & USER_RESOURCES::CONNECTIONS_PER_HOUR); append_int(rlb, false, STRING_WITH_LEN(" MAX_USER_CONNECTIONS "), lex->mqh.user_conn, lex->mqh.specified_limits & USER_RESOURCES::USER_CONNECTIONS); } } /** Rewrite a SET statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. */ static void mysql_rewrite_set(THD *thd, String *rlb) { LEX *lex= thd->lex; List_iterator_fast<set_var_base> it(lex->var_list); set_var_base *var; bool comma= FALSE; rlb->append(STRING_WITH_LEN("SET ")); while ((var= it++)) { if (comma) rlb->append(STRING_WITH_LEN(",")); else comma= TRUE; var->print(thd, rlb); } } /** Rewrite CREATE USER statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. */ static void mysql_rewrite_create_user(THD *thd, String *rlb) { LEX *lex= thd->lex; LEX_USER *user_name, *tmp_user_name; List_iterator <LEX_USER> user_list(lex->users_list); bool comma= FALSE; rlb->append(STRING_WITH_LEN("CREATE USER ")); while ((tmp_user_name= user_list++)) { if ((user_name= get_current_user(thd, tmp_user_name))) { append_user(thd, rlb, user_name, comma, TRUE); comma= TRUE; } } } /** Rewrite a CHANGE MASTER statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. */ static void mysql_rewrite_change_master(THD *thd, String *rlb) { LEX *lex= thd->lex; rlb->append(STRING_WITH_LEN("CHANGE MASTER TO")); if (lex->mi.host) { rlb->append(STRING_WITH_LEN(" MASTER_HOST = '")); rlb->append(lex->mi.host); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.user) { rlb->append(STRING_WITH_LEN(" MASTER_USER = '")); rlb->append(lex->mi.user); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.password) { rlb->append(STRING_WITH_LEN(" MASTER_PASSWORD = <secret>")); } if (lex->mi.port) { rlb->append(STRING_WITH_LEN(" MASTER_PORT = ")); rlb->append_ulonglong(lex->mi.port); } if (lex->mi.connect_retry) { rlb->append(STRING_WITH_LEN(" MASTER_CONNECT_RETRY = ")); rlb->append_ulonglong(lex->mi.connect_retry); } if (lex->mi.ssl) { rlb->append(STRING_WITH_LEN(" MASTER_SSL = ")); rlb->append(lex->mi.ssl == LEX_MASTER_INFO::LEX_MI_ENABLE ? 
"1" : "0"); } if (lex->mi.ssl_ca) { rlb->append(STRING_WITH_LEN(" MASTER_SSL_CA = '")); rlb->append(lex->mi.ssl_ca); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.ssl_capath) { rlb->append(STRING_WITH_LEN(" MASTER_SSL_CAPATH = '")); rlb->append(lex->mi.ssl_capath); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.ssl_cert) { rlb->append(STRING_WITH_LEN(" MASTER_SSL_CERT = '")); rlb->append(lex->mi.ssl_cert); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.ssl_cipher) { rlb->append(STRING_WITH_LEN(" MASTER_SSL_CIPHER = '")); rlb->append(lex->mi.ssl_cipher); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.ssl_key) { rlb->append(STRING_WITH_LEN(" MASTER_SSL_KEY = '")); rlb->append(lex->mi.ssl_key); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.log_file_name) { rlb->append(STRING_WITH_LEN(" MASTER_LOG_FILE = '")); rlb->append(lex->mi.log_file_name); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.pos) { rlb->append(STRING_WITH_LEN(" MASTER_LOG_POS = ")); rlb->append_ulonglong(lex->mi.pos); } if (lex->mi.relay_log_name) { rlb->append(STRING_WITH_LEN(" RELAY_LOG_FILE = '")); rlb->append(lex->mi.relay_log_name); rlb->append(STRING_WITH_LEN("'")); } if (lex->mi.relay_log_pos) { rlb->append(STRING_WITH_LEN(" RELAY_LOG_POS = ")); rlb->append_ulonglong(lex->mi.relay_log_pos); } if (lex->mi.ssl_verify_server_cert) { rlb->append(STRING_WITH_LEN(" MASTER_SSL_VERIFY_SERVER_CERT = ")); rlb->append(lex->mi.ssl_verify_server_cert == LEX_MASTER_INFO::LEX_MI_ENABLE ? "1" : "0"); } if (lex->mi.repl_ignore_server_ids_opt) { bool first= TRUE; rlb->append(STRING_WITH_LEN(" IGNORE_SERVER_IDS = ( ")); for (uint i= 0; i < lex->mi.repl_ignore_server_ids.elements; i++) { ulong s_id; get_dynamic(&lex->mi.repl_ignore_server_ids, (uchar*) &s_id, i); if (first) first= FALSE; else rlb->append(STRING_WITH_LEN(", ")); rlb->append_ulonglong(s_id); } rlb->append(STRING_WITH_LEN(" )")); } if (lex->mi.heartbeat_opt != LEX_MASTER_INFO::LEX_MI_UNCHANGED) { rlb->append(STRING_WITH_LEN(" MASTER_HEARTBEAT_PERIOD = ")); if (lex->mi.heartbeat_opt == LEX_MASTER_INFO::LEX_MI_DISABLE) rlb->append(STRING_WITH_LEN("0")); else { char buf[64]; snprintf(buf, 64, "%f", lex->mi.heartbeat_period); rlb->append(buf); } } } /** Rewrite a START SLAVE statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. 
*/ static void mysql_rewrite_start_slave(THD *thd, String *rlb) { LEX *lex= thd->lex; if (!lex->slave_connection.password) return; rlb->append(STRING_WITH_LEN("START SLAVE")); if (lex->slave_thd_opt & SLAVE_IO) rlb->append(STRING_WITH_LEN(" IO_THREAD")); /* we have printed the IO THREAD related options */ if (lex->slave_thd_opt & SLAVE_IO && lex->slave_thd_opt & SLAVE_SQL) rlb->append(STRING_WITH_LEN(",")); if (lex->slave_thd_opt & SLAVE_SQL) rlb->append(STRING_WITH_LEN(" SQL_THREAD")); /* until options */ if (lex->mi.log_file_name || lex->mi.relay_log_name) { rlb->append(STRING_WITH_LEN(" UNTIL")); if (lex->mi.log_file_name) { rlb->append(STRING_WITH_LEN(" MASTER_LOG_FILE = '")); rlb->append(lex->mi.log_file_name); rlb->append(STRING_WITH_LEN("', ")); rlb->append(STRING_WITH_LEN("MASTER_LOG_POS = ")); rlb->append_ulonglong(lex->mi.pos); } if (lex->mi.relay_log_name) { rlb->append(STRING_WITH_LEN(" RELAY_LOG_FILE = '")); rlb->append(lex->mi.relay_log_name); rlb->append(STRING_WITH_LEN("', ")); rlb->append(STRING_WITH_LEN("RELAY_LOG_POS = ")); rlb->append_ulonglong(lex->mi.relay_log_pos); } } /* connection options */ if (lex->slave_connection.user) { rlb->append(STRING_WITH_LEN(" USER = '")); rlb->append(lex->slave_connection.user); rlb->append(STRING_WITH_LEN("'")); } if (lex->slave_connection.password) rlb->append(STRING_WITH_LEN(" PASSWORD = '<secret>'")); if (lex->slave_connection.plugin_auth) { rlb->append(STRING_WITH_LEN(" DEFAULT_AUTH = '")); rlb->append(lex->slave_connection.plugin_auth); rlb->append(STRING_WITH_LEN("'")); } if (lex->slave_connection.plugin_dir) { rlb->append(STRING_WITH_LEN(" PLUGIN_DIR = '")); rlb->append(lex->slave_connection.plugin_dir); rlb->append(STRING_WITH_LEN("'")); } } /** Rewrite a SERVER OPTIONS clause (for CREATE SERVER and ALTER SERVER). @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. */ static void mysql_rewrite_server_options(THD *thd, String *rlb) { LEX *lex= thd->lex; rlb->append(STRING_WITH_LEN(" OPTIONS ( ")); rlb->append(STRING_WITH_LEN("PASSWORD '<secret>'")); append_str(rlb, true, "USER", lex->server_options.get_username()); append_str(rlb, true, "HOST", lex->server_options.get_host()); append_str(rlb, true, "DATABASE", lex->server_options.get_db()); append_str(rlb, true, "OWNER", lex->server_options.get_owner()); append_str(rlb, true, "SOCKET", lex->server_options.get_socket()); append_int(rlb, true, STRING_WITH_LEN("PORT "), lex->server_options.get_port(), lex->server_options.get_port() != Server_options::PORT_NOT_SET); rlb->append(STRING_WITH_LEN(" )")); } /** Rewrite a CREATE SERVER statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. */ static void mysql_rewrite_create_server(THD *thd, String *rlb) { LEX *lex= thd->lex; if (!lex->server_options.get_password()) return; rlb->append(STRING_WITH_LEN("CREATE SERVER ")); rlb->append(lex->server_options.m_server_name.str ? lex->server_options.m_server_name.str : ""); rlb->append(STRING_WITH_LEN(" FOREIGN DATA WRAPPER '")); rlb->append(lex->server_options.get_scheme() ? lex->server_options.get_scheme() : ""); rlb->append(STRING_WITH_LEN("'")); mysql_rewrite_server_options(thd, rlb); } /** Rewrite a ALTER SERVER statement. @param thd The THD to rewrite for. @param rlb An empty String object to put the rewritten query in. 
*/ static void mysql_rewrite_alter_server(THD *thd, String *rlb) { LEX *lex= thd->lex; if (!lex->server_options.get_password()) return; rlb->append(STRING_WITH_LEN("ALTER SERVER ")); rlb->append(lex->server_options.m_server_name.str ? lex->server_options.m_server_name.str : ""); mysql_rewrite_server_options(thd, rlb); } /** Rewrite a query (to obfuscate passwords etc.) Side-effects: thd->rewritten_query will contain a rewritten query, or be cleared if no rewriting took place. @param thd The THD to rewrite for. */ void mysql_rewrite_query(THD *thd) { String *rlb= &thd->rewritten_query; rlb->free(); if (thd->lex->contains_plaintext_password) { switch(thd->lex->sql_command) { case SQLCOM_GRANT: mysql_rewrite_grant(thd, rlb); break; case SQLCOM_SET_OPTION: mysql_rewrite_set(thd, rlb); break; case SQLCOM_CREATE_USER: mysql_rewrite_create_user(thd, rlb); break; case SQLCOM_CHANGE_MASTER: mysql_rewrite_change_master(thd, rlb); break; case SQLCOM_SLAVE_START: mysql_rewrite_start_slave(thd, rlb); break; case SQLCOM_CREATE_SERVER: mysql_rewrite_create_server(thd, rlb); break; case SQLCOM_ALTER_SERVER: mysql_rewrite_alter_server(thd, rlb); break; default: /* unhandled query types are legal. */ break; } } }<|fim▁end|>
<|file_name|>AnalysisTool.hh<|end_file_name|><|fim▁begin|>#ifndef AnalysisTool_h #define AnalysisTool_h 1 #include "StackingTool.hh" #include <sstream> // stringstream using namespace std; class AnalysisTool { public:<|fim▁hole|> virtual bool getInterest(); virtual bool getInterest(int particle, int sturface); virtual bool getInterest(int particle, int sturface, int creationProcess, int flagPhotoElectron); virtual bool getInterest(int particle, int sturface, int volume); virtual bool getInterest(int particle, int surface, double energy); virtual string processData(); virtual string processData(int id, float energy); virtual string processData(int creation_process); private: }; #endif<|fim▁end|>
AnalysisTool(); virtual ~AnalysisTool(); virtual void PrintTool();
<|file_name|>certs.go<|end_file_name|><|fim▁begin|>package middleware import "github.com/gin-gonic/gin" import "github.com/helderfarias/ges/api/util" func CertifiedConfig(certs *util.Certified) gin.HandlerFunc { return func(c *gin.Context) { c.Set("certs", certs)<|fim▁hole|> c.Next() } }<|fim▁end|>
<|file_name|>RectTransform.ts<|end_file_name|><|fim▁begin|>/// <reference path="Transform.ts"/> module WOZLLA { /** * RectTransform is a subclass of {@link WOZLLA.Transform}, define a rect region * for {@WOZLLA.GameObject} and a anchor mode to specify how to related to it's parent. * @class WOZLLA.RectTransform */ export class RectTransform extends Transform { public static getMode(name):number { var names = name.split('_'); var value = 0; switch(names[0]) { case 'Left': value |= RectTransform.ANCHOR_LEFT; break; case 'Right': value |= RectTransform.ANCHOR_RIGHT; break; case 'HStrength': value |= RectTransform.ANCHOR_HORIZONTAL_STRENGTH; break; default: value |= RectTransform.ANCHOR_CENTER; break; } switch(names[1]) { case 'Top': value |= RectTransform.ANCHOR_TOP; break; case 'Bottom': value |= RectTransform.ANCHOR_BOTTOM; break; case 'VStrength': value |= RectTransform.ANCHOR_VERTICAL_STRENGTH; break; default: value |= RectTransform.ANCHOR_MIDDLE; break; } return value; } /** * vertical anchor mode * @property {number} ANCHOR_TOP * @readonly * @static */ public static ANCHOR_TOP = 0x1; /** * vertical anchor mode * @property {number} ANCHOR_MIDDLE * @readonly * @static */ public static ANCHOR_MIDDLE = 0x10; /** * vertical anchor mode * @property {number} ANCHOR_BOTTOM * @readonly * @static */ public static ANCHOR_BOTTOM = 0x100; /** * vertical anchor mode * @property {number} ANCHOR_VERTICAL_STRENGTH * @readonly * @static */ public static ANCHOR_VERTICAL_STRENGTH = 0x1000; /** * horizontal anchor mode * @property {number} ANCHOR_LEFT * @readonly * @static */ public static ANCHOR_LEFT = 0x10000; /** * horizontal anchor mode * @property {number} ANCHOR_CENTER * @readonly * @static */ public static ANCHOR_CENTER = 0x100000; /** * horizontal anchor mode * @property {number} ANCHOR_RIGHT * @readonly * @static */ public static ANCHOR_RIGHT = 0x1000000; /** * horizontal anchor mode * @property {number} ANCHOR_HORIZONTAL_STRENGTH * @readonly * @static */ public static ANCHOR_HORIZONTAL_STRENGTH = 0x10000000; /** * get or set width, this property only effect on fixed size mode * @property {number} width */ get width():number { return this._width; } set width(value:number) { if(this._width === value) return; this._width = value; this.dirty = true; } /** * get or set height, this property only effect on fixed size mode * @property {number} height */ get height():number { return this._height; } set height(value:number) { if(this._height === value) return; this._height = value; this.dirty = true; } /** * get or set top * @property {number} top */ get top():number { return this._top; } set top(value:number) { if(this._top === value) return; this._top = value; this.dirty = true; } /** * get or set left * @property {number} left */ get left():number { return this._left; } set left(value:number) { if(this._left === value) return; this._left = value; this.dirty = true; } /** * get or set right * @property {number} right */ get right():number { return this._right; } set right(value:number) { if(this._right === value) return; this._right = value; this.dirty = true; } /** * get or set bottom * @property {number} bottom */ get bottom():number { return this._bottom; } set bottom(value:number) { if(this._bottom === value) return; this._bottom = value; this.dirty = true; } /** * get or set px, this only effect on strengthen mode * @property {number} px specify x coords */ get px():number { return this._px; } set px(value:number) { if(this._px === value) return; this._px = value; this.dirty = true; } /** * get or 
set py, this only effect on strengthen mode * @property {number} py specify y coords */ get py():number { return this._py; } set py(value:number) { if(this._py === value) return; this._py = value; this.dirty = true; } /** * get or set anchor mode * @property {number} anchorMode */ get anchorMode():number { return this._anchorMode; } set anchorMode(value:number) { if(this._anchorMode === value) return; this._anchorMode = value; this.dirty = true; } _width:number = 0; _height:number = 0; _top:number = 0; _left:number = 0; _right:number = 0; _bottom:number = 0; _px:number = 0; _py:number = 0; _anchorMode = RectTransform.ANCHOR_CENTER | RectTransform.ANCHOR_MIDDLE;<|fim▁hole|> /** * set rect transform * @param {WOZLLA.RectTransform} rectTransform */ set(rectTransform:any) { var anchorMode:any = rectTransform.anchorMode; if(typeof anchorMode === 'string') { anchorMode = RectTransform.getMode(anchorMode); } this._anchorMode = anchorMode; this._width = rectTransform.width || 0; this._height = rectTransform.height || 0; this._top = rectTransform.top || 0; this._left = rectTransform.left || 0; this._right = rectTransform.right || 0; this._bottom = rectTransform.bottom || 0; this._px = rectTransform.px || 0; this._py = rectTransform.py || 0; if(typeof rectTransform.relative !== 'undefined') { this._relative = rectTransform.relative; } this.dirty = true; } superSet(transform:Transform) { super.set(transform); } /** * transform with parent transform * @param {WOZLLA.Transform} parentTransform */ transform(parentTransform:Transform=null) { var m, R, p:RectTransform; if(!parentTransform || !this._relative || !(parentTransform instanceof RectTransform)) { p = Director.getInstance().viewRectTransform; } else { p = <RectTransform>parentTransform; } m = this._anchorMode; R = RectTransform; if((m & R.ANCHOR_LEFT) === R.ANCHOR_LEFT) { this.x = this._px; } else if((m & R.ANCHOR_RIGHT) === R.ANCHOR_RIGHT) { this.x = p._width + this._px; } else if((m & R.ANCHOR_HORIZONTAL_STRENGTH) === R.ANCHOR_HORIZONTAL_STRENGTH) { this.x = this._left; this._width = p._width - this._left - this._right; } else { this.x = p._width/2 + this._px; } if((m & R.ANCHOR_TOP) === R.ANCHOR_TOP) { this.y = this._py; } else if((m & R.ANCHOR_BOTTOM) === R.ANCHOR_BOTTOM) { this.y = p._height + this._py; } else if((m & R.ANCHOR_VERTICAL_STRENGTH) === R.ANCHOR_VERTICAL_STRENGTH) { this.y = this._top; this._height = p._height - this._top - this._bottom; } else { this.y = p._height/2 + this._py; } super.transform(parentTransform); } protected getRootMatrix() { return Director.getInstance().stage.viewRectTransform.worldMatrix; } } }<|fim▁end|>
<|file_name|>delta history.js<|end_file_name|><|fim▁begin|>var config = { type: Phaser.CANVAS, width: 800, height: 600, parent: 'phaser-example', backgroundColor: '#2d2d2d', useTicker: true, scene: {<|fim▁hole|> update: update } }; var image; var time; var delta; var speed = (600 / 2) / 1000; var game = new Phaser.Game(config); function preload () { this.load.image('bunny', 'assets/sprites/bunny.png'); this.load.atlas('gems', 'assets/tests/columns/gems.png', 'assets/tests/columns/gems.json'); } function create () { delta = this.add.text(32, 32); time = this.add.text(500, 400); image = this.add.image(0, 200, 'bunny'); this.anims.create({ key: 'diamond', frames: this.anims.generateFrameNames('gems', { prefix: 'diamond_', end: 15, zeroPad: 4 }), repeat: -1 }); this.anims.create({ key: 'prism', frames: this.anims.generateFrameNames('gems', { prefix: 'prism_', end: 6, zeroPad: 4 }), repeat: -1 }); this.anims.create({ key: 'ruby', frames: this.anims.generateFrameNames('gems', { prefix: 'ruby_', end: 6, zeroPad: 4 }), repeat: -1 }); this.anims.create({ key: 'square', frames: this.anims.generateFrameNames('gems', { prefix: 'square_', end: 14, zeroPad: 4 }), repeat: -1 }); this.add.sprite(400, 100, 'gems').play('diamond'); this.add.sprite(400, 200, 'gems').play('prism'); this.add.sprite(400, 300, 'gems').play('ruby'); this.add.sprite(400, 400, 'gems').play('square'); } function update (timestep, dt) { image.x += speed * dt; if (image.x > 1000) { image.x = 0; } time.setText('time: ' + this.sys.game.loop.time.toString()); delta.setText(this.sys.game.loop.deltaHistory); }<|fim▁end|>
preload: preload, create: create,
<|file_name|>L10nLabel.tsx<|end_file_name|><|fim▁begin|><|fim▁hole|> interface IProps extends TypographyProps { english: string; l10nKey: string; } // Displays a Text field with the label localized (only works if label is a string). export const L10nLabel: React.FunctionComponent<IProps> = props => { const label = useL10n(props.english, props.l10nKey); return <Typography {...props}>{label}</Typography>; };<|fim▁end|>
import * as React from "react"; import Typography, { TypographyProps } from "@material-ui/core/Typography"; import { useL10n } from "./l10nHooks";
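// Editor's note: illustrative sketch, not part of the FIM record above; the
// key and strings below are hypothetical. Any extra TypographyProps are
// forwarded to the underlying Typography:
//
//   <L10nLabel english="Choose a book" l10nKey="CreateTab.ChooseBook" variant="h6" />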
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
default_app_config = 'django_tenants.apps.DjangoTenantsConfig'
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import { YsideBar, YrightBar } from 'yrui'; import { rightbarTabs, rightbarTabLists, projectList } from '../../models/models'; let userInfo = { logo: require('../../styles/images/usr.jpg'), name: 'test', email: '[email protected]' }; export default class Yaside extends Component { constructor(props) {<|fim▁hole|> super(props); }; render() { return ( <aside> <YsideBar menu={this.props.sideBarMenu} projectList={true} userInfo={true} /> <YrightBar tabs={rightbarTabs} tabList={rightbarTabLists} /> </aside> ); } }<|fim▁end|>
<|file_name|>igs036crypt.cpp<|end_file_name|><|fim▁begin|>// license:BSD-3-Clause // copyright-holders:Andreas Naive,David Haywood #include "emu.h" #include "igs036crypt.h" /**************************************************************************** IGS036 encryption emulation The encryption used by the IGS036 seems to be another iteration over previous IGS encryption schemes. Basically, it consists on a rotation-based non-trivial obfuscation layered upon a simple address-based XOR encryption (similar to the ones found in previous IGS circuits). The scheme works on 16-bits words and is probably designed to depend on 24 bits of (word-) address; in what follows, we will refer to the 8 lowest ones simply as the lowest bits of the address, and the other 16 as the highest bits the address. The address-based XOR can be thought as 16 one-bit XORs against key bits controlled by certain combinations of up to three address bits. The game key is comprised of 256 8-bits values provided by the internal ARM; every 8-bit value in the key is used on those words whose address match the index modulus 256; in a given key byte, every bit affects two positions of the correponding 16-bits encrypted words. This use of the key is similar to the one found in previous instantiations of IGS circuits. What is new in the IGS036 is the use of an obfuscation working this way: 1) The highest bits of the address are split in 4 groups, every of which controls a rotation by a shift of (plus or minus) 9, 1, 2 and 4 respectively. 2) For every address, the highest bit of the group set in the address controls the activation/deactivation of the rotation for his group, using an associated (and fixed) boolean function depending on the lowest bits of the address. 3) If the group rotation is to be activated according to 2), then another fixed group-level boolean functions (again, depending on the lowest bits of the address) control the direction (left or right) of the rotation. 4) One of the groups (the associated with the shift by 9) interacts with the other three by inverting (when active itself) the activation/deactivation patterns of the other three. 5) The lowest bits of the address control a further rotation(independent on the highest bits of the address). 6) Finally, a global bitswap is applied. All the associated boolean functions are clearly of low complexity, so it should be expected that the hardware is calculating them that way rather than using lookup tables or otherwise. It should be stressed that this obfuscation is done system-wide without dependence on the game keys. On a different note, the unused tail of the ROMs are pattern-filled and, more interestingly, that region appears to be hiding 20-bytes values (SHA-1 hashes?) located at positions which vary per set. See the table below. 
driver 20-bytes value position in the ROM --------- ---------------------------------- orleg2o $763984-$763997 orleg2 $76C77C-$76C78F kov3 $718040-$718053 kof98umh $E50E60-$E50E73 TO-DO: complete the table with the 20-bytes values *****************************************************************************/ igs036_decryptor::igs036_decryptor(const uint8_t* game_key) : key(game_key) { } void igs036_decryptor::decrypter_rom(uint16_t* rom, int size, int offset) { for (int i = 0; i < size / 2; i++) { rom[i] = decrypt(rom[i], i+offset); } } uint16_t igs036_decryptor::decrypt(uint16_t cipherword, int word_address)const { // key-independent manipulation int aux = deobfuscate(cipherword, word_address); // key-dependent manipulation for (int i=0; i<16; ++i) { if ((word_address&triggers[i][0]) == triggers[i][1]) aux ^= BIT(key[word_address&0xff],i&7) << i; else aux ^= BIT(0x1a3a, i) << i; } return aux; } uint16_t igs036_decryptor::deobfuscate(uint16_t cipherword, int word_address)const { // key-independent manipulation int shift = rotation(word_address); int aux = rol(cipherword, shift); aux = bitswap<16>(aux, 10,9,8,7,0,15,6,5, 14,13,4,3,12,11,2,1); return aux; } int igs036_decryptor::rotation(int address)const { const int group15[] = {15,11, 7, 5}; // 15 is a guess const int group14[] = {14, 9, 3, 2};<|fim▁hole|> int enabled0 = rot_enabled(address, group15); int rot = enabled0 * rot_group(address, group15) * 9; int enabled1 = enabled0 ^ rot_enabled(address, group14); rot += enabled1 * rot_group(address, group14) * 1; int enabled2 = enabled0 ^ rot_enabled(address, group13); rot += enabled2 * rot_group(address, group13) * 2; int enabled3 = enabled0 ^ rot_enabled(address, group12); rot += enabled3 * rot_group(address, group12) * 4; // block-independent rotation (just depending on the lowest 8 bits) int rot2 = 4*BIT(address,0); rot2 += 1*BIT(address,4)*(BIT(address,0)*2-1); rot2 += 4*BIT(address,3)*(BIT(address,0)*2-1); rot2 *= (BIT(address,7)|(BIT(address,0)^BIT(address,1)^1))*2-1; rot2 += 2*((BIT(address,0)^BIT(address,1))&(BIT(address,7)^1)); return (rot+rot2)&0xf; } int igs036_decryptor::rot_enabled(int address, const int* group)const { int enabled = 0; for (int j=0; j<4; ++j) { if (BIT(address,8+group[j])) { int aux = address ^ (0x1b*BIT(address,2)); enabled = rot_enabling[group[j]][aux&3](aux); break; } } return enabled; } int igs036_decryptor::rot_group(int address, const int* group)const { int aux = rot_direction[group[0]&3][address&7](address); return (aux*2)-1; } uint16_t igs036_decryptor::rol(uint16_t num, int shift)const { uint16_t r = num<<shift; uint16_t l = num>>(16-shift); return r|l; } // the triggers describe under what conditions are every one of the 16 XORs activated const uint32_t igs036_decryptor::triggers[16][2] = { {0x000101, 0x000001}, {0x000802, 0x000800}, {0x000204, 0x000004}, {0x000408, 0x000408}, {0x010010, 0x000010}, {0x020020, 0x000020}, {0x040040, 0x000040}, {0x080080, 0x080080}, {0x100100, 0x000100}, {0x200200, 0x200000}, {0x400400, 0x400000}, {0x800801, 0x000001}, {0x001004, 0x001000}, {0x002010, 0x002000}, {0x004040, 0x000040}, {0x008100, 0x008100} }; // The rotation depending on the 16 highest address bits depends on a series // of function on the 8 lowest word-address bits. 
Some comments: // * Bits #5 & #6 are unused so, in fact, they only depend on 6 address bits // * The functions are clearly low-complexity boolean functions on those 6 bits // rather than, say, random lookup tables // * There are quite a number of functionally equivalent ways to implement // those boolean functions, so the given implementation (by multiplexing // over some simple functions) shouldn't be taken too seriously: while it's // functionally correct, it doesn't neccesarily represent the way the hardware // is calculating them. static int unknown(int address) { return 0; } static int cZero (int address) { return 0; } static int cOne (int address) { return 1; } static int bit_3 (int address) { return BIT(address,3); } static int bit_4 (int address) { return BIT(address,4); } static int bit_7 (int address) { return BIT(address,7); } static int not_3 (int address) { return BIT(address,3)^1; } static int not_4 (int address) { return BIT(address,4)^1; } static int not_7 (int address) { return BIT(address,7)^1; } static int xor_37 (int address) { return BIT(address,3)^BIT(address,7); } static int xnor_37(int address) { return BIT(address,3)^BIT(address,7)^1; } static int xor_47 (int address) { return BIT(address,4)^BIT(address,7); } static int xnor_47(int address) { return BIT(address,4)^BIT(address,7)^1; } static int nor_34 (int address) { return (BIT(address,3)|BIT(address,4))^1; } static int impl_43(int address) { return BIT(address,3)||(BIT(address,4)^1); } int (*igs036_decryptor::rot_enabling[16][4])(int) = { {bit_3 , not_3 , bit_3 , not_3 }, {bit_3 , not_3 , bit_3 , not_3 }, {bit_4 , bit_4 , bit_4 , bit_4 }, {bit_4 , not_4 , bit_4 , not_4 }, {bit_3 , bit_3 , bit_3 , bit_3 }, {nor_34 , bit_7 , bit_7 , cZero }, {cZero , cOne , cZero , cOne }, {impl_43, xor_37 , xnor_37, not_3 }, {bit_3 , bit_3 , not_3 , not_3 }, {bit_4 , bit_4 , not_4 , not_4 }, {cZero , cZero , cZero , cZero }, {nor_34 , bit_7 , not_7 , cOne }, {bit_3 , not_3 , bit_3 , not_3 }, {cZero , cOne , cOne , cZero }, {bit_4 , not_4 , bit_4 , not_4 }, {unknown, unknown, unknown, unknown}, }; int (*igs036_decryptor::rot_direction[4][8])(int) = { {bit_3 , xor_37 , xnor_37, not_3 , bit_3 , xor_37 , xnor_37, not_3 }, {cZero , not_7 , not_7 , cZero , cZero , not_7 , not_7 , cZero }, {bit_4 , xor_47 , xnor_47, not_4 , bit_4 , xor_47 , xnor_47, not_4 }, {bit_3 , not_7 , bit_7 , cZero , cOne , not_7 , bit_7 , cZero }, }; // ------------------------GAME KEYS--------------------------- // The keys below have been obtained by an automatic process // exploiting the simple XOR scheme used by the system. Overall, the process, // while simple, seems to be pretty robust, so few errors should be expected, // if any. The exceptions are DDPDOJ & KOF98UMH (see below). 
const uint8_t m312cn_key[0x100] = { 0x01, 0x09, 0x02, 0xab, 0x23, 0x20, 0xa2, 0x03, 0x10, 0x9b, 0xba, 0x33, 0x04, 0x2e, 0x27, 0x23, 0x92, 0x11, 0x13, 0x93, 0x13, 0x86, 0x83, 0x02, 0x18, 0x8a, 0x8b, 0x9a, 0x10, 0x0f, 0x13, 0x83, 0xa2, 0x98, 0x32, 0xba, 0x06, 0xab, 0x02, 0x0b, 0x1a, 0xa0, 0x13, 0x82, 0x84, 0x80, 0x8a, 0xa7, 0x83, 0xb0, 0xb2, 0xab, 0x31, 0x07, 0xa3, 0x02, 0x10, 0x23, 0x8b, 0xb2, 0x2b, 0x0a, 0xa7, 0xa3, 0x02, 0x7b, 0x12, 0xc3, 0x07, 0x0c, 0x43, 0xa6, 0x91, 0x91, 0x9b, 0xaa, 0x82, 0xca, 0x2e, 0x6a, 0x43, 0x51, 0x02, 0xcb, 0x52, 0x8b, 0x56, 0x57, 0x88, 0xc3, 0x83, 0x1a, 0x8d, 0x51, 0x86, 0x0a, 0xc1, 0x1b, 0x22, 0x5a, 0x07, 0x84, 0xa3, 0xce, 0xba, 0xfa, 0xab, 0x6a, 0xea, 0x2c, 0x2e, 0x67, 0x00, 0x33, 0x53, 0xd3, 0x47, 0x98, 0x93, 0x62, 0x2b, 0x9b, 0x2b, 0x82, 0xed, 0x4b, 0x1a, 0x86, 0xa0, 0xb9, 0x82, 0x0b, 0x27, 0x09, 0xa2, 0xab, 0x20, 0x3a, 0x8b, 0x0a, 0x84, 0x8d, 0x0b, 0x8f, 0x83, 0x8a, 0x92, 0x13, 0x10, 0x18, 0x06, 0x96, 0x83, 0x89, 0x8b, 0x92, 0x1c, 0x92, 0x9b, 0x17, 0x02, 0x2b, 0x02, 0x02, 0x06, 0x25, 0xa2, 0xab, 0xa8, 0x12, 0x13, 0x9a, 0x21, 0x27, 0x03, 0x2a, 0xa3, 0x92, 0x33, 0xb2, 0x94, 0x12, 0x32, 0x9b, 0x90, 0xa0, 0x8a, 0x2a, 0x9a, 0xbb, 0xae, 0x1e, 0x41, 0x2b, 0x92, 0xb2, 0x44, 0xe0, 0x02, 0x6f, 0x61, 0x30, 0x4a, 0x13, 0x61, 0x4f, 0x2e, 0xa6, 0x52, 0x00, 0xc2, 0x8b, 0x53, 0x8f, 0x93, 0x4f, 0x5b, 0x01, 0x1a, 0x9b, 0xc6, 0x01, 0x03, 0x0b, 0x42, 0x09, 0xf2, 0x62, 0x82, 0x41, 0x22, 0xc6, 0x90, 0x2a, 0xfa, 0x0b, 0x6c, 0xa0, 0x4f, 0x03, 0xa0, 0x53, 0xf2, 0xbb, 0x46, 0x96, 0x23, 0x22, 0xd8, 0xfa, 0x12, 0xab, 0x88, 0x1a, 0x7a, 0x8a, }; const uint8_t cjddzsp_key[0x100] = { 0x11, 0x21, 0xa2, 0x1a, 0x84, 0xaf, 0x26, 0x0b, 0x3b, 0xbb, 0x12, 0x9b, 0x89, 0x80, 0x2f, 0x0a, 0x91, 0x80, 0x93, 0x93, 0x80, 0x0b, 0x13, 0x93, 0x0a, 0x82, 0x8a, 0x12, 0x13, 0x05, 0x96, 0x17, 0x81, 0xb1, 0xb3, 0xab, 0x06, 0x2a, 0x87, 0x83, 0x33, 0x93, 0x13, 0x8a, 0x28, 0xa8, 0x07, 0x8b, 0x11, 0xa3, 0xb2, 0xa2, 0x23, 0x17, 0x17, 0xb6, 0x33, 0xa9, 0xa3, 0x23, 0xa0, 0xa3, 0x9b, 0xbb, 0x70, 0xe8, 0x83, 0x72, 0xe6, 0xa2, 0xa2, 0x27, 0xbb, 0xc8, 0xf3, 0x42, 0x6d, 0xc8, 0x66, 0x47, 0x93, 0x18, 0x12, 0x12, 0x13, 0x58, 0xd2, 0xc6, 0x49, 0x09, 0xc3, 0x0a, 0x81, 0x0b, 0xc2, 0xda, 0xd2, 0x33, 0xc2, 0x1a, 0x40, 0x89, 0x26, 0xeb, 0x78, 0x51, 0x5a, 0x62, 0xa3, 0xee, 0x02, 0x8f, 0x42, 0xa1, 0xe3, 0x3a, 0x41, 0x44, 0x93, 0xd3, 0x03, 0xda, 0xe2, 0x83, 0x69, 0xc5, 0xb3, 0xb6, 0x91, 0x00, 0xa2, 0x32, 0x24, 0x88, 0x87, 0xab, 0x02, 0x28, 0x2a, 0x8b, 0x87, 0xab, 0x2b, 0x8b, 0x13, 0x02, 0x03, 0x9a, 0x94, 0x13, 0x87, 0x0b, 0x1a, 0x98, 0x03, 0x1b, 0x10, 0x81, 0x1a, 0x9f, 0x81, 0xa9, 0x03, 0x3a, 0x05, 0x06, 0x27, 0xab, 0x3b, 0xa8, 0x8a, 0xab, 0xaf, 0x0a, 0xaa, 0x2f, 0x31, 0x39, 0x32, 0x3a, 0x81, 0xbf, 0x07, 0x87, 0x89, 0x98, 0xa2, 0x22, 0x13, 0xa4, 0xb6, 0x0e, 0x43, 0xf2, 0x43, 0x33, 0x47, 0x4c, 0x66, 0x26, 0xf2, 0x69, 0x2b, 0x5a, 0xa3, 0x83, 0x4b, 0xe6, 0x41, 0x50, 0x92, 0xcb, 0xd3, 0x1e, 0x57, 0x87, 0x01, 0x19, 0x9a, 0x52, 0x45, 0x5a, 0x9e, 0xde, 0xa3, 0xa1, 0x42, 0x7b, 0xa3, 0x22, 0xa2, 0x87, 0x80, 0xe0, 0xf3, 0x23, 0x2a, 0x8e, 0x2f, 0x6f, 0x92, 0x1a, 0x23, 0xab, 0xb3, 0x09, 0xd6, 0xab, 0x38, 0xe3, 0x2b, 0x3a, 0xdf, 0x7d, 0xea, 0x87, }; const uint8_t cjdh2_key[0x100] = { 0x03, 0x31, 0x92, 0x23, 0x21, 0x2b, 0x23, 0x23, 0x39, 0x01, 0xb2, 0x9b, 0x0d, 0xaa, 0x07, 0x86, 0x03, 0x9b, 0x03, 0x82, 0x82, 0x00, 0x86, 0x0b, 0x80, 0x92, 0x9a, 0x1b, 0x81, 0x9a, 0x92, 0x8f, 0x83, 0x89, 0x82, 0x0a, 0x02, 0x0f, 0x83, 0xa7, 0x80, 0x32, 0xbb, 0x02, 0x8f, 0xa2, 0xaa, 0x0e, 0x80, 0x12, 0x23, 0xbb, 0x86, 0xb9, 0xb3, 0x1b, 0x19, 0xb8, 0x93, 0x22, 0x28, 
0x9d, 0xbf, 0xb2, 0xa1, 0xb0, 0x63, 0xaa, 0x81, 0x8a, 0x47, 0x0b, 0xdb, 0x21, 0x5a, 0x03, 0xe9, 0x60, 0x2f, 0xab, 0x00, 0x43, 0xc2, 0x8b, 0x06, 0x54, 0x47, 0x9f, 0x51, 0xc9, 0x4a, 0x4b, 0x1f, 0x40, 0x9f, 0x52, 0x21, 0x00, 0xe3, 0x72, 0x44, 0x43, 0xc2, 0xab, 0x5a, 0x32, 0x1a, 0x62, 0x6d, 0xa2, 0x82, 0xce, 0x73, 0xe0, 0xc3, 0xa3, 0x73, 0x71, 0x16, 0x42, 0x69, 0xc9, 0x02, 0x43, 0x93, 0x23, 0x43, 0xbf, 0x83, 0x19, 0xb2, 0x9a, 0xa0, 0x8a, 0x03, 0x8e, 0x29, 0x03, 0x02, 0x0b, 0xa0, 0xa0, 0x8b, 0x0a, 0x13, 0x0b, 0x12, 0x9a, 0x10, 0x80, 0x87, 0x8f, 0x98, 0x89, 0x13, 0x0b, 0x83, 0x8e, 0x1a, 0x1a, 0x90, 0xab, 0xa2, 0x9b, 0xa5, 0xae, 0x22, 0x0a, 0x8b, 0xab, 0xa3, 0x0a, 0x0e, 0x02, 0x8e, 0x0f, 0x32, 0x3b, 0x13, 0x0b, 0x93, 0x91, 0x22, 0x0b, 0x90, 0xab, 0xb2, 0x33, 0xa1, 0x21, 0xaa, 0xae, 0xa3, 0x93, 0x73, 0xc2, 0x67, 0x81, 0xc7, 0x0a, 0x31, 0xa2, 0x7b, 0x93, 0xa7, 0x60, 0x86, 0xce, 0x53, 0x18, 0x53, 0x52, 0xc6, 0x5b, 0x47, 0x1a, 0x0b, 0x98, 0x5b, 0xda, 0x92, 0x14, 0x07, 0x82, 0x70, 0xc3, 0x02, 0xd2, 0xe1, 0x42, 0x42, 0x47, 0xe3, 0x20, 0x9a, 0xea, 0xe6, 0x02, 0x2a, 0x8f, 0xf3, 0x3a, 0x22, 0x7a, 0xf1, 0x58, 0x97, 0xeb, 0x41, 0x59, 0xe2, 0x73, 0xdd, 0xa7, 0x7e, 0x1f, };<|fim▁end|>
const int group13[] = {13,10, 6, 1}; const int group12[] = {12, 8, 4, 0}; // rotation depending on all the address bits
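
A rough sketch of the key-recovery idea referenced in the GAME KEYS comment above — an illustration only, not the tool actually used. Every decrypted word is deobfuscate(cipher) XORed with a 16-bit mask whose bits come either from the constant 0x1a3a or from key[address & 0xff], as selected by the trigger table; with a known plaintext word at a given address, the key bits used there fall out by XOR. The names deobfuscate and TRIGGERS are assumed Python ports of the C++ above.

def recover_key_bits(cipherword, plainword, word_address, key_bits):
    # total XOR mask applied at this address (key bits plus constant bits)
    mask = deobfuscate(cipherword, word_address) ^ plainword
    for i in range(16):
        trig_mask, trig_value = TRIGGERS[i]
        if (word_address & trig_mask) == trig_value:
            # this bit of the mask came from the key table, not from 0x1a3a
            key_bits[word_address & 0xff][i & 7] = (mask >> i) & 1
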
<|file_name|>storage.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::StorageBinding; use dom::bindings::codegen::Bindings::StorageBinding::StorageMethods; use dom::bindings::codegen::InheritTypes::{EventCast, EventTargetCast}; use dom::bindings::global::{GlobalRef, GlobalField}; use dom::bindings::js::{Root, RootedReference}; use dom::bindings::refcounted::Trusted; use dom::bindings::utils::{Reflector, reflect_dom_object}; use dom::event::{EventBubbles, EventCancelable}; use dom::storageevent::StorageEvent; use dom::urlhelper::UrlHelper; use ipc_channel::ipc; use net_traits::storage_task::{StorageTask, StorageTaskMsg, StorageType}; use page::IterablePage; use script_task::{ScriptTask, MainThreadRunnable, MainThreadScriptMsg}; use std::borrow::ToOwned; use std::sync::mpsc::channel; use url::Url; use util::str::DOMString; #[dom_struct] pub struct Storage { reflector_: Reflector, global: GlobalField, storage_type: StorageType } impl Storage { fn new_inherited(global: &GlobalRef, storage_type: StorageType) -> Storage { Storage { reflector_: Reflector::new(), global: GlobalField::from_rooted(global), storage_type: storage_type } } pub fn new(global: &GlobalRef, storage_type: StorageType) -> Root<Storage> { reflect_dom_object(box Storage::new_inherited(global, storage_type), *global, StorageBinding::Wrap) } fn get_url(&self) -> Url { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.get_url() } fn get_storage_task(&self) -> StorageTask { let global_root = self.global.root(); let global_ref = global_root.r(); global_ref.as_window().storage_task() } } impl StorageMethods for Storage { // https://html.spec.whatwg.org/multipage/#dom-storage-length fn Length(&self) -> u32 { let (sender, receiver) = ipc::channel().unwrap(); self.get_storage_task().send(StorageTaskMsg::Length(sender, self.get_url(), self.storage_type)).unwrap(); receiver.recv().unwrap() as u32 } // https://html.spec.whatwg.org/multipage/#dom-storage-key fn Key(&self, index: u32) -> Option<DOMString> { let (sender, receiver) = ipc::channel().unwrap(); self.get_storage_task().send(StorageTaskMsg::Key(sender, self.get_url(), self.storage_type, index)).unwrap(); receiver.recv().unwrap() } // https://html.spec.whatwg.org/multipage/#dom-storage-getitem fn GetItem(&self, name: DOMString) -> Option<DOMString> { let (sender, receiver) = ipc::channel().unwrap(); let msg = StorageTaskMsg::GetItem(sender, self.get_url(), self.storage_type, name); self.get_storage_task().send(msg).unwrap(); receiver.recv().unwrap() } // https://html.spec.whatwg.org/multipage/#dom-storage-setitem fn SetItem(&self, name: DOMString, value: DOMString) { let (sender, receiver) = ipc::channel().unwrap(); let msg = StorageTaskMsg::SetItem(sender, self.get_url(), self.storage_type, name.clone(), value.clone()); self.get_storage_task().send(msg).unwrap(); let (changed, old_value) = receiver.recv().unwrap(); if changed { self.broadcast_change_notification(Some(name), old_value, Some(value));<|fim▁hole|> // https://html.spec.whatwg.org/multipage/#dom-storage-removeitem fn RemoveItem(&self, name: DOMString) { let (sender, receiver) = ipc::channel().unwrap(); let msg = StorageTaskMsg::RemoveItem(sender, self.get_url(), self.storage_type, name.clone()); self.get_storage_task().send(msg).unwrap(); if let Some(old_value) = 
receiver.recv().unwrap() { self.broadcast_change_notification(Some(name), Some(old_value), None); } } // https://html.spec.whatwg.org/multipage/#dom-storage-clear fn Clear(&self) { let (sender, receiver) = ipc::channel().unwrap(); self.get_storage_task().send(StorageTaskMsg::Clear(sender, self.get_url(), self.storage_type)).unwrap(); if receiver.recv().unwrap() { self.broadcast_change_notification(None, None, None); } } // check-tidy: no specs after this line fn NamedGetter(&self, name: DOMString, found: &mut bool) -> Option<DOMString> { let item = self.GetItem(name); *found = item.is_some(); item } fn NamedSetter(&self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn NamedCreator(&self, name: DOMString, value: DOMString) { self.SetItem(name, value); } fn NamedDeleter(&self, name: DOMString) { self.RemoveItem(name); } fn SupportedPropertyNames(&self) -> Vec<DOMString> { // FIXME: unimplemented (https://github.com/servo/servo/issues/7273) vec![] } } impl Storage { /// https://html.spec.whatwg.org/multipage/#send-a-storage-notification fn broadcast_change_notification(&self, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>) { let global_root = self.global.root(); let global_ref = global_root.r(); let main_script_chan = global_ref.as_window().main_thread_script_chan(); let script_chan = global_ref.script_chan(); let trusted_storage = Trusted::new(global_ref.get_cx(), self, script_chan.clone()); main_script_chan.send(MainThreadScriptMsg::MainThreadRunnableMsg( box StorageEventRunnable::new(trusted_storage, key, old_value, new_value))).unwrap(); } } pub struct StorageEventRunnable { element: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString> } impl StorageEventRunnable { fn new(storage: Trusted<Storage>, key: Option<DOMString>, old_value: Option<DOMString>, new_value: Option<DOMString>) -> StorageEventRunnable { StorageEventRunnable { element: storage, key: key, old_value: old_value, new_value: new_value } } } impl MainThreadRunnable for StorageEventRunnable { fn handler(self: Box<StorageEventRunnable>, script_task: &ScriptTask) { let this = *self; let storage_root = this.element.root(); let storage = storage_root.r(); let global_root = storage.global.root(); let global_ref = global_root.r(); let ev_window = global_ref.as_window(); let ev_url = storage.get_url(); let storage_event = StorageEvent::new( global_ref, "storage".to_owned(), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, this.key, this.old_value, this.new_value, ev_url.to_string(), Some(storage) ); let event = EventCast::from_ref(storage_event.r()); let root_page = script_task.root_page(); for it_page in root_page.iter() { let it_window_root = it_page.window(); let it_window = it_window_root.r(); assert!(UrlHelper::SameOrigin(&ev_url, &it_window.get_url())); // TODO: Such a Document object is not necessarily fully active, but events fired on such // objects are ignored by the event loop until the Document becomes fully active again. if ev_window.pipeline() != it_window.pipeline() { let target = EventTargetCast::from_ref(it_window); event.fire(target); } } } }<|fim▁end|>
} }
<|file_name|>source_list.py<|end_file_name|><|fim▁begin|><|fim▁hole|># # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from boundary import ApiCli class SourceList(ApiCli): def __init__(self): ApiCli.__init__(self) self.path = "v1/account/sources/" self.method = "GET" def getDescription(self): return "Lists the sources in a Boundary account"<|fim▁end|>
<|file_name|>Test_WebFSStat.py<|end_file_name|><|fim▁begin|>from webfs import WebFSStat import stat def Test_Basic(): fields = ('st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid', 'st_gid', 'st_size', 'st_atime', 'st_mtime', 'st_ctime')<|fim▁hole|> def Test_InitParam(): st = WebFSStat() assert st.st_mode == stat.S_IFDIR | 0555 st = WebFSStat(False) assert st.st_mode == stat.S_IFREG | 0444 def Test_IsDir(): st = WebFSStat() assert st.isDir() st = WebFSStat(False) assert not st.isDir()<|fim▁end|>
st = WebFSStat() print st.__dict__.keys() for field in fields: assert field in st.__dict__.keys(), 'field(%s) is not in members' % field
<|file_name|>sortController.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for ag-grid v15.0.0 // Project: http://www.ag-grid.com/ // Definitions by: Niall Crosby <https://github.com/ag-grid/> import { Column } from "./entities/column"; export declare class SortController { private static DEFAULT_SORTING_ORDER; private gridOptionsWrapper; private columnController; private eventService; private columnApi; private gridApi; progressSort(column: Column, multiSort: boolean): void; setSortForColumn(column: Column, sort: string, multiSort: boolean): void; onSortChanged(): void; private dispatchSortChangedEvents(); private clearSortBarThisColumn(columnToSkip);<|fim▁hole|> sort: string; }[]; setSortModel(sortModel: any): void; private compareColIds(sortModelEntry, column); getColumnsWithSortingOrdered(): Column[]; getSortForRowController(): any[]; }<|fim▁end|>
private getNextSortDirection(column); getSortModel(): { colId: string;
<|file_name|>date.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- # Copyright (C) 2015 Alejandro López Espinosa (kudrom) import datetime import random class Date(object): """ Descriptor for a date datum """ def __init__(self, variance, **kwargs): """ @param variance is the maximum variance of time allowed for the generation of random data. """ self.variance = variance def generate(self): """<|fim▁hole|> Generates random data for the descriptor. This is called by the DataSchemaManager.generate """ now = datetime.datetime.now().strftime("%s") return int(now) + random.randrange(0, self.variance) def validate(self, data): """ Validates @param data against the descriptor. This is called by the DataSchemaManager.validate """ return True<|fim▁end|>
<|file_name|>ex9_28.cpp<|end_file_name|><|fim▁begin|>#include <iostream> #include <string> #include <forward_list> <|fim▁hole|>using namespace std; void insert(forward_list<string>& fst, const string& to_find, const string& to_add); int main() { forward_list<string> fst{ "pen", "pineapple", "apple", "pen" }; insert(fst, "pen", "and"); for (auto& i : fst) cout << i << " "; cout << endl; return 0; } void insert(forward_list<string>& fst, const string& to_find, const string& to_add) { auto prev = fst.before_begin(); for (auto iter = fst.begin(); iter != fst.end(); prev = iter++) { if (*iter == to_find) { fst.insert_after(iter, to_add); return; } } fst.insert_after(prev, to_add); return; }<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import codecs from setuptools import setup with codecs.open('README.rst', encoding='utf-8') as f: long_description = f.read() setup( name="shadowsocks", version="2.8.2.1", license='http://www.apache.org/licenses/LICENSE-2.0', description="A fast tunnel proxy that help you get through firewalls", author='clowwindy', author_email='[email protected]', url='https://github.com/shadowsocks/shadowsocks', packages=['shadowsocks', 'shadowsocks.crypto'], package_data={ 'shadowsocks': ['README.rst', 'LICENSE'] }, install_requires=[], entry_points=""" [console_scripts] sslocal = shadowsocks.local:main ssserver = shadowsocks.server:main """, classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3',<|fim▁hole|> 'Topic :: Internet :: Proxy Servers', ], long_description=long_description, )<|fim▁end|>
'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy',
<|file_name|>xbmc_context.py<|end_file_name|><|fim▁begin|>import sys import urllib import urlparse<|fim▁hole|>import json import xbmc import xbmcaddon import xbmcplugin import xbmcvfs from ..abstract_context import AbstractContext from .xbmc_plugin_settings import XbmcPluginSettings from .xbmc_context_ui import XbmcContextUI from .xbmc_system_version import XbmcSystemVersion from .xbmc_playlist import XbmcPlaylist from .xbmc_player import XbmcPlayer from ... import utils class XbmcContext(AbstractContext): def __init__(self, path='/', params=None, plugin_name=u'', plugin_id=u'', override=True): AbstractContext.__init__(self, path, params, plugin_name, plugin_id) if plugin_id: self._addon = xbmcaddon.Addon(id=plugin_id) else: self._addon = xbmcaddon.Addon() pass self._system_version = None """ I don't know what xbmc/kodi is doing with a simple uri, but we have to extract the information from the sys parameters and re-build our clean uri. Also we extract the path and parameters - man, that would be so simple with the normal url-parsing routines. """ # first the path of the uri if override: self._uri = sys.argv[0] comps = urlparse.urlparse(self._uri) self._path = urllib.unquote(comps.path).decode('utf-8') # after that try to get the params if len(sys.argv) > 2: params = sys.argv[2][1:] if len(params) > 0: self._uri = self._uri + '?' + params self._params = {} params = dict(urlparse.parse_qsl(params)) for _param in params: item = params[_param] self._params[_param] = item.decode('utf-8') pass pass pass self._ui = None self._video_playlist = None self._audio_playlist = None self._video_player = None self._audio_player = None self._plugin_handle = int(sys.argv[1]) if len(sys.argv) > 1 else None self._plugin_id = plugin_id or self._addon.getAddonInfo('id') self._plugin_name = plugin_name or self._addon.getAddonInfo('name') self._version = self._addon.getAddonInfo('version') self._native_path = xbmc.translatePath(self._addon.getAddonInfo('path')) self._settings = XbmcPluginSettings(self._addon) """ Set the data path for this addon and create the folder """ self._data_path = xbmc.translatePath('special://profile/addon_data/%s' % self._plugin_id) if isinstance(self._data_path, str): self._data_path = self._data_path.decode('utf-8') pass if not xbmcvfs.exists(self._data_path): xbmcvfs.mkdir(self._data_path) pass pass def format_date_short(self, date_obj): date_format = xbmc.getRegion('dateshort') _date_obj = date_obj if isinstance(_date_obj, datetime.date): _date_obj = datetime.datetime(_date_obj.year, _date_obj.month, _date_obj.day) pass return _date_obj.strftime(date_format) def format_time(self, time_obj): time_format = xbmc.getRegion('time') _time_obj = time_obj if isinstance(_time_obj, datetime.time): _time_obj = datetime.time(_time_obj.hour, _time_obj.minute, _time_obj.second) pass return _time_obj.strftime(time_format) def get_language(self): """ The xbmc.getLanguage() method is fucked up!!! 
We always return 'en-US' for now """ return 'en-US' """ if self.get_system_version().get_release_name() == 'Frodo': return 'en-US' try: language = xbmc.getLanguage(0, region=True) language = language.split('-') language = '%s-%s' % (language[0].lower(), language[1].upper()) return language except Exception, ex: self.log_error('Failed to get system language (%s)', ex.__str__()) return 'en-US' pass """ def get_system_version(self): if not self._system_version: self._system_version = XbmcSystemVersion(version='', releasename='', appname='') pass return self._system_version def get_video_playlist(self): if not self._video_playlist: self._video_playlist = XbmcPlaylist('video', weakref.proxy(self)) pass return self._video_playlist def get_audio_playlist(self): if not self._audio_playlist: self._audio_playlist = XbmcPlaylist('audio', weakref.proxy(self)) pass return self._audio_playlist def get_video_player(self): if not self._video_player: self._video_player = XbmcPlayer('video', weakref.proxy(self)) pass return self._video_player def get_audio_player(self): if not self._audio_player: self._audio_player = XbmcPlayer('audio', weakref.proxy(self)) pass return self._audio_player def get_ui(self): if not self._ui: self._ui = XbmcContextUI(self._addon, weakref.proxy(self)) pass return self._ui def get_handle(self): return self._plugin_handle def get_data_path(self): return self._data_path def get_native_path(self): return self._native_path def get_settings(self): return self._settings def localize(self, text_id, default_text=u''): if isinstance(text_id, int): """ We want to use all localization strings! Addons should only use the range 30000 thru 30999 (see: http://kodi.wiki/view/Language_support) but we do it anyway. I want some of the localized strings for the views of a skin. 
""" if text_id >= 0 and (text_id < 30000 or text_id > 30999): result = xbmc.getLocalizedString(text_id) if result is not None and result: return utils.to_unicode(result) pass pass result = self._addon.getLocalizedString(int(text_id)) if result is not None and result: return utils.to_unicode(result) return utils.to_unicode(default_text) def set_content_type(self, content_type): self.log_debug('Setting content-type: "%s" for "%s"' % (content_type, self.get_path())) xbmcplugin.setContent(self._plugin_handle, content_type) pass def add_sort_method(self, *sort_methods): for sort_method in sort_methods: xbmcplugin.addSortMethod(self._plugin_handle, sort_method) pass pass def clone(self, new_path=None, new_params=None): if not new_path: new_path = self.get_path() pass if not new_params: new_params = self.get_params() pass new_context = XbmcContext(path=new_path, params=new_params, plugin_name=self._plugin_name, plugin_id=self._plugin_id, override=False) new_context._function_cache = self._function_cache new_context._search_history = self._search_history new_context._favorite_list = self._favorite_list new_context._watch_later_list = self._watch_later_list new_context._access_manager = self._access_manager new_context._ui = self._ui new_context._video_playlist = self._video_playlist new_context._video_player = self._video_player return new_context def execute(self, command): xbmc.executebuiltin(command) pass def sleep(self, milli_seconds): xbmc.sleep(milli_seconds) pass def addon_enabled(self, addon_id): rpc_request = json.dumps({"jsonrpc": "2.0", "method": "Addons.GetAddonDetails", "id": 1, "params": {"addonid": "%s" % addon_id, "properties": ["enabled"]} }) response = json.loads(xbmc.executeJSONRPC(rpc_request)) try: return response['result']['addon']['enabled'] is True except KeyError: message = response['error']['message'] code = response['error']['code'] error = 'Requested |%s| received error |%s| and code: |%s|' % (rpc_request, message, code) xbmc.log(error, xbmc.LOGDEBUG) return False def set_addon_enabled(self, addon_id, enabled=True): rpc_request = json.dumps({"jsonrpc": "2.0", "method": "Addons.SetAddonEnabled", "id": 1, "params": {"addonid": "%s" % addon_id, "enabled": enabled} }) response = json.loads(xbmc.executeJSONRPC(rpc_request)) try: return response['result'] == 'OK' except KeyError: message = response['error']['message'] code = response['error']['code'] error = 'Requested |%s| received error |%s| and code: |%s|' % (rpc_request, message, code) xbmc.log(error, xbmc.LOGDEBUG) return False<|fim▁end|>
import weakref import datetime
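
To make the class docstring above concrete, this is the shape of the data the constructor unpacks when Kodi invokes a plugin; the values are illustrative, not taken from a real invocation:

# sys.argv[0] -> base uri, sys.argv[1] -> integer plugin handle,
# sys.argv[2] -> '?'-prefixed query string (see the parsing in __init__)
sys.argv = ['plugin://plugin.video.example/play', '7', '?video_id=abc']
# after XbmcContext() runs:
#   self._path   == u'/play'
#   self._params == {'video_id': u'abc'}
#   self._uri    == 'plugin://plugin.video.example/play?video_id=abc'
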
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package com.contexthub.storageapp; import android.os.Bundle; import android.support.v4.app.FragmentManager; import android.support.v4.view.MenuItemCompat; import android.support.v7.app.ActionBarActivity; import android.support.v7.widget.SearchView; import android.view.Menu; import android.view.MenuItem; import com.chaione.contexthub.sdk.model.VaultDocument; import com.contexthub.storageapp.fragments.AboutFragment; import com.contexthub.storageapp.fragments.EditVaultItemFragment; import com.contexthub.storageapp.fragments.VaultItemListFragment; import com.contexthub.storageapp.models.Person; public class MainActivity extends ActionBarActivity implements VaultItemListFragment.Listener, FragmentManager.OnBackStackChangedListener { private MenuItem menuSearch; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); if(savedInstanceState == null) { getSupportFragmentManager().beginTransaction() .replace(android.R.id.content, new VaultItemListFragment()) .commit(); getSupportFragmentManager().addOnBackStackChangedListener(this); } } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.main, menu); setupSearchView(menu.findItem(R.id.action_search)); return true; } private void setupSearchView(final MenuItem menuSearch) { this.menuSearch = menuSearch; SearchView searchView = (SearchView) MenuItemCompat.getActionView(menuSearch); SearchView.SearchAutoComplete searchAutoComplete = (SearchView.SearchAutoComplete) searchView.findViewById(R.id.search_src_text); searchAutoComplete.setHint(R.string.search_hint); searchView.setOnQueryTextListener(new android.support.v7.widget.SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { menuSearch.collapseActionView(); getSupportFragmentManager().beginTransaction() .addToBackStack(null) .replace(android.R.id.content, VaultItemListFragment.newInstance(query)) .commit(); return true; } @Override public boolean onQueryTextChange(String query) { return false; } }); } @Override public boolean onPrepareOptionsMenu(Menu menu) { boolean isMainFragment = getSupportFragmentManager().getBackStackEntryCount() <= 0; menu.findItem(R.id.action_search).setVisible(isMainFragment); menu.findItem(R.id.action_add).setVisible(isMainFragment); menu.findItem(R.id.action_about).setVisible(isMainFragment); return super.onPrepareOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { menuSearch.collapseActionView(); switch(item.getItemId()) { case R.id.action_add: launchEditVaultItemFragment(null); return true; case R.id.action_about: getSupportFragmentManager().beginTransaction() .addToBackStack(null) .replace(android.R.id.content, new AboutFragment()) .commit(); return true; default: return super.onOptionsItemSelected(item); } } private void launchEditVaultItemFragment(VaultDocument<Person> document) { EditVaultItemFragment fragment = document == null ? new EditVaultItemFragment() : EditVaultItemFragment.newInstance(document); getSupportFragmentManager().beginTransaction() .addToBackStack(null) .replace(android.R.id.content, fragment) .commit(); } <|fim▁hole|> } @Override public void onBackStackChanged() { supportInvalidateOptionsMenu(); } }<|fim▁end|>
@Override public void onItemClick(VaultDocument<Person> document) { menuSearch.collapseActionView(); launchEditVaultItemFragment(document);
<|file_name|>BalancedBinaryTree_rec.py<|end_file_name|><|fim▁begin|>"""
Definition of TreeNode:
class TreeNode:
    def __init__(self, val):
        self.val = val
        self.left, self.right = None, None
"""


class Solution:
    """
    @param root: The root of binary tree.
    @return: True if this binary tree is balanced, or False.
    """
    def isBalanced(self, root):
        # write your code here
        isbalanced, h = self.isBalancedandHeight(root)
        return isbalanced

    # bottom-up helper: returns (is_balanced, height) for the subtree rooted
    # at `root`, so every node is visited at most once and the recursion can
    # stop at the first unbalanced subtree
    def isBalancedandHeight(self, root):
        if root is None:
            return True, 0
        l, r = root.left, root.right
        l_balanced, l_h = self.isBalancedandHeight(l)
        if not l_balanced:
            return False, 0
        r_balanced, r_h = self.isBalancedandHeight(r)
        if not r_balanced:
            return False, 0
<|fim▁hole|><|fim▁end|>
if abs(l_h - r_h) < 2: return True, max(l_h, r_h) + 1 return False, 0
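
A small usage sketch, assuming the TreeNode class from the header comment. Because isBalancedandHeight returns (balanced, height) bottom-up, each node is visited at most once — O(n) overall — and the recursion aborts on the first unbalanced subtree.

root = TreeNode(1)
root.left = TreeNode(2)
root.left.left = TreeNode(3)        # left chain of depth 3, right side empty
print(Solution().isBalanced(root))  # False: subtree heights differ by 2 at the root
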
<|file_name|>test_irc_channel.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ :Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_ :License: MIT, see LICENSE for details.<|fim▁hole|>from __future__ import unicode_literals from unittest import TestCase from nose2.tools import params from syslog2irc.irc import Channel class IrcChannelTestCase(TestCase): @params( (Channel('#example'), '#example', None ), (Channel('#example', password=None), '#example', None ), (Channel('#headquarters', password='secret'), '#headquarters', 'secret'), ) def test_irc_channel_creation(self, channel, expected_name, expected_password): self.assertEqual(channel.name, expected_name) self.assertEqual(channel.password, expected_password)<|fim▁end|>
"""
<|file_name|>export.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # Hive Appier Framework # Copyright (c) 2008-2015 Hive Solutions Lda. # # This file is part of Hive Appier Framework. # # Hive Appier Framework is free software: you can redistribute it and/or modify # it under the terms of the Apache License as published by the Apache # Foundation, either version 2.0 of the License, or (at your option) any # later version. # # Hive Appier Framework is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Apache License for more details. # # You should have received a copy of the Apache License along with # Hive Appier Framework. If not, see <http://www.apache.org/licenses/>. __author__ = "João Magalhães <[email protected]>" """ The author(s) of the module """ __version__ = "1.0.0" """ The version of the module """ __revision__ = "$LastChangedRevision$" """ The revision number of the module """ __date__ = "$LastChangedDate$" """ The last change date of the module """ __copyright__ = "Copyright (c) 2008-2015 Hive Solutions Lda." """ The copyright for the module """ __license__ = "Apache License, Version 2.0" """ The license for the module """ import os import json import zipfile import tempfile from . import legacy try: import bson except: bson = None IGNORE = 1 """ Ignore strategy for conflict solving in the import operation basically this strategy skips importing a document that has the same key value as one that already exists in the collection """ OVERWRITE = 2 """ Strategy for conflict solving that overwrites (completely) a previously existing document in the data source if it has the same key value as the one being imported, this should be used carefully as it may create data loss """ DUPLICATE = 3 """ Conflict solving strategy that basically duplicates the entries in the data source even if they have the same key value, this may create a somehow inconsistent state and so must be used carefully """ JOIN = 4 """ Join strategy for conflict solving in document collision, that basically adds new fields or updates existing fields in a previously existing document, this strategy does not remove extra fields existing in the previous document """ class ExportManager(object): db = None single = None multiple = None def __init__(self, db, single = (), multiple = ()): self.db = db self.single = single self.multiple = multiple def import_data(self, file_path, policy = IGNORE): temporary_path = tempfile.mkdtemp() base_path = temporary_path single_path = os.path.join(base_path, "settings") self._deploy_zip(file_path, temporary_path) for name, key in self.single: collection = self.db[name] source_path = os.path.join(single_path, "%s.json" % name) file = open(source_path, "rb") try: data = file.read() finally: file.close() self._import_single( collection, data, key = key, policy = policy <|fim▁hole|> if not os.path.exists(source_directory): continue collection = self.db[name] items = os.listdir(source_directory) data = [] for item in items: value, _extension = os.path.splitext(item) source_path = os.path.join(source_directory, item) file = open(source_path, "rb") try: _data = file.read() finally: file.close() data.append((value, _data)) self._import_multiple( collection, data, key = key, policy = policy ) def export_data(self, file_path): temporary_path = tempfile.mkdtemp() base_path = temporary_path single_path = os.path.join(base_path, "settings") if 
not os.path.exists(single_path): os.makedirs(single_path) for name, key in self.single: collection = self.db[name] data = self._export_single(collection, key) target_path = os.path.join(single_path, "%s.json" % name) file = open(target_path, "wb") try: file.write(data) finally: file.close() for name, key in self.multiple: collection = self.db[name] data = self._export_multiple(collection, key) target_directory = os.path.join(base_path, name) if not os.path.exists(target_directory): os.makedirs(target_directory) for value, _data in data: target_path = os.path.join(target_directory, "%s.json" % value) file = open(target_path, "wb") try: file.write(_data) finally: file.close() self._create_zip(file_path, temporary_path) def _import_single(self, collection, data, key, policy = IGNORE): # loads the provided json data as a sequence of key value items # and then starts loading all the values into the data source data = data.decode("utf-8") data_s = json.loads(data) for _key, entity in data_s.items(): # verifies if the "native" object id value for the mongo # database exists and if that's the case tries to convert # the value from the "underlying" string value to object # identifier, defaulting to a string value if it fails if "_id" in entity: try: entity["_id"] = bson.ObjectId(entity["_id"]) except: entity["_id"] = entity["_id"] # retrieves the key value for the current entity to # be inserted and then tries to retrieve an existing # entity for the same key, to avoid duplicated entry value = entity.get(key, None) if value: entity_e = collection.find_one({key : value}) else: entity_e = None # in case there's no existing entity for the same key # (normal situation) only need to insert the new entity # otherwise must apply the selected conflict policy for # the resolution of the data source conflict if not entity_e: collection.insert(entity) elif policy == IGNORE: continue elif policy == OVERWRITE: collection.remove({key : value}) collection.insert(entity) elif policy == DUPLICATE: collection.insert(entity) elif policy == JOIN: if "_id" in entity: del entity["_id"] collection.update({ "_id" : entity_e["_id"] }, { "$set" : entity }) def _import_multiple(self, collection, data, key, policy = IGNORE): # iterates over the complete set of data element to load # the json contents and then load the corresponding entity # value into the data source for _value, _data in data: # loads the current data in iteration from the file # as the entity to be loaded into the data source _data = _data.decode("utf-8") entity = json.loads(_data) # verifies if the "native" object id value for the mongo # database exists and if that's the case tries to convert # the value from the "underlying" string value to object # identifier, defaulting to a string value if it fails if "_id" in entity: try: entity["_id"] = bson.ObjectId(entity["_id"]) except: entity["_id"] = entity["_id"] # retrieves the key value for the current entity to # be inserted and then tries to retrieve an existing # entity for the same key, to avoid duplicated entry value = entity.get(key, None) if value: entity_e = collection.find_one({key : value}) else: entity_e = None # in case there's no existing entity for the same key # (normal situation) only need to insert the new entity # otherwise must apply the selected conflict policy for # the resolution of the data source conflict if not entity_e: collection.insert(entity) elif policy == IGNORE: continue elif policy == OVERWRITE: collection.remove({key : value}) collection.insert(entity) elif policy == 
DUPLICATE: collection.insert(entity) elif policy == JOIN: if "_id" in entity: del entity["_id"] collection.update({ "_id" : entity_e["_id"] }, { "$set" : entity }) def _export_single(self, collection, key = "_id"): entities = collection.find() _entities = {} for entity in entities: value = entity[key] value_s = self._to_key(value) _entities[value_s] = entity data = json.dumps(_entities, cls = MongoEncoder) data = legacy.bytes(data) return data def _export_multiple(self, collection, key = "_id"): entities = collection.find() for entity in entities: value = entity[key] value_s = self._to_key(value) value_s = self._escape_key(value_s) _data = json.dumps(entity, cls = MongoEncoder) _data = legacy.bytes(_data) yield (value_s, _data) def _to_key(self, key): key_t = type(key) if key_t in legacy.STRINGS: return key key = legacy.UNICODE(key) return key def _escape_key(self, key): return key.replace(":", "_") def _deploy_zip(self, zip_path, path): zip_file = zipfile.ZipFile( zip_path, mode = "r", compression = zipfile.ZIP_DEFLATED ) try: zip_file.extractall(path) finally: zip_file.close() def _create_zip(self, zip_path, path): zip_file = zipfile.ZipFile( zip_path, mode = "w", compression = zipfile.ZIP_DEFLATED ) try: list = os.listdir(path) for name in list: _path = os.path.join(path, name) is_file = os.path.isfile(_path) if is_file: zip_file.write(_path) else: self.__add_to_zip(zip_file, _path, base = path) finally: zip_file.close() def __add_to_zip(self, zip_file, path, base = ""): list = os.listdir(path) for name in list: _path = os.path.join(path, name) _path_out = _path[len(base):] _path_out = _path_out.replace("\\", "/") _path_out = _path_out.strip("/") if os.path.isfile(_path): zip_file.write(_path, _path_out) elif os.path.isdir(_path): self.__add_to_zip(zip_file, _path, base = base) class MongoEncoder(json.JSONEncoder): def default(self, obj, **kwargs): if isinstance(obj, bson.objectid.ObjectId): return str(obj) else: return json.JSONEncoder.default(obj, **kwargs)<|fim▁end|>
) for name, key in self.multiple: source_directory = os.path.join(base_path, name)
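
A hypothetical usage sketch of the ExportManager above; the collection names and key fields are invented. Collections listed in single are dumped into one JSON file each under settings/, those in multiple into one JSON file per document, and import_data resolves key collisions according to the chosen policy constant.

manager = ExportManager(
    db,                                    # assumed pymongo-style database handle
    single = (("settings", "name"),),      # (collection name, unique key field)
    multiple = (("accounts", "username"),)
)
manager.export_data("backup.zip")          # writes the zipped JSON snapshot
manager.import_data("backup.zip", policy = JOIN)
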
<|file_name|>test_quicklinks.py<|end_file_name|><|fim▁begin|># Copyright 2022 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0 from django.test import TransactionTestCase from myuw.models import VisitedLinkNew, CustomLink, PopularLink, User from myuw.test import get_request_with_user from myuw.dao.user import get_user_model from myuw.dao.affiliation import get_all_affiliations from myuw.dao.quicklinks import get_quicklink_data, get_link_label,\ add_custom_link, delete_custom_link, edit_custom_link,\ add_hidden_link, delete_hidden_link, get_popular_link_by_id,\ get_recent_link_by_id from myuw.test import get_request_with_user class TestQuickLinkDAO(TransactionTestCase): def test_recent_filtering(self): def _get_recent(data): recent = set() for link in data['recent_links']: recent.add(link['url']) return recent username = 'none' req = get_request_with_user(username) user = get_user_model(req) u1 = 'http://example.com?q=1' u2 = 'http://example.com?q=2' v1 = VisitedLinkNew.objects.create(user=user, url=u1) self.assertTrue(get_recent_link_by_id(req, v1.pk)) v2 = VisitedLinkNew.objects.create(user=user, url=u2) data = get_quicklink_data(req) recent = _get_recent(data) self.assertEquals(len(recent), 2) self.assertTrue(u1 in recent) self.assertTrue(u2 in recent) plink = PopularLink.objects.create(url=u2) self.assertTrue(get_popular_link_by_id(plink.pk)) self.assertIsNotNone(plink.json_data()) self.assertIsNotNone(str(plink)) data = get_quicklink_data(req) recent = _get_recent(data) self.assertEquals(len(recent), 1) self.assertTrue(u1 in recent) CustomLink.objects.create(user=user, url=u1) data = get_quicklink_data(req) recent = _get_recent(data) self.assertEquals(len(recent), 0) for i in range(10): VisitedLinkNew.objects.create(user=user, url="http://example.com?q=%s" % i) data = get_quicklink_data(req) recent = _get_recent(data) self.assertEquals(len(recent), 5) def test_link_label_override(self): req = get_request_with_user('none') user = get_user_model(req) data = {"user": user, "url": "http://example.com?q=replaceit", "label": "Original"} l1 = VisitedLinkNew.objects.create(**data) self.assertEquals(get_link_label(l1), "Row For Unit Tests") l1 = VisitedLinkNew.objects.create(user=user, url="http://example.com?q=whatever", label="Original") self.assertEquals(get_link_label(l1), "Original") def test_hidden_link(self): req = get_request_with_user('none') url = "http://s.ss.edu" link = add_hidden_link(req, url) self.assertEquals(link.url, url) # second time link1 = add_hidden_link(req, url) self.assertEquals(link.pk, link1.pk) self.assertIsNotNone(delete_hidden_link(req, link.pk)) # second time self.assertIsNone(delete_hidden_link(req, link.pk)) def test_add_custom_link(self): username = 'none' req = get_request_with_user(username) link = add_custom_link(req, "http://s1.ss.edu") self.assertIsNone(link.label) url = "http://s.ss.edu" link_label = "ss" link1 = add_custom_link(req, url, link_label) self.assertEquals(link1.url, url) self.assertEquals(link1.label, link_label) # second time link2 = add_custom_link(req, url, link_label) self.assertEquals(link2.pk, link1.pk) def test_delete_custom_link(self): username = 'none' req = get_request_with_user(username) url = "http://s.ss.edu" link = add_custom_link(req, url) self.assertIsNotNone(delete_custom_link(req, link.pk)) # second time self.assertIsNone(delete_custom_link(req, link.pk)) def test_edit_custom_link(self): username = 'none' req = get_request_with_user(username) url = "http://s.ss.edu" link = add_custom_link(req, url) url1 
= "http://s1.ss.edu" link1 = edit_custom_link(req, link.pk, url1) self.assertEquals(link1.url, url1) url2 = "http://s2.ss.edu" label2 = "s2" link2 = edit_custom_link(req, link1.pk, url2, label2) self.assertIsNotNone(link2) self.assertEquals(link2.label, label2) def test_get_quicklink_data(self): data = { "affiliation": "student", "url": "http://iss1.washington.edu/", "label": "ISS1", "campus": "seattle", "pce": False, "affiliation": "{intl_stud: True}", } plink = PopularLink.objects.create(**data) username = "jinter" req = get_request_with_user(username) affiliations = get_all_affiliations(req) user = get_user_model(req) link_data = { "user": user, "url": "http://iss.washington.edu/", "label": "ISS1", "is_anonymous": False, "is_student": affiliations.get('student', False), "is_undegrad": affiliations.get('undergrad', False), "is_grad_student": affiliations.get('grad', False), "is_employee": affiliations.get('employee', False), "is_faculty": affiliations.get('faculty', False), "is_seattle": affiliations.get('seattle', False), "is_tacoma": affiliations.get('tacoma', False), "is_bothell": affiliations.get('bothell', False), "is_pce": affiliations.get('pce', False), "is_student_employee": affiliations.get('stud_employee', False), "is_intl_stud": affiliations.get('intl_stud', False) } l1 = VisitedLinkNew.objects.create(**link_data) qls = get_quicklink_data(req)<|fim▁hole|> self.assertEqual(qls['recent_links'][0]['label'], "ISS1") self.assertEqual(qls['default_links'][0]['label'], "International Student Services (ISS)") def test_bot_quicklinks(self): username = "botgrad" req = get_request_with_user(username) bot_qls = get_quicklink_data(req) self.assertEqual(bot_qls['default_links'][0]['url'], "http://www.uwb.edu/cie") def test_tac_quicklinks(self): username = "tacgrad" req = get_request_with_user(username) tac_qls = get_quicklink_data(req) self.assertEqual(tac_qls['default_links'][0]['label'], "International Student and Scholar Services (ISSS)") def test_MUWM_4760(self): req = get_request_with_user('bill') data = get_quicklink_data(req) self.assertTrue(data['instructor']) self.assertTrue(data['sea_emp']) self.assertFalse(data['student']) req = get_request_with_user('javerage') data = get_quicklink_data(req) self.assertFalse(data['instructor']) self.assertTrue(data['student']) self.assertFalse(data['bot_student']) self.assertFalse(data['tac_student']) self.assertTrue(data['sea_student']) self.assertTrue(data['sea_emp']) self.assertFalse(data['bot_emp']) self.assertFalse(data['tac_emp']) req = get_request_with_user('jbothell') data = get_quicklink_data(req) self.assertTrue(data['student']) self.assertTrue(data['bot_student']) req = get_request_with_user('eight') data = get_quicklink_data(req) self.assertTrue(data['student']) self.assertTrue(data['tac_student']) self.assertTrue(data['instructor']) self.assertTrue(data['sea_emp'])<|fim▁end|>
<|file_name|>contexts.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Etalage -- Open Data POIs portal # By: Emmanuel Raviart <[email protected]> # # Copyright (C) 2011, 2012 Easter-eggs # http://gitorious.org/infos-pratiques/etalage # # This file is part of Etalage. # # Etalage is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # Etalage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Context loaded and saved in WSGI requests""" import gettext import webob from . import conf __all__ = ['Ctx', 'null_ctx'] class Ctx(object): _parent = None default_values = dict( _lang = None, _scopes = UnboundLocalError, _translator = None, base_categories_slug = None, category_tags_slug = None, container_base_url = None, distance = None, # Max distance in km gadget_id = None, hide_directory = False, req = None, subscriber = None, ) env_keys = ('_lang', '_scopes', '_translator') def __init__(self, req = None): if req is not None: self.req = req etalage_env = req.environ.get('etalage', {}) for key in object.__getattribute__(self, 'env_keys'): value = etalage_env.get(key) if value is not None: setattr(self, key, value) def __getattribute__(self, name): try: return object.__getattribute__(self, name) except AttributeError: parent = object.__getattribute__(self, '_parent') if parent is None: default_values = object.__getattribute__(self, 'default_values') if name in default_values: return default_values[name] raise return getattr(parent, name) @property def _(self): return self.translator.ugettext def blank_req(self, path, environ = None, base_url = None, headers = None, POST = None, **kw): env = environ.copy() if environ else {} etalage_env = env.setdefault('etalage', {}) for key in self.env_keys: value = getattr(self, key) if value is not None: etalage_env[key] = value return webob.Request.blank(path, environ = env, base_url = base_url, headers = headers, POST = POST, **kw) def get_containing(self, name, depth = 0): """Return the n-th (n = ``depth``) context containing attribute named ``name``.""" ctx_dict = object.__getattribute__(self, '__dict__') if name in ctx_dict: if depth <= 0: return self depth -= 1 parent = ctx_dict.get('_parent') if parent is None: return None return parent.get_containing(name, depth = depth) def get_inherited(self, name, default = UnboundLocalError, depth = 1): ctx = self.get_containing(name, depth = depth) if ctx is None: if default is UnboundLocalError: raise AttributeError('Attribute %s not found in %s' % (name, self)) return default return object.__getattribute__(ctx, name) def iter(self): yield self parent = object.__getattribute__(self, '_parent') if parent is not None: for ancestor in parent.iter(): yield ancestor def iter_containing(self, name): ctx_dict = object.__getattribute__(self, '__dict__') if name in ctx_dict: yield self parent = ctx_dict.get('_parent') if parent is not None: for ancestor in parent.iter_containing(name): yield ancestor def iter_inherited(self, name): for ctx in self.iter_containing(name): yield object.__getattribute__(ctx, 
name) def lang_del(self): del self._lang if self.req is not None and self.req.environ.get('etalage') is not None \ and '_lang' in self.req.environ['etalage']: del self.req.environ['etalage']['_lang'] def lang_get(self): if self._lang is None: # self._lang = self.req.accept_language.best_matches('en-US') if self.req is not None else [] # Note: Don't forget to add country-less language code when only a "language-COUNTRY" code is given. self._lang = ['fr-FR', 'fr'] if self.req is not None: self.req.environ.setdefault('etalage', {})['_lang'] = self._lang return self._lang def lang_set(self, lang): self._lang = lang if self.req is not None: self.req.environ.setdefault('etalage', {})['_lang'] = self._lang # Reinitialize translator for new languages. if self._translator is not None: # Don't del self._translator, because attribute _translator can be defined in a parent. self._translator = None if self.req is not None and self.req.environ.get('etalage') is not None \ and '_translator' in self.req.environ['etalage']: del self.req.environ['etalage']['_translator'] lang = property(lang_get, lang_set, lang_del) def new(self, **kwargs): ctx = Ctx() ctx._parent = self for name, value in kwargs.iteritems(): setattr(ctx, name, value) return ctx @property def parent(self): return object.__getattribute__(self, '_parent') def scopes_del(self): del self._scopes if self.req is not None and self.req.environ.get('wenoit_etalage') is not None \ and '_scopes' in self.req.environ['wenoit_etalage']: del self.req.environ['wenoit_etalage']['_scopes'] def scopes_get(self): return self._scopes def scopes_set(self, scopes): self._scopes = scopes if self.req is not None: self.req.environ.setdefault('wenoit_etalage', {})['_scopes'] = scopes scopes = property(scopes_get, scopes_set, scopes_del) @property def session(self): return self.req.environ.get('beaker.session') if self.req is not None else None @property def translator(self): """Get a valid translator object from one or several languages names.""" if self._translator is None:<|fim▁hole|> languages = [languages] translator = gettext.NullTranslations() i18n_dir_by_plugin_name = conf['i18n_dir_by_plugin_name'] or {} for name, i18n_dir in [ ('biryani', conf['biryani_i18n_dir']), (conf['package_name'], conf['i18n_dir']), ] + sorted(i18n_dir_by_plugin_name.iteritems()): if name is not None and i18n_dir is not None: translator = new_translator(name, i18n_dir, languages, fallback = translator) self._translator = translator return self._translator null_ctx = Ctx() null_ctx.lang = ['fr-FR', 'fr'] def new_translator(domain, localedir, languages, fallback = None): new = gettext.translation(domain, localedir, fallback = True, languages = languages) if fallback is not None: new.add_fallback(fallback) return new<|fim▁end|>
languages = self.lang if not languages: return gettext.NullTranslations() if not isinstance(languages, list):
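
A brief sketch of the overlay behaviour that Ctx implements through __getattribute__: a lookup that misses on a child context falls through to its parent chain and, ultimately, to default_values, so new() builds cheap derived contexts.

root = Ctx()
root.lang = ['fr-FR', 'fr']         # stored through the lang property setter
child = root.new(gadget_id = 42)    # overlay context with root as its parent
child.gadget_id                     # -> 42, found on the child itself
child.lang                          # -> ['fr-FR', 'fr'], resolved on the parent
child.distance                      # -> None, taken from Ctx.default_values
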
<|file_name|>Tooltip.Props.js<|end_file_name|><|fim▁begin|>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var TooltipDelay; (function (TooltipDelay) { TooltipDelay[TooltipDelay["zero"] = 0] = "zero"; TooltipDelay[TooltipDelay["medium"] = 1] = "medium"; })(TooltipDelay = exports.TooltipDelay || (exports.TooltipDelay = {}));<|fim▁hole|> //# sourceMappingURL=Tooltip.Props.js.map<|fim▁end|>
<|file_name|>references-test.ts<|end_file_name|><|fim▁begin|>import { CONSTANT_TAG, DirtyableTag, UpdatableTag, RevisionTag, Reference, CachedReference, combine } from '@glimmer/reference'; import { dict } from '@glimmer/util'; class UpdatableReference<T> implements Reference<T> { public tag: RevisionTag; private _tag: DirtyableTag; constructor(private content: T) { this.tag = this._tag = new DirtyableTag(); } value(): T { return this.content; } update(content: T) { this._tag.dirty(); return this.content = content; } } class TaggedDict<T> { public tag: RevisionTag; private _tag: DirtyableTag; private data = dict<T>(); constructor() { this.tag = this._tag = new DirtyableTag(); } get(key: string): T { return this.data[key]; } set(key: string, value: T) { this._tag.dirty(); return this.data[key] = value; } } QUnit.module("References"); QUnit.test("CachedReference caches computation correctly", assert => { let computed = 0; class DictValueReference extends CachedReference<string> { public tag: RevisionTag; constructor(private dict: TaggedDict<string>, private key: string) { super(); this.tag = dict.tag; } compute(): string { computed++; return this.dict.get(this.key); } } let dict = new TaggedDict<string>(); let reference = new DictValueReference(dict, 'foo'); dict.set('foo', 'bar'); assert.strictEqual(computed, 0, 'precond'); assert.equal(reference.value(), 'bar'); assert.equal(reference.value(), 'bar'); assert.equal(reference.value(), 'bar'); assert.strictEqual(computed, 1, 'computed'); dict.set('foo', 'BAR'); assert.equal(reference.value(), 'BAR'); assert.equal(reference.value(), 'BAR'); assert.equal(reference.value(), 'BAR'); assert.strictEqual(computed, 2, 'computed'); dict.set('baz', 'bat'); assert.equal(reference.value(), 'BAR'); assert.equal(reference.value(), 'BAR'); assert.equal(reference.value(), 'BAR'); assert.strictEqual(computed, 3, 'computed'); dict.set('foo', 'bar'); assert.equal(reference.value(), 'bar'); assert.equal(reference.value(), 'bar'); assert.equal(reference.value(), 'bar'); assert.strictEqual(computed, 4, 'computed'); }); QUnit.test("CachedReference caches nested computation correctly", assert => { let computed = 0; class DictValueReference extends CachedReference<string> { public tag: RevisionTag; private _tag: UpdatableTag; constructor(private parent: Reference<TaggedDict<string>>, private key: string) { super(); let _tag = this._tag = new UpdatableTag(CONSTANT_TAG); this.tag = combine([parent.tag, _tag]); } compute(): string { computed++; let { parent, _tag, key } = this; let dict = parent.value(); _tag.update(dict.tag); return dict.get(key); } } let first = new TaggedDict<string>(); let second = new TaggedDict<string>();<|fim▁hole|> let dictReference = new UpdatableReference(first); let valueReference = new DictValueReference(dictReference, 'foo'); first.set('foo', 'bar'); assert.strictEqual(computed, 0, 'precond'); assert.equal(valueReference.value(), 'bar'); assert.equal(valueReference.value(), 'bar'); assert.equal(valueReference.value(), 'bar'); assert.strictEqual(computed, 1, 'computed'); second.set('foo', 'BAR'); assert.equal(valueReference.value(), 'bar'); assert.equal(valueReference.value(), 'bar'); assert.equal(valueReference.value(), 'bar'); assert.strictEqual(computed, 1, 'computed'); dictReference.update(second); assert.equal(valueReference.value(), 'BAR'); assert.equal(valueReference.value(), 'BAR'); assert.equal(valueReference.value(), 'BAR'); assert.strictEqual(computed, 2, 'computed'); second.set('foo', 'foo-bar'); 
assert.equal(valueReference.value(), 'foo-bar'); assert.equal(valueReference.value(), 'foo-bar'); assert.equal(valueReference.value(), 'foo-bar'); assert.strictEqual(computed, 3, 'computed'); });<|fim▁end|>
<|file_name|>combination-sum-ii.py<|end_file_name|><|fim▁begin|># coding: utf-8 class Solution(object): @staticmethod def dfs(candidates, target, vis, res, cur_idx, sum): if sum > target: return if sum == target: ans = [candidates[i] for i in cur_idx if i >= 0] res.append(ans) return if sum < target: for i, v in enumerate(candidates): if sum + v > target: break if i != cur_idx[-1] + 1 and candidates[i] == candidates[i-1]: continue if i >= cur_idx[-1] and (not vis[i]): vis[i] = 1 cur_idx.append(i) Solution.dfs(candidates, target, vis, res, cur_idx, sum+v)<|fim▁hole|> def combinationSum2(self, candidates, target): """ :type candidates: List[int] :type target: int :rtype: List[List[int]] """ candidates = sorted(candidates) n = len(candidates) res = [] cur_idx = [-1] vis = [0 for _ in candidates] Solution.dfs(candidates, target, vis, res, cur_idx, 0) # return map(list, list(res)) return res s = Solution() print s.combinationSum2([10,1,2,7,6,1,5], 8) print s.combinationSum2([2,5,2,1,2], 5)<|fim▁end|>
vis[i] = 0 cur_idx.pop()
<|file_name|>frames-mips.cc<|end_file_name|><|fim▁begin|>// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided // with the distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT<|fim▁hole|>// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include "v8.h" #if defined(V8_TARGET_ARCH_MIPS) #include "assembler.h" #include "assembler-mips.h" #include "assembler-mips-inl.h" #include "frames-inl.h" #include "mips/assembler-mips-inl.h" #include "macro-assembler.h" #include "macro-assembler-mips.h" namespace v8 { namespace internal { Address ExitFrame::ComputeStackPointer(Address fp) { return Memory::Address_at(fp + ExitFrameConstants::kSPOffset); } Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; } Register StubFailureTrampolineFrame::context_register() { return cp; } } } // namespace v8::internal #endif // V8_TARGET_ARCH_MIPS<|fim▁end|>
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
<|file_name|>binary_16_byte.py<|end_file_name|><|fim▁begin|>""" Binary 16 byte is an export plugin to convert gcode into 16 byte binary segments. An export plugin is a script in the export_plugins folder which has the functions getOuput, isArchivable and writeOutput. It is meant to be run from the export tool. To ensure that the plugin works on platforms which do not handle file capitalization properly, give the plugin a lower case name. The getOutput function of this script takes a gcode text and returns that text converted into 16 byte segments. The writeOutput function of this script takes a gcode text and writes that in a binary format converted into 16 byte segments. Many of the functions in this script are copied from gcodec in skeinforge_utilities. They are copied rather than imported so developers making new plugins do not have to learn about gcodec, the code here is all they need to learn. This plugin is just a starter to make a real binary converter. //Record structure BinArray(0) = AscW(Inst_Code_Letter) BinArray(1) = cInst_Code X Data sInt32_to_Hbytes(iXdim_1) BinArray(2) = lsb 'short lsb BinArray(3) = msb 'short msb Y Data sInt32_to_Hbytes(iYdim_2) BinArray(4) = lsb 'short lsb BinArray(5) = msb 'short msb Z Data sInt32_to_Hbytes(iZdim_3) BinArray(6) = lsb 'short lsb BinArray(7) = msb 'short msb I Data sInt32_to_Hbytes(iIdim_4) BinArray(8) = lsb 'short lsb BinArray(9) = msb 'short msb J Data sInt32_to_Hbytes(iJdim_5) BinArray(10) = lsb 'short lsb BinArray(11) = msb 'short msb BinArray(12) = FP_Char sInt32_to_Hbytes(iFP_Num) BinArray(13) = lsb 'short lsb BinArray(14) = bActiveFlags BinArray(15) = AscW("#")End of record filler Byte 14 is worth a few extra notes, this byte is used to define which of the axes are active, its used to get round the problem of say a line of code with no mention of z. This would be put into the file as z = 0 as the space for this data is reserved, if we did nothing, this would instruct the machine to go to z = 0. If we use the active flag to define the z axis as inactive the z = 0 is ignored and the value set to the last saved value of z, i.e it does not move. If the z data is actually set to z = 0 then the axis would be set to active and the move takes place. """ <|fim▁hole|>from __future__ import absolute_import import __init__ from skeinforge_tools.skeinforge_utilities import gcodec from skeinforge_tools.skeinforge_utilities import preferences from skeinforge_tools.skeinforge_utilities import interpret from skeinforge_tools import polyfile from struct import Struct import cStringIO import os import sys __author__ = "Enrique Perez ([email protected])" __date__ = "$Date: 2008/21/04 $" __license__ = "GPL 3.0" def getIntegerFromCharacterLengthLineOffset( character, offset, splitLine, stepLength ): "Get the integer after the first occurence of the character in the split line." lineFromCharacter = getStringFromCharacterSplitLine( character, splitLine ) if lineFromCharacter == None: return 0 floatValue = ( float( lineFromCharacter ) + offset ) / stepLength return int( round( floatValue ) ) def getIntegerFlagFromCharacterSplitLine( character, splitLine ): "Get the integer flag after the first occurence of the character in the split line." lineFromCharacter = getStringFromCharacterSplitLine( character, splitLine ) if lineFromCharacter == None: return 0 return 1 def getOutput( gcodeText, binary16BytePreferences = None ): """Get the exported version of a gcode file. 
This function, isArchivable and writeOutput are the only necessary functions in a skeinforge export plugin. If this plugin writes an output than should not be printed, an empty string should be returned.""" if gcodeText == '': return '' if binary16BytePreferences == None: binary16BytePreferences = Binary16BytePreferences() preferences.readPreferences( binary16BytePreferences ) skein = Binary16ByteSkein() skein.parseGcode( gcodeText, binary16BytePreferences ) return skein.output.getvalue() def getStringFromCharacterSplitLine( character, splitLine ): "Get the string after the first occurence of the character in the split line." indexOfCharacter = indexOfStartingWithSecond( character, splitLine ) if indexOfCharacter < 0: return None return splitLine[ indexOfCharacter ][ 1 : ] def getSummarizedFilename( fileName ): "Get the fileName basename if the file is in the current working directory, otherwise return the original full name." if os.getcwd() == os.path.dirname( fileName ): return os.path.basename( fileName ) return fileName def getTextLines( text ): "Get the all the lines of text of a text." return text.replace( '\r', '\n' ).split( '\n' ) def indexOfStartingWithSecond( letter, splitLine ): "Get index of the first occurence of the given letter in the split line, starting with the second word. Return - 1 if letter is not found" for wordIndex in xrange( 1, len( splitLine ) ): word = splitLine[ wordIndex ] firstLetter = word[ 0 ] if firstLetter == letter: return wordIndex return - 1 def isArchivable(): "Return whether or not this plugin is archivable." return True def isReplacable(): "Return whether or not the output from this plugin is replacable. This should be true if the output is text and false if it is binary." return False def writeFileText( fileName, fileText ): "Write a text to a file." try: file = open( fileName, 'wb' ) file.write( fileText ) file.close() except IOError: print( 'The file ' + fileName + ' can not be written to.' ) def writeOutput( fileName = '', gcodeText = '' ): "Write the exported version of a gcode file. This function, getOutput and isArchivable are the only necessary functions in a skeinforge export plugin." if fileName == '': unmodified = interpret.getGNUTranslatorFilesUnmodified() if len( unmodified ) == 0: print( "There are no unmodified gcode files in this folder." ) return fileName = unmodified[ 0 ] binary16BytePreferences = Binary16BytePreferences() preferences.readPreferences( binary16BytePreferences ) gcodeText = gcodec.getGcodeFileText( fileName, gcodeText ) skeinOutput = getOutput( gcodeText, binary16BytePreferences ) suffixFilename = fileName[ : fileName.rfind( '.' ) ] + '_export.' + binary16BytePreferences.fileExtension.value writeFileText( suffixFilename, skeinOutput ) print( 'The converted file is saved as ' + getSummarizedFilename( suffixFilename ) ) class Binary16BytePreferences: "A class to handle the export preferences." def __init__( self ): "Set the default preferences, execute title & preferences fileName." #Set the default preferences. 
self.archive = [] self.fileExtension = preferences.StringPreference().getFromValue( 'File Extension:', 'bin' ) self.archive.append( self.fileExtension ) self.fileNameInput = preferences.Filename().getFromFilename( [ ( 'Gcode text files', '*.gcode' ) ], 'Open File to be Converted to Binary 16 Byte', '' ) self.archive.append( self.fileNameInput ) self.feedrateStepLength = preferences.FloatPreference().getFromValue( 'Feedrate Step Length (millimeters/second)', 0.1 ) self.archive.append( self.feedrateStepLength ) self.xStepLength = preferences.FloatPreference().getFromValue( 'X Step Length (millimeters)', 0.1 ) self.archive.append( self.xStepLength ) self.yStepLength = preferences.FloatPreference().getFromValue( 'Y Step Length (millimeters)', 0.1 ) self.archive.append( self.yStepLength ) self.zStepLength = preferences.FloatPreference().getFromValue( 'Z Step Length (millimeters)', 0.01 ) self.archive.append( self.zStepLength ) self.xOffset = preferences.FloatPreference().getFromValue( 'X Offset (millimeters)', 0.0 ) self.archive.append( self.xOffset ) self.yOffset = preferences.FloatPreference().getFromValue( 'Y Offset (millimeters)', 0.0 ) self.archive.append( self.yOffset ) self.zOffset = preferences.FloatPreference().getFromValue( 'Z Offset (millimeters)', 0.0 ) self.archive.append( self.zOffset ) #Create the archive, title of the execute button, title of the dialog & preferences fileName. self.executeTitle = 'Convert to Binary 16 Byte' self.saveTitle = 'Save Preferences' preferences.setHelpPreferencesFileNameTitleWindowPosition( self, 'skeinforge_tools.export_plugins.binary_16_byte.html' ) def execute( self ): "Convert to binary 16 byte button has been clicked." fileNames = polyfile.getFileOrDirectoryTypesUnmodifiedGcode( self.fileNameInput.value, [ '.gcode' ], self.fileNameInput.wasCancelled ) for fileName in fileNames: writeOutput( fileName ) class Binary16ByteSkein: "A class to convert gcode into 16 byte binary segments." def __init__( self ): self.output = cStringIO.StringIO() def parseGcode( self, gcodeText, binary16BytePreferences ): "Parse gcode text and store the gcode." self.binary16BytePreferences = binary16BytePreferences lines = getTextLines( gcodeText ) for line in lines: self.parseLine( line ) def parseLine( self, line ): "Parse a gcode line." 
binary16BytePreferences = self.binary16BytePreferences splitLine = line.split() if len( splitLine ) < 1: return firstWord = splitLine[ 0 ] if len( firstWord ) < 1: return firstLetter = firstWord[ 0 ] if firstLetter == '(': return feedrateInteger = getIntegerFromCharacterLengthLineOffset( 'F', 0.0, splitLine, binary16BytePreferences.feedrateStepLength.value ) iInteger = getIntegerFromCharacterLengthLineOffset( 'I', 0.0, splitLine, binary16BytePreferences.xStepLength.value ) jInteger = getIntegerFromCharacterLengthLineOffset( 'J', 0.0, splitLine, binary16BytePreferences.yStepLength.value ) xInteger = getIntegerFromCharacterLengthLineOffset( 'X', binary16BytePreferences.xOffset.value, splitLine, binary16BytePreferences.xStepLength.value ) yInteger = getIntegerFromCharacterLengthLineOffset( 'Y', binary16BytePreferences.yOffset.value, splitLine, binary16BytePreferences.yStepLength.value ) zInteger = getIntegerFromCharacterLengthLineOffset( 'Z', binary16BytePreferences.zOffset.value, splitLine, binary16BytePreferences.zStepLength.value ) sixteenByteStruct = Struct( 'cBhhhhhhBc' ) # print( 'xInteger' ) # print( xInteger ) flagInteger = getIntegerFlagFromCharacterSplitLine( 'X', splitLine ) flagInteger += 2 * getIntegerFlagFromCharacterSplitLine( 'Y', splitLine ) flagInteger += 4 * getIntegerFlagFromCharacterSplitLine( 'Z', splitLine ) flagInteger += 8 * getIntegerFlagFromCharacterSplitLine( 'I', splitLine ) flagInteger += 16 * getIntegerFlagFromCharacterSplitLine( 'J', splitLine ) flagInteger += 32 * getIntegerFlagFromCharacterSplitLine( 'F', splitLine ) packedString = sixteenByteStruct.pack( firstLetter, int( firstWord[ 1 : ] ), xInteger, yInteger, zInteger, iInteger, jInteger, feedrateInteger, flagInteger, '#' ) self.output.write( packedString ) def main( hashtable = None ): "Display the export dialog." if len( sys.argv ) > 1: writeOutput( ' '.join( sys.argv[ 1 : ] ) ) else: preferences.displayDialog( Binary16BytePreferences() ) if __name__ == "__main__": main()<|fim▁end|>
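The record layout in the docstring maps directly onto the Struct('cBhhhhhhBc') that parseLine packs with. A round-trip sketch of one such 16-byte record; the '<' prefix pinning little-endian is an assumption for illustration, since the plugin itself packs with native byte order:

from struct import Struct

# letter, instruction code, X, Y, Z, I, J, F (six int16 fields), active flags, '#'
RECORD = Struct('<cBhhhhhhBc')
assert RECORD.size == 16

flags = 1 | 2 | 32                       # X, Y and F marked active; Z/I/J inactive
packed = RECORD.pack(b'G', 1, 100, -50, 0, 0, 0, 35, flags, b'#')

letter, code, x, y, z, i, j, f, active, end = RECORD.unpack(packed)
assert (letter, code, end) == (b'G', 1, b'#')
assert active & 4 == 0                   # Z flagged inactive: its zero is ignored and
                                         # the machine keeps the last saved Z value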
<|file_name|>test_cert_verification.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included<|fim▁hole|># THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Check that all of the certs on all service endpoints validate. """ import unittest from tests.integration import ServiceCertVerificationTest import boto.swf class SWFCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest): swf = True regions = boto.swf.regions() def sample_service_call(self, conn): conn.list_domains('REGISTERED')<|fim▁end|>
# in all copies or substantial portions of the Software. #
<|file_name|>database.py<|end_file_name|><|fim▁begin|>class DatabaseRouter(object): ''' These functions are called when Django accesses the database. Returns the name of the database to use depending on the app and model. Returning None means use default. ''' def db_for_read(self, model, **hints): return self.__db_for_read_and_write(model, **hints) def db_for_write(self, model, **hints): return self.__db_for_read_and_write(model, **hints) def allow_relation(self, obj1, obj2, **hints): return None def allow_syncdb(self, db, model): ''' Makes sure the correct databases are used when "python manage.py syncdb" is called. Returning True means "model" should be synchronised with "db". ''' allow = False if db == 'default': allow = model._meta.app_label != 'OGRgeoConverter' allow = allow and model._meta.app_label != 'sessions' elif db == 'sessions_db': allow = model._meta.app_label == 'sessions' elif db == 'ogrgeoconverter_db': allow = model._meta.db_table != 'ogrgeoconverter_log_entries' allow = allow and model._meta.db_table != 'ogrgeoconverter_ogr_log_entries' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_jobs' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_folders' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_file_matches' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_files' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_file_id_tracking' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_urls' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_shell_parameters' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_download_items' allow = allow and model._meta.db_table != 'ogrgeoconverter_conversion_job_identification'<|fim▁hole|> elif db == 'ogrgeoconverter_conversion_jobs_db': allow = model._meta.db_table == 'ogrgeoconverter_conversion_jobs' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_folders' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_file_matches' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_files' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_file_id_tracking' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_urls' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_shell_parameters' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_download_items' allow = allow or model._meta.db_table == 'ogrgeoconverter_conversion_job_identification' allow = allow and model._meta.app_label == 'OGRgeoConverter' elif db == 'ogrgeoconverter_log_db': allow = model._meta.db_table == 'ogrgeoconverter_log_entries' allow = allow or model._meta.db_table == 'ogrgeoconverter_ogr_log_entries' allow = allow and model._meta.app_label == 'OGRgeoConverter' else: allow = None return allow def __db_for_read_and_write(self, model, **hints): if model._meta.app_label == 'sessions': return 'sessions_db' elif model._meta.app_label == 'OGRgeoConverter': if model._meta.db_table == 'ogrgeoconverter_log_entries' \ or model._meta.db_table == 'ogrgeoconverter_ogr_log_entries': return 'ogrgeoconverter_log_db' elif model._meta.db_table == 'ogrgeoconverter_conversion_jobs' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_folders' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_file_matches' \ or model._meta.db_table == 
'ogrgeoconverter_conversion_job_files' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_file_id_tracking' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_urls' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_shell_parameters' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_download_items' \ or model._meta.db_table == 'ogrgeoconverter_conversion_job_identification': return 'ogrgeoconverter_conversion_jobs_db' else: return 'ogrgeoconverter_db' return None<|fim▁end|>
allow = allow and model._meta.app_label == 'OGRgeoConverter'
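A router like the one above only takes effect once it is listed in the project settings. A minimal wiring sketch follows; the dotted path and engine choice are hypothetical, while the alias names match the ones the router returns (the allow_syncdb hook dates this to Django 1.6 or earlier):

# settings.py (illustrative sketch)
DATABASE_ROUTERS = ['myproject.database.DatabaseRouter']   # hypothetical module path

DATABASES = {
    'default':                            {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'main'},
    'sessions_db':                        {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'sessions'},
    'ogrgeoconverter_db':                 {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'ogc'},
    'ogrgeoconverter_conversion_jobs_db': {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'ogc_jobs'},
    'ogrgeoconverter_log_db':             {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'ogc_logs'},
}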
<|file_name|>ZEofHeader.java<|end_file_name|><|fim▁begin|>/* * Jermit * * The MIT License (MIT) *<|fim▁hole|> * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. * * @author Kevin Lamonte [[email protected]] * @version 1 */ package jermit.protocol.zmodem; /** * ZEofHeader represents the end of a file. */ class ZEofHeader extends Header { // ------------------------------------------------------------------------ // Constructors ----------------------------------------------------------- // ------------------------------------------------------------------------ /** * Public constructor. */ public ZEofHeader() { this(0); } /** * Public constructor. * * @param data the data field for this header */ public ZEofHeader(final int data) { super(Type.ZEOF, (byte) 0x0B, "ZEOF", data); } // ------------------------------------------------------------------------ // Header ----------------------------------------------------------------- // ------------------------------------------------------------------------ // ------------------------------------------------------------------------ // ZEofHeader ------------------------------------------------------------- // ------------------------------------------------------------------------ /** * Get the file size value. * * @return the value */ public int getFileSize() { return data; } }<|fim▁end|>
* Copyright (C) 2018 Kevin Lamonte *
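The class above is deliberately thin: ZEOF is just a header whose 32-bit data field carries the final file offset. An illustrative Python equivalent of the same shape, with field meanings taken from the Java class and the wire encoding deliberately not modeled:

class Header:
    def __init__(self, type_name, type_byte, data=0):
        self.type_name = type_name   # e.g. "ZEOF"
        self.type_byte = type_byte   # 0x0B for ZEOF, per the constructor above
        self.data = data             # the 32-bit header data field

class ZEofHeader(Header):
    """End-of-file header: the data field holds the file size/offset."""
    def __init__(self, data=0):
        super().__init__("ZEOF", 0x0B, data)

    @property
    def file_size(self):
        return self.data

assert ZEofHeader(1024).file_size == 1024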
<|file_name|>testMessageKeys.py<|end_file_name|><|fim▁begin|>import json
import re

packageJson = '../../../package.json'

with open(packageJson) as data_file:
    data = json.load(data_file)

config = '../../pkjs/config.js'<|fim▁hole|>
    suggestKey = re.search(r"messageKey\"\:(.[^,]*)", s)
    if suggestKey:
        keys.append(suggestKey.group(1).strip('" ,'))
    s = conf_file.readline()

def func(item):
    return item.split('[', 1)[0]

knownKeys = list(map(func, data["pebble"]["messageKeys"]))

for key in keys:
    #print('processing', key)
    if key not in knownKeys:
        print('unknown key', key)<|fim▁end|>
with open(config) as conf_file: s = conf_file.readline() keys = [] while (s):
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass pub mod B; pub mod C; <|fim▁hole|><|fim▁end|>
pub use self::C::T;
<|file_name|>Ewens&uniform+RSK_rho_1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Thu Apr 27 13:35:59 2017 @author: mkammoun.lct """ import numpy as np import matplotlib.pyplot as pl from bisect import bisect import math n=200 n2=10000 def per(theta,n): perm=[] for i in range(1,n+1): if np.random.binomial(1,theta/(float(theta)+i-1))==1: perm.append(i) else: j=np.random.randint(i-1) k=perm[j] perm[j]=i perm.append(k) return perm <|fim▁hole|> P = []; Q = [] def insert(m, n=0): '''Insert m into P, then place n in Q at the same place''' for r in range(len(P)): if m > P[r][-1]: P[r].append(m); return c = bisect(P[r], m) P[r][c],m = m,P[r][c] P.append([m]) return P for i in range(len(p)): insert(int(p[i]), i+1) return map(len,P) def pointspos(per): rsk=RSK(per) return [rsk[i]-i-1 for i in range(len(rsk)) if (rsk[i]-i -1) >=0] pointspos([1,2,3]) ## seulement les points entre [-3 rac(n) et 3 rac(n)] alea1={} alea2={} for i in range(int(3*n**0.5)+1): alea1[i]=0 alea2[i]=0 for j in range(n2): per_unif=np.random.permutation(range(1,np.random.poisson(n)+1)) per_ewens=per(0.1,np.random.poisson(n)) print j p1=pointspos(per_unif) p2=pointspos(per_ewens) for i in p1 : if i<3*n**0.5: alea1[i]+=1 for i in p2 : if i<3*n**0.5: alea2[i]+=1 x=range(int(3*n**0.5+1)) a1=np.array([alea1[i]for i in x])/float(n2) a2=np.array([alea2[i]for i in x])/float(n2) x2=np.array(range(int(1000*2*n**0.5+1)))/1000 a3=np.array(np.arccos(np.array(x2)/(2*n**0.5)))/math.pi pl.plot(x,a1,"*",label="uniform") pl.plot(x,a2,"+",label="Ewens") pl.plot(x2,a3,label="approximation sinus") pl.legend()<|fim▁end|>
per(0.1,1000)


def RSK(p):
    '''Given a permutation p, spit out a pair of Young tableaux'''
    P = []; Q = []

    def insert(m, n=0):
        '''Insert m into P, then place n in Q at the same place'''
        for r in range(len(P)):
            if m > P[r][-1]:
                P[r].append(m); return
            c = bisect(P[r], m)
            P[r][c], m = m, P[r][c]
        P.append([m])
        return P

    for i in range(len(p)):
        insert(int(p[i]), i + 1)
    return map(len, P)


def pointspos(per):
    rsk = RSK(per)
    return [rsk[i] - i - 1 for i in range(len(rsk)) if (rsk[i] - i - 1) >= 0]


pointspos([1, 2, 3])

## only the points between -3*sqrt(n) and 3*sqrt(n)
alea1 = {}
alea2 = {}
for i in range(int(3 * n ** 0.5) + 1):
    alea1[i] = 0
    alea2[i] = 0
for j in range(n2):
    per_unif = np.random.permutation(range(1, np.random.poisson(n) + 1))
    per_ewens = per(0.1, np.random.poisson(n))
    print j
    p1 = pointspos(per_unif)
    p2 = pointspos(per_ewens)
    for i in p1:
        if i < 3 * n ** 0.5:
            alea1[i] += 1
    for i in p2:
        if i < 3 * n ** 0.5:
            alea2[i] += 1
x = range(int(3 * n ** 0.5 + 1))
a1 = np.array([alea1[i] for i in x]) / float(n2)
a2 = np.array([alea2[i] for i in x]) / float(n2)
x2 = np.array(range(int(1000 * 2 * n ** 0.5 + 1))) / 1000.0
a3 = np.array(np.arccos(np.array(x2) / (2 * n ** 0.5))) / math.pi
pl.plot(x, a1, "*", label="uniform")
pl.plot(x, a2, "+", label="Ewens")
pl.plot(x2, a3, label="sine approximation")
pl.legend()<|fim▁end|>
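A quick consistency check on the RSK-based point counts above is Schensted's theorem: the first row of the insertion tableau has the length of a longest increasing subsequence, which patience sorting computes with the same bisect primitive. An illustrative sketch:

from bisect import bisect

def lis_length(perm):
    """Pile tops of patience sorting coincide with the first RSK row."""
    tops = []
    for v in perm:
        c = bisect(tops, v)      # same bumping position RSK's insert uses
        if c == len(tops):
            tops.append(v)
        else:
            tops[c] = v
    return len(tops)

assert lis_length([3, 1, 4, 2]) == 2   # e.g. the increasing subsequence (1, 4)
assert lis_length([1, 2, 3]) == 3      # a single row of length 3, as RSK([1,2,3]) builds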
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import appActions from './application' import todosActions from './todos' import filterActions from './filter' import commentsActions from './comments' import userActions from './user' export { appActions, todosActions, filterActions, commentsActions, userActions } export * from './application'<|fim▁hole|>export * from './filter' export * from './comments' export * from './user'<|fim▁end|>
export * from './todos'
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import User<|fim▁hole|> from website.models import Issue # Create your models here. class Comment(models.Model): parent = models.ForeignKey('self', null=True, on_delete=models.CASCADE) issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name='comments') author = models.CharField(max_length=200) author_url = models.CharField(max_length=200) text = models.TextField() created_date = models.DateTimeField(default=timezone.now) def __str__(self): return self.text def children(self): return Comment.objects.filter(parent=self)<|fim▁end|>
from django.db import models from django.utils import timezone
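The self-referential parent field above is what threads replies; children() is its reverse lookup, and related_name='comments' hangs the comment set off the issue. A hedged usage sketch, assuming a configured Django project with at least one saved Issue (all other names come from the model):

# Illustrative only; requires a configured Django project and a saved Issue.
issue = Issue.objects.first()
root = Comment.objects.create(issue=issue, author="ada", author_url="",
                              text="First comment")
reply = Comment.objects.create(issue=issue, parent=root, author="bob",
                               author_url="", text="A threaded reply")

assert root.children().count() == 1      # children() filters on parent=self
assert issue.comments.count() == 2       # reverse access via related_name='comments'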
<|file_name|>ODSOColumnImpl.java<|end_file_name|><|fim▁begin|>/* ** GENEREATED FILE - DO NOT MODIFY ** */ package com.wilutions.mslib.office.impl; import com.wilutions.com.*; @SuppressWarnings("all") @CoClass(guid="{C09B8C5A-A463-DB41-5DAE-69E7A5F7FCBC}") public class ODSOColumnImpl extends Dispatch implements com.wilutions.mslib.office.ODSOColumn { @DeclDISPID(1610743808) public IDispatch getApplication() throws ComException { final Object obj = this._dispatchCall(1610743808,"Application", DISPATCH_PROPERTYGET,null); if (obj == null) return null; return (IDispatch)obj; } @DeclDISPID(1610743809) public Integer getCreator() throws ComException { final Object obj = this._dispatchCall(1610743809,"Creator", DISPATCH_PROPERTYGET,null); if (obj == null) return null; <|fim▁hole|> @DeclDISPID(1) public Integer getIndex() throws ComException { final Object obj = this._dispatchCall(1,"Index", DISPATCH_PROPERTYGET,null); if (obj == null) return null; return (Integer)obj; } @DeclDISPID(2) public String getName() throws ComException { final Object obj = this._dispatchCall(2,"Name", DISPATCH_PROPERTYGET,null); if (obj == null) return null; return (String)obj; } @DeclDISPID(3) public IDispatch getParent() throws ComException { final Object obj = this._dispatchCall(3,"Parent", DISPATCH_PROPERTYGET,null); if (obj == null) return null; return (IDispatch)obj; } @DeclDISPID(4) public String getValue() throws ComException { final Object obj = this._dispatchCall(4,"Value", DISPATCH_PROPERTYGET,null); if (obj == null) return null; return (String)obj; } public ODSOColumnImpl(String progId) throws ComException { super(progId, "{000C1531-0000-0000-C000-000000000046}"); } protected ODSOColumnImpl(long ndisp) { super(ndisp); } public String toString() { return "[ODSOColumnImpl" + super.toString() + "]"; } }<|fim▁end|>
return (Integer)obj; }
<|file_name|>SpeedyshareCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Test links:
# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg

import re
import urlparse

from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo


class SpeedyshareCom(SimpleHoster):
    __name__ = "SpeedyshareCom"
    __type__ = "hoster"
    __version__ = "0.06"
    __status__ = "testing"

    __pattern__ = r'https?://(?:www\.)?(speedyshare\.com|speedy\.sh)/\w+'
    __config__ = [("use_premium", "bool", "Use premium account if available", True)]

    __description__ = """Speedyshare.com hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("zapp-brannigan", "[email protected]")]

    NAME_PATTERN = r'class=downloadfilename>(?P<N>.*)</span></td>'
    SIZE_PATTERN = r'class=sizetagtext>(?P<S>.*) (?P<U>[kKmM]?[iI]?[bB]?)</div>'

    OFFLINE_PATTERN = r'class=downloadfilenamenotfound>.*</span>'
<|fim▁hole|>
    def setup(self):
        self.multiDL = False
        self.chunk_limit = 1

    def handle_free(self, pyfile):
        m = re.search(self.LINK_FREE_PATTERN, self.html)
        if m is not None:  # re.search returns None on a miss; guard before group()
            self.link = m.group(1)


getInfo = create_getInfo(SpeedyshareCom)<|fim▁end|>
LINK_FREE_PATTERN = r'<a href=\'(.*)\'><img src=/gf/slowdownload\.png alt=\'Slow Download\' border=0'
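handle_free above only assigns self.link when the search actually matched; with the guard inverted, m.group(1) on a None match object raises AttributeError. The same pattern in isolation, with an invented helper name:

import re

LINK_FREE_PATTERN = r"<a href='(.*)'><img src=/gf/slowdownload\.png"

def extract_link(html):
    m = re.search(LINK_FREE_PATTERN, html)
    return m.group(1) if m is not None else None   # never call group() on None

assert extract_link("<a href='/x.bin'><img src=/gf/slowdownload.png") == '/x.bin'
assert extract_link("no download button on this page") is None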
<|file_name|>api.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.http import HttpResponse, HttpRequest, QueryDict, HttpResponseRedirect
import json
import conekta

from store.models import *
from store.forms import *

### API REQUESTS FOR THE BASKET

def delBasket(request):
    id = str(request.GET.get('id'))
    if request.GET.get('id'):
        liston = request.session['basket']
        if id in liston:
            liston.remove(id)
            request.session['basket'] = liston
            msg = 'Success'
            status = 'ok'
        else:
            msg = 'Error: Product not found in basket'
            status = 'failed'
    else:
        msg = "Success"
        status = 'ok'
        try:
            del request.session['basket']
        except KeyError:
            msg = 'Error: Cant delete basket'
            status = 'failed'
    """
    response_data = {}
    response_data['result'] = status
    response_data['message'] = msg
    callback = request.GET.get('callback', '')
    response = json.dumps(response_data)
    response = callback + '(' + response + ');'
    return HttpResponse(response,content_type="application/json")
    """
    return HttpResponseRedirect("/store/checkout/")

def setBasket(request):
    id = str(request.GET.get('id'))
    if id.isdigit():
        if request.session.get('basket',False):
            # Already defined in the session
            liston = request.session['basket']
            if id in liston:
                msg = 'Error: product already exists'
                status = 'failed'
            else:
                liston.append(id)
                request.session['basket'] = liston
                msg = 'Success'
                status = 'ok'
        else:
            # Not defined yet
            msg = 'Success'<|fim▁hole|>
    else:
        msg = 'Error in the request'
        status = 'failed'

    response_data = {}
    response_data['result'] = status
    response_data['message'] = msg
    callback = request.GET.get('callback', '')
    response = json.dumps(response_data)
    response = callback + '(' + response + ');'
    return HttpResponse(response,content_type="application/json")

import pprint
from django.views.decorators.csrf import csrf_exempt

@csrf_exempt
def conektaio(request):
    try:
        data = json.loads(request.body)
    except:
        data = False
    if data:
        try:
            pedido = Pedido.objects.get(custom=data['data']['object']['reference_id'])
        except:
            pedido = False
        if pedido:
            dato = { "status": "success" ,"id": pedido.id, "nombre":pedido.payment }
            if data['data']['object']['status'] == "paid":
                pedido.paid=True
                pedido.save()
            numero = 200
        else:
            debug = Debug.objects.create(texto=data)
            debug.save()
            dato = { "status":"ergo" }
            numero = 200
    else:
        dato = { "status":"error" }
        numero = 400
    return HttpResponse(dato['status'],content_type="application/json",status=numero)
#### END API<|fim▁end|>
status = 'ok' request.session['basket'] = [id]
<|file_name|>fake_provisioner.go<|end_file_name|><|fim▁begin|>// Copyright 2012 tsuru authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package provisiontest import ( "fmt" "io" "io/ioutil" "net/url" "sort" "sync" "sync/atomic" "time" docker "github.com/fsouza/go-dockerclient" "github.com/pkg/errors" "github.com/tsuru/tsuru/action" "github.com/tsuru/tsuru/app/bind" "github.com/tsuru/tsuru/event" "github.com/tsuru/tsuru/net" "github.com/tsuru/tsuru/provision" "github.com/tsuru/tsuru/provision/dockercommon" "github.com/tsuru/tsuru/quota" "github.com/tsuru/tsuru/router/routertest" appTypes "github.com/tsuru/tsuru/types/app" ) var ( ProvisionerInstance *FakeProvisioner errNotProvisioned = &provision.Error{Reason: "App is not provisioned."} uniqueIpCounter int32 = 0 _ provision.NodeProvisioner = &FakeProvisioner{} _ provision.Provisioner = &FakeProvisioner{} _ provision.App = &FakeApp{} _ bind.App = &FakeApp{} ) const fakeAppImage = "app-image" func init() { ProvisionerInstance = NewFakeProvisioner() provision.Register("fake", func() (provision.Provisioner, error) { return ProvisionerInstance, nil }) } // Fake implementation for provision.App. type FakeApp struct { name string cname []string IP string platform string units []provision.Unit logs []string logMut sync.Mutex Commands []string Memory int64 Swap int64 CpuShare int commMut sync.Mutex Deploys uint env map[string]bind.EnvVar bindCalls []*provision.Unit bindLock sync.Mutex serviceEnvs []bind.ServiceEnvVar serviceLock sync.Mutex Pool string UpdatePlatform bool TeamOwner string Teams []string quota.Quota } func NewFakeApp(name, platform string, units int) *FakeApp { app := FakeApp{ name: name, platform: platform, units: make([]provision.Unit, units), Quota: quota.Unlimited, Pool: "test-default", } routertest.FakeRouter.AddBackend(&app) namefmt := "%s-%d" for i := 0; i < units; i++ { val := atomic.AddInt32(&uniqueIpCounter, 1) app.units[i] = provision.Unit{ ID: fmt.Sprintf(namefmt, name, i), Status: provision.StatusStarted, IP: fmt.Sprintf("10.10.10.%d", val), Address: &url.URL{ Scheme: "http", Host: fmt.Sprintf("10.10.10.%d:%d", val, val), }, } } return &app } func (a *FakeApp) GetMemory() int64 { return a.Memory } func (a *FakeApp) GetSwap() int64 { return a.Swap } func (a *FakeApp) GetCpuShare() int { return a.CpuShare } func (a *FakeApp) GetTeamsName() []string { return a.Teams } func (a *FakeApp) HasBind(unit *provision.Unit) bool { a.bindLock.Lock() defer a.bindLock.Unlock() for _, u := range a.bindCalls { if u.ID == unit.ID { return true } } return false } func (a *FakeApp) BindUnit(unit *provision.Unit) error { a.bindLock.Lock() defer a.bindLock.Unlock() a.bindCalls = append(a.bindCalls, unit) return nil } func (a *FakeApp) UnbindUnit(unit *provision.Unit) error { a.bindLock.Lock() defer a.bindLock.Unlock() index := -1 for i, u := range a.bindCalls { if u.ID == unit.ID { index = i break } } if index < 0 { return errors.New("not bound") } length := len(a.bindCalls) a.bindCalls[index] = a.bindCalls[length-1] a.bindCalls = a.bindCalls[:length-1] return nil } func (a *FakeApp) GetQuota() quota.Quota { return a.Quota } func (a *FakeApp) SetQuotaInUse(inUse int) error { if !a.Quota.Unlimited() && inUse > a.Quota.Limit { return &quota.QuotaExceededError{ Requested: uint(inUse), Available: uint(a.Quota.Limit), } } a.Quota.InUse = inUse return nil } func (a *FakeApp) GetCname() []string { return a.cname } func (a *FakeApp) GetServiceEnvs() 
[]bind.ServiceEnvVar { a.serviceLock.Lock() defer a.serviceLock.Unlock() return a.serviceEnvs } func (a *FakeApp) AddInstance(instanceArgs bind.AddInstanceArgs) error { a.serviceLock.Lock() defer a.serviceLock.Unlock() a.serviceEnvs = append(a.serviceEnvs, instanceArgs.Envs...) if instanceArgs.Writer != nil { instanceArgs.Writer.Write([]byte("add instance")) } return nil } func (a *FakeApp) RemoveInstance(instanceArgs bind.RemoveInstanceArgs) error { a.serviceLock.Lock() defer a.serviceLock.Unlock() lenBefore := len(a.serviceEnvs) for i := 0; i < len(a.serviceEnvs); i++ { se := a.serviceEnvs[i] if se.ServiceName == instanceArgs.ServiceName && se.InstanceName == instanceArgs.InstanceName { a.serviceEnvs = append(a.serviceEnvs[:i], a.serviceEnvs[i+1:]...) i-- } } if len(a.serviceEnvs) == lenBefore { return errors.New("instance not found") } if instanceArgs.Writer != nil { instanceArgs.Writer.Write([]byte("remove instance")) } return nil } func (a *FakeApp) Logs() []string { a.logMut.Lock() defer a.logMut.Unlock() logs := make([]string, len(a.logs)) copy(logs, a.logs) return logs } func (a *FakeApp) HasLog(source, unit, message string) bool { log := source + unit + message a.logMut.Lock() defer a.logMut.Unlock() for _, l := range a.logs { if l == log { return true } } return false } func (a *FakeApp) GetCommands() []string { a.commMut.Lock() defer a.commMut.Unlock() return a.Commands } func (a *FakeApp) Log(message, source, unit string) error { a.logMut.Lock() a.logs = append(a.logs, source+unit+message) a.logMut.Unlock() return nil } func (a *FakeApp) GetName() string { return a.name } func (a *FakeApp) GetPool() string { return a.Pool } func (a *FakeApp) GetPlatform() string { return a.platform } func (a *FakeApp) GetDeploys() uint { return a.Deploys } func (a *FakeApp) GetTeamOwner() string { return a.TeamOwner } func (a *FakeApp) Units() ([]provision.Unit, error) { return a.units, nil } func (a *FakeApp) AddUnit(u provision.Unit) { a.units = append(a.units, u) } func (a *FakeApp) SetEnv(env bind.EnvVar) { if a.env == nil { a.env = map[string]bind.EnvVar{} } a.env[env.Name] = env } func (a *FakeApp) SetEnvs(setEnvs bind.SetEnvArgs) error { for _, env := range setEnvs.Envs { a.SetEnv(env) } return nil } func (a *FakeApp) UnsetEnvs(unsetEnvs bind.UnsetEnvArgs) error { for _, env := range unsetEnvs.VariableNames { delete(a.env, env) } return nil } func (a *FakeApp) GetLock() provision.AppLock { return nil } func (a *FakeApp) GetUnits() ([]bind.Unit, error) { units := make([]bind.Unit, len(a.units)) for i := range a.units { units[i] = &a.units[i] } return units, nil } func (a *FakeApp) Envs() map[string]bind.EnvVar { return a.env } func (a *FakeApp) Run(cmd string, w io.Writer, args provision.RunArgs) error { a.commMut.Lock() a.Commands = append(a.Commands, fmt.Sprintf("ran %s", cmd)) a.commMut.Unlock() return nil } func (a *FakeApp) GetUpdatePlatform() bool { return a.UpdatePlatform } func (app *FakeApp) GetRouters() []appTypes.AppRouter { return []appTypes.AppRouter{{Name: "fake"}} } func (app *FakeApp) GetAddresses() ([]string, error) { addr, err := routertest.FakeRouter.Addr(app.GetName()) if err != nil { return nil, err } return []string{addr}, nil } type Cmd struct { Cmd string Args []string App provision.App } type failure struct { method string err error } // Fake implementation for provision.Provisioner. 
type FakeProvisioner struct { Name string cmds []Cmd cmdMut sync.Mutex outputs chan []byte failures chan failure apps map[string]provisionedApp mut sync.RWMutex shells map[string][]provision.ShellOptions shellMut sync.Mutex nodes map[string]FakeNode nodeContainers map[string]int } func NewFakeProvisioner() *FakeProvisioner { p := FakeProvisioner{Name: "fake"} p.outputs = make(chan []byte, 8) p.failures = make(chan failure, 8) p.apps = make(map[string]provisionedApp) p.shells = make(map[string][]provision.ShellOptions) p.nodes = make(map[string]FakeNode) p.nodeContainers = make(map[string]int) return &p } func (p *FakeProvisioner) getError(method string) error { select { case fail := <-p.failures: if fail.method == method { return fail.err } p.failures <- fail default: } return nil } type FakeNode struct { ID string Addr string PoolName string Meta map[string]string status string p *FakeProvisioner failures int hasSuccess bool } func (n *FakeNode) IaaSID() string { return n.ID } func (n *FakeNode) Pool() string { return n.PoolName } func (n *FakeNode) Address() string { return n.Addr } func (n *FakeNode) Metadata() map[string]string { return n.Meta } func (n *FakeNode) MetadataNoPrefix() map[string]string { return n.Meta } func (n *FakeNode) Units() ([]provision.Unit, error) { n.p.mut.Lock() defer n.p.mut.Unlock() return n.unitsLocked() } func (n *FakeNode) unitsLocked() ([]provision.Unit, error) { var units []provision.Unit for _, a := range n.p.apps { for _, u := range a.units { if net.URLToHost(u.Address.String()) == net.URLToHost(n.Addr) { units = append(units, u) } } } return units, nil } func (n *FakeNode) Status() string { return n.status } func (n *FakeNode) FailureCount() int { return n.failures } func (n *FakeNode) HasSuccess() bool { return n.hasSuccess } func (n *FakeNode) ResetFailures() { n.failures = 0 } func (n *FakeNode) Provisioner() provision.NodeProvisioner { return n.p } func (n *FakeNode) SetHealth(failures int, hasSuccess bool) { n.failures = failures n.hasSuccess = hasSuccess } func (p *FakeProvisioner) AddNode(opts provision.AddNodeOptions) error { p.mut.Lock() defer p.mut.Unlock() if err := p.getError("AddNode"); err != nil { return err } if err := p.getError("AddNode:" + opts.Address); err != nil { return err } metadata := opts.Metadata if metadata == nil { metadata = map[string]string{} } if _, ok := p.nodes[opts.Address]; ok { return errors.New("fake node already exists") } p.nodes[opts.Address] = FakeNode{ ID: opts.IaaSID, Addr: opts.Address, PoolName: opts.Pool, Meta: metadata, p: p, status: "enabled", } return nil } func (p *FakeProvisioner) GetNode(address string) (provision.Node, error) { p.mut.RLock() defer p.mut.RUnlock() if err := p.getError("GetNode"); err != nil { return nil, err } if n, ok := p.nodes[address]; ok { return &n, nil } return nil, provision.ErrNodeNotFound } func (p *FakeProvisioner) RemoveNode(opts provision.RemoveNodeOptions) error { p.mut.Lock() defer p.mut.Unlock() if err := p.getError("RemoveNode"); err != nil { return err } _, ok := p.nodes[opts.Address] if !ok { return provision.ErrNodeNotFound } delete(p.nodes, opts.Address) if opts.Writer != nil { if opts.Rebalance { opts.Writer.Write([]byte("rebalancing...")) p.rebalanceNodesLocked(provision.RebalanceNodesOptions{ Force: true, }) } opts.Writer.Write([]byte("remove done!")) } return nil } func (p *FakeProvisioner) UpdateNode(opts provision.UpdateNodeOptions) error { p.mut.Lock() defer p.mut.Unlock() if err := p.getError("UpdateNode"); err != nil { return err } n, ok := 
p.nodes[opts.Address] if !ok { return provision.ErrNodeNotFound } if opts.Pool != "" { n.PoolName = opts.Pool } if opts.Metadata != nil { n.Meta = opts.Metadata } if opts.Enable { n.status = "enabled" } if opts.Disable { n.status = "disabled" } p.nodes[opts.Address] = n return nil } type nodeList []provision.Node func (l nodeList) Len() int { return len(l) } func (l nodeList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } func (l nodeList) Less(i, j int) bool { return l[i].Address() < l[j].Address() } func (p *FakeProvisioner) ListNodes(addressFilter []string) ([]provision.Node, error) { p.mut.RLock() defer p.mut.RUnlock() if err := p.getError("ListNodes"); err != nil { return nil, err } var result []provision.Node if addressFilter != nil { result = make([]provision.Node, 0, len(addressFilter)) for _, a := range addressFilter { n := p.nodes[a] result = append(result, &n) } } else { result = make([]provision.Node, 0, len(p.nodes)) for a := range p.nodes { n := p.nodes[a] result = append(result, &n) } } sort.Sort(nodeList(result)) return result, nil } func (p *FakeProvisioner) NodeForNodeData(nodeData provision.NodeStatusData) (provision.Node, error) { return provision.FindNodeByAddrs(p, nodeData.Addrs) } func (p *FakeProvisioner) RebalanceNodes(opts provision.RebalanceNodesOptions) (bool, error) { p.mut.Lock() defer p.mut.Unlock() return p.rebalanceNodesLocked(opts) } func (p *FakeProvisioner) rebalanceNodesLocked(opts provision.RebalanceNodesOptions) (bool, error) { if err := p.getError("RebalanceNodes"); err != nil { return true, err } var w io.Writer if opts.Event == nil { w = ioutil.Discard } else { w = opts.Event } fmt.Fprintf(w, "rebalancing - dry: %v, force: %v\n", opts.Dry, opts.Force) if len(opts.AppFilter) != 0 { fmt.Fprintf(w, "filtering apps: %v\n", opts.AppFilter) } if len(opts.MetadataFilter) != 0 { fmt.Fprintf(w, "filtering metadata: %v\n", opts.MetadataFilter) } if opts.Pool != "" { fmt.Fprintf(w, "filtering pool: %v\n", opts.Pool) } if len(p.nodes) == 0 || opts.Dry { return true, nil } max := 0 min := -1 var nodes []FakeNode for _, n := range p.nodes { nodes = append(nodes, n) units, err := n.unitsLocked() if err != nil { return true, err } unitCount := len(units) if unitCount > max { max = unitCount } if min == -1 || unitCount < min { min = unitCount } } if max-min < 2 && !opts.Force { return false, nil } gi := 0 for _, a := range p.apps { nodeIdx := 0 for i := range a.units { u := &a.units[i] firstIdx := nodeIdx var hostAddr string for { idx := nodeIdx nodeIdx = (nodeIdx + 1) % len(nodes) if nodes[idx].Pool() == a.app.GetPool() { hostAddr = net.URLToHost(nodes[idx].Address()) break } if nodeIdx == firstIdx { return true, errors.Errorf("unable to find node for pool %s", a.app.GetPool()) } } u.IP = hostAddr u.Address = &url.URL{ Scheme: "http", Host: fmt.Sprintf("%s:%d", hostAddr, gi), } gi++ } } return true, nil } // Restarts returns the number of restarts for a given app. func (p *FakeProvisioner) Restarts(a provision.App, process string) int { p.mut.RLock() defer p.mut.RUnlock() return p.apps[a.GetName()].restarts[process] } // Starts returns the number of starts for a given app. func (p *FakeProvisioner) Starts(app provision.App, process string) int { p.mut.RLock() defer p.mut.RUnlock() return p.apps[app.GetName()].starts[process] } // Stops returns the number of stops for a given app. 
func (p *FakeProvisioner) Stops(app provision.App, process string) int { p.mut.RLock() defer p.mut.RUnlock() return p.apps[app.GetName()].stops[process] } // Sleeps returns the number of sleeps for a given app. func (p *FakeProvisioner) Sleeps(app provision.App, process string) int { p.mut.RLock() defer p.mut.RUnlock() return p.apps[app.GetName()].sleeps[process] } func (p *FakeProvisioner) CustomData(app provision.App) map[string]interface{} { p.mut.RLock() defer p.mut.RUnlock() return p.apps[app.GetName()].lastData } // Shells return all shell calls to the given unit. func (p *FakeProvisioner) Shells(unit string) []provision.ShellOptions { p.shellMut.Lock() defer p.shellMut.Unlock() return p.shells[unit] } // Returns the number of calls to restart. // GetCmds returns a list of commands executed in an app. If you don't specify // the command (an empty string), it will return all commands executed in the // given app. func (p *FakeProvisioner) GetCmds(cmd string, app provision.App) []Cmd { var cmds []Cmd p.cmdMut.Lock() for _, c := range p.cmds { if (cmd == "" || c.Cmd == cmd) && app.GetName() == c.App.GetName() { cmds = append(cmds, c) } } p.cmdMut.Unlock() return cmds } // Provisioned checks whether the given app has been provisioned. func (p *FakeProvisioner) Provisioned(app provision.App) bool { p.mut.RLock() defer p.mut.RUnlock() _, ok := p.apps[app.GetName()] return ok } func (p *FakeProvisioner) GetUnits(app provision.App) []provision.Unit { p.mut.RLock() pApp := p.apps[app.GetName()] p.mut.RUnlock() return pApp.units } // GetAppFromUnitID returns an app from unitID func (p *FakeProvisioner) GetAppFromUnitID(unitID string) (provision.App, error) { p.mut.RLock() defer p.mut.RUnlock() for _, a := range p.apps { for _, u := range a.units { if u.GetID() == unitID { return a.app, nil } } } return nil, errors.New("app not found") } // PrepareOutput sends the given slice of bytes to a queue of outputs. // // Each prepared output will be used in the ExecuteCommand. It might be sent to // the standard output or standard error. See ExecuteCommand docs for more // details. func (p *FakeProvisioner) PrepareOutput(b []byte) { p.outputs <- b } // PrepareFailure prepares a failure for the given method name. // // For instance, PrepareFailure("GitDeploy", errors.New("GitDeploy failed")) will // cause next Deploy call to return the given error. Multiple calls to this // method will enqueue failures, i.e. three calls to // PrepareFailure("GitDeploy"...) means that the three next GitDeploy call will // fail. func (p *FakeProvisioner) PrepareFailure(method string, err error) { p.failures <- failure{method, err} } // Reset cleans up the FakeProvisioner, deleting all apps and their data. It // also deletes prepared failures and output. It's like calling // NewFakeProvisioner again, without all the allocations. 
func (p *FakeProvisioner) Reset() { p.cmdMut.Lock() p.cmds = nil p.cmdMut.Unlock() p.mut.Lock() p.apps = make(map[string]provisionedApp) p.mut.Unlock() p.shellMut.Lock() p.shells = make(map[string][]provision.ShellOptions) p.shellMut.Unlock() p.mut.Lock() p.nodes = make(map[string]FakeNode) p.mut.Unlock() uniqueIpCounter = 0 p.nodeContainers = make(map[string]int) for { select { case <-p.outputs: case <-p.failures: default: return } } } func (p *FakeProvisioner) Swap(app1, app2 provision.App, cnameOnly bool) error { return routertest.FakeRouter.Swap(app1.GetName(), app2.GetName(), cnameOnly) } func (p *FakeProvisioner) Deploy(app provision.App, img string, evt *event.Event) (string, error) { if err := p.getError("Deploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } pApp.image = img evt.Write([]byte("Builder deploy called")) p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) GetClient(app provision.App) (provision.BuilderDockerClient, error) { for _, node := range p.nodes { client, err := docker.NewClient(node.Addr) if err != nil { return nil, err } return &dockercommon.PullAndCreateClient{Client: client}, nil } return nil, errors.New("No node found") } func (p *FakeProvisioner) CleanImage(appName, imgName string) error { for _, node := range p.nodes { c, err := docker.NewClient(node.Addr) if err != nil { return err } err = c.RemoveImage(imgName) if err != nil && err != docker.ErrNoSuchImage { return err } } return nil } func (p *FakeProvisioner) ArchiveDeploy(app provision.App, archiveURL string, evt *event.Event) (string, error) { if err := p.getError("ArchiveDeploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Archive deploy called")) pApp.lastArchive = archiveURL p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) UploadDeploy(app provision.App, file io.ReadCloser, fileSize int64, build bool, evt *event.Event) (string, error) { if err := p.getError("UploadDeploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Upload deploy called")) pApp.lastFile = file p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p *FakeProvisioner) ImageDeploy(app provision.App, img string, evt *event.Event) (string, error) { if err := p.getError("ImageDeploy"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } pApp.image = img evt.Write([]byte("Image deploy called")) p.apps[app.GetName()] = pApp return img, nil } func (p *FakeProvisioner) Rollback(app provision.App, img string, evt *event.Event) (string, error) { if err := p.getError("Rollback"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Rollback deploy called")) p.apps[app.GetName()] = pApp return img, nil } func (p *FakeProvisioner) Rebuild(app provision.App, evt *event.Event) (string, error) { if err := p.getError("Rebuild"); err != nil { return "", err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return "", errNotProvisioned } evt.Write([]byte("Rebuild deploy called")) p.apps[app.GetName()] = pApp return fakeAppImage, nil } func (p 
*FakeProvisioner) Provision(app provision.App) error { if err := p.getError("Provision"); err != nil { return err } if p.Provisioned(app) { return &provision.Error{Reason: "App already provisioned."} } p.mut.Lock() defer p.mut.Unlock() p.apps[app.GetName()] = provisionedApp{ app: app, restarts: make(map[string]int), starts: make(map[string]int), stops: make(map[string]int), sleeps: make(map[string]int), } return nil } func (p *FakeProvisioner) Restart(app provision.App, process string, w io.Writer) error { if err := p.getError("Restart"); err != nil { return err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.restarts[process]++ p.apps[app.GetName()] = pApp if w != nil { fmt.Fprintf(w, "restarting app") } return nil } func (p *FakeProvisioner) Start(app provision.App, process string) error { p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.starts[process]++ p.apps[app.GetName()] = pApp return nil } func (p *FakeProvisioner) Destroy(app provision.App) error { if err := p.getError("Destroy"); err != nil { return err } if !p.Provisioned(app) { return errNotProvisioned } p.mut.Lock() defer p.mut.Unlock() delete(p.apps, app.GetName()) return nil } func (p *FakeProvisioner) AddUnits(app provision.App, n uint, process string, w io.Writer) error { _, err := p.AddUnitsToNode(app, n, process, w, "") return err } func (p *FakeProvisioner) AddUnitsToNode(app provision.App, n uint, process string, w io.Writer, nodeAddr string) ([]provision.Unit, error) { if err := p.getError("AddUnits"); err != nil { return nil, err } if n == 0 { return nil, errors.New("Cannot add 0 units.") } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return nil, errNotProvisioned } name := app.GetName() platform := app.GetPlatform() length := uint(len(pApp.units)) var addresses []*url.URL for i := uint(0); i < n; i++ { val := atomic.AddInt32(&uniqueIpCounter, 1) var hostAddr string if nodeAddr != "" { hostAddr = net.URLToHost(nodeAddr) } else if len(p.nodes) > 0 { for _, n := range p.nodes { hostAddr = net.URLToHost(n.Address()) break } } else { hostAddr = fmt.Sprintf("10.10.10.%d", val) } unit := provision.Unit{ ID: fmt.Sprintf("%s-%d", name, pApp.unitLen), AppName: name, Type: platform, Status: provision.StatusStarted, IP: hostAddr, ProcessName: process, Address: &url.URL{ Scheme: "http", Host: fmt.Sprintf("%s:%d", hostAddr, val), }, } addresses = append(addresses, unit.Address) pApp.units = append(pApp.units, unit) pApp.unitLen++ } err := routertest.FakeRouter.AddRoutes(name, addresses) if err != nil { return nil, err } result := make([]provision.Unit, int(n)) copy(result, pApp.units[length:]) p.apps[app.GetName()] = pApp if w != nil { fmt.Fprintf(w, "added %d units", n) } return result, nil } func (p *FakeProvisioner) RemoveUnits(app provision.App, n uint, process string, w io.Writer) error { if err := p.getError("RemoveUnits"); err != nil { return err } if n == 0 { return errors.New("cannot remove 0 units") } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } var newUnits []provision.Unit removedCount := n var addresses []*url.URL for _, u := range pApp.units { if removedCount > 0 && u.ProcessName == process { removedCount-- addresses = append(addresses, u.Address) continue } newUnits = append(newUnits, u) } err := routertest.FakeRouter.RemoveRoutes(app.GetName(), addresses) if err != nil { return err } if removedCount > 0 
{ return errors.New("too many units to remove") } if w != nil { fmt.Fprintf(w, "removing %d units", n) } pApp.units = newUnits pApp.unitLen = len(newUnits) p.apps[app.GetName()] = pApp return nil } // ExecuteCommand will pretend to execute the given command, recording data // about it. // // The output of the command must be prepared with PrepareOutput, and failures // must be prepared with PrepareFailure. In case of failure, the prepared // output will be sent to the standard error stream, otherwise, it will be sent // to the standard error stream. // // When there is no output nor failure prepared, ExecuteCommand will return a // timeout error. func (p *FakeProvisioner) ExecuteCommand(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error { var ( output []byte err error ) command := Cmd{ Cmd: cmd, Args: args, App: app, } p.cmdMut.Lock() p.cmds = append(p.cmds, command) p.cmdMut.Unlock() units, err := p.Units(app) if err != nil { return err } for range units { select { case output = <-p.outputs: select { case fail := <-p.failures: if fail.method == "ExecuteCommand" { stderr.Write(output) return fail.err } p.failures <- fail default: stdout.Write(output) } case fail := <-p.failures: if fail.method == "ExecuteCommand" { err = fail.err select { case output = <-p.outputs: stderr.Write(output) default: } } else { p.failures <- fail } case <-time.After(2e9): return errors.New("FakeProvisioner timed out waiting for output.") } } return err } func (p *FakeProvisioner) ExecuteCommandOnce(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error { var output []byte command := Cmd{ Cmd: cmd, Args: args, App: app, } p.cmdMut.Lock() p.cmds = append(p.cmds, command) p.cmdMut.Unlock() select { case output = <-p.outputs:<|fim▁hole|> if fail.method == "ExecuteCommandOnce" { select { case output = <-p.outputs: stderr.Write(output) default: } return fail.err } else { p.failures <- fail } case <-time.After(2e9): return errors.New("FakeProvisioner timed out waiting for output.") } return nil } func (p *FakeProvisioner) ExecuteCommandIsolated(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error { var output []byte command := Cmd{ Cmd: cmd, Args: args, App: app, } p.cmdMut.Lock() p.cmds = append(p.cmds, command) p.cmdMut.Unlock() select { case output = <-p.outputs: stdout.Write(output) case fail := <-p.failures: if fail.method == "ExecuteCommandIsolated" { select { case output = <-p.outputs: stderr.Write(output) default: } return fail.err } else { p.failures <- fail } case <-time.After(2e9): return errors.New("FakeProvisioner timed out waiting for output.") } return nil } func (p *FakeProvisioner) AddUnit(app provision.App, unit provision.Unit) { p.mut.Lock() defer p.mut.Unlock() a := p.apps[app.GetName()] a.units = append(a.units, unit) a.unitLen++ p.apps[app.GetName()] = a } func (p *FakeProvisioner) Units(apps ...provision.App) ([]provision.Unit, error) { if err := p.getError("Units"); err != nil { return nil, err } p.mut.Lock() defer p.mut.Unlock() var allUnits []provision.Unit for _, a := range apps { allUnits = append(allUnits, p.apps[a.GetName()].units...) 
} return allUnits, nil } func (p *FakeProvisioner) RoutableAddresses(app provision.App) ([]url.URL, error) { p.mut.Lock() defer p.mut.Unlock() units := p.apps[app.GetName()].units addrs := make([]url.URL, len(units)) for i := range units { addrs[i] = *units[i].Address } return addrs, nil } func (p *FakeProvisioner) SetUnitStatus(unit provision.Unit, status provision.Status) error { p.mut.Lock() defer p.mut.Unlock() var units []provision.Unit if unit.AppName == "" { units = p.getAllUnits() } else { app, ok := p.apps[unit.AppName] if !ok { return errNotProvisioned } units = app.units } index := -1 for i, unt := range units { if unt.ID == unit.ID { index = i unit.AppName = unt.AppName break } } if index < 0 { return &provision.UnitNotFoundError{ID: unit.ID} } app := p.apps[unit.AppName] app.units[index].Status = status p.apps[unit.AppName] = app return nil } func (p *FakeProvisioner) getAllUnits() []provision.Unit { var units []provision.Unit for _, app := range p.apps { units = append(units, app.units...) } return units } func (p *FakeProvisioner) Addr(app provision.App) (string, error) { if err := p.getError("Addr"); err != nil { return "", err } return routertest.FakeRouter.Addr(app.GetName()) } func (p *FakeProvisioner) SetCName(app provision.App, cname string) error { if err := p.getError("SetCName"); err != nil { return err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.cnames = append(pApp.cnames, cname) p.apps[app.GetName()] = pApp return routertest.FakeRouter.SetCName(cname, app.GetName()) } func (p *FakeProvisioner) UnsetCName(app provision.App, cname string) error { if err := p.getError("UnsetCName"); err != nil { return err } p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.cnames = []string{} p.apps[app.GetName()] = pApp return routertest.FakeRouter.UnsetCName(cname, app.GetName()) } func (p *FakeProvisioner) HasCName(app provision.App, cname string) bool { p.mut.RLock() pApp, ok := p.apps[app.GetName()] p.mut.RUnlock() for _, cnameApp := range pApp.cnames { if cnameApp == cname { return ok && true } } return false } func (p *FakeProvisioner) Stop(app provision.App, process string) error { p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.stops[process]++ for i, u := range pApp.units { u.Status = provision.StatusStopped pApp.units[i] = u } p.apps[app.GetName()] = pApp return nil } func (p *FakeProvisioner) Sleep(app provision.App, process string) error { p.mut.Lock() defer p.mut.Unlock() pApp, ok := p.apps[app.GetName()] if !ok { return errNotProvisioned } pApp.sleeps[process]++ for i, u := range pApp.units { u.Status = provision.StatusAsleep pApp.units[i] = u } p.apps[app.GetName()] = pApp return nil } func (p *FakeProvisioner) RegisterUnit(a provision.App, unitId string, customData map[string]interface{}) error { p.mut.Lock() defer p.mut.Unlock() pa, ok := p.apps[a.GetName()] if !ok { return errors.New("app not found") } pa.lastData = customData for i, u := range pa.units { if u.ID == unitId { u.IP = u.IP + "-updated" pa.units[i] = u p.apps[a.GetName()] = pa return nil } } return &provision.UnitNotFoundError{ID: unitId} } func (p *FakeProvisioner) Shell(opts provision.ShellOptions) error { var unit provision.Unit units, err := p.Units(opts.App) if err != nil { return err } if len(units) == 0 { return errors.New("app has no units") } else if opts.Unit != "" { for _, u := range units { if u.ID == 
opts.Unit { unit = u break } } } else { unit = units[0] } if unit.ID == "" { return errors.New("unit not found") } p.shellMut.Lock() defer p.shellMut.Unlock() p.shells[unit.ID] = append(p.shells[unit.ID], opts) return nil } func (p *FakeProvisioner) FilterAppsByUnitStatus(apps []provision.App, status []string) ([]provision.App, error) { filteredApps := []provision.App{} for i := range apps { units, _ := p.Units(apps[i]) for _, u := range units { if stringInArray(u.Status.String(), status) { filteredApps = append(filteredApps, apps[i]) break } } } return filteredApps, nil } func (p *FakeProvisioner) GetName() string { return p.Name } func (p *FakeProvisioner) UpgradeNodeContainer(name string, pool string, writer io.Writer) error { p.nodeContainers[name+"-"+pool]++ return nil } func (p *FakeProvisioner) RemoveNodeContainer(name string, pool string, writer io.Writer) error { p.nodeContainers[name+"-"+pool] = 0 return nil } func (p *FakeProvisioner) HasNodeContainer(name string, pool string) bool { return p.nodeContainers[name+"-"+pool] > 0 } func stringInArray(value string, array []string) bool { for _, str := range array { if str == value { return true } } return false } type PipelineFakeProvisioner struct { *FakeProvisioner executedPipeline bool } func (p *PipelineFakeProvisioner) ExecutedPipeline() bool { return p.executedPipeline } func (p *PipelineFakeProvisioner) DeployPipeline() *action.Pipeline { act := action.Action{ Name: "change-executed-pipeline", Forward: func(ctx action.FWContext) (action.Result, error) { p.executedPipeline = true return nil, nil }, Backward: func(ctx action.BWContext) { }, } actions := []*action.Action{&act} pipeline := action.NewPipeline(actions...) return pipeline } type PipelineErrorFakeProvisioner struct { *FakeProvisioner } func (p *PipelineErrorFakeProvisioner) DeployPipeline() *action.Pipeline { act := action.Action{ Name: "error-pipeline", Forward: func(ctx action.FWContext) (action.Result, error) { return nil, errors.New("deploy error") }, Backward: func(ctx action.BWContext) { }, } actions := []*action.Action{&act} pipeline := action.NewPipeline(actions...) return pipeline } type provisionedApp struct { units []provision.Unit app provision.App restarts map[string]int starts map[string]int stops map[string]int sleeps map[string]int lastArchive string lastFile io.ReadCloser cnames []string unitLen int lastData map[string]interface{} image string }<|fim▁end|>
stdout.Write(output) case fail := <-p.failures:
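To make the fake provisioner above concrete, here is a minimal sketch of how a test could drive ExecuteCommand. It assumes PrepareOutput([]byte) and PrepareFailure(method string, err error) helpers feeding the p.outputs and p.failures channels, plus NewFakeProvisioner and NewFakeApp constructors from the same package; none of those signatures are confirmed by this excerpt (only AddUnit and ExecuteCommand appear above), so treat them as assumptions.

package main

import (
	"bytes"
	"errors"
	"fmt"

	"github.com/tsuru/tsuru/provision"               // assumed import path
	"github.com/tsuru/tsuru/provision/provisiontest" // assumed import path
)

func main() {
	p := provisiontest.NewFakeProvisioner()           // assumed constructor
	app := provisiontest.NewFakeApp("myapp", "go", 1) // assumed test double
	p.AddUnit(app, provision.Unit{ID: "myapp-0"})     // AddUnit is shown above

	var stdout, stderr bytes.Buffer

	// ExecuteCommand consumes one prepared output per unit of the app.
	p.PrepareOutput([]byte("ok\n")) // assumed helper feeding p.outputs
	if err := p.ExecuteCommand(&stdout, &stderr, app, "ls", "-la"); err != nil {
		fmt.Println("unexpected error:", err)
	}
	fmt.Print(stdout.String()) // "ok\n"

	// A failure prepared for "ExecuteCommand" becomes the returned error,
	// and any prepared output goes to stderr instead of stdout.
	p.PrepareFailure("ExecuteCommand", errors.New("boom")) // assumed helper feeding p.failures
	p.PrepareOutput([]byte("diagnostics\n"))
	err := p.ExecuteCommand(&stdout, &stderr, app, "ls")
	fmt.Println(err)           // boom
	fmt.Print(stderr.String()) // "diagnostics\n"
}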
<|file_name|>htmlcollection.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::HTMLCollectionBinding; use dom::bindings::codegen::Bindings::HTMLCollectionBinding::HTMLCollectionMethods; use dom::bindings::global::GlobalRef; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, Root, MutNullableHeap}; use dom::bindings::reflector::{Reflector, reflect_dom_object}; use dom::bindings::str::DOMString; use dom::bindings::trace::JSTraceable; use dom::bindings::xmlname::namespace_from_domstring; use dom::element::Element; use dom::node::Node; use dom::window::Window; use std::ascii::AsciiExt; use std::cell::Cell; use string_cache::{Atom, Namespace, QualName}; use style::str::split_html_space_chars; pub trait CollectionFilter : JSTraceable { fn filter<'a>(&self, elem: &'a Element, root: &'a Node) -> bool; } // An optional u32, using maxint to represent None. // It would be nicer just to use Option<u32> for this, but that would produce word // alignment issues since Option<u32> uses 33 bits. #[derive(Clone, Copy, JSTraceable, HeapSizeOf)] struct OptionU32 { bits: u32, } impl OptionU32 { fn to_option(self) -> Option<u32> { if self.bits == u32::max_value() { None } else { Some(self.bits) } } fn some(bits: u32) -> OptionU32 { assert!(bits != u32::max_value()); OptionU32 { bits: bits } } fn none() -> OptionU32 { OptionU32 { bits: u32::max_value() } } } #[dom_struct] pub struct HTMLCollection { reflector_: Reflector, root: JS<Node>, #[ignore_heap_size_of = "Contains a trait object; can't measure due to #6870"] filter: Box<CollectionFilter + 'static>, // We cache the version of the root node and all its descendants, // the length of the collection, and a cursor into the collection.
// FIXME: make the cached cursor element a weak pointer cached_version: Cell<u64>, cached_cursor_element: MutNullableHeap<JS<Element>>, cached_cursor_index: Cell<OptionU32>, cached_length: Cell<OptionU32>, } impl HTMLCollection { #[allow(unrooted_must_root)] pub fn new_inherited(root: &Node, filter: Box<CollectionFilter + 'static>) -> HTMLCollection { HTMLCollection { reflector_: Reflector::new(), root: JS::from_ref(root), filter: filter,<|fim▁hole|> cached_cursor_element: MutNullableHeap::new(None), cached_cursor_index: Cell::new(OptionU32::none()), cached_length: Cell::new(OptionU32::none()), } } #[allow(unrooted_must_root)] pub fn new(window: &Window, root: &Node, filter: Box<CollectionFilter + 'static>) -> Root<HTMLCollection> { reflect_dom_object(box HTMLCollection::new_inherited(root, filter), GlobalRef::Window(window), HTMLCollectionBinding::Wrap) } pub fn create(window: &Window, root: &Node, filter: Box<CollectionFilter + 'static>) -> Root<HTMLCollection> { HTMLCollection::new(window, root, filter) } fn validate_cache(&self) { // Clear the cache if the root version is different from our cached version let cached_version = self.cached_version.get(); let curr_version = self.root.inclusive_descendants_version(); if curr_version != cached_version { // Default values for the cache self.cached_version.set(curr_version); self.cached_cursor_element.set(None); self.cached_length.set(OptionU32::none()); self.cached_cursor_index.set(OptionU32::none()); } } fn set_cached_cursor(&self, index: u32, element: Option<Root<Element>>) -> Option<Root<Element>> { if let Some(element) = element { self.cached_cursor_index.set(OptionU32::some(index)); self.cached_cursor_element.set(Some(element.r())); Some(element) } else { None } } pub fn by_tag_name(window: &Window, root: &Node, mut tag: DOMString) -> Root<HTMLCollection> { let tag_atom = Atom::from(&*tag); tag.make_ascii_lowercase(); let ascii_lower_tag = Atom::from(tag); // FIXME(ajeffrey): don't clone atom if it was already lowercased. 
HTMLCollection::by_atomic_tag_name(window, root, tag_atom, ascii_lower_tag) } pub fn by_atomic_tag_name(window: &Window, root: &Node, tag_atom: Atom, ascii_lower_tag: Atom) -> Root<HTMLCollection> { #[derive(JSTraceable, HeapSizeOf)] struct TagNameFilter { tag: Atom, ascii_lower_tag: Atom, } impl CollectionFilter for TagNameFilter { fn filter(&self, elem: &Element, _root: &Node) -> bool { if self.tag == atom!("*") { true } else if elem.html_element_in_html_document() { *elem.local_name() == self.ascii_lower_tag } else { *elem.local_name() == self.tag } } } let filter = TagNameFilter { tag: tag_atom, ascii_lower_tag: ascii_lower_tag, }; HTMLCollection::create(window, root, box filter) } pub fn by_tag_name_ns(window: &Window, root: &Node, tag: DOMString, maybe_ns: Option<DOMString>) -> Root<HTMLCollection> { let local = Atom::from(tag); let ns = namespace_from_domstring(maybe_ns); let qname = QualName::new(ns, local); HTMLCollection::by_qual_tag_name(window, root, qname) } pub fn by_qual_tag_name(window: &Window, root: &Node, qname: QualName) -> Root<HTMLCollection> { #[derive(JSTraceable, HeapSizeOf)] struct TagNameNSFilter { qname: QualName } impl CollectionFilter for TagNameNSFilter { fn filter(&self, elem: &Element, _root: &Node) -> bool { ((self.qname.ns == Namespace(atom!("*"))) || (self.qname.ns == *elem.namespace())) && ((self.qname.local == atom!("*")) || (self.qname.local == *elem.local_name())) } } let filter = TagNameNSFilter { qname: qname }; HTMLCollection::create(window, root, box filter) } pub fn by_class_name(window: &Window, root: &Node, classes: DOMString) -> Root<HTMLCollection> { let class_atoms = split_html_space_chars(&classes).map(Atom::from).collect(); HTMLCollection::by_atomic_class_name(window, root, class_atoms) } pub fn by_atomic_class_name(window: &Window, root: &Node, classes: Vec<Atom>) -> Root<HTMLCollection> { #[derive(JSTraceable, HeapSizeOf)] struct ClassNameFilter { classes: Vec<Atom> } impl CollectionFilter for ClassNameFilter { fn filter(&self, elem: &Element, _root: &Node) -> bool { self.classes.iter().all(|class| elem.has_class(class)) } } let filter = ClassNameFilter { classes: classes }; HTMLCollection::create(window, root, box filter) } pub fn children(window: &Window, root: &Node) -> Root<HTMLCollection> { #[derive(JSTraceable, HeapSizeOf)] struct ElementChildFilter; impl CollectionFilter for ElementChildFilter { fn filter(&self, elem: &Element, root: &Node) -> bool { root.is_parent_of(elem.upcast()) } } HTMLCollection::create(window, root, box ElementChildFilter) } pub fn elements_iter_after(&self, after: &Node) -> HTMLCollectionElementsIter { // Iterate forwards from a node. HTMLCollectionElementsIter { node_iter: box after.following_nodes(&self.root), root: Root::from_ref(&self.root), filter: &self.filter, } } pub fn elements_iter(&self) -> HTMLCollectionElementsIter { // Iterate forwards from the root. self.elements_iter_after(&*self.root) } pub fn elements_iter_before(&self, before: &Node) -> HTMLCollectionElementsIter { // Iterate backwards from a node. 
HTMLCollectionElementsIter { node_iter: box before.preceding_nodes(&self.root), root: Root::from_ref(&self.root), filter: &self.filter, } } } // TODO: Make this generic, and avoid code duplication pub struct HTMLCollectionElementsIter<'a> { node_iter: Box<Iterator<Item = Root<Node>>>, root: Root<Node>, filter: &'a Box<CollectionFilter>, } impl<'a> Iterator for HTMLCollectionElementsIter<'a> { type Item = Root<Element>; fn next(&mut self) -> Option<Self::Item> { let filter = &self.filter; let root = &self.root; self.node_iter.by_ref() .filter_map(Root::downcast) .filter(|element| filter.filter(&element, root)) .next() } } impl HTMLCollectionMethods for HTMLCollection { // https://dom.spec.whatwg.org/#dom-htmlcollection-length fn Length(&self) -> u32 { self.validate_cache(); if let Some(cached_length) = self.cached_length.get().to_option() { // Cache hit cached_length } else { // Cache miss, calculate the length let length = self.elements_iter().count() as u32; self.cached_length.set(OptionU32::some(length)); length } } // https://dom.spec.whatwg.org/#dom-htmlcollection-item fn Item(&self, index: u32) -> Option<Root<Element>> { self.validate_cache(); if let Some(element) = self.cached_cursor_element.get() { // Cache hit, the cursor element is set if let Some(cached_index) = self.cached_cursor_index.get().to_option() { if cached_index == index { // The cursor is the element we're looking for Some(element) } else if cached_index < index { // The cursor is before the element we're looking for // Iterate forwards, starting at the cursor. let offset = index - (cached_index + 1); let node: Root<Node> = Root::upcast(element); self.set_cached_cursor(index, self.elements_iter_after(node.r()).nth(offset as usize)) } else { // The cursor is after the element we're looking for // Iterate backwards, starting at the cursor. let offset = cached_index - (index + 1); let node: Root<Node> = Root::upcast(element); self.set_cached_cursor(index, self.elements_iter_before(node.r()).nth(offset as usize)) } } else { // Cache miss // Iterate forwards through all the nodes self.set_cached_cursor(index, self.elements_iter().nth(index as usize)) } } else { // Cache miss // Iterate forwards through all the nodes self.set_cached_cursor(index, self.elements_iter().nth(index as usize)) } } // https://dom.spec.whatwg.org/#dom-htmlcollection-nameditem fn NamedItem(&self, key: DOMString) -> Option<Root<Element>> { // Step 1. if key.is_empty() { return None; } // Step 2. self.elements_iter().find(|elem| { elem.get_string_attribute(&atom!("id")) == key || (elem.namespace() == &ns!(html) && elem.get_string_attribute(&atom!("name")) == key) }) } // https://dom.spec.whatwg.org/#dom-htmlcollection-item fn IndexedGetter(&self, index: u32) -> Option<Root<Element>> { self.Item(index) } // check-tidy: no specs after this line fn NamedGetter(&self, name: DOMString) -> Option<Root<Element>> { self.NamedItem(name) } // https://dom.spec.whatwg.org/#interface-htmlcollection fn SupportedPropertyNames(&self) -> Vec<DOMString> { // Step 1 let mut result = vec![]; // Step 2 for elem in self.elements_iter() { // Step 2.1 let id_attr = elem.get_string_attribute(&atom!("id")); if !id_attr.is_empty() && !result.contains(&id_attr) { result.push(id_attr) } // Step 2.2 let name_attr = elem.get_string_attribute(&atom!("name")); if !name_attr.is_empty() && !result.contains(&name_attr) && *elem.namespace() == ns!(html) { result.push(name_attr) } } // Step 3 result } }<|fim▁end|>
// Default values for the cache cached_version: Cell::new(root.inclusive_descendants_version()),
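The OptionU32 type at the top of this file is a space optimization worth calling out: Option<u32> needs a 33rd bit for the discriminant (and therefore padding), so the cache fields instead reserve u32::max_value() as a None sentinel. The trick is language-agnostic; the following standalone Go sketch (hypothetical, not part of Servo) shows the same encoding, with Go's comma-ok convention standing in for Rust's Option.

package main

import (
	"fmt"
	"math"
)

// optionU32 encodes "no value" as math.MaxUint32, mirroring the Rust
// OptionU32 above: the sentinel avoids a separate presence flag, so the
// stored value stays exactly 32 bits wide.
type optionU32 struct{ bits uint32 }

func someU32(v uint32) optionU32 {
	if v == math.MaxUint32 {
		panic("math.MaxUint32 is reserved as the None sentinel")
	}
	return optionU32{bits: v}
}

func noneU32() optionU32 { return optionU32{bits: math.MaxUint32} }

// toOption reports the stored value and whether one is present.
func (o optionU32) toOption() (uint32, bool) {
	if o.bits == math.MaxUint32 {
		return 0, false
	}
	return o.bits, true
}

func main() {
	cursor := noneU32()
	if _, ok := cursor.toOption(); !ok {
		fmt.Println("cache miss")
	}
	cursor = someU32(42)
	if v, ok := cursor.toOption(); ok {
		fmt.Println("cached index:", v) // cached index: 42
	}
}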
<|file_name|>api.go<|end_file_name|><|fim▁begin|>// Package api pulls 4chan board and thread data from the JSON API into native Go data structures. package api import ( "encoding/json" "fmt" "io" "net/http" pathpkg "path" "sync" "time" ) var ( // Whether or not to use HTTPS for requests. SSL bool = false // Cooldown time for updating threads using (*Thread).Update(). // If it is set to less than 10 seconds, it will be re-set to 10 seconds // before being used. UpdateCooldown time.Duration = 15 * time.Second cooldown <-chan time.Time cooldownMutex sync.Mutex ) const ( APIURL = "a.4cdn.org" ImageURL = "i.4cdn.org" StaticURL = "s.4cdn.org" ) func prefix() string { if SSL { return "https://" } else { return "http://" } } func get(base, path string, modify func(*http.Request) error) (*http.Response, error) { url := prefix() + pathpkg.Join(base, path) cooldownMutex.Lock() if cooldown != nil { <-cooldown } req, err := http.NewRequest("GET", url, nil) if err != nil { return nil, err } if modify != nil { err = modify(req) if err != nil { return nil, err } } resp, err := http.DefaultClient.Do(req) cooldown = time.After(1 * time.Second) cooldownMutex.Unlock() return resp, err } func getDecode(base, path string, dest interface{}, modify func(*http.Request) error) error { resp, err := get(base, path, modify) if err != nil { return err } defer resp.Body.Close() return json.NewDecoder(resp.Body).Decode(dest) } // Direct mapping from the API's JSON to a Go type. type jsonPost struct { No int64 `json:"no"` // Post number 1-9999999999999 Resto int64 `json:"resto"` // Reply to 0 (is thread), 1-999999999999 Sticky int `json:"sticky"` // Stickied thread? 0 (no), 1 (yes) Closed int `json:"closed"` // Closed thread? 0 (no), 1 (yes) Now string `json:"now"` // Date and time MM\/DD\/YY(Day)HH:MM (:SS on some boards) Time int64 `json:"time"` // UNIX timestamp UNIX timestamp Name string `json:"name"` // Name text or empty Trip string `json:"trip"` // Tripcode text (format: !tripcode!!securetripcode) Id string `json:"id"` // ID text (8 characters), Mod, Admin Capcode string `json:"capcode"` // Capcode none, mod, admin, admin_highlight, developer Country string `json:"country"` // Country code ISO 3166-1 alpha-2, XX (unknown) CountryName string `json:"country_name"` // Country name text Email string `json:"email"` // Email text or empty Sub string `json:"sub"` // Subject text or empty Com string `json:"com"` // Comment text (includes escaped HTML) or empty Tim int64 `json:"tim"` // Renamed filename UNIX timestamp + microseconds FileName string `json:"filename"` // Original filename text Ext string `json:"ext"` // File extension .jpg, .png, .gif, .pdf, .swf Fsize int `json:"fsize"` // File size 1-8388608 Md5 []byte `json:"md5"` // File MD5 byte slice Width int `json:"w"` // Image width 1-10000 Height int `json:"h"` // Image height 1-10000 TnW int `json:"tn_w"` // Thumbnail width 1-250 TnH int `json:"tn_h"` // Thumbnail height 1-250 FileDeleted int `json:"filedeleted"` // File deleted? 0 (no), 1 (yes) Spoiler int `json:"spoiler"` // Spoiler image? 0 (no), 1 (yes) CustomSpoiler int `json:"custom_spoiler"` // Custom spoilers? 1-99 OmittedPosts int `json:"omitted_posts"` // # replies omitted 1-10000 OmittedImages int `json:"omitted_images"` // # images omitted 1-10000 Replies int `json:"replies"` // total # of replies 0-99999 Images int `json:"images"` // total # of images 0-99999 BumpLimit int `json:"bumplimit"` // bump limit? 0 (no), 1 (yes) ImageLimit int `json:"imagelimit"` // image limit? 
0 (no), 1 (yes) CapcodeReplies map[string][]int `json:"capcode_replies"` LastModified int64 `json:"last_modified"` } // A Post represents all of the attributes of a 4chan post, organized in a more directly usable fashion. type Post struct { // Post info Id int64 Thread *Thread Time time.Time Subject string LastModified int64 // These are only present in an OP post. They are exposed through their // corresponding Thread getter methods. replies int images int omitted_posts int omitted_images int bump_limit bool image_limit bool sticky bool closed bool custom_spoiler int // the number of custom spoilers on a given board // Poster info Name string Trip string Email string Special string Capcode string // Country and CountryName are empty unless the board uses country info Country string CountryName string // Message body Comment string // File info if any, otherwise nil File *File // only when they do this on /q/ CapcodeReplies map[string][]int } func (self *Post) String() (s string) { s += fmt.Sprintf("#%d %s%s on %s:\n", self.Id, self.Name, self.Trip, self.Time.Format(time.RFC822)) if self.File != nil { s += self.File.String() } s += self.Comment return } // ImageURL constructs and returns the URL of the attached image. Returns the // empty string if there is none. func (self *Post) ImageURL() string { file := self.File if file == nil { return "" } return fmt.Sprintf("%s%s/%s/%d%s", prefix(), ImageURL, self.Thread.Board, file.Id, file.Ext) } // ThumbURL constructs and returns the thumbnail URL of the attached image. // Returns the empty string if there is none. func (self *Post) ThumbURL() string { file := self.File if file == nil { return "" } return fmt.Sprintf("%s%s/%s/%ds%s", prefix(), ImageURL, self.Thread.Board, file.Id, ".jpg") } // A File represents an uploaded file's metadata. type File struct { Id int64 // Id is what 4chan renames images to (UNIX + microtime, e.g. 1346971121077) Name string // Original filename Ext string Size int MD5 []byte Width int Height int ThumbWidth int ThumbHeight int Deleted bool Spoiler bool } func (self *File) String() string { return fmt.Sprintf("File: %s%s (%dx%d, %d bytes, md5 %x)\n", self.Name, self.Ext, self.Width, self.Height, self.Size, self.MD5) } // CountryFlagURL returns the URL of the post's country flag icon, if enabled // on the board in question. func (self *Post) CountryFlagURL() string { if self.Country == "" { return "" } // lol /pol/ if self.Thread.Board == "pol" { return fmt.Sprintf("%s://%s/image/country/troll/%s.gif", prefix(), StaticURL, self.Country) } return fmt.Sprintf("%s://%s/image/country/%s.gif", prefix(), StaticURL, self.Country) } // A Thread represents a thread of posts. It may or may not contain the actual replies. type Thread struct { Posts []*Post OP *Post Board string // without slashes ex. "g" or "ic" date_recieved time.Time cooldown <-chan time.Time } // GetIndex hits the API for an index of thread stubs from the given board and // page. func GetIndex(board string, page int) ([]*Thread, error) { resp, err := get(APIURL, fmt.Sprintf("/%s/%d.json", board, page + 1), nil) if err != nil { return nil, err } defer resp.Body.Close() threads, err := ParseIndex(resp.Body, board) if err != nil { return nil, err } now := time.Now() for _, t := range threads { t.date_recieved = now } return threads, err } // GetThreads hits the API for a list of the thread IDs of all the active // threads on a given board. 
func GetThreads(board string) ([][]int64, error) { p := make([]struct { Page int `json:"page"` Threads []struct { No int64 `json:"no"` } `json:"threads"` }, 0, 10) if err := getDecode(APIURL, fmt.Sprintf("/%s/threads.json", board), &p, nil); err != nil { return nil, err } n := make([][]int64, len(p)) for _, page := range p { // Pages are 1 based in the json api n[page.Page-1] = make([]int64, len(page.Threads)) for j, thread := range page.Threads { n[page.Page-1][j] = thread.No } } return n, nil } // GetThread hits the API for a single thread and all its replies. board is // just the board name, without the surrounding slashes. If a thread is being // updated, use an existing thread's Update() method if possible because that // uses If-Modified-Since in the request, which reduces unnecessary server // load. func GetThread(board string, thread_id int64) (*Thread, error) { return getThread(board, thread_id, time.Unix(0, 0)) } func getThread(board string, thread_id int64, stale_time time.Time) (*Thread, error) { resp, err := get(APIURL, fmt.Sprintf("/%s/thread/%d.json", board, thread_id), func(req *http.Request) error { if stale_time.Unix() != 0 { req.Header.Add("If-Modified-Since", stale_time.UTC().Format(http.TimeFormat)) } return nil }) if err != nil { return nil, err } defer resp.Body.Close() thread, err := ParseThread(resp.Body, board) thread.date_recieved = time.Now() return thread, err } // ParseIndex converts a JSON response for multiple threads into a native Go // data structure func ParseIndex(r io.Reader, board string) ([]*Thread, error) { var t struct { Threads []struct { Posts []*jsonPost `json:"posts"` } `json:"threads"` } if err := json.NewDecoder(r).Decode(&t); err != nil { return nil, err } threads := make([]*Thread, len(t.Threads)) for i, json_thread := range t.Threads { thread := &Thread{Posts: make([]*Post, len(t.Threads[i].Posts)), Board: board} for k, v := range json_thread.Posts { thread.Posts[k] = json_to_native(v, thread) if v.No == 0 { thread.OP = thread.Posts[k] } } // TODO: fix this up if thread.OP == nil { thread.OP = thread.Posts[0] } threads[i] = thread } return threads, nil } // ParseThread converts a JSON response for one thread into a native Go data // structure. 
func ParseThread(r io.Reader, board string) (*Thread, error) { var t struct {<|fim▁hole|> if err := json.NewDecoder(r).Decode(&t); err != nil { return nil, err } thread := &Thread{Posts: make([]*Post, len(t.Posts)), Board: board} for k, v := range t.Posts { thread.Posts[k] = json_to_native(v, thread) if v.No == 0 { thread.OP = thread.Posts[k] } } // TODO: fix this up if thread.OP == nil { thread.OP = thread.Posts[0] } return thread, nil } func json_to_native(v *jsonPost, thread *Thread) *Post { p := &Post{ Id: v.No, sticky: v.Sticky == 1, closed: v.Closed == 1, Time: time.Unix(v.Time, 0), Name: v.Name, Trip: v.Trip, Special: v.Id, Capcode: v.Capcode, Country: v.Country, CountryName: v.CountryName, Email: v.Email, Subject: v.Sub, Comment: v.Com, custom_spoiler: v.CustomSpoiler, replies: v.Replies, images: v.Images, omitted_posts: v.OmittedPosts, omitted_images: v.OmittedImages, bump_limit: v.BumpLimit == 1, image_limit: v.ImageLimit == 1, Thread: thread, CapcodeReplies: v.CapcodeReplies, LastModified: v.LastModified, } if len(v.FileName) > 0 { p.File = &File{ Id: v.Tim, Name: v.FileName, Ext: v.Ext, Size: v.Fsize, MD5: v.Md5, Width: v.Width, Height: v.Height, ThumbWidth: v.TnW, ThumbHeight: v.TnH, Deleted: v.FileDeleted == 1, Spoiler: v.Spoiler == 1, } } return p } // Update an existing thread in-place. func (self *Thread) Update() (new_posts, deleted_posts int, err error) { cooldownMutex.Lock() if self.cooldown != nil { <-self.cooldown } var thread *Thread thread, err = getThread(self.Board, self.Id(), self.date_recieved) if UpdateCooldown < 10*time.Second { UpdateCooldown = 10 * time.Second } self.cooldown = time.After(UpdateCooldown) cooldownMutex.Unlock() if err != nil { return 0, 0, err } var a, b int // traverse both threads in parallel to check for deleted/appended posts for a, b = 0, 0; a < len(self.Posts); a, b = a+1, b+1 { if self.Posts[a].Id == thread.Posts[b].Id { continue } // a post has been deleted, go back one to compare with the next b-- deleted_posts++ } new_posts = len(thread.Posts) - b self.Posts = thread.Posts return } // Id returns the thread OP's post ID. func (self *Thread) Id() int64 { return self.OP.Id } func (self *Thread) String() (s string) { for _, post := range self.Posts { s += post.String() + "\n\n" } return } // Replies returns the number of replies the thread OP has. func (self *Thread) Replies() int { return self.OP.replies } // Images returns the number of images in the thread. func (self *Thread) Images() int { return self.OP.images } // OmittedPosts returns the number of posts omitted in a thread list overview. func (self *Thread) OmittedPosts() int { return self.OP.omitted_posts } // OmittedImages returns the number of image posts omitted in a thread list overview. func (self *Thread) OmittedImages() int { return self.OP.omitted_images } // BumpLimit returns true if the thread is at its bump limit, or false otherwise. func (self *Thread) BumpLimit() bool { return self.OP.bump_limit } // ImageLimit returns true if the thread can no longer accept image posts, or false otherwise. func (self *Thread) ImageLimit() bool { return self.OP.image_limit } // Closed returns true if the thread is closed for replies, or false otherwise. func (self *Thread) Closed() bool { return self.OP.closed } // Sticky returns true if the thread is stickied, or false otherwise. func (self *Thread) Sticky() bool { return self.OP.sticky } // CustomSpoiler returns the ID of its custom spoiler image, if there is one. 
func (self *Thread) CustomSpoiler() int { return self.OP.custom_spoiler } // CustomSpoilerURL builds and returns the URL of the custom spoiler image, or // an empty string if none exists. func (self *Thread) CustomSpoilerURL(id int, ssl bool) string { if id > self.OP.custom_spoiler { return "" } return fmt.Sprintf("%s://%s/image/spoiler-%s%d.png", prefix(), StaticURL, self.Board, id) } // A Board is the name and title of a single board. type Board struct { Board string `json:"board"` Title string `json:"title"` } // Board names/descriptions will be cached here after a call to LookupBoard or GetBoards var Boards []Board // LookupBoard returns the Board corresponding to the board name (without slashes) func LookupBoard(name string) (Board, error) { if Boards == nil { _, err := GetBoards() if err != nil { return Board{}, fmt.Errorf("Board '%s' not found: %v", name, err) } } for _, b := range Boards { if name == b.Board { return b, nil } } return Board{}, fmt.Errorf("Board '%s' not found", name) } // Get the list of boards. func GetBoards() ([]Board, error) { var b struct { Boards []Board `json:"boards"` } err := getDecode(APIURL, "/boards.json", &b, nil) if err != nil { return nil, err } Boards = b.Boards return b.Boards, nil } // A Catalog contains a list of (truncated) threads on each page of a board. type Catalog []struct { Page int Threads []*Thread } type catalog []struct { Page int `json:"page"` Threads []*jsonPost `json:"threads"` } // GetCatalog hits the API for a catalog listing of a board. func GetCatalog(board string) (Catalog, error) { if len(board) == 0 { return nil, fmt.Errorf("api: GetCatalog: No board name given") } var c catalog err := getDecode(APIURL, fmt.Sprintf("/%s/catalog.json", board), &c, nil) if err != nil { return nil, err } cat := make(Catalog, len(c)) for i, page := range c { extracted := struct { Page int Threads []*Thread }{page.Page, make([]*Thread, len(page.Threads))} for j, post := range page.Threads { thread := &Thread{Posts: make([]*Post, 1), Board: board} post := json_to_native(post, thread) thread.Posts[0] = post extracted.Threads[j] = thread if thread.OP == nil { thread.OP = thread.Posts[0] } } cat[i] = extracted } return cat, nil }<|fim▁end|>
Posts []*jsonPost `json:"posts"` }
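A short usage sketch of the package above: fetch an index page, then follow one thread with Update, which batches requests through UpdateCooldown and If-Modified-Since as described in the GetThread comment. GetIndex, GetThread, Id, Replies, Images, and Update all appear in the excerpt with these signatures; the import path is an assumption.

package main

import (
	"fmt"
	"log"

	"yourmodule/api" // assumed import path for the package above
)

func main() {
	// GetIndex(board, 0) fetches the first index page (the package adds 1).
	threads, err := api.GetIndex("g", 0)
	if err != nil {
		log.Fatal(err)
	}
	for _, t := range threads {
		fmt.Printf("#%d: %d replies, %d images\n", t.Id(), t.Replies(), t.Images())
	}
	if len(threads) == 0 {
		return
	}

	// Follow one thread; Update re-polls in place, honoring the
	// package-level UpdateCooldown and sending If-Modified-Since.
	thread, err := api.GetThread("g", threads[0].Id())
	if err != nil {
		log.Fatal(err)
	}
	newPosts, deletedPosts, err := thread.Update()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d new, %d deleted\n", newPosts, deletedPosts)
}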
<|file_name|>p0012.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import math from collections import Counter import operator import functools DICT_FACTORS = dict() def factorize(n): if n in DICT_FACTORS: return DICT_FACTORS[n] cnt = Counter() sqrtn = int(math.sqrt(n)) + 1 for i in range(2, sqrtn + 1): if n % i == 0: cnt[i] += 1 cnt += factorize(n // i) break<|fim▁hole|> return cnt def n_divisors(n): factors = factorize(n) n_div = functools.reduce(operator.mul, (val+1 for val in factors.values()), 1) return n_div def triangular_number(n): return n*(n+1)//2 i = 100 while True: n = triangular_number(i) n_div = n_divisors(n) if n_div > 500: print(n) break print('{}, {}: {}'.format(n, i, n_div)) i = i + 1<|fim▁end|>
else: cnt[n] += 1 DICT_FACTORS[n] = cnt
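The program above is Project Euler problem 12: find the first triangular number with more than 500 divisors, using memoized trial-division factorization and the fact that the divisor count is the product of (exponent + 1) over the prime factorization. Below is a Go port of the same algorithm, with an explicit guard for n < 2, which the recursion in the Python version can reach for powers of two (though never for the triangular numbers it is actually given).

package main

import (
	"fmt"
	"math"
)

var factorCache = map[int]map[int]int{}

// factorize returns the prime factorization of n as prime -> exponent,
// memoized like the Python DICT_FACTORS cache above.
func factorize(n int) map[int]int {
	if n < 2 {
		return map[int]int{} // 1 has no prime factors
	}
	if cached, ok := factorCache[n]; ok {
		return cached
	}
	cnt := map[int]int{}
	limit := int(math.Sqrt(float64(n))) + 1
	factored := false
	for i := 2; i <= limit; i++ {
		if n%i == 0 {
			cnt[i]++
			for p, e := range factorize(n / i) {
				cnt[p] += e
			}
			factored = true
			break
		}
	}
	if !factored {
		cnt[n]++ // n itself is prime
	}
	factorCache[n] = cnt
	return cnt
}

// nDivisors counts divisors: the product of (exponent + 1) over all primes.
func nDivisors(n int) int {
	d := 1
	for _, e := range factorize(n) {
		d *= e + 1
	}
	return d
}

func triangular(n int) int { return n * (n + 1) / 2 }

func main() {
	for i := 100; ; i++ {
		t := triangular(i)
		if nDivisors(t) > 500 {
			fmt.Println(t) // first triangular number with >500 divisors
			break
		}
	}
}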
<|file_name|>http_cache.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ #![deny(missing_docs)] //! A memory cache implementing the logic specified in <http://tools.ietf.org/html/rfc7234> //! and <http://tools.ietf.org/html/rfc7232>. use crate::fetch::methods::{Data, DoneChannel}; use crossbeam_channel::{unbounded, Sender}; use headers::{ CacheControl, ContentRange, Expires, HeaderMapExt, LastModified, Pragma, Range, Vary, }; use http::header::HeaderValue; use http::{header, HeaderMap}; use hyper::{Method, StatusCode}; use malloc_size_of::Measurable; use malloc_size_of::{ MallocSizeOf, MallocSizeOfOps, MallocUnconditionalShallowSizeOf, MallocUnconditionalSizeOf, }; use net_traits::request::Request; use net_traits::response::{HttpsState, Response, ResponseBody}; use net_traits::{FetchMetadata, Metadata, ResourceFetchTiming}; use servo_arc::Arc; use servo_url::ServoUrl; use std::collections::HashMap; use std::ops::Bound; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Mutex; use std::time::SystemTime; use time::{Duration, Timespec, Tm}; /// The key used to differentiate requests in the cache. #[derive(Clone, Eq, Hash, MallocSizeOf, PartialEq)] pub struct CacheKey { url: ServoUrl, } impl CacheKey { /// Create a cache-key from a request. pub(crate) fn new(request: &Request) -> CacheKey { CacheKey { url: request.current_url(), } } fn from_servo_url(servo_url: &ServoUrl) -> CacheKey { CacheKey { url: servo_url.clone(), } } } /// A complete cached resource. #[derive(Clone)] struct CachedResource { request_headers: Arc<Mutex<HeaderMap>>, body: Arc<Mutex<ResponseBody>>, aborted: Arc<AtomicBool>, awaiting_body: Arc<Mutex<Vec<Sender<Data>>>>, data: Measurable<MeasurableCachedResource>, } #[derive(Clone, MallocSizeOf)] struct MeasurableCachedResource { metadata: CachedMetadata, location_url: Option<Result<ServoUrl, String>>, https_state: HttpsState, status: Option<(StatusCode, String)>, raw_status: Option<(u16, Vec<u8>)>, url_list: Vec<ServoUrl>, expires: Duration, last_validated: Tm, } impl MallocSizeOf for CachedResource { fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { // TODO: self.request_headers.unconditional_size_of(ops) + self.body.unconditional_size_of(ops) + self.aborted.unconditional_size_of(ops) + self.awaiting_body.unconditional_size_of(ops) + self.data.size_of(ops) } } /// Metadata about a loaded resource, such as is obtained from HTTP headers. #[derive(Clone)] struct CachedMetadata { /// Headers pub headers: Arc<Mutex<HeaderMap>>, /// Fields that implement MallocSizeOf pub data: Measurable<MeasurableCachedMetadata>, } #[derive(Clone, MallocSizeOf)] struct MeasurableCachedMetadata { /// Final URL after redirects. pub final_url: ServoUrl, /// MIME type / subtype. pub content_type: Option<String>, /// Character set. 
pub charset: Option<String>, /// HTTP Status pub status: Option<(u16, Vec<u8>)>, } impl MallocSizeOf for CachedMetadata { fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { self.headers.unconditional_shallow_size_of(ops) + // TODO: self.headers.size_of(ops) + self.data.size_of(ops) } } /// Wrapper around a cached response, including information on re-validation needs pub struct CachedResponse { /// The response constructed from the cached resource pub response: Response, /// The revalidation flag for the stored response pub needs_validation: bool, } /// A memory cache. #[derive(MallocSizeOf)] pub struct HttpCache { /// cached responses. entries: HashMap<CacheKey, Vec<CachedResource>>, } /// Determine if a response is cacheable by default <https://tools.ietf.org/html/rfc7231#section-6.1> fn is_cacheable_by_default(status_code: u16) -> bool { match status_code { 200 | 203 | 204 | 206 | 300 | 301 | 404 | 405 | 410 | 414 | 501 => true, _ => false, } } /// Determine if a given response is cacheable. /// Based on <https://tools.ietf.org/html/rfc7234#section-3> fn response_is_cacheable(metadata: &Metadata) -> bool { // TODO: if we determine that this cache should be considered shared: // 1. check for absence of private response directive <https://tools.ietf.org/html/rfc7234#section-5.2.2.6> // 2. check for absence of the Authorization header field. let mut is_cacheable = false; let headers = metadata.headers.as_ref().unwrap(); if headers.contains_key(header::EXPIRES) || headers.contains_key(header::LAST_MODIFIED) || headers.contains_key(header::ETAG) { is_cacheable = true; } if let Some(ref directive) = headers.typed_get::<CacheControl>() { if directive.no_store() {<|fim▁hole|> } if directive.public() || directive.s_max_age().is_some() || directive.max_age().is_some() || directive.no_cache() { is_cacheable = true; } } if let Some(pragma) = headers.typed_get::<Pragma>() { if pragma.is_no_cache() { return false; } } is_cacheable } /// Calculating Age /// <https://tools.ietf.org/html/rfc7234#section-4.2.3> fn calculate_response_age(response: &Response) -> Duration { // TODO: follow the spec more closely (Date headers, request/response lag, ...) if let Some(secs) = response.headers.get(header::AGE) { if let Ok(seconds_string) = secs.to_str() { if let Ok(secs) = seconds_string.parse::<i64>() { return Duration::seconds(secs); } } } Duration::seconds(0i64) } /// Determine the expiry date from relevant headers, /// or uses a heuristic if none are present. fn get_response_expiry(response: &Response) -> Duration { // Calculating Freshness Lifetime <https://tools.ietf.org/html/rfc7234#section-4.2.1> let age = calculate_response_age(&response); if let Some(directives) = response.headers.typed_get::<CacheControl>() { if directives.no_cache() { // Requires validation on first use. return Duration::seconds(0i64); } else { if let Some(secs) = directives.max_age().or(directives.s_max_age()) { let max_age = Duration::from_std(secs).unwrap(); if max_age < age { return Duration::seconds(0i64); } return max_age - age; } } } match response.headers.typed_get::<Expires>() { Some(t) => { // store the period of time from now until expiry let t: SystemTime = t.into(); let t = t.duration_since(SystemTime::UNIX_EPOCH).unwrap(); let desired = Timespec::new(t.as_secs() as i64, 0); let current = time::now().to_timespec(); if desired > current { return desired - current; } else { return Duration::seconds(0i64); } }, // Malformed Expires header, shouldn't be used to construct a valid response. 
None if response.headers.contains_key(header::EXPIRES) => return Duration::seconds(0i64), _ => {}, } // Calculating Heuristic Freshness // <https://tools.ietf.org/html/rfc7234#section-4.2.2> if let Some((ref code, _)) = response.raw_status { // <https://tools.ietf.org/html/rfc7234#section-5.5.4> // Since presently we do not generate a Warning header field with a 113 warn-code, // 24 hours minus response age is the max for heuristic calculation. let max_heuristic = Duration::hours(24) - age; let heuristic_freshness = if let Some(last_modified) = // If the response has a Last-Modified header field, // caches are encouraged to use a heuristic expiration value // that is no more than some fraction of the interval since that time. response.headers.typed_get::<LastModified>() { let current = time::now().to_timespec(); let last_modified: SystemTime = last_modified.into(); let last_modified = last_modified .duration_since(SystemTime::UNIX_EPOCH) .unwrap(); let last_modified = Timespec::new(last_modified.as_secs() as i64, 0); // A typical setting of this fraction might be 10%. let raw_heuristic_calc = (current - last_modified) / 10; let result = if raw_heuristic_calc < max_heuristic { raw_heuristic_calc } else { max_heuristic }; result } else { max_heuristic }; if is_cacheable_by_default(*code) { // Status codes that are cacheable by default can use heuristics to determine freshness. return heuristic_freshness; } else { // Other status codes can only use heuristic freshness if the public cache directive is present. if let Some(ref directives) = response.headers.typed_get::<CacheControl>() { if directives.public() { return heuristic_freshness; } } } } // Requires validation upon first use as default. Duration::seconds(0i64) } /// Request Cache-Control Directives /// <https://tools.ietf.org/html/rfc7234#section-5.2.1> fn get_expiry_adjustment_from_request_headers(request: &Request, expires: Duration) -> Duration { let directive = match request.headers.typed_get::<CacheControl>() { Some(data) => data, None => return expires, }; if let Some(max_age) = directive.max_stale() { return expires + Duration::from_std(max_age).unwrap(); } if let Some(max_age) = directive.max_age() { let max_age = Duration::from_std(max_age).unwrap(); if expires > max_age { return Duration::min_value(); } return expires - max_age; } if let Some(min_fresh) = directive.min_fresh() { let min_fresh = Duration::from_std(min_fresh).unwrap(); if expires < min_fresh { return Duration::min_value(); } return expires - min_fresh; } if directive.no_cache() || directive.no_store() { return Duration::min_value(); } expires } /// Create a CachedResponse from a request and a CachedResource. 
fn create_cached_response( request: &Request, cached_resource: &CachedResource, cached_headers: &HeaderMap, done_chan: &mut DoneChannel, ) -> Option<CachedResponse> { debug!("creating a cached response for {:?}", request.url()); if cached_resource.aborted.load(Ordering::Acquire) { return None; } let resource_timing = ResourceFetchTiming::new(request.timing_type()); let mut response = Response::new( cached_resource.data.metadata.data.final_url.clone(), resource_timing, ); response.headers = cached_headers.clone(); response.body = cached_resource.body.clone(); if let ResponseBody::Receiving(_) = *cached_resource.body.lock().unwrap() { debug!("existing body is in progress"); let (done_sender, done_receiver) = unbounded(); *done_chan = Some((done_sender.clone(), done_receiver)); cached_resource .awaiting_body .lock() .unwrap() .push(done_sender); } response.location_url = cached_resource.data.location_url.clone(); response.status = cached_resource.data.status.clone(); response.raw_status = cached_resource.data.raw_status.clone(); response.url_list = cached_resource.data.url_list.clone(); response.https_state = cached_resource.data.https_state.clone(); response.referrer = request.referrer.to_url().cloned(); response.referrer_policy = request.referrer_policy.clone(); response.aborted = cached_resource.aborted.clone(); let expires = cached_resource.data.expires; let adjusted_expires = get_expiry_adjustment_from_request_headers(request, expires); let now = Duration::seconds(time::now().to_timespec().sec); let last_validated = Duration::seconds(cached_resource.data.last_validated.to_timespec().sec); let time_since_validated = now - last_validated; // TODO: take must-revalidate into account <https://tools.ietf.org/html/rfc7234#section-5.2.2.1> // TODO: if this cache is to be considered shared, take proxy-revalidate into account // <https://tools.ietf.org/html/rfc7234#section-5.2.2.7> let has_expired = (adjusted_expires < time_since_validated) || (adjusted_expires == time_since_validated); let cached_response = CachedResponse { response: response, needs_validation: has_expired, }; Some(cached_response) } /// Create a new resource, based on the bytes requested, and an existing resource, /// with a status-code of 206. fn create_resource_with_bytes_from_resource( bytes: &[u8], resource: &CachedResource, ) -> CachedResource { CachedResource { request_headers: resource.request_headers.clone(), body: Arc::new(Mutex::new(ResponseBody::Done(bytes.to_owned()))), aborted: Arc::new(AtomicBool::new(false)), awaiting_body: Arc::new(Mutex::new(vec![])), data: Measurable(MeasurableCachedResource { metadata: resource.data.metadata.clone(), location_url: resource.data.location_url.clone(), https_state: resource.data.https_state.clone(), status: Some((StatusCode::PARTIAL_CONTENT, "Partial Content".into())), raw_status: Some((206, b"Partial Content".to_vec())), url_list: resource.data.url_list.clone(), expires: resource.data.expires.clone(), last_validated: resource.data.last_validated.clone(), }), } } /// Support for range requests <https://tools.ietf.org/html/rfc7233>. 
fn handle_range_request( request: &Request, candidates: &[&CachedResource], range_spec: Vec<(Bound<u64>, Bound<u64>)>, done_chan: &mut DoneChannel, ) -> Option<CachedResponse> { let mut complete_cached_resources = candidates .iter() .filter(|resource| match resource.data.raw_status { Some((ref code, _)) => *code == 200, None => false, }); let partial_cached_resources = candidates .iter() .filter(|resource| match resource.data.raw_status { Some((ref code, _)) => *code == 206, None => false, }); match ( range_spec.first().unwrap(), complete_cached_resources.next(), ) { // TODO: take the full range spec into account. // If we have a complete resource, take the request range from the body. // When there isn't a complete resource available, we loop over cached partials, // and see if any individual partial response can fulfill the current request for a bytes range. // TODO: combine partials that in combination could satisfy the requested range? // see <https://tools.ietf.org/html/rfc7233#section-4.3>. // TODO: add support for complete and partial resources, // whose body is in the ResponseBody::Receiving state. (&(Bound::Included(beginning), Bound::Included(end)), Some(ref complete_resource)) => { if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() { if end == u64::max_value() { // Prevent overflow on the addition below. return None; } let b = beginning as usize; let e = end as usize + 1; let requested = body.get(b..e); if let Some(bytes) = requested { let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource); let cached_headers = new_resource.data.metadata.headers.lock().unwrap(); let cached_response = create_cached_response(request, &new_resource, &*cached_headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } }, (&(Bound::Included(beginning), Bound::Included(end)), None) => { for partial_resource in partial_cached_resources { let headers = partial_resource.data.metadata.headers.lock().unwrap(); let content_range = headers.typed_get::<ContentRange>(); let (res_beginning, res_end) = match content_range { Some(range) => { if let Some(bytes_range) = range.bytes_range() { bytes_range } else { continue; } }, _ => continue, }; if res_beginning <= beginning && res_end >= end { let resource_body = &*partial_resource.body.lock().unwrap(); let requested = match resource_body { &ResponseBody::Done(ref body) => { let b = beginning as usize - res_beginning as usize; let e = end as usize - res_beginning as usize + 1; body.get(b..e) }, _ => continue, }; if let Some(bytes) = requested { let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource); let cached_response = create_cached_response(request, &new_resource, &*headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } } }, (&(Bound::Included(beginning), Bound::Unbounded), Some(ref complete_resource)) => { if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() { let b = beginning as usize; let requested = body.get(b..); if let Some(bytes) = requested { let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource); let cached_headers = new_resource.data.metadata.headers.lock().unwrap(); let cached_response = create_cached_response(request, &new_resource, &*cached_headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } }, (&(Bound::Included(beginning), Bound::Unbounded), None) => { for 
partial_resource in partial_cached_resources { let headers = partial_resource.data.metadata.headers.lock().unwrap(); let content_range = headers.typed_get::<ContentRange>(); let (res_beginning, res_end, total) = if let Some(range) = content_range { match (range.bytes_range(), range.bytes_len()) { (Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total), _ => continue, } } else { continue; }; if total == 0 { // Prevent overflow in the below operations from occurring. continue; }; if res_beginning < beginning && res_end == total - 1 { let resource_body = &*partial_resource.body.lock().unwrap(); let requested = match resource_body { &ResponseBody::Done(ref body) => { let from_byte = beginning as usize - res_beginning as usize; body.get(from_byte..) }, _ => continue, }; if let Some(bytes) = requested { let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource); let cached_response = create_cached_response(request, &new_resource, &*headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } } }, (&(Bound::Unbounded, Bound::Included(offset)), Some(ref complete_resource)) => { if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() { let from_byte = body.len() - offset as usize; let requested = body.get(from_byte..); if let Some(bytes) = requested { let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource); let cached_headers = new_resource.data.metadata.headers.lock().unwrap(); let cached_response = create_cached_response(request, &new_resource, &*cached_headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } }, (&(Bound::Unbounded, Bound::Included(offset)), None) => { for partial_resource in partial_cached_resources { let headers = partial_resource.data.metadata.headers.lock().unwrap(); let content_range = headers.typed_get::<ContentRange>(); let (res_beginning, res_end, total) = if let Some(range) = content_range { match (range.bytes_range(), range.bytes_len()) { (Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total), _ => continue, } } else { continue; }; if !(total >= res_beginning) || !(total >= res_end) || offset == 0 || offset == u64::max_value() { // Prevent overflow in the below operations from occurring. continue; } if (total - res_beginning) > (offset - 1) && (total - res_end) < offset + 1 { let resource_body = &*partial_resource.body.lock().unwrap(); let requested = match resource_body { &ResponseBody::Done(ref body) => { let from_byte = body.len() - offset as usize; body.get(from_byte..) }, _ => continue, }; if let Some(bytes) = requested { let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource); let cached_response = create_cached_response(request, &new_resource, &*headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } } }, // All the cases with Bound::Excluded should be unreachable anyway _ => return None, } None } impl HttpCache { /// Create a new memory cache instance. pub fn new() -> HttpCache { HttpCache { entries: HashMap::new(), } } /// Constructing Responses from Caches.
/// <https://tools.ietf.org/html/rfc7234#section-4> pub fn construct_response( &self, request: &Request, done_chan: &mut DoneChannel, ) -> Option<CachedResponse> { // TODO: generate warning headers as appropriate <https://tools.ietf.org/html/rfc7234#section-5.5> debug!("trying to construct cache response for {:?}", request.url()); if request.method != Method::GET { // Only Get requests are cached, avoid a url based match for others. debug!("non-GET method, not caching"); return None; } let entry_key = CacheKey::new(&request); let resources = self .entries .get(&entry_key)? .into_iter() .filter(|r| !r.aborted.load(Ordering::Relaxed)); let mut candidates = vec![]; for cached_resource in resources { let mut can_be_constructed = true; let cached_headers = cached_resource.data.metadata.headers.lock().unwrap(); let original_request_headers = cached_resource.request_headers.lock().unwrap(); if let Some(vary_value) = cached_headers.typed_get::<Vary>() { if vary_value.is_any() { debug!("vary value is any, not caching"); can_be_constructed = false } else { // For every header name found in the Vary header of the stored response. // Calculating Secondary Keys with Vary <https://tools.ietf.org/html/rfc7234#section-4.1> for vary_val in vary_value.iter_strs() { match request.headers.get(vary_val) { Some(header_data) => { // If the header is present in the request. if let Some(original_header_data) = original_request_headers.get(vary_val) { // Check that the value of the nominated header field, // in the original request, matches the value in the current request. if original_header_data != header_data { debug!("headers don't match, not caching"); can_be_constructed = false; break; } } }, None => { // If a header field is absent from a request, // it can only match a stored response if those headers, // were also absent in the original request. can_be_constructed = original_request_headers.get(vary_val).is_none(); if !can_be_constructed { debug!("vary header present, not caching"); } }, } if !can_be_constructed { break; } } } } if can_be_constructed { candidates.push(cached_resource); } } // Support for range requests if let Some(range_spec) = request.headers.typed_get::<Range>() { return handle_range_request( request, candidates.as_slice(), range_spec.iter().collect(), done_chan, ); } else { while let Some(cached_resource) = candidates.pop() { // Not a Range request. // Do not allow 206 responses to be constructed. // // See https://tools.ietf.org/html/rfc7234#section-3.1 // // A cache MUST NOT use an incomplete response to answer requests unless the // response has been made complete or the request is partial and // specifies a range that is wholly within the incomplete response. // // TODO: Combining partial content to fulfill a non-Range request // see https://tools.ietf.org/html/rfc7234#section-3.3 match cached_resource.data.raw_status { Some((ref code, _)) => { if *code == 206 { continue; } }, None => continue, } // Returning a response that can be constructed // TODO: select the most appropriate one, using a known mechanism from a selecting header field, // or using the Date header to return the most recent one. let cached_headers = cached_resource.data.metadata.headers.lock().unwrap(); let cached_response = create_cached_response(request, cached_resource, &*cached_headers, done_chan); if let Some(cached_response) = cached_response { return Some(cached_response); } } } debug!("couldn't find an appropriate response, not caching"); // The cache wasn't able to construct anything. 
None } /// Wake-up consumers of cached resources /// whose response body was still receiving data when the resource was constructed, /// and whose response has now either been completed or cancelled. pub fn update_awaiting_consumers(&self, request: &Request, response: &Response) { let entry_key = CacheKey::new(&request); let cached_resources = match self.entries.get(&entry_key) { None => return, Some(resources) => resources, }; // Ensure we only wake-up consumers of relevant resources, // ie we don't want to wake-up 200 awaiting consumers with a 206. let relevant_cached_resources = cached_resources.iter().filter(|resource| { if response.actual_response().is_network_error() { return *resource.body.lock().unwrap() == ResponseBody::Empty; } resource.data.raw_status == response.raw_status }); for cached_resource in relevant_cached_resources { let mut awaiting_consumers = cached_resource.awaiting_body.lock().unwrap(); if awaiting_consumers.is_empty() { continue; } let to_send = if cached_resource.aborted.load(Ordering::Acquire) { // In the case of an aborted fetch, // wake-up all awaiting consumers. // Each will then start a new network request. // TODO: Wake-up only one consumer, and make it the producer on which others wait. Data::Cancelled } else { match *cached_resource.body.lock().unwrap() { ResponseBody::Done(_) | ResponseBody::Empty => Data::Done, ResponseBody::Receiving(_) => { continue; }, } }; for done_sender in awaiting_consumers.drain(..) { let _ = done_sender.send(to_send.clone()); } } } /// Freshening Stored Responses upon Validation. /// <https://tools.ietf.org/html/rfc7234#section-4.3.4> pub fn refresh( &mut self, request: &Request, response: Response, done_chan: &mut DoneChannel, ) -> Option<Response> { assert_eq!(response.status.map(|s| s.0), Some(StatusCode::NOT_MODIFIED)); let entry_key = CacheKey::new(&request); if let Some(cached_resources) = self.entries.get_mut(&entry_key) { for cached_resource in cached_resources.iter_mut() { // done_chan will have been set to Some(..) by http_network_fetch. // If the body is not receiving data, set the done_chan back to None. // Otherwise, create a new dedicated channel to update the consumer. // The response constructed here will replace the 304 one from the network. let in_progress_channel = match *cached_resource.body.lock().unwrap() { ResponseBody::Receiving(..) => Some(unbounded()), ResponseBody::Empty | ResponseBody::Done(..) => None, }; match in_progress_channel { Some((done_sender, done_receiver)) => { *done_chan = Some((done_sender.clone(), done_receiver)); cached_resource .awaiting_body .lock() .unwrap() .push(done_sender); }, None => *done_chan = None, } // Received a response with 304 status code, in response to a request that matches a cached resource. // 1. update the headers of the cached resource. // 2. return a response, constructed from the cached resource. 
let resource_timing = ResourceFetchTiming::new(request.timing_type()); let mut constructed_response = Response::new( cached_resource.data.metadata.data.final_url.clone(), resource_timing, ); constructed_response.body = cached_resource.body.clone(); constructed_response.status = cached_resource.data.status.clone(); constructed_response.https_state = cached_resource.data.https_state.clone(); constructed_response.referrer = request.referrer.to_url().cloned(); constructed_response.referrer_policy = request.referrer_policy.clone(); constructed_response.raw_status = cached_resource.data.raw_status.clone(); constructed_response.url_list = cached_resource.data.url_list.clone(); cached_resource.data.expires = get_response_expiry(&constructed_response); let mut stored_headers = cached_resource.data.metadata.headers.lock().unwrap(); stored_headers.extend(response.headers); constructed_response.headers = stored_headers.clone(); return Some(constructed_response); } } None } fn invalidate_for_url(&mut self, url: &ServoUrl) { let entry_key = CacheKey::from_servo_url(url); if let Some(cached_resources) = self.entries.get_mut(&entry_key) { for cached_resource in cached_resources.iter_mut() { cached_resource.data.expires = Duration::seconds(0i64); } } } /// Invalidation. /// <https://tools.ietf.org/html/rfc7234#section-4.4> pub fn invalidate(&mut self, request: &Request, response: &Response) { // TODO(eijebong): Once headers support typed_get, update this to use them if let Some(Ok(location)) = response .headers .get(header::LOCATION) .map(HeaderValue::to_str) { if let Ok(url) = request.current_url().join(location) { self.invalidate_for_url(&url); } } if let Some(Ok(ref content_location)) = response .headers .get(header::CONTENT_LOCATION) .map(HeaderValue::to_str) { if let Ok(url) = request.current_url().join(&content_location) { self.invalidate_for_url(&url); } } self.invalidate_for_url(&request.url()); } /// Storing Responses in Caches. /// <https://tools.ietf.org/html/rfc7234#section-3> pub fn store(&mut self, request: &Request, response: &Response) { if pref!(network.http_cache.disabled) { return; } if request.method != Method::GET { // Only Get requests are cached. return; } if request.headers.contains_key(header::AUTHORIZATION) { // https://tools.ietf.org/html/rfc7234#section-3.1 // A shared cache MUST NOT use a cached response // to a request with an Authorization header field // // TODO: unless a cache directive that allows such // responses to be stored is present in the response. 
return; }; let entry_key = CacheKey::new(&request); let metadata = match response.metadata() { Ok(FetchMetadata::Filtered { filtered: _, unsafe_: metadata, }) | Ok(FetchMetadata::Unfiltered(metadata)) => metadata, _ => return, }; if !response_is_cacheable(&metadata) { return; } let expiry = get_response_expiry(&response); let cacheable_metadata = CachedMetadata { headers: Arc::new(Mutex::new(response.headers.clone())), data: Measurable(MeasurableCachedMetadata { final_url: metadata.final_url, content_type: metadata.content_type.map(|v| v.0.to_string()), charset: metadata.charset, status: metadata.status, }), }; let entry_resource = CachedResource { request_headers: Arc::new(Mutex::new(request.headers.clone())), body: response.body.clone(), aborted: response.aborted.clone(), awaiting_body: Arc::new(Mutex::new(vec![])), data: Measurable(MeasurableCachedResource { metadata: cacheable_metadata, location_url: response.location_url.clone(), https_state: response.https_state.clone(), status: response.status.clone(), raw_status: response.raw_status.clone(), url_list: response.url_list.clone(), expires: expiry, last_validated: time::now(), }), }; let entry = self.entries.entry(entry_key).or_insert_with(|| vec![]); entry.push(entry_resource); // TODO: Complete incomplete responses, including 206 response, when stored here. // See A cache MAY complete a stored incomplete response by making a subsequent range request // https://tools.ietf.org/html/rfc7234#section-3.1 } }<|fim▁end|>
return false;
<|file_name|>gpu_device_plugin_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package e2enode import ( "os/exec" "strconv" "time" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/uuid" kubeletmetrics "k8s.io/kubernetes/pkg/kubelet/metrics" "k8s.io/kubernetes/test/e2e/framework" "k8s.io/kubernetes/test/e2e/framework/gpu" "k8s.io/kubernetes/test/e2e/framework/metrics" "github.com/onsi/ginkgo" "github.com/onsi/gomega" "github.com/prometheus/common/model" ) // numberOfNVIDIAGPUs returns the number of GPUs advertised by a node // This is based on the Device Plugin system and expected to run on a COS based node // After the NVIDIA drivers were installed // TODO make this generic and not linked to COS only func numberOfNVIDIAGPUs(node *v1.Node) int64 { val, ok := node.Status.Capacity[gpu.NVIDIAGPUResourceName] if !ok { return 0 } return val.Value() } // NVIDIADevicePlugin returns the official Google Device Plugin pod for NVIDIA GPU in GKE func NVIDIADevicePlugin() *v1.Pod { ds, err := framework.DsFromManifest(gpu.GPUDevicePluginDSYAML) framework.ExpectNoError(err) p := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "device-plugin-nvidia-gpu-" + string(uuid.NewUUID()), Namespace: metav1.NamespaceSystem, }, Spec: ds.Spec.Template.Spec, } // Remove node affinity p.Spec.Affinity = nil return p } // Serial because the test restarts Kubelet var _ = framework.KubeDescribe("NVIDIA GPU Device Plugin [Feature:GPUDevicePlugin][NodeFeature:GPUDevicePlugin][Serial] [Disruptive]", func() { f := framework.NewDefaultFramework("device-plugin-gpus-errors") ginkgo.Context("DevicePlugin", func() { var devicePluginPod *v1.Pod var err error ginkgo.BeforeEach(func() { ginkgo.By("Ensuring that Nvidia GPUs exists on the node") if !checkIfNvidiaGPUsExistOnNode() { ginkgo.Skip("Nvidia GPUs do not exist on the node. 
Skipping test.") } ginkgo.By("Creating the Google Device Plugin pod for NVIDIA GPU in GKE") devicePluginPod, err = f.ClientSet.CoreV1().Pods(metav1.NamespaceSystem).Create(NVIDIADevicePlugin())<|fim▁hole|> ginkgo.By("Waiting for GPUs to become available on the local node") gomega.Eventually(func() bool { return numberOfNVIDIAGPUs(getLocalNode(f)) > 0 }, 5*time.Minute, framework.Poll).Should(gomega.BeTrue()) if numberOfNVIDIAGPUs(getLocalNode(f)) < 2 { ginkgo.Skip("Not enough GPUs to execute this test (at least two needed)") } }) ginkgo.AfterEach(func() { l, err := f.PodClient().List(metav1.ListOptions{}) framework.ExpectNoError(err) for _, p := range l.Items { if p.Namespace != f.Namespace.Name { continue } f.PodClient().Delete(p.Name, &metav1.DeleteOptions{}) } }) ginkgo.It("checks that when Kubelet restarts exclusive GPU assignation to pods is kept.", func() { ginkgo.By("Creating one GPU pod on a node with at least two GPUs") podRECMD := "devs=$(ls /dev/ | egrep '^nvidia[0-9]+$') && echo gpu devices: $devs" p1 := f.PodClient().CreateSync(makeBusyboxPod(gpu.NVIDIAGPUResourceName, podRECMD)) deviceIDRE := "gpu devices: (nvidia[0-9]+)" devID1 := parseLog(f, p1.Name, p1.Name, deviceIDRE) p1, err := f.PodClient().Get(p1.Name, metav1.GetOptions{}) framework.ExpectNoError(err) ginkgo.By("Restarting Kubelet and waiting for the current running pod to restart") restartKubelet() ginkgo.By("Confirming that after a kubelet and pod restart, GPU assignment is kept") ensurePodContainerRestart(f, p1.Name, p1.Name) devIDRestart1 := parseLog(f, p1.Name, p1.Name, deviceIDRE) framework.ExpectEqual(devIDRestart1, devID1) ginkgo.By("Restarting Kubelet and creating another pod") restartKubelet() framework.WaitForAllNodesSchedulable(f.ClientSet, framework.TestContext.NodeSchedulableTimeout) gomega.Eventually(func() bool { return numberOfNVIDIAGPUs(getLocalNode(f)) > 0 }, 5*time.Minute, framework.Poll).Should(gomega.BeTrue()) p2 := f.PodClient().CreateSync(makeBusyboxPod(gpu.NVIDIAGPUResourceName, podRECMD)) ginkgo.By("Checking that pods got a different GPU") devID2 := parseLog(f, p2.Name, p2.Name, deviceIDRE) framework.ExpectEqual(devID1, devID2) ginkgo.By("Deleting device plugin.") f.ClientSet.CoreV1().Pods(metav1.NamespaceSystem).Delete(devicePluginPod.Name, &metav1.DeleteOptions{}) ginkgo.By("Waiting for GPUs to become unavailable on the local node") gomega.Eventually(func() bool { node, err := f.ClientSet.CoreV1().Nodes().Get(framework.TestContext.NodeName, metav1.GetOptions{}) framework.ExpectNoError(err) return numberOfNVIDIAGPUs(node) <= 0 }, 10*time.Minute, framework.Poll).Should(gomega.BeTrue()) ginkgo.By("Checking that scheduled pods can continue to run even after we delete device plugin.") ensurePodContainerRestart(f, p1.Name, p1.Name) devIDRestart1 = parseLog(f, p1.Name, p1.Name, deviceIDRE) framework.ExpectEqual(devIDRestart1, devID1) ensurePodContainerRestart(f, p2.Name, p2.Name) devIDRestart2 := parseLog(f, p2.Name, p2.Name, deviceIDRE) framework.ExpectEqual(devIDRestart2, devID2) ginkgo.By("Restarting Kubelet.") restartKubelet() ginkgo.By("Checking that scheduled pods can continue to run even after we delete device plugin and restart Kubelet.") ensurePodContainerRestart(f, p1.Name, p1.Name) devIDRestart1 = parseLog(f, p1.Name, p1.Name, deviceIDRE) framework.ExpectEqual(devIDRestart1, devID1) ensurePodContainerRestart(f, p2.Name, p2.Name) devIDRestart2 = parseLog(f, p2.Name, p2.Name, deviceIDRE) framework.ExpectEqual(devIDRestart2, devID2) logDevicePluginMetrics() // Cleanup 
f.PodClient().DeleteSync(p1.Name, &metav1.DeleteOptions{}, framework.DefaultPodDeletionTimeout) f.PodClient().DeleteSync(p2.Name, &metav1.DeleteOptions{}, framework.DefaultPodDeletionTimeout) }) }) }) func checkIfNvidiaGPUsExistOnNode() bool { // Cannot use `lspci` because it is not installed on all distros by default. err := exec.Command("/bin/sh", "-c", "find /sys/devices/pci* -type f | grep vendor | xargs cat | grep 0x10de").Run() if err != nil { framework.Logf("check for nvidia GPUs failed. Got Error: %v", err) return false } return true } func logDevicePluginMetrics() { ms, err := metrics.GrabKubeletMetricsWithoutProxy(framework.TestContext.NodeName+":10255", "/metrics") framework.ExpectNoError(err) for msKey, samples := range ms { switch msKey { case kubeletmetrics.KubeletSubsystem + "_" + kubeletmetrics.DevicePluginAllocationDurationKey: for _, sample := range samples { latency := sample.Value resource := string(sample.Metric["resource_name"]) var quantile float64 if val, ok := sample.Metric[model.QuantileLabel]; ok { var err error if quantile, err = strconv.ParseFloat(string(val), 64); err != nil { continue } framework.Logf("Metric: %v ResourceName: %v Quantile: %v Latency: %v", msKey, resource, quantile, latency) } } case kubeletmetrics.KubeletSubsystem + "_" + kubeletmetrics.DevicePluginRegistrationCountKey: for _, sample := range samples { resource := string(sample.Metric["resource_name"]) count := sample.Value framework.Logf("Metric: %v ResourceName: %v Count: %v", msKey, resource, count) } } } }<|fim▁end|>
framework.ExpectNoError(err)
<|file_name|>fetch_info.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # @Author: karthik # @Date: 2016-12-10 21:40:07 # @Last Modified by: chandan # @Last Modified time: 2016-12-11 12:55:27 from models.portfolio import Portfolio from models.company import Company from models.position import Position import tenjin from tenjin.helpers import * import wikipedia import matplotlib.pyplot as plt from data_helpers import * from stock_data import * import BeautifulSoup as bs import urllib2 import re from datetime import date as dt engine = tenjin.Engine(path=['templates']) # info fetch handler def send_info_handler(bot, update, args): args = list(parse_args(args)) if len(args) == 0 or "portfolio" in [arg.lower() for arg in args] : send_portfolio_info(bot, update) else: info_companies = get_companies(args) send_companies_info(bot, update, info_companies) # get portfolio function def send_portfolio_info(bot, update): print "Userid: %d requested portfolio information" %(update.message.chat_id) context = { 'positions': Portfolio.instance.positions, 'wallet_value': Portfolio.instance.wallet_value, } html_str = engine.render('portfolio_info.pyhtml', context) bot.sendMessage(parse_mode="HTML", chat_id=update.message.chat_id, text=html_str) # get companies information def send_companies_info(bot, update, companies): print "Userid: %d requested information for following companies %s" % (update.message.chat_id, ','.join([c.name for c in companies])) for company in companies: context = { 'company': company, 'current_price': get_current_price(company), 'description': wikipedia.summary(company.name.split()[0], sentences=2) } wiki_page = wikipedia.page(company.name.split()[0]) html_page = urllib2.urlopen(wiki_page.url) soup = bs.BeautifulSoup(html_page) img_url = 'http:' + soup.find('td', { "class" : "logo" }).find('img')['src'] bot.sendPhoto(chat_id=update.message.chat_id, photo=img_url) html_str = engine.render('company_template.pyhtml', context) bot.sendMessage(parse_mode="HTML", chat_id=update.message.chat_id, text=html_str) symbols = [c.symbol for c in companies]<|fim▁hole|>
if len(symbols) >= 2: symbol_string = ", ".join(symbols[:-1]) + " and " + symbols[-1]
<|file_name|>issue-18576.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern:stop // #18576 // Make sure that calling an extern function pointer in an unreachable // context doesn't cause an LLVM assertion #[allow(unreachable_code)] fn main() { panic!("stop"); let pointer = other; pointer(); } extern "C" fn other() {}<|fim▁end|>
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
<|file_name|>test04-out-ds.js<|end_file_name|><|fim▁begin|>var $M = require("@effectful/debugger"), $x = $M.context, $ret = $M.ret, $unhandled = $M.unhandled, $brk = $M.brk, $lset = $M.lset, $mcall = $M.mcall, $m = $M.module("file.js", null, typeof module === "undefined" ? null : module, null, "$", { __webpack_require__: typeof __webpack_require__ !== "undefined" && __webpack_require__ }, null), $s$1 = [{ e: [1, "1:9-1:10"] }, null, 0], $s$2 = [{}, $s$1, 1], $m$0 = $M.fun("m$0", "file.js", null, null, [], 0, 2, "1:0-4:0", 32, function ($, $l, $p) { for (;;) switch ($.state = $.goto) { case 0: $lset($l, 1, $m$1($)); $.goto = 2; continue; case 1: $.goto = 2; return $unhandled($.error); case 2: return $ret($.result); default: throw new Error("Invalid state"); } }, null, null, 0, [[0, "1:0-3:1", $s$1], [16, "4:0-4:0", $s$1], [16, "4:0-4:0", $s$1]]), $m$1 = $M.fun("m$1", "e", null, $m$0, [], 0, 2, "1:0-3:1", 0, function ($, $l, $p) { for (;;) switch ($.state = $.goto) { case 0: $.goto = 1; $brk(); $.state = 1; case 1: $.goto = 2; $p = ($x.call = eff)(1); $.state = 2; case 2: $l[1] = $p;<|fim▁hole|> case 3: $.goto = 4; $mcall("log", console, $l[1] + $p); $.state = 4; case 4: $.goto = 6; $brk(); continue; case 5: $.goto = 6; return $unhandled($.error); case 6: return $ret($.result); default: throw new Error("Invalid state"); } }, null, null, 1, [[4, "2:2-2:31", $s$2], [2, "2:14-2:20", $s$2], [2, "2:23-2:29", $s$2], [2, "2:2-2:30", $s$2], [36, "3:1-3:1", $s$2], [16, "3:1-3:1", $s$2], [16, "3:1-3:1", $s$2]]); $M.moduleExports();<|fim▁end|>
$.goto = 3; $p = ($x.call = eff)(2); $.state = 3;
<|file_name|>0006_auto__add_field_tttrip_date.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'TtTrip.date' db.add_column(u'timetable_tttrip', 'date', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'TtTrip.date' db.delete_column(u'timetable_tttrip', 'date') models = { u'timetable.ttstop': { 'Meta': {'object_name': 'TtStop'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'stop_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}), 'stop_lat': ('django.db.models.fields.FloatField', [], {}), 'stop_lon': ('django.db.models.fields.FloatField', [], {}), 'stop_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'stop_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}) }, u'timetable.ttstoptime': { 'Meta': {'object_name': 'TtStopTime'}, 'exp_arrival': ('django.db.models.fields.DateTimeField', [], {}), 'exp_departure': ('django.db.models.fields.DateTimeField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'stop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtStop']"}), 'stop_sequence': ('django.db.models.fields.IntegerField', [], {}), 'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtTrip']"}) }, u'timetable.tttrip': { 'Meta': {'object_name': 'TtTrip'}, 'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'shape_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),<|fim▁hole|> } } complete_apps = ['timetable']<|fim▁end|>
'trip_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
<|file_name|>rpcrawtransaction.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto // Copyright (c) 2013 The paccoin developer // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include <boost/assign/list_of.hpp> #include "base58.h" #include "paccoinrpc.h" #include "db.h" #include "init.h" #include "main.h" #include "net.h" #include "wallet.h" using namespace std; using namespace boost; using namespace boost::assign; using namespace json_spirit; void ScriptPubKeyToJSON(const CScript& scriptPubKey, Object& out) { txnouttype type; vector<CTxDestination> addresses; int nRequired; out.push_back(Pair("asm", scriptPubKey.ToString())); out.push_back(Pair("hex", HexStr(scriptPubKey.begin(), scriptPubKey.end()))); if (!ExtractDestinations(scriptPubKey, type, addresses, nRequired)) { out.push_back(Pair("type", GetTxnOutputType(TX_NONSTANDARD))); return; } out.push_back(Pair("reqSigs", nRequired)); out.push_back(Pair("type", GetTxnOutputType(type))); Array a; BOOST_FOREACH(const CTxDestination& addr, addresses) a.push_back(CpaccoinAddress(addr).ToString()); out.push_back(Pair("addresses", a)); } void TxToJSON(const CTransaction& tx, const uint256 hashBlock, Object& entry) { entry.push_back(Pair("txid", tx.GetHash().GetHex())); entry.push_back(Pair("version", tx.nVersion)); entry.push_back(Pair("time", (boost::int64_t)tx.nTime)); entry.push_back(Pair("locktime", (boost::int64_t)tx.nLockTime)); if (tx.nVersion >= 2) { entry.push_back(Pair("tx-comment", tx.strTxComment)); } Array vin; BOOST_FOREACH(const CTxIn& txin, tx.vin) { Object in; if (tx.IsCoinBase()) in.push_back(Pair("coinbase", HexStr(txin.scriptSig.begin(), txin.scriptSig.end()))); else { in.push_back(Pair("txid", txin.prevout.hash.GetHex())); in.push_back(Pair("vout", (boost::int64_t)txin.prevout.n)); Object o; o.push_back(Pair("asm", txin.scriptSig.ToString())); o.push_back(Pair("hex", HexStr(txin.scriptSig.begin(), txin.scriptSig.end()))); in.push_back(Pair("scriptSig", o)); } in.push_back(Pair("sequence", (boost::int64_t)txin.nSequence)); vin.push_back(in); } entry.push_back(Pair("vin", vin)); Array vout; for (unsigned int i = 0; i < tx.vout.size(); i++) { const CTxOut& txout = tx.vout[i]; Object out; out.push_back(Pair("value", ValueFromAmount(txout.nValue))); out.push_back(Pair("n", (boost::int64_t)i)); Object o; ScriptPubKeyToJSON(txout.scriptPubKey, o); out.push_back(Pair("scriptPubKey", o)); vout.push_back(out); } entry.push_back(Pair("vout", vout)); if (hashBlock != 0) { entry.push_back(Pair("blockhash", hashBlock.GetHex())); map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock); if (mi != mapBlockIndex.end() && (*mi).second) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) { entry.push_back(Pair("confirmations", 1 + nBestHeight - pindex->nHeight)); entry.push_back(Pair("time", (boost::int64_t)pindex->nTime)); entry.push_back(Pair("blocktime", (boost::int64_t)pindex->nTime)); } else entry.push_back(Pair("confirmations", 0)); } } } Value getrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 2) throw runtime_error( "getrawtransaction <txid> [verbose=0]\n" "If verbose=0, returns a string that is\n" "serialized, hex-encoded data for <txid>.\n" "If verbose is non-zero, returns an Object\n" "with information about <txid>."); uint256 hash; hash.SetHex(params[0].get_str()); bool fVerbose = false; <|fim▁hole|> uint256 hashBlock = 0; if 
(!GetTransaction(hash, tx, hashBlock)) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction"); CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION); ssTx << tx; string strHex = HexStr(ssTx.begin(), ssTx.end()); if (!fVerbose) return strHex; Object result; result.push_back(Pair("hex", strHex)); TxToJSON(tx, hashBlock, result); return result; } Value listunspent(const Array& params, bool fHelp) { if (fHelp || params.size() > 3) throw runtime_error( "listunspent [minconf=1] [maxconf=9999999] [\"address\",...]\n" "Returns array of unspent transaction outputs\n" "with between minconf and maxconf (inclusive) confirmations.\n" "Optionally filtered to only include txouts paid to specified addresses.\n" "Results are an array of Objects, each of which has:\n" "{txid, vout, scriptPubKey, amount, confirmations}"); RPCTypeCheck(params, list_of(int_type)(int_type)(array_type)); int nMinDepth = 1; if (params.size() > 0) nMinDepth = params[0].get_int(); int nMaxDepth = 9999999; if (params.size() > 1) nMaxDepth = params[1].get_int(); set<CpaccoinAddress> setAddress; if (params.size() > 2) { Array inputs = params[2].get_array(); BOOST_FOREACH(Value& input, inputs) { CpaccoinAddress address(input.get_str()); if (!address.IsValid()) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid paccoin address: ")+input.get_str()); if (setAddress.count(address)) throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+input.get_str()); setAddress.insert(address); } } Array results; vector<COutput> vecOutputs; pwalletMain->AvailableCoins(vecOutputs, false); BOOST_FOREACH(const COutput& out, vecOutputs) { if (out.nDepth < nMinDepth || out.nDepth > nMaxDepth) continue; if(setAddress.size()) { CTxDestination address; if(!ExtractDestination(out.tx->vout[out.i].scriptPubKey, address)) continue; if (!setAddress.count(address)) continue; } int64 nValue = out.tx->vout[out.i].nValue; const CScript& pk = out.tx->vout[out.i].scriptPubKey; Object entry; entry.push_back(Pair("txid", out.tx->GetHash().GetHex())); entry.push_back(Pair("vout", out.i)); entry.push_back(Pair("scriptPubKey", HexStr(pk.begin(), pk.end()))); entry.push_back(Pair("amount",ValueFromAmount(nValue))); entry.push_back(Pair("confirmations",out.nDepth)); results.push_back(entry); } return results; } Value createrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() != 2) throw runtime_error( "createrawtransaction [{\"txid\":txid,\"vout\":n},...] 
{address:amount,...}\n" "Create a transaction spending given inputs\n" "(array of objects containing transaction id and output number),\n" "sending to given address(es).\n" "Returns hex-encoded raw transaction.\n" "Note that the transaction's inputs are not signed, and\n" "it is not stored in the wallet or transmitted to the network."); RPCTypeCheck(params, list_of(array_type)(obj_type)); Array inputs = params[0].get_array(); Object sendTo = params[1].get_obj(); CTransaction rawTx; BOOST_FOREACH(Value& input, inputs) { const Object& o = input.get_obj(); const Value& txid_v = find_value(o, "txid"); if (txid_v.type() != str_type) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing txid key"); string txid = txid_v.get_str(); if (!IsHex(txid)) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected hex txid"); const Value& vout_v = find_value(o, "vout"); if (vout_v.type() != int_type) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing vout key"); int nOutput = vout_v.get_int(); if (nOutput < 0) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout must be positive"); CTxIn in(COutPoint(uint256(txid), nOutput)); rawTx.vin.push_back(in); } set<CpaccoinAddress> setAddress; BOOST_FOREACH(const Pair& s, sendTo) { CpaccoinAddress address(s.name_); if (!address.IsValid()) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid paccoin address: ")+s.name_); if (setAddress.count(address)) throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_); setAddress.insert(address); CScript scriptPubKey; scriptPubKey.SetDestination(address.Get()); int64 nAmount = AmountFromValue(s.value_); CTxOut out(nAmount, scriptPubKey); rawTx.vout.push_back(out); } CDataStream ss(SER_NETWORK, PROTOCOL_VERSION); ss << rawTx; return HexStr(ss.begin(), ss.end()); } Value decoderawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() != 1) throw runtime_error( "decoderawtransaction <hex string>\n" "Return a JSON object representing the serialized, hex-encoded transaction."); RPCTypeCheck(params, list_of(str_type)); vector<unsigned char> txData(ParseHex(params[0].get_str())); CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION); CTransaction tx; try { ssData >> tx; } catch (std::exception &e) { throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); } Object result; TxToJSON(tx, 0, result); return result; } Value signrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 4) throw runtime_error( "signrawtransaction <hex string> [{\"txid\":txid,\"vout\":n,\"scriptPubKey\":hex},...] [<privatekey1>,...] 
[sighashtype=\"ALL\"]\n" "Sign inputs for raw transaction (serialized, hex-encoded).\n" "Second optional argument (may be null) is an array of previous transaction outputs that\n" "this transaction depends on but may not yet be in the blockchain.\n" "Third optional argument (may be null) is an array of base58-encoded private\n" "keys that, if given, will be the only keys used to sign the transaction.\n" "Fourth optional argument is a string that is one of six values; ALL, NONE, SINGLE or\n" "ALL|ANYONECANPAY, NONE|ANYONECANPAY, SINGLE|ANYONECANPAY.\n" "Returns json object with keys:\n" " hex : raw transaction with signature(s) (hex-encoded string)\n" " complete : 1 if transaction has a complete set of signature (0 if not)" + HelpRequiringPassphrase()); RPCTypeCheck(params, list_of(str_type)(array_type)(array_type)(str_type), true); vector<unsigned char> txData(ParseHex(params[0].get_str())); CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION); vector<CTransaction> txVariants; while (!ssData.empty()) { try { CTransaction tx; ssData >> tx; txVariants.push_back(tx); } catch (std::exception &e) { throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); } } if (txVariants.empty()) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Missing transaction"); // mergedTx will end up with all the signatures; it // starts as a clone of the rawtx: CTransaction mergedTx(txVariants[0]); bool fComplete = true; // Fetch previous transactions (inputs): map<COutPoint, CScript> mapPrevOut; for (unsigned int i = 0; i < mergedTx.vin.size(); i++) { CTransaction tempTx; MapPrevTx mapPrevTx; CTxDB txdb("r"); map<uint256, CTxIndex> unused; bool fInvalid; // FetchInputs aborts on failure, so we go one at a time. tempTx.vin.push_back(mergedTx.vin[i]); tempTx.FetchInputs(txdb, unused, false, false, mapPrevTx, fInvalid); // Copy results into mapPrevOut: BOOST_FOREACH(const CTxIn& txin, tempTx.vin) { const uint256& prevHash = txin.prevout.hash; if (mapPrevTx.count(prevHash) && mapPrevTx[prevHash].second.vout.size()>txin.prevout.n) mapPrevOut[txin.prevout] = mapPrevTx[prevHash].second.vout[txin.prevout.n].scriptPubKey; } } // Add previous txouts given in the RPC call: if (params.size() > 1 && params[1].type() != null_type) { Array prevTxs = params[1].get_array(); BOOST_FOREACH(Value& p, prevTxs) { if (p.type() != obj_type) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "expected object with {\"txid'\",\"vout\",\"scriptPubKey\"}"); Object prevOut = p.get_obj(); RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type)); string txidHex = find_value(prevOut, "txid").get_str(); if (!IsHex(txidHex)) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "txid must be hexadecimal"); uint256 txid; txid.SetHex(txidHex); int nOut = find_value(prevOut, "vout").get_int(); if (nOut < 0) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "vout must be positive"); string pkHex = find_value(prevOut, "scriptPubKey").get_str(); if (!IsHex(pkHex)) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "scriptPubKey must be hexadecimal"); vector<unsigned char> pkData(ParseHex(pkHex)); CScript scriptPubKey(pkData.begin(), pkData.end()); COutPoint outpoint(txid, nOut); if (mapPrevOut.count(outpoint)) { // Complain if scriptPubKey doesn't match if (mapPrevOut[outpoint] != scriptPubKey) { string err("Previous output scriptPubKey mismatch:\n"); err = err + mapPrevOut[outpoint].ToString() + "\nvs:\n"+ scriptPubKey.ToString(); throw JSONRPCError(RPC_DESERIALIZATION_ERROR, err); } } else mapPrevOut[outpoint] = 
scriptPubKey; } } bool fGivenKeys = false; CBasicKeyStore tempKeystore; if (params.size() > 2 && params[2].type() != null_type) { fGivenKeys = true; Array keys = params[2].get_array(); BOOST_FOREACH(Value k, keys) { CpaccoinSecret vchSecret; bool fGood = vchSecret.SetString(k.get_str()); if (!fGood) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,"Invalid private key"); CKey key; bool fCompressed; CSecret secret = vchSecret.GetSecret(fCompressed); key.SetSecret(secret, fCompressed); tempKeystore.AddKey(key); } } else EnsureWalletIsUnlocked(); const CKeyStore& keystore = (fGivenKeys ? tempKeystore : *pwalletMain); int nHashType = SIGHASH_ALL; if (params.size() > 3 && params[3].type() != null_type) { static map<string, int> mapSigHashValues = boost::assign::map_list_of (string("ALL"), int(SIGHASH_ALL)) (string("ALL|ANYONECANPAY"), int(SIGHASH_ALL|SIGHASH_ANYONECANPAY)) (string("NONE"), int(SIGHASH_NONE)) (string("NONE|ANYONECANPAY"), int(SIGHASH_NONE|SIGHASH_ANYONECANPAY)) (string("SINGLE"), int(SIGHASH_SINGLE)) (string("SINGLE|ANYONECANPAY"), int(SIGHASH_SINGLE|SIGHASH_ANYONECANPAY)) ; string strHashType = params[3].get_str(); if (mapSigHashValues.count(strHashType)) nHashType = mapSigHashValues[strHashType]; else throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid sighash param"); } bool fHashSingle = ((nHashType & ~SIGHASH_ANYONECANPAY) == SIGHASH_SINGLE); // Sign what we can: for (unsigned int i = 0; i < mergedTx.vin.size(); i++) { CTxIn& txin = mergedTx.vin[i]; if (mapPrevOut.count(txin.prevout) == 0) { fComplete = false; continue; } const CScript& prevPubKey = mapPrevOut[txin.prevout]; txin.scriptSig.clear(); // Only sign SIGHASH_SINGLE if there's a corresponding output: if (!fHashSingle || (i < mergedTx.vout.size())) SignSignature(keystore, prevPubKey, mergedTx, i, nHashType); // ... and merge in other signatures: BOOST_FOREACH(const CTransaction& txv, txVariants) { txin.scriptSig = CombineSignatures(prevPubKey, mergedTx, i, txin.scriptSig, txv.vin[i].scriptSig); } if (!VerifyScript(txin.scriptSig, prevPubKey, mergedTx, i, true, 0)) fComplete = false; } Object result; CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION); ssTx << mergedTx; result.push_back(Pair("hex", HexStr(ssTx.begin(), ssTx.end()))); result.push_back(Pair("complete", fComplete)); return result; } Value sendrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 1) throw runtime_error( "sendrawtransaction <hex string>\n" "Submits raw transaction (serialized, hex-encoded) to local node and network."); RPCTypeCheck(params, list_of(str_type)); // parse hex string from parameter vector<unsigned char> txData(ParseHex(params[0].get_str())); CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION); CTransaction tx; // deserialize binary data stream try { ssData >> tx; } catch (std::exception &e) { throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); } uint256 hashTx = tx.GetHash(); // See if the transaction is already in a block // or in the memory pool: CTransaction existingTx; uint256 hashBlock = 0; if (GetTransaction(hashTx, existingTx, hashBlock)) { if (hashBlock != 0) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("transaction already in block ")+hashBlock.GetHex()); // Not in block, but already in the memory pool; will drop // through to re-relay it. 
} else { // push to local node CTxDB txdb("r"); if (!tx.AcceptToMemoryPool(txdb)) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX rejected"); SyncWithWallets(tx, NULL, true); } RelayMessage(CInv(MSG_TX, hashTx), tx); return hashTx.GetHex(); }<|fim▁end|>
if (params.size() > 1) fVerbose = (params[1].get_int() != 0); CTransaction tx;
<|file_name|>verbose_fixtures.py<|end_file_name|><|fim▁begin|>from contextional import GCM with GCM("A") as A: @GCM.add_setup def setUp(): pass @GCM.add_teardown def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): raise Exception pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup def setUp(): raise Exception pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown def tearDown(): raise Exception() @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown def tearDown(): raise Exception() @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_teardown def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown def tearDown(): raise Exception() @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_teardown def tearDown(): raise Exception() pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown def tearDown(): raise Exception() @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass @GCM.add_teardown def tearDown(): raise Exception() @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup("setup w/ description") def 
setUp(): pass @GCM.add_teardown("teardown w/ description") def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup("setup w/ description") def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown("teardown w/ description") def tearDown(): raise Exception() @GCM.add_teardown("teardown w/ description") def tearDown(): pass A.create_tests() with GCM("A") as A: @GCM.add_setup def setUp(): pass @GCM.add_teardown def tearDown(): pass with GCM.add_group("B"): @GCM.add_setup def setUp(): pass @GCM.add_test("some test") def test(case): pass @GCM.add_teardown def tearDown(): pass A.create_tests() expected_stream_output = [ "A", " B", " some test ... ok", "A", " # setup w/ description ", " B", " # setup w/ description ", " some test ... ok", " # teardown w/ description ", " # teardown w/ description ", "A", " # setup w/ description ERROR", " B", " some test ... FAIL", " # teardown w/ description ", "A", " # setup (1/1) ERROR", " B", " some test ... FAIL", " # teardown w/ description ", "A", " # setup w/ description ", " B", " # setup w/ description ", " some test ... ok", " # teardown (1/2) ERROR", " # teardown w/ description ", "A", " # setup w/ description ", " B", " # setup w/ description ", " some test ... ok", " # teardown (1/2) ERROR", "A", " # setup w/ description ", " B", " # setup w/ description ", " some test ... ok", " # teardown (1/2) ERROR", "A",<|fim▁hole|> " B", " # setup w/ description ", " some test ... ok", " # teardown (1/2) ERROR", " # teardown (1/2) ERROR", "A", " # setup w/ description ", " B", " # setup w/ description ", " some test ... ok", " # teardown w/ description ", " # teardown (2/3) ERROR", " # teardown w/ description ", "A", " # setup w/ description ", " B", " # setup w/ description ", " some test ... ok", " # teardown w/ description ERROR", " # teardown w/ description ", "A", " B", " some test ... ok", ]<|fim▁end|>
" # setup w/ description ",