# File: _tickformatstops.py
import _plotly_utils.basevalidators
class TickformatstopsValidator(_plotly_utils.basevalidators.CompoundArrayValidator):
def __init__(
self, plotly_name="tickformatstops", parent_name="contour.colorbar", **kwargs
):
super(TickformatstopsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Tickformatstop"),
data_docs=kwargs.pop(
"data_docs",
"""
dtickrange
range [*min*, *max*], where "min", "max" -
dtick values which describe some zoom level, it
is possible to omit "min" or "max" value by
passing "null"
enabled
Determines whether or not this stop is used. If
`false`, this stop is ignored even within its
`dtickrange`.
name
When used in a template, named items are
created in the output figure in addition to any
items the figure already has in this array. You
can modify these items in the output figure by
making your own item with `templateitemname`
matching this `name` alongside your
modifications (including `visible: false` or
`enabled: false` to hide it). Has no effect
outside of a template.
templateitemname
Used to refer to a named item in this array in
the template. Named items from the template
will be created even without a matching item in
the input figure, but you can modify one by
making an item with `templateitemname` matching
its `name`, alongside your modifications
(including `visible: false` or `enabled: false`
to hide it). If there is no template or no
matching item, this item will be hidden unless
you explicitly show it with `visible: true`.
value
string - dtickformat for described zoom level,
the same as "tickformat"
""",
),
**kwargs
)
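
# Example (not part of the generated validator): a sketch of how the
# `tickformatstops` property this class validates is supplied on a contour
# trace's colorbar. The figure data and d3 format strings are invented for
# illustration.
if __name__ == "__main__":
    import plotly.graph_objects as go

    fig = go.Figure(go.Contour(
        z=[[1, 2], [3, 4]],
        colorbar=dict(
            tickformatstops=[
                # fine dtick spacing: show two decimal places
                dict(dtickrange=[None, 1], value=".2f"),
                # coarse dtick spacing: whole numbers only
                dict(dtickrange=[1, None], value=".0f", enabled=True),
            ]
        ),
    ))
    fig.show()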
// File: lib.rs
//! Simulated broker used for backtests. Contains facilities for simulating trades,
//! managing balances, and reporting on statistics from previous trades.
//!
//! See README.md for more information about the specifics of the SimBroker implementation
//! and a description of its functionality.
#![feature(libc, rustc_attrs, core_intrinsics, conservative_impl_trait, associated_consts, custom_derive, test, slice_patterns, rand)]
extern crate test;
extern crate futures;
extern crate uuid;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate tickgrinder_util;
#[macro_use]
extern crate from_hashmap;
extern crate libc;
extern crate rand;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::collections::BinaryHeap;
use std::sync::{Arc, mpsc};
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use std::ops::{Index, IndexMut};
use std::mem;
use libc::c_void;
use futures::{Stream, oneshot, Oneshot, Complete};
use futures::stream::BoxStream;
use futures::sync::mpsc::{channel, Sender};
use uuid::Uuid;
use rand::Rng;
use tickgrinder_util::trading::tick::*;
pub use tickgrinder_util::trading::broker::*;
use tickgrinder_util::trading::trading_condition::*;
use tickgrinder_util::transport::command_server::CommandServer;
use tickgrinder_util::transport::tickstream::{TickGenerator, TickGenerators};
use tickgrinder_util::conf::CONF;
mod tests;
mod helpers;
pub use self::helpers::*;
mod client;
pub use self::client::*;
mod superlog;
use superlog::SuperLogger;
// link with the libboost_random wrapper
#[link(name="rand_bindings")]
extern {
fn init_rng(seed: u32) -> *mut c_void;
fn rand_int_range(void_rng: *mut c_void, min: i32, max: i32) -> u32;
}
/// A simulated broker that is used as the endpoint for trading activity in backtests. This is the broker backend
/// that creates/ingests streams that interact with the client.
pub struct SimBroker {
/// Contains all the accounts simulated by the SimBroker
pub accounts: Accounts,
/// A copy of the settings generated from the input HashMap
pub settings: SimBrokerSettings,
/// Contains the streams that yield `Tick`s for the SimBroker as well as data about the symbols and other metadata.
symbols: Symbols,
/// Priority queue that forms the basis of the internal ordered event loop.
pq: SimulationQueue,
/// Timestamp of last price update received by broker
timestamp: u64,
/// Receiving end of the channel over which the `SimBrokerClient` sends messages
client_rx: Option<mpsc::Receiver<(BrokerAction, Complete<BrokerResult>)>>,
/// A handle to the sender for the channel through which push messages are sent
push_stream_handle: Option<Sender<(u64, BrokerResult)>>,
/// A handle to the receiver for the channel through which push messages are received
push_stream_recv: Option<Box<Stream<Item=(u64, BrokerResult), Error=()> + Send>>,
/// The CommandServer used for logging
pub cs: CommandServer,
/// Holds a logger used to log detailed data to flatfile if the `superlog` feature is enabled and an empty struct otherwise.
logger: SuperLogger,
/// A source of deterministic PRNG used for generating Uuids.
prng: *mut c_void,
}
// .-.
unsafe impl Send for SimBroker {}
impl SimBroker {
pub fn new(
settings: SimBrokerSettings, cs: CommandServer, client_rx: mpsc::Receiver<(BrokerAction, Complete<BrokerResult>)>,
) -> Result<SimBroker, BrokerError> {
let logger = SuperLogger::new();
let mut accounts = Accounts::new(logger.clone());
// set up the deterministically random data generator if it's enabled in the config
let seed: u32 = if CONF.fuzzer_deterministic_rng {
let mut sum = 0;
// convert the seed string into an integer for seeding the fuzzer
for c in CONF.fuzzer_seed.chars() {
sum += c as u32;
}
sum
} else {
let mut rng = rand::thread_rng();
rng.gen()
};
let rng = unsafe { init_rng(seed) };
let uuid = gen_uuid(rng);
// create with one account with the starting balance.
let account = Account {
uuid: uuid,
ledger: Ledger::new(settings.starting_balance),
live: false,
};
accounts.insert(uuid, account);
// TODO: Make sure that 0 is the right buffer size for this channel
let (client_push_tx, client_push_rx) = channel::<(u64, BrokerResult)>(0);
// try to deserialize the "tickstreams" parameter of the input settings to get a list of tickstreams to register
let tickstreams: Vec<(String, TickGenerators, bool, usize)> = serde_json::from_str(&settings.tickstreams)
.map_err(|_| BrokerError::Message{message: String::from("Unable to deserialize the input tickstreams into a vector!")})?;
let mut sim = SimBroker {
accounts: accounts,
settings: settings,
symbols: Symbols::new(cs.clone()),
pq: SimulationQueue::new(),
timestamp: 0,
client_rx: Some(client_rx),
push_stream_handle: Some(client_push_tx),
push_stream_recv: Some(client_push_rx.boxed()),
cs: cs,
logger: logger,
prng: rng,
};
// create an actual tickstream for each of the definitions and subscribe to all of them
for (name, def, is_fx, decimals) in tickstreams {
let mut gen: Box<TickGenerator> = def.get();
let strm = gen.get_raw().map_err(|s| BrokerError::Message{message: s})?;
sim.register_tickstream(name, strm, is_fx, decimals)?;
}
Ok(sim)
}
/// Starts the simulation process. Ticks are read in from the inputs and processed internally into
/// the priority queue. The source of blocking here is determined by the client. The `Stream`s of `Tick`s
/// that are handed off have a capacity of 1 so that the `Sender` will block until it is consumed by
/// the client.
///
/// The assumption is that the client will do all its processing and have a chance to
/// submit `BrokerActions` to the SimBroker before processing more ticks, thus preserving the
/// strict ordering by timestamp of events and fully simulating asynchronous operation.
pub fn init_sim_loop(&mut self) {
// initialize the internal queue with values from attached tickstreams
// all tickstreams should be added by this point
self.pq.init(&mut self.symbols);
self.cs.debug(None, "Internal simulation queue has been initialized.");
self.logger.event_log(self.timestamp, "Starting the great simulation loop...");
}
/// Called by the fuzzer executor to drive progress on the simulation. Returns the number of client
/// actions (tickstream ticks + pushstream messages) that were sent to the client during this tick.
pub fn tick_sim_loop(&mut self, num_last_actions: usize, buffer: &mut Vec<TickOutput>) -> usize {
// first check if we have any messages from the client to process into the queue
{ // borrow-b-gone
let rx = self.client_rx.as_mut().unwrap();
for _ in 0..num_last_actions {
// get the next message from the client receiver
// println!("Blocking for message from client...");
let (action, complete) = rx.recv().expect("Error from client receiver!");
// println!("Got message from client: {:?}", action);
// determine how long it takes the broker to process this message internally
let execution_delay = self.settings.get_delay(&action);
// insert this message into the internal queue adding on processing time
let qi = QueueItem {
timestamp: self.timestamp + execution_delay,
unit: WorkUnit::ActionComplete(complete, action),
};
self.logger.event_log(self.timestamp, &format!("Pushing new ActionComplete into pq: {:?}", qi.unit));
self.pq.push(qi);
}
}
if self.timestamp % 100000 == 0 {
self.cs.notice(None, &format!("{} ticks processed", self.timestamp));
}
let item = self.pq.pop().unwrap();
self.timestamp = item.timestamp;
let mut client_event_count = 0;
// then process the new item we took out of the queue
match item.unit {
// A tick arriving at the broker. The client doesn't get to know until after network delay.
WorkUnit::NewTick(symbol_ix, tick) => {
// update the price for the popped tick's symbol
let price = (tick.bid, tick.ask);
self.symbols[symbol_ix].price = price;
// push the ClientTick event back into the queue + network delay
self.pq.push(QueueItem {
timestamp: tick.timestamp as u64 + self.settings.ping_ns,
unit: WorkUnit::ClientTick(symbol_ix, tick),
});
// check to see if we have any actions to take on open positions and take them if we do
self.logger.event_log(
self.timestamp,
&format!("Ticking positions in response to new tick: ({}, {:?})", symbol_ix, tick)
);
client_event_count += self.tick_positions(symbol_ix, (tick.bid, tick.ask,), client_event_count, buffer);
// push the next future tick into the queue
self.logger.event_log(self.timestamp, &format!("Pushing ClientTick into queue: ({}, {:?})", symbol_ix, tick));
self.pq.push_next_tick(&mut self.symbols);
},
// A tick arriving at the client. We now send it down the Client's channels and block
// until it is consumed.
WorkUnit::ClientTick(symbol_ix, tick) => {
// TODO: Check to see if this does a copy and if it does, find a way to eliminate it
let mut inner_symbol = &mut self.symbols[symbol_ix];
self.logger.event_log(self.timestamp, &format!("Sending tick to client: ({}, {:?})", symbol_ix, tick));
// send the tick through the client stream, blocking until it is consumed by the client.
inner_symbol.send_client(tick);
// put the message into the result buffer and increment its length
buffer[client_event_count] = TickOutput::Tick(symbol_ix, tick);
client_event_count += 1;
},
// The moment the broker finishes processing an action and the action takes place.
// Begins the network delay for the trip back to the client.
WorkUnit::ActionComplete(future, action) => {
// process the message and re-insert the response into the queue
assert_eq!(self.timestamp, item.timestamp);
let res = self.exec_action(&action);
// calculate when the response would be received by the client
// then re-insert the response into the queue
let res_time = item.timestamp + self.settings.ping_ns;
let item = QueueItem {
timestamp: res_time,
unit: WorkUnit::Response(future, res),
};
self.pq.push(item);
},
// The moment a response reaches the client.
WorkUnit::Response(future, res) => {
// fulfill the future with the result
match res {
Ok(BrokerMessage::AccountListing{accounts: _}) => {
let msg = "Fulfilling work unit Ok(AccountListing{_})'s oneshot";
self.logger.event_log(self.timestamp, msg);
},
Ok(BrokerMessage::Ledger{ledger: _}) => {
let msg = "Fulfilling work unit Ok(Ledger{_})'s oneshot";
self.logger.event_log(self.timestamp, msg);
},
_ => self.logger.event_log(self.timestamp, &format!("Fulfilling work unit {:?}'s oneshot", res)),
};
future.complete(res.clone());
// send the push message through the channel, blocking until it's consumed by the client.
self.push_msg(res.clone());
// put the message into the result buffer and increment its length
buffer[client_event_count] = TickOutput::Pushstream(self.timestamp, res);
client_event_count += 1;
},
// The moment a spurious notification reaches the client. Network delay is already taken into account,
// so we can deliver it immediately.
WorkUnit::Notification(res) => {
self.logger.event_log(self.timestamp, &format!("Delivering spurious notification to client: {:?}", res));
// send the push message through the channel, blocking until it's consumed by the client.
self.push_msg(res.clone());
// put the message into the result buffer and increment its length
buffer[client_event_count] = TickOutput::Pushstream(self.timestamp, res);
client_event_count += 1;
}
}
client_event_count
}
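// Sketch of the executor-side driver pattern that `init_sim_loop` and
// `tick_sim_loop` assume. The construction of `settings` and `cs` and the
// buffer setup below are assumptions for illustration, not this crate's
// actual fuzzer harness (`prefilled_buffer` is a hypothetical helper):
//
//     let (tx, rx) = std::sync::mpsc::channel();
//     let mut broker = SimBroker::new(settings, cs, rx).unwrap();
//     broker.init_sim_loop();
//     // `tick_sim_loop` writes results by index, so the buffer must be
//     // pre-filled to its full length rather than merely given capacity.
//     let mut buffer: Vec<TickOutput> = prefilled_buffer(256);
//     let mut last_actions = 0;
//     loop {
//         last_actions = broker.tick_sim_loop(last_actions, &mut buffer);
//     }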
/// Immediately sends a message over the broker's push channel. Should only be called from within
/// the SimBroker's internal event handling loop since it immediately sends the message.
fn push_msg(&mut self, _: BrokerResult) {
// self.logger.event_log(self.timestamp, &format!("`push_msg()` sending message to client: {:?}", msg));
// let sender = mem::replace(&mut self.push_stream_handle, None).unwrap();
// let new_sender = sender.send((self.timestamp, msg)).wait().expect("Unable to push_msg");
// mem::replace(&mut self.push_stream_handle, Some(new_sender));
}
/// Actually carries out the action of the supplied BrokerAction (simulates it being received and processed
/// by a remote broker) and returns the result of the action. The provided timestamp is that of
/// when it was received by the broker (after delays and simulated lag).
fn exec_action(&mut self, cmd: &BrokerAction) -> BrokerResult {
self.logger.event_log(self.timestamp, &format!("`exec_action()`: {:?}", cmd));
match cmd {
&BrokerAction::Ping => {
Ok(BrokerMessage::Pong{time_received: self.timestamp})
},
&BrokerAction::TradingAction{account_uuid, ref action} => {
match action {
&TradingAction::MarketOrder{ref symbol, long, size, stop, take_profit, max_range} => {
match self.symbols.get_index(symbol) {
Some(ix) => self.market_open(account_uuid, ix, long, size, stop, take_profit, max_range),
None => Err(BrokerError::NoSuchSymbol),
}
},
&TradingAction::MarketClose{uuid, size} => {
self.market_close(account_uuid, uuid, size)
},
&TradingAction::LimitOrder{ref symbol, long, size, stop, take_profit, entry_price} => {
match self.symbols.get_index(symbol) {
Some(ix) => self.place_order(account_uuid, ix, entry_price, long, size, stop, take_profit),
None => Err(BrokerError::NoSuchSymbol),
}
},
// no support for partial closes at this time
&TradingAction::LimitClose{uuid, size, exit_price} => {
// limit close just means to take profit when we hit a certain price, so just adjust the TP
self.modify_position(account_uuid, uuid, None, Some(Some(exit_price)))
},
&TradingAction::ModifyOrder{uuid, size, entry_price, stop, take_profit} => {
self.modify_order(account_uuid, uuid, size, entry_price, stop, take_profit)
},
&TradingAction::CancelOrder{uuid} => {
self.cancel_order(account_uuid, uuid)
}
&TradingAction::ModifyPosition{uuid, stop, take_profit} => {
self.modify_position(account_uuid, uuid, Some(stop), Some(take_profit))
},
}
},
&BrokerAction::GetLedger{account_uuid} => {
match self.accounts.get(&account_uuid) {
Some(acct) => Ok(BrokerMessage::Ledger{ledger: acct.ledger.clone()}),
None => Err(BrokerError::NoSuchAccount),
}
},
&BrokerAction::ListAccounts => {
let mut res = Vec::with_capacity(self.accounts.len());
for (_, acct) in self.accounts.iter() {
res.push(acct.clone());
}
Ok(BrokerMessage::AccountListing{accounts: res})
}
&BrokerAction::Disconnect => unimplemented!(),
}
}
/// Called when the balance of a ledger has been changed. Automatically takes into account ping.
fn buying_power_changed(&mut self, account_uuid: Uuid, new_buying_power: usize) {
self.pq.push(QueueItem{
timestamp: self.timestamp + self.settings.ping_ns,
unit: WorkUnit::Notification(Ok(BrokerMessage::LedgerBalanceChange{
account_uuid: account_uuid,
new_buying_power: new_buying_power,
})),
});
}
/// Creates a new pending position on the `SimBroker`.
fn place_order(
&mut self, account_uuid: Uuid, symbol_ix: usize, limit_price: usize, long: bool, size: usize,
stop: Option<usize>, take_profit: Option<usize>,
) -> BrokerResult {
let opt = self.get_price(symbol_ix);
if opt.is_none() {
return Err(BrokerError::NoSuchSymbol)
}
let (bid, ask) = opt.unwrap();
let order = Position {
creation_time: self.timestamp,
symbol_id: symbol_ix,
size: size,
price: Some(limit_price),
long: long,
stop: stop,
take_profit: take_profit,
execution_time: None,
execution_price: None,
exit_price: None,
exit_time: None,
};
// make sure the supplied parameters are sane
let _ = order.check_sanity()?;
// check if we're able to open this position right away at market price
match order.is_open_satisfied(bid, ask) {
// if this order is fillable right now, open it.
Some(entry_price) => {
let res = self.market_open(account_uuid, symbol_ix, long, size, stop, take_profit, Some(0));
// this should always succeed
if res.is_err() {
self.logger.error_log(&format!("Error while trying to place order: {:?}, {:?}", &order, res));
}
// assert!(res.is_ok());
return res
},
None => (),
}
let pos_value = self.get_position_value(&order)?;
// if we're not able to open it, try to place the order.
let res = match self.accounts.entry(account_uuid) {
Entry::Occupied(mut o) => {
let account = o.get_mut();
account.ledger.place_order(order.clone(), pos_value, gen_uuid(self.prng))
},
Entry::Vacant(_) => {
Err(BrokerError::NoSuchAccount)
},
};
// if the order was actually placed, notify the cache that we've opened a new order
// also send notification of ledger buying power change
match &res {
&Ok(ref msg) => {
match msg {
&BrokerMessage::OrderPlaced{order_id, order: _, timestamp: _} => {
self.accounts.order_placed(&order, order_id, account_uuid);
let new_buying_power = self.accounts.get(&account_uuid).unwrap().ledger.buying_power;
self.buying_power_changed(account_uuid, new_buying_power);
},
_ => (),
}
},
&Err(_) => (),
}
res
}
/// Attempts to open a position at the current market price with options for setting stop loss or take profit.
/// Right now, this assumes that the order is filled as soon as it is placed (after the processing delay is taken
/// into account) and that it is filled fully.
fn market_open(
&mut self, account_uuid: Uuid, symbol_ix: usize, long: bool, size: usize, stop: Option<usize>,
take_profit: Option<usize>, max_range: Option<usize>
) -> BrokerResult {
let opt = self.get_price(symbol_ix);
if opt.is_none() {
return Err(BrokerError::NoSuchSymbol)
}
let (bid, ask) = opt.unwrap();
let cur_price = if long { ask } else { bid };
let pos = Position {
creation_time: self.timestamp,
symbol_id: symbol_ix,
size: size,
price: Some(cur_price),
long: long,
stop: stop,
take_profit: take_profit,
execution_time: Some(self.timestamp + self.settings.execution_delay_ns),
execution_price: Some(cur_price),
exit_price: None,
exit_time: None,
};
// make sure the supplied parameters are sane
let _ = pos.check_sanity()?;
let pos_value = self.get_position_value(&pos)?;
let pos_uuid = gen_uuid(self.prng);
let new_buying_power;
let res = {
let acct_entry = self.accounts.entry(account_uuid);
match acct_entry {
Entry::Occupied(mut occ) => {
let mut account = occ.get_mut();
// manually subtract the cost of the position from the account balance
if account.ledger.buying_power < pos_value {
return Err(BrokerError::InsufficientBuyingPower);
} else {
account.ledger.buying_power -= pos_value;
new_buying_power = account.ledger.buying_power;
}
// create the position in the `Ledger`
account.ledger.open_position(pos_uuid, pos.clone())
},
Entry::Vacant(_) => {
return Err(BrokerError::NoSuchAccount);
}
}
};
// that should never fail
assert!(res.is_ok());
// add the position to the cache for checking when to close it
self.accounts.position_opened_immediate(&pos, pos_uuid, account_uuid);
// send notification about the change in ledger buying power
self.buying_power_changed(account_uuid, new_buying_power);
res
}
/// Attempts to close part of a position at market price. Right now, this assumes that the order is
/// fully filled as soon as it is placed (after the processing delay is taken into account).
fn market_close(&mut self, account_id: Uuid, position_uuid: Uuid, size: usize) -> BrokerResult {
if size == 0 {
let ts_string = self.timestamp.to_string();
self.cs.warning(
Some(&ts_string),
&format!("Warning: Attempted to close 0 units of position with uuid {}", position_uuid)
);
// TODO: Add configuration setting to optionally return an error
}
let pos = {
let account = match self.accounts.entry(account_id) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(_) => {
return Err(BrokerError::NoSuchAccount);
},
};
match account.ledger.open_positions.entry(position_uuid) {
Entry::Occupied(o) => o.get().clone(),
Entry::Vacant(_) => {
return Err(BrokerError::NoSuchPosition);
}
}
};
let pos_value = self.get_position_value(&pos)?;
let new_buying_power;
let res = {
let account = self.accounts.get_mut(&account_id).unwrap();
let modification_cost = (pos_value / pos.size) * size;
let res = account.ledger.resize_position(position_uuid, (-1 * size as isize), modification_cost, self.timestamp);
new_buying_power = account.ledger.buying_power;
res
};
// if the position was fully closed, remove it from the cache and send notification of ledger buying power change
match res {
Ok(ref message) => match message {
&BrokerMessage::PositionClosed{position: ref pos, position_id: pos_uuid, reason: _, timestamp: _} => {
self.accounts.position_closed(pos, pos_uuid);
self.buying_power_changed(account_id, new_buying_power);
},
_ => (),
},
Err(_) => (),
}
res
}
/// Modifies an order, setting the parameters of the contained `Position` equal to those supplied.
fn modify_order(
&mut self, account_uuid: Uuid, pos_uuid: Uuid, size: usize, entry_price: usize,
stop: Option<usize>, take_profit: Option<usize>,
) -> BrokerResult {
let res = {
let order = {
let account = match self.accounts.entry(account_uuid) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(_) => {
return Err(BrokerError::NoSuchAccount);
},
};
// pull it out of the pending hashmap while we modify it
match account.ledger.pending_positions.get(&pos_uuid) {
Some(pos) => pos,
None => {
return Err(BrokerError::NoSuchPosition);
},
}.clone()
};
let opt = self.get_price(order.symbol_id);
if opt.is_none() {
return Err(BrokerError::NoSuchSymbol)
}
let (bid, ask) = opt.unwrap();
match order.is_open_satisfied(bid, ask) {
// if the new entry price makes the order marketable, go ahead and open the position.
Some(entry_price) => {
let res = {
let account = self.accounts.get_mut(&account_uuid).unwrap();
// remove the position from the pending hashmap
let mut hm_order = account.ledger.pending_positions.remove(&pos_uuid).unwrap();
hm_order.execution_time = Some(self.timestamp);
hm_order.execution_price = Some(entry_price);
// add it to the open hashmap
account.ledger.open_position(pos_uuid, order.clone())
};
// that should always succeed
if res.is_err() {
self.logger.error_log(&format!("Error while trying to modify order: {:?}, {:?}", &order, res));
}
// assert!(res.is_ok());
// notify the cache that the position was opened
self.accounts.position_opened(&order, pos_uuid);
return res;
},
// if it's not marketable, perform the modification on the ledger
None => {
let mut account = self.accounts.get_mut(&account_uuid).unwrap();
account.ledger.modify_order(pos_uuid, size, entry_price, stop, take_profit, self.timestamp)
},
}
};
// as of now, the modification operation always succeeds so we should always update the cache
match res.as_ref().unwrap() {
&BrokerMessage::OrderModified{ ref order, order_id: _, timestamp: _ } => {
self.accounts.order_modified(order, pos_uuid);
},
_ => unreachable!(),
}
res
}
/// Cancels the pending position.
pub fn cancel_order(&mut self, account_uuid: Uuid, order_uuid: Uuid) -> BrokerResult {
let new_buying_power;
let res = {
let account = match self.accounts.entry(account_uuid) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(_) => {
return Err(BrokerError::NoSuchAccount);
},
};
// attempt to cancel the order and remove it from the hashmaps
let res = account.ledger.cancel_order(order_uuid, self.timestamp);
new_buying_power = account.ledger.buying_power;
res
};
// if it was successful, remove the position from the `pending` cache
// also send notification of ledger buying power change
match res {
Ok(ref msg) => {
match msg {
&BrokerMessage::OrderCancelled{ ref order, order_id: _, timestamp: _ } => {
self.accounts.order_cancelled(order_uuid, order.symbol_id);
self.buying_power_changed(account_uuid, new_buying_power);
},
_ => unreachable!(),
}
},
Err(_) => (),
}
res
}
/// Modifies the stop loss or take profit of a position. SL and TP are double option-wrapped; the outer
/// option indicates if they should be changed and the inner option indicates if the value should be set
/// or not (`Some(None)` indicates that the current SL should be removed, for example).
fn modify_position(
&mut self, account_id: Uuid, position_uuid: Uuid, sl: Option<Option<usize>>, tp: Option<Option<usize>>
) -> BrokerResult {
let res = {
let account = match self.accounts.entry(account_id) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(_) => {
return Err(BrokerError::NoSuchAccount);
},
};
account.ledger.modify_position(position_uuid, sl, tp, self.timestamp)
};
// TODO: Check if the new SL/TP make the position meet closure conditions and if they do, close it
// if the position was actually modified, remove it from the cache
match res {
Ok(ref message) => match message {
&BrokerMessage::PositionModified{position: ref pos, position_id: pos_uuid, timestamp: _} => {
self.accounts.position_modified(pos, pos_uuid);
},
_ => (),
},
Err(_) => (),
}
res
}
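// Illustrative calls for the double-`Option` convention documented above;
// the uuids and price are placeholders:
//
//     // remove the current stop loss, leave the take profit untouched
//     self.modify_position(acct_uuid, pos_uuid, Some(None), None);
//     // keep the stop loss, set the take profit to a new price
//     self.modify_position(acct_uuid, pos_uuid, None, Some(Some(109_500)));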
/// Dumps the SimBroker state to a file that can be resumed later.
fn dump_to_file(&mut self, filename: &str) {
unimplemented!(); // TODO
}
/// Used for Forex exchange rate conversions. The cost to open a position is determined
/// by the exchange rate between the base currency and the primary currency of the pair.
///
/// Gets the conversion rate (in pips) between the base currency of the simbroker and
/// the supplied currency. If the base currency is USD and AUD is provided, the exchange
/// rate for AUD/USD will be returned. Returns Err if we lack the data to do that. Results
/// are returned with the specified decimal precision.
fn get_base_rate(&self, currency: &str, desired_decimals: usize) -> Result<usize, BrokerError> {
if !self.settings.fx {
return Err(BrokerError::Message{
message: String::from("Can only convert to base rate when in FX mode.")
});
}
let base_currency = &self.settings.fx_base_currency;
let base_pair = format!("{}{}", currency, base_currency);
let (_, ask, decimals) = if !self.symbols.contains(&base_pair) {
// try reversing the order of the pair
let base_pair_reverse = format!("{}{}", base_currency, currency);
if !self.symbols.contains(&base_pair_reverse) {
return Err(BrokerError::NoDataAvailable);
} else {
self.symbols[&base_pair_reverse].get_price()
}
} else {
self.symbols[&base_pair].get_price()
};
Ok(convert_decimals(ask, decimals, desired_decimals))
}
/// Returns the value of a position in units of base currency, not taking into account leverage.
fn get_position_value(&self, pos: &Position) -> Result<usize, BrokerError> {
let ix = pos.symbol_id;
let sym = &self.symbols[ix];
if sym.is_fx() {
let base_rate: usize = self.get_base_rate(&sym.name[0..3], sym.metadata.decimal_precision)?;
Ok(pos.size * base_rate * self.settings.fx_lot_size)
} else {
Ok(pos.size)
}
}
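// Worked example of the FX branch above, under assumed settings
// (`fx` mode, `fx_lot_size` = 1000, USD base currency) and an invented
// AUDUSD ask of 0.75321:
//
//     let base_rate = self.get_base_rate("AUD", 5)?; // -> 75_321
//     let value = pos.size * base_rate * 1000;       // size 3 -> 225_963_000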
/// Called for every price update the broker receives. It simulates some kind of market activity on the simulated exchange
/// that triggers a price update for that symbol. This function checks all pending and open positions and determines
/// if they need to be opened, closed, or modified in any way due to this update.
///
/// All actions that take place here are guaranteed to succeed since they are simulated as taking place within the
/// brokerage itself. All `BrokerMessage`s generated by any actions that take place are sent through the supplied
/// push stream handle to the client. The returned value is how many push messages were sent to the client
/// during this tick.
pub fn tick_positions(
&mut self, symbol_id: usize, price: (usize, usize), cur_index: usize, buffer: &mut Vec<TickOutput>
) -> usize {
let (bid, ask) = price;
let mut push_msg_count = 0;
// check if any pending orders should be closed, modified, or opened
// manually keep track of the index because we remove things from the vector dynamically
let mut i = 0;
while i < self.accounts.positions[symbol_id].pending.len() {
let push_msg_opt = {
let &CachedPosition { pos_uuid, acct_uuid, ref pos } = &self.accounts.positions[symbol_id].pending[i];
match pos.is_open_satisfied(bid, ask) {
Some(open_price) => {
// if the position should be opened, remove it from the pending `HashMap` and the cache and open it.
let mut ledger = &mut self.accounts.data.get_mut(&acct_uuid).unwrap().ledger;
// remove from the hashmap
let mut hm_pos = ledger.pending_positions.remove(&pos_uuid).unwrap();
hm_pos.execution_price = Some(open_price);
hm_pos.execution_time = Some(self.timestamp);
Some(ledger.open_position(pos_uuid, hm_pos))
},
None => None,
}
};
i += 1;
match push_msg_opt {
Some(Ok(BrokerMessage::PositionOpened{position_id: _, position: ref hm_pos, timestamp: _})) => {
// remove from the pending cache
let mut cached_pos = self.accounts.positions[symbol_id].pending.remove(i-1);
// update the cached position with the one with execution data
cached_pos.pos = hm_pos.clone();
let push_msg = push_msg_opt.as_ref().unwrap();
// this should always succeed
// if push_msg.is_err() {
// let err_msg = format!("Error while trying to open position during tick check: {:?}, {:?}", &cached_pos.pos, push_msg);
// self.logger.error_log(&err_msg);
// }
assert!(push_msg.is_ok());
// add it to the open cache
self.accounts.positions[symbol_id].open.push(cached_pos);
// send the push message to the client
self.push_msg(Ok(push_msg.as_ref().unwrap().clone()));
// put the new tick into the buffer to be returned to the client
let output = TickOutput::Pushstream(self.timestamp, Ok(push_msg.as_ref().unwrap().clone()));
buffer[cur_index + push_msg_count] = output;
push_msg_count += 1;
// decrement i since we modified the cache
i -= 1;
},
Some(Err(err)) => self.logger.error_log(&format!("Push message from opening pending position was error: {:?}", err)),
Some(Ok(msg)) => self.logger.error_log(&format!("Received unexpected response type when opening pending position: {:?}", msg)),
None => (),
}
}
// check if any open positions should be closed or modified
let mut i = 0;
while i < self.accounts.positions[symbol_id].open.len() {
let mut new_buying_power = 0;
let push_msg_opt: Option<(usize, BrokerResult)> = {
let &CachedPosition { pos_uuid, acct_uuid, ref pos } = &self.accounts.positions[symbol_id].open[i];
match pos.is_close_satisfied(bid, ask) {
Some((closure_price, closure_reason)) => {
let pos_value = self.get_position_value(&pos).expect("Unable to get position value for pending position!");
// if the position should be closed, remove it from the cache.
let mut ledger = &mut self.accounts.data.get_mut(&acct_uuid).unwrap().ledger;
let res = ledger.close_position(pos_uuid, pos_value, self.timestamp, closure_reason);
new_buying_power = ledger.buying_power;
Some((closure_price, res))
},
None => None,
}
};
i += 1;
if push_msg_opt.is_some() {
let (closure_price, push_msg) = push_msg_opt.unwrap();
// remove from the open cache
let mut cached_pos = self.accounts.positions[symbol_id].open.remove(i-1);
cached_pos.pos.exit_price = Some(closure_price);
cached_pos.pos.exit_time = Some(self.timestamp);
// this should always succeed
assert!(push_msg.is_ok());
// send notification of ledger buying power change to client
let buying_power_notification = BrokerMessage::LedgerBalanceChange{
account_uuid: cached_pos.acct_uuid,
new_buying_power: new_buying_power,
};
let output = TickOutput::Pushstream(self.timestamp, Ok(buying_power_notification));
// add the message to the buffer and increment the length
buffer[cur_index + push_msg_count] = output;
push_msg_count += 1;
// send the push message to the client
self.push_msg(push_msg.clone());
// put the new tick into the buffer to be returned to the client
let output = TickOutput::Pushstream(self.timestamp, push_msg);
// add the message to the buffer and increment the length
buffer[cur_index + push_msg_count] = output;
push_msg_count += 1;
// decrement i since we modified the cache
i -= 1;
}
}
push_msg_count
}
/// Sets the price for a symbol. If no Symbol currently exists with that designation, a new one
/// will be initialized with a static price.
fn oneshot_price_set(
&mut self, name: String, price: (usize, usize), is_fx: bool, decimal_precision: usize,
) {
if is_fx {
assert_eq!(name.len(), 6);
}
// insert new entry into `self.prices` or update if one exists
if self.symbols.contains(&name) {
self.symbols[&name].price = price;
} else {
let symbol = Symbol::new_oneshot(price, is_fx, decimal_precision, name.clone());
self.symbols.add(name, symbol).expect("Unable to set oneshot price for new symbol");
}
}
/// Returns a clone of an account's ledger or an error if it doesn't exist.
pub fn get_ledger_clone(&mut self, account_uuid: Uuid) -> Result<Ledger, BrokerError> {
match self.accounts.get(&account_uuid) {
Some(acct) => Ok(acct.ledger.clone()),
None => Err(BrokerError::Message{
message: "No account exists with that UUID.".to_string()
}),
}
}
/// Registers a data source into the SimBroker. Ticks from the supplied generator will be
/// used to update the SimBroker's internal prices and transmitted to connected clients.
pub fn register_tickstream(
&mut self, name: String, raw_tickstream: BoxStream<Tick, ()>, is_fx: bool, decimal_precision: usize
) -> BrokerResult {
// allocate space for open positions of the new symbol in `Accounts`
self.accounts.add_symbol();
let mut sym = Symbol::new_from_stream(raw_tickstream, is_fx, decimal_precision, name.clone());
// get the first element out of the tickstream and set the next tick equal to it
let first_tick = sym.next().unwrap().unwrap();
self.cs.debug(None, &format!("Set first tick for tickstream {}: {:?}", name, &first_tick));
sym.next_tick = Some(first_tick);
self.symbols.add(name, sym)
}
/// Returns the current price for a given symbol or None if the SimBroker
/// doesn't have a price.
pub fn get_price(&self, ix: usize) -> Option<(usize, usize)> {
if self.symbols.len() > ix {
return Some(self.symbols[ix].price)
}
None
}
}
// File: startup.js
Meteor.startup(function () {
Meteor.defer(function () {
Session.setDefault("checked", $("input[type=checkbox]").is(":checked"));
});
if (Meteor.isCordova) {
window.alert = navigator.notification.alert;
}
Push.addListener('message', function(notification) {
// Called on every message
console.log(JSON.stringify(notification));
function alertDismissed() {
NotificationHistory.update({_id: notification.payload.historyId}, {
$set: {
"recievedAt": new Date()
}
});
}
alert(notification.message, alertDismissed, notification.payload.title, "Ok");
});
});
// File: JSonUtils.cpp
/*
Copyright (C) 2007 National Institute For Space Research (INPE) - Brazil.
This file is part of TerraMA2 - a free and open source computational
platform for analysis, monitoring, and alert of geo-environmental extremes.
TerraMA2 is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
TerraMA2 is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with TerraMA2. See LICENSE. If not, write to
TerraMA2 Team at <[email protected]>.
*/
/*!
\file terrama2/core/utility/JSonUtils.hpp
\brief
\author Jano Simas
*/
#include <terralib/datatype/TimeInstantTZ.h>
#include <terralib/geometry/Geometry.h>
#include <terralib/geometry/Point.h>
#include <algorithm>
#include <functional>
#include <iterator>
//STL
#include <limits>
#include <map>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>
#include "../Exception.hpp"
#include "../data-model/DataSetOccurrence.hpp"
#include "../../Exception.hpp"
#include "../Typedef.hpp"
#include "../data-model/DataManager.hpp"
#include "../data-model/DataProvider.hpp"
#include "../data-model/DataSeries.hpp"
#include "../data-model/DataSeriesSemantics.hpp"
#include "../data-model/DataSet.hpp"
#include "../data-model/DataSetDcp.hpp"
#include "../data-model/DataSetGrid.hpp"
#include "../data-model/Project.hpp"
#include "../data-model/Risk.hpp"
#include "../utility/GeoUtils.hpp"
#include "../utility/Logger.hpp"
#include "../utility/TimeUtils.hpp"
#include "../utility/Verify.hpp"
#include "JSonUtils.hpp"
#include "SemanticsManager.hpp"
#include <QJsonArray>
terrama2::core::DataProviderPtr terrama2::core::fromDataProviderJson(QJsonObject json)
{
if(json["class"].toString() != "DataProvider")
{
QString errMsg = QObject::tr("Invalid DataProvider JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("id")
&& json.contains("project_id")
&& json.contains("name")
&& json.contains("description")
&& json.contains("intent")
&& json.contains("uri")
&& json.contains("active")
&& json.contains("data_provider_type")))
{
QString errMsg = QObject::tr("Invalid DataProvider JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
std::shared_ptr<terrama2::core::DataProvider> provider = std::make_shared<terrama2::core::DataProvider>();
provider->id = json["id"].toInt();
provider->projectId = json["project_id"].toInt();
provider->name = json["name"].toString().toStdString();
provider->description = json["description"].toString().toStdString();
provider->intent = static_cast<terrama2::core::DataProviderIntent>(json["intent"].toInt());
provider->uri = json["uri"].toString().toStdString();
provider->active = json["active"].toBool();
provider->dataProviderType = json["data_provider_type"].toString().toStdString();
if(json.contains("options"))
{
auto obj = json["options"].toObject();
for(auto it = obj.begin(); it != obj.end(); ++it)
{
provider->options.emplace(it.key().toStdString(), it.value().toString().toStdString());
}
}
try
{
auto timeout = provider->options.at("timeout");
provider->timeout = std::stoi(timeout);
}
catch (...)
{
provider->timeout = 8;
}
return provider;
}
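// Minimal sketch of feeding the parser above (all field values invented;
// the intent integer is illustrative and must correspond to a valid
// DataProviderIntent enumerator):
//
//   QJsonObject json;
//   json.insert("class", QString("DataProvider"));
//   json.insert("id", 1);
//   json.insert("project_id", 1);
//   json.insert("name", QString("local-files"));
//   json.insert("description", QString(""));
//   json.insert("intent", 1);
//   json.insert("uri", QString("file:///tmp/terrama2"));
//   json.insert("active", true);
//   json.insert("data_provider_type", QString("FILE"));
//   terrama2::core::DataProviderPtr provider = terrama2::core::fromDataProviderJson(json);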
terrama2::core::DataSeriesPtr terrama2::core::fromDataSeriesJson(QJsonObject json)
{
if(json["class"].toString() != "DataSeries")
{
QString errMsg = QObject::tr("Invalid DataSeries JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("id")
&& json.contains("data_provider_id")
&& json.contains("semantics")
&& json.contains("name")
&& json.contains("active")
&& json.contains("description")))
{
QString errMsg = QObject::tr("Invalid DataSeries JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
std::shared_ptr<terrama2::core::DataSeries> dataSeries = std::make_shared<terrama2::core::DataSeries>();
dataSeries->id = json["id"].toInt();
dataSeries->dataProviderId = json["data_provider_id"].toInt();
dataSeries->semantics = SemanticsManager::getInstance().getSemantics(json["semantics"].toString().toStdString());
dataSeries->name = json["name"].toString().toStdString();
dataSeries->description = json["description"].toString().toStdString();
dataSeries->active = json["active"].toBool();
QJsonArray dataSetArray = json["datasets"].toArray();
std::function<terrama2::core::DataSetPtr(QJsonObject)> createDataSet = nullptr;
switch(dataSeries->semantics.dataSeriesType)
{
case DataSeriesType::DCP:
createDataSet = fromDataSetDcpJson;
break;
case DataSeriesType::OCCURRENCE:
createDataSet = fromDataSetOccurrenceJson;
break;
case DataSeriesType::GRID:
createDataSet = fromDataSetGridJson;
break;
case DataSeriesType::GEOMETRIC_OBJECT:
case DataSeriesType::ANALYSIS_MONITORED_OBJECT:
createDataSet = fromDataSetJson;
break;
default:
{
QString errMsg = QObject::tr("Invalid DataSeries JSON object.\nUnknown DataSet type.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
}
for(auto json : dataSetArray)
{
if(json.isObject())
dataSeries->datasetList.push_back(createDataSet(json.toObject()));
else
throw terrama2::core::JSonParserException() << ErrorDescription(QObject::tr("Invalid DataSet JSON object."));
}
return dataSeries;
}
void terrama2::core::addBaseDataSetData(QJsonObject json, std::shared_ptr<terrama2::core::DataSet> dataSet)
{
if(json["class"].toString() != "DataSet")
{
QString errMsg = QObject::tr("Invalid DataSet JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("id")
&& json.contains("data_series_id")
&& json.contains("active")
&& json.contains("format")))
{
QString errMsg = QObject::tr("Invalid DataSet JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
dataSet->id = json["id"].toInt();
dataSet->dataSeriesId = json["data_series_id"].toInt();
dataSet->active = json["active"].toBool();
auto formatObj= json["format"].toObject();
for(auto it = formatObj.begin(); it != formatObj.end(); ++it)
{
dataSet->format.emplace(it.key().toStdString(), it.value().toString().toStdString());
}
}
terrama2::core::DataSetPtr terrama2::core::fromDataSetDcpJson(QJsonObject json)
{
std::shared_ptr<terrama2::core::DataSetDcp> dataSet = std::make_shared<terrama2::core::DataSetDcp>();
addBaseDataSetData(json, dataSet);
if(!json.contains("position"))
{
QString errMsg = QObject::tr("Invalid DataSet JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
std::string ewkt = json["position"].toString().toStdString();
auto geom = ewktToGeom(ewkt);
auto point = std::dynamic_pointer_cast<te::gm::Point>(geom);
if(!point.get())
{
QString errMsg = QObject::tr("Invalid DataSet JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
dataSet->position = point;
return dataSet;
}
terrama2::core::DataSetPtr terrama2::core::fromDataSetJson(QJsonObject json)
{
std::shared_ptr<terrama2::core::DataSet> dataSet = std::make_shared<terrama2::core::DataSet>();
addBaseDataSetData(json, dataSet);
return dataSet;
}
terrama2::core::DataSetPtr terrama2::core::fromDataSetOccurrenceJson(QJsonObject json)
{
std::shared_ptr<terrama2::core::DataSet> dataSet = std::make_shared<terrama2::core::DataSetOccurrence>();
addBaseDataSetData(json, dataSet);
return dataSet;
}
terrama2::core::DataSetPtr terrama2::core::fromDataSetGridJson(QJsonObject json)
{
std::shared_ptr<terrama2::core::DataSet> dataSet = std::make_shared<terrama2::core::DataSetGrid>();
addBaseDataSetData(json, dataSet);
return dataSet;
}
terrama2::core::Filter terrama2::core::fromFilterJson(QJsonObject json, DataManager* dataManager)
{
if(json.empty())
return terrama2::core::Filter();
if(json["class"].toString() != "Filter")
{
QString errMsg = QObject::tr("Invalid Filter JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
terrama2::core::Filter filter;
if(json.contains("discard_before") && !json.value("discard_before").isNull())
{
std::string dateTime = json.value("discard_before").toString().toStdString();
filter.discardBefore = TimeUtils::stringToTimestamp(dateTime, terrama2::core::TimeUtils::webgui_timefacet);
verify::date(filter.discardBefore);
}
if(json.contains("discard_after") && !json.value("discard_after").isNull())
{
std::string dateTime = json["discard_after"].toString().toStdString();
filter.discardAfter = TimeUtils::stringToTimestamp(dateTime, terrama2::core::TimeUtils::webgui_timefacet);
verify::date(filter.discardAfter);
}
if(filter.discardBefore && filter.discardAfter && (*filter.discardBefore > *filter.discardAfter))
{
QString errMsg = QObject::tr("Invalid Filter JSON object./nEmpty date filter interval.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(json.contains("region") && !json.value("region").isNull())
{
auto ewkt = json["region"].toString().toStdString();
filter.region = ewktToGeom(ewkt);
verify::srid(filter.region->getSRID());
}
if(json.contains("by_value") && !json.value("by_value").isNull())
{
filter.byValue = json["by_value"].toString().toStdString();
}
if(json.contains("last_values") && !json.value("last_values").isNull())
{
filter.lastValues = std::make_shared<size_t>(json["last_values"].toInt());
}
if(json.contains("data_series_id") && !json.value("data_series_id").isNull())
{
DataSeriesId dataSeriesId = json["data_series_id"].toInt();
// Sets the data series for a static data filter
if(dataSeriesId != 0)
{
auto dataSeries = dataManager->findDataSeries(dataSeriesId);
filter.dataSeries = dataSeries;
auto dataProvider = dataManager->findDataProvider(dataSeries->dataProviderId);
filter.dataProvider = dataProvider;
}
}
if (json.contains("crop_raster"))
filter.cropRaster = json["crop_raster"].toBool();
else
filter.cropRaster = false;
return filter;
}
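// Sketch of a Filter JSON document the parser above accepts (values are
// invented; "region" is an EWKT string and the dates use the web GUI
// time facet):
//
//   {
//     "class": "Filter",
//     "discard_before": "2017-01-01T00:00:00.000-03:00",
//     "discard_after": "2017-02-01T00:00:00.000-03:00",
//     "region": "SRID=4326;POLYGON((-54 -12, -54 -11, -53 -11, -53 -12, -54 -12))",
//     "last_values": 1,
//     "crop_raster": false
//   }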
terrama2::core::LegendPtr terrama2::core::fromRiskJson(QJsonObject json)
{
if(json["class"].toString() != "Legend")
{
QString errMsg = QObject::tr("Invalid Legend JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("name")
&& json.contains("description")
&& json.contains("levels")))
{
QString errMsg = QObject::tr("Invalid Risk JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
std::shared_ptr<terrama2::core::Risk> risk = std::make_shared<terrama2::core::Risk>();
risk->name = json["name"].toString().toStdString();
risk->description = json["description"].toString().toStdString();
risk->id = json["id"].toInt();
auto riskLevelsArray = json["levels"].toArray();
for(const auto& value : riskLevelsArray)
{
auto obj = value.toObject();
terrama2::core::RiskLevel riskLevel;
riskLevel.name = obj["name"].toString().toStdString();
riskLevel.value = obj["value"].toDouble();
if(obj["level"].isNull())
{
//default risk
riskLevel.level = std::numeric_limits<uint32_t>::max();
risk->defaultRisk = riskLevel;
}
else
{
riskLevel.level = static_cast<uint32_t>(obj["level"].toInt());
risk->riskLevels.push_back(riskLevel);
}
}
std::sort(std::begin(risk->riskLevels), std::end(risk->riskLevels));
return risk;
}
QJsonObject terrama2::core::toJson(const terrama2::core::Risk& risk)
{
QJsonObject obj;
obj.insert("class", QString("Risk"));
obj.insert("name", QString::fromStdString(risk.name));
obj.insert("description", QString::fromStdString(risk.description));
QJsonArray riskArray;
for(const auto& riskLevel : risk.riskLevels)
{
QJsonObject tempoObj;
tempoObj.insert("name", QString::fromStdString(riskLevel.name));
tempoObj.insert("level", static_cast<int>(riskLevel.level));
tempoObj.insert("value", riskLevel.value);
riskArray.append(tempoObj);
}
obj.insert("levels", riskArray);
return obj;
}
QJsonObject terrama2::core::toJson(const terrama2::core::Filter& filter)
{
QJsonObject obj;
obj.insert("class", QString("Filter"));
if(filter.discardBefore.get())
{
std::string discardBefore = TimeUtils::boostLocalTimeToString(filter.discardBefore->getTimeInstantTZ(), TimeUtils::webgui_timefacet);
obj.insert("discard_before", QString::fromStdString(discardBefore));
}
if(filter.discardAfter.get())
{
std::string discardAfter = TimeUtils::boostLocalTimeToString(filter.discardAfter->getTimeInstantTZ(), TimeUtils::webgui_timefacet);
obj.insert("discard_after", QString::fromStdString(discardAfter));
}
if(filter.region.get())
{
std::string region = filter.region->toString();
obj.insert("region", QString::fromStdString(region));
}
obj.insert("last_values", static_cast<qint32>(*filter.lastValues.get()));
if(filter.dataSeries)
obj.insert("data_series_id", static_cast<int32_t>(filter.dataSeries->id));
obj.insert("by_value", QString::fromStdString(filter.byValue));
return obj;
}
terrama2::core::Schedule terrama2::core::fromScheduleJson(QJsonObject json)
{
if(json.empty())
return terrama2::core::Schedule();
if(json["class"].toString() != "Schedule")
{
QString errMsg = QObject::tr("Invalid Schedule JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("id")
&& json.contains("frequency")
&& json.contains("frequency_unit")
&& json.contains("frequency_start_time")
&& json.contains("schedule")
&& json.contains("schedule_time")
&& json.contains("schedule_unit")
&& json.contains("schedule_retry")
&& json.contains("schedule_retry_unit")
&& json.contains("schedule_timeout")
&& json.contains("schedule_timeout_unit")))
{
QString errMsg = QObject::tr("Invalid Schedule JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
terrama2::core::Schedule schedule;
schedule.id = json["id"].toInt();
schedule.frequency = json["frequency"].toInt();
schedule.frequencyUnit = json["frequency_unit"].toString().toStdString();
schedule.frequencyStartTime = json["frequency_start_time"].toString().toStdString();
schedule.schedule = json["schedule"].toInt();
schedule.scheduleTime = json["schedule_time"].toString().toStdString();
schedule.scheduleUnit = json["schedule_unit"].toString().toStdString();
schedule.scheduleRetry = json["schedule_retry"].toInt();
schedule.scheduleRetryUnit = json["schedule_retry_unit"].toString().toStdString();
schedule.scheduleTimeout = json["schedule_timeout"].toInt();
schedule.scheduleTimeoutUnit = json["schedule_timeout_unit"].toString().toStdString();
if(json.contains("reprocessing_historical_data") && !json["reprocessing_historical_data"].isNull())
schedule.reprocessingHistoricalData = fromReprocessingHistoricalData(json["reprocessing_historical_data"].toObject());
return schedule;
}
QJsonObject terrama2::core::toJson(DataProviderPtr dataProviderPtr)
{
QJsonObject obj;
obj.insert("class", QString("DataProvider"));
obj.insert("id", static_cast<int32_t>(dataProviderPtr->id));
obj.insert("project_id", static_cast<int32_t>(dataProviderPtr->projectId));
obj.insert("name", QString::fromStdString(dataProviderPtr->name));
obj.insert("description", QString::fromStdString(dataProviderPtr->description));
obj.insert("intent", static_cast<int>(dataProviderPtr->intent));
obj.insert("uri", QString::fromStdString(dataProviderPtr->uri));
obj.insert("active", dataProviderPtr->active);
obj.insert("timeout", static_cast<int>(dataProviderPtr->timeout));
obj.insert("data_provider_type", QString::fromStdString(dataProviderPtr->dataProviderType));
return obj;
}
QJsonObject terrama2::core::toJson(DataSeriesPtr dataSeriesPtr)
{
QJsonObject obj;
obj.insert("class", QString("DataSeries"));
obj.insert("id", static_cast<int32_t>(dataSeriesPtr->id));
obj.insert("data_provider_id", static_cast<int32_t>(dataSeriesPtr->dataProviderId));
obj.insert("semantics", QString::fromStdString(dataSeriesPtr->semantics.code));
obj.insert("name", QString::fromStdString(dataSeriesPtr->name));
obj.insert("description", QString::fromStdString(dataSeriesPtr->description));
QJsonArray array;
for(const auto& dataSet : dataSeriesPtr->datasetList)
array.push_back(terrama2::core::toJson(dataSet, dataSeriesPtr->semantics));
obj.insert("datasets", array);
return obj;
}
QJsonObject terrama2::core::toJson(DataSetPtr dataSetPtr, DataSeriesSemantics semantics)
{
QJsonObject obj;
obj.insert("class", QString("DataSet"));
obj.insert("id", static_cast<int32_t>(dataSetPtr->id));
obj.insert("data_series_id", static_cast<int32_t>(dataSetPtr->dataSeriesId));
obj.insert("data_series_id", static_cast<int32_t>(dataSetPtr->dataSeriesId));
obj.insert("active", dataSetPtr->active);
QJsonObject format;
for(const auto & it : dataSetPtr->format)
{
format.insert(QString::fromStdString(it.first), QString::fromStdString(it.second));
}
obj.insert("format", format);
switch(semantics.dataSeriesType)
{
case terrama2::core::DataSeriesType::DCP :
{
auto dataSet = std::dynamic_pointer_cast<const DataSetDcp>(dataSetPtr);
terrama2::core::addToJson(obj, dataSet);
break;
}
case terrama2::core::DataSeriesType::OCCURRENCE :
{
auto dataSet = std::dynamic_pointer_cast<const DataSetOccurrence>(dataSetPtr);
terrama2::core::addToJson(obj, dataSet);
break;
}
case terrama2::core::DataSeriesType::GRID :
{
auto dataSet = std::dynamic_pointer_cast<const DataSetGrid>(dataSetPtr);
terrama2::core::addToJson(obj, dataSet);
break;
}
default:
/* code */
break;
}
return obj;
}
void terrama2::core::addToJson(QJsonObject& obj, DataSetDcpPtr dataSetPtr)
{
obj.insert("position", QString::fromStdString(dataSetPtr->position->toString()));
}
void terrama2::core::addToJson(QJsonObject& /*obj*/, DataSetOccurrencePtr /*dataSetPtr*/)
{
}
void terrama2::core::addToJson(QJsonObject& /*obj*/, DataSetGridPtr /*dataSetPtr*/)
{
}
QJsonObject terrama2::core::toJson(Schedule schedule)
{
QJsonObject obj;
obj.insert("class", QString("Schedule"));
obj.insert("id", static_cast<int32_t>(schedule.id));
obj.insert("frequency",static_cast<int32_t>(schedule.frequency));
obj.insert("frequency_unit", QString::fromStdString(schedule.frequencyUnit));
obj.insert("frequency_start_time", QString::fromStdString(schedule.frequencyStartTime));
obj.insert("schedule",static_cast<int32_t>(schedule.schedule));
obj.insert("schedule_time",QString::fromStdString(schedule.scheduleTime));
obj.insert("schedule_unit",QString::fromStdString(schedule.scheduleUnit));
obj.insert("schedule_retry",static_cast<int32_t>(schedule.scheduleRetry));
obj.insert("schedule_retry_unit", QString::fromStdString(schedule.scheduleRetryUnit));
obj.insert("schedule_timeout",static_cast<int32_t>(schedule.scheduleTimeout));
obj.insert("schedule_timeout_unit", QString::fromStdString(schedule.scheduleTimeoutUnit));
obj.insert("reprocessing_historical_data", toJson(schedule.reprocessingHistoricalData));
return obj;
}
QJsonObject terrama2::core::toJson(terrama2::core::ReprocessingHistoricalDataPtr
reprocessingHistoricalDataPtr)
{
QJsonObject obj;
if(!reprocessingHistoricalDataPtr)
return obj;
obj.insert("class", QString("ReprocessingHistoricalData"));
if(reprocessingHistoricalDataPtr->startDate.get())
{
std::string startDate = terrama2::core::TimeUtils::boostLocalTimeToString(reprocessingHistoricalDataPtr->startDate->getTimeInstantTZ(), terrama2::core::TimeUtils::webgui_timefacet);
obj.insert("start_date", QString::fromStdString(startDate));
}
if(reprocessingHistoricalDataPtr->endDate.get())
{
std::string endDate = terrama2::core::TimeUtils::boostLocalTimeToString(reprocessingHistoricalDataPtr->endDate->getTimeInstantTZ(), terrama2::core::TimeUtils::webgui_timefacet);
obj.insert("end_date", QString::fromStdString(endDate));
}
return obj;
}
terrama2::core::ReprocessingHistoricalDataPtr terrama2::core::fromReprocessingHistoricalData(
const QJsonObject& json)
{
if(json.isEmpty())
{
return terrama2::core::ReprocessingHistoricalDataPtr();
}
if(json["class"].toString() != "ReprocessingHistoricalData")
{
QString errMsg(QObject::tr("Invalid ReprocessingHistoricalData JSON object."));
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("start_date")
&& json.contains("end_date")))
{
QString errMsg(QObject::tr("Invalid ReprocessingHistoricalData JSON object."));
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
ReprocessingHistoricalData* reprocessingHistoricalData = new ReprocessingHistoricalData;
ReprocessingHistoricalDataPtr reprocessingHistoricalDataPtr(reprocessingHistoricalData);
if(!json.value("start_date").isNull())
{
std::string startDate = json["start_date"].toString().toStdString();
reprocessingHistoricalData->startDate = terrama2::core::TimeUtils::stringToTimestamp(startDate, terrama2::core::TimeUtils::webgui_timefacet);
}
if(!json.value("end_date").isNull())
{
std::string endDate = json["end_date"].toString().toStdString();
reprocessingHistoricalData->endDate = terrama2::core::TimeUtils::stringToTimestamp(endDate, terrama2::core::TimeUtils::webgui_timefacet);
}
return reprocessingHistoricalDataPtr;
}
terrama2::core::ProjectPtr terrama2::core::fromProjectJson(QJsonObject json)
{
if(json.empty())
{
QString errMsg = QObject::tr("Invalid Project JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(json["class"].toString() != "Project")
{
QString errMsg = QObject::tr("Invalid Project JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
if(!(json.contains("id")
&& json.contains("name")
&& json.contains("active")))
{
QString errMsg = QObject::tr("Invalid Project JSON object.");
TERRAMA2_LOG_ERROR() << errMsg;
throw terrama2::core::JSonParserException() << ErrorDescription(errMsg);
}
auto project = std::make_shared<terrama2::core::Project>();
project->id = json["id"].toInt();
project->name = json["name"].toString().toStdString();
project->active = json["active"].toBool();
return project;
}
QJsonObject terrama2::core::toJson(const terrama2::core::ProjectPtr& project)
{
QJsonObject obj;
if(!project)
return obj;
obj.insert("class", QString("Project"));
obj.insert("id", static_cast<int32_t>(project->id));
obj.insert("name", QString::fromStdString(project->name));
obj.insert("active", project->active);
return obj;
}<|fim▁end|> | if(!(json.contains("start_date") |
<|file_name|>VulkanMemoryAllocator.cpp<|end_file_name|><|fim▁begin|>#define VMA_IMPLEMENTATION
#include <vk_mem_alloc.h>
#include "VulkanMemoryAllocator.h"
#include "VulkanUtility.h"
#include "Core/Assertion.h"
namespace cube
{
namespace rapi
{
void VulkanMemoryAllocator::Initialize(VkInstance instance, VkPhysicalDevice GPU, VkDevice device)
{
VkResult res;
VmaAllocatorCreateInfo info = {};
info.instance = instance;
info.physicalDevice = GPU;
info.device = device;
res = vmaCreateAllocator(&info, &mAllocator);<|fim▁hole|>
void VulkanMemoryAllocator::Shutdown()
{
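            // Note: the allocator created in Initialize() is not destroyed here;
            // a complete teardown would call vmaDestroyAllocator(mAllocator).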
}
VulkanAllocation VulkanMemoryAllocator::Allocate(ResourceUsage usage, VkBufferCreateInfo& bufCreateInfo, VkBuffer* pBuf)
{
VmaAllocationCreateInfo createInfo = CreateVmaAllocationCreateInfo(usage);
VmaAllocationInfo allocationInfo;
VmaAllocation allocation;
vmaCreateBuffer(mAllocator, &bufCreateInfo, &createInfo, pBuf, &allocation, &allocationInfo);
VulkanAllocation res;
res.resourceType = VulkanAllocation::ResourceType::Buffer;
res.pResource = pBuf;
UpdateVulkanAllocation(res, allocation, allocationInfo);
return res;
}
VulkanAllocation VulkanMemoryAllocator::Allocate(ResourceUsage usage, VkImageCreateInfo& imageCreateInfo, VkImage* pImage)
{
VmaAllocationCreateInfo createInfo = CreateVmaAllocationCreateInfo(usage);
VmaAllocationInfo allocationInfo;
VmaAllocation allocation;
vmaCreateImage(mAllocator, &imageCreateInfo, &createInfo, pImage, &allocation, &allocationInfo);
VulkanAllocation res;
res.resourceType = VulkanAllocation::ResourceType::Texture;
res.pResource = pImage;
UpdateVulkanAllocation(res, allocation, allocationInfo);
return res;
}
void VulkanMemoryAllocator::Free(VulkanAllocation alloc)
{
switch(alloc.resourceType)
{
case VulkanAllocation::ResourceType::Buffer:
vmaDestroyBuffer(mAllocator, *(VkBuffer*)alloc.pResource, alloc.allocation);
break;
case VulkanAllocation::ResourceType::Texture:
vmaDestroyImage(mAllocator, *(VkImage*)alloc.pResource, alloc.allocation);
break;
default:
ASSERTION_FAILED("Invalid resource type in vulkan allocation. ({})", (int)alloc.resourceType);
break;
}
}
VmaAllocationCreateInfo VulkanMemoryAllocator::CreateVmaAllocationCreateInfo(ResourceUsage usage)
{
VmaAllocationCreateInfo info = {};
switch(usage)
{
case ResourceUsage::Default:
case ResourceUsage::Immutable:
info.usage = VMA_MEMORY_USAGE_GPU_ONLY;
break;
case ResourceUsage::Dynamic:
info.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
info.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
break;
case ResourceUsage::Staging:
info.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
break;
default:
ASSERTION_FAILED("Invalid resource type {}", (int)usage);
break;
}
return info;
}
void VulkanMemoryAllocator::UpdateVulkanAllocation(VulkanAllocation& allocation, VmaAllocation vmaAllocation, const VmaAllocationInfo& info)
{
allocation.allocator = mAllocator;
allocation.allocation = vmaAllocation;
allocation.pMappedPtr = info.pMappedData;
allocation.size = info.size;
VkMemoryPropertyFlags memFlags;
vmaGetMemoryTypeProperties(mAllocator, info.memoryType, &memFlags);
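            // Host-visible memory types can be mapped and written directly from the CPU.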
if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) > 0) {
allocation.isHostVisible = true;
} else {
allocation.isHostVisible = false;
}
}
} // namespace rapi
} // namespace cube<|fim▁end|> | CHECK_VK(res, "Failed to create vulkan memory allocator.");
} |
<|file_name|>VoteCandidate.js<|end_file_name|><|fim▁begin|>import React from 'react';
let lastClicked = 0;
export default class Candidate extends React.Component {
constructor(props) {
super(props);
this.state = { wiggle: false };
this.castVote = this.castVote.bind(this);
this.wiggleDone = this.wiggleDone.bind(this);
}
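  // The 'animationend' listener below clears the wiggle class once the CSS animation finishes.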
componentDidMount() {
const anim = this.refs[this.props.candidate];
anim.addEventListener('animationend', this.wiggleDone);
}
componentWillUnmount() {
const anim = this.refs[this.props.candidate];
anim.removeEventListener('animationend', this.wiggleDone);
}
wiggleDone() {
this.setState({ wiggle: false });
}
castVote() {
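    // Throttle votes: ignore clicks within 3 seconds of the last accepted vote.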
    if (Date.now() - lastClicked >= 3000) {
lastClicked = Date.now();
this.props.castVote(this.props.index);
this.setState({ wiggle: true });
// console.log('vote cast');
} else {<|fim▁hole|> // console.log('waiting for delay, vote not cast');
}
}
render() {
const wiggle = this.state.wiggle;
return (
<img
key={this.props.candidate}
ref={this.props.candidate}
className={wiggle ? 'candidate wiggle' : 'candidate'}
onClick={this.castVote}
src={`Image${this.props.index}.png`}
/>
);
}
}
Candidate.propTypes = {
index: React.PropTypes.number,
candidate: React.PropTypes.string,
votes: React.PropTypes.oneOfType([
React.PropTypes.number,
React.PropTypes.string,
]),
castVote: React.PropTypes.func,
};<|fim▁end|> | |
<|file_name|>learning_rate_schedule.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Various learning rate decay functions."""
import abc
import math
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.optimizers.schedules.LearningRateSchedule")
class LearningRateSchedule(object):
"""The learning rate schedule base class.
You can use a learning rate schedule to modulate how the learning rate
of your optimizer changes over time.
Several built-in learning rate schedules are available, such as
`tf.keras.optimizers.schedules.ExponentialDecay` or
`tf.keras.optimizers.schedules.PiecewiseConstantDecay`:
```python
lr_schedule = keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate=1e-2,
decay_steps=10000,
decay_rate=0.9)
optimizer = keras.optimizers.SGD(learning_rate=lr_schedule)
```
A `LearningRateSchedule` instance can be passed in as the `learning_rate`
argument of any optimizer.
To implement your own schedule object, you should implement the `__call__`
method, which takes a `step` argument (scalar integer tensor, the
current training step count).
Like for any other Keras object, you can also optionally
make your object serializable by implementing the `get_config`
and `from_config` methods.
Example:
```python
class MyLRSchedule(tf.keras.optimizers.schedules.LearningRateSchedule):
def __init__(self, initial_learning_rate):
self.initial_learning_rate = initial_learning_rate
def __call__(self, step):
return self.initial_learning_rate / (step + 1)
optimizer = tf.keras.optimizers.SGD(learning_rate=MyLRSchedule(0.1))
```
"""
@abc.abstractmethod
def __call__(self, step):
raise NotImplementedError("Learning rate schedule must override __call__")
@abc.abstractmethod
def get_config(self):
raise NotImplementedError("Learning rate schedule must override get_config")
@classmethod
def from_config(cls, config):
"""Instantiates a `LearningRateSchedule` from its config.
Args:
config: Output of `get_config()`.
Returns:
A `LearningRateSchedule` instance.
"""
return cls(**config)
@keras_export("keras.optimizers.schedules.ExponentialDecay")
class ExponentialDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses an exponential decay schedule.
When training a model, it is often useful to lower the learning rate as
the training progresses. This schedule applies an exponential decay function
to an optimizer step, given a provided initial learning rate.
  The schedule is a 1-arg callable that produces a decayed learning
rate when passed the current optimizer step. This can be useful for changing
the learning rate value across different invocations of optimizer functions.
It is computed as:
```python
def decayed_learning_rate(step):
return initial_learning_rate * decay_rate ^ (step / decay_steps)
```
If the argument `staircase` is `True`, then `step / decay_steps` is
an integer division and the decayed learning rate follows a
staircase function.
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate.
Example: When fitting a Keras model, decay every 100000 steps with a base
of 0.96:
```python
initial_learning_rate = 0.1
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate,
decay_steps=100000,
decay_rate=0.96,
staircase=True)
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=lr_schedule),
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(data, labels, epochs=5)
```
The learning rate schedule is also serializable and deserializable using
`tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
decay_steps,
decay_rate,
staircase=False,
name=None):
"""Applies exponential decay to the learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The initial learning rate.
decay_steps: A scalar `int32` or `int64` `Tensor` or a Python number.
Must be positive. See the decay computation above.
decay_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The decay rate.
staircase: Boolean. If `True` decay the learning rate at discrete
intervals
name: String. Optional name of the operation. Defaults to
'ExponentialDecay'.
"""
super(ExponentialDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.decay_steps = decay_steps
self.decay_rate = decay_rate
self.staircase = staircase
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "ExponentialDecay") as name:
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
decay_steps = math_ops.cast(self.decay_steps, dtype)
decay_rate = math_ops.cast(self.decay_rate, dtype)
global_step_recomp = math_ops.cast(step, dtype)
p = global_step_recomp / decay_steps
if self.staircase:
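        # Flooring the exponent keeps the rate constant within each
        # decay_steps-sized interval, producing the staircase effect.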
p = math_ops.floor(p)
return math_ops.multiply(
initial_learning_rate, math_ops.pow(decay_rate, p), name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_steps": self.decay_steps,
"decay_rate": self.decay_rate,
"staircase": self.staircase,
"name": self.name
}
@keras_export("keras.optimizers.schedules.PiecewiseConstantDecay")
class PiecewiseConstantDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses a piecewise constant decay schedule.
The function returns a 1-arg callable to compute the piecewise constant
when passed the current optimizer step. This can be useful for changing the
learning rate value across different invocations of optimizer functions.
Example: use a learning rate that's 1.0 for the first 100001 steps, 0.5
for the next 10000 steps, and 0.1 for any additional steps.
```python
step = tf.Variable(0, trainable=False)
boundaries = [100000, 110000]
values = [1.0, 0.5, 0.1]
learning_rate_fn = keras.optimizers.schedules.PiecewiseConstantDecay(
boundaries, values)
# Later, whenever we perform an optimization step, we pass in the step.
learning_rate = learning_rate_fn(step)
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate. The learning rate schedule is also serializable and
deserializable using `tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as the boundary tensors.
The output of the 1-arg function that takes the `step`
is `values[0]` when `step <= boundaries[0]`,
`values[1]` when `step > boundaries[0]` and `step <= boundaries[1]`, ...,
  and `values[-1]` when `step > boundaries[-1]`.
"""
def __init__(
self,
boundaries,
values,
name=None):
"""Piecewise constant from boundaries and interval values.
Args:
boundaries: A list of `Tensor`s or `int`s or `float`s with strictly
increasing entries, and with all elements having the same type as the
optimizer step.
values: A list of `Tensor`s or `float`s or `int`s that specifies the
values for the intervals defined by `boundaries`. It should have one
more element than `boundaries`, and all elements should have the same
type.
name: A string. Optional name of the operation. Defaults to
'PiecewiseConstant'.
Raises:
ValueError: if the number of elements in the lists do not match.
"""
super(PiecewiseConstantDecay, self).__init__()
if len(boundaries) != len(values) - 1:
raise ValueError(
"The length of boundaries should be 1 less than the length of values")
self.boundaries = boundaries
self.values = values
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "PiecewiseConstant"):
boundaries = nest.map_structure(ops.convert_to_tensor_v2_with_dispatch,
nest.flatten(self.boundaries))
values = nest.map_structure(ops.convert_to_tensor_v2_with_dispatch,
nest.flatten(self.values))
x_recomp = ops.convert_to_tensor_v2_with_dispatch(step)
for i, b in enumerate(boundaries):
if b.dtype.base_dtype != x_recomp.dtype.base_dtype:
# We cast the boundaries to have the same type as the step
b = math_ops.cast(b, x_recomp.dtype.base_dtype)
boundaries[i] = b
pred_fn_pairs = []
pred_fn_pairs.append((x_recomp <= boundaries[0], lambda: values[0]))
pred_fn_pairs.append((x_recomp > boundaries[-1], lambda: values[-1]))
for low, high, v in zip(boundaries[:-1], boundaries[1:], values[1:-1]):
# Need to bind v here; can do this with lambda v=v: ...
pred = (x_recomp > low) & (x_recomp <= high)
pred_fn_pairs.append((pred, lambda v=v: v))
# The default isn't needed here because our conditions are mutually
# exclusive and exhaustive, but tf.case requires it.
default = lambda: values[0]
return control_flow_ops.case(pred_fn_pairs, default, exclusive=True)
def get_config(self):
return {
"boundaries": self.boundaries,
"values": self.values,
"name": self.name
}
@keras_export("keras.optimizers.schedules.PolynomialDecay")
class PolynomialDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses a polynomial decay schedule.
It is commonly observed that a monotonically decreasing learning rate, whose
degree of change is carefully chosen, results in a better performing model.
This schedule applies a polynomial decay function to an optimizer step,
given a provided `initial_learning_rate`, to reach an `end_learning_rate`
in the given `decay_steps`.
It requires a `step` value to compute the decayed learning rate. You
can just pass a TensorFlow variable that you increment at each training
step.
The schedule is a 1-arg callable that produces a decayed learning rate
when passed the current optimizer step. This can be useful for changing the
learning rate value across different invocations of optimizer functions.
It is computed as:
```python
def decayed_learning_rate(step):
step = min(step, decay_steps)
return ((initial_learning_rate - end_learning_rate) *
(1 - step / decay_steps) ^ (power)
) + end_learning_rate
```
If `cycle` is True then a multiple of `decay_steps` is used, the first one
that is bigger than `step`.
```python
def decayed_learning_rate(step):
decay_steps = decay_steps * ceil(step / decay_steps)
return ((initial_learning_rate - end_learning_rate) *
(1 - step / decay_steps) ^ (power)
) + end_learning_rate
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate.
Example: Fit a model while decaying from 0.1 to 0.01 in 10000 steps using
sqrt (i.e. power=0.5):
```python
...
starter_learning_rate = 0.1
end_learning_rate = 0.01
decay_steps = 10000
learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay(
starter_learning_rate,
decay_steps,
end_learning_rate,
power=0.5)
model.compile(optimizer=tf.keras.optimizers.SGD(
learning_rate=learning_rate_fn),
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(data, labels, epochs=5)
```
The learning rate schedule is also serializable and deserializable using
`tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
decay_steps,
end_learning_rate=0.0001,
power=1.0,
cycle=False,
name=None):
"""Applies a polynomial decay to the learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The initial learning rate.
decay_steps: A scalar `int32` or `int64` `Tensor` or a Python number.
Must be positive. See the decay computation above.
end_learning_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The minimal end learning rate.
power: A scalar `float32` or `float64` `Tensor` or a
Python number. The power of the polynomial. Defaults to linear, 1.0.
cycle: A boolean, whether or not it should cycle beyond decay_steps.
name: String. Optional name of the operation. Defaults to
'PolynomialDecay'.
"""
super(PolynomialDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.decay_steps = decay_steps
self.end_learning_rate = end_learning_rate
self.power = power
self.cycle = cycle
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "PolynomialDecay") as name:
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
end_learning_rate = math_ops.cast(self.end_learning_rate, dtype)
power = math_ops.cast(self.power, dtype)
global_step_recomp = math_ops.cast(step, dtype)
decay_steps_recomp = math_ops.cast(self.decay_steps, dtype)
if self.cycle:
# Find the first multiple of decay_steps that is bigger than
# global_step. If global_step is zero set the multiplier to 1
multiplier = array_ops.where_v2(
math_ops.equal(global_step_recomp, 0), 1.0,
math_ops.ceil(global_step_recomp / self.decay_steps))
decay_steps_recomp = math_ops.multiply(decay_steps_recomp, multiplier)
else:
# Make sure that the global_step used is not bigger than decay_steps.
global_step_recomp = math_ops.minimum(global_step_recomp,
decay_steps_recomp)
p = math_ops.divide(global_step_recomp, decay_steps_recomp)
return math_ops.add(
math_ops.multiply(initial_learning_rate - end_learning_rate,
math_ops.pow(1 - p, power)),
end_learning_rate,
name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_steps": self.decay_steps,
"end_learning_rate": self.end_learning_rate,
"power": self.power,
"cycle": self.cycle,
"name": self.name
}
@keras_export("keras.optimizers.schedules.InverseTimeDecay")
class InverseTimeDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses an inverse time decay schedule.
When training a model, it is often useful to lower the learning rate as
the training progresses. This schedule applies the inverse decay function
to an optimizer step, given a provided initial learning rate.
It requires a `step` value to compute the decayed learning rate. You can
just pass a TensorFlow variable that you increment at each training step.
  The schedule is a 1-arg callable that produces a decayed learning
rate when passed the current optimizer step. This can be useful for changing
the learning rate value across different invocations of optimizer functions.
It is computed as:
```python
def decayed_learning_rate(step):
    return initial_learning_rate / (1 + decay_rate * step / decay_steps)
```
or, if `staircase` is `True`, as:
```python
def decayed_learning_rate(step):
    return initial_learning_rate / (1 + decay_rate * floor(step / decay_steps))
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate.
Example: Fit a Keras model when decaying 1/t with a rate of 0.5:
```python
...
initial_learning_rate = 0.1
decay_steps = 1.0
decay_rate = 0.5
learning_rate_fn = keras.optimizers.schedules.InverseTimeDecay(
initial_learning_rate, decay_steps, decay_rate)
model.compile(optimizer=tf.keras.optimizers.SGD(
learning_rate=learning_rate_fn),
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(data, labels, epochs=5)
```
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
decay_steps,
decay_rate,
staircase=False,
name=None):
"""Applies inverse time decay to the initial learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The initial learning rate.
decay_steps: How often to apply decay.
decay_rate: A Python number. The decay rate.
staircase: Whether to apply decay in a discrete staircase, as opposed to
continuous, fashion.
name: String. Optional name of the operation. Defaults to
'InverseTimeDecay'.
"""
super(InverseTimeDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.decay_steps = decay_steps
self.decay_rate = decay_rate
self.staircase = staircase
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "InverseTimeDecay") as name:
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
decay_steps = math_ops.cast(self.decay_steps, dtype)
decay_rate = math_ops.cast(self.decay_rate, dtype)
global_step_recomp = math_ops.cast(step, dtype)
p = global_step_recomp / decay_steps
if self.staircase:
p = math_ops.floor(p)
const = math_ops.cast(constant_op.constant(1), dtype)
denom = math_ops.add(const, math_ops.multiply(decay_rate, p))
return math_ops.divide(initial_learning_rate, denom, name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_steps": self.decay_steps,
"decay_rate": self.decay_rate,
"staircase": self.staircase,
"name": self.name
}
@keras_export("keras.optimizers.schedules.CosineDecay",
"keras.experimental.CosineDecay")
class CosineDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses a cosine decay schedule.
See [Loshchilov & Hutter, ICLR2016](https://arxiv.org/abs/1608.03983),
SGDR: Stochastic Gradient Descent with Warm Restarts.
When training a model, it is often useful to lower the learning rate as<|fim▁hole|> to an optimizer step, given a provided initial learning rate.
It requires a `step` value to compute the decayed learning rate. You can
just pass a TensorFlow variable that you increment at each training step.
  The schedule is a 1-arg callable that produces a decayed learning
rate when passed the current optimizer step. This can be useful for changing
the learning rate value across different invocations of optimizer functions.
It is computed as:
```python
def decayed_learning_rate(step):
step = min(step, decay_steps)
cosine_decay = 0.5 * (1 + cos(pi * step / decay_steps))
decayed = (1 - alpha) * cosine_decay + alpha
return initial_learning_rate * decayed
```
Example usage:
```python
decay_steps = 1000
lr_decayed_fn = tf.keras.optimizers.schedules.CosineDecay(
initial_learning_rate, decay_steps)
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate. The learning rate schedule is also serializable and
deserializable using `tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
decay_steps,
alpha=0.0,
name=None):
"""Applies cosine decay to the learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` Tensor or a
Python number. The initial learning rate.
decay_steps: A scalar `int32` or `int64` `Tensor` or a Python number.
Number of steps to decay over.
alpha: A scalar `float32` or `float64` Tensor or a Python number.
Minimum learning rate value as a fraction of initial_learning_rate.
name: String. Optional name of the operation. Defaults to 'CosineDecay'.
"""
super(CosineDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.decay_steps = decay_steps
self.alpha = alpha
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "CosineDecay"):
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
decay_steps = math_ops.cast(self.decay_steps, dtype)
global_step_recomp = math_ops.cast(step, dtype)
global_step_recomp = math_ops.minimum(global_step_recomp, decay_steps)
completed_fraction = global_step_recomp / decay_steps
cosine_decayed = 0.5 * (1.0 + math_ops.cos(
constant_op.constant(math.pi) * completed_fraction))
decayed = (1 - self.alpha) * cosine_decayed + self.alpha
return math_ops.multiply(initial_learning_rate, decayed)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_steps": self.decay_steps,
"alpha": self.alpha,
"name": self.name
}
@keras_export("keras.optimizers.schedules.CosineDecayRestarts",
"keras.experimental.CosineDecayRestarts")
class CosineDecayRestarts(LearningRateSchedule):
"""A LearningRateSchedule that uses a cosine decay schedule with restarts.
See [Loshchilov & Hutter, ICLR2016](https://arxiv.org/abs/1608.03983),
SGDR: Stochastic Gradient Descent with Warm Restarts.
When training a model, it is often useful to lower the learning rate as
the training progresses. This schedule applies a cosine decay function with
restarts to an optimizer step, given a provided initial learning rate.
It requires a `step` value to compute the decayed learning rate. You can
just pass a TensorFlow variable that you increment at each training step.
  The schedule is a 1-arg callable that produces a decayed learning
rate when passed the current optimizer step. This can be useful for changing
the learning rate value across different invocations of optimizer functions.
The learning rate multiplier first decays
from 1 to `alpha` for `first_decay_steps` steps. Then, a warm
restart is performed. Each new warm restart runs for `t_mul` times more
steps and with `m_mul` times smaller initial learning rate.
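  For example, with `t_mul=2.0` and `m_mul=0.5`, the second period lasts twice
  as many steps as the first and begins at half the initial learning rate.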
Example usage:
```python
first_decay_steps = 1000
lr_decayed_fn = (
tf.keras.optimizers.schedules.CosineDecayRestarts(
initial_learning_rate,
first_decay_steps))
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate. The learning rate schedule is also serializable and
deserializable using `tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
first_decay_steps,
t_mul=2.0,
m_mul=1.0,
alpha=0.0,
name=None):
"""Applies cosine decay with restarts to the learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` Tensor or a Python
number. The initial learning rate.
first_decay_steps: A scalar `int32` or `int64` `Tensor` or a Python
number. Number of steps to decay over.
t_mul: A scalar `float32` or `float64` `Tensor` or a Python number.
      Used to derive the number of iterations in the i-th period.
m_mul: A scalar `float32` or `float64` `Tensor` or a Python number.
      Used to derive the initial learning rate of the i-th period.
alpha: A scalar `float32` or `float64` Tensor or a Python number.
Minimum learning rate value as a fraction of the initial_learning_rate.
name: String. Optional name of the operation. Defaults to 'SGDRDecay'.
"""
super(CosineDecayRestarts, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.first_decay_steps = first_decay_steps
self._t_mul = t_mul
self._m_mul = m_mul
self.alpha = alpha
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "SGDRDecay") as name:
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
first_decay_steps = math_ops.cast(self.first_decay_steps, dtype)
alpha = math_ops.cast(self.alpha, dtype)
t_mul = math_ops.cast(self._t_mul, dtype)
m_mul = math_ops.cast(self._m_mul, dtype)
global_step_recomp = math_ops.cast(step, dtype)
completed_fraction = global_step_recomp / first_decay_steps
def compute_step(completed_fraction, geometric=False):
"""Helper for `cond` operation."""
if geometric:
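          # Period lengths grow geometrically with ratio t_mul; invert the
          # partial sum sum_r = (1 - t_mul**i) / (1 - t_mul) to locate the
          # restart period the current step falls in.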
i_restart = math_ops.floor(
math_ops.log(1.0 - completed_fraction * (1.0 - t_mul)) /
math_ops.log(t_mul))
sum_r = (1.0 - t_mul**i_restart) / (1.0 - t_mul)
completed_fraction = (completed_fraction - sum_r) / t_mul**i_restart
else:
i_restart = math_ops.floor(completed_fraction)
completed_fraction -= i_restart
return i_restart, completed_fraction
i_restart, completed_fraction = control_flow_ops.cond(
math_ops.equal(t_mul, 1.0),
lambda: compute_step(completed_fraction, geometric=False),
lambda: compute_step(completed_fraction, geometric=True))
m_fac = m_mul**i_restart
cosine_decayed = 0.5 * m_fac * (1.0 + math_ops.cos(
constant_op.constant(math.pi) * completed_fraction))
decayed = (1 - alpha) * cosine_decayed + alpha
return math_ops.multiply(initial_learning_rate, decayed, name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"first_decay_steps": self.first_decay_steps,
"t_mul": self._t_mul,
"m_mul": self._m_mul,
"alpha": self.alpha,
"name": self.name
}
# Note: this code is still used by V1 APIs.
class LinearCosineDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses a linear cosine decay schedule.
See [Bello et al., ICML2017] Neural Optimizer Search with RL.
https://arxiv.org/abs/1709.07417
For the idea of warm starts here controlled by `num_periods`,
see [Loshchilov & Hutter, ICLR2016] SGDR: Stochastic Gradient Descent
with Warm Restarts. https://arxiv.org/abs/1608.03983
Note that linear cosine decay is more aggressive than cosine decay and
larger initial learning rates can typically be used.
When training a model, it is often recommended to lower the learning rate as
the training progresses. This schedule applies a linear cosine decay
function to an optimizer step, given a provided initial learning rate.
It requires a `step` value to compute the decayed learning rate. You can
just pass a TensorFlow variable that you increment at each training step.
  The schedule is a 1-arg callable that produces a decayed learning
rate when passed the current optimizer step. This can be useful for changing
the learning rate value across different invocations of optimizer functions.
It is computed as:
```python
def decayed_learning_rate(step):
step = min(step, decay_steps)
linear_decay = (decay_steps - step) / decay_steps
cosine_decay = 0.5 * (
1 + cos(pi * 2 * num_periods * step / decay_steps))
decayed = (alpha + linear_decay) * cosine_decay + beta
return initial_learning_rate * decayed
```
Example usage:
```python
decay_steps = 1000
lr_decayed_fn = (
tf.keras.experimental.LinearCosineDecay(
initial_learning_rate, decay_steps))
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate. The learning rate schedule is also serializable and
deserializable using `tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
decay_steps,
num_periods=0.5,
alpha=0.0,
beta=0.001,
name=None):
"""Applies linear cosine decay to the learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` Tensor or a Python
number. The initial learning rate.
decay_steps: A scalar `int32` or `int64` `Tensor` or a Python number.
Number of steps to decay over.
num_periods: Number of periods in the cosine part of the decay.
See computation above.
alpha: See computation above.
beta: See computation above.
name: String. Optional name of the operation. Defaults to
'LinearCosineDecay'.
"""
super(LinearCosineDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.decay_steps = decay_steps
self.num_periods = num_periods
self.alpha = alpha
self.beta = beta
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "LinearCosineDecay") as name:
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
decay_steps = math_ops.cast(self.decay_steps, dtype)
num_periods = math_ops.cast(self.num_periods, dtype)
alpha = math_ops.cast(self.alpha, dtype)
beta = math_ops.cast(self.beta, dtype)
global_step_recomp = math_ops.cast(step, dtype)
global_step_recomp = math_ops.minimum(global_step_recomp, decay_steps)
linear_decayed = (decay_steps - global_step_recomp) / decay_steps
completed_fraction = global_step_recomp / decay_steps
fraction = 2.0 * num_periods * completed_fraction
cosine_decayed = 0.5 * (
1.0 + math_ops.cos(constant_op.constant(math.pi) * fraction))
linear_cosine_decayed = (alpha + linear_decayed) * cosine_decayed + beta
return math_ops.multiply(initial_learning_rate, linear_cosine_decayed,
name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_steps": self.decay_steps,
"num_periods": self.num_periods,
"alpha": self.alpha,
"beta": self.beta,
"name": self.name
}
# Note: this code is still used by V1 APIs.
class NoisyLinearCosineDecay(LearningRateSchedule):
"""A LearningRateSchedule that uses a noisy linear cosine decay schedule.
See [Bello et al., ICML2017] Neural Optimizer Search with RL.
https://arxiv.org/abs/1709.07417
For the idea of warm starts here controlled by `num_periods`,
see [Loshchilov & Hutter, ICLR2016] SGDR: Stochastic Gradient Descent
with Warm Restarts. https://arxiv.org/abs/1608.03983
Note that linear cosine decay is more aggressive than cosine decay and
larger initial learning rates can typically be used.
When training a model, it is often recommended to lower the learning rate as
the training progresses. This schedule applies a noisy linear cosine decay
function to an optimizer step, given a provided initial learning rate.
It requires a `step` value to compute the decayed learning rate. You can
just pass a TensorFlow variable that you increment at each training step.
  The schedule is a 1-arg callable that produces a decayed learning
rate when passed the current optimizer step. This can be useful for changing
the learning rate value across different invocations of optimizer functions.
It is computed as:
```python
def decayed_learning_rate(step):
step = min(step, decay_steps)
    linear_decay = (decay_steps - step) / decay_steps
cosine_decay = 0.5 * (
1 + cos(pi * 2 * num_periods * step / decay_steps))
decayed = (alpha + linear_decay + eps_t) * cosine_decay + beta
return initial_learning_rate * decayed
```
  where `eps_t` is 0-centered Gaussian noise with variance
  `initial_variance / (1 + global_step) ** variance_decay`
Example usage:
```python
decay_steps = 1000
lr_decayed_fn = (
tf.keras.experimental.NoisyLinearCosineDecay(
initial_learning_rate, decay_steps))
```
You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
as the learning rate. The learning rate schedule is also serializable and
deserializable using `tf.keras.optimizers.schedules.serialize` and
`tf.keras.optimizers.schedules.deserialize`.
Returns:
A 1-arg callable learning rate schedule that takes the current optimizer
step and outputs the decayed learning rate, a scalar `Tensor` of the same
type as `initial_learning_rate`.
"""
def __init__(
self,
initial_learning_rate,
decay_steps,
initial_variance=1.0,
variance_decay=0.55,
num_periods=0.5,
alpha=0.0,
beta=0.001,
name=None):
"""Applies noisy linear cosine decay to the learning rate.
Args:
initial_learning_rate: A scalar `float32` or `float64` Tensor or a Python
number. The initial learning rate.
decay_steps: A scalar `int32` or `int64` `Tensor` or a Python number.
Number of steps to decay over.
initial_variance: initial variance for the noise. See computation above.
variance_decay: decay for the noise's variance. See computation above.
num_periods: Number of periods in the cosine part of the decay.
See computation above.
alpha: See computation above.
beta: See computation above.
name: String. Optional name of the operation. Defaults to
'NoisyLinearCosineDecay'.
"""
super(NoisyLinearCosineDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.decay_steps = decay_steps
self.initial_variance = initial_variance
self.variance_decay = variance_decay
self.num_periods = num_periods
self.alpha = alpha
self.beta = beta
self.name = name
def __call__(self, step):
with ops.name_scope_v2(self.name or "NoisyLinearCosineDecay") as name:
initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
decay_steps = math_ops.cast(self.decay_steps, dtype)
initial_variance = math_ops.cast(self.initial_variance, dtype)
variance_decay = math_ops.cast(self.variance_decay, dtype)
num_periods = math_ops.cast(self.num_periods, dtype)
alpha = math_ops.cast(self.alpha, dtype)
beta = math_ops.cast(self.beta, dtype)
global_step_recomp = math_ops.cast(step, dtype)
global_step_recomp = math_ops.minimum(global_step_recomp, decay_steps)
linear_decayed = (decay_steps - global_step_recomp) / decay_steps
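      # The noise injected below has variance
      # initial_variance / (1 + step)**variance_decay, so it fades over training.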
variance = initial_variance / (
math_ops.pow(1.0 + global_step_recomp, variance_decay))
std = math_ops.sqrt(variance)
noisy_linear_decayed = (
linear_decayed + random_ops.random_normal(
linear_decayed.shape, stddev=std))
completed_fraction = global_step_recomp / decay_steps
fraction = 2.0 * num_periods * completed_fraction
cosine_decayed = 0.5 * (
1.0 + math_ops.cos(constant_op.constant(math.pi) * fraction))
noisy_linear_cosine_decayed = (
(alpha + noisy_linear_decayed) * cosine_decayed + beta)
return math_ops.multiply(
initial_learning_rate, noisy_linear_cosine_decayed, name=name)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"decay_steps": self.decay_steps,
"initial_variance": self.initial_variance,
"variance_decay": self.variance_decay,
"num_periods": self.num_periods,
"alpha": self.alpha,
"beta": self.beta,
"name": self.name
}
@keras_export("keras.optimizers.schedules.serialize")
def serialize(learning_rate_schedule):
return generic_utils.serialize_keras_object(learning_rate_schedule)
@keras_export("keras.optimizers.schedules.deserialize")
def deserialize(config, custom_objects=None):
return generic_utils.deserialize_keras_object(
config,
module_objects=globals(),
custom_objects=custom_objects,
printable_module_name="decay")<|fim▁end|> | the training progresses. This schedule applies a cosine decay function |
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
mod pcap;
mod pcapng;<|fim▁end|> | #![allow(clippy::unreadable_literal)] |
<|file_name|>test_request.py<|end_file_name|><|fim▁begin|># ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Description: File system resilience testing application
# Author: Hubert Kario <[email protected]>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Copyright (c) 2015 Hubert Kario. All rights reserved.
#
# This copyrighted material is made available to anyone wishing
# to use, modify, copy, or redistribute it subject to the terms
# and conditions of the GNU General Public License version 2.
#
# This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# For compatibility with Python 2.6 we need the unittest2 package,
# which is not available on 3.3 or 3.4; fall back to the standard unittest there.
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import mock
from mock import call
except ImportError:
import unittest.mock as mock
from unittest.mock import call
from fsresck.nbd.request import NBDRequestSocket, recvexactly, Error, \
NBDRequest
from fsresck.compat import compat_str
class TestError(unittest.TestCase):
def test___repr__(self):
with self.assertRaises(Error) as exception:
raise Error('test')
self.assertIn("request.Error('test'", repr(exception.exception))
class TestNBDRequest(unittest.TestCase):
def test___init__(self):
request = NBDRequest(None, None, None, None)
self.assertIsNotNone(request)
def test___ne__(self):
request1 = NBDRequest(1, 2, 3, 4)
request2 = NBDRequest(1, 2, 3, 4)
self.assertFalse(request1 != request2)
class TestRecvexactly(unittest.TestCase):
def test_zero_read(self):
sock = None
data = recvexactly(sock, 0)
self.assertEqual(bytearray(0), data)
def test_full_read(self):
sock = mock.MagicMock()<|fim▁hole|>
data = recvexactly(sock, 10)
self.assertEqual(bytearray(10), data)
sock.recv_into.assert_called_once_with(data, 10, 0)
def test_partial_reads(self):
sock = mock.MagicMock()
sock.recv_into.side_effect = (4, 6)
data = recvexactly(sock, 10)
self.assertEqual(bytearray(10), data)
self.assertEqual(len(sock.recv_into.call_args_list), 2)
call = sock.recv_into.call_args_list[0]
self.assertEqual(call[0][1:], (10, 0))
call = sock.recv_into.call_args_list[1]
self.assertEqual(call[0][1:], (6, 0))
def test_broken_read(self):
sock = mock.MagicMock()
sock.recv_into.side_effect = (4, 0)
with self.assertRaises(Error):
recvexactly(sock, 10)
class TestNBDRequestSocket(unittest.TestCase):
def test___init__(self):
sock = NBDRequestSocket(None)
self.assertIsNotNone(sock)
@mock.patch('fsresck.nbd.request.recvexactly')
def test_recv(self, mock_mthd):
mock_mthd.return_value = bytearray(
b'\x25\x60\x95\x13' # magic value
b'\x00\x00\x00\x00' # command type - read
b'\x50\xe4\x93\x01\x00\x88\xff\xff' # handle
b'\x00\x00\x00\x00\x00\x00\x00\x00' # offset
b'\x00\x00\x40\x00' # length
)
obj = NBDRequestSocket(None).recv()
self.assertEqual(NBDRequest(0, 0x50e493010088ffff, 0, 0x4000), obj)
@mock.patch('fsresck.nbd.request.recvexactly')
def test_recv_write(self, mock_mthd):
mock_mthd.side_effect = (bytearray(
b'\x25\x60\x95\x13' # magic value
b'\x00\x00\x00\x01' # command type - write
b'\x50\xe4\x93\x01\x00\x88\xff\xff' # handle
b'\x00\x00\x00\x00\x00\x00\x00\x00' # offset
b'\x00\x00\x00\x04'), # length
bytearray(
b'\xff\xff\xff\xff' # payload
))
obj = NBDRequestSocket(None).recv()
self.assertEqual(bytearray(b'\xff'*4), obj.data)
self.assertEqual(NBDRequest(1, 0x50e493010088ffff, 0, 0x04,
bytearray(b'\xff'*4)), obj)
@mock.patch('fsresck.nbd.request.recvexactly')
def test_recv_bad_write(self, mock_mthd):
mock_mthd.return_value = bytearray(
b'\x25\x60\x95\x14' # bad magic value
b'\x00\x00\x00\x00' # command type - read
b'\x50\xe4\x93\x01\x00\x88\xff\xff' # handle
b'\x00\x00\x00\x00\x00\x00\x00\x00' # offset
b'\x00\x00\x40\x00' # length
)
sock = NBDRequestSocket(None)
with self.assertRaises(Error):
sock.recv()
def test_send_read(self):
raw_sock = mock.MagicMock()
raw_sock.sendall.return_value = None
cmd = NBDRequest(0, 0x134, 0, 0x4000)
sock = NBDRequestSocket(raw_sock)
sock.send(cmd)
raw_sock.sendall.assert_called_once_with(compat_str(bytearray(
b'\x25\x60\x95\x13'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x014'
b'\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00@\x00')))
def test_send_write(self):
raw_sock = mock.MagicMock()
raw_sock.sendall.return_value = None
cmd = NBDRequest(1, 0x134, 0, 0x04, bytearray(b'\xff'*4))
sock = NBDRequestSocket(raw_sock)
sock.send(cmd)
raw_sock.sendall.assert_called_once_with(compat_str(bytearray(
b'\x25\x60\x95\x13'
b'\x00\x00\x00\x01'
b'\x00\x00\x00\x00\x00\x00\x014'
b'\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x04'
b'\xff\xff\xff\xff')))<|fim▁end|> | sock.recv_into.return_value = 10 |
<|file_name|>MySQLProjectOtherContributionDAO.java<|end_file_name|><|fim▁begin|>/*****************************************************************
* This file is part of CCAFS Planning and Reporting Platform.
* CCAFS P&R is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* at your option) any later version.
* CCAFS P&R is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with CCAFS P&R. If not, see <http://www.gnu.org/licenses/>.
* ***************************************************************
*/
package org.cgiar.ccafs.ap.data.dao.mysql;
import org.cgiar.ccafs.ap.data.dao.ProjectOtherContributionDAO;
import org.cgiar.ccafs.utils.db.DAOManager;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Javier Andrés Gallego B.
*/
public class MySQLProjectOtherContributionDAO implements ProjectOtherContributionDAO {
// Logger
private static Logger LOG = LoggerFactory.getLogger(MySQLProjectOtherContributionDAO.class);
private DAOManager databaseManager;
@Inject
public MySQLProjectOtherContributionDAO(DAOManager databaseManager) {
this.databaseManager = databaseManager;
}
@Override
public Map<String, String> getIPOtherContributionById(int ipOtherContributionId) {
Map<String, String> ipOtherContributionData = new HashMap<String, String>();
LOG.debug(">> getIPOtherContributionById( ipOtherContributionId = {} )", ipOtherContributionId);
StringBuilder query = new StringBuilder();
query.append("SELECT ipo.* ");
query.append("FROM project_other_contributions as ipo ");
query.append("WHERE ipo.id= ");
query.append(ipOtherContributionId);
try (Connection con = databaseManager.getConnection()) {
ResultSet rs = databaseManager.makeQuery(query.toString(), con);
if (rs.next()) {
ipOtherContributionData.put("id", rs.getString("id"));
ipOtherContributionData.put("project_id", rs.getString("project_id"));
ipOtherContributionData.put("contribution", rs.getString("contribution"));
ipOtherContributionData.put("additional_contribution", rs.getString("additional_contribution"));
ipOtherContributionData.put("crp_contributions_nature", rs.getString("crp_contributions_nature"));
}
con.close();
} catch (SQLException e) {
LOG.error("Exception arised getting the IP Other Contribution {}.", ipOtherContributionId, e);
}
LOG.debug("-- getIPOtherContributionById() > Calling method executeQuery to get the results");
return ipOtherContributionData;
}
@Override
public Map<String, String> getIPOtherContributionByProjectId(int projectID) {
LOG.debug(">> getIPOtherContributionByProjectId (projectID = {} )", projectID);
Map<String, String> ipOtherContributionData = new HashMap<String, String>();
StringBuilder query = new StringBuilder();
query.append("SELECT ipo.* ");
query.append("FROM project_other_contributions as ipo ");
query.append("INNER JOIN projects p ON ipo.project_id = p.id ");
query.append("WHERE ipo.project_id= ");
query.append(projectID);
try (Connection con = databaseManager.getConnection()) {
ResultSet rs = databaseManager.makeQuery(query.toString(), con);
if (rs.next()) {
ipOtherContributionData.put("id", rs.getString("id"));
ipOtherContributionData.put("project_id", rs.getString("project_id"));
ipOtherContributionData.put("contribution", rs.getString("contribution"));
ipOtherContributionData.put("additional_contribution", rs.getString("additional_contribution"));
ipOtherContributionData.put("crp_contributions_nature", rs.getString("crp_contributions_nature"));
}
con.close();
} catch (SQLException e) {
LOG.error("Exception arised getting the IP Other Contribution by the projectID {}.", projectID, e);
}
LOG.debug("-- getIPOtherContributionByProjectId() : {}", ipOtherContributionData);
return ipOtherContributionData;
}
@Override
public int saveIPOtherContribution(int projectID, Map<String, Object> ipOtherContributionData) {
LOG.debug(">> saveIPOtherContribution(ipOtherContributionDataData={})", ipOtherContributionData);<|fim▁hole|> Object[] values;
if (ipOtherContributionData.get("id") == null) {
// Insert new IP Other Contribution record
query.append("INSERT INTO project_other_contributions (project_id, contribution, additional_contribution, ");
query.append("crp_contributions_nature, created_by, modified_by, modification_justification) ");
query.append("VALUES (?,?,?,?,?,?,?) ");
values = new Object[7];
values[0] = projectID;
values[1] = ipOtherContributionData.get("contribution");
values[2] = ipOtherContributionData.get("additional_contribution");
values[3] = ipOtherContributionData.get("crp_contributions_nature");
values[4] = ipOtherContributionData.get("user_id");
values[5] = ipOtherContributionData.get("user_id");
values[6] = ipOtherContributionData.get("justification");
result = databaseManager.saveData(query.toString(), values);
if (result <= 0) {
LOG.error("A problem happened trying to add a new IP Other Contribution with project id={}", projectID);
return -1;
}
} else {
// update IP Other Contribution record
query.append("UPDATE project_other_contributions SET project_id = ?, contribution = ?, ");
query.append("additional_contribution = ?, crp_contributions_nature = ?, modified_by = ?, ");
query.append("modification_justification = ? WHERE id = ? ");
values = new Object[7];
values[0] = projectID;
values[1] = ipOtherContributionData.get("contribution");
values[2] = ipOtherContributionData.get("additional_contribution");
values[3] = ipOtherContributionData.get("crp_contributions_nature");
values[4] = ipOtherContributionData.get("user_id");
values[5] = ipOtherContributionData.get("justification");
values[6] = ipOtherContributionData.get("id");
result = databaseManager.saveData(query.toString(), values);
if (result == -1) {
LOG.error("A problem happened trying to update the IP Other Contribution identified with the id = {}",
ipOtherContributionData.get("id"));
return -1;
}
}
LOG.debug("<< saveIPOtherContribution():{}", result);
return result;
}
}<|fim▁end|> | StringBuilder query = new StringBuilder();
int result = -1; |
<|file_name|>traits.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Ethcore database trait
use std::cell::RefCell;
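/// Handle identifying a server-side iterator obtained from `iter`.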
pub type IteratorHandle = u32;
pub const DEFAULT_CACHE_LEN: usize = 12288;
#[derive(Binary)]
pub struct KeyValue {
pub key: Vec<u8>,
pub value: Vec<u8>,
}
#[derive(Debug, Binary)]
pub enum Error {
AlreadyOpen,
IsClosed,
RocksDb(String),
TransactionUnknown,
IteratorUnknown,
UncommitedTransactions,
}
impl From<String> for Error {
fn from(s: String) -> Error {
Error::RocksDb(s)
}
}
/// Database configuration
#[derive(Binary)]
pub struct DatabaseConfig {
/// Optional prefix size in bytes. Allows lookup by partial key.
pub prefix_size: Option<usize>,
/// write cache length
pub cache: usize,
}
impl Default for DatabaseConfig {
fn default() -> DatabaseConfig {
DatabaseConfig {
prefix_size: None,
cache: DEFAULT_CACHE_LEN,
}
}
}
impl DatabaseConfig {
fn with_prefix(prefix: usize) -> DatabaseConfig {
DatabaseConfig {
prefix_size: Some(prefix),
cache: DEFAULT_CACHE_LEN,
}
}
}
pub trait DatabaseService : Sized {
/// Opens database in the specified path
fn open(&self, config: DatabaseConfig, path: String) -> Result<(), Error>;
/// Opens database in the specified path with the default config
fn open_default(&self, path: String) -> Result<(), Error>;
/// Closes database
fn close(&self) -> Result<(), Error>;
    /// Insert a key-value pair in the transaction. Any existing value will be overwritten.
fn put(&self, key: &[u8], value: &[u8]) -> Result<(), Error>;
/// Delete value by key.
fn delete(&self, key: &[u8]) -> Result<(), Error>;
/// Get value by key.
fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Error>;
/// Get value by partial key. Prefix size should match configured prefix size.
fn get_by_prefix(&self, prefix: &[u8]) -> Result<Option<Vec<u8>>, Error>;
/// Check if there is anything in the database.
fn is_empty(&self) -> Result<bool, Error>;
/// Get handle to iterate through keys
fn iter(&self) -> Result<IteratorHandle, Error>;
    /// Next key-value for the given iterator
fn iter_next(&self, iterator: IteratorHandle) -> Option<KeyValue>;
/// Dispose iteration that is no longer needed
fn dispose_iter(&self, handle: IteratorHandle) -> Result<(), Error>;
/// Write client transaction
fn write(&self, transaction: DBTransaction) -> Result<(), Error>;
}
#[derive(Binary)]
pub struct DBTransaction {<|fim▁hole|>
impl DBTransaction {
pub fn new() -> DBTransaction {
DBTransaction {
writes: RefCell::new(Vec::new()),
removes: RefCell::new(Vec::new()),
}
}
pub fn put(&self, key: &[u8], value: &[u8]) {
let mut brw = self.writes.borrow_mut();
brw.push(KeyValue { key: key.to_vec(), value: value.to_vec() });
}
pub fn delete(&self, key: &[u8]) {
let mut brw = self.removes.borrow_mut();
brw.push(key.to_vec());
}
}<|fim▁end|> | pub writes: RefCell<Vec<KeyValue>>,
pub removes: RefCell<Vec<Vec<u8>>>,
} |
<|file_name|>grid.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
#[cfg(any(feature = "v3_10", feature = "dox"))]
use BaselinePosition;
use Buildable;
use Container;
use Orientable;
use PositionType;
use Widget;
use ffi;
use glib;
use glib::StaticType;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::mem;
use std::mem::transmute;
use std::ptr;
glib_wrapper! {
pub struct Grid(Object<ffi::GtkGrid, ffi::GtkGridClass>): Container, Widget, Buildable, Orientable;
match fn {
get_type => || ffi::gtk_grid_get_type(),
}
}
impl Grid {
pub fn new() -> Grid {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_grid_new()).downcast_unchecked()
}
}
}
impl Default for Grid {
fn default() -> Self {
Self::new()
}
}
pub trait GridExt {
fn attach<P: IsA<Widget>>(&self, child: &P, left: i32, top: i32, width: i32, height: i32);
fn attach_next_to<'a, P: IsA<Widget>, Q: IsA<Widget> + 'a, R: Into<Option<&'a Q>>>(&self, child: &P, sibling: R, side: PositionType, width: i32, height: i32);
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn get_baseline_row(&self) -> i32;
fn get_child_at(&self, left: i32, top: i32) -> Option<Widget>;
fn get_column_homogeneous(&self) -> bool;
fn get_column_spacing(&self) -> u32;
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn get_row_baseline_position(&self, row: i32) -> BaselinePosition;
fn get_row_homogeneous(&self) -> bool;
fn get_row_spacing(&self) -> u32;
fn insert_column(&self, position: i32);
fn insert_next_to<P: IsA<Widget>>(&self, sibling: &P, side: PositionType);
fn insert_row(&self, position: i32);
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn remove_column(&self, position: i32);
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn remove_row(&self, position: i32);
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn set_baseline_row(&self, row: i32);
fn set_column_homogeneous(&self, homogeneous: bool);
fn set_column_spacing(&self, spacing: u32);
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn set_row_baseline_position(&self, row: i32, pos: BaselinePosition);
fn set_row_homogeneous(&self, homogeneous: bool);
fn set_row_spacing(&self, spacing: u32);
fn get_property_baseline_row(&self) -> i32;
fn set_property_baseline_row(&self, baseline_row: i32);
fn get_cell_height<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_cell_height<T: IsA<Widget>>(&self, item: &T, height: i32);
fn get_cell_width<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_cell_width<T: IsA<Widget>>(&self, item: &T, width: i32);
fn get_cell_left_attach<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_cell_left_attach<T: IsA<Widget>>(&self, item: &T, left_attach: i32);
fn get_cell_top_attach<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_cell_top_attach<T: IsA<Widget>>(&self, item: &T, top_attach: i32);
fn connect_property_baseline_row_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_column_homogeneous_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_column_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_row_homogeneous_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_row_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<Grid> + IsA<Container> + IsA<glib::object::Object>> GridExt for O {
fn attach<P: IsA<Widget>>(&self, child: &P, left: i32, top: i32, width: i32, height: i32) {
unsafe {
ffi::gtk_grid_attach(self.to_glib_none().0, child.to_glib_none().0, left, top, width, height);
}
}
fn attach_next_to<'a, P: IsA<Widget>, Q: IsA<Widget> + 'a, R: Into<Option<&'a Q>>>(&self, child: &P, sibling: R, side: PositionType, width: i32, height: i32) {
let sibling = sibling.into();
let sibling = sibling.to_glib_none();
unsafe {
ffi::gtk_grid_attach_next_to(self.to_glib_none().0, child.to_glib_none().0, sibling.0, side.to_glib(), width, height);
}
}
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn get_baseline_row(&self) -> i32 {
unsafe {
ffi::gtk_grid_get_baseline_row(self.to_glib_none().0)
}
}
fn get_child_at(&self, left: i32, top: i32) -> Option<Widget> {
unsafe {
from_glib_none(ffi::gtk_grid_get_child_at(self.to_glib_none().0, left, top))
}
}
fn get_column_homogeneous(&self) -> bool {
unsafe {
from_glib(ffi::gtk_grid_get_column_homogeneous(self.to_glib_none().0))
}
}
fn get_column_spacing(&self) -> u32 {
unsafe {
ffi::gtk_grid_get_column_spacing(self.to_glib_none().0)
}
}
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn get_row_baseline_position(&self, row: i32) -> BaselinePosition {
unsafe {
from_glib(ffi::gtk_grid_get_row_baseline_position(self.to_glib_none().0, row))
}
}
fn get_row_homogeneous(&self) -> bool {
unsafe {<|fim▁hole|>
fn get_row_spacing(&self) -> u32 {
unsafe {
ffi::gtk_grid_get_row_spacing(self.to_glib_none().0)
}
}
fn insert_column(&self, position: i32) {
unsafe {
ffi::gtk_grid_insert_column(self.to_glib_none().0, position);
}
}
fn insert_next_to<P: IsA<Widget>>(&self, sibling: &P, side: PositionType) {
unsafe {
ffi::gtk_grid_insert_next_to(self.to_glib_none().0, sibling.to_glib_none().0, side.to_glib());
}
}
fn insert_row(&self, position: i32) {
unsafe {
ffi::gtk_grid_insert_row(self.to_glib_none().0, position);
}
}
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn remove_column(&self, position: i32) {
unsafe {
ffi::gtk_grid_remove_column(self.to_glib_none().0, position);
}
}
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn remove_row(&self, position: i32) {
unsafe {
ffi::gtk_grid_remove_row(self.to_glib_none().0, position);
}
}
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn set_baseline_row(&self, row: i32) {
unsafe {
ffi::gtk_grid_set_baseline_row(self.to_glib_none().0, row);
}
}
fn set_column_homogeneous(&self, homogeneous: bool) {
unsafe {
ffi::gtk_grid_set_column_homogeneous(self.to_glib_none().0, homogeneous.to_glib());
}
}
fn set_column_spacing(&self, spacing: u32) {
unsafe {
ffi::gtk_grid_set_column_spacing(self.to_glib_none().0, spacing);
}
}
#[cfg(any(feature = "v3_10", feature = "dox"))]
fn set_row_baseline_position(&self, row: i32, pos: BaselinePosition) {
unsafe {
ffi::gtk_grid_set_row_baseline_position(self.to_glib_none().0, row, pos.to_glib());
}
}
fn set_row_homogeneous(&self, homogeneous: bool) {
unsafe {
ffi::gtk_grid_set_row_homogeneous(self.to_glib_none().0, homogeneous.to_glib());
}
}
fn set_row_spacing(&self, spacing: u32) {
unsafe {
ffi::gtk_grid_set_row_spacing(self.to_glib_none().0, spacing);
}
}
fn get_property_baseline_row(&self) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0, "baseline-row".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_baseline_row(&self, baseline_row: i32) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0, "baseline-row".to_glib_none().0, Value::from(&baseline_row).to_glib_none().0);
}
}
fn get_cell_height<T: IsA<Widget>>(&self, item: &T) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "height".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_cell_height<T: IsA<Widget>>(&self, item: &T, height: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "height".to_glib_none().0, Value::from(&height).to_glib_none().0);
}
}
fn get_cell_width<T: IsA<Widget>>(&self, item: &T) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "width".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_cell_width<T: IsA<Widget>>(&self, item: &T, width: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "width".to_glib_none().0, Value::from(&width).to_glib_none().0);
}
}
fn get_cell_left_attach<T: IsA<Widget>>(&self, item: &T) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "left-attach".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_cell_left_attach<T: IsA<Widget>>(&self, item: &T, left_attach: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "left-attach".to_glib_none().0, Value::from(&left_attach).to_glib_none().0);
}
}
fn get_cell_top_attach<T: IsA<Widget>>(&self, item: &T) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "top-attach".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_cell_top_attach<T: IsA<Widget>>(&self, item: &T, top_attach: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "top-attach".to_glib_none().0, Value::from(&top_attach).to_glib_none().0);
}
}
fn connect_property_baseline_row_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::baseline-row",
transmute(notify_baseline_row_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
fn connect_property_column_homogeneous_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::column-homogeneous",
transmute(notify_column_homogeneous_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
fn connect_property_column_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::column-spacing",
transmute(notify_column_spacing_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
fn connect_property_row_homogeneous_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::row-homogeneous",
transmute(notify_row_homogeneous_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
fn connect_property_row_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::row-spacing",
transmute(notify_row_spacing_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn notify_baseline_row_trampoline<P>(this: *mut ffi::GtkGrid, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Grid> {
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&Grid::from_glib_borrow(this).downcast_unchecked())
}
unsafe extern "C" fn notify_column_homogeneous_trampoline<P>(this: *mut ffi::GtkGrid, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Grid> {
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&Grid::from_glib_borrow(this).downcast_unchecked())
}
unsafe extern "C" fn notify_column_spacing_trampoline<P>(this: *mut ffi::GtkGrid, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Grid> {
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&Grid::from_glib_borrow(this).downcast_unchecked())
}
unsafe extern "C" fn notify_row_homogeneous_trampoline<P>(this: *mut ffi::GtkGrid, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Grid> {
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&Grid::from_glib_borrow(this).downcast_unchecked())
}
unsafe extern "C" fn notify_row_spacing_trampoline<P>(this: *mut ffi::GtkGrid, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Grid> {
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&Grid::from_glib_borrow(this).downcast_unchecked())
}<|fim▁end|> | from_glib(ffi::gtk_grid_get_row_homogeneous(self.to_glib_none().0))
}
} |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>const electron = require('electron')
const app = electron.app
const BrowserWindow = electron.BrowserWindow
const {ipcMain} = require('electron')
const {dialog} = require('electron')
const {Menu} = require('electron')
import {enableLiveReload} from 'electron-compile'
const path = require('path')
const url = require('url')
const fs = require('fs')
enableLiveReload()
//Window Creation
var windowArray = []
exports.windowCount = 0
function createWindow () {
// Create the new browser window.
windowArray.push( new BrowserWindow({width: 800, height: 600}) )
exports.windowCount = windowArray.length
var newWindow = windowArray[exports.windowCount-1]
// windowArray[windowCount-1].maximize()
// and load the index.html of the app.
newWindow.loadURL(url.format({<|fim▁hole|> protocol: 'file:',
slashes: true
}))
// Emitted when the window is closed.
newWindow.on('closed', function () {
newWindow = null
})
}
app.on('ready', createWindow)
app.on('window-all-closed', function () {
if (process.platform !== 'darwin') {
app.quit()
}
})
// app.on('activate', function () {
// // On OS X it's common to re-create a window in the app when the
// // dock icon is clicked and there are no other windows open.
// if (mainWindow === null) {
// createWindow()
// }
// })
//Menus
var template = [
{
label: 'File',
submenu: [
{label: 'New Project'},
{label: 'Open Project'},
{label: 'Import File'},
{type: 'separator'},
{label: 'Save'},
{label: 'Save As'},
{label: 'Settings'}
]
},
{
label: 'Edit',
submenu: [
{role: 'undo'},
{role: 'redo'},
{type: 'separator'},
{role: 'cut'},
{role: 'copy'},
{role: 'paste'},
{role: 'delete'},
{role: 'selectall'}
]
},
{
label: 'Window',
submenu: [
{label: 'New Window', click: createWindow},
{role: 'minimize'},
{type: 'separator'},
{role: 'toggledevtools'},
{role: 'close'}
]
},
]
var mainMenu = Menu.buildFromTemplate(template)
Menu.setApplicationMenu(mainMenu)
//File Functions
function importFile (event) {
dialog.showOpenDialog({properties: ['openFile', 'multiSelections']}, (filePaths) => {
console.log(filePaths)
event.sender.send('importer', filePaths)
})
}
//IPC Functions
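// Renderer-side sketch (ours, not part of this file): the handlers below are
// driven from a window's renderer process via ipcRenderer, e.g.
//   require('electron').ipcRenderer.send('window-manager', 'New Window')
//   require('electron').ipcRenderer.send('file-manager', 'Import Files')
// Paths chosen in importFile() come back on the 'importer' channel:
//   require('electron').ipcRenderer.on('importer', (event, filePaths) => { /* ... */ })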
ipcMain.on('window-manager', (event, arg) => {
console.log(arg)
if (arg == "New Window") { //Create new window
createWindow()
}
})
ipcMain.on('file-manager', (event, arg) => {
console.log(arg)
if (arg == "Import Files") {
importFile(event)
}
})<|fim▁end|> | pathname: path.join(__dirname, 'html/index.html'), |
<|file_name|>json_utils.py<|end_file_name|><|fim▁begin|>"""
Utilities for dealing with JSON.
"""
import simplejson
from xmodule.modulestore import EdxJSONEncoder
class EscapedEdxJSONEncoder(EdxJSONEncoder):
"""
Class for encoding edx JSON which will be printed inline into HTML
templates.
"""
def encode(self, obj):
"""
Encodes JSON that is safe to be embedded in HTML.
"""<|fim▁hole|> simplejson.loads(super(EscapedEdxJSONEncoder, self).encode(obj)),
cls=simplejson.JSONEncoderForHTML
)<|fim▁end|> | return simplejson.dumps( |
<|file_name|>geom.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC
#<|fim▁hole|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import scipy.stats as osp_stats
from jax import lax
from jax._src.lax.lax import _const as _lax_const
from jax._src.numpy import lax_numpy as jnp
from jax._src.numpy.util import _wraps
from jax.scipy.special import xlog1py
@_wraps(osp_stats.geom.logpmf, update_doc=False)
def logpmf(k, p, loc=0):
k, p, loc = jnp._promote_args_inexact("geom.logpmf", k, p, loc)
zero = _lax_const(k, 0)
one = _lax_const(k, 1)
x = lax.sub(k, loc)
log_probs = xlog1py(lax.sub(x, one), -p) + lax.log(p)
return jnp.where(lax.le(x, zero), -jnp.inf, log_probs)
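# Sanity check (ours, not part of the original file): for a geometric
# distribution with success probability p = 0.25, P(X = 3) = (1 - p)**2 * p
# = 0.140625, so logpmf(3, 0.25) should be close to jnp.log(0.140625).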
@_wraps(osp_stats.geom.pmf, update_doc=False)
def pmf(k, p, loc=0):
return jnp.exp(logpmf(k, p, loc))<|fim▁end|> | |
<|file_name|>Method.ts<|end_file_name|><|fim▁begin|>namespace SitecoreExtensions.Http {<|fim▁hole|> export enum Method {
POST,
GET
}
}<|fim▁end|> | |
<|file_name|>sfm_reconst.py<|end_file_name|><|fim▁begin|>"""
.. _sfm-reconst:
==============================================
Reconstruction with the Sparse Fascicle Model
==============================================
In this example, we will use the Sparse Fascicle Model (SFM) [Rokem2015]_, to
reconstruct the fiber Orientation Distribution Function (fODF) in every voxel.
First, we import the modules we will use in this example:
"""
import dipy.reconst.sfm as sfm
import dipy.data as dpd
import dipy.direction.peaks as dpp
from dipy.io.image import load_nifti, save_nifti
from dipy.io.gradients import read_bvals_bvecs
from dipy.core.gradients import gradient_table
from dipy.viz import window, actor
"""
For the purpose of this example, we will use the Stanford HARDI dataset (150
directions, single b-value of 2000 $s/mm^2$) that can be automatically
downloaded. If you have not yet downloaded this data-set in one of the other
examples, you will need to be connected to the internet the first time you run
this example. The data will be stored for subsequent runs, and for use with
other examples.
"""
hardi_fname, hardi_bval_fname, hardi_bvec_fname = dpd.get_fnames('stanford_hardi')
data, affine = load_nifti(hardi_fname)
bvals, bvecs = read_bvals_bvecs(hardi_bval_fname, hardi_bvec_fname)
gtab = gradient_table(bvals, bvecs)
# Enables/disables interactive visualization
interactive = False
"""
Reconstruction of the fiber ODF in each voxel guides subsequent tracking
steps. Here, the model is the Sparse Fascicle Model, described in
[Rokem2015]_. This model reconstructs the diffusion signal as a combination of
the signals from different fascicles. This model can be written as:
.. math::
y = X\beta
Where $y$ is the signal and $\beta$ are weights on different points in the
sphere. The columns of the design matrix, $X$ are the signals in each point in
the measurement that would be predicted if there was a fascicle oriented in the
direction represented by that column. Typically, the signal used for this
kernel will be a prolate tensor with axial diffusivity 3-5 times higher than
its radial diffusivity. The exact numbers can also be estimated from examining
parts of the brain in which there is known to be only one fascicle (e.g. in
corpus callosum).
Sparsity constraints on the fiber ODF ($\beta$) are set through the Elastic Net
algorithm [Zou2005]_.
Elastic Net optimizes the following cost function:
.. math::
    \sum_{i=1}^{n}{(y_i - \hat{y}_i)^2} + \alpha (\lambda \sum_{j=1}^{m}{w_j} + (1-\lambda) \sum_{j=1}^{m}{w^2_j})
where $\hat{y}$ is the signal predicted for a particular setting of $\beta$,
such that the left part of this expression is the squared loss function;
$\alpha$ is a parameter that sets the balance between the squared loss on
the data, and the regularization constraints. The regularization parameter
$\lambda$ sets the `l1_ratio`, which controls the balance between L1-sparsity
(low sum of weights), and low L2-sparsity (low sum-of-squares of the weights).
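A direct NumPy rendering of this cost can make the two terms concrete. The
sketch below is ours, for illustration only (it is not part of DIPY's API);
``X``, ``y`` and ``beta`` are the design matrix, signal and weights defined
above::

    import numpy as np

    def elastic_net_cost(X, y, beta, alpha=0.001, l1_ratio=0.5):
        residual = y - X.dot(beta)
        l1 = np.abs(beta).sum()   # encourages sparse weights
        l2 = (beta ** 2).sum()    # keeps the surviving weights small
        return (residual ** 2).sum() + alpha * (l1_ratio * l1 + (1 - l1_ratio) * l2)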
Just like Constrained Spherical Deconvolution (see :ref:`reconst-csd`), the SFM
requires the definition of a response function. We'll take advantage of the
automated algorithm in the :mod:`csdeconv` module to find this response
function:
"""
from dipy.reconst.csdeconv import auto_response
response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
"""
The ``response`` return value contains two entries. The first is an array with
the eigenvalues of the response function and the second is the average S0 for
this response.
It is a very good practice to always validate the result of ``auto_response``.
For this purpose, we can print it and have a look at its values.
"""
print(response)
"""
(array([ 0.0014, 0.00029, 0.00029]), 416.206)
We initialize an SFM model object, using these values. We will use the default
sphere (362 vertices, symmetrically distributed on the surface of the sphere),
as a set of putative fascicle directions that are considered in the model
"""
sphere = dpd.get_sphere()
sf_model = sfm.SparseFascicleModel(gtab, sphere=sphere,
l1_ratio=0.5, alpha=0.001,
response=response[0])<|fim▁hole|>containing parts of the corpus callosum and of the centrum semiovale
"""
data_small = data[20:50, 55:85, 38:39]
"""
Fitting the model to this small volume of data, we calculate the ODF of this
model on the sphere, and plot it.
"""
sf_fit = sf_model.fit(data_small)
sf_odf = sf_fit.odf(sphere)
fodf_spheres = actor.odf_slicer(sf_odf, sphere=sphere, scale=0.8,
colormap='plasma')
ren = window.Renderer()
ren.add(fodf_spheres)
print('Saving illustration as sf_odfs.png')
window.record(ren, out_path='sf_odfs.png', size=(1000, 1000))
if interactive:
window.show(ren)
"""
We can extract the peaks from the ODF, and plot these as well
"""
sf_peaks = dpp.peaks_from_model(sf_model,
data_small,
sphere,
relative_peak_threshold=.5,
min_separation_angle=25,
return_sh=False)
window.clear(ren)
fodf_peaks = actor.peak_slicer(sf_peaks.peak_dirs, sf_peaks.peak_values)
ren.add(fodf_peaks)
print('Saving illustration as sf_peaks.png')
window.record(ren, out_path='sf_peaks.png', size=(1000, 1000))
if interactive:
window.show(ren)
"""
Finally, we plot both the peaks and the ODFs, overlaid:
"""
fodf_spheres.GetProperty().SetOpacity(0.4)
ren.add(fodf_spheres)
print('Saving illustration as sf_both.png')
window.record(ren, out_path='sf_both.png', size=(1000, 1000))
if interactive:
window.show(ren)
"""
.. figure:: sf_both.png
:align: center
SFM Peaks and ODFs.
To see how to use this information in tracking, proceed to :ref:`sfm-track`.
References
----------
.. [Rokem2015] Ariel Rokem, Jason D. Yeatman, Franco Pestilli, Kendrick
N. Kay, Aviv Mezer, Stefan van der Walt, Brian A. Wandell
(2015). Evaluating the accuracy of diffusion MRI models in white
matter. PLoS ONE 10(4): e0123272. doi:10.1371/journal.pone.0123272
.. [Zou2005] Zou H, Hastie T (2005). Regularization and variable
selection via the elastic net. J R Stat Soc B:301-320
"""<|fim▁end|> |
"""
For the purpose of the example, we will consider a small volume of data |
<|file_name|>menu.js<|end_file_name|><|fim▁begin|>$(document).ready(function() {
$('#mostrar_menu').click(function() {
$('#sidebar-wrapper').toggle(300);
});
<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from flask import Flask
<|fim▁hole|>app.config.from_object('blog.config')
from blog import views<|fim▁end|> |
app = Flask(__name__) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#[macro_use]
extern crate serde;
use euclid::default::{Point2D, Rect, Size2D};
use malloc_size_of_derive::MallocSizeOf;
use std::borrow::Cow;
#[derive(Clone, Copy, Debug, Deserialize, Eq, MallocSizeOf, PartialEq, Serialize)]
pub enum PixelFormat {
/// Luminance channel only
K8,
/// Luminance + alpha
KA8,
/// RGB, 8 bits per channel
RGB8,
/// RGB + alpha, 8 bits per channel
RGBA8,
/// BGR + alpha, 8 bits per channel
BGRA8,
}
pub fn rgba8_get_rect(pixels: &[u8], size: Size2D<u32>, rect: Rect<u32>) -> Cow<[u8]> {
assert!(!rect.is_empty());
assert!(Rect::from_size(size).contains_rect(&rect));
assert_eq!(pixels.len() % 4, 0);
assert_eq!(size.area() as usize, pixels.len() / 4);
let area = rect.size.area() as usize;
let first_column_start = rect.origin.x as usize * 4;
let row_length = size.width as usize * 4;
let first_row_start = rect.origin.y as usize * row_length;
if rect.origin.x == 0 && rect.size.width == size.width || rect.size.height == 1 {
let start = first_column_start + first_row_start;
return Cow::Borrowed(&pixels[start..start + area * 4]);
}
let mut data = Vec::with_capacity(area * 4);
for row in pixels[first_row_start..]
.chunks(row_length)
.take(rect.size.height as usize)
{
data.extend_from_slice(&row[first_column_start..][..rect.size.width as usize * 4]);
}
data.into()
}
// TODO(pcwalton): Speed up with SIMD, or better yet, find some way to not do this.
pub fn rgba8_byte_swap_colors_inplace(pixels: &mut [u8]) {
assert!(pixels.len() % 4 == 0);
for rgba in pixels.chunks_mut(4) {
let b = rgba[0];
rgba[0] = rgba[2];
rgba[2] = b;
}
}
pub fn rgba8_byte_swap_and_premultiply_inplace(pixels: &mut [u8]) {
assert!(pixels.len() % 4 == 0);
for rgba in pixels.chunks_mut(4) {
let b = rgba[0];
rgba[0] = multiply_u8_color(rgba[2], rgba[3]);
rgba[1] = multiply_u8_color(rgba[1], rgba[3]);
rgba[2] = multiply_u8_color(b, rgba[3]);
}
}
/// Returns true if the pixels were found to be completely opaque.
pub fn rgba8_premultiply_inplace(pixels: &mut [u8]) -> bool {<|fim▁hole|> let mut is_opaque = true;
for rgba in pixels.chunks_mut(4) {
rgba[0] = multiply_u8_color(rgba[0], rgba[3]);
rgba[1] = multiply_u8_color(rgba[1], rgba[3]);
rgba[2] = multiply_u8_color(rgba[2], rgba[3]);
is_opaque = is_opaque && rgba[3] == 255;
}
is_opaque
}
pub fn multiply_u8_color(a: u8, b: u8) -> u8 {
return (a as u32 * b as u32 / 255) as u8;
}
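// Worked examples (illustrative): multiply_u8_color(128, 255) == 128, since an
// alpha of 255 leaves a channel unchanged, while multiply_u8_color(128, 0) == 0,
// so premultiplying a fully transparent pixel zeroes its color channels.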
pub fn clip(
mut origin: Point2D<i32>,
mut size: Size2D<u32>,
surface: Size2D<u32>,
) -> Option<Rect<u32>> {
if origin.x < 0 {
size.width = size.width.saturating_sub(-origin.x as u32);
origin.x = 0;
}
if origin.y < 0 {
size.height = size.height.saturating_sub(-origin.y as u32);
origin.y = 0;
}
Rect::new(origin.to_u32(), size)
.intersection(&Rect::from_size(surface))
.filter(|rect| !rect.is_empty())
}<|fim▁end|> | assert!(pixels.len() % 4 == 0); |
<|file_name|>unnecessary_sort_by.rs<|end_file_name|><|fim▁begin|>// run-rustfix
#![allow(clippy::stable_sort_primitive)]
use std::cmp::Reverse;
fn unnecessary_sort_by() {
fn id(x: isize) -> isize {
x
}
let mut vec: Vec<isize> = vec![3, 6, 1, 2, 5];
// Forward examples
vec.sort_by(|a, b| a.cmp(b));
vec.sort_unstable_by(|a, b| a.cmp(b));
vec.sort_by(|a, b| (a + 5).abs().cmp(&(b + 5).abs()));
vec.sort_unstable_by(|a, b| id(-a).cmp(&id(-b)));
// Reverse examples
vec.sort_by(|a, b| b.cmp(a)); // not linted to avoid suggesting `Reverse(b)` which would borrow
vec.sort_by(|a, b| (b + 5).abs().cmp(&(a + 5).abs()));
vec.sort_unstable_by(|a, b| id(-b).cmp(&id(-a)));
// Negative examples (shouldn't be changed)
let c = &7;
vec.sort_by(|a, b| (b - a).cmp(&(a - b)));
vec.sort_by(|_, b| b.cmp(&5));
vec.sort_by(|_, b| b.cmp(c));
vec.sort_unstable_by(|a, _| a.cmp(c));
// Vectors of references are fine as long as the resulting key does not borrow
let mut vec: Vec<&&&isize> = vec![&&&3, &&&6, &&&1, &&&2, &&&5];
vec.sort_by(|a, b| (***a).abs().cmp(&(***b).abs()));
vec.sort_unstable_by(|a, b| (***a).abs().cmp(&(***b).abs()));
// `Reverse(b)` would borrow in the following cases, don't lint
vec.sort_by(|a, b| b.cmp(a));
vec.sort_unstable_by(|a, b| b.cmp(a));
}
// Do not suggest returning a reference to the closure parameter of `Vec::sort_by_key`
mod issue_5754 {
#[derive(Clone, Copy)]
struct Test(usize);
#[derive(PartialOrd, Ord, PartialEq, Eq)]
struct Wrapper<'a>(&'a usize);
impl Test {
fn name(&self) -> &usize {
&self.0
}
fn wrapped(&self) -> Wrapper<'_> {
Wrapper(&self.0)
}
}
pub fn test() {
let mut args: Vec<Test> = vec![];
// Forward<|fim▁hole|> args.sort_by(|a, b| a.name().cmp(b.name()));
args.sort_by(|a, b| a.wrapped().cmp(&b.wrapped()));
args.sort_unstable_by(|a, b| a.name().cmp(b.name()));
args.sort_unstable_by(|a, b| a.wrapped().cmp(&b.wrapped()));
// Reverse
args.sort_by(|a, b| b.name().cmp(a.name()));
args.sort_by(|a, b| b.wrapped().cmp(&a.wrapped()));
args.sort_unstable_by(|a, b| b.name().cmp(a.name()));
args.sort_unstable_by(|a, b| b.wrapped().cmp(&a.wrapped()));
}
}
// The closure parameter is not dereferenced anymore, so non-Copy types can be linted
mod issue_6001 {
use super::*;
struct Test(String);
impl Test {
// Return an owned type so that we don't hit the fix for 5754
fn name(&self) -> String {
self.0.clone()
}
}
pub fn test() {
let mut args: Vec<Test> = vec![];
// Forward
args.sort_by(|a, b| a.name().cmp(&b.name()));
args.sort_unstable_by(|a, b| a.name().cmp(&b.name()));
// Reverse
args.sort_by(|a, b| b.name().cmp(&a.name()));
args.sort_unstable_by(|a, b| b.name().cmp(&a.name()));
}
}
fn main() {
unnecessary_sort_by();
issue_5754::test();
issue_6001::test();
}<|fim▁end|> | |
<|file_name|>UpdateUiCallBack.java<|end_file_name|><|fim▁begin|>package com.umeng.soexample.run.step;
/**
* Step count update callback<|fim▁hole|> /**
* Update the UI step count
*
* @param stepCount the step count
*/
void updateUi(int stepCount);
}<|fim▁end|> | * Created by dylan on 16/9/27.
*/
public interface UpdateUiCallBack { |
<|file_name|>dex_file.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "dex_file.h"
#include <fcntl.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/file.h>
#include <sys/stat.h>
#include "base/logging.h"
#include "base/stringprintf.h"
#include "class_linker.h"
#include "dex_file-inl.h"
#include "dex_file_verifier.h"
#include "globals.h"
#include "leb128.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/string.h"
#include "os.h"
#include "safe_map.h"
#include "thread.h"
#include "UniquePtr.h"
#include "utf.h"
#include "utils.h"
#include "well_known_classes.h"
#include "zip_archive.h"
namespace art {
const byte DexFile::kDexMagic[] = { 'd', 'e', 'x', '\n' };
const byte DexFile::kDexMagicVersion[] = { '0', '3', '5', '\0' };
DexFile::ClassPathEntry DexFile::FindInClassPath(const char* descriptor,
const ClassPath& class_path) {
for (size_t i = 0; i != class_path.size(); ++i) {
const DexFile* dex_file = class_path[i];
const DexFile::ClassDef* dex_class_def = dex_file->FindClassDef(descriptor);
if (dex_class_def != NULL) {
return ClassPathEntry(dex_file, dex_class_def);
}
}
// TODO: remove reinterpret_cast when the -std=gnu++0x host issue is resolved
return ClassPathEntry(reinterpret_cast<const DexFile*>(NULL),
reinterpret_cast<const DexFile::ClassDef*>(NULL));
}
int OpenAndReadMagic(const std::string& filename, uint32_t* magic) {
CHECK(magic != NULL);
int fd = open(filename.c_str(), O_RDONLY, 0);
if (fd == -1) {
PLOG(WARNING) << "Unable to open '" << filename << "'";
return -1;
}
int n = TEMP_FAILURE_RETRY(read(fd, magic, sizeof(*magic)));
if (n != sizeof(*magic)) {
PLOG(ERROR) << "Failed to find magic in '" << filename << "'";
return -1;
}
if (lseek(fd, 0, SEEK_SET) != 0) {
PLOG(ERROR) << "Failed to seek to beginning of file '" << filename << "'";
return -1;
}
return fd;
}
bool DexFile::GetChecksum(const std::string& filename, uint32_t* checksum) {
CHECK(checksum != NULL);
uint32_t magic;
int fd = OpenAndReadMagic(filename, &magic);
if (fd == -1) {
return false;
}
if (IsZipMagic(magic)) {
UniquePtr<ZipArchive> zip_archive(ZipArchive::OpenFromFd(fd));
if (zip_archive.get() == NULL) {
return false;
}
UniquePtr<ZipEntry> zip_entry(zip_archive->Find(kClassesDex));
if (zip_entry.get() == NULL) {
LOG(ERROR) << "Zip archive '" << filename << "' doesn't contain " << kClassesDex;
return false;
}
*checksum = zip_entry->GetCrc32();
return true;
}
if (IsDexMagic(magic)) {
UniquePtr<const DexFile> dex_file(DexFile::OpenFile(fd, filename, false));
if (dex_file.get() == NULL) {
return false;
}
*checksum = dex_file->GetHeader().checksum_;
return true;
}
LOG(ERROR) << "Expected valid zip or dex file: " << filename;
return false;
}
const DexFile* DexFile::Open(const std::string& filename,
const std::string& location) {
uint32_t magic;
int fd = OpenAndReadMagic(filename, &magic);
if (fd == -1) {
return NULL;
}
if (IsZipMagic(magic)) {
return DexFile::OpenZip(fd, location);
}
if (IsDexMagic(magic)) {
return DexFile::OpenFile(fd, location, true);
}
LOG(ERROR) << "Expected valid zip or dex file: " << filename;
return NULL;
}
int DexFile::GetPermissions() const {
if (mem_map_.get() == NULL) {
return 0;
} else {
return mem_map_->GetProtect();
}
}
bool DexFile::IsReadOnly() const {
return GetPermissions() == PROT_READ;
}
bool DexFile::EnableWrite() const {
CHECK(IsReadOnly());
if (mem_map_.get() == NULL) {
return false;
} else {
return mem_map_->Protect(PROT_READ | PROT_WRITE);
}
}
bool DexFile::DisableWrite() const {
CHECK(!IsReadOnly());
if (mem_map_.get() == NULL) {
return false;
} else {
return mem_map_->Protect(PROT_READ);
}
}
const DexFile* DexFile::OpenFile(int fd,
const std::string& location,
bool verify) {
CHECK(!location.empty());
struct stat sbuf;
memset(&sbuf, 0, sizeof(sbuf));
if (fstat(fd, &sbuf) == -1) {
PLOG(ERROR) << "fstat \"" << location << "\" failed";
close(fd);
return NULL;
}
if (S_ISDIR(sbuf.st_mode)) {
LOG(ERROR) << "attempt to mmap directory \"" << location << "\"";
return NULL;
}
size_t length = sbuf.st_size;
UniquePtr<MemMap> map(MemMap::MapFile(length, PROT_READ, MAP_PRIVATE, fd, 0));
if (map.get() == NULL) {
LOG(ERROR) << "mmap \"" << location << "\" failed";
close(fd);
return NULL;
}
close(fd);
if (map->Size() < sizeof(DexFile::Header)) {
LOG(ERROR) << "Failed to open dex file '" << location << "' that is too short to have a header";
return NULL;
}
const Header* dex_header = reinterpret_cast<const Header*>(map->Begin());
const DexFile* dex_file = OpenMemory(location, dex_header->checksum_, map.release());
if (dex_file == NULL) {
LOG(ERROR) << "Failed to open dex file '" << location << "' from memory";
return NULL;
}
if (verify && !DexFileVerifier::Verify(dex_file, dex_file->Begin(), dex_file->Size())) {
LOG(ERROR) << "Failed to verify dex file '" << location << "'";
return NULL;
}
return dex_file;
}
const char* DexFile::kClassesDex = "classes.dex";
const DexFile* DexFile::OpenZip(int fd, const std::string& location) {
UniquePtr<ZipArchive> zip_archive(ZipArchive::OpenFromFd(fd));
if (zip_archive.get() == NULL) {
LOG(ERROR) << "Failed to open " << location << " when looking for classes.dex";
return NULL;
}
return DexFile::Open(*zip_archive.get(), location);
}
const DexFile* DexFile::OpenMemory(const std::string& location,
uint32_t location_checksum,
MemMap* mem_map) {
return OpenMemory(mem_map->Begin(),
mem_map->Size(),
location,
location_checksum,
mem_map);
}
const DexFile* DexFile::Open(const ZipArchive& zip_archive, const std::string& location) {
CHECK(!location.empty());
UniquePtr<ZipEntry> zip_entry(zip_archive.Find(kClassesDex));
if (zip_entry.get() == NULL) {
LOG(ERROR) << "Failed to find classes.dex within '" << location << "'";
return NULL;
}
UniquePtr<MemMap> map(zip_entry->ExtractToMemMap(kClassesDex));
if (map.get() == NULL) {
LOG(ERROR) << "Failed to extract '" << kClassesDex << "' from '" << location << "'";
return NULL;
}
UniquePtr<const DexFile> dex_file(OpenMemory(location, zip_entry->GetCrc32(), map.release()));
if (dex_file.get() == NULL) {
LOG(ERROR) << "Failed to open dex file '" << location << "' from memory";
return NULL;
}
if (!DexFileVerifier::Verify(dex_file.get(), dex_file->Begin(), dex_file->Size())) {
LOG(ERROR) << "Failed to verify dex file '" << location << "'";
return NULL;
}
if (!dex_file->DisableWrite()) {
LOG(ERROR) << "Failed to make dex file read only '" << location << "'";
return NULL;
}
CHECK(dex_file->IsReadOnly()) << location;
return dex_file.release();
}
const DexFile* DexFile::OpenMemory(const byte* base,
size_t size,
const std::string& location,
uint32_t location_checksum,
MemMap* mem_map) {
CHECK_ALIGNED(base, 4); // various dex file structures must be word aligned
UniquePtr<DexFile> dex_file(new DexFile(base, size, location, location_checksum, mem_map));
if (!dex_file->Init()) {
return NULL;
} else {
return dex_file.release();
}
}
DexFile::~DexFile() {
// We don't call DeleteGlobalRef on dex_object_ because we're only called by DestroyJavaVM, and
// that's only called after DetachCurrentThread, which means there's no JNIEnv. We could
// re-attach, but cleaning up these global references is not obviously useful. It's not as if
// the global reference table is otherwise empty!
}
bool DexFile::Init() {
InitMembers();
if (!CheckMagicAndVersion()) {
return false;
}
return true;
}
void DexFile::InitMembers() {
const byte* b = begin_;
header_ = reinterpret_cast<const Header*>(b);
const Header* h = header_;
string_ids_ = reinterpret_cast<const StringId*>(b + h->string_ids_off_);
type_ids_ = reinterpret_cast<const TypeId*>(b + h->type_ids_off_);
field_ids_ = reinterpret_cast<const FieldId*>(b + h->field_ids_off_);
method_ids_ = reinterpret_cast<const MethodId*>(b + h->method_ids_off_);
proto_ids_ = reinterpret_cast<const ProtoId*>(b + h->proto_ids_off_);
class_defs_ = reinterpret_cast<const ClassDef*>(b + h->class_defs_off_);
class_defs_off_=h->class_defs_off_;
data_off_=h->data_off_;
data_size_=h->data_size_;
}
bool DexFile::CheckMagicAndVersion() const {
CHECK(header_->magic_ != NULL) << GetLocation();
if (!IsMagicValid(header_->magic_)) {
LOG(ERROR) << "Unrecognized magic number in " << GetLocation() << ":"
<< " " << header_->magic_[0]
<< " " << header_->magic_[1]
<< " " << header_->magic_[2]
<< " " << header_->magic_[3];
return false;
}
if (!IsVersionValid(header_->magic_)) {
LOG(ERROR) << "Unrecognized version number in " << GetLocation() << ":"
<< " " << header_->magic_[4]
<< " " << header_->magic_[5]
<< " " << header_->magic_[6]
<< " " << header_->magic_[7];
return false;
}
return true;
}
bool DexFile::IsMagicValid(const byte* magic) {
return (memcmp(magic, kDexMagic, sizeof(kDexMagic)) == 0);
}
bool DexFile::IsVersionValid(const byte* magic) {
const byte* version = &magic[sizeof(kDexMagic)];
return (memcmp(version, kDexMagicVersion, sizeof(kDexMagicVersion)) == 0);
}
uint32_t DexFile::GetVersion() const {
const char* version = reinterpret_cast<const char*>(&GetHeader().magic_[sizeof(kDexMagic)]);
return atoi(version);
}
const DexFile::ClassDef* DexFile::FindClassDef(const char* descriptor) const {
size_t num_class_defs = NumClassDefs();
if (num_class_defs == 0) {
return NULL;
}
const StringId* string_id = FindStringId(descriptor);
if (string_id == NULL) {
return NULL;
}
const TypeId* type_id = FindTypeId(GetIndexForStringId(*string_id));
if (type_id == NULL) {
return NULL;
}
uint16_t type_idx = GetIndexForTypeId(*type_id);
for (size_t i = 0; i < num_class_defs; ++i) {
const ClassDef& class_def = GetClassDef(i);
if (class_def.class_idx_ == type_idx) {
return &class_def;
}
}
return NULL;
}
const DexFile::ClassDef* DexFile::FindClassDef(uint16_t type_idx) const {
size_t num_class_defs = NumClassDefs();
for (size_t i = 0; i < num_class_defs; ++i) {
const ClassDef& class_def = GetClassDef(i);
if (class_def.class_idx_ == type_idx) {
return &class_def;
}
}
return NULL;
}
const DexFile::FieldId* DexFile::FindFieldId(const DexFile::TypeId& declaring_klass,
const DexFile::StringId& name,
const DexFile::TypeId& type) const {
// Binary search FieldIds knowing that they are sorted by class_idx, name_idx then type_idx
const uint16_t class_idx = GetIndexForTypeId(declaring_klass);
const uint32_t name_idx = GetIndexForStringId(name);
const uint16_t type_idx = GetIndexForTypeId(type);
int32_t lo = 0;
int32_t hi = NumFieldIds() - 1;
while (hi >= lo) {
int32_t mid = (hi + lo) / 2;
const DexFile::FieldId& field = GetFieldId(mid);
if (class_idx > field.class_idx_) {
lo = mid + 1;
} else if (class_idx < field.class_idx_) {
hi = mid - 1;
} else {
if (name_idx > field.name_idx_) {
lo = mid + 1;
} else if (name_idx < field.name_idx_) {
hi = mid - 1;
} else {
if (type_idx > field.type_idx_) {
lo = mid + 1;
} else if (type_idx < field.type_idx_) {
hi = mid - 1;
} else {
return &field;
}
}
}
}
return NULL;
}
const DexFile::MethodId* DexFile::FindMethodId(const DexFile::TypeId& declaring_klass,
const DexFile::StringId& name,
const DexFile::ProtoId& signature) const {
// Binary search MethodIds knowing that they are sorted by class_idx, name_idx then proto_idx
const uint16_t class_idx = GetIndexForTypeId(declaring_klass);
const uint32_t name_idx = GetIndexForStringId(name);
const uint16_t proto_idx = GetIndexForProtoId(signature);
int32_t lo = 0;
int32_t hi = NumMethodIds() - 1;
while (hi >= lo) {
int32_t mid = (hi + lo) / 2;
const DexFile::MethodId& method = GetMethodId(mid);
if (class_idx > method.class_idx_) {
lo = mid + 1;
} else if (class_idx < method.class_idx_) {
hi = mid - 1;
} else {
if (name_idx > method.name_idx_) {
lo = mid + 1;
} else if (name_idx < method.name_idx_) {
hi = mid - 1;
} else {
if (proto_idx > method.proto_idx_) {
lo = mid + 1;
} else if (proto_idx < method.proto_idx_) {
hi = mid - 1;
} else {
return &method;
}
}
}
}
return NULL;
}
const DexFile::StringId* DexFile::FindStringId(const char* string) const {
int32_t lo = 0;
int32_t hi = NumStringIds() - 1;
while (hi >= lo) {
int32_t mid = (hi + lo) / 2;
uint32_t length;
const DexFile::StringId& str_id = GetStringId(mid);
const char* str = GetStringDataAndLength(str_id, &length);
int compare = CompareModifiedUtf8ToModifiedUtf8AsUtf16CodePointValues(string, str);
if (compare > 0) {
lo = mid + 1;
} else if (compare < 0) {
hi = mid - 1;
} else {
return &str_id;
}
}
return NULL;
}
const DexFile::StringId* DexFile::FindStringId(const uint16_t* string) const {
int32_t lo = 0;
int32_t hi = NumStringIds() - 1;
while (hi >= lo) {
int32_t mid = (hi + lo) / 2;
uint32_t length;
const DexFile::StringId& str_id = GetStringId(mid);
const char* str = GetStringDataAndLength(str_id, &length);
int compare = CompareModifiedUtf8ToUtf16AsCodePointValues(str, string);
if (compare > 0) {
lo = mid + 1;
} else if (compare < 0) {
hi = mid - 1;
} else {
return &str_id;
}
}
return NULL;
}
const DexFile::TypeId* DexFile::FindTypeId(uint32_t string_idx) const {
int32_t lo = 0;
int32_t hi = NumTypeIds() - 1;
while (hi >= lo) {
int32_t mid = (hi + lo) / 2;
const TypeId& type_id = GetTypeId(mid);
if (string_idx > type_id.descriptor_idx_) {
lo = mid + 1;
} else if (string_idx < type_id.descriptor_idx_) {
hi = mid - 1;
} else {
return &type_id;
}
}
return NULL;
}
const DexFile::ProtoId* DexFile::FindProtoId(uint16_t return_type_idx,
const std::vector<uint16_t>& signature_type_idxs) const {
int32_t lo = 0;
int32_t hi = NumProtoIds() - 1;
while (hi >= lo) {
int32_t mid = (hi + lo) / 2;
const DexFile::ProtoId& proto = GetProtoId(mid);
int compare = return_type_idx - proto.return_type_idx_;
if (compare == 0) {
DexFileParameterIterator it(*this, proto);
size_t i = 0;
while (it.HasNext() && i < signature_type_idxs.size() && compare == 0) {
compare = signature_type_idxs[i] - it.GetTypeIdx();
it.Next();
i++;
}
if (compare == 0) {
if (it.HasNext()) {
compare = -1;
} else if (i < signature_type_idxs.size()) {
compare = 1;
}
}
}
if (compare > 0) {
lo = mid + 1;
} else if (compare < 0) {
hi = mid - 1;
} else {
return &proto;
}
}
return NULL;
}
// Given a signature, place the type ids into the given vector
bool DexFile::CreateTypeList(uint16_t* return_type_idx, std::vector<uint16_t>* param_type_idxs,
const std::string& signature) const {
if (signature[0] != '(') {
return false;
}
size_t offset = 1;
size_t end = signature.size();
bool process_return = false;
while (offset < end) {
char c = signature[offset];
offset++;
if (c == ')') {
process_return = true;
continue;
}
std::string descriptor;
descriptor += c;
while (c == '[') { // process array prefix
if (offset >= end) { // expect some descriptor following [
return false;
}
c = signature[offset];
offset++;
descriptor += c;
}
if (c == 'L') { // process type descriptors
do {
if (offset >= end) { // unexpected early termination of descriptor<|fim▁hole|> offset++;
descriptor += c;
} while (c != ';');
}
const DexFile::StringId* string_id = FindStringId(descriptor.c_str());
if (string_id == NULL) {
return false;
}
const DexFile::TypeId* type_id = FindTypeId(GetIndexForStringId(*string_id));
if (type_id == NULL) {
return false;
}
uint16_t type_idx = GetIndexForTypeId(*type_id);
if (!process_return) {
param_type_idxs->push_back(type_idx);
} else {
*return_type_idx = type_idx;
return offset == end; // return true if the signature had reached a sensible end
}
}
return false; // failed to correctly parse return type
}
// Materializes the method descriptor for a method prototype. Method
// descriptors are not stored directly in the dex file. Instead, one
// must assemble the descriptor from references in the prototype.
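// For example (illustrative): a prototype taking a java.lang.String and an int
// and returning void materializes as "(Ljava/lang/String;I)V".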
std::string DexFile::CreateMethodSignature(uint32_t proto_idx, int32_t* unicode_length) const {
const ProtoId& proto_id = GetProtoId(proto_idx);
std::string descriptor;
descriptor.push_back('(');
const TypeList* type_list = GetProtoParameters(proto_id);
size_t parameter_length = 0;
if (type_list != NULL) {
// A non-zero number of arguments. Append the type names.
for (size_t i = 0; i < type_list->Size(); ++i) {
const TypeItem& type_item = type_list->GetTypeItem(i);
uint32_t type_idx = type_item.type_idx_;
uint32_t type_length;
const char* name = StringByTypeIdx(type_idx, &type_length);
parameter_length += type_length;
descriptor.append(name);
}
}
descriptor.push_back(')');
uint32_t return_type_idx = proto_id.return_type_idx_;
uint32_t return_type_length;
const char* name = StringByTypeIdx(return_type_idx, &return_type_length);
descriptor.append(name);
if (unicode_length != NULL) {
*unicode_length = parameter_length + return_type_length + 2; // 2 for ( and )
}
return descriptor;
}
int32_t DexFile::GetLineNumFromPC(const mirror::ArtMethod* method, uint32_t rel_pc) const {
// For native method, lineno should be -2 to indicate it is native. Note that
// "line number == -2" is how libcore tells from StackTraceElement.
if (method->GetCodeItemOffset() == 0) {
return -2;
}
const CodeItem* code_item = GetCodeItem(method->GetCodeItemOffset());
DCHECK(code_item != NULL) << PrettyMethod(method) << " " << GetLocation();
// A method with no line number info should return -1
LineNumFromPcContext context(rel_pc, -1);
DecodeDebugInfo(code_item, method->IsStatic(), method->GetDexMethodIndex(), LineNumForPcCb,
NULL, &context);
return context.line_num_;
}
int32_t DexFile::FindTryItem(const CodeItem &code_item, uint32_t address) {
// Note: Signed type is important for max and min.
int32_t min = 0;
int32_t max = code_item.tries_size_ - 1;
while (min <= max) {
int32_t mid = min + ((max - min) / 2);
const art::DexFile::TryItem* ti = GetTryItems(code_item, mid);
uint32_t start = ti->start_addr_;
uint32_t end = start + ti->insn_count_;
if (address < start) {
max = mid - 1;
} else if (address >= end) {
min = mid + 1;
} else { // We have a winner!
return mid;
}
}
// No match.
return -1;
}
int32_t DexFile::FindCatchHandlerOffset(const CodeItem &code_item, uint32_t address) {
int32_t try_item = FindTryItem(code_item, address);
if (try_item == -1) {
return -1;
} else {
return DexFile::GetTryItems(code_item, try_item)->handler_off_;
}
}
void DexFile::DecodeDebugInfo0(const CodeItem* code_item, bool is_static, uint32_t method_idx,
DexDebugNewPositionCb position_cb, DexDebugNewLocalCb local_cb,
void* context, const byte* stream, LocalInfo* local_in_reg) const {
uint32_t line = DecodeUnsignedLeb128(&stream);
uint32_t parameters_size = DecodeUnsignedLeb128(&stream);
uint16_t arg_reg = code_item->registers_size_ - code_item->ins_size_;
uint32_t address = 0;
bool need_locals = (local_cb != NULL);
if (!is_static) {
if (need_locals) {
const char* descriptor = GetMethodDeclaringClassDescriptor(GetMethodId(method_idx));
local_in_reg[arg_reg].name_ = "this";
local_in_reg[arg_reg].descriptor_ = descriptor;
local_in_reg[arg_reg].signature_ = NULL;
local_in_reg[arg_reg].start_address_ = 0;
local_in_reg[arg_reg].is_live_ = true;
}
arg_reg++;
}
DexFileParameterIterator it(*this, GetMethodPrototype(GetMethodId(method_idx)));
for (uint32_t i = 0; i < parameters_size && it.HasNext(); ++i, it.Next()) {
if (arg_reg >= code_item->registers_size_) {
LOG(ERROR) << "invalid stream - arg reg >= reg size (" << arg_reg
<< " >= " << code_item->registers_size_ << ") in " << GetLocation();
return;
}
uint32_t id = DecodeUnsignedLeb128P1(&stream);
const char* descriptor = it.GetDescriptor();
if (need_locals && id != kDexNoIndex) {
const char* name = StringDataByIdx(id);
local_in_reg[arg_reg].name_ = name;
local_in_reg[arg_reg].descriptor_ = descriptor;
local_in_reg[arg_reg].signature_ = NULL;
local_in_reg[arg_reg].start_address_ = address;
local_in_reg[arg_reg].is_live_ = true;
}
switch (*descriptor) {
case 'D':
case 'J':
arg_reg += 2;
break;
default:
arg_reg += 1;
break;
}
}
if (it.HasNext()) {
LOG(ERROR) << "invalid stream - problem with parameter iterator in " << GetLocation();
return;
}
for (;;) {
uint8_t opcode = *stream++;
uint16_t reg;
uint16_t name_idx;
uint16_t descriptor_idx;
uint16_t signature_idx = 0;
switch (opcode) {
case DBG_END_SEQUENCE:
return;
case DBG_ADVANCE_PC:
address += DecodeUnsignedLeb128(&stream);
break;
case DBG_ADVANCE_LINE:
line += DecodeSignedLeb128(&stream);
break;
case DBG_START_LOCAL:
case DBG_START_LOCAL_EXTENDED:
reg = DecodeUnsignedLeb128(&stream);
if (reg > code_item->registers_size_) {
LOG(ERROR) << "invalid stream - reg > reg size (" << reg << " > "
<< code_item->registers_size_ << ") in " << GetLocation();
return;
}
name_idx = DecodeUnsignedLeb128P1(&stream);
descriptor_idx = DecodeUnsignedLeb128P1(&stream);
if (opcode == DBG_START_LOCAL_EXTENDED) {
signature_idx = DecodeUnsignedLeb128P1(&stream);
}
// Emit what was previously there, if anything
if (need_locals) {
InvokeLocalCbIfLive(context, reg, address, local_in_reg, local_cb);
local_in_reg[reg].name_ = StringDataByIdx(name_idx);
local_in_reg[reg].descriptor_ = StringByTypeIdx(descriptor_idx);
if (opcode == DBG_START_LOCAL_EXTENDED) {
local_in_reg[reg].signature_ = StringDataByIdx(signature_idx);
}
local_in_reg[reg].start_address_ = address;
local_in_reg[reg].is_live_ = true;
}
break;
case DBG_END_LOCAL:
reg = DecodeUnsignedLeb128(&stream);
if (reg > code_item->registers_size_) {
LOG(ERROR) << "invalid stream - reg > reg size (" << reg << " > "
<< code_item->registers_size_ << ") in " << GetLocation();
return;
}
if (need_locals) {
InvokeLocalCbIfLive(context, reg, address, local_in_reg, local_cb);
local_in_reg[reg].is_live_ = false;
}
break;
case DBG_RESTART_LOCAL:
reg = DecodeUnsignedLeb128(&stream);
if (reg > code_item->registers_size_) {
LOG(ERROR) << "invalid stream - reg > reg size (" << reg << " > "
<< code_item->registers_size_ << ") in " << GetLocation();
return;
}
if (need_locals) {
if (local_in_reg[reg].name_ == NULL || local_in_reg[reg].descriptor_ == NULL) {
LOG(ERROR) << "invalid stream - no name or descriptor in " << GetLocation();
return;
}
// If the register is live, the "restart" is superfluous,
// and we don't want to mess with the existing start address.
if (!local_in_reg[reg].is_live_) {
local_in_reg[reg].start_address_ = address;
local_in_reg[reg].is_live_ = true;
}
}
break;
case DBG_SET_PROLOGUE_END:
case DBG_SET_EPILOGUE_BEGIN:
case DBG_SET_FILE:
break;
default: {
int adjopcode = opcode - DBG_FIRST_SPECIAL;
address += adjopcode / DBG_LINE_RANGE;
line += DBG_LINE_BASE + (adjopcode % DBG_LINE_RANGE);
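// Worked example (ours): with the usual dex constants DBG_FIRST_SPECIAL = 0x0a,
// DBG_LINE_BASE = -4 and DBG_LINE_RANGE = 15, opcode 0x1e yields adjopcode 20,
// so the address advances by 20 / 15 = 1 and the line by -4 + (20 % 15) = 1.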
if (position_cb != NULL) {
if (position_cb(context, address, line)) {
// early exit
return;
}
}
break;
}
}
}
}
void DexFile::DecodeDebugInfo(const CodeItem* code_item, bool is_static, uint32_t method_idx,
DexDebugNewPositionCb position_cb, DexDebugNewLocalCb local_cb,
void* context) const {
const byte* stream = GetDebugInfoStream(code_item);
UniquePtr<LocalInfo[]> local_in_reg(local_cb != NULL ?
new LocalInfo[code_item->registers_size_] :
NULL);
if (stream != NULL) {
DecodeDebugInfo0(code_item, is_static, method_idx, position_cb, local_cb, context, stream, &local_in_reg[0]);
}
for (int reg = 0; reg < code_item->registers_size_; reg++) {
InvokeLocalCbIfLive(context, reg, code_item->insns_size_in_code_units_, &local_in_reg[0], local_cb);
}
}
bool DexFile::LineNumForPcCb(void* raw_context, uint32_t address, uint32_t line_num) {
LineNumFromPcContext* context = reinterpret_cast<LineNumFromPcContext*>(raw_context);
// We know that this callback will be called in
// ascending address order, so keep going until we find
// a match or we've just gone past it.
if (address > context->address_) {
// The line number from the previous positions callback
// will be the final result.
return true;
} else {
context->line_num_ = line_num;
return address == context->address_;
}
}
// Decodes the header section from the class data bytes.
void ClassDataItemIterator::ReadClassDataHeader() {
CHECK(ptr_pos_ != NULL);
header_.static_fields_size_ = DecodeUnsignedLeb128(&ptr_pos_);
header_.instance_fields_size_ = DecodeUnsignedLeb128(&ptr_pos_);
header_.direct_methods_size_ = DecodeUnsignedLeb128(&ptr_pos_);
header_.virtual_methods_size_ = DecodeUnsignedLeb128(&ptr_pos_);
}
void ClassDataItemIterator::ReadClassDataField() {
field_.field_idx_delta_ = DecodeUnsignedLeb128(&ptr_pos_);
field_.access_flags_ = DecodeUnsignedLeb128(&ptr_pos_);
if (last_idx_ != 0 && field_.field_idx_delta_ == 0) {
LOG(WARNING) << "Duplicate field " << PrettyField(GetMemberIndex(), dex_file_)
<< " in " << dex_file_.GetLocation();
}
}
void ClassDataItemIterator::ReadClassDataMethod() {
method_.method_idx_delta_ = DecodeUnsignedLeb128(&ptr_pos_);
method_.access_flags_ = DecodeUnsignedLeb128(&ptr_pos_);
method_.code_off_ = DecodeUnsignedLeb128(&ptr_pos_);
if (last_idx_ != 0 && method_.method_idx_delta_ == 0) {
LOG(WARNING) << "Duplicate method " << PrettyMethod(GetMemberIndex(), dex_file_)
<< " in " << dex_file_.GetLocation();
}
}
// Read a signed integer. "zwidth" is the zero-based byte count.
static int32_t ReadSignedInt(const byte* ptr, int zwidth) {
int32_t val = 0;
for (int i = zwidth; i >= 0; --i) {
val = ((uint32_t)val >> 8) | (((int32_t)*ptr++) << 24);
}
val >>= (3 - zwidth) * 8;
return val;
}
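// Worked example (illustrative): for zwidth == 0 and a single byte 0x80, the
// loop leaves 0x80 in the most significant byte and the arithmetic shift by
// (3 - 0) * 8 == 24 bits sign-extends it, yielding -128.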
// Read an unsigned integer. "zwidth" is the zero-based byte count,
// "fill_on_right" indicates which side we want to zero-fill from.
static uint32_t ReadUnsignedInt(const byte* ptr, int zwidth, bool fill_on_right) {
uint32_t val = 0;
if (!fill_on_right) {
for (int i = zwidth; i >= 0; --i) {
val = (val >> 8) | (((uint32_t)*ptr++) << 24);
}
val >>= (3 - zwidth) * 8;
} else {
for (int i = zwidth; i >= 0; --i) {
val = (val >> 8) | (((uint32_t)*ptr++) << 24);
}
}
return val;
}
// Read a signed long. "zwidth" is the zero-based byte count.
static int64_t ReadSignedLong(const byte* ptr, int zwidth) {
int64_t val = 0;
for (int i = zwidth; i >= 0; --i) {
val = ((uint64_t)val >> 8) | (((int64_t)*ptr++) << 56);
}
val >>= (7 - zwidth) * 8;
return val;
}
// Read an unsigned long. "zwidth" is the zero-based byte count,
// "fill_on_right" indicates which side we want to zero-fill from.
static uint64_t ReadUnsignedLong(const byte* ptr, int zwidth, bool fill_on_right) {
uint64_t val = 0;
if (!fill_on_right) {
for (int i = zwidth; i >= 0; --i) {
val = (val >> 8) | (((uint64_t)*ptr++) << 56);
}
val >>= (7 - zwidth) * 8;
} else {
for (int i = zwidth; i >= 0; --i) {
val = (val >> 8) | (((uint64_t)*ptr++) << 56);
}
}
return val;
}
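// Note (explanatory): callers pass fill_on_right == true for kFloat and kDouble
// below because the dex format stores floating-point bit patterns zero-extended
// to the right (i.e. in the most significant bytes), so no final down-shift is
// applied in that branch.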
EncodedStaticFieldValueIterator::EncodedStaticFieldValueIterator(const DexFile& dex_file,
mirror::DexCache* dex_cache,
mirror::ClassLoader* class_loader,
ClassLinker* linker,
const DexFile::ClassDef& class_def)
: dex_file_(dex_file), dex_cache_(dex_cache), class_loader_(class_loader), linker_(linker),
array_size_(), pos_(-1), type_(kByte) {
ptr_ = dex_file.GetEncodedStaticFieldValuesArray(class_def);
if (ptr_ == NULL) {
array_size_ = 0;
} else {
array_size_ = DecodeUnsignedLeb128(&ptr_);
}
if (array_size_ > 0) {
Next();
}
}
void EncodedStaticFieldValueIterator::Next() {
pos_++;
if (pos_ >= array_size_) {
return;
}
byte value_type = *ptr_++;
byte value_arg = value_type >> kEncodedValueArgShift;
size_t width = value_arg + 1; // assume and correct later
type_ = static_cast<ValueType>(value_type & kEncodedValueTypeMask);
switch (type_) {
case kBoolean:
jval_.i = (value_arg != 0) ? 1 : 0;
width = 0;
break;
case kByte:
jval_.i = ReadSignedInt(ptr_, value_arg);
CHECK(IsInt(8, jval_.i));
break;
case kShort:
jval_.i = ReadSignedInt(ptr_, value_arg);
CHECK(IsInt(16, jval_.i));
break;
case kChar:
jval_.i = ReadUnsignedInt(ptr_, value_arg, false);
CHECK(IsUint(16, jval_.i));
break;
case kInt:
jval_.i = ReadSignedInt(ptr_, value_arg);
break;
case kLong:
jval_.j = ReadSignedLong(ptr_, value_arg);
break;
case kFloat:
jval_.i = ReadUnsignedInt(ptr_, value_arg, true);
break;
case kDouble:
jval_.j = ReadUnsignedLong(ptr_, value_arg, true);
break;
case kString:
case kType:
jval_.i = ReadUnsignedInt(ptr_, value_arg, false);
break;
case kField:
case kMethod:
case kEnum:
case kArray:
case kAnnotation:
UNIMPLEMENTED(FATAL) << ": type " << type_;
break;
case kNull:
jval_.l = NULL;
width = 0;
break;
default:
LOG(FATAL) << "Unreached";
}
ptr_ += width;
}
void EncodedStaticFieldValueIterator::ReadValueToField(mirror::ArtField* field) const {
switch (type_) {
case kBoolean: field->SetBoolean(field->GetDeclaringClass(), jval_.z); break;
case kByte: field->SetByte(field->GetDeclaringClass(), jval_.b); break;
case kShort: field->SetShort(field->GetDeclaringClass(), jval_.s); break;
case kChar: field->SetChar(field->GetDeclaringClass(), jval_.c); break;
case kInt: field->SetInt(field->GetDeclaringClass(), jval_.i); break;
case kLong: field->SetLong(field->GetDeclaringClass(), jval_.j); break;
case kFloat: field->SetFloat(field->GetDeclaringClass(), jval_.f); break;
case kDouble: field->SetDouble(field->GetDeclaringClass(), jval_.d); break;
case kNull: field->SetObject(field->GetDeclaringClass(), NULL); break;
case kString: {
mirror::String* resolved = linker_->ResolveString(dex_file_, jval_.i, dex_cache_);
field->SetObject(field->GetDeclaringClass(), resolved);
break;
}
case kType: {
mirror::Class* resolved = linker_->ResolveType(dex_file_, jval_.i, dex_cache_, class_loader_);
field->SetObject(field->GetDeclaringClass(), resolved);
break;
}
default: UNIMPLEMENTED(FATAL) << ": type " << type_;
}
}
CatchHandlerIterator::CatchHandlerIterator(const DexFile::CodeItem& code_item, uint32_t address) {
handler_.address_ = -1;
int32_t offset = -1;
// Short-circuit the overwhelmingly common cases.
switch (code_item.tries_size_) {
case 0:
break;
case 1: {
const DexFile::TryItem* tries = DexFile::GetTryItems(code_item, 0);
uint32_t start = tries->start_addr_;
if (address >= start) {
uint32_t end = start + tries->insn_count_;
if (address < end) {
offset = tries->handler_off_;
}
}
break;
}
default:
offset = DexFile::FindCatchHandlerOffset(code_item, address);
}
Init(code_item, offset);
}
CatchHandlerIterator::CatchHandlerIterator(const DexFile::CodeItem& code_item,
const DexFile::TryItem& try_item) {
handler_.address_ = -1;
Init(code_item, try_item.handler_off_);
}
void CatchHandlerIterator::Init(const DexFile::CodeItem& code_item,
int32_t offset) {
if (offset >= 0) {
Init(DexFile::GetCatchHandlerData(code_item, offset));
} else {
// Not found, initialize as empty
current_data_ = NULL;
remaining_count_ = -1;
catch_all_ = false;
DCHECK(!HasNext());
}
}
void CatchHandlerIterator::Init(const byte* handler_data) {
current_data_ = handler_data;
remaining_count_ = DecodeSignedLeb128(&current_data_);
// If remaining_count_ is non-positive, then it is the negative of
// the number of catch types, and the catches are followed by a
// catch-all handler.
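// For example, a decoded size of -2 means two typed handlers followed by one catch-all.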
if (remaining_count_ <= 0) {
catch_all_ = true;
remaining_count_ = -remaining_count_;
} else {
catch_all_ = false;
}
Next();
}
void CatchHandlerIterator::Next() {
if (remaining_count_ > 0) {
handler_.type_idx_ = DecodeUnsignedLeb128(&current_data_);
handler_.address_ = DecodeUnsignedLeb128(&current_data_);
remaining_count_--;
return;
}
if (catch_all_) {
handler_.type_idx_ = DexFile::kDexNoIndex16;
handler_.address_ = DecodeUnsignedLeb128(&current_data_);
catch_all_ = false;
return;
}
// no more handler
remaining_count_ = -1;
}
} // namespace art<|fim▁end|> | return false;
}
c = signature[offset]; |
<|file_name|>cs_effects.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
from GSettingsWidgets import *
from ChooserButtonWidgets import TweenChooserButton, EffectChooserButton
EFFECT_SETS = {
"cinnamon": ("traditional", "traditional", "traditional", "none", "none", "none"),
"scale": ("scale", "scale", "scale", "scale", "scale", "scale"),
"fade": ("fade", "fade", "fade", "scale", "scale", "scale"),
"blend": ("blend", "blend", "blend", "scale", "scale", "scale"),
"move": ("move", "move", "move", "scale", "scale", "scale"),
"flyUp": ("flyUp", "flyDown", "flyDown", "scale", "scale", "scale"),
"flyDown": ("flyDown", "flyUp", "flyUp", "scale", "scale", "scale"),
"default": ("scale", "scale", "none", "none", "none", "none")
}
TRANSITIONS_SETS = {
"cinnamon": ("easeOutQuad", "easeOutQuad", "easeInQuad", "easeInExpo", "easeNone", "easeInQuad"),
"normal": ("easeOutSine", "easeInBack", "easeInSine", "easeInBack", "easeOutBounce", "easeInBack"),
"extra": ("easeOutElastic", "easeOutBounce", "easeOutExpo", "easeInExpo", "easeOutElastic", "easeInExpo"),
"fade": ("easeOutQuart", "easeInQuart", "easeInQuart", "easeInBack", "easeOutBounce", "easeInBack")
}
TIME_SETS = {
"cinnamon": (175, 175, 200, 100, 100, 100),
"slow": (400, 400, 400, 100, 100, 100),
"normal": (250, 250, 250, 100, 100, 100),
"fast": (100, 100, 100, 100, 100, 100),
"default": (250, 250, 150, 400, 400, 400)
}
COMBINATIONS = {
# name effect transition time
"cinnamon": ("cinnamon", "cinnamon", "cinnamon"),
"scale": ("scale", "normal", "normal"),
"fancyScale": ("scale", "extra", "slow"),
"fade": ("fade", "fade", "normal"),
"blend": ("blend", "fade", "normal"),
"move": ("move", "normal", "fast"),
"flyUp": ("flyUp", "normal", "fast"),
"flyDown": ("flyDown", "normal", "fast"),
#for previous versions
"default": ("default", "normal", "default")
}
OPTIONS = (
("cinnamon", _("Cinnamon")),
("scale", _("Scale")),
("fancyScale", _("Fancy Scale")),
("fade", _("Fade")),
("blend", _("Blend")),
("move", _("Move")),
("flyUp", _("Fly up, down")),
("flyDown", _("Fly down, up")),
#for previous versions
("default", _("Default"))
)
TYPES = ("map", "close", "minimize", "maximize", "unmaximize", "tile")
SCHEMA = "org.cinnamon"
DEP_PATH = "org.cinnamon/desktop-effects"
KEY_TEMPLATE = "desktop-effects-%s-%s"
class GSettingsTweenChooserButton(TweenChooserButton, CSGSettingsBackend):
def __init__(self, schema, key, dep_key):
self.key = key
self.bind_prop = "tween"
self.bind_dir = Gio.SettingsBindFlags.DEFAULT
self.bind_object = self
if schema not in settings_objects.keys():
settings_objects[schema] = Gio.Settings.new(schema)
self.settings = settings_objects[schema]
super(GSettingsTweenChooserButton, self).__init__()
self.bind_settings()
class GSettingsEffectChooserButton(EffectChooserButton, CSGSettingsBackend):
def __init__(self, schema, key, dep_key, options):
self.key = key
self.bind_prop = "effect"
self.bind_dir = Gio.SettingsBindFlags.DEFAULT
self.bind_object = self
if schema not in settings_objects.keys():
settings_objects[schema] = Gio.Settings.new(schema)
self.settings = settings_objects[schema]
super(GSettingsEffectChooserButton, self).__init__(options)
self.bind_settings()
class Module:
name = "effects"
category = "appear"
comment = _("Control Cinnamon visual effects.")
def __init__(self, content_box):
keywords = _("effects, fancy, window")
sidePage = SidePage(_("Effects"), "cs-desktop-effects", keywords, content_box, module=self)
self.sidePage = sidePage
def on_module_selected(self):
if not self.loaded:
print "Loading Effects module"
self.sidePage.stack = SettingsStack()
self.sidePage.add_widget(self.sidePage.stack)
self.schema = Gio.Settings(SCHEMA)
self.effect_sets = {}
for name, sets in COMBINATIONS.items():
self.effect_sets[name] = (EFFECT_SETS[sets[0]], TRANSITIONS_SETS[sets[1]], TIME_SETS[sets[2]])
# Enable effects
page = SettingsPage()
self.sidePage.stack.add_titled(page, "effects", _("Enable effects"))
settings = page.add_section(_("Enable Effects"))
widget = GSettingsSwitch(_("Window effects"), "org.cinnamon", "desktop-effects")
settings.add_row(widget)
widget = GSettingsSwitch(_("Effects on dialog boxes"), "org.cinnamon", "desktop-effects-on-dialogs")
settings.add_reveal_row(widget, "org.cinnamon", "desktop-effects")
widget = GSettingsSwitch(_("Effects on menus"), "org.cinnamon", "desktop-effects-on-menus")
settings.add_reveal_row(widget, "org.cinnamon", "desktop-effects")
self.chooser = GSettingsComboBox(_("Effects style"), "org.cinnamon", "desktop-effects-style", OPTIONS)
self.chooser.content_widget.connect("changed", self.on_value_changed)
settings.add_reveal_row(self.chooser, "org.cinnamon", "desktop-effects")
widget = GSettingsSwitch(_("Fade effect on Cinnamon scrollboxes (like the Menu application list)"), "org.cinnamon", "enable-vfade")
settings.add_row(widget)
widget = GSettingsSwitch(_("Session startup animation"), "org.cinnamon", "startup-animation")
settings.add_row(widget)
if Gtk.get_major_version() == 3 and Gtk.get_minor_version() >= 16:
widget = GSettingsSwitch(_("Overlay scroll bars (logout required)"), "org.cinnamon.desktop.interface", "gtk-overlay-scrollbars")
settings.add_row(widget)
self.schema.connect("changed::desktop-effects", self.on_desktop_effects_enabled_changed)
# Customize
page = SettingsPage()
self.sidePage.stack.add_titled(page, "customize", _("Customize"))
box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
label = Gtk.Label()
label.set_markup("<b>%s</b>" % _("Customize settings"))
box.pack_start(label, False, False, 0)
self.custom_switch = Gtk.Switch(active = self.is_custom())
box.pack_end(self.custom_switch, False, False, 0)
self.custom_switch.connect("notify::active", self.update_effects)
page.add(box)
self.revealer = Gtk.Revealer()
self.revealer.set_transition_type(Gtk.RevealerTransitionType.SLIDE_DOWN)
self.revealer.set_transition_duration(150)
page.add(self.revealer)
settings = SettingsBox(_("Effect"))
self.revealer.add(settings)
self.size_group = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
effects = ["none", "scale", "fade", "blend", "move", "flyUp", "flyDown", "traditional"]
# MAPPING WINDOWS
widget = self.make_effect_group(_("Mapping windows"), "map", effects)
settings.add_row(widget)
# CLOSING WINDOWS
widget = self.make_effect_group(_("Closing windows"), "close", effects)
settings.add_row(widget)
# MINIMIZING WINDOWS
widget = self.make_effect_group(_("Minimizing windows"), "minimize", effects)
settings.add_row(widget)
# MAXIMIZING WINDOWS
# effects = [["none", _("None")], ["scale", _("Scale")]]
widget = self.make_effect_group(_("Maximizing windows"), "maximize")
settings.add_row(widget)<|fim▁hole|>
# TILING WINDOWS
widget = self.make_effect_group(_("Tiling and snapping windows"), "tile")
settings.add_row(widget)
self.update_effects(self.custom_switch, None)
def make_effect_group(self, group_label, key, effects=None):
tmin, tmax, tstep, tdefault = (0, 2000, 50, 200)
row = SettingsWidget()
row.set_spacing(5)
label = Gtk.Label()
label.set_markup(group_label)
label.props.xalign = 0.0
row.pack_start(label, False, False, 0)
label = Gtk.Label(_("ms"))
row.pack_end(label, False, False, 0)
effect = GSettingsEffectChooserButton(SCHEMA, KEY_TEMPLATE % (key, "effect"), DEP_PATH, effects)
self.size_group.add_widget(effect)
tween = GSettingsTweenChooserButton(SCHEMA, KEY_TEMPLATE % (key, "transition"), DEP_PATH)
self.size_group.add_widget(tween)
time = GSettingsSpinButton("", SCHEMA, KEY_TEMPLATE % (key, "time"), dep_key=DEP_PATH, mini=tmin, maxi=tmax, step=tstep, page=tdefault)
time.set_border_width(0)
time.set_margin_right(0)
time.set_margin_left(0)
time.set_spacing(0)
row.pack_end(time, False, False, 0)
row.pack_end(tween, False, False, 0)
row.pack_end(effect, False, False, 0)
return row
def is_custom(self):
effects = []
transitions = []
times = []
for i in TYPES:
effects.append(self.schema.get_string(KEY_TEMPLATE % (i, "effect")))
transitions.append(self.schema.get_string(KEY_TEMPLATE % (i, "transition")))
times.append(self.schema.get_int(KEY_TEMPLATE % (i, "time")))
value = (tuple(effects), tuple(transitions), tuple(times))
return value != self.effect_sets[self.chooser.value]
def on_value_changed(self, widget):
value = self.effect_sets[self.schema.get_string("desktop-effects-style")]
j = 0
for i in TYPES:
self.schema.set_string(KEY_TEMPLATE % (i, "effect"), value[0][j])
self.schema.set_string(KEY_TEMPLATE % (i, "transition"), value[1][j])
self.schema.set_int(KEY_TEMPLATE % (i, "time"), value[2][j])
j += 1
def update_effects(self, switch, gparam):
active = switch.get_active()
self.revealer.set_reveal_child(active)
#when unchecking the checkbutton, reset the values
if not active:
self.on_value_changed(self.chooser)
def on_desktop_effects_enabled_changed(self, schema, key):
active = schema.get_boolean(key)
if not active and schema.get_boolean("desktop-effects-on-dialogs"):
schema.set_boolean("desktop-effects-on-dialogs", False)
self.update_effects(self.custom_switch, None)<|fim▁end|> |
# UNMAXIMIZING WINDOWS
widget = self.make_effect_group(_("Unmaximizing windows"), "unmaximize")
settings.add_row(widget) |
<|file_name|>test_performance_parser.py<|end_file_name|><|fim▁begin|>import json
from treeherder.log_parser.parsers import (EmptyPerformanceData,
PerformanceParser)
def test_performance_log_parsing_malformed_perfherder_data():
"""
If we have malformed perfherder data lines, we should just ignore
them and still be able to parse the valid ones<|fim▁hole|>
# invalid json
parser.parse_line("PERFHERDER_DATA: {oh noes i am not valid json}", 1)
try:
# Empty performance data
parser.parse_line("PERFHERDER_DATA: {}", 2)
except EmptyPerformanceData:
pass
valid_perfherder_data = {
"framework": {"name": "talos"}, "suites": [{
"name": "basic_compositor_video",
"subtests": [{
"name": "240p.120fps.mp4_scale_fullscreen_startup",
"value": 1234
}]
}]
}
parser.parse_line('PERFHERDER_DATA: {}'.format(
json.dumps(valid_perfherder_data)), 3)
assert parser.get_artifact() == [valid_perfherder_data]<|fim▁end|> | """
parser = PerformanceParser() |
<|file_name|>DeadEndLevel.java<|end_file_name|><|fim▁begin|>package com.github.epd.sprout.levels;
import com.github.epd.sprout.Assets;
import com.github.epd.sprout.messages.Messages;
import java.util.Arrays;
public class DeadEndLevel extends Level {
private static final int SIZE = 5;
{
color1 = 0x534f3e;
color2 = 0xb9d661;
}
@Override
public String tilesTex() {
return Assets.TILES_CAVES;
}
@Override
public String waterTex() {
return Assets.WATER_HALLS;
}
@Override
protected boolean build() {
setSize(7, 7);
Arrays.fill(map, Terrain.WALL);
for (int i = 2; i < SIZE; i++) {
for (int j = 2; j < SIZE; j++) {
map[i * getWidth() + j] = Terrain.EMPTY;
}
}
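// Ring the empty centre with a one-tile border of water just inside the outer wall.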
for (int i = 1; i <= SIZE; i++) {
map[getWidth() + i] = map[getWidth() * SIZE + i] = map[getWidth() * i + 1] = map[getWidth()
* i + SIZE] = Terrain.WATER;
}
entrance = SIZE * getWidth() + SIZE / 2 + 1;
map[entrance] = Terrain.ENTRANCE;
exit = 0;
return true;<|fim▁hole|> }
@Override
public String tileName(int tile) {
switch (tile) {
case Terrain.WATER:
return Messages.get(CityLevel.class, "water_name");
case Terrain.HIGH_GRASS:
return Messages.get(CityLevel.class, "high_grass_name");
default:
return super.tileName(tile);
}
}
@Override
protected void createMobs() {
}
@Override
protected void createItems() {
}
@Override
public int randomRespawnCell() {
return entrance - getWidth();
}
}<|fim▁end|> | |
<|file_name|>Rectangle.js<|end_file_name|><|fim▁begin|>import { rectangle } from 'leaflet';<|fim▁hole|>
import boundsType from './types/bounds';
import Path from './Path';
export default class Rectangle extends Path {
static propTypes = {
bounds: boundsType.isRequired,
};
componentWillMount() {
super.componentWillMount();
const { bounds, map, ...props } = this.props;
this.leafletElement = rectangle(bounds, props);
}
componentDidUpdate(prevProps) {
if (this.props.bounds !== prevProps.bounds) {
this.leafletElement.setBounds(this.props.bounds);
}
this.setStyleIfChanged(prevProps, this.props);
}
}<|fim▁end|> | |
<|file_name|>eo.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'newpage', 'eo', {
toolbar: 'Nova Paĝo'
<|fim▁hole|><|fim▁end|> | } ); |
<|file_name|>database.py<|end_file_name|><|fim▁begin|>import datetime
from sqlalchemy import create_engine, ForeignKey, Column, Integer, String, Text, Date, Table, Boolean
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base
from . import app
from flask_login import UserMixin
engine = create_engine(app.config["SQLALCHEMY_DATABASE_URI"])
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()
class Fighter(Base):
__tablename__ = "fighters"
id = Column(Integer, primary_key=True)
first_name = Column(String(1024), nullable=False)
last_name = Column(String(1024), nullable=False)
nickname = Column(String(1024))
gender = Column(String(128), nullable=False)
dob = Column(Date)
age = Column(Integer)
promotion = Column(String(1024), nullable=False)
profile_image = Column(String(1024))
right_full = Column(String(1024))
left_full = Column(String(1024))
height = Column(Integer)
weight = Column(String(128), nullable=False)
win = Column(Integer, nullable=False)
loss = Column(Integer, nullable=False)
draw = Column(Integer)
no_contest = Column(Integer)
def as_dictionary(self):
fighter = {
"id": self.id,
"first_name": self.first_name,
"last_name": self.last_name,
"nickname": self.nickname,
"gender": self.gender,
"age": self.age,
"promotion": self.promotion,
"profile_image": self.profile_image,
"right_full": self.right_full,
"left_full": self.left_full,
"height": self.height,
"weight": self.weight,
"win": self.win,
"loss": self.loss,
"draw": self.draw,
"no_contest": self.no_contest,
}
return fighter
class User(Base, UserMixin):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
email = Column(String(1024), unique=True, nullable=False)
password = Column(String(128), nullable=False)
user_history = relationship("History", backref="user")
class History(Base):
__tablename__ = "history"
id = Column(Integer, primary_key=True)
fight_date = Column(String, nullable=False)
has_occured = Column(Boolean, nullable=False)
red_corner = Column(String(1024), nullable=False)
blue_corner = Column(String(1024), nullable=False)
winner = Column(String(1024))
end_round = Column(String, nullable=False)
end_time = Column(String, nullable=False)
method = Column(String, nullable=False)
visible = Column(Boolean, nullable=False)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
def as_dictionary(self):
results = {
"id": self.id,
"fight_date": self.fight_date,
"has_occured": self.has_occured,
"red_corner": self.red_corner,
"blue_corner": self.blue_corner,
"winner": self.winner,
"end_round": self.end_round,
"end_time": self.end_time,
"method": self.method,<|fim▁hole|>class Event(Base):
__tablename__ = "events"
id = Column(Integer, primary_key=True)
event_date = Column(String(256))
base_title = Column(String(1024), nullable=False)
title_tag_line = Column(String(1024))
#feature_image = Column(String(1024))
arena = Column(String(1024))
location = Column(String(1024))
event_id = Column(Integer)
def as_dictionary(self):
event = {
"id": self.id,
"event_date": self.event_date,
"base_title": self.base_title,
"title_tag_line": self.title_tag_line,
#"feature_image": self.feature_image,
"arena": self.arena,
"location": self.location,
"event_id": self.event_id
}
return event
Base.metadata.create_all(engine)<|fim▁end|> | "user_id": self.user_id,
}
return results
|
<|file_name|>datapub.py<|end_file_name|><|fim▁begin|>"""Publishing native (typically pickled) objects.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from traitlets.config import Configurable
from ipykernel.inprocess.socket import SocketABC
from traitlets import Instance, Dict, CBytes
from ipykernel.jsonutil import json_clean
from ipykernel.serialize import serialize_object
from jupyter_client.session import Session, extract_header
class ZMQDataPublisher(Configurable):
topic = CBytes(b'datapub')
session = Instance(Session, allow_none=True)
pub_socket = Instance(SocketABC, allow_none=True)
parent_header = Dict({})
def set_parent(self, parent):
"""Set the parent for outbound messages."""
self.parent_header = extract_header(parent)
def publish_data(self, data):
"""publish a data_message on the IOPub channel
Parameters
----------
data : dict
The data to be published. Think of it as a namespace.
"""<|fim▁hole|> buffer_threshold=session.buffer_threshold,
item_threshold=session.item_threshold,
)
content = json_clean(dict(keys=list(data.keys())))
session.send(self.pub_socket, 'data_message', content=content,
parent=self.parent_header,
buffers=buffers,
ident=self.topic,
)
def publish_data(data):
"""publish a data_message on the IOPub channel
Parameters
----------
data : dict
The data to be published. Think of it as a namespace.
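Example (illustrative sketch; must be called from inside a running
IPython kernel process):
publish_data({'answer': 42})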
"""
from ipykernel.zmqshell import ZMQInteractiveShell
ZMQInteractiveShell.instance().data_pub.publish_data(data)<|fim▁end|> | session = self.session
buffers = serialize_object(data, |
<|file_name|>cachekeys.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json<|fim▁hole|>
from django.utils import six
URL_LIST_CACHE = 'powerpages:url_list'
SITEMAP_CONTENT = 'powerpages:sitemap'
def get_cache_name(prefix, name):
"""
Cache name constructor. Uses the same methods as django cache system
Examples:
*) prefix=profile.cache, name=<requestuser.id>
*) prefix=template.cache.sidebar, name=<requestuser.id>
"""
return '{0}.{1}'.format(
prefix, hashlib.md5(six.text_type(name).encode('utf-8')).hexdigest()
)
def template_source(page_pk):
"""Create cache key for page template"""
return 'powerpages:template:{0}'.format(page_pk)
def rendered_source_for_user(page_pk, user_id):
"""Create cache key for rendered page source based on current user"""
return 'powerpages:rendered_source_user:{0}:{1}'.format(page_pk, user_id)
def rendered_source_for_lang(page_pk, lang):
"""Create cache key for rendered page source based on current language"""
return 'powerpages:rendered_source_lang:{0}:{1}'.format(page_pk, lang)
def url_cache(name, *args, **kwargs):
"""
Creates cache key for url of CMS page or standard Django URL
based on hashed serialized name with optional *args and **kwargs
"""
serialized_url = json.dumps([name, args, kwargs], sort_keys=True)
return get_cache_name('powerpages:urls', serialized_url)<|fim▁end|> | import hashlib |
<|file_name|>Test.java<|end_file_name|><|fim▁begin|>package main.java.test;
import java.util.Optional;
import main.java.modul.Catalog;
import main.java.modul.Categorie;
import main.java.modul.Oferta;
import main.java.modul.Produs;
import main.java.service.Log;
import main.java.util.Printer;
import main.java.util.Sort;
import main.java.util.SortUtil1;
import main.java.util.Sorter;
import main.java.util.Streams;
import main.java.util.TVAprice;
/**
*
* @author mancim
*/
public class Test {
public static void main(String[] args) {
addProduse(Catalog.getInstanec());
//Sort.orderPrice7(Catalog.getInstanec().getProduse());
//Sort.orderPrice8(Catalog.getInstanec().getProduse(), SortUtil1::Sort);
//Sorter s = new Sorter();
//s.Sort(Catalog.getInstanec().getProduse());
new Oferta(1231, 20);
new Oferta(1712, 5);
new Oferta(2334, 10);
//Streams.L();
//Streams.M();
//Streams.N();
//Streams.O();
//Streams.P();
//Streams.Q();
//Streams.R();
//Streams.S();
//Streams.T();
//Streams.U();
//Streams.V();
Catalog.getInstanec().getProduse().stream().forEach(Log.logger::trace);
//System.out.println(TVAprice.addTVARON(100.0, 19.0));
//System.out.println(TVAprice.addTVAEURO(100.0, 24.0));
//System.out.println(TVAprice.addTVARON(100.0, 19.0));
//System.out.println(TVAprice.TVAEURO(100.0, 24.0));
}
public static void addProduse(Catalog catalog) {
catalog.addProdus(new Produs(1712, "Electrocasnic 1", 234.99, Categorie.ELECTROCASNICE));
catalog.addProdus(new Produs(1231, "Electrocasnic 2", 4234.99, Categorie.ELECTROCASNICE));
catalog.addProdus(new Produs(1773, "Electrocasnic 3", 99.99, Categorie.ELECTROCASNICE));
catalog.addProdus(new Produs(1221, "Electrocasnic 4", 56.99, Categorie.ELECTROCASNICE));
catalog.addProdus(new Produs(1956, "Electrocasnic 5", 233.99, Categorie.ELECTROCASNICE));
catalog.addProdus(new Produs(1918, "Electrocasnic 6", 564.99, Categorie.ELECTROCASNICE));
catalog.addProdus(new Produs(2334, "Telefon 1", 2189.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2231, "Telefon 2", 1449.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2456, "Telefon 3", 649.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2528, "Telefon 4", 899.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2445, "Telefon 5", 467.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2149, "Telefon 6", 355.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2859, "Telefon 7", 3578.99, Categorie.TELEFON));
catalog.addProdus(new Produs(2995, "Telefon 8", 344.99, Categorie.TELEFON));
catalog.addProdus(new Produs(3412, "Calculator 1", 2189.99, Categorie.CALCULATOR));
catalog.addProdus(new Produs(3419, "Calculator 2", 2289.99, Categorie.CALCULATOR));
catalog.addProdus(new Produs(3742, "Calculator 3", 999.99, Categorie.CALCULATOR));
catalog.addProdus(new Produs(3316, "Calculator 4", 1189.99, Categorie.CALCULATOR));
catalog.addProdus(new Produs(3123, "Calculator 5", 949.99, Categorie.CALCULATOR));
<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>EndpointConfig.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (C) 2017 Push Technology Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.pushtechnology.adapters.rest.model.v13;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.NonNull;
import lombok.ToString;
import lombok.Value;
/**
* Endpoint configuration. Version 13.
* <p>
* Description of a REST endpoint to poll.
*
* @author Push Technology Limited
*/
@Value
@Builder
@AllArgsConstructor
@ToString(of = "name")
public class EndpointConfig {
/**
* The name of the endpoint.
*/
@NonNull
String name;
/**
* The URL of the endpoint.
*/
@NonNull
String url;
/**
* The topic path to map the endpoint to. It is relative to the service
* topic path root.
*/
@NonNull
String topicPath;
/**
* The type of content produced by the endpoint.
* <p>
* Supports the values:
* <ul>
* <li>auto</li>
* <li>json</li>
* <li>application/json</li>
* <li>text/json</li>
* <li>string</li>
* <li>text/plain</li>
* <li>binary</li>
* <li>application/octet-stream</li>
* </ul>
*/
@NonNull
String produces;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>ContextualMenu.Checkmarks.Example.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import './ContextualMenuExample.scss';
export interface IContextualMenuMultiselectExampleState {
selection?: {
[key: string]: boolean;
};
isContextMenuVisible?: boolean;
}
export declare class ContextualMenuCheckmarksExample extends React.Component<any, IContextualMenuMultiselectExampleState> {
constructor();
render(): JSX.Element;
private _onToggleSelect(ev?, item?);
private _onClick(event);<|fim▁hole|><|fim▁end|> | private _onDismiss();
} |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># automate/server/user/views.py
#################
#### imports ####
#################
#from flask import render_template, Blueprint, url_for, \
# redirect, flash, request
#from flask_login import login_user, logout_user, login_required
#from automate.server import bcrypt, db
#from automate.server import db
#from automate.server.models import User
#from automate.server.user.forms import LoginForm, RegisterForm
################
#### config ####
################
#user_blueprint = Blueprint('user', __name__,)
################
#### routes ####
################
#@user_blueprint.route('/register', methods=['GET', 'POST'])
#def register():
# form = RegisterForm(request.form)
# if form.validate_on_submit():
# user = User(<|fim▁hole|># email=form.email.data,
# password=form.password.data
# )
# db.session.add(user)
# db.session.commit()
#
# login_user(user)
#
# flash('Thank you for registering.', 'success')
# return redirect(url_for("user.members"))
#
# return render_template('user/register.html', form=form)
#
#
#@user_blueprint.route('/login', methods=['GET', 'POST'])
#def login():
# form = LoginForm(request.form)
# if form.validate_on_submit():
# user = User.query.filter_by(email=form.email.data).first()
# if user:
# #if user and bcrypt.check_password_hash(
# # user.password, request.form['password']):
# # login_user(user)
# flash('You are logged in. Welcome!', 'success')
# return redirect(url_for('user.members'))
# else:
# flash('Invalid email and/or password.', 'danger')
# return render_template('user/login.html', form=form)
# return render_template('user/login.html', title='Please Login', form=form)
#
#
#@user_blueprint.route('/logout')
#@login_required
#def logout():
# logout_user()
# flash('You were logged out. Bye!', 'success')
# return redirect(url_for('main.home'))
#
#
#@user_blueprint.route('/members')
#@login_required
#def members():
# return render_template('user/members.html')
#<|fim▁end|> | |
<|file_name|>loop-break-cont-1.rs<|end_file_name|><|fim▁begin|>// run-pass
pub fn main() {
let _i = 0_usize;
loop {
break;
}
assert!(true);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>MockCodeGeneratorListItemTest.java<|end_file_name|><|fim▁begin|>package fr.jmini.asciidoctorj.converter.mockcode;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Collections;
import org.asciidoctor.ast.ListItem;
import org.junit.Test;
import fr.jmini.asciidoctorj.converter.code.CodeTestingUtility;
public class MockCodeGeneratorListItemTest {
@Test
public void testListItem() throws Exception {
ListItem mockListItem = createMock();
MockCodeGenerator generator = new MockCodeGenerator();
StringBuilder sb = new StringBuilder();
generator.createListItemCode(sb, mockListItem);
CodeTestingUtility.testGeneratedCode(sb.toString(), this.getClass());
}
// tag::generated-code[]
public ListItem createMock() {
ListItem mockListItem1 = mock(ListItem.class);
when(mockListItem1.getId()).thenReturn("id");
when(mockListItem1.getNodeName()).thenReturn("node-name");
when(mockListItem1.getParent()).thenReturn(null);
when(mockListItem1.getContext()).thenReturn(null);
when(mockListItem1.getDocument()).thenReturn(null);
when(mockListItem1.isInline()).thenReturn(false);
when(mockListItem1.isBlock()).thenReturn(false);
when(mockListItem1.getAttributes()).thenReturn(Collections.emptyMap());
when(mockListItem1.getRoles()).thenReturn(Collections.emptyList());
when(mockListItem1.isReftext()).thenReturn(false);
when(mockListItem1.getReftext()).thenReturn(null);
when(mockListItem1.getCaption()).thenReturn(null);
when(mockListItem1.getTitle()).thenReturn("T");
when(mockListItem1.getStyle()).thenReturn("S");
when(mockListItem1.getLevel()).thenReturn(2);
when(mockListItem1.getContentModel()).thenReturn(null);
when(mockListItem1.getSourceLocation()).thenReturn(null);
when(mockListItem1.getSubstitutions()).thenReturn(Collections.emptyList());
when(mockListItem1.getBlocks()).thenReturn(Collections.emptyList());
when(mockListItem1.getMarker()).thenReturn("m");<|fim▁hole|> }
// end::generated-code[]
}<|fim▁end|> | when(mockListItem1.getText()).thenReturn("t");
when(mockListItem1.getSource()).thenReturn("s");
when(mockListItem1.hasText()).thenReturn(true);
return mockListItem1; |
<|file_name|>15.2.3.6-3-114.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.6-3-114
description: >
Object.defineProperty - 'configurable' property in 'Attributes' is
a Boolean object (8.10.5 step 4.b)
---*/
var obj = {};
Object.defineProperty(obj, "property", {
configurable: new Boolean(true)
});
var beforeDeleted = obj.hasOwnProperty("property");
delete obj.property;
var afterDeleted = obj.hasOwnProperty("property");
assert.sameValue(beforeDeleted, true, 'beforeDeleted');<|fim▁hole|>assert.sameValue(afterDeleted, false, 'afterDeleted');<|fim▁end|> | |
<|file_name|>source-map.ts<|end_file_name|><|fim▁begin|>const maps = require('source-map-support');
<|fim▁hole|><|fim▁end|> | maps.install({environment: 'node'}); |
<|file_name|>ExtendedMetadataTest.java<|end_file_name|><|fim▁begin|>/*
* $Id$
*
* Copyright 2006 University of Dundee. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.server.itests.hibernate;
import java.util.Arrays;
import java.util.Set;
import ome.model.IAnnotated;
import ome.model.ILink;
import ome.model.IObject;
import ome.model.annotations.Annotation;
import ome.model.annotations.BasicAnnotation;
import ome.model.annotations.LongAnnotation;
import ome.model.containers.Dataset;
import ome.model.containers.DatasetImageLink;
import ome.model.containers.Project;
import ome.model.containers.ProjectDatasetLink;
import ome.model.core.Image;
import ome.model.core.Pixels;
import ome.model.display.RenderingDef;
import ome.model.meta.Experimenter;
import ome.server.itests.AbstractManagedContextTest;
import ome.testing.ObjectFactory;
import ome.tools.hibernate.ExtendedMetadata;
import org.hibernate.SessionFactory;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class ExtendedMetadataTest extends AbstractManagedContextTest {
ExtendedMetadata.Impl metadata;
@BeforeClass
public void init() throws Exception {
setUp();
metadata = new ExtendedMetadata.Impl();
metadata.setSessionFactory((SessionFactory)applicationContext.getBean("sessionFactory"));
tearDown();
}
@Test
public void testAnnotatedAreFound() throws Exception {
Set<Class<IAnnotated>> anns = metadata.getAnnotatableTypes();
assertTrue(anns.contains(Image.class));
assertTrue(anns.contains(Project.class));
// And several others
}
@Test
public void testAnnotationsAreFound() throws Exception {
Set<Class<Annotation>> anns = metadata.getAnnotationTypes();
assertTrue(anns.toString(), anns.contains(Annotation.class));
assertTrue(anns.toString(), anns.contains(BasicAnnotation.class));
assertTrue(anns.toString(), anns.contains(LongAnnotation.class));
// And several others
}
/**
* Where a superclass has a relationship to a class (Annotation to some link type),
* it is also necessary to be able to find the same relationship from a subclass
* (e.g. FileAnnotation).
*/
@Test
public void testLinkFromSubclassToSuperClassRel() {
assertNotNull(
metadata.getRelationship("ImageAnnotationLink", "FileAnnotation"));
}
/**
* For simplicity, the relationship map currently holds only the short
* class names. Here we are adding a test which checks for the full ones
* under "broken" to remember to re-evaluate.
*/
@Test(groups = {"broken","fixme"})
public void testAnnotatedAreFoundByFQN() throws Exception {
Set<Class<IAnnotated>> anns = metadata.getAnnotatableTypes();
assertTrue(anns.contains(Image.class));
assertTrue(anns.contains(Project.class));
// And several others
}
// ~ Locking
// =========================================================================
@Test
public void testProjectLocksDataset() throws Exception {
Project p = new Project();
Dataset d = new Dataset();
p.linkDataset(d);
ILink l = (ILink) p.collectDatasetLinks(null).iterator().next();
assertDoesntContain(metadata.getLockCandidates(p), d);
assertContains(metadata.getLockCandidates(l), d);
}
@Test
// Because Pixels does not have a reference to RenderingDef
public void testRenderingDefLocksPixels() throws Exception {
Pixels p = ObjectFactory.createPixelGraph(null);
RenderingDef r = ObjectFactory.createRenderingDef();
r.setPixels(p);
assertContains(metadata.getLockCandidates(r), p);
}
@Test(groups = "ticket:357")
// quirky because of defaultTag
// see https://trac.openmicroscopy.org/ome/ticket/357
public void testPixelsLocksImage() throws Exception {
Pixels p = ObjectFactory.createPixelGraph(null);
Image i = new Image();
i.setName("locking");
i.addPixels(p);
assertContains(metadata.getLockCandidates(p), i);
}
@Test
// omit locks for system types (TODO they shouldn't have permissions anyway)
public void testExperimenterDoesntGetLocked() throws Exception {
Experimenter e = new Experimenter();
Project p = new Project();
p.getDetails().setOwner(e);
assertDoesntContain(metadata.getLockCandidates(p), e);
}
@Test
public void testNoNulls() throws Exception {
Project p = new Project();
ProjectDatasetLink pdl = new ProjectDatasetLink();
pdl.link(p, null);
assertDoesntContain(metadata.getLockCandidates(pdl), null);
}
// ~ Unlocking
// =========================================================================
@Test
public void testProjectCanBeUnlockedFromDataset() throws Exception {
assertContains(metadata.getLockChecks(Project.class),
ProjectDatasetLink.class.getName(), "parent");
}
@Test
// Because Pixels does not have a reference to RenderingDef
public void testPixelsCanBeUnlockedFromRenderingDef() throws Exception {
assertContains(metadata.getLockChecks(Pixels.class), RenderingDef.class<|fim▁hole|> }
@Test(groups = "ticket:357")
// quirky because of defaultTag
// see https://trac.openmicroscopy.org/ome/ticket/357
public void testImageCanBeUnlockedFromPixels() throws Exception {
assertContains(metadata.getLockChecks(Image.class), Pixels.class
.getName(), "image");
}
// ~ Updating
// =========================================================================
@Test(groups = { "ticket:346", "broken" })
public void testCreateEventImmutable() throws Exception {
assertContains(metadata.getImmutableFields(Image.class),
"details.creationEvent");
}
// ~ Counting
// =========================================================================
@Test(groups = { "ticket:657" })
public void testCountQueriesAreCorrect() throws Exception {
assertEquals(metadata.getCountQuery(DatasetImageLink.CHILD), metadata
.getCountQuery(DatasetImageLink.CHILD),
"select target.child.id, count(target) "
+ "from ome.model.containers.DatasetImageLink target "
+ "group by target.child.id");
assertEquals(metadata.getCountQuery(Pixels.IMAGE), metadata
.getCountQuery(Pixels.IMAGE),
"select target.image.id, count(target) "
+ "from ome.model.core.Pixels target "
+ "group by target.image.id");
}
@Test(groups = { "ticket:657" })
public void testTargetTypes() throws Exception {
assertEquals(metadata.getTargetType(Pixels.IMAGE), Image.class);
assertEquals(metadata.getTargetType(DatasetImageLink.CHILD),
Image.class);
}
// ~ Relationships
// =========================================================================
@Test(groups = "ticket:2665")
public void testRelationships() {
String rel;
rel = metadata.getRelationship(Pixels.class.getSimpleName(), Image.class.getSimpleName());
assertEquals("image", rel);
rel = metadata.getRelationship(Image.class.getSimpleName(), Pixels.class.getSimpleName());
assertEquals("pixels", rel);
}
// ~ Helpers
// =========================================================================
private void assertContains(Object[] array, Object i) {
if (!contained(array, i)) {
fail(i + " not contained in " + Arrays.toString(array));
}
}
private void assertDoesntContain(IObject[] array, IObject i) {
if (contained(array, i)) {
fail(i + " contained in " + Arrays.toString(array));
}
}
private void assertContains(String[][] array, String t1, String t2) {
boolean contained = false;
for (int i = 0; i < array.length; i++) {
String[] test = array[i];
if (test[0].equals(t1) && test[1].equals(t2)) {
contained |= true;
}
}
assertTrue(contained);
}
private boolean contained(Object[] array, Object i) {
boolean contained = false;
for (Object object : array) {
if (i == null) {
if (object == null) {
contained = true;
}
} else {
if (i.equals(object)) {
contained = true;
}
}
}
return contained;
}
}<|fim▁end|> | .getName(), "pixels"); |
<|file_name|>kmeans.py<|end_file_name|><|fim▁begin|>from .estimator_base import *
class H2OKMeansEstimator(H2OEstimator):
def __init__(self, model_id=None, k=None, max_iterations=None,standardize=None,init=None,seed=None,
nfolds=None,fold_assignment=None, user_points=None,ignored_columns=None,
score_each_iteration=None, keep_cross_validation_predictions=None,
ignore_const_cols=None,checkpoint=None):
"""
Performs k-means clustering on an H2O dataset.
Parameters
----------
model_id : str, optional
The unique id assigned to the resulting model. If none is given, an id will
automatically be generated.
k : int
The number of clusters. Must be between 1 and 1e7 inclusive. k may be omitted
if the user specifies the initial centers in the init parameter. If k is not
omitted, in this case, then it should be equal to the number of user-specified
centers.
max_iterations : int
The maximum number of iterations allowed. Must be between 0 and 1e6 inclusive.
standardize : bool
Indicates whether the data should be standardized before running k-means.
init : str
A character string that selects the initial set of k cluster centers. Possible
values are
"Random": for random initialization,
"PlusPlus": for k-means plus initialization, or
"Furthest": for initialization at the furthest point from each successive
center.
Additionally, the user may specify the initial centers as a matrix,
data.frame, H2OFrame, or list of vectors. For matrices, data.frames,
and H2OFrames, each row of the respective structure is an initial center. For
lists of vectors, each vector is an initial center.
seed : int, optional
Random seed used to initialize the cluster centroids.
nfolds : int, optional
Number of folds for cross-validation. If nfolds >= 2, then validation must
remain empty.
fold_assignment : str
Cross-validation fold assignment scheme, if fold_column is not specified
Must be "AUTO", "Random" or "Modulo"
:return: An instance of H2OClusteringModel.
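Example (illustrative sketch; assumes `fr` is an H2OFrame that has
already been imported):
>>> model = H2OKMeansEstimator(k=3, max_iterations=100, seed=1234)
>>> model.train(x=fr.names, training_frame=fr)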
"""
super(H2OKMeansEstimator, self).__init__()
self._parms = locals()
self._parms = {k:v for k,v in self._parms.iteritems() if k!="self"}
@property
def k(self):
return self._parms["k"]
@k.setter
def k(self, value):
self._parms["k"] = value
@property
def max_iterations(self):
return self._parms["max_iterations"]
@max_iterations.setter
def max_iterations(self, value):
self._parms["max_iterations"] = value
@property
def standardize(self):
return self._parms["standardize"]
@standardize.setter
def standardize(self, value):
self._parms["standardize"] = value
@property
def init(self):
return self._parms["init"]
@init.setter
def init(self, value):
self._parms["init"] = value<|fim▁hole|>
@seed.setter
def seed(self, value):
self._parms["seed"] = value
@property
def nfolds(self):
return self._parms["nfolds"]
@nfolds.setter
def nfolds(self, value):
self._parms["nfolds"] = value
@property
def fold_assignment(self):
return self._parms["fold_assignment"]
@fold_assignment.setter
def fold_assignment(self, value):
self._parms["fold_assignment"] = value
@property
def user_points(self):
return self._parms["user_points"]
@user_points.setter
def user_points(self, value):
self._parms["user_points"] = value
@property
def ignored_columns(self):
return self._parms["ignored_columns"]
@ignored_columns.setter
def ignored_columns(self, value):
self._parms["ignored_columns"] = value
@property
def score_each_iteration(self):
return self._parms["score_each_iteration"]
@score_each_iteration.setter
def score_each_iteration(self, value):
self._parms["score_each_iteration"] = value
@property
def keep_cross_validation_predictions(self):
return self._parms["keep_cross_validation_predictions"]
@keep_cross_validation_predictions.setter
def keep_cross_validation_predictions(self, value):
self._parms["keep_cross_validation_predictions"] = value
@property
def ignore_const_cols(self):
return self._parms["ignore_const_cols"]
@ignore_const_cols.setter
def ignore_const_cols(self, value):
self._parms["ignore_const_cols"] = value
@property
def checkpoint(self):
return self._parms["checkpoint"]
@checkpoint.setter
def checkpoint(self, value):
self._parms["checkpoint"] = value<|fim▁end|> |
@property
def seed(self):
return self._parms["seed"] |
<|file_name|>OpenWikiOnString.py<|end_file_name|><|fim▁begin|>import sublime, sublime_plugin<|fim▁hole|>import webbrowser
class OpenWikiOnString(sublime_plugin.TextCommand):
def run(self, edit):
query = self.view.substr(self.view.sel()[0])
webbrowser.open_new("http://wiki.sa-mp.com/wiki/" + query)<|fim▁end|> | |
<|file_name|>manager.go<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2018 The "MysteriumNetwork/node" Authors.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package service<|fim▁hole|> "errors"
log "github.com/cihub/seelog"
"github.com/mysteriumnetwork/node/communication"
"github.com/mysteriumnetwork/node/identity"
"github.com/mysteriumnetwork/node/market"
"github.com/mysteriumnetwork/node/session"
)
var (
// ErrorLocation error indicates that action (i.e. disconnect)
ErrorLocation = errors.New("failed to detect service location")
// ErrUnsupportedServiceType indicates that manager tried to create an unsupported service type
ErrUnsupportedServiceType = errors.New("unsupported service type")
)
// Service interface represents pluggable Mysterium service
type Service interface {
Serve(providerID identity.Identity) error
Stop() error
ProvideConfig(publicKey json.RawMessage) (session.ServiceConfiguration, session.DestroyCallback, error)
}
// DialogWaiterFactory initiates communication channel which waits for incoming dialogs
type DialogWaiterFactory func(providerID identity.Identity, serviceType string) (communication.DialogWaiter, error)
// DialogHandlerFactory initiates instance which is able to handle incoming dialogs
type DialogHandlerFactory func(market.ServiceProposal, session.ConfigNegotiator) communication.DialogHandler
// DiscoveryFactory initiates instance which is able announce service discoverability
type DiscoveryFactory func() Discovery
// Discovery registers the service to the discovery api periodically
type Discovery interface {
Start(ownIdentity identity.Identity, proposal market.ServiceProposal)
Stop()
Wait()
}
// NewManager creates new instance of pluggable instances manager
func NewManager(
serviceRegistry *Registry,
dialogWaiterFactory DialogWaiterFactory,
dialogHandlerFactory DialogHandlerFactory,
discoveryFactory DiscoveryFactory,
) *Manager {
return &Manager{
serviceRegistry: serviceRegistry,
servicePool: NewPool(),
dialogWaiterFactory: dialogWaiterFactory,
dialogHandlerFactory: dialogHandlerFactory,
discoveryFactory: discoveryFactory,
}
}
// Manager entrypoint which knows how to start pluggable Mysterium instances
type Manager struct {
dialogWaiterFactory DialogWaiterFactory
dialogHandlerFactory DialogHandlerFactory
serviceRegistry *Registry
servicePool *Pool
discoveryFactory DiscoveryFactory
}
// Start starts an instance of the given service type if knows one in service registry.
// It passes the options to the start method of the service.
// If an error occurs in the underlying service, the error is then returned.
func (manager *Manager) Start(providerID identity.Identity, serviceType string, options Options) (id ID, err error) {
service, proposal, err := manager.serviceRegistry.Create(serviceType, options)
if err != nil {
return id, err
}
dialogWaiter, err := manager.dialogWaiterFactory(providerID, serviceType)
if err != nil {
return id, err
}
providerContact, err := dialogWaiter.Start()
if err != nil {
return id, err
}
proposal.SetProviderContact(providerID, providerContact)
dialogHandler := manager.dialogHandlerFactory(proposal, service)
if err = dialogWaiter.ServeDialogs(dialogHandler); err != nil {
return id, err
}
discovery := manager.discoveryFactory()
discovery.Start(providerID, proposal)
instance := Instance{
state: Starting,
options: options,
service: service,
proposal: proposal,
dialogWaiter: dialogWaiter,
discovery: discovery,
}
id, err = manager.servicePool.Add(&instance)
if err != nil {
return id, err
}
go func() {
instance.state = Running
serveErr := service.Serve(providerID)
if serveErr != nil {
log.Error("Service serve failed: ", serveErr)
}
instance.state = NotRunning
stopErr := manager.servicePool.Stop(id)
if stopErr != nil {
log.Error("Service stop failed: ", stopErr)
}
discovery.Wait()
}()
return id, nil
}
// List returns array of running service instances.
func (manager *Manager) List() map[ID]*Instance {
return manager.servicePool.List()
}
// Kill stops all services.
func (manager *Manager) Kill() error {
return manager.servicePool.StopAll()
}
// Stop stops the service.
func (manager *Manager) Stop(id ID) error {
return manager.servicePool.Stop(id)
}
// Service returns a service instance by requested id.
func (manager *Manager) Service(id ID) *Instance {
return manager.servicePool.Instance(id)
}<|fim▁end|> |
import (
"encoding/json" |
<|file_name|>path.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""
lunaport.dao.line
~~~~~~~~~~~~~~~~~
Storage interaction logic for line resource.
"""
import pprint
pp = pprint.PrettyPrinter(indent=4).pprint
from sqlalchemy import text, exc
from ..wsgi import app, db
from .. domain.line import LineBuilder, LineAdaptor
from exceptions import StorageError
class Filter(object):
params_allowed = {
'name': (
"AND name LIKE '%:name%'"),
}
cast_to_int = []
def __init__(self, **kw):
self.rule = []
self.q_params = {}
for p, v in kw.iteritems():
if p not in self.params_allowed.keys():
continue
elif isinstance(v, (unicode, basestring)):
self.rule.append(self.params_allowed[p][0])
self.q_params.update({p: v})
else:
raise StorageError('Wrong *{}* param type.'.format(p))
def cmpl_query(self):
sql_text = '\n' + ' '.join(self.rule)
return sql_text, self.q_params
class Dao(object):
"""Interface for line storage"""
@classmethod
def insert(cls, ammo):
raise NotImplemented()
@classmethod
def get_single(cls, **kw):
raise NotImplemented()
@classmethod
def get_many(cls, **kw):
raise NotImplemented()
class RDBMS(Dao):
"""PostgreSQL wrapper, implementing line.dao interface"""
per_page_default = app.config.get('LINE_PER_PAGE_DEFAULT') or 10
per_page_max = app.config.get('LINE_PER_PAGE_MAX') or 100
select_join_part = '''
SELECT l.*,
dc.name AS dc_name
FROM line l,
dc dc
WHERE l.dc_id = dc.id'''
@staticmethod
def rdbms_call(q_text, q_params):
return db.engine.connect().execute(text(q_text), **q_params)
@classmethod
def insert(cls, line):
kw = LineAdaptor.to_dict(line)
kw['dc_name'] = kw['dc']['name']
pp(kw)
def query():
return cls.rdbms_call('''
INSERT INTO line
(
id,
name,
dc_id
)
VALUES (
:id,
:name,
(SELECT id FROM dc WHERE name = :dc_name)
)
returning id''', kw)
err_duplicate = 'line:{} already exists'.format(kw.get('name'))
try:
pk_id = [r for r in query()].pop()[0]
except exc.IntegrityError as e:
if 'unique constraint "line_pkey"' in str(e):
raise StorageError(err_duplicate)
raise StorageError('Some kind of IntegrityError')
return pk_id
@classmethod
def get_single(cls, **kw):
if kw.get('line_id'):
query_params = {
'line_id': kw.get('line_id'),
}
rv = cls.rdbms_call(' '.join([cls.select_join_part, 'AND l.id = :line_id']), query_params)
row = rv.first()
if not row:
return None
t_kw = dict(zip(rv.keys(), row))
return LineBuilder.from_row(**t_kw)
@classmethod
def get_many(cls, **kw):
"""pagination"""
pagination_part = '\nORDER BY id DESC\nLIMIT :limit OFFSET :offset'
param_per_page = kw.get('per_page')
if param_per_page and (param_per_page <= cls.per_page_max):
per_page = param_per_page
else:
per_page = cls.per_page_default
page_num = kw.get('page')
# Page numbering starts from 1; page 0 and page 1 both mean the
# first slice of the data set.
if page_num and isinstance(page_num, int) and (page_num >= 2):
offset = (page_num - 1) * per_page
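# e.g. page=3 with per_page=10 yields offset=20 (rows 21..30)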
next_page = page_num + 1
prev_page = page_num - 1
else:
offset = 0<|fim▁hole|> query_params = {
'limit': per_page,
'offset': offset,
}
"""filtering"""
f = Filter(**kw)
filter_part, q_params_up = f.cmpl_query()
query_params.update(q_params_up)
rv = cls.rdbms_call(
''.join([cls.select_join_part, filter_part, pagination_part]),
query_params)
rows = rv.fetchall()
if len(rows) == 0:
return None, None, None, None
elif len(rows) < per_page: # last chunk of data
next_page = None
def create_dc(row):
t_kw = dict(zip(rv.keys(), row))
return LineBuilder.from_row(**t_kw)
return map(create_dc, rows), per_page, next_page, prev_page<|fim▁end|> | next_page = 2
prev_page = None
|
<|file_name|>register.go<|end_file_name|><|fim▁begin|>/*<|fim▁hole|>Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package authentication
import (
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
)
// GroupName is the group name use in this package
const GroupName = "authentication.k8s.io"
// SchemeGroupVersion is group version used to register these objects
var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: runtime.APIVersionInternal}
// Kind takes an unqualified kind and returns a Group qualified GroupKind
func Kind(kind string) schema.GroupKind {
return SchemeGroupVersion.WithKind(kind).GroupKind()
}
// Resource takes an unqualified resource and returns a Group qualified GroupResource
func Resource(resource string) schema.GroupResource {
return SchemeGroupVersion.WithResource(resource).GroupResource()
}
var (
SchemeBuilder = runtime.NewSchemeBuilder(addKnownTypes)
AddToScheme = SchemeBuilder.AddToScheme
)
func addKnownTypes(scheme *runtime.Scheme) error {
scheme.AddKnownTypes(SchemeGroupVersion,
&TokenReview{},
&TokenRequest{},
)
return nil
}<|fim▁end|> | |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>"""
Tests for Blocks api.py
"""
from django.test.client import RequestFactory
from course_blocks.tests.helpers import EnableTransformerRegistryMixin
from student.tests.factories import UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import SampleCourseFactory
from ..api import get_blocks
class TestGetBlocks(EnableTransformerRegistryMixin, SharedModuleStoreTestCase):
"""
Tests for the get_blocks function
"""
@classmethod
def setUpClass(cls):
super(TestGetBlocks, cls).setUpClass()
cls.course = SampleCourseFactory.create()
# hide the html block
cls.html_block = cls.store.get_item(cls.course.id.make_usage_key('html', 'html_x1a_1'))
cls.html_block.visible_to_staff_only = True
cls.store.update_item(cls.html_block, ModuleStoreEnum.UserID.test)
def setUp(self):
super(TestGetBlocks, self).setUp()
self.user = UserFactory.create()
self.request = RequestFactory().get("/dummy")
self.request.user = self.user
def test_basic(self):
blocks = get_blocks(self.request, self.course.location, self.user)
self.assertEquals(blocks['root'], unicode(self.course.location))
# subtract for (1) the orphaned course About block and (2) the hidden Html block
self.assertEquals(len(blocks['blocks']), len(self.store.get_items(self.course.id)) - 2)
self.assertNotIn(unicode(self.html_block.location), blocks['blocks'])
def test_no_user(self):
blocks = get_blocks(self.request, self.course.location)
self.assertIn(unicode(self.html_block.location), blocks['blocks'])
def test_access_before_api_transformer_order(self):
"""
Tests the order of transformers: access checks are made before the api
transformer is applied.
"""<|fim▁hole|> vertical_descendants = blocks['blocks'][unicode(vertical_block.location)]['descendants']
self.assertIn(unicode(problem_block.location), vertical_descendants)
self.assertNotIn(unicode(self.html_block.location), vertical_descendants)<|fim▁end|> | blocks = get_blocks(self.request, self.course.location, self.user, nav_depth=5, requested_fields=['nav_depth'])
vertical_block = self.store.get_item(self.course.id.make_usage_key('vertical', 'vertical_x1a'))
problem_block = self.store.get_item(self.course.id.make_usage_key('problem', 'problem_x1a_1'))
|
<|file_name|>custom_returning_requires_nonaggregate.rs<|end_file_name|><|fim▁begin|>extern crate diesel;
use diesel::*;
use diesel::dsl::count;
table! {
users {
id -> Integer,
name -> VarChar,
}
}
#[derive(Insertable)]
#[table_name="users"]
pub struct NewUser {
name: String,
}
fn main() {
use self::users::dsl::*;<|fim▁hole|> name: "Foobar".to_string(),
};
let stmt = insert_into(users).values(&new_user).returning((name, count(name)));
}<|fim▁end|> |
let stmt = update(users.filter(id.eq(1))).set(name.eq("Bill")).returning(count(id));
let new_user = NewUser { |
<|file_name|>huddle-test.js<|end_file_name|><|fim▁begin|>/*global assert: false, refute: false */
var buster = require("buster"),
huddle = require("../lib/huddle.js"),
resources;
buster.testCase('Huddle', {
setUp: function () {
resources = new huddle.Huddle();
},
"Empty input": function () {
resources.read('');
assert.equals(resources.write(), '');
},
"Doctype should be preserved": function () {
resources.read('<!DOCTYPE html>');
assert.equals(resources.write(), '<!DOCTYPE html>');
},
"Multi line doctype": function () {
resources.read('<!DOCTYPE html line1\nline2>');
assert.equals(resources.write(), '<!DOCTYPE html line1\nline2>');
},
"Singleton tag": function () {
resources.read('<div/>');
assert.equals(resources.write(), '<div/>');
},
"Strip whitespace": function () {
resources.read(' <div/>');
assert.equals(resources.write(), '<div/>');
},
"Preserve whitespace": function () {
resources = new huddle.Huddle({ignoreWhitespace: false});
resources.read(' <div/>');
assert.equals(resources.write(), ' <div/>');
},
"Open/Close tag without body": function () {
resources.read('<div></div>');
assert.equals(resources.write(), '<div/>');
},
"Open/Close tag with body": function () {
resources.read('<div>Test</div>');
assert.equals(resources.write(), '<div>Test</div>');
},
"Tag with attributes": function () {
resources.read('<div a1="v1" a2="v2"/>');
assert.equals(resources.write(), '<div a1="v1" a2="v2"/>');
},
"Stylesheet link tag": function () {
resources.read('<link href="a.css" rel="stylesheet" type="text/css"/>');
assert.equals(resources.write(), '<link href="app.css" rel="stylesheet" type="text/css"/>');
assert.equals(resources.getStylesheets()['app']['a.css'], 'text/css');
},
"Multiple stylesheet link tags": function () {
resources.read('<link href="a.css" rel="stylesheet" type="text/css"/><link href="b.less" rel="stylesheet/less" type="text/css"/>');
assert.equals(resources.write(), '<link href="app.css" rel="stylesheet" type="text/css"/>');
assert.equals(resources.getStylesheets()['app']['a.css'], 'text/css');<|fim▁hole|> },
"Stylesheet with module": function () {
resources.read('<link href="a.css" rel="stylesheet" type="text/css" data-module="mymod"/>');
assert.equals(resources.write(), '<link href="mymod.css" rel="stylesheet" type="text/css"/>');
assert.equals(resources.getStylesheets()['mymod']['a.css'], 'text/css');
refute(resources.getStylesheets()['app']);
},
"Multiple stylesheet link tags with modules": function () {
resources.read('<link href="a.css" rel="stylesheet" type="text/css" data-module="mylib"/><link href="b.less" rel="stylesheet/less" type="text/css"/>');
assert.equals(resources.write(), '<link href="mylib.css" rel="stylesheet" type="text/css"/><link href="app.css" rel="stylesheet" type="text/css"/>');
assert.equals(resources.getStylesheets()['mylib']['a.css'], 'text/css');
assert.equals(resources.getStylesheets()['app']['b.less'], 'text/less');
},
"Drop stylesheet link tag": function () {
resources.read('<link href="a.css" rel="stylesheet" type="text/css" data-drop=""/>');
assert.equals(resources.write(), '');
refute(resources.getStylesheets()['app']);
},
"Include remote stylesheet": function () {
resources.read('<link href="a.css" rel="stylesheet" type="text/css" data-remote="b.css"/>');
assert.equals(resources.write(), '<link href="b.css" rel="stylesheet" type="text/css"/>');
refute(resources.getStylesheets()['app']);
},
"Script tag": function () {
resources.read('<script type="text/javascript" src="a.js"></script>');
assert.equals(resources.write(), '<script type="text/javascript" src="app.js"></script>');
assert.equals(resources.getScripts()['app']['a.js'], 'text/javascript');
},
"Multiple script tags with modules": function () {
resources.read('<script type="text/javascript" src="a.js"></script><script type="text/javascript" src="b.js" data-module="mylib"></script>');
assert.equals(resources.write(), '<script type="text/javascript" src="app.js"></script><script type="text/javascript" src="mylib.js"></script>');
assert.equals(resources.getScripts()['app']['a.js'], 'text/javascript');
assert.equals(resources.getScripts()['mylib']['b.js'], 'text/javascript');
},
"Drop script tag": function () {
resources.read('<script type="text/javascript" src="a.js" data-drop=""/>');
assert.equals(resources.write(), '');
refute(resources.getScripts()['app']);
},
"Include remote scripts": function () {
resources.read('<script type="text/javascript" src="a.js" data-remote="b.js"/>');
assert.equals(resources.write(), '<script type="text/javascript" src="b.js"></script>');
refute(resources.getScripts()['app']);
}
});<|fim▁end|> | assert.equals(resources.getStylesheets()['app']['b.less'], 'text/less'); |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>'''This module contains some glue code encapsulating a "main" process.
The code here is aimed at wrapping the most common tasks involved in creating
and, especially, training a neural network model.
'''
import climate
import datetime
import downhill
import os
import warnings
from . import graph
from . import trainer
logging = climate.get_logger(__name__)
class Experiment:
'''This class encapsulates tasks for training and evaluating a network.
Parameters
----------
model : :class:`Network <graph.Network>` or str
A specification for obtaining a model. If a string is given, it is
assumed to name a file containing a pickled model; this file will be
loaded and used. If a network instance is provided, it will be used
as the model. If a callable (such as a subclass) is provided, it
will be invoked using the provided keyword arguments to create a
network instance.
'''
def __init__(self, network, *args, **kwargs):
if isinstance(network, str) and os.path.isfile(network):
self.load(network)
elif isinstance(network, graph.Network):
self.network = network
else:
assert network is not graph.Network, \
'use a concrete theanets.Network subclass ' \
'like theanets.{Autoencoder,Regressor,...}'<|fim▁hole|> def create_trainer(self, train, algo='rmsprop'):
'''Create a trainer.
Additional keyword arguments are passed directly to the trainer.
Parameters
----------
train : str
A string describing a trainer to use.
algo : str
A string describing an optimization algorithm.
Returns
-------
trainer : :class:`Trainer <trainer.Trainer>`
A trainer instance to alter the parameters of our network.
'''
train = train.lower()
if train == 'sample':
return trainer.SampleTrainer(self.network)
if train.startswith('layer') or train.startswith('sup'):
return trainer.SupervisedPretrainer(algo, self.network)
if train.startswith('pre') or train.startswith('unsup'):
return trainer.UnsupervisedPretrainer(algo, self.network)
return trainer.DownhillTrainer(train, self.network)
def create_dataset(self, data, **kwargs):
'''Create a dataset for this experiment.
Parameters
----------
data : sequence of ndarray or callable
The values that you provide for data will be encapsulated inside a
:class:`Dataset <downhill.Dataset>` instance; see that class for
documentation on the types of things it needs. In particular, you
can currently pass in either a list/array/etc. of data, or a
callable that generates data dynamically.
Returns
-------
data : :class:`Dataset <downhill.Dataset>`
A dataset capable of providing mini-batches of data to a training
algorithm.
'''
default_axis = 0
if not callable(data) and not callable(data[0]) and len(data[0].shape) == 3:
default_axis = 1
name = kwargs.get('name', 'dataset')
b, i, s = 'batch_size', 'iteration_size', '{}_batches'.format(name)
return downhill.Dataset(
data,
name=name,
batch_size=kwargs.get(b, 32),
iteration_size=kwargs.get(i, kwargs.get(s)),
axis=kwargs.get('axis', default_axis))
def train(self, *args, **kwargs):
'''Train the network until the trainer converges.
All arguments are passed to :func:`itertrain`.
Returns
-------
training : dict
A dictionary of monitor values computed using the training dataset,
at the conclusion of training. This dictionary will at least contain
a 'loss' key that indicates the value of the loss function. Other
keys may be available depending on the trainer being used.
validation : dict
A dictionary of monitor values computed using the validation
dataset, at the conclusion of training.
'''
monitors = None
for monitors in self.itertrain(*args, **kwargs):
pass
return monitors
def itertrain(self, train, valid=None, algorithm='rmsprop', **kwargs):
'''Train our network, one batch at a time.
This method yields a series of ``(train, valid)`` monitor pairs. The
``train`` value is a dictionary mapping names to monitor values
evaluated on the training dataset. The ``valid`` value is also a
dictionary mapping names to values, but these values are evaluated on
the validation dataset.
Because validation might not occur every training iteration, the
validation monitors might be repeated for multiple training iterations.
It is probably most helpful to think of the validation monitors as being
the "most recent" values that have been computed.
After training completes, the network attribute of this class will
contain the trained network parameters.
Parameters
----------
train : sequence of ndarray or :class:`downhill.Dataset`
A dataset to use when training the network. If this is a
``downhill.Dataset`` instance, it will be used directly as the
training dataset. If it is another type, like a numpy array, it will
be converted to a ``downhill.Dataset`` and then used as the training
set.
valid : sequence of ndarray or :class:`downhill.Dataset`, optional
If this is provided, it will be used as a validation dataset. If not
provided, the training set will be used for validation. (This is not
recommended!)
algorithm : str or list of str, optional
One or more optimization algorithms to use for training our network.
If not provided, RMSProp will be used.
Yields
------
training : dict
A dictionary of monitor values computed using the training dataset,
at the conclusion of training. This dictionary will at least contain
a 'loss' key that indicates the value of the loss function. Other
keys may be available depending on the trainer being used.
validation : dict
A dictionary of monitor values computed using the validation
dataset, at the conclusion of training.
'''
# set up datasets
if valid is None:
valid = train
if not isinstance(valid, downhill.Dataset):
valid = self.create_dataset(valid, name='valid', **kwargs)
if not isinstance(train, downhill.Dataset):
train = self.create_dataset(train, name='train', **kwargs)
# set up training algorithm(s)
if 'optimize' in kwargs:
warnings.warn(
'please use the "algorithm" keyword arg instead of "optimize"',
DeprecationWarning)
algorithm = kwargs.pop('optimize')
if isinstance(algorithm, str):
algorithm = algorithm.split()
# set up auto-saving if enabled
progress = kwargs.get('save_progress')
timeout = kwargs.get('save_every', 0)
if timeout < 0: # timeout < 0 is in minutes instead of iterations.
timeout *= 60
# loop over trainers, saving every N minutes/iterations if enabled
for algo in algorithm:
if not callable(getattr(algo, 'itertrain', None)):
algo = self.create_trainer(algo)
start = datetime.datetime.now()
for i, monitors in enumerate(algo.itertrain(train, valid, **kwargs)):
yield monitors
now = datetime.datetime.now()
elapsed = (now - start).total_seconds()
if i and progress and (
(timeout < 0 and elapsed > -timeout) or
(timeout > 0 and i % int(timeout) == 0)):
self.save(progress)
start = now
def save(self, path):
'''Save the current network to a pickle file on disk.
Parameters
----------
path : str
Location of the file to save the network.
'''
self.network.save(path)
def load(self, path):
'''Load a saved network from a pickle file on disk.
This method sets the ``network`` attribute of the experiment to the
loaded network model.
Parameters
----------
filename : str
Load the keyword arguments and parameters of a network from a pickle
file at the named path. If this name ends in ".gz" then the input
will automatically be gunzipped; otherwise the input will be treated
as a "raw" pickle.
Returns
-------
network : :class:`Network <graph.Network>`
A newly-constructed network, with topology and parameters loaded
from the given pickle file.
'''
self.network = graph.Network.load(path)
return self.network<|fim▁end|> | self.network = network(*args, **kwargs)
|
<|file_name|>export_global_model.py<|end_file_name|><|fim▁begin|># Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Export global feature tensorflow inference model.
This model includes image pyramids for multi-scale processing.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import tensorflow as tf
from delf.python.training.model import delf_model
from delf.python.training.model import export_model_utils
FLAGS = flags.FLAGS
flags.DEFINE_string('ckpt_path', '/tmp/delf-logdir/delf-weights',
'Path to saved checkpoint.')
flags.DEFINE_string('export_path', None, 'Path where model will be exported.')
flags.DEFINE_list(
'input_scales_list', None,
'Optional input image scales to use. If None (default), an input end-point '
'"input_scales" is added for the exported model. If not None, the '
'specified list of floats will be hard-coded as the desired input scales.')
flags.DEFINE_enum(
'multi_scale_pool_type', 'None', ['None', 'average', 'sum'],
"If 'None' (default), the model is exported with an output end-point "
"'global_descriptors', where the global descriptor for each scale is "
"returned separately. If not 'None', the global descriptor of each scale is"
' pooled and a 1D global descriptor is returned, with output end-point '
"'global_descriptor'.")
flags.DEFINE_boolean('normalize_global_descriptor', False,
'If True, L2-normalizes global descriptor.')
class _ExtractModule(tf.Module):
"""Helper module to build and save global feature model."""
def __init__(self,
multi_scale_pool_type='None',
normalize_global_descriptor=False,
input_scales_tensor=None):
"""Initialization of global feature model.
Args:
multi_scale_pool_type: Type of multi-scale pooling to perform.
normalize_global_descriptor: Whether to L2-normalize global descriptor.
input_scales_tensor: If None, the exported function to be used should be
ExtractFeatures, where an input end-point "input_scales" is added for
the exported model. If not None, the specified 1D tensor of floats will
be hard-coded as the desired input scales, in conjunction with
ExtractFeaturesFixedScales.
"""
self._multi_scale_pool_type = multi_scale_pool_type
self._normalize_global_descriptor = normalize_global_descriptor
if input_scales_tensor is None:
self._input_scales_tensor = []
else:
self._input_scales_tensor = input_scales_tensor
# Setup the DELF model for extraction.
self._model = delf_model.Delf(block3_strides=False, name='DELF')
def LoadWeights(self, checkpoint_path):
self._model.load_weights(checkpoint_path)
@tf.function(input_signature=[
tf.TensorSpec(shape=[None, None, 3], dtype=tf.uint8, name='input_image'),
tf.TensorSpec(shape=[None], dtype=tf.float32, name='input_scales'),
tf.TensorSpec(
shape=[None], dtype=tf.int32, name='input_global_scales_ind')
])
def ExtractFeatures(self, input_image, input_scales, input_global_scales_ind):
extracted_features = export_model_utils.ExtractGlobalFeatures(
input_image,
input_scales,
input_global_scales_ind,
lambda x: self._model.backbone.build_call(x, training=False),
multi_scale_pool_type=self._multi_scale_pool_type,
normalize_global_descriptor=self._normalize_global_descriptor)
named_output_tensors = {}
if self._multi_scale_pool_type == 'None':
named_output_tensors['global_descriptors'] = tf.identity(
extracted_features, name='global_descriptors')
else:
named_output_tensors['global_descriptor'] = tf.identity(
extracted_features, name='global_descriptor')
return named_output_tensors
@tf.function(input_signature=[
tf.TensorSpec(shape=[None, None, 3], dtype=tf.uint8, name='input_image')
])
def ExtractFeaturesFixedScales(self, input_image):
return self.ExtractFeatures(input_image, self._input_scales_tensor,
tf.range(tf.size(self._input_scales_tensor)))
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
export_path = FLAGS.export_path
if os.path.exists(export_path):
raise ValueError('export_path %s already exists.' % export_path)
if FLAGS.input_scales_list is None:
input_scales_tensor = None
else:
input_scales_tensor = tf.constant(
[float(s) for s in FLAGS.input_scales_list],
dtype=tf.float32,
shape=[len(FLAGS.input_scales_list)],
name='input_scales')
module = _ExtractModule(FLAGS.multi_scale_pool_type,
FLAGS.normalize_global_descriptor,
input_scales_tensor)
# Load the weights.
checkpoint_path = FLAGS.ckpt_path
module.LoadWeights(checkpoint_path)
print('Checkpoint loaded from ', checkpoint_path)
# Save the module
if FLAGS.input_scales_list is None:
served_function = module.ExtractFeatures
else:
served_function = module.ExtractFeaturesFixedScales
tf.saved_model.save(
module, export_path, signatures={'serving_default': served_function})
if __name__ == '__main__':
app.run(main)<|fim▁end|> | |
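After export, the SavedModel is driven through its serving signature. A hedged sketch (the export path and the pyramid scales are assumptions; the input and output names come from the TensorSpecs above):

import tensorflow as tf

loaded = tf.saved_model.load('/tmp/delf_global_export')  # assumed export path
serving_fn = loaded.signatures['serving_default']

image = tf.zeros([320, 320, 3], dtype=tf.uint8)  # any uint8 RGB image
scales = tf.constant([0.7071, 1.0, 1.4142], dtype=tf.float32)  # assumed pyramid

# With --input_scales_list unset, ExtractFeatures is served, so all three
# inputs are required; with the default multi_scale_pool_type of 'None',
# per-scale descriptors come back under the 'global_descriptors' key.
outputs = serving_fn(input_image=image,
                     input_scales=scales,
                     input_global_scales_ind=tf.range(tf.size(scales)))
per_scale_descriptors = outputs['global_descriptors']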
<|file_name|>dialogflow_v2_generated_conversation_profiles_update_conversation_profile_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateConversationProfile
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_async]
from google.cloud import dialogflow_v2
<|fim▁hole|> client = dialogflow_v2.ConversationProfilesAsyncClient()
# Initialize request argument(s)
conversation_profile = dialogflow_v2.ConversationProfile()
conversation_profile.display_name = "display_name_value"
request = dialogflow_v2.UpdateConversationProfileRequest(
conversation_profile=conversation_profile,
)
# Make the request
response = await client.update_conversation_profile(request=request)
# Handle the response
print(response)
# [END dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_async]<|fim▁end|> |
async def sample_update_conversation_profile():
# Create a client |
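As with any generated async sample, the coroutine still has to be scheduled on an event loop; a one-line driver (not part of the generated snippet itself) would be:

import asyncio

asyncio.run(sample_update_conversation_profile())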
<|file_name|>status.rs<|end_file_name|><|fim▁begin|>//! Status Codes
use std::fmt;
use std::num::{FromPrimitive, ToPrimitive};
use std::mem::transmute;
use std::cmp::Ordering::{self, Less, Equal, Greater};
// shamelessly lifted from Teepee. I tried a few schemes, this really
// does seem like the best.
/// An HTTP status code (`Status-Code` in RFC 2616).
///
/// This enum is absolutely exhaustive, covering all 500 possible values (100–599).
///<|fim▁hole|>///
/// As this is a C‐style enum with each variant having a corresponding value, you may use the likes
/// of `Continue as u16` to retrieve the value `100u16`. Normally, though, you should not need to do
/// any such thing; just use the status code as a `StatusCode`.
///
/// If you encounter a status code that you do not know how to deal with, you should treat it as the
/// `x00` status code—e.g. for code 123, treat it as 100 (Continue). This can be achieved with
/// `self.class().default_code()`:
///
/// ```rust
/// # use hyper::status::StatusCode::{Code123, Continue};
/// assert_eq!(Code123.class().default_code(), Continue);
/// ```
#[derive(Debug)]
pub enum StatusCode {
/// 100 Continue
Continue = 100,
/// 101 Switching Protocols
SwitchingProtocols = 101,
/// 102 Processing
Processing = 102,
/// 103 (unregistered)
Code103 = 103,
/// 104 (unregistered)
Code104 = 104,
/// 105 (unregistered)
Code105 = 105,
/// 106 (unregistered)
Code106 = 106,
/// 107 (unregistered)
Code107 = 107,
/// 108 (unregistered)
Code108 = 108,
/// 109 (unregistered)
Code109 = 109,
/// 110 (unregistered)
Code110 = 110,
/// 111 (unregistered)
Code111 = 111,
/// 112 (unregistered)
Code112 = 112,
/// 113 (unregistered)
Code113 = 113,
/// 114 (unregistered)
Code114 = 114,
/// 115 (unregistered)
Code115 = 115,
/// 116 (unregistered)
Code116 = 116,
/// 117 (unregistered)
Code117 = 117,
/// 118 (unregistered)
Code118 = 118,
/// 119 (unregistered)
Code119 = 119,
/// 120 (unregistered)
Code120 = 120,
/// 121 (unregistered)
Code121 = 121,
/// 122 (unregistered)
Code122 = 122,
/// 123 (unregistered)
Code123 = 123,
/// 124 (unregistered)
Code124 = 124,
/// 125 (unregistered)
Code125 = 125,
/// 126 (unregistered)
Code126 = 126,
/// 127 (unregistered)
Code127 = 127,
/// 128 (unregistered)
Code128 = 128,
/// 129 (unregistered)
Code129 = 129,
/// 130 (unregistered)
Code130 = 130,
/// 131 (unregistered)
Code131 = 131,
/// 132 (unregistered)
Code132 = 132,
/// 133 (unregistered)
Code133 = 133,
/// 134 (unregistered)
Code134 = 134,
/// 135 (unregistered)
Code135 = 135,
/// 136 (unregistered)
Code136 = 136,
/// 137 (unregistered)
Code137 = 137,
/// 138 (unregistered)
Code138 = 138,
/// 139 (unregistered)
Code139 = 139,
/// 140 (unregistered)
Code140 = 140,
/// 141 (unregistered)
Code141 = 141,
/// 142 (unregistered)
Code142 = 142,
/// 143 (unregistered)
Code143 = 143,
/// 144 (unregistered)
Code144 = 144,
/// 145 (unregistered)
Code145 = 145,
/// 146 (unregistered)
Code146 = 146,
/// 147 (unregistered)
Code147 = 147,
/// 148 (unregistered)
Code148 = 148,
/// 149 (unregistered)
Code149 = 149,
/// 150 (unregistered)
Code150 = 150,
/// 151 (unregistered)
Code151 = 151,
/// 152 (unregistered)
Code152 = 152,
/// 153 (unregistered)
Code153 = 153,
/// 154 (unregistered)
Code154 = 154,
/// 155 (unregistered)
Code155 = 155,
/// 156 (unregistered)
Code156 = 156,
/// 157 (unregistered)
Code157 = 157,
/// 158 (unregistered)
Code158 = 158,
/// 159 (unregistered)
Code159 = 159,
/// 160 (unregistered)
Code160 = 160,
/// 161 (unregistered)
Code161 = 161,
/// 162 (unregistered)
Code162 = 162,
/// 163 (unregistered)
Code163 = 163,
/// 164 (unregistered)
Code164 = 164,
/// 165 (unregistered)
Code165 = 165,
/// 166 (unregistered)
Code166 = 166,
/// 167 (unregistered)
Code167 = 167,
/// 168 (unregistered)
Code168 = 168,
/// 169 (unregistered)
Code169 = 169,
/// 170 (unregistered)
Code170 = 170,
/// 171 (unregistered)
Code171 = 171,
/// 172 (unregistered)
Code172 = 172,
/// 173 (unregistered)
Code173 = 173,
/// 174 (unregistered)
Code174 = 174,
/// 175 (unregistered)
Code175 = 175,
/// 176 (unregistered)
Code176 = 176,
/// 177 (unregistered)
Code177 = 177,
/// 178 (unregistered)
Code178 = 178,
/// 179 (unregistered)
Code179 = 179,
/// 180 (unregistered)
Code180 = 180,
/// 181 (unregistered)
Code181 = 181,
/// 182 (unregistered)
Code182 = 182,
/// 183 (unregistered)
Code183 = 183,
/// 184 (unregistered)
Code184 = 184,
/// 185 (unregistered)
Code185 = 185,
/// 186 (unregistered)
Code186 = 186,
/// 187 (unregistered)
Code187 = 187,
/// 188 (unregistered)
Code188 = 188,
/// 189 (unregistered)
Code189 = 189,
/// 190 (unregistered)
Code190 = 190,
/// 191 (unregistered)
Code191 = 191,
/// 192 (unregistered)
Code192 = 192,
/// 193 (unregistered)
Code193 = 193,
/// 194 (unregistered)
Code194 = 194,
/// 195 (unregistered)
Code195 = 195,
/// 196 (unregistered)
Code196 = 196,
/// 197 (unregistered)
Code197 = 197,
/// 198 (unregistered)
Code198 = 198,
/// 199 (unregistered)
Code199 = 199,
/// 200 OK
Ok = 200,
/// 201 Created
Created = 201,
/// 202 Accepted
Accepted = 202,
/// 203 Non-Authoritative Information
NonAuthoritativeInformation = 203,
/// 204 No Content
NoContent = 204,
/// 205 Reset Content
ResetContent = 205,
/// 206 Partial Content
PartialContent = 206,
/// 207 Multi-Status
MultiStatus = 207,
/// 208 Already Reported
AlreadyReported = 208,
/// 209 (unregistered)
Code209 = 209,
/// 210 (unregistered)
Code210 = 210,
/// 211 (unregistered)
Code211 = 211,
/// 212 (unregistered)
Code212 = 212,
/// 213 (unregistered)
Code213 = 213,
/// 214 (unregistered)
Code214 = 214,
/// 215 (unregistered)
Code215 = 215,
/// 216 (unregistered)
Code216 = 216,
/// 217 (unregistered)
Code217 = 217,
/// 218 (unregistered)
Code218 = 218,
/// 219 (unregistered)
Code219 = 219,
/// 220 (unregistered)
Code220 = 220,
/// 221 (unregistered)
Code221 = 221,
/// 222 (unregistered)
Code222 = 222,
/// 223 (unregistered)
Code223 = 223,
/// 224 (unregistered)
Code224 = 224,
/// 225 (unregistered)
Code225 = 225,
/// 226 IM Used
ImUsed = 226,
/// 227 (unregistered)
Code227 = 227,
/// 228 (unregistered)
Code228 = 228,
/// 229 (unregistered)
Code229 = 229,
/// 230 (unregistered)
Code230 = 230,
/// 231 (unregistered)
Code231 = 231,
/// 232 (unregistered)
Code232 = 232,
/// 233 (unregistered)
Code233 = 233,
/// 234 (unregistered)
Code234 = 234,
/// 235 (unregistered)
Code235 = 235,
/// 236 (unregistered)
Code236 = 236,
/// 237 (unregistered)
Code237 = 237,
/// 238 (unregistered)
Code238 = 238,
/// 239 (unregistered)
Code239 = 239,
/// 240 (unregistered)
Code240 = 240,
/// 241 (unregistered)
Code241 = 241,
/// 242 (unregistered)
Code242 = 242,
/// 243 (unregistered)
Code243 = 243,
/// 244 (unregistered)
Code244 = 244,
/// 245 (unregistered)
Code245 = 245,
/// 246 (unregistered)
Code246 = 246,
/// 247 (unregistered)
Code247 = 247,
/// 248 (unregistered)
Code248 = 248,
/// 249 (unregistered)
Code249 = 249,
/// 250 (unregistered)
Code250 = 250,
/// 251 (unregistered)
Code251 = 251,
/// 252 (unregistered)
Code252 = 252,
/// 253 (unregistered)
Code253 = 253,
/// 254 (unregistered)
Code254 = 254,
/// 255 (unregistered)
Code255 = 255,
/// 256 (unregistered)
Code256 = 256,
/// 257 (unregistered)
Code257 = 257,
/// 258 (unregistered)
Code258 = 258,
/// 259 (unregistered)
Code259 = 259,
/// 260 (unregistered)
Code260 = 260,
/// 261 (unregistered)
Code261 = 261,
/// 262 (unregistered)
Code262 = 262,
/// 263 (unregistered)
Code263 = 263,
/// 264 (unregistered)
Code264 = 264,
/// 265 (unregistered)
Code265 = 265,
/// 266 (unregistered)
Code266 = 266,
/// 267 (unregistered)
Code267 = 267,
/// 268 (unregistered)
Code268 = 268,
/// 269 (unregistered)
Code269 = 269,
/// 270 (unregistered)
Code270 = 270,
/// 271 (unregistered)
Code271 = 271,
/// 272 (unregistered)
Code272 = 272,
/// 273 (unregistered)
Code273 = 273,
/// 274 (unregistered)
Code274 = 274,
/// 275 (unregistered)
Code275 = 275,
/// 276 (unregistered)
Code276 = 276,
/// 277 (unregistered)
Code277 = 277,
/// 278 (unregistered)
Code278 = 278,
/// 279 (unregistered)
Code279 = 279,
/// 280 (unregistered)
Code280 = 280,
/// 281 (unregistered)
Code281 = 281,
/// 282 (unregistered)
Code282 = 282,
/// 283 (unregistered)
Code283 = 283,
/// 284 (unregistered)
Code284 = 284,
/// 285 (unregistered)
Code285 = 285,
/// 286 (unregistered)
Code286 = 286,
/// 287 (unregistered)
Code287 = 287,
/// 288 (unregistered)
Code288 = 288,
/// 289 (unregistered)
Code289 = 289,
/// 290 (unregistered)
Code290 = 290,
/// 291 (unregistered)
Code291 = 291,
/// 292 (unregistered)
Code292 = 292,
/// 293 (unregistered)
Code293 = 293,
/// 294 (unregistered)
Code294 = 294,
/// 295 (unregistered)
Code295 = 295,
/// 296 (unregistered)
Code296 = 296,
/// 297 (unregistered)
Code297 = 297,
/// 298 (unregistered)
Code298 = 298,
/// 299 (unregistered)
Code299 = 299,
/// 300 Multiple Choices
MultipleChoices = 300,
/// 301 Moved Permanently
MovedPermanently = 301,
/// 302 Found
Found = 302,
/// 303 See Other
SeeOther = 303,
/// 304 Not Modified
NotModified = 304,
/// 305 Use Proxy
UseProxy = 305,
/// 306 Switch Proxy
SwitchProxy = 306,
/// 307 Temporary Redirect
TemporaryRedirect = 307,
/// 308 Permanent Redirect
PermanentRedirect = 308,
/// 309 (unregistered)
Code309 = 309,
/// 310 (unregistered)
Code310 = 310,
/// 311 (unregistered)
Code311 = 311,
/// 312 (unregistered)
Code312 = 312,
/// 313 (unregistered)
Code313 = 313,
/// 314 (unregistered)
Code314 = 314,
/// 315 (unregistered)
Code315 = 315,
/// 316 (unregistered)
Code316 = 316,
/// 317 (unregistered)
Code317 = 317,
/// 318 (unregistered)
Code318 = 318,
/// 319 (unregistered)
Code319 = 319,
/// 320 (unregistered)
Code320 = 320,
/// 321 (unregistered)
Code321 = 321,
/// 322 (unregistered)
Code322 = 322,
/// 323 (unregistered)
Code323 = 323,
/// 324 (unregistered)
Code324 = 324,
/// 325 (unregistered)
Code325 = 325,
/// 326 (unregistered)
Code326 = 326,
/// 327 (unregistered)
Code327 = 327,
/// 328 (unregistered)
Code328 = 328,
/// 329 (unregistered)
Code329 = 329,
/// 330 (unregistered)
Code330 = 330,
/// 331 (unregistered)
Code331 = 331,
/// 332 (unregistered)
Code332 = 332,
/// 333 (unregistered)
Code333 = 333,
/// 334 (unregistered)
Code334 = 334,
/// 335 (unregistered)
Code335 = 335,
/// 336 (unregistered)
Code336 = 336,
/// 337 (unregistered)
Code337 = 337,
/// 338 (unregistered)
Code338 = 338,
/// 339 (unregistered)
Code339 = 339,
/// 340 (unregistered)
Code340 = 340,
/// 341 (unregistered)
Code341 = 341,
/// 342 (unregistered)
Code342 = 342,
/// 343 (unregistered)
Code343 = 343,
/// 344 (unregistered)
Code344 = 344,
/// 345 (unregistered)
Code345 = 345,
/// 346 (unregistered)
Code346 = 346,
/// 347 (unregistered)
Code347 = 347,
/// 348 (unregistered)
Code348 = 348,
/// 349 (unregistered)
Code349 = 349,
/// 350 (unregistered)
Code350 = 350,
/// 351 (unregistered)
Code351 = 351,
/// 352 (unregistered)
Code352 = 352,
/// 353 (unregistered)
Code353 = 353,
/// 354 (unregistered)
Code354 = 354,
/// 355 (unregistered)
Code355 = 355,
/// 356 (unregistered)
Code356 = 356,
/// 357 (unregistered)
Code357 = 357,
/// 358 (unregistered)
Code358 = 358,
/// 359 (unregistered)
Code359 = 359,
/// 360 (unregistered)
Code360 = 360,
/// 361 (unregistered)
Code361 = 361,
/// 362 (unregistered)
Code362 = 362,
/// 363 (unregistered)
Code363 = 363,
/// 364 (unregistered)
Code364 = 364,
/// 365 (unregistered)
Code365 = 365,
/// 366 (unregistered)
Code366 = 366,
/// 367 (unregistered)
Code367 = 367,
/// 368 (unregistered)
Code368 = 368,
/// 369 (unregistered)
Code369 = 369,
/// 370 (unregistered)
Code370 = 370,
/// 371 (unregistered)
Code371 = 371,
/// 372 (unregistered)
Code372 = 372,
/// 373 (unregistered)
Code373 = 373,
/// 374 (unregistered)
Code374 = 374,
/// 375 (unregistered)
Code375 = 375,
/// 376 (unregistered)
Code376 = 376,
/// 377 (unregistered)
Code377 = 377,
/// 378 (unregistered)
Code378 = 378,
/// 379 (unregistered)
Code379 = 379,
/// 380 (unregistered)
Code380 = 380,
/// 381 (unregistered)
Code381 = 381,
/// 382 (unregistered)
Code382 = 382,
/// 383 (unregistered)
Code383 = 383,
/// 384 (unregistered)
Code384 = 384,
/// 385 (unregistered)
Code385 = 385,
/// 386 (unregistered)
Code386 = 386,
/// 387 (unregistered)
Code387 = 387,
/// 388 (unregistered)
Code388 = 388,
/// 389 (unregistered)
Code389 = 389,
/// 390 (unregistered)
Code390 = 390,
/// 391 (unregistered)
Code391 = 391,
/// 392 (unregistered)
Code392 = 392,
/// 393 (unregistered)
Code393 = 393,
/// 394 (unregistered)
Code394 = 394,
/// 395 (unregistered)
Code395 = 395,
/// 396 (unregistered)
Code396 = 396,
/// 397 (unregistered)
Code397 = 397,
/// 398 (unregistered)
Code398 = 398,
/// 399 (unregistered)
Code399 = 399,
/// 400 Bad Request
BadRequest = 400,
/// 401 Unauthorized
Unauthorized = 401,
/// 402 Payment Required
PaymentRequired = 402,
/// 403 Forbidden
Forbidden = 403,
/// 404 Not Found
NotFound = 404,
/// 405 Method Not Allowed
MethodNotAllowed = 405,
/// 406 Not Acceptable
NotAcceptable = 406,
/// 407 Proxy Authentication Required
ProxyAuthenticationRequired = 407,
/// 408 Request Timeout
RequestTimeout = 408,
/// 409 Conflict
Conflict = 409,
/// 410 Gone
Gone = 410,
/// 411 Length Required
LengthRequired = 411,
/// 412 Precondition Failed
PreconditionFailed = 412,
/// 413 Request Entity Too Large
RequestEntityTooLarge = 413,
/// 414 Request-URI Too Long
RequestUriTooLong = 414,
/// 415 Unsupported Media Type
UnsupportedMediaType = 415,
/// 416 Requested Range Not Satisfiable
RequestedRangeNotSatisfiable = 416,
/// 417 Expectation Failed
ExpectationFailed = 417,
/// 418 I'm a teapot
ImATeapot = 418,
/// 419 Authentication Timeout
AuthenticationTimeout = 419,
/// 420 (unregistered)
Code420 = 420,
/// 421 (unregistered)
Code421 = 421,
/// 422 Unprocessable Entity
UnprocessableEntity = 422,
/// 423 Locked
Locked = 423,
/// 424 Failed Dependency
FailedDependency = 424,
/// 425 Unordered Collection
UnorderedCollection = 425,
/// 426 Upgrade Required
UpgradeRequired = 426,
/// 427 (unregistered)
Code427 = 427,
/// 428 Precondition Required
PreconditionRequired = 428,
/// 429 Too Many Requests
TooManyRequests = 429,
/// 430 (unregistered)
Code430 = 430,
/// 431 Request Header Fields Too Large
RequestHeaderFieldsTooLarge = 431,
/// 432 (unregistered)
Code432 = 432,
/// 433 (unregistered)
Code433 = 433,
/// 434 (unregistered)
Code434 = 434,
/// 435 (unregistered)
Code435 = 435,
/// 436 (unregistered)
Code436 = 436,
/// 437 (unregistered)
Code437 = 437,
/// 438 (unregistered)
Code438 = 438,
/// 439 (unregistered)
Code439 = 439,
/// 440 (unregistered)
Code440 = 440,
/// 441 (unregistered)
Code441 = 441,
/// 442 (unregistered)
Code442 = 442,
/// 443 (unregistered)
Code443 = 443,
/// 444 (unregistered)
Code444 = 444,
/// 445 (unregistered)
Code445 = 445,
/// 446 (unregistered)
Code446 = 446,
/// 447 (unregistered)
Code447 = 447,
/// 448 (unregistered)
Code448 = 448,
/// 449 (unregistered)
Code449 = 449,
/// 450 (unregistered)
Code450 = 450,
/// 451 Unavailable For Legal Reasons
UnavailableForLegalReasons = 451,
/// 452 (unregistered)
Code452 = 452,
/// 453 (unregistered)
Code453 = 453,
/// 454 (unregistered)
Code454 = 454,
/// 455 (unregistered)
Code455 = 455,
/// 456 (unregistered)
Code456 = 456,
/// 457 (unregistered)
Code457 = 457,
/// 458 (unregistered)
Code458 = 458,
/// 459 (unregistered)
Code459 = 459,
/// 460 (unregistered)
Code460 = 460,
/// 461 (unregistered)
Code461 = 461,
/// 462 (unregistered)
Code462 = 462,
/// 463 (unregistered)
Code463 = 463,
/// 464 (unregistered)
Code464 = 464,
/// 465 (unregistered)
Code465 = 465,
/// 466 (unregistered)
Code466 = 466,
/// 467 (unregistered)
Code467 = 467,
/// 468 (unregistered)
Code468 = 468,
/// 469 (unregistered)
Code469 = 469,
/// 470 (unregistered)
Code470 = 470,
/// 471 (unregistered)
Code471 = 471,
/// 472 (unregistered)
Code472 = 472,
/// 473 (unregistered)
Code473 = 473,
/// 474 (unregistered)
Code474 = 474,
/// 475 (unregistered)
Code475 = 475,
/// 476 (unregistered)
Code476 = 476,
/// 477 (unregistered)
Code477 = 477,
/// 478 (unregistered)
Code478 = 478,
/// 479 (unregistered)
Code479 = 479,
/// 480 (unregistered)
Code480 = 480,
/// 481 (unregistered)
Code481 = 481,
/// 482 (unregistered)
Code482 = 482,
/// 483 (unregistered)
Code483 = 483,
/// 484 (unregistered)
Code484 = 484,
/// 485 (unregistered)
Code485 = 485,
/// 486 (unregistered)
Code486 = 486,
/// 487 (unregistered)
Code487 = 487,
/// 488 (unregistered)
Code488 = 488,
/// 489 (unregistered)
Code489 = 489,
/// 490 (unregistered)
Code490 = 490,
/// 491 (unregistered)
Code491 = 491,
/// 492 (unregistered)
Code492 = 492,
/// 493 (unregistered)
Code493 = 493,
/// 494 (unregistered)
Code494 = 494,
/// 495 (unregistered)
Code495 = 495,
/// 496 (unregistered)
Code496 = 496,
/// 497 (unregistered)
Code497 = 497,
/// 498 (unregistered)
Code498 = 498,
/// 499 (unregistered)
Code499 = 499,
/// 500 Internal Server Error
InternalServerError = 500,
/// 501 Not Implemented
NotImplemented = 501,
/// 502 Bad Gateway
BadGateway = 502,
/// 503 Service Unavailable
ServiceUnavailable = 503,
/// 504 Gateway Timeout
GatewayTimeout = 504,
/// 505 HTTP Version Not Supported
HttpVersionNotSupported = 505,
/// 506 Variant Also Negotiates
VariantAlsoNegotiates = 506,
/// 507 Insufficient Storage
InsufficientStorage = 507,
/// 508 Loop Detected
LoopDetected = 508,
/// 509 (unregistered)
Code509 = 509,
/// 510 Not Extended
NotExtended = 510,
/// 511 Network Authentication Required
NetworkAuthenticationRequired = 511,
/// 512 (unregistered)
Code512 = 512,
/// 513 (unregistered)
Code513 = 513,
/// 514 (unregistered)
Code514 = 514,
/// 515 (unregistered)
Code515 = 515,
/// 516 (unregistered)
Code516 = 516,
/// 517 (unregistered)
Code517 = 517,
/// 518 (unregistered)
Code518 = 518,
/// 519 (unregistered)
Code519 = 519,
/// 520 (unregistered)
Code520 = 520,
/// 521 (unregistered)
Code521 = 521,
/// 522 (unregistered)
Code522 = 522,
/// 523 (unregistered)
Code523 = 523,
/// 524 (unregistered)
Code524 = 524,
/// 525 (unregistered)
Code525 = 525,
/// 526 (unregistered)
Code526 = 526,
/// 527 (unregistered)
Code527 = 527,
/// 528 (unregistered)
Code528 = 528,
/// 529 (unregistered)
Code529 = 529,
/// 530 (unregistered)
Code530 = 530,
/// 531 (unregistered)
Code531 = 531,
/// 532 (unregistered)
Code532 = 532,
/// 533 (unregistered)
Code533 = 533,
/// 534 (unregistered)
Code534 = 534,
/// 535 (unregistered)
Code535 = 535,
/// 536 (unregistered)
Code536 = 536,
/// 537 (unregistered)
Code537 = 537,
/// 538 (unregistered)
Code538 = 538,
/// 539 (unregistered)
Code539 = 539,
/// 540 (unregistered)
Code540 = 540,
/// 541 (unregistered)
Code541 = 541,
/// 542 (unregistered)
Code542 = 542,
/// 543 (unregistered)
Code543 = 543,
/// 544 (unregistered)
Code544 = 544,
/// 545 (unregistered)
Code545 = 545,
/// 546 (unregistered)
Code546 = 546,
/// 547 (unregistered)
Code547 = 547,
/// 548 (unregistered)
Code548 = 548,
/// 549 (unregistered)
Code549 = 549,
/// 550 (unregistered)
Code550 = 550,
/// 551 (unregistered)
Code551 = 551,
/// 552 (unregistered)
Code552 = 552,
/// 553 (unregistered)
Code553 = 553,
/// 554 (unregistered)
Code554 = 554,
/// 555 (unregistered)
Code555 = 555,
/// 556 (unregistered)
Code556 = 556,
/// 557 (unregistered)
Code557 = 557,
/// 558 (unregistered)
Code558 = 558,
/// 559 (unregistered)
Code559 = 559,
/// 560 (unregistered)
Code560 = 560,
/// 561 (unregistered)
Code561 = 561,
/// 562 (unregistered)
Code562 = 562,
/// 563 (unregistered)
Code563 = 563,
/// 564 (unregistered)
Code564 = 564,
/// 565 (unregistered)
Code565 = 565,
/// 566 (unregistered)
Code566 = 566,
/// 567 (unregistered)
Code567 = 567,
/// 568 (unregistered)
Code568 = 568,
/// 569 (unregistered)
Code569 = 569,
/// 570 (unregistered)
Code570 = 570,
/// 571 (unregistered)
Code571 = 571,
/// 572 (unregistered)
Code572 = 572,
/// 573 (unregistered)
Code573 = 573,
/// 574 (unregistered)
Code574 = 574,
/// 575 (unregistered)
Code575 = 575,
/// 576 (unregistered)
Code576 = 576,
/// 577 (unregistered)
Code577 = 577,
/// 578 (unregistered)
Code578 = 578,
/// 579 (unregistered)
Code579 = 579,
/// 580 (unregistered)
Code580 = 580,
/// 581 (unregistered)
Code581 = 581,
/// 582 (unregistered)
Code582 = 582,
/// 583 (unregistered)
Code583 = 583,
/// 584 (unregistered)
Code584 = 584,
/// 585 (unregistered)
Code585 = 585,
/// 586 (unregistered)
Code586 = 586,
/// 587 (unregistered)
Code587 = 587,
/// 588 (unregistered)
Code588 = 588,
/// 589 (unregistered)
Code589 = 589,
/// 590 (unregistered)
Code590 = 590,
/// 591 (unregistered)
Code591 = 591,
/// 592 (unregistered)
Code592 = 592,
/// 593 (unregistered)
Code593 = 593,
/// 594 (unregistered)
Code594 = 594,
/// 595 (unregistered)
Code595 = 595,
/// 596 (unregistered)
Code596 = 596,
/// 597 (unregistered)
Code597 = 597,
/// 598 (unregistered)
Code598 = 598,
/// 599 (unregistered)
Code599 = 599,
}
impl StatusCode {
/// Get the standardised `Reason-Phrase` for this status code.
///
/// This is mostly here for servers writing responses, but could potentially have application at
/// other times.
///
/// The reason phrase is defined as being exclusively for human readers. You should avoid
/// deriving any meaning from it at all costs.
///
/// Bear in mind also that in HTTP/2.0 the reason phrase is abolished from transmission, and so
/// this canonical reason phrase really is the only reason phrase you’ll find.
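///
/// For example, registered codes map to their phrase while unregistered ones
/// yield `None` (a small doc sketch mirroring the match arms below):
///
/// ```rust
/// # use hyper::status::StatusCode;
/// assert_eq!(StatusCode::ImATeapot.canonical_reason(), Some("I'm a teapot"));
/// assert_eq!(StatusCode::Code123.canonical_reason(), None);
/// ```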
pub fn canonical_reason(&self) -> Option<&'static str> {
match *self {
StatusCode::Continue => Some("Continue"),
StatusCode::SwitchingProtocols => Some("Switching Protocols"),
StatusCode::Processing => Some("Processing"),
StatusCode::Code103 => None,
StatusCode::Code104 => None,
StatusCode::Code105 => None,
StatusCode::Code106 => None,
StatusCode::Code107 => None,
StatusCode::Code108 => None,
StatusCode::Code109 => None,
StatusCode::Code110 => None,
StatusCode::Code111 => None,
StatusCode::Code112 => None,
StatusCode::Code113 => None,
StatusCode::Code114 => None,
StatusCode::Code115 => None,
StatusCode::Code116 => None,
StatusCode::Code117 => None,
StatusCode::Code118 => None,
StatusCode::Code119 => None,
StatusCode::Code120 => None,
StatusCode::Code121 => None,
StatusCode::Code122 => None,
StatusCode::Code123 => None,
StatusCode::Code124 => None,
StatusCode::Code125 => None,
StatusCode::Code126 => None,
StatusCode::Code127 => None,
StatusCode::Code128 => None,
StatusCode::Code129 => None,
StatusCode::Code130 => None,
StatusCode::Code131 => None,
StatusCode::Code132 => None,
StatusCode::Code133 => None,
StatusCode::Code134 => None,
StatusCode::Code135 => None,
StatusCode::Code136 => None,
StatusCode::Code137 => None,
StatusCode::Code138 => None,
StatusCode::Code139 => None,
StatusCode::Code140 => None,
StatusCode::Code141 => None,
StatusCode::Code142 => None,
StatusCode::Code143 => None,
StatusCode::Code144 => None,
StatusCode::Code145 => None,
StatusCode::Code146 => None,
StatusCode::Code147 => None,
StatusCode::Code148 => None,
StatusCode::Code149 => None,
StatusCode::Code150 => None,
StatusCode::Code151 => None,
StatusCode::Code152 => None,
StatusCode::Code153 => None,
StatusCode::Code154 => None,
StatusCode::Code155 => None,
StatusCode::Code156 => None,
StatusCode::Code157 => None,
StatusCode::Code158 => None,
StatusCode::Code159 => None,
StatusCode::Code160 => None,
StatusCode::Code161 => None,
StatusCode::Code162 => None,
StatusCode::Code163 => None,
StatusCode::Code164 => None,
StatusCode::Code165 => None,
StatusCode::Code166 => None,
StatusCode::Code167 => None,
StatusCode::Code168 => None,
StatusCode::Code169 => None,
StatusCode::Code170 => None,
StatusCode::Code171 => None,
StatusCode::Code172 => None,
StatusCode::Code173 => None,
StatusCode::Code174 => None,
StatusCode::Code175 => None,
StatusCode::Code176 => None,
StatusCode::Code177 => None,
StatusCode::Code178 => None,
StatusCode::Code179 => None,
StatusCode::Code180 => None,
StatusCode::Code181 => None,
StatusCode::Code182 => None,
StatusCode::Code183 => None,
StatusCode::Code184 => None,
StatusCode::Code185 => None,
StatusCode::Code186 => None,
StatusCode::Code187 => None,
StatusCode::Code188 => None,
StatusCode::Code189 => None,
StatusCode::Code190 => None,
StatusCode::Code191 => None,
StatusCode::Code192 => None,
StatusCode::Code193 => None,
StatusCode::Code194 => None,
StatusCode::Code195 => None,
StatusCode::Code196 => None,
StatusCode::Code197 => None,
StatusCode::Code198 => None,
StatusCode::Code199 => None,
StatusCode::Ok => Some("OK"),
StatusCode::Created => Some("Created"),
StatusCode::Accepted => Some("Accepted"),
StatusCode::NonAuthoritativeInformation => Some("Non-Authoritative Information"),
StatusCode::NoContent => Some("No Content"),
StatusCode::ResetContent => Some("Reset Content"),
StatusCode::PartialContent => Some("Partial Content"),
StatusCode::MultiStatus => Some("Multi-Status"),
StatusCode::AlreadyReported => Some("Already Reported"),
StatusCode::Code209 => None,
StatusCode::Code210 => None,
StatusCode::Code211 => None,
StatusCode::Code212 => None,
StatusCode::Code213 => None,
StatusCode::Code214 => None,
StatusCode::Code215 => None,
StatusCode::Code216 => None,
StatusCode::Code217 => None,
StatusCode::Code218 => None,
StatusCode::Code219 => None,
StatusCode::Code220 => None,
StatusCode::Code221 => None,
StatusCode::Code222 => None,
StatusCode::Code223 => None,
StatusCode::Code224 => None,
StatusCode::Code225 => None,
StatusCode::ImUsed => Some("IM Used"),
StatusCode::Code227 => None,
StatusCode::Code228 => None,
StatusCode::Code229 => None,
StatusCode::Code230 => None,
StatusCode::Code231 => None,
StatusCode::Code232 => None,
StatusCode::Code233 => None,
StatusCode::Code234 => None,
StatusCode::Code235 => None,
StatusCode::Code236 => None,
StatusCode::Code237 => None,
StatusCode::Code238 => None,
StatusCode::Code239 => None,
StatusCode::Code240 => None,
StatusCode::Code241 => None,
StatusCode::Code242 => None,
StatusCode::Code243 => None,
StatusCode::Code244 => None,
StatusCode::Code245 => None,
StatusCode::Code246 => None,
StatusCode::Code247 => None,
StatusCode::Code248 => None,
StatusCode::Code249 => None,
StatusCode::Code250 => None,
StatusCode::Code251 => None,
StatusCode::Code252 => None,
StatusCode::Code253 => None,
StatusCode::Code254 => None,
StatusCode::Code255 => None,
StatusCode::Code256 => None,
StatusCode::Code257 => None,
StatusCode::Code258 => None,
StatusCode::Code259 => None,
StatusCode::Code260 => None,
StatusCode::Code261 => None,
StatusCode::Code262 => None,
StatusCode::Code263 => None,
StatusCode::Code264 => None,
StatusCode::Code265 => None,
StatusCode::Code266 => None,
StatusCode::Code267 => None,
StatusCode::Code268 => None,
StatusCode::Code269 => None,
StatusCode::Code270 => None,
StatusCode::Code271 => None,
StatusCode::Code272 => None,
StatusCode::Code273 => None,
StatusCode::Code274 => None,
StatusCode::Code275 => None,
StatusCode::Code276 => None,
StatusCode::Code277 => None,
StatusCode::Code278 => None,
StatusCode::Code279 => None,
StatusCode::Code280 => None,
StatusCode::Code281 => None,
StatusCode::Code282 => None,
StatusCode::Code283 => None,
StatusCode::Code284 => None,
StatusCode::Code285 => None,
StatusCode::Code286 => None,
StatusCode::Code287 => None,
StatusCode::Code288 => None,
StatusCode::Code289 => None,
StatusCode::Code290 => None,
StatusCode::Code291 => None,
StatusCode::Code292 => None,
StatusCode::Code293 => None,
StatusCode::Code294 => None,
StatusCode::Code295 => None,
StatusCode::Code296 => None,
StatusCode::Code297 => None,
StatusCode::Code298 => None,
StatusCode::Code299 => None,
StatusCode::MultipleChoices => Some("Multiple Choices"),
StatusCode::MovedPermanently => Some("Moved Permanently"),
StatusCode::Found => Some("Found"),
StatusCode::SeeOther => Some("See Other"),
StatusCode::NotModified => Some("Not Modified"),
StatusCode::UseProxy => Some("Use Proxy"),
StatusCode::SwitchProxy => Some("Switch Proxy"),
StatusCode::TemporaryRedirect => Some("Temporary Redirect"),
StatusCode::PermanentRedirect => Some("Permanent Redirect"),
StatusCode::Code309 => None,
StatusCode::Code310 => None,
StatusCode::Code311 => None,
StatusCode::Code312 => None,
StatusCode::Code313 => None,
StatusCode::Code314 => None,
StatusCode::Code315 => None,
StatusCode::Code316 => None,
StatusCode::Code317 => None,
StatusCode::Code318 => None,
StatusCode::Code319 => None,
StatusCode::Code320 => None,
StatusCode::Code321 => None,
StatusCode::Code322 => None,
StatusCode::Code323 => None,
StatusCode::Code324 => None,
StatusCode::Code325 => None,
StatusCode::Code326 => None,
StatusCode::Code327 => None,
StatusCode::Code328 => None,
StatusCode::Code329 => None,
StatusCode::Code330 => None,
StatusCode::Code331 => None,
StatusCode::Code332 => None,
StatusCode::Code333 => None,
StatusCode::Code334 => None,
StatusCode::Code335 => None,
StatusCode::Code336 => None,
StatusCode::Code337 => None,
StatusCode::Code338 => None,
StatusCode::Code339 => None,
StatusCode::Code340 => None,
StatusCode::Code341 => None,
StatusCode::Code342 => None,
StatusCode::Code343 => None,
StatusCode::Code344 => None,
StatusCode::Code345 => None,
StatusCode::Code346 => None,
StatusCode::Code347 => None,
StatusCode::Code348 => None,
StatusCode::Code349 => None,
StatusCode::Code350 => None,
StatusCode::Code351 => None,
StatusCode::Code352 => None,
StatusCode::Code353 => None,
StatusCode::Code354 => None,
StatusCode::Code355 => None,
StatusCode::Code356 => None,
StatusCode::Code357 => None,
StatusCode::Code358 => None,
StatusCode::Code359 => None,
StatusCode::Code360 => None,
StatusCode::Code361 => None,
StatusCode::Code362 => None,
StatusCode::Code363 => None,
StatusCode::Code364 => None,
StatusCode::Code365 => None,
StatusCode::Code366 => None,
StatusCode::Code367 => None,
StatusCode::Code368 => None,
StatusCode::Code369 => None,
StatusCode::Code370 => None,
StatusCode::Code371 => None,
StatusCode::Code372 => None,
StatusCode::Code373 => None,
StatusCode::Code374 => None,
StatusCode::Code375 => None,
StatusCode::Code376 => None,
StatusCode::Code377 => None,
StatusCode::Code378 => None,
StatusCode::Code379 => None,
StatusCode::Code380 => None,
StatusCode::Code381 => None,
StatusCode::Code382 => None,
StatusCode::Code383 => None,
StatusCode::Code384 => None,
StatusCode::Code385 => None,
StatusCode::Code386 => None,
StatusCode::Code387 => None,
StatusCode::Code388 => None,
StatusCode::Code389 => None,
StatusCode::Code390 => None,
StatusCode::Code391 => None,
StatusCode::Code392 => None,
StatusCode::Code393 => None,
StatusCode::Code394 => None,
StatusCode::Code395 => None,
StatusCode::Code396 => None,
StatusCode::Code397 => None,
StatusCode::Code398 => None,
StatusCode::Code399 => None,
StatusCode::BadRequest => Some("Bad Request"),
StatusCode::Unauthorized => Some("Unauthorized"),
StatusCode::PaymentRequired => Some("Payment Required"),
StatusCode::Forbidden => Some("Forbidden"),
StatusCode::NotFound => Some("Not Found"),
StatusCode::MethodNotAllowed => Some("Method Not Allowed"),
StatusCode::NotAcceptable => Some("Not Acceptable"),
StatusCode::ProxyAuthenticationRequired => Some("Proxy Authentication Required"),
StatusCode::RequestTimeout => Some("Request Timeout"),
StatusCode::Conflict => Some("Conflict"),
StatusCode::Gone => Some("Gone"),
StatusCode::LengthRequired => Some("Length Required"),
StatusCode::PreconditionFailed => Some("Precondition Failed"),
StatusCode::RequestEntityTooLarge => Some("Request Entity Too Large"),
StatusCode::RequestUriTooLong => Some("Request-URI Too Long"),
StatusCode::UnsupportedMediaType => Some("Unsupported Media Type"),
StatusCode::RequestedRangeNotSatisfiable => Some("Requested Range Not Satisfiable"),
StatusCode::ExpectationFailed => Some("Expectation Failed"),
StatusCode::ImATeapot => Some("I'm a teapot"),
StatusCode::AuthenticationTimeout => Some("Authentication Timeout"),
StatusCode::Code420 => None,
StatusCode::Code421 => None,
StatusCode::UnprocessableEntity => Some("Unprocessable Entity"),
StatusCode::Locked => Some("Locked"),
StatusCode::FailedDependency => Some("Failed Dependency"),
StatusCode::UnorderedCollection => Some("Unordered Collection"),
StatusCode::UpgradeRequired => Some("Upgrade Required"),
StatusCode::Code427 => None,
StatusCode::PreconditionRequired => Some("Precondition Required"),
StatusCode::TooManyRequests => Some("Too Many Requests"),
StatusCode::Code430 => None,
StatusCode::RequestHeaderFieldsTooLarge => Some("Request Header Fields Too Large"),
StatusCode::Code432 => None,
StatusCode::Code433 => None,
StatusCode::Code434 => None,
StatusCode::Code435 => None,
StatusCode::Code436 => None,
StatusCode::Code437 => None,
StatusCode::Code438 => None,
StatusCode::Code439 => None,
StatusCode::Code440 => None,
StatusCode::Code441 => None,
StatusCode::Code442 => None,
StatusCode::Code443 => None,
StatusCode::Code444 => None,
StatusCode::Code445 => None,
StatusCode::Code446 => None,
StatusCode::Code447 => None,
StatusCode::Code448 => None,
StatusCode::Code449 => None,
StatusCode::Code450 => None,
StatusCode::UnavailableForLegalReasons => Some("Unavailable For Legal Reasons"),
StatusCode::Code452 => None,
StatusCode::Code453 => None,
StatusCode::Code454 => None,
StatusCode::Code455 => None,
StatusCode::Code456 => None,
StatusCode::Code457 => None,
StatusCode::Code458 => None,
StatusCode::Code459 => None,
StatusCode::Code460 => None,
StatusCode::Code461 => None,
StatusCode::Code462 => None,
StatusCode::Code463 => None,
StatusCode::Code464 => None,
StatusCode::Code465 => None,
StatusCode::Code466 => None,
StatusCode::Code467 => None,
StatusCode::Code468 => None,
StatusCode::Code469 => None,
StatusCode::Code470 => None,
StatusCode::Code471 => None,
StatusCode::Code472 => None,
StatusCode::Code473 => None,
StatusCode::Code474 => None,
StatusCode::Code475 => None,
StatusCode::Code476 => None,
StatusCode::Code477 => None,
StatusCode::Code478 => None,
StatusCode::Code479 => None,
StatusCode::Code480 => None,
StatusCode::Code481 => None,
StatusCode::Code482 => None,
StatusCode::Code483 => None,
StatusCode::Code484 => None,
StatusCode::Code485 => None,
StatusCode::Code486 => None,
StatusCode::Code487 => None,
StatusCode::Code488 => None,
StatusCode::Code489 => None,
StatusCode::Code490 => None,
StatusCode::Code491 => None,
StatusCode::Code492 => None,
StatusCode::Code493 => None,
StatusCode::Code494 => None,
StatusCode::Code495 => None,
StatusCode::Code496 => None,
StatusCode::Code497 => None,
StatusCode::Code498 => None,
StatusCode::Code499 => None,
StatusCode::InternalServerError => Some("Internal Server Error"),
StatusCode::NotImplemented => Some("Not Implemented"),
StatusCode::BadGateway => Some("Bad Gateway"),
StatusCode::ServiceUnavailable => Some("Service Unavailable"),
StatusCode::GatewayTimeout => Some("Gateway Timeout"),
StatusCode::HttpVersionNotSupported => Some("HTTP Version Not Supported"),
StatusCode::VariantAlsoNegotiates => Some("Variant Also Negotiates"),
StatusCode::InsufficientStorage => Some("Insufficient Storage"),
StatusCode::LoopDetected => Some("Loop Detected"),
StatusCode::Code509 => None,
StatusCode::NotExtended => Some("Not Extended"),
StatusCode::NetworkAuthenticationRequired => Some("Network Authentication Required"),
StatusCode::Code512 => None,
StatusCode::Code513 => None,
StatusCode::Code514 => None,
StatusCode::Code515 => None,
StatusCode::Code516 => None,
StatusCode::Code517 => None,
StatusCode::Code518 => None,
StatusCode::Code519 => None,
StatusCode::Code520 => None,
StatusCode::Code521 => None,
StatusCode::Code522 => None,
StatusCode::Code523 => None,
StatusCode::Code524 => None,
StatusCode::Code525 => None,
StatusCode::Code526 => None,
StatusCode::Code527 => None,
StatusCode::Code528 => None,
StatusCode::Code529 => None,
StatusCode::Code530 => None,
StatusCode::Code531 => None,
StatusCode::Code532 => None,
StatusCode::Code533 => None,
StatusCode::Code534 => None,
StatusCode::Code535 => None,
StatusCode::Code536 => None,
StatusCode::Code537 => None,
StatusCode::Code538 => None,
StatusCode::Code539 => None,
StatusCode::Code540 => None,
StatusCode::Code541 => None,
StatusCode::Code542 => None,
StatusCode::Code543 => None,
StatusCode::Code544 => None,
StatusCode::Code545 => None,
StatusCode::Code546 => None,
StatusCode::Code547 => None,
StatusCode::Code548 => None,
StatusCode::Code549 => None,
StatusCode::Code550 => None,
StatusCode::Code551 => None,
StatusCode::Code552 => None,
StatusCode::Code553 => None,
StatusCode::Code554 => None,
StatusCode::Code555 => None,
StatusCode::Code556 => None,
StatusCode::Code557 => None,
StatusCode::Code558 => None,
StatusCode::Code559 => None,
StatusCode::Code560 => None,
StatusCode::Code561 => None,
StatusCode::Code562 => None,
StatusCode::Code563 => None,
StatusCode::Code564 => None,
StatusCode::Code565 => None,
StatusCode::Code566 => None,
StatusCode::Code567 => None,
StatusCode::Code568 => None,
StatusCode::Code569 => None,
StatusCode::Code570 => None,
StatusCode::Code571 => None,
StatusCode::Code572 => None,
StatusCode::Code573 => None,
StatusCode::Code574 => None,
StatusCode::Code575 => None,
StatusCode::Code576 => None,
StatusCode::Code577 => None,
StatusCode::Code578 => None,
StatusCode::Code579 => None,
StatusCode::Code580 => None,
StatusCode::Code581 => None,
StatusCode::Code582 => None,
StatusCode::Code583 => None,
StatusCode::Code584 => None,
StatusCode::Code585 => None,
StatusCode::Code586 => None,
StatusCode::Code587 => None,
StatusCode::Code588 => None,
StatusCode::Code589 => None,
StatusCode::Code590 => None,
StatusCode::Code591 => None,
StatusCode::Code592 => None,
StatusCode::Code593 => None,
StatusCode::Code594 => None,
StatusCode::Code595 => None,
StatusCode::Code596 => None,
StatusCode::Code597 => None,
StatusCode::Code598 => None,
StatusCode::Code599 => None,
}
}
/// Determine the class of a status code, based on its first digit.
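    /// For example, `NotFound` (404) and the unregistered `Code432` both yield
    /// `StatusClass::ClientError`, since only the leading digit is considered.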
pub fn class(&self) -> StatusClass {
let code = *self as u16; // Range of possible values: 100..599.
// We could match 100..199 &c., but this way we avoid unreachable!() at the end.
if code < 200 {
StatusClass::Informational
} else if code < 300 {
StatusClass::Success
} else if code < 400 {
StatusClass::Redirection
} else if code < 500 {
StatusClass::ClientError
} else {
StatusClass::ServerError
}
}
/// Check if class is Informational.
pub fn is_informational(&self) -> bool {
self.class() == StatusClass::Informational
}
/// Check if class is Success.
pub fn is_success(&self) -> bool {
self.class() == StatusClass::Success
}
/// Check if class is Redirection.
pub fn is_redirection(&self) -> bool {
self.class() == StatusClass::Redirection
}
/// Check if class is ClientError.
pub fn is_client_error(&self) -> bool {
self.class() == StatusClass::ClientError
}
/// Check if class is ServerError.
pub fn is_server_error(&self) -> bool {
self.class() == StatusClass::ServerError
}
}
impl Copy for StatusCode {}
/// Formats the status code, *including* the canonical reason.
///
/// ```rust
/// # use hyper::status::StatusCode::{ImATeapot, Code123};
/// # fn main() {
/// assert_eq!(format!("{}", ImATeapot),
///            "418 I'm a teapot");
/// assert_eq!(format!("{}", Code123),
///            "123 <unknown status code>");
/// # }
/// ```
///
/// If you wish to just include the number, cast to a u16 instead.
impl fmt::Display for StatusCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", *self as u16,
self.canonical_reason().unwrap_or("<unknown status code>"))
}
}
// Specified manually because the codegen for derived is slow (at the time of writing on the machine
// of writing, 1.2 seconds) and verbose (though the optimiser cuts it down to size).
impl PartialEq for StatusCode {
#[inline]
fn eq(&self, other: &StatusCode) -> bool {
*self as u16 == *other as u16
}
}
impl Eq for StatusCode {}
// Ditto (though #[derive(Clone)] only takes about 0.4 seconds).
impl Clone for StatusCode {
#[inline]
fn clone(&self) -> StatusCode {
*self
}
}
// I didn't measure the other common derivable traits, but I'd guess they would be slow too.
impl FromPrimitive for StatusCode {
fn from_i64(n: i64) -> Option<StatusCode> {
if n < 100 || n > 599 {
None
} else {
Some(unsafe { transmute::<u16, StatusCode>(n as u16) })
}
}
fn from_u64(n: u64) -> Option<StatusCode> {
if n < 100 || n > 599 {
None
} else {
Some(unsafe { transmute::<u16, StatusCode>(n as u16) })
}
}
}
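// Sketch of the intent: FromPrimitive::from_u64(404) falls inside 100..599 and
// yields Some(StatusCode::NotFound), while from_u64(42) is out of range and yields None.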
impl PartialOrd for StatusCode {
#[inline]
fn partial_cmp(&self, other: &StatusCode) -> Option<Ordering> {
(*self as u16).partial_cmp(&(*other as u16))
}
}
impl Ord for StatusCode {
#[inline]
fn cmp(&self, other: &StatusCode) -> Ordering {
if *self < *other {
Less
} else if *self > *other {
Greater
} else {
Equal
}
}
}
impl ToPrimitive for StatusCode {
fn to_i64(&self) -> Option<i64> {
Some(*self as i64)
}
fn to_u64(&self) -> Option<u64> {
Some(*self as u64)
}
}
/// The class of an HTTP `Status-Code`.
///
/// [RFC 2616, section 6.1.1 (Status Code and Reason
/// Phrase)](https://tools.ietf.org/html/rfc2616#section-6.1.1):
///
/// > The first digit of the Status-Code defines the class of response. The
/// > last two digits do not have any categorization role.
/// >
/// > ...
/// >
/// > HTTP status codes are extensible. HTTP applications are not required
/// > to understand the meaning of all registered status codes, though such
/// > understanding is obviously desirable. However, applications MUST
/// > understand the class of any status code, as indicated by the first
/// > digit, and treat any unrecognized response as being equivalent to the
/// > x00 status code of that class, with the exception that an
/// > unrecognized response MUST NOT be cached. For example, if an
/// > unrecognized status code of 431 is received by the client, it can
/// > safely assume that there was something wrong with its request and
/// > treat the response as if it had received a 400 status code. In such
/// > cases, user agents SHOULD present to the user the entity returned
/// > with the response, since that entity is likely to include human-
/// > readable information which will explain the unusual status.
///
/// This can be used in cases where a status code’s meaning is unknown, also,
/// to get the appropriate *category* of status.
///
/// For HTTP/2.0, the 1xx Informational class is invalid.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy)]
pub enum StatusClass {
/// 1xx: Informational - Request received, continuing process
Informational = 100,
/// 2xx: Success - The action was successfully received, understood, and accepted
Success = 200,
/// 3xx: Redirection - Further action must be taken in order to complete the request
Redirection = 300,
/// 4xx: Client Error - The request contains bad syntax or cannot be fulfilled
ClientError = 400,
/// 5xx: Server Error - The server failed to fulfill an apparently valid request
ServerError = 500,
}
impl StatusClass {
/// Get the default status code for the class.
///
/// This produces the x00 status code; thus, for `ClientError` (4xx), for example, this will
/// produce `BadRequest` (400):
///
/// ```rust
/// # use hyper::status::StatusClass::ClientError;
/// # use hyper::status::StatusCode::BadRequest;
/// assert_eq!(ClientError.default_code(), BadRequest);
/// ```
///
/// The use for this is outlined in [RFC 2616, section 6.1.1 (Status Code and Reason
/// Phrase)](https://tools.ietf.org/html/rfc2616#section-6.1.1):
///
/// > HTTP status codes are extensible. HTTP applications are not required
/// > to understand the meaning of all registered status codes, though such
/// > understanding is obviously desirable. However, applications MUST
/// > understand the class of any status code, as indicated by the first
/// > digit, and treat any unrecognized response as being equivalent to the
/// > x00 status code of that class, with the exception that an
/// > unrecognized response MUST NOT be cached. For example, if an
/// > unrecognized status code of 431 is received by the client, it can
/// > safely assume that there was something wrong with its request and
/// > treat the response as if it had received a 400 status code. In such
/// > cases, user agents SHOULD present to the user the entity returned
/// > with the response, since that entity is likely to include human-
/// > readable information which will explain the unusual status.
///
/// This is demonstrated thusly (I’ll use 432 rather than 431 as 431 *is* now in use):
///
/// ```rust
/// # use hyper::status::StatusCode::{Code432, BadRequest};
/// // Suppose we have received this status code.
/// let status = Code432;
///
/// // Uh oh! Don’t know what to do with it.
/// // Let’s fall back to the default:
/// let status = status.class().default_code();
///
/// // And look! That is 400 Bad Request.
/// assert_eq!(status, BadRequest);
/// // So now let’s treat it as that.
/// ```
#[inline]
pub fn default_code(&self) -> StatusCode {
unsafe { transmute::<StatusClass, StatusCode>(*self) }
}
}
impl ToPrimitive for StatusClass {
fn to_i64(&self) -> Option<i64> {
Some(*self as i64)
}
fn to_u64(&self) -> Option<u64> {
Some(*self as u64)
}
}<|fim▁end|> | /// For HTTP/2.0, statuses belonging to the 1xx Informational class are invalid. |
<|file_name|>init.js<|end_file_name|><|fim▁begin|>'use strict'
// Globals: the stage (the parent of all graphics), the canvas object (used for resizing), and the renderer (the PIXI framebuffer).
var stage = new PIXI.Container();
var canvas = document.getElementById("game");
var renderer = PIXI.autoDetectRenderer(1024, 570, {view:document.getElementById("game")});
var graphics = new PIXI.Graphics();
// Create or grab the application
var app = app || {};
function init(){
resize();
renderer = PIXI.autoDetectRenderer(1024, 570, {view:document.getElementById("game")} );
renderer.backgroundColor = 0x50503E;
//level();
canvas.focus();
app.Game.init(renderer, window, canvas, stage);
}
function resize(){
var gameWidth = window.innerWidth;
var gameHeight = window.innerHeight;
var scaleToFitX = gameWidth / 1000;
var scaleToFitY = gameHeight / 500;
	// Scaling approach adapted from: https://www.davrous.com/2012/04/06/modernizing-your-html5-canvas-games-part-1-hardware-scaling-css3/
var optimalRatio = Math.min(scaleToFitX, scaleToFitY);
var currentScreenRatio = gameWidth / gameHeight;
if(currentScreenRatio >= 1.77 && currentScreenRatio <= 1.79) {
canvas.style.width = gameWidth + "px";
canvas.style.height = gameHeight + "px";<|fim▁hole|> }
}
//do not REMOVE
/*
// Takes two arrays:
// w_array is an array of column width values [w1, w2, w3, ...]; y_array is a
// 3D array set up as [[row 1], [row 2], [row 3]], where each row is an array
// of (y, l) pairs in which y is the fixed corner of the
// rectangle and l is the height of the rectangle.
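// e.g. w_array = [100, 100] with y_array = [[[0, 50], [0, 50]]] would describe
// a single row of two 100px-wide rectangles anchored at y = 0, each 50px tall.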
function level(){
// drawRect( xstart, ystart, x size side, y size side)
app.levelData = {
w_array: [102 * 2, 102 * 2, 102 * 2, 102 * 2, 102 * 2],
y_array: [
[
[0 * 2, 90 * 2],
[0 * 2, 90 * 2],
[0 * 2, 90 * 2],
[0 * 2, 90 * 2],
[0 * 2, 90 * 2],
],
[
[90 * 2, 90 * 2],
[90 * 2, 90 * 2],
[90 * 2, 90 * 2],
[90 * 2, 90 * 2],
[90 * 2, 90 * 2],
],
[
[180 * 2, 90 * 2],
[180 * 2, 90 * 2],
[180 * 2, 90 * 2],
[180 * 2, 90 * 2],
[180 * 2, 90 * 2],
],
[
[270 * 2, 90 * 2],
[270 * 2, 90 * 2],
[270 * 2, 90 * 2],
[270 * 2, 90 * 2],
[270 * 2, 90 * 2],
]
],
p_array: [
[50,50,50,50],
[50,450,50,50],
[920,50,50,50],
[920,450,50,50],
]
};
// set a fill and a line style again and draw a rectangle
graphics.lineStyle(2, 0x995702, 1);
graphics.beginFill(0x71FF33, 1);
var x = 0;
//reset the x
x = 0;
//post fence post
for(var h = 0, hlen = app.levelData.y_array.length; h < hlen; h++){
for( var i = 0, len = app.levelData.w_array.length; i < len; i++){
//setup the y value
graphics.drawRect(x, app.levelData.y_array[h][i][0], app.levelData.w_array[i], app.levelData.y_array[h][i][1]);
x += app.levelData.w_array[i];
}
//reset the x
x = 0;
}
graphics.lineStyle(2, 0x3472D8, 1);
graphics.beginFill(0x3472D8, 1);
for(var i = 0, len = app.levelData.p_array.length; i < len; i++){
graphics.drawRect(app.levelData.p_array[i][0], app.levelData.p_array[i][1], app.levelData.p_array[i][2], app.levelData.p_array[i][3]);
}
stage.addChild(graphics);
}
// Reads in a JSON object with data
function readJSONFile( filePath ){
$.getJSON( filePath, function(){} )
.done( function( data ){
console.log( "SUCCESS: File read from " + filePath );
app.levelData = data;
} )
.fail( function( ){
console.log( "FAILED: File at " + filePath );
} );
}
*/
window.addEventListener('resize', resize, false);
window.addEventListener('orientationchange', resize, false);<|fim▁end|> | }else{
canvas.style.width = 1000 * optimalRatio + "px";
canvas.style.height = 500 * optimalRatio + "px"; |
<|file_name|>DbGroupBreastModel.java<|end_file_name|><|fim▁begin|>package com.ms.meizinewsapplication.features.meizi.model;<|fim▁hole|>import com.ms.retrofitlibrary.web.MyOkHttpClient;
import org.loader.model.OnModelListener;
import java.util.List;
import rx.Observable;
import rx.Subscription;
/**
* Created by 啟成 on 2016/3/15.
*/
public class DbGroupBreastModel extends DbGroupModel {
private String pager_offset;
public Subscription loadWeb(Context context, OnModelListener<List<ImgItem>> listener, String pager_offset) {
this.pager_offset = pager_offset;
return loadWeb(context, listener);
}
@Override
protected Subscription reSubscription(Context context, OnModelListener<List<ImgItem>> listener) {
Observable<String> dbGroupBreast = getDbGroup().RxDbGroupBreast(
MyOkHttpClient.getCacheControl(context),
pager_offset
);
return rxDbGroup(dbGroupBreast, listener);
}
}<|fim▁end|> |
import android.content.Context;
import com.ms.meizinewsapplication.features.base.pojo.ImgItem; |
<|file_name|>source_module.py<|end_file_name|><|fim▁begin|>class MyClass():<|fim▁hole|><|fim▁end|> | @classmethod
def foo_method(cls):
spam = "eggs" |
<|file_name|>import-duplicate-src-1.js<|end_file_name|><|fim▁begin|>// namespace line first
import * as XA from "X";<|fim▁hole|><|fim▁end|> | import XD, { X2 } from "X";
import { X3 } from "X"; |
<|file_name|>generateTemplate.py<|end_file_name|><|fim▁begin|>def getSpeciesValue(species):
"""
Return the initial amount of a species.
If species.isSetInitialAmount() == True, return the initial amount.
Otherwise, return the initial concentration.
***** args *****
species: a libsbml.Species object
"""
if species.isSetInitialAmount():
return species.getInitialAmount()
else:
return species.getInitialConcentration()
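# Sketch (assumes a libsbml model is already loaded):
#   value = getSpeciesValue(model.getSpecies(0))  # amount if set, else concentration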
def generateTemplate(source, filename, sumname, dataname=None):
"""
	Generate a model summary file (sumname) and a template file (filename) from one or more SBML source files.
***** args *****
source: a list of strings.
	Each entry describes an SBML file.
***** kwargs *****
filename: a string.
The name of the template to be generated.
	sumname: a string.
The name of the summary to be generated.
dataname: a string.
The name of a datafile.
"""
out_file=open(filename,"w")
sum_file=open(sumname,"w")
have_data = False
times = []
vars = []
nvar = 0
first = True
if dataname != None:
have_data = True
df = open(dataname,'r')
for line in df:
strs = str(line).split(' ')
vals = [float(i) for i in strs]
if first==True:
for j in range(1,len(vals)):
vars.append([])
first=False
nvar = len(vals)-1
times.append(vals[0])
for j in range(1,len(vals)):
vars[j-1].append(vals[j])
#print times
#print vars
out_file.write("<input>\n\n")
out_file.write("######################## number of models\n\n")
out_file.write("# Number of models for which details are described in this input file\n")
out_file.write("<modelnumber> "+repr(len(source))+ " </modelnumber>\n\n")
out_file.write("######################## restart\n\n")
out_file.write("# Restart from previous (pickled) population?\n")
out_file.write("<restart> False </restart>\n\n")
out_file.write("######################## epsilon schedule\n\n")
out_file.write("# Automatic epsilon schedule. Provide a vector of final epsilons and the alpha (defaults to 0.9)\n")
out_file.write("<autoepsilon>\n")
out_file.write("<finalepsilon> 1.0 </finalepsilon>\n")
out_file.write("<alpha> 0.9 </alpha>\n")
out_file.write("</autoepsilon>\n\n")
out_file.write("# OR\n")
out_file.write("# Series of epsilons. (Whitespace delimited list)\n")
out_file.write("# Multiple epsilon schedules can be specified by giving additional vectors enclosed in <e2> </e2>, <e3> </e3> etc\n")
out_file.write("# NOTE: the parser always reads them in order and ignores the tag value\n")
out_file.write("<!-- <epsilon> -->\n")
out_file.write("<!-- <e1> 5.0 3.0 2.0 1.0 </e1> -->\n")
out_file.write("<!--</epsilon> -->\n")
out_file.write("\n")
<|fim▁hole|> out_file.write("######################## beta\n\n")
out_file.write("# Beta is the number of times to simulate each sampled parameter set.\n# This is only applicable for models simulated using Gillespie and SDE\n")
out_file.write("<beta> 1 </beta>\n\n")
out_file.write("######################## dt\n\n")
out_file.write("# Internal timestep for solver.\n# Make this small for a stiff model.\n")
out_file.write("<dt> 0.01 </dt>\n\n")
out_file.write("######################## perturbation kernels : OPTIONAL (default uniform)\n\n")
out_file.write("# The pertubation kernels are computed with respect to the previous parameter distribution\n")
out_file.write("# Currently uniform and normal are implemented\n")
out_file.write("<kernel> uniform </kernel>\n\n")
out_file.write("######################## model kernel : OPTIONAL (default 0.7)\n\n")
out_file.write("# Probability of perturbing the sampled model (ignored when modelnumber = 1)\n")
out_file.write("<modelkernel> 0.7 </modelkernel>\n\n")
out_file.write("######################## ODE solver control : OPTIONAL \n\n")
out_file.write("# rtol and atol can be specified here. If the model is stiff then setting these to small might help the simulation to run\n")
out_file.write("#<rtol> </rtol> \n#<atol> </atol>\n\n")
out_file.write("######################## User-supplied data\n\n")
out_file.write("<data>\n")
out_file.write("# times: For ABC SMC, times must be a whitespace delimited list\n")
out_file.write("# In simulation mode these are the timepoints for which the simulations will be output\n")
if have_data == False:
out_file.write("<times> 0 1 2 3 4 5 6 7 8 9 10 </times>\n\n")
else:
out_file.write("<times>");
for i in times:
out_file.write(" "+repr(i) )
out_file.write(" </times>\n\n");
out_file.write("# variables: For ABC SMC, whitespace delimited lists of concentrations (ODE or SDE) or molecule numbers (Gillespie)\n")
out_file.write("# Denote your data via tags <v1> </v1> or <var1> </var1> or <v2> </v2> etc. The tags are ignored and the data read in order\n")
out_file.write("# For simulation these data are ignored\n")
out_file.write("# See fitting instruction below if the dimensionality of your data sets differ from the dimensionality of your model\n")
out_file.write("<variables>\n")
if have_data == False:
out_file.write(" <var1> </var1>\n")
else:
for k in range(nvar):
out_file.write("<var"+repr(k+1)+"> ");
for i in vars[k]:
out_file.write(" "+repr(i) )
out_file.write(" </var"+repr(k+1)+">\n");
out_file.write("</variables>\n")
out_file.write("</data>\n\n")
out_file.write("######################## Models\n\n")
out_file.write("<models>\n")
import libsbml
reader=libsbml.SBMLReader()
for i in range(0,len(source)):
sum_file.write("Model "+repr(i+1)+"\n")
sum_file.write("name: model"+repr(i+1)+"\nsource: "+source[i]+"\n\n")
out_file.write("<model"+repr(i+1)+">\n")
out_file.write("<name> model"+repr(i+1)+" </name>\n<source> "+source[i]+" </source>\n\n")
out_file.write("# type: the method used to simulate your model. ODE, SDE or Gillespie.\n")
out_file.write("<type> SDE </type>\n\n")
out_file.write("# Fitting information. If fit is None, all species in the model are fitted to the data in the order they are listed in the model.\n")
out_file.write("# Otherwise, give a whitespace delimited list of fitting instrictions the same length as the dimensions of your data.\n")
out_file.write("# Use speciesN to denote the Nth species in your model. Simple arithmetic operations can be performed on the species from your model.\n")
out_file.write("# For example, to fit the sum of the first two species in your model to your first variable, write fit: species1+species2\n")
out_file.write("<fit> None </fit>\n\n")
document=reader.readSBML(source[i])
model=document.getModel()
numSpecies=model.getNumSpecies()
numGlobalParameters=model.getNumParameters()
parameter=[]
parameterId=[]
parameterId2=[]
listOfParameter=[]
r1=0
r2=0
r3=0
listOfRules=model.getListOfRules()
for k in range(0, len(listOfRules)):
if model.getRule(k).isAlgebraic(): r1=r1+1
if model.getRule(k).isAssignment(): r2=r2+1
if model.getRule(k).isRate(): r3=r3+1
comp=0
NumCompartments=model.getNumCompartments()
for k in range(0,NumCompartments):
if model.getCompartment(k).isSetVolume():
comp=comp+1
numGlobalParameters=numGlobalParameters+1
parameter.append(model.getListOfCompartments()[k].getVolume())
parameterId.append(model.getListOfCompartments()[k].getId())
parameterId2.append('compartment'+repr(k+1))
listOfParameter.append(model.getListOfCompartments()[k])
for k in range(0,numGlobalParameters-comp):
param=model.getParameter(k)
parameter.append(param.getValue())
parameterId.append(param.getId())
parameterId2.append('parameter'+repr(k+1))
listOfParameter.append(param)
numLocalParameters=0
NumReactions=model.getNumReactions()
for k in range(0,NumReactions):
local=model.getReaction(k).getKineticLaw().getNumParameters()
numLocalParameters=numLocalParameters+local
for j in range(0,local):
parameter.append(model.getListOfReactions()[k].getKineticLaw().getParameter(j).getValue())
parameterId.append(model.getListOfReactions()[k].getKineticLaw().getParameter(j).getId())
x=len(parameterId)-comp
parameterId2.append('parameter'+repr(x))
listOfParameter.append(model.getListOfReactions()[k].getKineticLaw().getParameter(j))
numParameters=numLocalParameters+numGlobalParameters
species = model.getListOfSpecies()
##for k in range(0, len(species)):
##if (species[k].getConstant() == True):
##numParameters=numParameters+1
##parameter.append(getSpeciesValue(species[k]))
##parameterId.append(species[k].getId())
##parameterId2.append('species'+repr(k+1))
##numSpecies=numSpecies-1
sum_file.write("number of compartments: "+repr(NumCompartments)+"\n")
sum_file.write("number of reactions: "+repr(NumReactions)+"\n")
sum_file.write("number of rules: "+repr(model.getNumRules())+"\n")
if model.getNumRules()>0:
sum_file.write("\t Algebraic rules: "+repr(r1)+"\n")
sum_file.write("\t Assignment rules: "+repr(r2)+"\n")
sum_file.write("\t Rate rules: "+repr(r3)+"\n\n")
sum_file.write("number of functions: "+repr(model.getNumFunctionDefinitions())+"\n")
sum_file.write("number of events: "+repr(model.getNumEvents())+"\n\n")
paramAsSpecies=0
sum_file.write("Species with initial values: "+repr(numSpecies)+"\n")
out_file.write("# Priors on initial conditions and parameters:\n")
out_file.write("# one of \n")
out_file.write("# constant, value \n")
out_file.write("# normal, mean, variance \n")
out_file.write("# uniform, lower, upper \n")
out_file.write("# lognormal, mean, variance \n\n")
out_file.write("<initial>\n")
x=0
for k in range(0,len(species)):
##if (species[k].getConstant() == False):
x=x+1
#out_file.write(repr(getSpeciesValue(species[k]))+", ")
out_file.write(" <ic"+repr(x)+"> constant "+repr(getSpeciesValue(species[k]))+" </ic"+repr(x)+">\n")
sum_file.write("S"+repr(x)+":\t"+species[k].getId()+"\tspecies"+repr(k+1)+"\t("+repr(getSpeciesValue(species[k]))+")\n")
for k in range(0,len(listOfParameter)):
if listOfParameter[k].getConstant()==False:
for j in range(0, len(listOfRules)):
if listOfRules[j].isRate():
if parameterId[k]==listOfRules[j].getVariable():
x=x+1
paramAsSpecies=paramAsSpecies+1
#out_file.write(repr(listOfParameter[k].getValue())+", ")
out_file.write(" <ic"+repr(x)+"> constant "+repr(listOfParameter[k].getValue())+" </ic"+repr(x)+">\n")
sum_file.write("S"+repr(x)+":\t"+listOfParameter[k].getId()+"\tparameter"+repr(k+1-comp)+"\t("+repr(listOfParameter[k].getValue())+") (parameter included in a rate rule and therefore treated as species)\n")
out_file.write("</initial>\n\n")
sum_file.write("\n")
if(numGlobalParameters==0): string=" (all of them are local parameters)\n"
elif(numGlobalParameters==1): string=" (the first parameter is a global parameter)\n"
elif(numLocalParameters==0): string=" (all of them are global parameters)\n"
else: string=" (the first "+repr(numGlobalParameters)+" are global parameter)\n"
sum_file.write("Parameter: "+repr(numParameters)+string)
sum_file.write("("+repr(paramAsSpecies)+" parameter is treated as species)\n")
out_file.write("<parameters>\n")
counter=0
for k in range(0,numParameters-paramAsSpecies):
Print = True
if k<len(listOfParameter):
if listOfParameter[k].getConstant()==False:
for j in range(0, len(listOfRules)):
if listOfRules[j].isRate():
if parameterId[k]==listOfRules[j].getVariable(): Print = False
if Print ==True:
counter=counter+1
sum_file.write("P"+repr(counter)+":\t"+parameterId[k]+"\t"+parameterId2[k]+"\t("+repr(parameter[k])+")\n")
out_file.write("<parameter"+repr(counter)+">")
out_file.write(" constant ")
out_file.write(repr(parameter[k])+" </parameter"+repr(counter)+">\n")
sum_file.write("\n############################################################\n\n")
out_file.write("</parameters>\n")
out_file.write("</model"+repr(i+1)+">\n\n")
out_file.write("</models>\n\n")
out_file.write("</input>\n\n")
out_file.close()
sum_file.close()<|fim▁end|> | out_file.write("######################## particles\n\n")
out_file.write("<particles> 100 </particles>\n\n")
|
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
class ParserError(Exception):
pass
class Sentence(object):
def __init__(self, subject, verb, object):
# remember we take ('noun', 'princess') tuples and convert them
self.subject = subject[1]
self.verb = verb[1]
self.object = object[1]
def get_sentence(self):
self.sentence = ' '.join([self.subject, self.verb, self.object])
return self.sentence
def peek(word_list):
if word_list:
word = word_list[0]
return word[0]
else:
return None
def match(word_list, expecting):
if word_list:
word = word_list.pop(0)
if word[0] == expecting:
return word
else:
return None
else:
return None
def skip(word_list, word_type):
while peek(word_list) == word_type:
match(word_list, word_type)
def parse_verb(word_list):
skip(word_list, 'stop')
if peek(word_list) == 'verb':
return match(word_list, 'verb')
else:
raise ParserError("Expected a verb next.")
def parse_object(word_list):
skip(word_list, 'stop')
next = peek(word_list)
if next == 'noun':
return match(word_list, 'noun')
elif next == 'direction':
return match(word_list, 'direction')
else:
raise ParserError("Expected a noun or direction next.")
def parse_subject(word_list, subj):
verb = parse_verb(word_list)<|fim▁hole|>
return Sentence(subj, verb, obj)
def parse_sentence(word_list):
skip(word_list, 'stop')
start = peek(word_list)
if start == 'noun':
subj = match(word_list, 'noun')
return parse_subject(word_list, subj)
elif start == 'verb':
# assume the subject is the player then
return parse_subject(word_list, ('noun', 'player'))
else:
raise ParserError("Must start with subject, object or verb not: %s" % start)<|fim▁end|> | obj = parse_object(word_list) |
<|file_name|>_tracmor.src.js<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////
// The Qcodo Object is used for everything in Qcodo
///////////////////////////////////////////////////
var qcodo = {
initialize: function() {
////////////////////////////////
// Browser-related functionality
////////////////////////////////
this.isBrowser = function(intBrowserType) {
return (intBrowserType & qcodo._intBrowserType);
};
this.IE = 1;
this.IE_6_0 = 2;
this.IE_7_0 = 4;
this.FIREFOX = 8;
this.FIREFOX_1_0 = 16;
this.FIREFOX_1_5 = 32;
this.FIREFOX_2_0 = 64;
this.SAFARI = 128;
this.SAFARI_2_0 = 256;
this.SAFARI_3_0 = 512;
this.MACINTOSH = 1024;
this.UNSUPPORTED = 2048;
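		// e.g. on IE 7, _intBrowserType becomes (IE | IE_7_0), so both
		// isBrowser(qcodo.IE) and isBrowser(qcodo.IE_7_0) test truthy.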
// INTERNET EXPLORER (supporting versions 6.0 and 7.0)
if (navigator.userAgent.toLowerCase().indexOf("msie") >= 0) {
this._intBrowserType = this.IE;
if (navigator.userAgent.toLowerCase().indexOf("msie 6.0") >= 0)
this._intBrowserType = this._intBrowserType | this.IE_6_0;
else if (navigator.userAgent.toLowerCase().indexOf("msie 7.0") >= 0)
this._intBrowserType = this._intBrowserType | this.IE_7_0;
else
this._intBrowserType = this._intBrowserType | this.UNSUPPORTED;
// FIREFOX (supporting versions 1.0, 1.5 and 2.0)
} else if ((navigator.userAgent.toLowerCase().indexOf("firefox") >= 0) || (navigator.userAgent.toLowerCase().indexOf("iceweasel") >= 0)) {
this._intBrowserType = this.FIREFOX;
var strUserAgent = navigator.userAgent.toLowerCase();
strUserAgent = strUserAgent.replace('iceweasel/', 'firefox/');
if (strUserAgent.indexOf("firefox/1.0") >= 0)
this._intBrowserType = this._intBrowserType | this.FIREFOX_1_0;
else if (strUserAgent.indexOf("firefox/1.5") >= 0)
this._intBrowserType = this._intBrowserType | this.FIREFOX_1_5;
else if (strUserAgent.indexOf("firefox/2.0") >= 0)
this._intBrowserType = this._intBrowserType | this.FIREFOX_2_0;
else
this._intBrowserType = this._intBrowserType | this.UNSUPPORTED;
// SAFARI (supporting version 2.0 and eventually 3.0)
} else if (navigator.userAgent.toLowerCase().indexOf("safari") >= 0) {
this._intBrowserType = this.SAFARI;
if (navigator.userAgent.toLowerCase().indexOf("safari/41") >= 0)
this._intBrowserType = this._intBrowserType | this.SAFARI_2_0;
else if (navigator.userAgent.toLowerCase().indexOf("safari/52") >= 0)
this._intBrowserType = this._intBrowserType | this.SAFARI_3_0;
else
this._intBrowserType = this._intBrowserType | this.UNSUPPORTED;
// COMPLETELY UNSUPPORTED
} else
this._intBrowserType = this.UNSUPPORTED;
// MACINTOSH?
if (navigator.userAgent.toLowerCase().indexOf("macintosh") >= 0)
this._intBrowserType = this._intBrowserType | this.MACINTOSH;
////////////////////////////////
// Browser-related functionality
////////////////////////////////
this.loadJavaScriptFile = function(strScript, objCallback) {
strScript = qc.jsAssets + "/" + strScript;
var objNewScriptInclude = document.createElement("script");
objNewScriptInclude.setAttribute("type", "text/javascript");
objNewScriptInclude.setAttribute("src", strScript);
document.getElementById(document.getElementById("Qform__FormId").value).appendChild(objNewScriptInclude);
// IE does things differently...
if (qc.isBrowser(qcodo.IE)) {
objNewScriptInclude.callOnLoad = objCallback;
objNewScriptInclude.onreadystatechange = function() {
if ((this.readyState == "complete") || (this.readyState == "loaded"))
if (this.callOnLoad)
this.callOnLoad();
};
// ... than everyone else
} else {
objNewScriptInclude.onload = objCallback;
};
};
this.loadStyleSheetFile = function(strStyleSheetFile, strMediaType) {
// IE does things differently...
if (qc.isBrowser(qcodo.IE)) {
var objNewScriptInclude = document.createStyleSheet(strStyleSheetFile);
// ...than everyone else
} else {
var objNewScriptInclude = document.createElement("style");
objNewScriptInclude.setAttribute("type", "text/css");
objNewScriptInclude.setAttribute("media", strMediaType);
objNewScriptInclude.innerHTML = '@import "' + strStyleSheetFile + '";';
document.body.appendChild(objNewScriptInclude);
};
};
/////////////////////////////
// QForm-related functionality
/////////////////////////////
this.registerForm = function() {
// "Lookup" the QForm's FormId
var strFormId = document.getElementById("Qform__FormId").value;
// Register the Various Hidden Form Elements needed for QForms
this.registerFormHiddenElement("Qform__FormControl", strFormId);
this.registerFormHiddenElement("Qform__FormEvent", strFormId);
this.registerFormHiddenElement("Qform__FormParameter", strFormId);
this.registerFormHiddenElement("Qform__FormCallType", strFormId);
this.registerFormHiddenElement("Qform__FormUpdates", strFormId);
this.registerFormHiddenElement("Qform__FormCheckableControls", strFormId);
};
this.registerFormHiddenElement = function(strId, strFormId) {
var objHiddenElement = document.createElement("input");
objHiddenElement.type = "hidden";
objHiddenElement.id = strId;
objHiddenElement.name = strId;
document.getElementById(strFormId).appendChild(objHiddenElement);
};
this.wrappers = new Array();
////////////////////////////////////
// Mouse Drag Handling Functionality
////////////////////////////////////
this.enableMouseDrag = function() {
document.onmousedown = qcodo.handleMouseDown;
document.onmousemove = qcodo.handleMouseMove;
document.onmouseup = qcodo.handleMouseUp;
};
this.handleMouseDown = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
var objHandle = qcodo.target;
if (!objHandle) return true;
var objWrapper = objHandle.wrapper;
if (!objWrapper) return true;
// Qcodo-Wide Mouse Handling Functions only operate on the Left Mouse Button
// (Control-specific events can respond to QRightMouse-based Events)
if (qcodo.mouse.left) {
if (objWrapper.handleMouseDown) {
// Specifically for Microsoft IE
if (objHandle.setCapture)
objHandle.setCapture();
// Ensure the Cleanliness of Dragging
objHandle.onmouseout = null;
if (document.selection)
document.selection.empty();
qcodo.currentMouseHandleControl = objWrapper;
return objWrapper.handleMouseDown(objEvent, objHandle);
};
};
qcodo.currentMouseHandleControl = null;
return true;
};
this.handleMouseMove = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
if (qcodo.currentMouseHandleControl) {
var objWrapper = qcodo.currentMouseHandleControl;
var objHandle = objWrapper.handle;
// In case IE accidentally marks a selection...
if (document.selection)
document.selection.empty();
if (objWrapper.handleMouseMove)
return objWrapper.handleMouseMove(objEvent, objHandle);
};
return true;
};
this.handleMouseUp = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
if (qcodo.currentMouseHandleControl) {
var objWrapper = qcodo.currentMouseHandleControl;
var objHandle = objWrapper.handle;
// In case IE accidentally marks a selection...
if (document.selection)
document.selection.empty();
			// For IE to release setCapture
if (objHandle.releaseCapture) {
objHandle.releaseCapture();
objHandle.onmouseout = function() {this.releaseCapture()};
};
qcodo.currentMouseHandleControl = null;
if (objWrapper.handleMouseUp)
return objWrapper.handleMouseUp(objEvent, objHandle);
};
return true;
};
////////////////////////////////////
// Window Unloading
////////////////////////////////////
this.unloadFlag = false;
this.handleBeforeUnload = function() {
qcodo.unloadFlag = true;
};
window.onbeforeunload = this.handleBeforeUnload;
////////////////////////////////////
// Color Handling Functionality
////////////////////////////////////
this.colorRgbValues = function(strColor) {
strColor = strColor.replace("#", "");
try {
if (strColor.length == 3)
return new Array(
eval("0x" + strColor.substring(0, 1)),
eval("0x" + strColor.substring(1, 2)),
eval("0x" + strColor.substring(2, 3))
);
else if (strColor.length == 6)
return new Array(
eval("0x" + strColor.substring(0, 2)),
eval("0x" + strColor.substring(2, 4)),
eval("0x" + strColor.substring(4, 6))
);
} catch (Exception) {};
return new Array(0, 0, 0);
};
this.hexFromInt = function(intNumber) {
intNumber = (intNumber > 255) ? 255 : ((intNumber < 0) ? 0 : intNumber);
			var intFirst = Math.floor(intNumber / 16);
			var intSecond = intNumber % 16;
return intFirst.toString(16) + intSecond.toString(16);
};
this.colorRgbString = function(intRgbArray) {
return "#" + qcodo.hexFromInt(intRgbArray[0]) + qcodo.hexFromInt(intRgbArray[1]) + qcodo.hexFromInt(intRgbArray[2]);
};
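		// Roundtrip sketch: colorRgbValues("#ff8000") yields [255, 128, 0], and
		// colorRgbString([255, 128, 0]) yields "#ff8000" again.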
}
};
////////////////////////////////
// Qcodo Shortcut and Initialize
////////////////////////////////
var qc = qcodo;
qc.initialize();
////////////////////////////////
// Logging-related functionality
////////////////////////////////
qcodo.logMessage = function(strMessage, blnReset, blnNonEscape) {
var objLogger = qcodo.getControl("Qform_Logger");
if (!objLogger) {
var objLogger = document.createElement("div");
objLogger.id = "Qform_Logger";
objLogger.style.display = "none";
objLogger.style.width = "400px";
objLogger.style.backgroundColor = "#dddddd";
objLogger.style.fontSize = "10px";
objLogger.style.fontFamily = "lucida console, courier, monospaced";
objLogger.style.padding = "6px";
objLogger.style.overflow = "auto";
if (qcodo.isBrowser(qcodo.IE))
objLogger.style.filter = "alpha(opacity=50)";
else
objLogger.style.opacity = 0.5;
document.body.appendChild(objLogger);
};
if (!blnNonEscape)
if (strMessage.replace)
strMessage = strMessage.replace(/</g, '<');
var strPosition = "fixed";
var strTop = "0px";
var strLeft = "0px";
if (qcodo.isBrowser(qcodo.IE)) {
// IE doesn't support position:fixed, so manually set positioning
strPosition = "absolute";
strTop = qcodo.scroll.y + "px";
strLeft = qcodo.scroll.x + "px";
};
objLogger.style.position = strPosition;
objLogger.style.top = strTop;
objLogger.style.left = strLeft;
objLogger.style.height = (qcodo.client.height - 100) + "px";
objLogger.style.display = 'inline';
var strHeader = '<a href="javascript:qcodo.logRemove()">Remove</a><br/><br/>';
if (blnReset)
objLogger.innerHTML = strHeader + strMessage + "<br/>";
else if (objLogger.innerHTML == "")
objLogger.innerHTML = strHeader + strMessage + "<br/>";
else
objLogger.innerHTML += strMessage + "<br/>";
};
qcodo.logRemove = function() {
var objLogger = qcodo.getControl('Qform_Logger');
if (objLogger)
objLogger.style.display = 'none';
};
qcodo.logEventStats = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
var strMessage = "";
strMessage += "scroll (x, y): " + qcodo.scroll.x + ", " + qcodo.scroll.y + "<br/>";
strMessage += "scroll (width, height): " + qcodo.scroll.width + ", " + qcodo.scroll.height + "<br/>";
strMessage += "client (x, y): " + qcodo.client.x + ", " + qcodo.client.y + "<br/>";
strMessage += "client (width, height): " + qcodo.client.width + ", " + qcodo.client.height + "<br/>";
strMessage += "page (x, y): " + qcodo.page.x + ", " + qcodo.page.y + "<br/>";
strMessage += "page (width, height): " + qcodo.page.width + ", " + qcodo.page.height + "<br/>";
strMessage += "mouse (x, y): " + qcodo.mouse.x + ", " + qcodo.mouse.y + "<br/>";
strMessage += "mouse (left, middle, right): " + qcodo.mouse.left + ", " + qcodo.mouse.middle + ", " + qcodo.mouse.right + "<br/>";
strMessage += "key (alt, shift, control, code): " + qcodo.key.alt + ", " + qcodo.key.shift + ", " +
qcodo.key.control + ", " + qcodo.key.code;
qcodo.logMessage("Event Stats", true);
qcodo.logMessage(strMessage, false, true);
};
qcodo.logObject = function(objObject) {
var strDump = "";
for (var strKey in objObject) {
var strData = objObject[strKey];
strDump += strKey + ": ";
if (typeof strData == 'function')
strDump += "<FUNCTION>";
else if (typeof strData == 'object')
strDump += "<OBJECT>";
else if ((strKey == 'outerText') || (strKey == 'innerText') || (strKey == 'outerHTML') || (strKey == 'innerHTML'))
strDump += "<TEXT>";
else
strDump += strData;
strDump += "<br/>";
};
qcodo.logMessage("Object Stats", true);
qcodo.logMessage(strDump, false, true);
};
///////////////////////////////
// Timers-related functionality
///////////////////////////////
qcodo._objTimers = new Object();
qcodo.clearTimeout = function(strTimerId) {
if (qcodo._objTimers[strTimerId]) {
clearTimeout(qcodo._objTimers[strTimerId]);
qcodo._objTimers[strTimerId] = null;
};
};
qcodo.setTimeout = function(strTimerId, strAction, intDelay) {
qcodo.clearTimeout(strTimerId);
qcodo._objTimers[strTimerId] = setTimeout(strAction, intDelay);
};
/////////////////////////////////////
// Event Object-related functionality
/////////////////////////////////////
qcodo.handleEvent = function(objEvent) {
objEvent = (objEvent) ? objEvent : ((typeof(event) == "object") ? event : null);
if (objEvent) {
if (typeof(objEvent.clientX) != "undefined") {
if (qcodo.isBrowser(qcodo.SAFARI)) {
qcodo.mouse.x = objEvent.clientX - window.document.body.scrollLeft;
qcodo.mouse.y = objEvent.clientY - window.document.body.scrollTop;
qcodo.client.x = objEvent.clientX - window.document.body.scrollLeft;
qcodo.client.y = objEvent.clientY - window.document.body.scrollTop;
} else {
qcodo.mouse.x = objEvent.clientX;
qcodo.mouse.y = objEvent.clientY;
qcodo.client.x = objEvent.clientX;
qcodo.client.y = objEvent.clientY;
};
};
if (qcodo.isBrowser(qcodo.IE)) {
qcodo.mouse.left = ((objEvent.button & 1) ? true : false);
qcodo.mouse.right = ((objEvent.button & 2) ? true : false);
qcodo.mouse.middle = ((objEvent.button & 4) ? true : false);
} else if (qcodo.isBrowser(qcodo.SAFARI)) {
qcodo.mouse.left = ((objEvent.button && !objEvent.ctrlKey) ? true : false);
qcodo.mouse.right = ((objEvent.button && objEvent.ctrlKey) ? true : false);
qcodo.mouse.middle = false;
} else {
qcodo.mouse.left = (objEvent.button == 0);
qcodo.mouse.right = (objEvent.button == 2);
qcodo.mouse.middle = (objEvent.button == 1);
};
qcodo.key.alt = (objEvent.altKey) ? true : false;
qcodo.key.control = (objEvent.ctrlKey) ? true : false;
qcodo.key.shift = (objEvent.shiftKey) ? true : false;
qcodo.key.code = (objEvent.keyCode) ? (objEvent.keyCode) : 0;
if (objEvent.originalTarget)
qcodo.target = objEvent.originalTarget;
else if (objEvent.srcElement)
qcodo.target = objEvent.srcElement;
else
qcodo.target = null;
};
/*
qcodo.client.width = (qcodo.isBrowser(qcodo.SAFARI)) ? window.innerWidth : window.document.body.clientWidth;
qcodo.client.height = (qcodo.isBrowser(qcodo.SAFARI)) ? window.innerHeight: window.document.body.clientHeight;
qcodo.page.x = qcodo.mouse.x + qcodo.scroll.x;
qcodo.page.y = qcodo.mouse.y + qcodo.scroll.y;
qcodo.page.width = Math.max(window.document.body.scrollWidth, qcodo.client.width);
qcodo.page.height = Math.max(window.document.body.scrollHeight, qcodo.client.height);
qcodo.scroll.x = window.scrollX || window.document.body.scrollLeft;
qcodo.scroll.y = window.scrollY || window.document.body.scrollTop;
qcodo.scroll.width = window.document.body.scrollWidth - qcodo.client.width;
qcodo.scroll.height = window.document.body.scrollHeight - qcodo.client.height;
*/
if (window.document.compatMode == "BackCompat") {
qcodo.client.width = (qcodo.isBrowser(qcodo.SAFARI)) ? window.innerWidth : window.document.body.clientWidth;
qcodo.client.height = (qcodo.isBrowser(qcodo.SAFARI)) ? window.innerHeight: window.document.body.clientHeight;
qcodo.page.width = Math.max(window.document.body.scrollWidth, qcodo.client.width);
qcodo.page.height = Math.max(window.document.body.scrollHeight, qcodo.client.height);
qcodo.scroll.x = window.scrollX || window.document.body.scrollLeft;
qcodo.scroll.y = window.scrollY || window.document.body.scrollTop;
} else if (qcodo.isBrowser(qcodo.SAFARI)) {
qcodo.client.width = window.innerWidth;
qcodo.client.height = window.innerHeight;
qcodo.page.width = Math.max(window.document.body.scrollWidth, qcodo.client.width);
qcodo.page.height = Math.max(window.document.body.scrollHeight, qcodo.client.height);
qcodo.scroll.x = window.scrollX || window.document.body.scrollLeft;
qcodo.scroll.y = window.scrollY || window.document.body.scrollTop;
} else if (qcodo.isBrowser(qcodo.IE)) {
qcodo.client.width = window.document.documentElement.offsetWidth;
qcodo.client.height = window.document.documentElement.offsetHeight;
qcodo.page.width = Math.max(window.document.documentElement.scrollWidth, qcodo.client.width);
qcodo.page.height = Math.max(window.document.documentElement.scrollHeight, qcodo.client.height);
qcodo.scroll.x = window.document.documentElement.scrollLeft;
qcodo.scroll.y = window.document.documentElement.scrollTop;
} else {
if (window.scrollMaxY)
// Take the Y Scroll Bar into account by subtracting 15 pixels
qcodo.client.width = window.innerWidth - 15;
else
qcodo.client.width = window.innerWidth;
if (window.scrollMaxX)
// Take the X Scroll Bar into account by subtracting 15 pixels
qcodo.client.height = window.innerHeight - 15;
else
qcodo.client.height = window.innerHeight;
qcodo.page.width = window.scrollMaxX + qcodo.client.width;
qcodo.page.height = window.scrollMaxY + qcodo.client.height;
qcodo.scroll.x = window.scrollX;
qcodo.scroll.y = window.scrollY;
};
// These Values are "By Definition"
qcodo.page.x = qcodo.mouse.x + qcodo.scroll.x;
qcodo.page.y = qcodo.mouse.y + qcodo.scroll.y;
qcodo.scroll.width = qcodo.page.width - qcodo.client.width;
qcodo.scroll.height = qcodo.page.height - qcodo.client.height;
return objEvent;
};
qcodo.terminateEvent = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
if (objEvent) {
		// Stop Propagation
if (objEvent.preventDefault)
objEvent.preventDefault();
if (objEvent.stopPropagation)
objEvent.stopPropagation();
objEvent.cancelBubble = true;
objEvent.returnValue = false;
};
return false;
};
///////////////////////////////
// Event Stats-Related Objects
///////////////////////////////
qcodo.key = {
control: false,
alt: false,
shift: false,
code: null
};
qcodo.mouse = {
x: 0,
y: 0,
left: false,
middle: false,
right: false
};
qcodo.client = {
x: null,
y: null,
width: null,
height: null
// width: (qcodo.isBrowser(qcodo.IE)) ? window.document.body.clientWidth : window.innerWidth,
// height: (qcodo.isBrowser(qcodo.IE)) ? window.document.body.clientHeight : window.innerHeight
};
qcodo.page = {
x: null,
y: null,
width: null,
height: null
// width: window.document.body.scrollWidth,
// height: window.document.body.scrollHeight
};
qcodo.scroll = {
x: window.scrollX || (window.document.body) ? window.document.body.scrollLeft : null,
y: window.scrollY || (window.document.body) ? window.document.body.scrollTop : null,
// x: null,
// y: null,
width: (window.document.body) ? (window.document.body.scrollWidth - qcodo.client.width) : null,
height: (window.document.body) ? (window.document.body.scrollHeight - qcodo.client.height) : null
// width: null,
// height: null
};
////////////////////////////////////////////
// PostBack and AjaxPostBack
////////////////////////////////////////////
qcodo.postBack = function(strForm, strControl, strEvent, strParameter) {
var objForm = document.getElementById(strForm);
objForm.Qform__FormControl.value = strControl;
objForm.Qform__FormEvent.value = strEvent;
objForm.Qform__FormParameter.value = strParameter;
objForm.Qform__FormCallType.value = "Server";
objForm.Qform__FormUpdates.value = this.formUpdates();
objForm.Qform__FormCheckableControls.value = this.formCheckableControls(strForm, "Server");
objForm.submit();
};
qcodo.formUpdates = function() {
var strToReturn = "";
for (var strControlId in qcodo.controlModifications)
for (var strProperty in qcodo.controlModifications[strControlId])
strToReturn += strControlId + " " + strProperty + " " + qcodo.controlModifications[strControlId][strProperty] + "\n";
qcodo.controlModifications = new Array();
return strToReturn;
};
qcodo.formCheckableControls = function(strForm, strCallType) {
var objForm = document.getElementById(strForm);
var strToReturn = "";
for (var intIndex = 0; intIndex < objForm.elements.length; intIndex++) {
if (((objForm.elements[intIndex].type == "checkbox") ||
(objForm.elements[intIndex].type == "radio")) &&
((strCallType == "Ajax") ||
(!objForm.elements[intIndex].disabled))) {
// CheckBoxList
if (objForm.elements[intIndex].id.indexOf('[') >= 0) {
if (objForm.elements[intIndex].id.indexOf('[0]') >= 0)
strToReturn += " " + objForm.elements[intIndex].id.substring(0, objForm.elements[intIndex].id.length - 3);
// RadioButtonList
} else if (objForm.elements[intIndex].id.indexOf('_') >= 0) {
if (objForm.elements[intIndex].id.indexOf('_0') >= 0)
strToReturn += " " + objForm.elements[intIndex].id.substring(0, objForm.elements[intIndex].id.length - 2);
// Standard Radio or Checkbox
} else {
strToReturn += " " + objForm.elements[intIndex].id;
};
};
};
if (strToReturn.length > 0)
return strToReturn.substring(1);
else
return "";
};
qcodo.ajaxQueue = new Array();
qcodo.postAjax = function(strForm, strControl, strEvent, strParameter, strWaitIconControlId) {
// alert(strForm + " " + strControl + " " + strEvent + " " + strParameter);
// Figure out if Queue is Empty
var blnQueueEmpty = false;
if (qcodo.ajaxQueue.length == 0)
blnQueueEmpty = true;
// Enqueue the AJAX Request
qcodo.ajaxQueue.push(new Array(strForm, strControl, strEvent, strParameter, strWaitIconControlId));
// If the Queue was originally empty, call the Dequeue
if (blnQueueEmpty)
qcodo.dequeueAjaxQueue();
};
qcodo.clearAjaxQueue = function() {
qcodo.ajaxQueue = new Array();
};
qcodo.objAjaxWaitIcon = null;
qcodo.dequeueAjaxQueue = function() {
if (qcodo.ajaxQueue.length > 0) {
		var strForm = this.ajaxQueue[0][0];
		var strControl = this.ajaxQueue[0][1];
		var strEvent = this.ajaxQueue[0][2];
		var strParameter = this.ajaxQueue[0][3];
		var strWaitIconControlId = this.ajaxQueue[0][4];
// Display WaitIcon (if applicable)
if (strWaitIconControlId) {
this.objAjaxWaitIcon = this.getWrapper(strWaitIconControlId);
if (this.objAjaxWaitIcon)
this.objAjaxWaitIcon.style.display = 'inline';
};
var objForm = document.getElementById(strForm);
objForm.Qform__FormControl.value = strControl;
objForm.Qform__FormEvent.value = strEvent;
objForm.Qform__FormParameter.value = strParameter;
objForm.Qform__FormCallType.value = "Ajax";
objForm.Qform__FormUpdates.value = qcodo.formUpdates();
objForm.Qform__FormCheckableControls.value = this.formCheckableControls(strForm, "Ajax");
var strPostData = "";
for (var i = 0; i < objForm.elements.length; i++) {
switch (objForm.elements[i].type) {
case "checkbox":
case "radio":
if (objForm.elements[i].checked) {
var strTestName = objForm.elements[i].name + "_";
if (objForm.elements[i].id.substring(0, strTestName.length) == strTestName)
strPostData += "&" + objForm.elements[i].name + "=" + objForm.elements[i].id.substring(strTestName.length);
else
// strPostData += "&" + objForm.elements[i].id + "=" + "1";
strPostData += "&" + objForm.elements[i].id + "=" + objForm.elements[i].value;
};
break;
case "select-multiple":
var blnOneSelected = false;
for (var intIndex = 0; intIndex < objForm.elements[i].options.length; intIndex++)
if (objForm.elements[i].options[intIndex].selected) {
strPostData += "&" + objForm.elements[i].name + "=";
strPostData += objForm.elements[i].options[intIndex].value;
};
break;
default:
strPostData += "&" + objForm.elements[i].id + "=";
// For Internationalization -- we must escape the element's value properly
var strPostValue = objForm.elements[i].value;
if (strPostValue) {
strPostValue = strPostValue.replace(/\%/g, "%25");
strPostValue = strPostValue.replace(/&/g, escape('&'));
strPostValue = strPostValue.replace(/\+/g, "%2B");
};
strPostData += strPostValue;
break;
};
};
var strUri = objForm.action;
var objRequest;
if (window.XMLHttpRequest) {
objRequest = new XMLHttpRequest();
} else if (typeof ActiveXObject != "undefined") {
objRequest = new ActiveXObject("Microsoft.XMLHTTP");
};
if (objRequest) {
objRequest.open("POST", strUri, true);
objRequest.setRequestHeader("Method", "POST " + strUri + " HTTP/1.1");
objRequest.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
objRequest.onreadystatechange = function() {
if (!qcodo.unloadFlag && objRequest.readyState == 4) {
try {
var objXmlDoc = objRequest.responseXML;
// qcodo.logMessage(objRequest.responseText, true);
// alert('AJAX Response Received');
if (!objXmlDoc) {
alert("An error occurred during AJAX Response parsing.\r\n\r\nThe error response will appear in a new popup.");
var objErrorWindow = window.open('about:blank', 'qcodo_error','menubar=no,toolbar=no,location=no,status=no,scrollbars=yes,resizable=yes,width=1000,height=700,left=50,top=50');
objErrorWindow.focus();
objErrorWindow.document.write(objRequest.responseText);
return;
} else {
var intLength = 0;
// Go through Controls
var objXmlControls = objXmlDoc.getElementsByTagName('control');
intLength = objXmlControls.length;
for (var intIndex = 0; intIndex < intLength; intIndex++) {
var strControlId = objXmlControls[intIndex].attributes.getNamedItem('id').nodeValue;
var strControlHtml = "";
if (objXmlControls[intIndex].firstChild)
strControlHtml = objXmlControls[intIndex].firstChild.nodeValue;
if (qcodo.isBrowser(qcodo.FIREFOX))
strControlHtml = objXmlControls[intIndex].textContent;
// Perform Callback Responsibility
if (strControlId == "Qform__FormState") {
var objFormState = document.getElementById(strControlId);
objFormState.value = strControlHtml;
} else {
var objSpan = document.getElementById(strControlId + "_ctl");
if (objSpan)
objSpan.innerHTML = strControlHtml;
};
};
// Go through Commands
var objXmlCommands = objXmlDoc.getElementsByTagName('command');
intLength = objXmlCommands.length;
for (var intIndex = 0; intIndex < intLength; intIndex++) {
if (objXmlCommands[intIndex] && objXmlCommands[intIndex].firstChild) {
var strCommand = "";
								var intChildLength = objXmlCommands[intIndex].childNodes.length;
for (var intChildIndex = 0; intChildIndex < intChildLength; intChildIndex++)
strCommand += objXmlCommands[intIndex].childNodes[intChildIndex].nodeValue;
eval(strCommand);
};
};
};
} catch (objExc) {
alert(objExc.message + "\r\non line number " + objExc.lineNumber + "\r\nin file " + objExc.fileName);
alert("An error occurred during AJAX Response handling.\r\n\r\nThe error response will appear in a new popup.");
var objErrorWindow = window.open('about:blank', 'qcodo_error','menubar=no,toolbar=no,location=no,status=no,scrollbars=yes,resizable=yes,width=1000,height=700,left=50,top=50');
objErrorWindow.focus();
objErrorWindow.document.write(objRequest.responseText);
return;
};
// Perform the Dequeue
				qcodo.ajaxQueue.shift();
				// Hide the WaitIcon (if applicable)
if (qcodo.objAjaxWaitIcon)
qcodo.objAjaxWaitIcon.style.display = 'none';
// If there are still AjaxEvents in the queue, go ahead and process/dequeue them
if (qcodo.ajaxQueue.length > 0)
qcodo.dequeueAjaxQueue();
};
};
objRequest.send(strPostData);
};
};
};
//////////////////
// Qcodo Shortcuts
//////////////////
qc.pB = qcodo.postBack;
qc.pA = qcodo.postAjax;
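// Usage sketch (form/control IDs are hypothetical):
//   qc.pB('Qform1', 'btnSave', 'QClickEvent', '');              // full-page postback
//   qc.pA('Qform1', 'btnSave', 'QClickEvent', '', 'spnWait');   // AJAX postback with wait icon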
/////////////////////////////////
// Controls-related functionality
/////////////////////////////////
qcodo.getControl = function(mixControl) {
if (typeof(mixControl) == 'string')
return document.getElementById(mixControl);
else
return mixControl;
};
qcodo.getWrapper = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
if (objControl)
return this.getControl(objControl.id + "_ctl");
else
return null;
};
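// Sketch: for a control rendered as <span id="txtName_ctl"><input id="txtName"/></span>,
// getControl('txtName') returns the input and getWrapper('txtName') returns the outer span.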
/////////////////////////////
// Register Control - General
/////////////////////////////
qcodo.controlModifications = new Array();
qcodo.javascriptStyleToQcodo = new Array();
qcodo.javascriptStyleToQcodo["backgroundColor"] = "BackColor";
qcodo.javascriptStyleToQcodo["borderColor"] = "BorderColor";
qcodo.javascriptStyleToQcodo["borderStyle"] = "BorderStyle";
qcodo.javascriptStyleToQcodo["border"] = "BorderWidth";
qcodo.javascriptStyleToQcodo["height"] = "Height";
qcodo.javascriptStyleToQcodo["width"] = "Width";
qcodo.javascriptStyleToQcodo["text"] = "Text";
qcodo.javascriptWrapperStyleToQcodo = new Array();
qcodo.javascriptWrapperStyleToQcodo["position"] = "Position";
qcodo.javascriptWrapperStyleToQcodo["top"] = "Top";
qcodo.javascriptWrapperStyleToQcodo["left"] = "Left";
qcodo.recordControlModification = function(strControlId, strProperty, strNewValue) {
if (!qcodo.controlModifications[strControlId])
qcodo.controlModifications[strControlId] = new Array();
qcodo.controlModifications[strControlId][strProperty] = strNewValue;
};
qcodo.registerControl = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
// Link the Wrapper and the Control together
var objWrapper = this.getWrapper(objControl);
objControl.wrapper = objWrapper;
objWrapper.control = objControl;
// Add the wrapper to the global qcodo wrappers array
qcodo.wrappers[objWrapper.id] = objWrapper;
// Create New Methods, etc.
// Like: objWrapper.something = xyz;
// Updating Style-related Things
objWrapper.updateStyle = function(strStyleName, strNewValue) {
var objControl = this.control;
switch (strStyleName) {
case "className":
objControl.className = strNewValue;
qcodo.recordControlModification(objControl.id, "CssClass", strNewValue);
break;
case "parent":
if (strNewValue) {
var objNewParentControl = qcodo.getControl(strNewValue);
objNewParentControl.appendChild(this);
qcodo.recordControlModification(objControl.id, "Parent", strNewValue);
} else {
var objParentControl = this.parentNode;
objParentControl.removeChild(this);
qcodo.recordControlModification(objControl.id, "Parent", "");
};
break;
case "displayStyle":
objControl.style.display = strNewValue;
qcodo.recordControlModification(objControl.id, "DisplayStyle", strNewValue);
break;
case "display":
if (strNewValue) {
objWrapper.style.display = "inline";
qcodo.recordControlModification(objControl.id, "Display", "1");
} else {
objWrapper.style.display = "none";
qcodo.recordControlModification(objControl.id, "Display", "0");
};
break;
case "enabled":
if (strNewValue) {
objWrapper.control.disabled = false;
qcodo.recordControlModification(objControl.id, "Enabled", "1");
} else {
objWrapper.control.disabled = true;
qcodo.recordControlModification(objControl.id, "Enabled", "0");
};
break;
case "width":
case "height":
objControl.style[strStyleName] = strNewValue;
if (qcodo.javascriptStyleToQcodo[strStyleName])
qcodo.recordControlModification(objControl.id, qcodo.javascriptStyleToQcodo[strStyleName], strNewValue);
if (objWrapper.handle)
objWrapper.updateHandle();
break;
case "text":
objControl.innerHTML = strNewValue;
qcodo.recordControlModification(objControl.id, "Text", strNewValue);
break;
default:
if (qcodo.javascriptWrapperStyleToQcodo[strStyleName]) {
this.style[strStyleName] = strNewValue;
qcodo.recordControlModification(objControl.id, qcodo.javascriptWrapperStyleToQcodo[strStyleName], strNewValue);
} else {
objControl.style[strStyleName] = strNewValue;
if (qcodo.javascriptStyleToQcodo[strStyleName])
qcodo.recordControlModification(objControl.id, qcodo.javascriptStyleToQcodo[strStyleName], strNewValue);
};
break;
};
};
// Positioning-related functions
objWrapper.getAbsolutePosition = function() {
var intOffsetLeft = 0;
var intOffsetTop = 0;
var objControl = this.control;
while (objControl) {
// On IE we don't want to include controls whose wrappers
// are position:relative when accumulating offsets
if ((objControl.wrapper) && (objControl.wrapper.style.position == "relative")) {
} else {
intOffsetLeft += objControl.offsetLeft;
intOffsetTop += objControl.offsetTop;
};
objControl = objControl.offsetParent;
};
return {x:intOffsetLeft, y:intOffsetTop};
};
objWrapper.setAbsolutePosition = function(intNewX, intNewY, blnBindToParent) {
var objControl = this.offsetParent;
while (objControl) {
intNewX -= objControl.offsetLeft;
intNewY -= objControl.offsetTop;
objControl = objControl.offsetParent;
};
if (blnBindToParent) {
if (this.parentNode.nodeName.toLowerCase() != 'form') {
// intNewX and intNewY must be within the parent's control
intNewX = Math.max(intNewX, 0);
intNewY = Math.max(intNewY, 0);
intNewX = Math.min(intNewX, this.offsetParent.offsetWidth - this.offsetWidth);
intNewY = Math.min(intNewY, this.offsetParent.offsetHeight - this.offsetHeight);
};
};
this.updateStyle("left", intNewX + "px");
this.updateStyle("top", intNewY + "px");
};
objWrapper.setDropZoneMaskAbsolutePosition = function(intNewX, intNewY, blnBindToParent) {
/*
var objControl = this.offsetParent;
while (objControl) {
intNewX -= objControl.offsetLeft;
intNewY -= objControl.offsetTop;
objControl = objControl.offsetParent;
}
if (blnBindToParent) {
if (this.parentNode.nodeName.toLowerCase() != 'form') {
// intNewX and intNewY must be within the parent's control
intNewX = Math.max(intNewX, 0);
intNewY = Math.max(intNewY, 0);
intNewX = Math.min(intNewX, this.offsetParent.offsetWidth - this.offsetWidth);
intNewY = Math.min(intNewY, this.offsetParent.offsetHeight - this.offsetHeight);
}
}
qc.logObject(intNewX + " x " + intNewY);
*/
this.dropZoneMask.style.left = intNewX + "px";
this.dropZoneMask.style.top = intNewY + "px";
};
objWrapper.setMaskOffset = function(intDeltaX, intDeltaY) {
var objAbsolutePosition = this.getAbsolutePosition();
this.mask.style.left = (objAbsolutePosition.x + intDeltaX) + "px";
this.mask.style.top = (objAbsolutePosition.y + intDeltaY) + "px";
};
objWrapper.containsPoint = function(intX, intY) {
var objAbsolutePosition = this.getAbsolutePosition();
if ((intX >= objAbsolutePosition.x) && (intX <= objAbsolutePosition.x + this.control.offsetWidth) &&
(intY >= objAbsolutePosition.y) && (intY <= objAbsolutePosition.y + this.control.offsetHeight))
return true;
else
return false;
};
// Toggle Display / Enabled
objWrapper.toggleDisplay = function(strShowOrHide) {
// Toggles the display/hiding of the entire control (including any design/wrapper HTML)
// If ShowOrHide is blank, then we toggle
// Otherwise, we'll execute a "show" or a "hide"
if (strShowOrHide) {
if (strShowOrHide == "show")
this.updateStyle("display", true);
else
this.updateStyle("display", false);
} else
this.updateStyle("display", (this.style.display == "none") ? true : false);
};
objWrapper.toggleEnabled = function(strEnableOrDisable) {
if (strEnableOrDisable) {
if (strEnableOrDisable == "enable")
this.updateStyle("enabled", true);
else
this.updateStyle("enabled", false);
} else
this.updateStyle("enabled", (this.control.disabled) ? true : false);
};
objWrapper.registerClickPosition = function(objEvent) {
objEvent = (objEvent) ? objEvent : ((typeof(event) == "object") ? event : null);
qcodo.handleEvent(objEvent);
var intX = qcodo.mouse.x - this.getAbsolutePosition().x + qcodo.scroll.x;
var intY = qcodo.mouse.y - this.getAbsolutePosition().y + qcodo.scroll.y;
// Random IE Check
if (qcodo.isBrowser(qcodo.IE)) {
intX = intX - 2;
intY = intY - 2;
};
document.getElementById(this.control.id + "_x").value = intX;
document.getElementById(this.control.id + "_y").value = intY;
};
// Focus
objWrapper.focus = function() {
if (this.control.focus) {
if (qcodo.isBrowser(qcodo.IE) && (typeof (this.control.focus) == "object"))
this.control.focus();
else if (typeof (this.control.focus) == "function")
this.control.focus();
};
};
// Blink
objWrapper.blink = function(strFromColor, strToColor) {
objWrapper.blinkStart = qcodo.colorRgbValues(strFromColor);
objWrapper.blinkEnd = qcodo.colorRgbValues(strToColor);
objWrapper.blinkStep = new Array(
Math.round((objWrapper.blinkEnd[0] - objWrapper.blinkStart[0]) / 12.5),
Math.round((objWrapper.blinkEnd[1] - objWrapper.blinkStart[1]) / 12.5),
Math.round((objWrapper.blinkEnd[2] - objWrapper.blinkStart[2]) / 12.5)
);
objWrapper.blinkDown = new Array(
(objWrapper.blinkStep[0] < 0) ? true : false,
(objWrapper.blinkStep[1] < 0) ? true : false,
(objWrapper.blinkStep[2] < 0) ? true : false
);
objWrapper.blinkCurrent = objWrapper.blinkStart;
this.control.style.backgroundColor = qcodo.colorRgbString(objWrapper.blinkCurrent);
qcodo.setTimeout(objWrapper.id, "qc.getC('" + objWrapper.id + "').blinkHelper()", 20);
};
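// Illustrative usage (control id hypothetical; assumes the control was
// registered via qc.regC and that qcodo.colorRgbValues accepts these color
// strings): fade the control's background from white to pale red.
//   qc.getW("pnlAlert").blink("#ffffff", "#ffcccc");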
objWrapper.blinkHelper = function() {
objWrapper.blinkCurrent[0] += objWrapper.blinkStep[0];
objWrapper.blinkCurrent[1] += objWrapper.blinkStep[1];
objWrapper.blinkCurrent[2] += objWrapper.blinkStep[2];
if (((objWrapper.blinkDown[0]) && (objWrapper.blinkCurrent[0] < objWrapper.blinkEnd[0])) ||
((!objWrapper.blinkDown[0]) && (objWrapper.blinkCurrent[0] > objWrapper.blinkEnd[0])))
objWrapper.blinkCurrent[0] = objWrapper.blinkEnd[0];
if (((objWrapper.blinkDown[1]) && (objWrapper.blinkCurrent[1] < objWrapper.blinkEnd[1])) ||
((!objWrapper.blinkDown[1]) && (objWrapper.blinkCurrent[1] > objWrapper.blinkEnd[1])))
objWrapper.blinkCurrent[1] = objWrapper.blinkEnd[1];
if (((objWrapper.blinkDown[2]) && (objWrapper.blinkCurrent[2] < objWrapper.blinkEnd[2])) ||
((!objWrapper.blinkDown[2]) && (objWrapper.blinkCurrent[2] > objWrapper.blinkEnd[2])))
objWrapper.blinkCurrent[2] = objWrapper.blinkEnd[2];
this.control.style.backgroundColor = qcodo.colorRgbString(objWrapper.blinkCurrent);
if ((objWrapper.blinkCurrent[0] == objWrapper.blinkEnd[0]) &&
(objWrapper.blinkCurrent[1] == objWrapper.blinkEnd[1]) &&
(objWrapper.blinkCurrent[2] == objWrapper.blinkEnd[2])) {
// Done with Blink!
} else {
qcodo.setTimeout(objWrapper.id, "qc.getC('" + objWrapper.id + "').blinkHelper()", 20);
};
};
};
qcodo.registerControlArray = function(mixControlArray) {
var intLength = mixControlArray.length;
for (var intIndex = 0; intIndex < intLength; intIndex++)
qcodo.registerControl(mixControlArray[intIndex]);
};
//////////////////
// Qcodo Shortcuts
//////////////////
qc.getC = qcodo.getControl;
qc.getW = qcodo.getWrapper;
qc.regC = qcodo.registerControl;
qc.regCA = qcodo.registerControlArray;
/////////////////////////////////////////////
// Control: Dialog Box functionality
/////////////////////////////////////////////
qcodo.registerDialogBox = function(mixControl, strMatteColor, intMatteOpacity, blnMatteClickable, blnAnyKeyCloses) {
// Initialize the Event Handler
qcodo.handleEvent();
// Get Control/Wrapper
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
// Setup the DialogBoxBackground (DbBg) if applicable
var objDbBg = objWrapper.dbBg;
if (!objDbBg) {
var objDbBg = document.createElement("div");
objDbBg.id = objWrapper.id + "dbbg";
document.getElementById(document.getElementById("Qform__FormId").value).appendChild(objDbBg);
// Setup the Object Links
objWrapper.dbBg = objDbBg;
objDbBg.wrapper = objWrapper;
if (qcodo.isBrowser(qcodo.IE)) {
var objIframe = document.createElement("iframe");
objIframe.id = objWrapper.id + "dbbgframe";
objIframe.style.left = "0px";
objIframe.style.top = "0px";
objIframe.style.position = "absolute";
objIframe.style.filter = "alpha(opacity=0)";
objIframe.src = "javascript: false;";
objIframe.frameBorder = 0;
objIframe.scrolling = "no";
objIframe.style.zIndex = 990;
objIframe.style.display = "none";
document.getElementById(document.getElementById("Qform__FormId").value).appendChild(objIframe);
objWrapper.dbBgFrame = objIframe;
};
};
objWrapper.handleResize = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
if (objEvent.target) {
if ((objEvent.target.nodeName.toLowerCase() == 'div') || (objEvent.target.nodeName.toLowerCase() == 'span'))
return;
};
// Restore from Link
var objWrapper = qcodo.activeDialogBox;
var objDbBg = objWrapper.dbBg;
var objDbBgFrame = objWrapper.dbBgFrame;
// Hide Everything
objWrapper.style.display = "none";
objDbBg.style.display = "none";
if (objDbBgFrame) objDbBgFrame.style.display = "none";
// Setup Events
qcodo.handleEvent(objEvent);
// Show Everything
objWrapper.style.display = "inline";
objDbBg.style.display = "block";
if (objDbBgFrame) objDbBgFrame.style.display = "block";
// DbBg Re-Setup
objDbBg.style.width = Math.max(qcodo.page.width, qcodo.client.width) + "px";
objDbBg.style.height = Math.max(qcodo.page.height, qcodo.client.height) + "px";
if (objDbBgFrame) {
objDbBgFrame.style.width = Math.max(qcodo.page.width, qcodo.client.width) + "px";
objDbBgFrame.style.height = Math.max(qcodo.page.height, qcodo.client.height) + "px";
};
// Wrapper Re-Setup
var intWidth = objWrapper.offsetWidth;
var intHeight = objWrapper.offsetHeight;
var intTop = Math.round((qcodo.client.height - intHeight) / 2) + qcodo.scroll.y;
var intLeft = Math.round((qcodo.client.width - intWidth) / 2) + qcodo.scroll.x;
objWrapper.setAbsolutePosition(intLeft, intTop);
return true;
};
objWrapper.handleKeyPress = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
qcodo.terminateEvent(objEvent);
var objWrapper = qcodo.activeDialogBox;
objWrapper.hideDialogBox();
return false;
};
objWrapper.showDialogBox = function() {
// Restore from Object Link
var objDbBg = this.dbBg;
var objDbBgFrame = this.dbBgFrame;
// Hide Everything
objWrapper.style.display = "none";
objDbBg.style.display = "none";
if (objDbBgFrame) objDbBgFrame.style.display = "none";
// Setup Events
qcodo.handleEvent();
// Show Everything
objDbBg.style.display = "block";
if (objDbBgFrame) objDbBgFrame.style.display = "block";
this.toggleDisplay("show");
// DbBg Re-Setup
objDbBg.style.width = Math.max(qcodo.page.width, qcodo.client.width) + "px";
objDbBg.style.height = Math.max(qcodo.page.height, qcodo.client.height) + "px";
if (objDbBgFrame) {
objDbBgFrame.style.width = Math.max(qcodo.page.width, qcodo.client.width) + "px";
objDbBgFrame.style.height = Math.max(qcodo.page.height, qcodo.client.height) + "px";
};
// Wrapper Re-Setup
var intWidth = objWrapper.offsetWidth;
var intHeight = objWrapper.offsetHeight;
var intTop = Math.round((qcodo.client.height - intHeight) / 2) + qcodo.scroll.y;
var intLeft = Math.round((qcodo.client.width - intWidth) / 2) + qcodo.scroll.x;
objWrapper.setAbsolutePosition(intLeft, intTop);
// Set Window OnResize Handling
window.onresize = this.handleResize;
window.onscroll = this.handleResize;
qcodo.activeDialogBox = this;
// If blnAnyKeyCloses is set, any keypress should close the dialog
if (objWrapper.anyKeyCloses) {
document.body.onkeypress = this.handleKeyPress;
objWrapper.control.focus();
};
};
objWrapper.hideDialogBox = function() {
var objWrapper = this;
if (this.id.indexOf("_ctldbbg") > 0)
objWrapper = this.wrapper;
objWrapper.dbBg.style.display = "none";
if (objWrapper.dbBgFrame) objWrapper.dbBgFrame.style.display = "none";
objWrapper.toggleDisplay("hide");
// Unsetup OnResize Handling
window.onresize = null;
window.onscroll = null;
// Unsetup KeyPress Closing
document.body.onkeypress = null;
// Unsetup ActiveDialogBox
qcodo.activeDialogBox = null;
};
// Initial Wrapper Setup
objWrapper.style.zIndex = 999;
objWrapper.style.position = "absolute";
objWrapper.anyKeyCloses = blnAnyKeyCloses;
// Initial DbBg Setup
objDbBg.style.position = "absolute";
objDbBg.style.zIndex = 998;
objDbBg.style.top = "0px";
objDbBg.style.left = "0px";
if (qcodo.isBrowser(qcodo.IE))
objDbBg.style.overflow = "auto";
else
objDbBg.style.overflow = "hidden";
if (blnMatteClickable) {
objDbBg.style.cursor = "pointer";
objDbBg.onclick = objWrapper.hideDialogBox;
} else {
objDbBg.style.cursor = "url(" + qc.imageAssets + "/_core/move_nodrop.cur), auto";
objDbBg.onclick = null;
};
// Background Color and Opacity
objDbBg.style.backgroundColor = strMatteColor;
if (qcodo.isBrowser(qcodo.IE))
objDbBg.style.filter = "alpha(opacity=" + intMatteOpacity + ")";
else
objDbBg.style.opacity = intMatteOpacity / 100.0;
// Other Random Stuff
objDbBg.style.fontSize = "1px";
objDbBg.innerHTML = " ";
// Perform a Show or Hide (depending on state)
if (objWrapper.style.display == 'none')
objWrapper.hideDialogBox();
else
objWrapper.showDialogBox();
};
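// Illustrative usage (control id hypothetical; assumes the control was
// registered via qc.regC): show "pnlModal" as a modal dialog over a black
// matte at 50% opacity; clicking the matte or pressing any key closes it.
//   qcodo.registerDialogBox("pnlModal", "#000000", 50, true, true);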
//////////////////
// Qcodo Shortcuts
//////////////////
qc.regDB = qcodo.registerDialogBox;
///////////////////////////////
// Control Handle Functionality
///////////////////////////////
qcodo.registerControlHandle = function(mixControl, strCursor) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
if (!objWrapper.handle) {
var objHandle = document.createElement("span");
objHandle.id = objWrapper.id + "handle";
objWrapper.parentNode.appendChild(objHandle);
objWrapper.handle = objHandle;
objHandle.wrapper = objWrapper;
if (!objWrapper.style.position) {
// The Wrapper is not defined as Positioned Relatively or Absolutely
// Therefore, no offsetTop/Left/Width/Height values are available on the wrapper itself
objHandle.style.width = objWrapper.control.style.width;
objHandle.style.height = objWrapper.control.style.height;
objHandle.style.top = objWrapper.control.offsetTop + "px";
objHandle.style.left = objWrapper.control.offsetLeft + "px";
} else {
objHandle.style.width = objWrapper.offsetWidth + "px";
objHandle.style.height = objWrapper.offsetHeight + "px";
objHandle.style.top = objWrapper.offsetTop + "px";
objHandle.style.left = objWrapper.offsetLeft + "px";
};
objHandle.style.cursor = strCursor;
objHandle.style.zIndex = 999;
objHandle.style.backgroundColor = "white";
if (qcodo.isBrowser(qcodo.IE))
objHandle.style.filter = "alpha(opacity=0)";
else
objHandle.style.opacity = 0.0;
objHandle.style.position = "absolute";
objHandle.style.fontSize = "1px";
objHandle.innerHTML = ".";
};
objWrapper.updateHandle = function(blnUpdateParent, strCursor) {
var objHandle = this.handle;
// Make Sure the Wrapper's Parent owns this Handle
if (blnUpdateParent)
this.parentNode.appendChild(objHandle);
// Fixup Size and Positioning
objHandle.style.top = this.offsetTop + "px";
objHandle.style.left = this.offsetLeft + "px";
objHandle.style.width = this.offsetWidth + "px";
objHandle.style.height = this.offsetHeight + "px";
// Update the Cursor
if (strCursor)
objHandle.style.cursor = strCursor;
};
};
//////////////////
// Qcodo Shortcuts
//////////////////
qc.regCH = qcodo.registerControlHandle;
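// Illustrative usage (control id hypothetical): overlay a transparent,
// absolutely-positioned handle on "pnlCard" that shows the "move" cursor.
//   qc.regCH("pnlCard", "move");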
/////////////////////////////////////////////
// Control: Moveable functionality
/////////////////////////////////////////////
qcodo.registerControlMoveable = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
objWrapper.moveable = true;
// Control Handle and Mask
objWrapper.mask = qcodo.getControl(objWrapper.id + "mask");
if (!objWrapper.mask) {
var objSpanElement = document.createElement('span');
objSpanElement.id = objWrapper.id + "mask";
objSpanElement.style.position = "absolute";
document.getElementById(document.getElementById("Qform__FormId").value).appendChild(objSpanElement);
objWrapper.mask = objSpanElement;
};
objWrapper.mask.wrapper = objWrapper;
// Setup Mask
var objMask = objWrapper.mask;
objMask.style.position = "absolute";
objMask.style.zIndex = 998;
if (qcodo.isBrowser(qcodo.IE))
objMask.style.filter = "alpha(opacity=50)";
else
objMask.style.opacity = 0.5;
objMask.style.display = "none";
objMask.innerHTML = "";
objMask.handleAnimateComplete = function(mixControl) {
this.style.display = "none";
};
};
// Update Absolutely-positioned children on Scroller (if applicable)
// to fix Firefox b/c firefox uses position:absolute incorrectly
/* if (qcodo.isBrowser(qcodo.FIREFOX) && (objControl.style.overflow == "auto"))
objControl.onscroll = function(objEvent) {
objEvent = qcodo.handleEvent(objEvent);
for (var intIndex = 0; intIndex < this.childNodes.length; intIndex++) {
if ((this.childNodes[intIndex].style) && (this.childNodes[intIndex].style.position == "absolute")) {
if (!this.childNodes[intIndex].originalX) {
this.childNodes[intIndex].originalX = this.childNodes[intIndex].offsetLeft;
this.childNodes[intIndex].originalY = this.childNodes[intIndex].offsetTop;
}
this.childNodes[intIndex].style.left = this.childNodes[intIndex].originalX - this.scrollLeft + "px";
this.childNodes[intIndex].style.top = this.childNodes[intIndex].originalY - this.scrollTop + "px";
}
}
}*/
///////////////////////////////////////////////
// Block Control: DropZone Target Functionality
///////////////////////////////////////////////
qcodo.registerControlDropZoneTarget = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
// Control Handle and Mask
objWrapper.dropZoneMask = qcodo.getControl(objWrapper.id + "dzmask");
if (!objWrapper.dropZoneMask) {
//<span id="%s_ctldzmask" style="position:absolute;"><span style="font-size: 1px"> </span></span>
var objSpanElement = document.createElement("span");
objSpanElement.id = objWrapper.id + "dzmask";
objSpanElement.style.position = "absolute";
var objInnerSpanElement = document.createElement("span");
objInnerSpanElement.style.fontSize = "1px";
objInnerSpanElement.innerHTML = " ";
objSpanElement.appendChild(objInnerSpanElement);
document.getElementById(document.getElementById("Qform__FormId").value).appendChild(objSpanElement);
objWrapper.dropZoneMask = objSpanElement;
objWrapper.dropZoneMask.wrapper = objWrapper;
// Setup Mask
var objMask = objWrapper.dropZoneMask;
objMask.style.position = "absolute";
objMask.style.top = "0px";
objMask.style.left = "0px";
objMask.style.borderColor = "#bb3399";
objMask.style.borderStyle = "solid";
objMask.style.borderWidth = "3px";
objMask.style.display = "none";
};
objWrapper.addToDropZoneGrouping = function(strGroupingId, blnAllowSelf, blnAllowSelfParent) {
if (!qcodo.dropZoneGrouping[strGroupingId])
qcodo.dropZoneGrouping[strGroupingId] = new Array();
qcodo.dropZoneGrouping[strGroupingId][this.control.id] = this;
qcodo.dropZoneGrouping[strGroupingId]["__allowSelf"] = (blnAllowSelf) ? true : false;
qcodo.dropZoneGrouping[strGroupingId]["__allowSelfParent"] = (blnAllowSelfParent) ? true : false;
qcodo.registerControlDropZoneTarget(this.control);
};
objWrapper.removeFromDropZoneGrouping = function(strGroupingId) {
if (!qcodo.dropZoneGrouping[strGroupingId])
qcodo.dropZoneGrouping[strGroupingId] = new Array();
else
qcodo.dropZoneGrouping[strGroupingId][this.control.id] = false;
};
// Qcodo Shortcuts
objWrapper.a2DZG = objWrapper.addToDropZoneGrouping;
objWrapper.rfDZG = objWrapper.removeFromDropZoneGrouping;
};
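// Illustrative usage (control id and grouping name hypothetical): make
// "pnlTrash" a drop target in the "cards" grouping, disallowing drops onto
// itself or onto its own parent.
//   qc.getW("pnlTrash").a2DZG("cards", false, false);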
///////////////////////////////////
// Block Control: DropZone Grouping
///////////////////////////////////
qcodo.dropZoneGrouping = new Array();
///////////////////////////////////////////
// Block Control: Move Handle Functionality
///////////////////////////////////////////
qcodo.registerControlMoveHandle = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
if (!objWrapper.handle) {
qcodo.registerControlHandle(objControl, 'move');
// Assign Event Handlers
qcodo.enableMouseDrag();
objWrapper.handleMouseDown = function(objEvent, objHandle) {
// Set the Handle's MoveControls Bounding Box
this.setupBoundingBox();
// Calculate the offset (the top-left page coordinates of the bounding box vs. where the mouse is on the page)
this.offsetX = qcodo.page.x - this.boundingBox.x;
this.offsetY = qcodo.page.y - this.boundingBox.y;
this.startDragX = qcodo.page.x;
this.startDragY = qcodo.page.y;
// Clear MaskReturn Timeout (if applicable)
if (qcodo.moveHandleReset)
qcodo.moveHandleReset.resetMasksCancel();
// Make the Masks appear (if applicable)
for (var strKey in this.moveControls) {
var objMoveControl = this.moveControls[strKey];
var objMask = objMoveControl.mask;
var objAbsolutePosition = objMoveControl.getAbsolutePosition();
objMask.style.display = "block";
objMask.style.top = objAbsolutePosition.y + "px";
objMask.style.left = objAbsolutePosition.x + "px";
objMask.innerHTML = "";
};
return qcodo.terminateEvent(objEvent);
};
objWrapper.handleMouseMove = function(objEvent, objHandle) {
// Do We Scroll?
if ((qcodo.client.x <= 30) || (qcodo.client.y >= (qcodo.client.height - 30)) ||
(qcodo.client.y <= 30) || (qcodo.client.x >= (qcodo.client.width - 30))) {
qcodo.scrollMoveHandle = this;
qcodo.handleScroll();
} else {
// Clear Handle Timeout
qcodo.clearTimeout(objWrapper.id);
this.moveMasks();
};
return qcodo.terminateEvent(objEvent);
};
objWrapper.handleMouseUp = function(objEvent, objHandle) {
// Calculate Move Delta
var objMoveDelta = this.calculateMoveDelta();
var intDeltaX = objMoveDelta.x;
var intDeltaY = objMoveDelta.y;
// Stop Scrolling
qcodo.clearTimeout(this.id);
// Validate Drop Zone
var objDropControl;
if ((intDeltaX == 0) && (intDeltaY == 0)) {
// Nothing Moved!
objDropControl = null;
} else {
objDropControl = this.getDropTarget();
};
if (objDropControl) {
// Update everything that's moving (e.g. all controls in qcodo.moveControls)
for (var strKey in this.moveControls) {
var objWrapper = this.moveControls[strKey];
var objMask = objWrapper.mask;
objMask.style.display = "none";
objMask.style.cursor = null;
// qcodo.moveControls[strKey] = null;
objWrapper.updateStyle("position", "absolute");
// Get Control's Position
var objAbsolutePosition = objWrapper.getAbsolutePosition();
// Update Parent -- Wrapper now belongs to a new DropControl
if (objDropControl.nodeName.toLowerCase() == 'form') {
if (objWrapper.parentNode != objDropControl)
objWrapper.updateStyle("parent", objDropControl.id);
} else {
if (objDropControl.id != objWrapper.parentNode.parentNode.id)
objWrapper.updateStyle("parent", objDropControl.control.id);
};
// Update Control's Position
objWrapper.setAbsolutePosition(objAbsolutePosition.x + intDeltaX, objAbsolutePosition.y + intDeltaY, true);
if (objWrapper.updateHandle)
objWrapper.updateHandle(true, "move");
// Setup OnMove (if applicable)
if (objWrapper.control.getAttribute("onqcodomove")) {
objWrapper.control.qcodomove = function(strOnMoveCommand) {
eval(strOnMoveCommand);
};
objWrapper.control.qcodomove(objWrapper.control.getAttribute("onqcodomove"));
};
};
} else {
// Rejected
for (var strKey in this.moveControls) {
var objWrapper = this.moveControls[strKey];
var objMask = objWrapper.mask;
objMask.style.cursor = null;
};
if (objWrapper.updateHandle)
objWrapper.updateHandle(false, "move");
if (qcodo.isBrowser(qcodo.IE))
this.resetMasks(intDeltaX, intDeltaY, 25);
else
this.resetMasks(intDeltaX, intDeltaY, 50);
};
// If we haven't moved at all, go ahead and run the control's onclick method
// (if applicable) or just propagate the click up
if ((intDeltaX == 0) && (intDeltaY == 0)) {
if (this.control.onclick)
return this.control.onclick(objEvent);
else
return true;
} else {
return qcodo.terminateEvent(objEvent);
};
};
// Setup Move Targets
objWrapper.moveControls = new Object();
objWrapper.registerMoveTarget = function(mixControl) {
// If they pass in null, then register itself as the move target
if (mixControl == null) mixControl = this.control;
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objTargetWrapper = objControl.wrapper;
if (objTargetWrapper)
this.moveControls[objControl.id] = objTargetWrapper;
// this.registerDropZone(objTargetWrapper.parentNode);
};
objWrapper.unregisterMoveTarget = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
if (objControl.id)
this.moveControls[objControl.id] = null;
};
objWrapper.clearMoveTargets = function() {
this.moveControls = new Object();
};
// Setup Drop Zones
objWrapper.registerDropZone = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
if (objControl.wrapper) {
qcodo.registerControlDropZoneTarget(objControl);
this.dropControls[objControl.id] = objControl.wrapper;
} else
this.dropControls[objControl.id] = objControl;
};
objWrapper.unregisterDropZone = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
this.dropControls[objControl.id] = null;
};
objWrapper.clearDropZones = function() {
this.dropControls = new Object();
};
objWrapper.clearDropZones();
objWrapper.registerDropZoneGrouping = function(strGroupingId) {
if (!qcodo.dropZoneGrouping[strGroupingId])
qcodo.dropZoneGrouping[strGroupingId] = new Array();
this.dropGroupings[strGroupingId] = true;
};
objWrapper.clearDropZoneGroupings = function() {
this.dropGroupings = new Object();
};
objWrapper.clearDropZoneGroupings();
// Mouse Delta Calculator
objWrapper.calculateMoveDelta = function() {
// Calculate Move Delta
var intDeltaX = qcodo.page.x - this.startDragX;
var intDeltaY = qcodo.page.y - this.startDragY;
intDeltaX = Math.min(Math.max(intDeltaX, -1 * this.boundingBox.x), qcodo.page.width - this.boundingBox.boundX);
intDeltaY = Math.min(Math.max(intDeltaY, -1 * this.boundingBox.y), qcodo.page.height - this.boundingBox.boundY);
return {x: intDeltaX, y: intDeltaY};
};
objWrapper.setupBoundingBox = function() {
// Calculate moveControls aggregate bounding box (x,y,width,height,boundX,boundY)
// Note that boundX is just (x + width), and boundY is just (y + height)
var intMinX = null;
var intMinY = null;
var intMaxX = null;
var intMaxY = null;
for (var strKey in this.moveControls) {
var objMoveControl = this.moveControls[strKey];
var objAbsolutePosition = objMoveControl.getAbsolutePosition();
if (intMinX == null) {
intMinX = objAbsolutePosition.x;
intMinY = objAbsolutePosition.y;
intMaxX = objAbsolutePosition.x + objMoveControl.offsetWidth;
intMaxY = objAbsolutePosition.y + objMoveControl.offsetHeight;
} else {
intMinX = Math.min(intMinX, objAbsolutePosition.x);
intMinY = Math.min(intMinY, objAbsolutePosition.y);
intMaxX = Math.max(intMaxX, objAbsolutePosition.x + objMoveControl.offsetWidth);
intMaxY = Math.max(intMaxY, objAbsolutePosition.y + objMoveControl.offsetHeight);
};
};
if (!this.boundingBox)
this.boundingBox = new Object();
this.boundingBox.x = intMinX;
this.boundingBox.y = intMinY;
this.boundingBox.boundX = intMaxX;
this.boundingBox.boundY = intMaxY;
this.boundingBox.width = intMaxX - intMinX;
this.boundingBox.height = intMaxY - intMinY;
};
objWrapper.updateBoundingBox = function() {
// Just like setupBoundingBox, except now we're using the MASKS instead of the Controls
// (in case, because of the move, the size of the control may have changed/been altered)
var intMinX = null;
var intMinY = null;
var intMaxX = null;
var intMaxY = null;
for (var strKey in this.moveControls) {
var objMoveControl = this.moveControls[strKey];
var objAbsolutePosition = objMoveControl.getAbsolutePosition();
if (intMinX == null) {
intMinX = objAbsolutePosition.x;
intMinY = objAbsolutePosition.y;
intMaxX = objAbsolutePosition.x + objMoveControl.mask.offsetWidth;
intMaxY = objAbsolutePosition.y + objMoveControl.mask.offsetHeight;
} else {
intMinX = Math.min(intMinX, objAbsolutePosition.x);
intMinY = Math.min(intMinY, objAbsolutePosition.y);
intMaxX = Math.max(intMaxX, objAbsolutePosition.x + objMoveControl.mask.offsetWidth);
intMaxY = Math.max(intMaxY, objAbsolutePosition.y + objMoveControl.mask.offsetHeight);
};
};
this.boundingBox.x = intMinX;
this.boundingBox.y = intMinY;
this.boundingBox.boundX = intMaxX;
this.boundingBox.boundY = intMaxY;
this.boundingBox.width = intMaxX - intMinX;
this.boundingBox.height = intMaxY - intMinY;
};
objWrapper.moveMasks = function() {
// Calculate Move Delta
var objMoveDelta = this.calculateMoveDelta();
var intDeltaX = objMoveDelta.x;
var intDeltaY = objMoveDelta.y;
var blnValidDropZone = this.validateDropZone();
if (blnValidDropZone)
this.handle.style.cursor = "url(" + qc.imageAssets + "/_core/move_drop.cur), auto";
else
this.handle.style.cursor = "url(" + qc.imageAssets + "/_core/move_nodrop.cur), auto";
// Update Everything that's Moving (e.g. all controls in qcodo.moveControls)
for (var strKey in this.moveControls) {
var objWrapper = this.moveControls[strKey];
var objMask = objWrapper.mask;
// Fixes a weird Firefox bug
if (objMask.innerHTML == "")
objMask.innerHTML = ".";
if (objMask.innerHTML == ".")
objMask.innerHTML = objWrapper.innerHTML.replace(' id="', ' id="invalid_mask_');
// Recalculate Widths
this.updateBoundingBox();
// Move this control's mask
objWrapper.setMaskOffset(intDeltaX, intDeltaY);
if (blnValidDropZone) {
objMask.style.cursor = "url(" + qc.imageAssets + "/_core/move_drop.cur), auto";
} else {
objMask.style.cursor = "url(" + qc.imageAssets + "/_core/move_nodrop.cur), auto";
};
};
};
objWrapper.getDropZoneControlWrappers = function() {
var arrayToReturn = new Array();
for (var strDropKey in this.dropControls) {
var objDropWrapper = this.dropControls[strDropKey];
if (objDropWrapper)
arrayToReturn[strDropKey] = objDropWrapper;
};
for (var strGroupingId in this.dropGroupings) {
if (this.dropGroupings[strGroupingId]) for (var strControlId in qcodo.dropZoneGrouping[strGroupingId]) {
if (strControlId.substring(0, 1) != "_") {
var objDropWrapper = qcodo.dropZoneGrouping[strGroupingId][strControlId];
if (objDropWrapper) {
if (objDropWrapper.control.id == objWrapper.control.id) {
if (qcodo.dropZoneGrouping[strGroupingId]["__allowSelf"])
arrayToReturn[strControlId] = objDropWrapper;
} else if (objDropWrapper.control.id == objWrapper.parentNode.id) {
if (qcodo.dropZoneGrouping[strGroupingId]["__allowSelfParent"])
arrayToReturn[strControlId] = objDropWrapper;
} else {
arrayToReturn[strControlId] = objDropWrapper;
};
};
};
};
};
return arrayToReturn;
};
objWrapper.validateDropZone = function() {
var blnFoundTarget = false;
var blnFormOkay = false;
var dropControls = this.getDropZoneControlWrappers();
for (var strDropKey in dropControls) {
var objDropWrapper = dropControls[strDropKey];
if (objDropWrapper) {
if (objDropWrapper.nodeName.toLowerCase() == 'form') {
blnFormOkay = true;
} else if (objDropWrapper.containsPoint(qcodo.page.x, qcodo.page.y)) {
if (blnFoundTarget) {
objDropWrapper.dropZoneMask.style.display = "none";
} else {
objDropWrapper.dropZoneMask.style.display = "block";
var objAbsolutePosition = objDropWrapper.getAbsolutePosition();
if (qcodo.isBrowser(qcodo.IE) && (window.document.compatMode == "BackCompat")) {
objDropWrapper.dropZoneMask.style.width = Math.max(7, objDropWrapper.control.offsetWidth) + "px";
objDropWrapper.dropZoneMask.style.height = Math.max(7, objDropWrapper.control.offsetHeight) + "px";
// if (objDropWrapper.style.position == 'absolute') {
var objAbsolutePosition = objDropWrapper.getAbsolutePosition();
// objDropWrapper.setDropZoneMaskAbsolutePosition(objAbsolutePosition.x + 10, objAbsolutePosition.y + 10);
objDropWrapper.setDropZoneMaskAbsolutePosition(objAbsolutePosition.x, objAbsolutePosition.y);
// };
} else {
objDropWrapper.dropZoneMask.style.width = Math.max(1, objDropWrapper.control.offsetWidth - 6) + "px";
objDropWrapper.dropZoneMask.style.height = Math.max(1, objDropWrapper.control.offsetHeight - 6) + "px";
// if (objDropWrapper.style.position != 'absolute') {
var objAbsolutePosition = objDropWrapper.getAbsolutePosition();
objDropWrapper.setDropZoneMaskAbsolutePosition(objAbsolutePosition.x, objAbsolutePosition.y);
// }
};
blnFoundTarget = true;
};
} else {
objDropWrapper.dropZoneMask.style.display = "none";
};
};
};
return (blnFoundTarget || blnFormOkay);
};
// Returns null if no target was found
// Could also return the Form if not dropped on any valid target but the Form is still a drop zone
objWrapper.getDropTarget = function() {
var objForm = null;
var objToReturn = null;
var dropControls = this.getDropZoneControlWrappers();
for (var strDropKey in dropControls) {
var objDropWrapper = dropControls[strDropKey];
if (objDropWrapper) {
if (objDropWrapper.nodeName.toLowerCase() == 'form')
objForm = objDropWrapper;
else if (objDropWrapper.containsPoint(qcodo.page.x, qcodo.page.y)) {
objDropWrapper.dropZoneMask.style.display = "none";
if (!objToReturn)
objToReturn = objDropWrapper;
};
};
};
if (objToReturn)
return objToReturn;
if (objForm)
return objForm;
return null;
};
objWrapper.resetMasks = function(intDeltaX, intDeltaY, intSpeed) {
qcodo.moveHandleReset = this;
if (intDeltaX || intDeltaY) {
this.resetCurrentOffsetX = intDeltaX * 1.0;
this.resetCurrentOffsetY = intDeltaY * 1.0;
var fltTotalMove = Math.sqrt(Math.pow(intDeltaX, 2) + Math.pow(intDeltaY, 2));
var fltRatio = (intSpeed * 1.0) / fltTotalMove;
this.resetStepX = fltRatio * intDeltaX;
this.resetStepY = fltRatio * intDeltaY;
qcodo.setTimeout("move_mask_return", "qcodo.wrappers['" + this.id + "'].resetMaskHelper()", 10);
};
};
objWrapper.resetMaskHelper = function() {
if (this.resetCurrentOffsetX < 0)
this.resetCurrentOffsetX = Math.min(this.resetCurrentOffsetX - this.resetStepX, 0);
else
this.resetCurrentOffsetX = Math.max(this.resetCurrentOffsetX - this.resetStepX, 0);
if (this.resetCurrentOffsetY < 0)
this.resetCurrentOffsetY = Math.min(this.resetCurrentOffsetY - this.resetStepY, 0);
else
this.resetCurrentOffsetY = Math.max(this.resetCurrentOffsetY - this.resetStepY, 0);
for (var strKey in this.moveControls) {
var objWrapper = this.moveControls[strKey];
objWrapper.setMaskOffset(this.resetCurrentOffsetX, this.resetCurrentOffsetY);
if ((this.resetCurrentOffsetX == 0) && (this.resetCurrentOffsetY == 0)) {
objWrapper.mask.style.display = "none";
};
};
if ((this.resetCurrentOffsetX != 0) || (this.resetCurrentOffsetY != 0))
qcodo.setTimeout("move_mask_return", "qcodo.wrappers['" + this.id + "'].resetMaskHelper()", 10);
else
qcodo.moveHandleReset = null;
};
objWrapper.resetMasksCancel = function() {
qcodo.clearTimeout("move_mask_return");
qcodo.moveHandleReset = null;
for (var strKey in this.moveControls) {
var objWrapper = this.moveControls[strKey];
objWrapper.mask.style.display = "none";
};
};
// Wrapper Shortcuts
objWrapper.regMT = objWrapper.registerMoveTarget;
objWrapper.regDZ = objWrapper.registerDropZone;
objWrapper.regDZG = objWrapper.registerDropZoneGrouping;
} else {
objWrapper.updateHandle();
};
};
qcodo.animateMove = function(mixControl, intDestinationX, intDestinationY, intSpeed) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
// Record Destination Coordinates
objControl.destinationX = intDestinationX;
objControl.destinationY = intDestinationY;
// Get Starting Coordinates
var objAbsolutePosition = qcodo.getAbsolutePosition(objControl);
objControl.currentX = objAbsolutePosition.x * 1.0;
objControl.currentY = objAbsolutePosition.y * 1.0;
// Calculate the amount to move in the X- and Y- direction per step
var fltTotalMove = Math.sqrt(Math.pow(objControl.destinationY - objControl.currentY, 2) + Math.pow(objControl.destinationX - objControl.currentX, 2));
var fltTotalMoveX = (objControl.destinationX * 1.0) - objControl.currentX;
var fltTotalMoveY = (objControl.destinationY * 1.0) - objControl.currentY;
objControl.stepMoveX = ((intSpeed * 1.0) / fltTotalMove) * fltTotalMoveX;
objControl.stepMoveY = ((intSpeed * 1.0) / fltTotalMove) * fltTotalMoveY;
qcodo.setTimeout(objControl, "qcodo.handleAnimateMove('" + objControl.id + "');", 10);
};
qcodo.handleAnimateMove = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
// Update Current Coordinates
if (objControl.stepMoveX < 0)
objControl.currentX = Math.max(objControl.destinationX, objControl.currentX + objControl.stepMoveX);
else
objControl.currentX = Math.min(objControl.destinationX, objControl.currentX + objControl.stepMoveX);
if (objControl.stepMoveY < 0)
objControl.currentY = Math.max(objControl.destinationY, objControl.currentY + objControl.stepMoveY);
else
objControl.currentY = Math.min(objControl.destinationY, objControl.currentY + objControl.stepMoveY);
qcodo.setAbsolutePosition(objControl, Math.round(objControl.currentX), Math.round(objControl.currentY));
if ((Math.round(objControl.currentX) == objControl.destinationX) &&
(Math.round(objControl.currentY) == objControl.destinationY)) {
// We are done
if (objControl.handleAnimateComplete)
objControl.handleAnimateComplete(objControl);
} else {
// Do it again
qcodo.setTimeout(objControl, "qcodo.handleAnimateMove('" + objControl.id + "');", 10);
};
};
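// Illustrative usage (control id hypothetical; assumes qcodo's
// getAbsolutePosition/setAbsolutePosition helpers are loaded): glide "pnlCard"
// to page coordinates (200, 100) at roughly 15px per 10ms tick; an optional
// handleAnimateComplete callback on the control fires on arrival.
//   qcodo.animateMove("pnlCard", 200, 100, 15);
//   qc.getC("pnlCard").handleAnimateComplete = function(objControl) {
//       alert(objControl.id + " arrived");
//   };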
qcodo.handleScroll = function() {
var objHandle = qcodo.scrollMoveHandle;
// Clear Timeout
qcodo.clearTimeout(objHandle.id);
// How much to scroll by
var intScrollByX = 0;
var intScrollByY = 0;
// Calculate our ScrollByY amount
if (qcodo.client.y <= 30) {
var intDivisor = (qcodo.isBrowser(qcodo.IE)) ? 1.5 : 3;
intScrollByY = Math.round((qcodo.client.y - 30) / intDivisor);
} else if (qcodo.client.y >= (qcodo.client.height - 30)) {
var intDivisor = (qcodo.isBrowser(qcodo.IE)) ? 1.5 : 3;
intScrollByY = Math.round((qcodo.client.y - (qcodo.client.height - 30)) / intDivisor);
};
// Calculate our ScrollByX amount
if (qcodo.client.x <= 30) {
var intDivisor = (qcodo.isBrowser(qcodo.IE)) ? 1 : 2;
intScrollByX = Math.round((qcodo.client.x - 30) / intDivisor);
} else if (qcodo.client.x >= (qcodo.client.width - 30)) {
var intDivisor = (qcodo.isBrowser(qcodo.IE)) ? 1 : 2;
intScrollByX = Math.round((qcodo.client.x - (qcodo.client.width - 30)) / intDivisor);
};
// Limit ScrollBy amounts (dependent on current scroll and scroll.max's)
if (intScrollByX < 0) {
// Scroll to Left
intScrollByX = Math.max(intScrollByX, 0 - qcodo.scroll.x);
} else if (intScrollByX > 0) {
// Scroll to Right
intScrollByX = Math.min(intScrollByX, qcodo.scroll.width - qcodo.scroll.x);
};
if (intScrollByY < 0) {
// Scroll to Left
intScrollByY = Math.max(intScrollByY, 0 - qcodo.scroll.y);
} else if (intScrollByY > 0) {
// Scroll to Right
intScrollByY = Math.min(intScrollByY, qcodo.scroll.height - qcodo.scroll.y);
};
// Perform the Scroll
window.scrollBy(intScrollByX, intScrollByY);
// Update Event Stats
qcodo.handleEvent(null);
// Update Handle Offset
objHandle.offsetX -= intScrollByX;
objHandle.offsetY -= intScrollByY;
objHandle.moveMasks();
if (intScrollByX || intScrollByY)
qcodo.setTimeout(objHandle.id, "qcodo.handleScroll()", 25);
};
//////////////////
// Qcodo Shortcuts
//////////////////
qc.regCM = qcodo.registerControlMoveable;
qc.regCMH = qcodo.registerControlMoveHandle;
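// Illustrative wiring of a drag-and-drop panel (all ids and the grouping name
// are hypothetical; assumes the controls were registered via qc.regC):
//   qc.regCM("pnlCard");        // moveable: creates the drag mask
//   qc.regCMH("pnlCard");       // attach the move handle and mouse events
//   var objW = qc.getW("pnlCard");
//   objW.regMT(null);           // the control is its own move target
//   objW.regDZ("pnlColumnA");   // an explicit drop zone
//   objW.regDZG("cards");       // ...or any member of the "cards" grouping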
/////////////////////////////////////////////
// Block Control: Resize Handle functionality
/////////////////////////////////////////////
qcodo.registerControlResizeHandle = function(mixControl, blnVertical) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
objWrapper.resizeHandle = true;
objWrapper.resizeDirectionVertical = blnVertical;
objWrapper.resizeUpperControls = new Array();
objWrapper.resizeLowerControls = new Array();
if (!objWrapper.handle) {
if (qcodo.isBrowser(qcodo.SAFARI))
qcodo.registerControlHandle(objControl, 'move');
else if (qcodo.isBrowser(qcodo.IE)) {
if (objWrapper.resizeDirectionVertical)
qcodo.registerControlHandle(objControl, 'row-resize');
else
qcodo.registerControlHandle(objControl, 'col-resize');
} else {
if (objWrapper.resizeDirectionVertical)
qcodo.registerControlHandle(objControl, 'ns-resize');
else
qcodo.registerControlHandle(objControl, 'ew-resize');
};
// Assign Event Handlers
qcodo.enableMouseDrag();
objWrapper.handleMouseDown = function(objEvent, objHandle) {
this.startUpperSizes = new Array();
this.startLowerSizes = new Array();
this.startLowerPositions = new Array();
if (this.resizeDirectionVertical) {
this.offsetY = qcodo.page.y - this.getAbsolutePosition().y;
this.startDragY = qcodo.page.y;
for (var intIndex = 0; intIndex < this.resizeUpperControls.length; intIndex++) {
var objUpperControl = this.resizeUpperControls[intIndex];
this.startUpperSizes[intIndex] = eval(objUpperControl.control.style.height.replace(/px/, ""));
};
for (var intIndex = 0; intIndex < this.resizeLowerControls.length; intIndex++) {
var objLowerControl = this.resizeLowerControls[intIndex];
this.startLowerPositions[intIndex] = objLowerControl.getAbsolutePosition().y;
this.startLowerSizes[intIndex] = eval(objLowerControl.control.style.height.replace(/px/, ""));
};
if (this.resizeMinimum != null)
this.resizeMinimumY = this.getAbsolutePosition().y - (this.offsetTop - this.resizeMinimum);
else
this.resizeMinimumY = null;
if (this.resizeMaximum != null)
this.resizeMaximumY = this.getAbsolutePosition().y - (this.offsetTop - this.resizeMaximum);
else
this.resizeMaximumY = null;
} else {
this.offsetX = qcodo.page.x - this.getAbsolutePosition().x;
this.startDragX = qcodo.page.x;
for (var intIndex = 0; intIndex < this.resizeUpperControls.length; intIndex++) {
var objUpperControl = this.resizeUpperControls[intIndex];
this.startUpperSizes[intIndex] = eval(objUpperControl.control.style.width.replace(/px/, ""));<|fim▁hole|> for (var intIndex = 0; intIndex < this.resizeLowerControls.length; intIndex++) {
var objLowerControl = this.resizeLowerControls[intIndex];
this.startLowerPositions[intIndex] = objLowerControl.getAbsolutePosition().x;
this.startLowerSizes[intIndex] = eval(objLowerControl.control.style.width.replace(/px/, ""));
};
if (this.resizeMinimum != null)
this.resizeMinimumX = this.getAbsolutePosition().x - (this.offsetLeft - this.resizeMinimum);
else
this.resizeMinimumX = null;
if (this.resizeMaximum != null)
this.resizeMaximumX = this.getAbsolutePosition().x - (this.offsetLeft - this.resizeMaximum);
else
this.resizeMaximumX = null;
};
return qcodo.terminateEvent(objEvent);
};
objWrapper.handleMouseMove = function(objEvent, objHandle) {
if (this.resizeDirectionVertical) {
var intNewY = qcodo.page.y - this.offsetY;
if (this.resizeMinimumY != null)
intNewY = Math.max(intNewY, this.resizeMinimumY);
if (this.resizeMaximumY != null)
intNewY = Math.min(intNewY, this.resizeMaximumY);
var intDeltaY = intNewY - this.startDragY + this.offsetY;
// Update ResizeHandle's Position
this.setAbsolutePosition(this.getAbsolutePosition().x, intNewY);
// Resize Upper Controls
for (var intIndex = 0; intIndex < this.resizeUpperControls.length; intIndex++) {
var objUpperControl = this.resizeUpperControls[intIndex];
objUpperControl.updateStyle("height", this.startUpperSizes[intIndex] + intDeltaY + "px");
};
// Reposition Lower Controls
for (var intIndex = 0; intIndex < this.resizeLowerControls.length; intIndex++) {
var objLowerControl = this.resizeLowerControls[intIndex];
objLowerControl.setAbsolutePosition(
objLowerControl.getAbsolutePosition().x,
this.startLowerPositions[intIndex] + intDeltaY);
objLowerControl.updateStyle("height", this.startLowerSizes[intIndex] - intDeltaY + "px");
};
} else {
var intNewX = qcodo.page.x - this.offsetX;
if (this.resizeMinimumX != null)
intNewX = Math.max(intNewX, this.resizeMinimumX);
if (this.resizeMaximumX != null)
intNewX = Math.min(intNewX, this.resizeMaximumX);
var intDeltaX = intNewX - this.startDragX + this.offsetX;
// Update ResizeHandle's Position
this.setAbsolutePosition(intNewX, this.getAbsolutePosition().y);
// Resize Upper Controls
for (var intIndex = 0; intIndex < this.resizeUpperControls.length; intIndex++) {
var objUpperControl = this.resizeUpperControls[intIndex];
objUpperControl.updateStyle("width", this.startUpperSizes[intIndex] + intDeltaX + "px");
};
// Reposition Lower Controls
for (var intIndex = 0; intIndex < this.resizeLowerControls.length; intIndex++) {
var objLowerControl = this.resizeLowerControls[intIndex];
objLowerControl.setAbsolutePosition(
this.startLowerPositions[intIndex] + intDeltaX,
objLowerControl.getAbsolutePosition().y);
objLowerControl.updateStyle("width", this.startLowerSizes[intIndex] - intDeltaX + "px");
};
};
// Update Handle Position
this.updateHandle(false);
return qcodo.terminateEvent(objEvent);
};
objWrapper.handleMouseUp = function(objEvent, objHandle) {
// See if we've even resized at all
var blnResized = true;
if (this.resizeDirectionVertical) {
if (this.startDragY == qcodo.page.y)
blnResized = false;
} else {
if (this.startDragX == qcodo.page.x)
blnResized = false;
};
if (blnResized) {
this.updateHandle(true);
// Setup OnResize (if applicable)
if (this.control.getAttribute("onqcodoresize")) {
this.control.qcodoresize = function(strOnResizeCommand) {
eval(strOnResizeCommand);
};
this.control.qcodoresize(this.control.getAttribute("onqcodoresize"));
};
return qcodo.terminateEvent(objEvent);
} else {
// If we haven't resized at all, go ahead and run the control's onclick method
// (if applicable) or just propagate the click up
if (this.control.onclick)
return this.control.onclick(objEvent);
else
return true;
};
};
objWrapper.setUpperControl = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
this.resizeUpperControls[this.resizeUpperControls.length] = objWrapper;
};
objWrapper.setLowerControl = function(mixControl) {
var objControl; if (!(objControl = qcodo.getControl(mixControl))) return;
var objWrapper = objControl.wrapper;
this.resizeLowerControls[this.resizeLowerControls.length] = objWrapper;
};
objWrapper.resizeMinimum = null;
objWrapper.resizeMaximum = null;
objWrapper.setResizeMinimum = function(intMinimum) {
this.resizeMinimum = intMinimum;
};
objWrapper.setResizeMaximum = function(intMaximum) {
this.resizeMaximum = intMaximum;
};
// Wrapper Shortcuts
objWrapper.setUC = objWrapper.setUpperControl;
objWrapper.setLC = objWrapper.setLowerControl;
objWrapper.setReMi = objWrapper.setResizeMinimum;
objWrapper.setReMa = objWrapper.setResizeMaximum;
} else {
objWrapper.updateHandle();
};
};
//////////////////
// Qcodo Shortcuts
//////////////////
qc.regCRH = qcodo.registerControlResizeHandle;
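// Illustrative splitter setup (ids hypothetical; assumes the controls were
// registered via qc.regC): a horizontal divider that resizes "pnlTop" above it
// and repositions/resizes "pnlBottom" below it, clamped to roughly the
// 100..500px offset range.
//   qc.regCRH("pnlDivider", true);  // true = vertical drag direction
//   var objW = qc.getW("pnlDivider");
//   objW.setUC("pnlTop");
//   objW.setLC("pnlBottom");
//   objW.setReMi(100);
//   objW.setReMa(500);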
// This function hides the toggle menu if it is being displayed
function clickWindow(toggleMenuId) {
var objToggleMenu = document.getElementById(toggleMenuId);
if (objToggleMenu.parentNode.style.display != 'none') {
qc.getW(toggleMenuId).toggleDisplay('hide');
}
}
// This function repositions the toggle menu when the window is resized
function resizeWindow(toggleMenuId, toggleButtonId) {
var objToggleMenu = document.getElementById(toggleMenuId);
if (objToggleMenu.parentNode.style.display != 'none') {
setPosition(toggleButtonId, toggleMenuId);
}
}
// This function is run when the ColumnToggleButton is clicked
// Positions and Displays the column toggle menu
function toggleColumnToggleDisplay(e, toggleMenuId, toggleButtonId) {
// Set the position of the toggle menu based on the location of the menu button
setPosition(toggleButtonId, toggleMenuId);
// Display/Hide the column toggle menu
qc.getW(toggleMenuId).toggleDisplay();
var objToggleMenu = document.getElementById(toggleMenuId);
// Set the onresize and onclick event handlers only when the menu is being displayed to avoid unnecessarily running the function
if (objToggleMenu.parentNode.style.display != 'none') {
function r() {
resizeWindow(toggleMenuId, toggleButtonId);
}
window.onresize = r;
function c() {
clickWindow(toggleMenuId);
}
window.document.onclick = c;
}
// Set event handlers to null when menu is not being displayed
else {
window.onresize = null;
window.document.onclick = null;
}
// Stop the event from bubbling up or propagating down so that handlers don't run more than once.
// This was specifically because setPosition was getting run from both the window.onclick event and from clicking on the button.
if (!e) { var e = window.event; }
e.cancelBubble = true;
if (e.stopPropagation) { e.stopPropagation(); }
}
// Based on the position of the button (strLabelControlId), this positions the column toggle menu (strPanelControlId)
function setPosition(strLabelControlId, strPanelControlId) {
var objLabel = document.getElementById(strLabelControlId);
var arrCurrentLabelPosition = findPosition(objLabel.offsetParent);
var objToggleMenu = document.getElementById(strPanelControlId);
var strMenuWidth = objToggleMenu.offsetWidth;
// The menu width will be 0 when it is first rendered as display: none. This uses its style parameters to calculate what its width will be.
// This was necessary in order to be able to set the position of the menu before it was displayed, to avoid a scrollbar flicker.
if (strMenuWidth==0) {
strMenuWidth = getWidth(objToggleMenu);
}
objToggleMenu.style.position = 'absolute';
objToggleMenu.style.left = (arrCurrentLabelPosition[0] + objLabel.offsetParent.offsetWidth - strMenuWidth) + 'px';
objToggleMenu.style.top = (arrCurrentLabelPosition[1] + objLabel.offsetParent.offsetHeight) + 'px';
}
// This function finds the absolute position of an element in pixels by walking up through all offsetParent elements and summing their left and top offsets.
function findPosition(obj) {
var current_top = 0;
var current_left = 0;
if (obj.offsetParent) {
current_left = obj.offsetLeft;
current_top = obj.offsetTop;
while (obj = obj.offsetParent) {
current_left += obj.offsetLeft;
current_top += obj.offsetTop;
}
}
return [current_left,current_top];
}
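// Worked example: for an element at offsetLeft 10 / offsetTop 20 inside an
// offsetParent at 100 / 50, findPosition returns [110, 70] -- the offsets
// summed up the offsetParent chain.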
function getWidth(obj) {
var strWidth = 0;
var intWidth = parseInt(obj.style.width);
var intPaddingLeft = parseInt(obj.style.paddingLeft);
var intPaddingRight = parseInt(obj.style.paddingRight);
var intBorderLeftWidth = parseInt(obj.style.borderLeftWidth);
var intBorderRightWidth = parseInt(obj.style.borderRightWidth);
strWidth += (!isNaN(intWidth)) ? intWidth : 0;
strWidth += (!isNaN(intPaddingLeft)) ? intPaddingLeft : 0;
strWidth += (!isNaN(intPaddingRight)) ? intPaddingRight : 0;
strWidth += (!isNaN(intBorderLeftWidth)) ? intBorderLeftWidth : 0;
strWidth += (!isNaN(intBorderRightWidth)) ? intBorderRightWidth : 0;
return strWidth;
}
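// Illustrative: with an inline style of "width:200px; padding-left:5px;
// border-left-width:1px", getWidth returns 206 even while the element is
// display:none (where offsetWidth would report 0).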
function __resetListBox(strFormId, strControlId) {
var objListBox = document.forms[strFormId].elements[strControlId];
objListBox.selectedIndex = -1;
if (objListBox.onchange)
objListBox.onchange();
};
function Qcodo__DateTimePicker_Change(strControlId, objListbox) {
var objMonth = document.getElementById(strControlId + "_lstMonth");
var objDay = document.getElementById(strControlId + "_lstDay");
var objYear = document.getElementById(strControlId + "_lstYear");
if (objListbox.options[objListbox.selectedIndex].value == "") {
objMonth.selectedIndex = 0;
objYear.selectedIndex = 0;
while(objDay.options.length)
objDay.options[objDay.options.length - 1] = null;
objDay.options[0] = new Option("--", "");
objDay.selectedIndex = 0;
} else {
if ((objListbox == objMonth) || ((objListbox == objYear) && (objMonth.options[objMonth.selectedIndex].value == 2))) {
var intCurrentDay = objDay.options[objDay.selectedIndex].value;
var intCurrentMaxDay = objDay.options[objDay.options.length - 1].value;
// Calculate new Max Day
var intNewMaxDay = 0;
var intSelectedMonth = objMonth.options[objMonth.selectedIndex].value;
var intSelectedYear = new Number(objYear.options[objYear.selectedIndex].value);
if (!intSelectedYear)
intSelectedYear = 2000;
switch (intSelectedMonth) {
case "1":
case "3":
case "5":
case "7":
case "8":
case "10":
case "12":
intNewMaxDay = 31;
break;
case "4":
case "6":
case "9":
case "11":
intNewMaxDay = 30;
break;
case "2":
if ((intSelectedYear % 4) != 0)
intNewMaxDay = 28;
// Gregorian rule: century years are leap years only when divisible by 400
else if ((intSelectedYear % 400) == 0)
intNewMaxDay = 29;
else if ((intSelectedYear % 100) == 0)
intNewMaxDay = 28;
else
intNewMaxDay = 29;
break;
};
if (intNewMaxDay != intCurrentMaxDay) {
// Redo the Days Dropdown
var blnRequired = true;
if (objDay.options[0].value == "")
blnRequired = false;
while (objDay.options.length)
objDay.options[objDay.options.length - 1] = null;
if (!blnRequired)
objDay.options[0] = new Option("--", "");
for (var intDay = 1; intDay <= intNewMaxDay; intDay++) {
objDay.options[objDay.options.length] = new Option(intDay, intDay);
};
intCurrentDay = Math.min(intCurrentDay, intNewMaxDay);
if (blnRequired)
objDay.options[intCurrentDay - 1].selected = true;
else
objDay.options[intCurrentDay].selected = true;
};
};
};
};<|fim▁end|> | };
|
<|file_name|>Contacts.js<|end_file_name|><|fim▁begin|>/**
* Created by raynald on 8/22/14.
*/
App.Collections.Contacts = Backbone.Collection.extend({
model : App.Models.Contact,<|fim▁hole|><|fim▁end|> | localStorage: new Backbone.LocalStorage('my-contacts')
}); |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>tressa.title('HyperHTML');
tressa.assert(typeof hyperHTML === 'function', 'hyperHTML is a function');
try { tressa.log(''); } catch(e) { tressa.log = console.log.bind(console); }
tressa.async(function (done) {
tressa.log('## injecting text and attributes');
var i = 0;
var div = document.body.appendChild(document.createElement('div'));
var render = hyperHTML.bind(div);
function update(i) {
return render`
<p data-counter="${i}">
Time: ${
// IE Edge mobile did something funny here
// as template string returned xxx.xxxx
// but as innerHTML returned xxx.xx
(Math.random() * new Date).toFixed(2)
}
</p>
`;
}
function compare(html) {
return /^\s*<p data-counter="\d">\s*Time: \d+\.\d+<[^>]+?>\s*<\/p>\s*$/i.test(html);
}
var html = update(i++).innerHTML;
var p = div.querySelector('p');
var attr = p.attributes[0];
tressa.assert(compare(html), 'correct HTML');
tressa.assert(html === div.innerHTML, 'correctly returned');
setTimeout(function () {
tressa.log('## updating same nodes');
var html = update(i++).innerHTML;
tressa.assert(compare(html), 'correct HTML update');
tressa.assert(html === div.innerHTML, 'update applied');
tressa.assert(p === div.querySelector('p'), 'no node was changed');
tressa.assert(attr === p.attributes[0], 'no attribute was changed');
done();
});
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## perf: same virtual text twice');
var div = document.body.appendChild(document.createElement('div'));
var render = hyperHTML.bind(div);
var html = (update('hello').innerHTML, update('hello').innerHTML);
function update(text) {
return render`<p>${text} world</p>`;
}
tressa.assert(
update('hello').innerHTML ===
update('hello').innerHTML,
'same text'
);
done(div);
});
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## injecting HTML');
var div = document.body.appendChild(document.createElement('div'));
var render = hyperHTML.bind(div);
var html = update('hello').innerHTML;
function update(text) {
return render`<p>${['<strong>' + text + '</strong>']}</p>`;
}
function compare(html) {
return /^<p><strong>\w+<\/strong><!--.+?--><\/p>$/i.test(html);
}
tressa.assert(compare(html), 'HTML injected');
tressa.assert(html === div.innerHTML, 'HTML returned');
done(div);
});
})
.then(function (div) {
return tressa.async(function (done) {
tressa.log('## function attributes');
var render = hyperHTML.bind(div);
var times = 0;
update(function (e) {
console.log(e.type);
if (++times > 1) {
return tressa.assert(false, 'events are broken');
}
if (e) {
e.preventDefault();
e.stopPropagation();
}
tressa.assert(true, 'onclick invoked');
tressa.assert(!a.hasAttribute('onclick'), 'no attribute');
update(null);
e = document.createEvent('Event');
e.initEvent('click', false, false);
a.dispatchEvent(e);
done(div);
});
function update(click) {
// also test case-insensitive builtin events
return render`<a href="#" onClick="${click}">click</a>`;
}
var a = div.querySelector('a');
var e = document.createEvent('Event');
e.initEvent('click', false, false);
a.dispatchEvent(e);
});
})
.then(function (div) {
return tressa.async(function (done) {
tressa.log('## changing template');
var render = hyperHTML.bind(div);
var html = update('hello').innerHTML;
function update(text) {
return render`<p>${{any: ['<em>' + text + '</em>']}}</p>`;
}
function compare(html) {
return /^<p><em>\w+<\/em><!--.+?--><\/p>$/i.test(html);
}
tressa.assert(compare(html), 'new HTML injected');
tressa.assert(html === div.innerHTML, 'new HTML returned');
done(div);
});
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## custom events');
var render = hyperHTML.bind(document.createElement('p'));
var e = document.createEvent('Event');
e.initEvent('Custom-EVENT', true, true);
(render`<span onCustom-EVENT="${function (e) {
tressa.assert(e.type === 'Custom-EVENT', 'event triggered');
done();
}}">how cool</span>`
).firstElementChild.dispatchEvent(e);
});
})
.then(function () {
tressa.log('## multi wire removal');
var render = hyperHTML.wire();
var update = function () {
return render`
<p>1</p>
<p>2</p>
`;
};
update().remove();
update = function () {
return render`
<p>1</p>
<p>2</p>
<p>3</p>
`;
};
update().remove();
tressa.assert(true, 'OK');
})
.then(function () {
tressa.log('## the attribute id');
var div = document.createElement('div');
hyperHTML.bind(div)`<p id=${'id'} class='class'>OK</p>`;
tressa.assert(div.firstChild.id === 'id', 'the id is preserved');
tressa.assert(div.firstChild.className === 'class', 'the class is preserved');
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## hyperHTML.wire()');
var render = hyperHTML.wire();
var update = function () {
return render`
<p>1</p>
`;
};
var node = update();
tressa.assert(node.nodeName.toLowerCase() === 'p', 'correct node');
var same = update();
tressa.assert(node === same, 'same node returned');
render = hyperHTML.wire(null);
update = function () {
return render`
0
<p>1</p>
`;
};
node = update().childNodes;
tressa.assert(Array.isArray(node), 'list of nodes');
same = update().childNodes;
tressa.assert(
node.length === same.length &&
node[0] &&
node.every(function (n, i) { return same[i] === n; }),
'same list returned'
);
var div = document.createElement('div');
render = hyperHTML.bind(div);
render`${node}`;
same = div.childNodes;
tressa.assert(
node[0] &&
node.every(function (n, i) { return same[i] === n; }),
'same list applied'
);
function returnSame() {
return render`a`;
}
render = hyperHTML.wire();
tressa.assert(
returnSame() === returnSame(),
'template sensible wire'
);
done();
});
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## hyperHTML.wire(object)');
var point = {x: 1, y: 2};
function update() {
return hyperHTML.wire(point)`
<span style="${`
position: absolute;
left: ${point.x}px;
top: ${point.y}px;
`}">O</span>`;
}
try { update(); } catch(e) { console.error(e) }
tressa.assert(update() === update(), 'same output');
tressa.assert(hyperHTML.wire(point) === hyperHTML.wire(point), 'same wire');
done();
});
})
.then(function () {
if (typeof MutationObserver === 'undefined') return;
return tressa.async(function (done) {
tressa.log('## preserve first child where first child is the same as incoming');
var div = document.body.appendChild(document.createElement('div'));
var render = hyperHTML.bind(div);
var observer = new MutationObserver(function (mutations) {
for (var i = 0, len = mutations.length; i < len; i++) {
trackMutations(mutations[i].addedNodes, 'added');
trackMutations(mutations[i].removedNodes, 'removed');
}
});
observer.observe(div, {
childList: true,
subtree: true,
});
var counters = [];
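    // counters is keyed by each node's data-test attribute and tallies how
    // many times the observer saw that node added or removed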
function trackMutations (nodes, countKey) {
for (var i = 0, len = nodes.length, counter, key; i < len; i++) {
if (nodes[i] && nodes[i].getAttribute && nodes[i].getAttribute('data-test')) {
key = nodes[i].getAttribute('data-test');
counter = counters[key] || (counters[key] = { added: 0, removed: 0 });
counter[countKey]++;
}
if (nodes[i].childNodes.length > 0) {
trackMutations(nodes[i].childNodes, countKey);
}
}
}
var listItems = [];
function update(items) {
render`
<section>
<ul>${
items.map(function (item, i) {
return hyperHTML.wire((listItems[i] || (listItems[i] = {})))`
<li data-test="${i}">${() => item.text}</li>
`;
})
}</ul>
</section>`;
}
update([]);
setTimeout(function () {
update([{ text: 'test1' }]);
}, 10);
setTimeout(function () {
update([{ text: 'test1' }, { text: 'test2' }]);
}, 20);
setTimeout(function () {
update([{ text: 'test1' }]);
}, 30);
setTimeout(function () {
if (counters.length) {
tressa.assert(counters[0].added === 1, 'first item added only once');
tressa.assert(counters[0].removed === 0, 'first item never removed');
}
done();
}, 100);
});
})
.then(function () {
tressa.log('## rendering one node');
var div = document.createElement('div');
var br = document.createElement('br');
var hr = document.createElement('hr');
hyperHTML.bind(div)`<div>${br}</div>`;
tressa.assert(div.firstChild.firstChild === br, 'one child is added');
hyperHTML.bind(div)`<div>${hr}</div>`;
tressa.assert(div.firstChild.firstChild === hr, 'one child is changed');
hyperHTML.bind(div)`<div>${[hr, br]}</div>`;
tressa.assert(
div.firstChild.childNodes[0] === hr &&
div.firstChild.childNodes[1] === br,
'more children are added'
);
hyperHTML.bind(div)`<div>${[br, hr]}</div>`;
tressa.assert(
div.firstChild.childNodes[0] === br &&
div.firstChild.childNodes[1] === hr,
'children can be swapped'
);
hyperHTML.bind(div)`<div>${br}</div>`;
tressa.assert(div.firstChild.firstChild === br, 'one child is kept');
hyperHTML.bind(div)`<div>${[]}</div>`;<|fim▁hole|>.then(function () {
tressa.log('## wire by id');
let ref = {};
let wires = {
a: function () {
return hyperHTML.wire(ref, ':a')`<a></a>`;
},
p: function () {
return hyperHTML.wire(ref, ':p')`<p></p>`;
}
};
tressa.assert(wires.a().nodeName.toLowerCase() === 'a', '<a> is correct');
tressa.assert(wires.p().nodeName.toLowerCase() === 'p', '<p> is correct');
tressa.assert(wires.a() === wires.a(), 'same wire for <a>');
tressa.assert(wires.p() === wires.p(), 'same wire for <p>');
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## Promises instead of nodes');
let wrap = document.createElement('div');
let render = hyperHTML.bind(wrap);
render`<p>${
new Promise(function (r) { setTimeout(r, 50, 'any'); })
}</p>${
new Promise(function (r) { setTimeout(r, 10, 'virtual'); })
}<hr><div>${[
new Promise(function (r) { setTimeout(r, 20, 1); }),
new Promise(function (r) { setTimeout(r, 10, 2); }),
]}</div>${[
new Promise(function (r) { setTimeout(r, 20, 3); }),
new Promise(function (r) { setTimeout(r, 10, 4); }),
]}`;
let result = wrap.innerHTML;
setTimeout(function () {
tressa.assert(result !== wrap.innerHTML, 'promises fullfilled');
tressa.assert(
/^<p>any<!--.+?--><\/p>virtual<!--.+?--><hr(?: ?\/)?><div>12<!--.+?--><\/div>34<!--.+?-->$/.test(wrap.innerHTML),
'both any and virtual content correct'
);
done();
}, 100);
});
})
.then(function () {
hyperHTML.engine = hyperHTML.engine;
tressa.log('## for code coverage sake');
let wrap = document.createElement('div');
let text = [document.createTextNode('a'), document.createTextNode('b'), document.createTextNode('c')];
let testingMajinBuu = hyperHTML.bind(wrap);
testingMajinBuu`${[text]}`;
tressa.assert(wrap.textContent === 'abc');
text[0] = document.createTextNode('c');
text[2] = document.createTextNode('a');
testingMajinBuu`${[text]}`;
tressa.assert(wrap.textContent === 'cba');
let result = hyperHTML.wire()`<!--not hyperHTML-->`;
tressa.assert(result.nodeType === 8, 'it is a comment');
tressa.assert(result.textContent === 'not hyperHTML', 'correct content');
hyperHTML.bind(wrap)`<br/>${'node before'}`;
tressa.assert(/^<br(?: ?\/)?>node before<!--.+?-->$/i.test(wrap.innerHTML), 'node before');
hyperHTML.bind(wrap)`${'node after'}<br/>`;
tressa.assert(/^node after<!--.+?--><br(?: ?\/)?>$/i.test(wrap.innerHTML), 'node after');
hyperHTML.bind(wrap)`<style> ${'hyper-html{}'} </style>`;
tressa.assert('<style>hyper-html{}</style>' === wrap.innerHTML.toLowerCase(), 'node style');
var empty = function (value) {
return hyperHTML.bind(wrap)`${value}`;
};
empty(document.createTextNode('a'));
empty(document.createDocumentFragment());
empty(document.createDocumentFragment());
let fragment = document.createDocumentFragment();
fragment.appendChild(document.createTextNode('b'));
empty(fragment);
empty(123);
tressa.assert(wrap.textContent === '123', 'text as number');
empty(true);
tressa.assert(wrap.textContent === 'true', 'text as boolean');
empty([1]);
tressa.assert(wrap.textContent === '1', 'text as one entry array');
empty(['1', '2']);
tressa.assert(wrap.textContent === '12', 'text as multi entry array of strings');
let arr = [document.createTextNode('a'), document.createTextNode('b')];
empty([arr]);
tressa.assert(wrap.textContent === 'ab', 'text as multi entry array of nodes');
empty([arr]);
tressa.assert(wrap.textContent === 'ab', 'same array of nodes');
empty(wrap.childNodes);
tressa.assert(wrap.textContent === 'ab', 'childNodes as list');
hyperHTML.bind(wrap)`a=${{length:1, '0':'b'}}`;
tressa.assert(wrap.textContent === 'a=b', 'childNodes as virtual list');
empty = function () {
return hyperHTML.bind(wrap)`[${'text'}]`;
};
empty();
empty();
let onclick = (e) => {};
let handler = {handleEvent: onclick};
empty = function () {
return hyperHTML.bind(wrap)`<p onclick="${onclick}" onmouseover="${handler}" align="${'left'}"></p>`;
};
empty();
handler = {handleEvent: onclick};
empty();
empty();
empty = function (value) {
return hyperHTML.bind(wrap)`<br/>${value}<br/>`;
};
empty(arr[0]);
empty(arr);
empty(arr);
empty([]);
empty(['1', '2']);
empty(document.createDocumentFragment());
tressa.assert(true, 'passed various virtual content scenarios');
let svgContainer = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
if (!('ownerSVGElement' in svgContainer)) svgContainer.ownerSVGElement = null;
hyperHTML.bind(svgContainer)`<rect x="1" y="2" />`;
result = hyperHTML.wire(null, 'svg')`<svg></svg>`;
tressa.assert(result.nodeName.toLowerCase() === 'svg', 'svg content is allowed too');
result = hyperHTML.wire()``;
tressa.assert(!result.innerHTML, 'empty content');
let tr = hyperHTML.wire()`<tr><td>ok</td></tr>`;
tressa.assert(true, 'even TR as template');
hyperHTML.bind(wrap)`${' 1 '}`;
tressa.assert(wrap.textContent === ' 1 ', 'text in between');
hyperHTML.bind(wrap)` <br/>${1}<br/> `;
tressa.assert(/^\s*<br(?: ?\/)?>1<!--.+?--><br(?: ?\/)?>\s*$/.test(wrap.innerHTML), 'virtual content in between');
let last = hyperHTML.wire();
empty = function (style) {
return last`<textarea style=${style}>${() => 'same text'}</textarea>`;
};
empty('border:0');
empty({border: 0});
empty({vh: 100});
empty({vh: 10, vw: 1});
empty(null);
empty('');
const sameStyle = {ord: 0};
empty(sameStyle);
empty(sameStyle);
empty = function () {
return last`<p data=${last}></p>`;
};
empty();
empty();
let p = last`<p data=${last}>${0}</p>`;
const UID = p.childNodes[1].data;
last`<textarea new>${`<!--${UID}-->`}</textarea>`;
hyperHTML.wire()`<p><!--ok--></p>`;
})
.then(function () {
tressa.log('## <script> shenanigans');
return tressa.async(function (done) {
var div = document.createElement('div');
document.body.appendChild(div);
hyperHTML.bind(div)`<script
src="../index.js?_=asd"
onreadystatechange="${event => {
if (/loaded|complete/.test(event.readyState))
setTimeout(() => {
tressa.assert(true, 'executed');
done();
});
}}"
onload="${() => {
tressa.assert(true, 'executed');
done();
}}"
onerror="${() => {
tressa.assert(true, 'executed');
done();
}}"
></script>`;
// in nodejs case
if (!('onload' in document.defaultView)) {
var evt = document.createEvent('Event');
evt.initEvent('load', false, false);
div.firstChild.dispatchEvent(evt);
}
});
})
.then(function () {
tressa.log('## SVG and style');
var render = hyperHTML.wire(null, 'svg');
Object.prototype.ownerSVGElement = null;
function rect(style) {
return render`<rect style=${style} />`;
}
var node = rect({});
delete Object.prototype.ownerSVGElement;
rect({width: 100});
console.log(node.getAttribute('style'));
tressa.assert(/width:\s*100px;/.test(node.getAttribute('style')), 'correct style object');
rect('height:10px;');
tressa.assert(/height:\s*10px;/.test(node.getAttribute('style')), 'correct style string');
rect(null);
tressa.assert(/^(?:|null)$/.test(node.getAttribute('style')), 'correct style reset');
})
.then(function () {
var a = document.createTextNode('a');
var b = document.createTextNode('b');
var c = document.createTextNode('c');
var d = document.createTextNode('d');
var e = document.createTextNode('e');
var f = document.createTextNode('f');
var g = document.createTextNode('g');
var h = document.createTextNode('h');
var i = document.createTextNode('i');
var div = document.createElement('div');
var render = hyperHTML.bind(div);
render`${[]}`;
tressa.assert(div.textContent === '', 'div is empty');
render`${[c, d, e, f]}`;
// all tests know that a comment node is inside the div
tressa.assert(div.textContent === 'cdef' && div.childNodes.length === 5, 'div has 4 nodes');
render`${[c, d, e, f]}`;
tressa.assert(div.textContent === 'cdef', 'div has same 4 nodes');
render`${[a, b, c, d, e, f]}`;
tressa.assert(div.textContent === 'abcdef' && div.childNodes.length === 7, 'div has same 4 nodes + 2 prepends');
render`${[a, b, c, d, e, f, g, h, i]}`;
tressa.assert(div.textContent === 'abcdefghi' && div.childNodes.length === 10, 'div has 6 nodes + 3 appends');
render`${[b, c, d, e, f, g, h, i]}`;
tressa.assert(div.textContent === 'bcdefghi' && div.childNodes.length === 9, 'div has dropped first node');
render`${[b, c, d, e, f, g, h]}`;
tressa.assert(div.textContent === 'bcdefgh' && div.childNodes.length === 8, 'div has dropped last node');
render`${[b, c, d, f, e, g, h]}`;
tressa.assert(div.textContent === 'bcdfegh', 'div has changed 2 nodes');
render`${[b, d, c, f, g, e, h]}`;
tressa.assert(div.textContent === 'bdcfgeh', 'div has changed 4 nodes');
render`${[b, d, c, g, e, h]}`;
tressa.assert(div.textContent === 'bdcgeh' && div.childNodes.length === 7, 'div has removed central node');
})
.then(function () {
tressa.log('## no WebKit backfire');
var div = document.createElement('div');
function update(value, attr) {
return hyperHTML.bind(div)`
<input value="${value}" shaka="${attr}">`;
}
var input = update('', '').firstElementChild;
input.value = '456';
input.setAttribute('shaka', 'laka');
update('123', 'laka');
tressa.assert(input.value === '123', 'correct input');
  tressa.assert(input.attributes.shaka.value === 'laka', 'correct attribute');
update('', '');
input.value = '123';
input.attributes.shaka.value = 'laka';
update('123', 'laka');
tressa.assert(input.value === '123', 'input.value was not reassigned');
})
.then(function () {
tressa.log('## wired arrays are rendered properly');
var div = document.createElement('div');
var employees = [
{first: 'Bob', last: 'Li'},
{first: 'Ayesha', last: 'Johnson'}
];
var getEmployee = employee => hyperHTML.wire(employee)`
<div>First name: ${employee.first}</div>
<p></p>`;
hyperHTML.bind(div)`${employees.map(getEmployee)}`;
tressa.assert(div.childElementCount === 4, 'correct elements as setAny');
hyperHTML.bind(div)`
<p></p>${employees.map(getEmployee)}`;
tressa.assert(div.childElementCount === 5, 'correct elements as setVirtual');
hyperHTML.bind(div)`
<p></p>${[]}`;
tressa.assert(div.childElementCount === 1, 'only one element left');
})
.then(function () {return tressa.async(function (done) {
function textarea(value) {
return hyperHTML.bind(div)`<textarea>${value}</textarea>`;
}
tressa.log('## textarea text');
var div = document.createElement('div');
textarea(1);
var ta = div.firstElementChild;
tressa.assert(ta.textContent === '1', 'primitives are fine');
textarea(null);
tressa.assert(ta.textContent === '', 'null/undefined is fine');
var p = Promise.resolve('OK');
textarea(p);
p.then(function () {
console.log(div.innerHTML);
tressa.assert(ta.textContent === 'OK', 'promises are fine');
textarea({text: 'text'});
tressa.assert(ta.textContent === 'text', 'text is fine');
textarea({html: 'html'});
tressa.assert(ta.textContent === 'html', 'html is fine');
textarea({any: 'any'});
tressa.assert(ta.textContent === 'any', 'any is fine');
textarea(['ar', 'ray']);
tressa.assert(ta.textContent === 'array', 'array is fine');
textarea({placeholder: 'placeholder'});
tressa.assert(ta.textContent === 'placeholder', 'placeholder is fine');
textarea({unknown: 'unknown'});
tressa.assert(ta.textContent === '', 'intents are fine');
done();
});
})})
.then(function () {
tressa.log('## attributes with weird chars');
var div = document.createElement('div');
hyperHTML.bind(div)`<p _foo=${'bar'}></p>`;
tressa.assert(div.firstChild.getAttribute('_foo') === 'bar', 'OK');
})
.then(function () {
tressa.log('## attributes without quotes');
var div = document.createElement('div');
hyperHTML.bind(div)`<p test=${'a"b'}></p>`;
tressa.assert(div.firstChild.getAttribute('test') === 'a"b', 'OK');
})
.then(function () {
tressa.log('## any content extras');
var div = document.createElement('div');
var html = hyperHTML.bind(div);
setContent(undefined);
tressa.assert(/<p><!--.+?--><\/p>/.test(div.innerHTML), 'expected layout');
setContent({text: '<img/>'});
tressa.assert(/<p><img(?: ?\/)?><!--.+?--><\/p>/.test(div.innerHTML), 'expected text');
function setContent(which) {
return html`<p>${which}</p>`;
}
})
.then(function () {
tressa.log('## any different content extras');
var div = document.createElement('div');
hyperHTML.bind(div)`<p>${undefined}</p>`;
tressa.assert(/<p><!--.+?--><\/p>/.test(div.innerHTML), 'expected layout');
hyperHTML.bind(div)`<p>${{text: '<img/>'}}</p>`;
tressa.assert(/<p><img(?: ?\/)?><!--.+?--><\/p>/.test(div.innerHTML), 'expected text');
})
.then(function () {
tressa.log('## virtual content extras');
var div = document.createElement('div');
hyperHTML.bind(div)`a ${null}`;
tressa.assert(/a <[^>]+?>/.test(div.innerHTML), 'expected layout');
hyperHTML.bind(div)`a ${{text: '<img/>'}}`;
tressa.assert(/a <img(?: ?\/)?><[^>]+?>/.test(div.innerHTML), 'expected text');
hyperHTML.bind(div)`a ${{any: 123}}`;
tressa.assert(/a 123<[^>]+?>/.test(div.innerHTML), 'expected any');
hyperHTML.bind(div)`a ${{html: '<b>ok</b>'}}`;
tressa.assert(/a <b>ok<\/b><[^>]+?>/.test(div.innerHTML), 'expected html');
hyperHTML.bind(div)`a ${{}}`;
tressa.assert(/a <[^>]+?>/.test(div.innerHTML), 'expected nothing');
})
.then(function () {
tressa.log('## defined transformer');
hyperHTML.define('eUC', encodeURIComponent);
var div = document.createElement('div');
hyperHTML.bind(div)`a=${{eUC: 'b c'}}`;
tressa.assert(/a=b%20c<[^>]+?>/.test(div.innerHTML), 'expected virtual layout');
hyperHTML.bind(div)`<p>${{eUC: 'b c'}}</p>`;
tressa.assert(/<p>b%20c<!--.+?--><\/p>/.test(div.innerHTML), 'expected layout');
// TODO: for coverage sake
// defined transformer ... so what?
hyperHTML.define('eUC', encodeURIComponent);
// non existent one ... so what?
hyperHTML.bind(div)`a=${{nOPE: 'b c'}}`;
})
.then(function () {
tressa.log('## attributes with null values');
var div = document.createElement('div');
var anyAttr = function (value) {
hyperHTML.bind(div)`<p any-attr=${value}>any content</p>`;
};
anyAttr('1');
tressa.assert(
div.firstChild.hasAttribute('any-attr') &&
div.firstChild.getAttribute('any-attr') === '1',
'regular attribute'
);
anyAttr(null);
tressa.assert(
!div.firstChild.hasAttribute('any-attr') &&
div.firstChild.getAttribute('any-attr') == null,
'can be removed'
);
anyAttr(undefined);
tressa.assert(
!div.firstChild.hasAttribute('any-attr') &&
div.firstChild.getAttribute('any-attr') == null,
'multiple times'
);
anyAttr('2');
tressa.assert(
div.firstChild.hasAttribute('any-attr') &&
div.firstChild.getAttribute('any-attr') === '2',
'but can be also reassigned'
);
anyAttr('3');
tressa.assert(
div.firstChild.hasAttribute('any-attr') &&
div.firstChild.getAttribute('any-attr') === '3',
'many other times'
);
var inputName = function (value) {
hyperHTML.bind(div)`<input name=${value}>`;
};
inputName('test');
tressa.assert(
div.firstChild.hasAttribute('name') &&
div.firstChild.name === 'test',
'special attributes are set too'
);
inputName(null);
tressa.assert(
!div.firstChild.hasAttribute('name') &&
!div.firstChild.name,
'but can also be removed'
);
inputName(undefined);
tressa.assert(
!div.firstChild.hasAttribute('name') &&
!div.firstChild.name,
'with either null or undefined'
);
inputName('back');
tressa.assert(
div.firstChild.hasAttribute('name') &&
div.firstChild.name === 'back',
'and can be put back'
);
})
.then(function () {return tressa.async(function (done) {
tressa.log('## placeholder');
var div = document.createElement('div');
var vdiv = document.createElement('div');
hyperHTML.bind(div)`<p>${{eUC: 'b c', placeholder: 'z'}}</p>`;
hyperHTML.bind(vdiv)`a=${{eUC: 'b c', placeholder: 'z'}}`;
tressa.assert(/<p>z<!--.+?--><\/p>/.test(div.innerHTML), 'expected inner placeholder layout');
tressa.assert(/a=z<[^>]+?>/.test(vdiv.innerHTML), 'expected virtual placeholder layout');
setTimeout(function () {
tressa.assert(/<p>b%20c<!--.+?--><\/p>/.test(div.innerHTML), 'expected inner resolved layout');
tressa.assert(/a=b%20c<[^>]+?>/.test(vdiv.innerHTML), 'expected virtual resolved layout');
hyperHTML.bind(div)`<p>${{text: 1, placeholder: '9'}}</p>`;
setTimeout(function () {
tressa.assert(/<p>1<!--.+?--><\/p>/.test(div.innerHTML), 'placeholder with text');
hyperHTML.bind(div)`<p>${{any: [1, 2], placeholder: '9'}}</p>`;
setTimeout(function () {
tressa.assert(/<p>12<!--.+?--><\/p>/.test(div.innerHTML), 'placeholder with any');
hyperHTML.bind(div)`<p>${{html: '<b>3</b>', placeholder: '9'}}</p>`;
setTimeout(function () {
tressa.assert(/<p><b>3<\/b><!--.+?--><\/p>/.test(div.innerHTML), 'placeholder with html');
done();
}, 10);
}, 10);
}, 10);
}, 10);
});})
.then(function () {
tressa.log('## hyper(...)');
var hyper = hyperHTML.hyper;
tressa.assert(typeof hyper() === 'function', 'empty hyper() is a wire tag');
tressa.assert((hyper`abc`).textContent === 'abc', 'hyper`abc`');
tressa.assert((hyper`<p>a${2}c</p>`).textContent === 'a2c', 'hyper`<p>a${2}c</p>`');
tressa.assert((hyper(document.createElement('div'))`abc`).textContent === 'abc', 'hyper(div)`abc`');
tressa.assert((hyper(document.createElement('div'))`a${'b'}c`).textContent === 'abc', 'hyper(div)`a${"b"}c`');
  // WTF jsdom ?!
delete Object.prototype.nodeType;
tressa.assert((hyper({})`abc`).textContent === 'abc', 'hyper({})`abc`');
tressa.assert((hyper({})`<p>a${'b'}c</p>`).textContent === 'abc', 'hyper({})`<p>a${\'b\'}c</p>`');
tressa.assert((hyper({}, ':id')`abc`).textContent === 'abc', 'hyper({}, \':id\')`abc`');
tressa.assert((hyper({}, ':id')`<p>a${'b'}c</p>`).textContent === 'abc', 'hyper({}, \':id\')`<p>a${\'b\'}c</p>`');
tressa.assert((hyper('svg')`<rect />`), 'hyper("svg")`<rect />`');
})
.then(function () {
tressa.log('## data=${anyContent}');
var obj = {rand: Math.random()};
var div = hyperHTML.wire()`<div data=${obj}>abc</div>`;
tressa.assert(div.data === obj, 'data available without serialization');
tressa.assert(div.outerHTML === '<div>abc</div>', 'attribute not there');
})
.then(function () {
tressa.log('## hyper.Component');
class Button extends hyperHTML.Component {
render() { return this.html`
<button>hello</button>`;
}
}
class Rect extends hyperHTML.Component {
constructor(state) {
super();
this.setState(state, false);
}
render() { return this.svg`
<rect x=${this.state.x} y=${this.state.y} />`;
}
}
class Paragraph extends hyperHTML.Component {
constructor(state) {
super();
this.setState(state);
}
onclick() { this.clicked = true; }
render() { return this.html`
<p attr=${this.state.attr} onclick=${this}>hello</p>`;
}
}
var div = document.createElement('div');
var render = hyperHTML.bind(div);
render`${[
new Button,
new Rect({x: 123, y: 456})
]}`;
tressa.assert(div.querySelector('button'), 'the <button> exists');
tressa.assert(div.querySelector('rect'), 'the <rect /> exists');
tressa.assert(div.querySelector('rect').getAttribute('x') == '123', 'attributes are OK');
var p = new Paragraph(() => ({attr: 'test'}));
render`${p}`;
tressa.assert(div.querySelector('p').getAttribute('attr') === 'test', 'the <p attr=test> is defined');
p.render().click();
tressa.assert(p.clicked, 'the event worked');
render`${[
hyperHTML.Component.for.call(Rect, {x: 789, y: 123})
]}`;
tressa.assert(div.querySelector('rect').getAttribute('x') == '789', 'the for(state) worked');
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## Component method via data-call');
class Paragraph extends hyperHTML.Component {
globally(e) {
      tressa.assert(e.type === 'click', 'data-call invoked globally');
done();
}
test(e) {
tressa.assert(e.type === 'click', 'data-call invoked locally');
}
render() { return this.html`
<p data-call="test" onclick=${this}>hello</p>`;
}
}
class GlobalEvent extends hyperHTML.Component {
onclick(e) {
tressa.assert(e.type === 'click', 'click invoked globally');
document.removeEventListener('click', this);
done();
}
render() {
document.addEventListener('click', this);
return document;
}
}
var p = new Paragraph();
p.render().click();
var e = document.createEvent('Event');
e.initEvent('click', true, true);
(new GlobalEvent).render().dispatchEvent(e);
});
})
.then(function () { return tressa.async(function (done) {
tressa.log('## Custom Element attributes');
var global = document.defaultView;
var registry = global.customElements;
var customElements = {
_: Object.create(null),
define: function (name, Class) {
this._[name.toLowerCase()] = Class;
},
get: function (name) {
return this._[name.toLowerCase()];
}
};
Object.defineProperty(global, 'customElements', {
configurable: true,
value: customElements
});
function DumbElement() {}
DumbElement.prototype.dumb = null;
DumbElement.prototype.asd = null;
customElements.define('dumb-element', DumbElement);
function update(wire) {
return wire`<div>
<dumb-element dumb=${true} asd=${'qwe'}></dumb-element><dumber-element dumb=${true}></dumber-element>
</div>`;
}
var div = update(hyperHTML.wire());
if (!(div.firstElementChild instanceof DumbElement)) {
    tressa.assert(div.firstElementChild.dumb !== true, 'non-upgraded elements do not have special attributes');
tressa.assert(div.lastElementChild.dumb !== true, 'unknown elements never have special attributes');
// simulate an upgrade
div.firstElementChild.constructor.prototype.dumb = null;
}
div = update(hyperHTML.wire());
delete div.firstElementChild.constructor.prototype.dumb;
tressa.assert(div.firstElementChild.dumb === true, 'upgraded elements have special attributes');
Object.defineProperty(global, 'customElements', {
configurable: true,
value: registry
});
done();
}); })
.then(function () {
tressa.log('## hyper.Component state');
class DefaultState extends hyperHTML.Component {
get defaultState() { return {a: 'a'}; }
render() {}
}
class State extends hyperHTML.Component {}
var ds = new DefaultState;
var o = ds.state;
tressa.assert(!ds.propertyIsEnumerable('state'), 'states are not enumerable');
tressa.assert(!ds.propertyIsEnumerable('_state$'), 'neither their secret');
tressa.assert(o.a === 'a', 'default state retrieved');
var s = new State;
s.state = o;
tressa.assert(s.state === o, 'state can be set too');
ds.setState({b: 'b'});
tressa.assert(o.a === 'a' && o.b === 'b', 'state was updated');
s.state = {z: 123};
tressa.assert(s.state.z === 123 && !s.state.a, 'state can be re-set too');
})
.then(function () {
tressa.log('## splice and sort');
var todo = [
{id: 0, text: 'write documentation'},
{id: 1, text: 'publish online'},
{id: 2, text: 'create Code Pen'}
];
var div = document.createElement('div');
update();
todo.sort(function(a, b) { return a.text < b.text ? -1 : 1; });
update();
tressa.assert(/^\s+create Code Pen\s*publish online\s*write documentation\s+$/.test(div.textContent), 'correct order');
function update() {
hyperHTML.bind(div)`<ul>
${todo.map(function (item) {
return hyperHTML.wire(item)
`<li data-id=${item.id}>${item.text}</li>`;
})}
</ul>`;
}
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## Component connected/disconnected');
var calls = 0;
class Paragraph extends hyperHTML.Component {
onconnected(e) {
calls++;
tressa.assert(e.type === 'connected', 'component connected');
e.currentTarget.parentNode.removeChild(e.currentTarget);
}
ondisconnected(e) {
calls++;
tressa.assert(e.type === 'disconnected', 'component disconnected');
}
render() { return this.html`
<p onconnected=${this} ondisconnected=${this}>hello</p>`;
}
}
var p = new Paragraph().render();
document.body.appendChild(p);
if (p.parentNode) {
setTimeout(function () {
var e = document.createEvent('Event');
e.initEvent('DOMNodeInserted', false, false);
Object.defineProperty(e, 'target', {value: document.body});
document.dispatchEvent(e);
setTimeout(function () {
e = document.createEvent('Event');
e.initEvent('DOMNodeInserted', false, false);
Object.defineProperty(e, 'target', {value: document.createTextNode('')});
document.dispatchEvent(e);
setTimeout(function () {
e = document.createEvent('Event');
e.initEvent('DOMNodeRemoved', false, false);
Object.defineProperty(e, 'target', {value: p});
document.dispatchEvent(e);
setTimeout(function () {
tressa.assert(calls === 2, 'correct amount of calls');
done();
}, 100);
}, 100);
}, 100);
}, 100);
}
});
})
.then(function () {
tressa.log('## style=${fun}');
var render = hyperHTML.wire();
function p(style) {
return render`<p style=${style}></p>`;
}
var node = p({fontSize:24});
tressa.assert(node.style.fontSize, node.style.fontSize);
p({});
tressa.assert(!node.style.fontSize, 'object cleaned');
p('font-size: 18px');
tressa.assert(node.style.fontSize, node.style.fontSize);
p({'--custom-color': 'red'});
if (node.style.cssText !== '')
tressa.assert(node.style.getPropertyValue('--custom-color') === 'red', 'custom style');
else
console.log('skipping CSS properties for IE');
})
.then(function () {
tressa.log('## <self-closing />');
var div = hyperHTML.wire()`<div><self-closing test=${1} /><input /><self-closing test="2" /></div>`;
tressa.assert(div.childNodes.length === 3, 'nodes did self close');
tressa.assert(div.childNodes[0].getAttribute('test') == "1", 'first node ok');
tressa.assert(/input/i.test(div.childNodes[1].nodeName), 'second node ok');
tressa.assert(div.childNodes[2].getAttribute('test') == "2", 'third node ok');
div = hyperHTML.wire()`<div>
<self-closing
test=1
/><input
/><self-closing test="2"
/>
</div>`;
tressa.assert(div.children.length === 3, 'nodes did self close');
tressa.assert(div.children[0].getAttribute('test') == "1", 'first node ok');
tressa.assert(/input/i.test(div.children[1].nodeName), 'second node ok');
tressa.assert(div.children[2].getAttribute('test') == "2", 'third node ok');
div = hyperHTML.wire()`
<div style="width: 200px;">
<svg viewBox="0 0 30 30" fill="currentColor">
<path d="M 0,27 L 27,0 L 30,3 L 3,30 Z" />
<path d="M 0,3 L 3,0 L 30,27 L 27,30 Z" />
</svg>
</div>
`;
tressa.assert(div.children.length === 1, 'one svg');
tressa.assert(div.querySelectorAll('path').length === 2, 'two paths');
})
.then(function () {
tressa.log('## <with><self-closing /></with>');
function check(form) {
return form.children.length === 3 &&
/label/i.test(form.children[0].nodeName) &&
/input/i.test(form.children[1].nodeName) &&
/button/i.test(form.children[2].nodeName)
}
tressa.assert(
check(hyperHTML.wire()`
<form onsubmit=${check}>
<label/>
<input type="email" placeholder="email">
<button>Button</button>
</form>`),
'no quotes is OK'
);
tressa.assert(
check(hyperHTML.wire()`
<form onsubmit=${check}>
<label />
<input type="email" placeholder="email"/>
<button>Button</button>
</form>`),
'self closing is OK'
);
tressa.assert(
check(hyperHTML.wire()`
<form onsubmit="${check}">
<label/>
<input type="email" placeholder="email">
<button>Button</button>
</form>`),
'quotes are OK'
);
tressa.assert(
check(hyperHTML.wire()`
<form onsubmit="${check}">
<label/>
<input type="email" placeholder="email" />
<button>Button</button>
</form>`),
'quotes and self-closing too OK'
);
})
.then(function () {
return tressa.async(function (done) {
tressa.log('## Nested Component connected/disconnected');
class GrandChild extends hyperHTML.Component {
onconnected(e) {
tressa.assert(e.type === 'connected', 'grand child component connected');
}
ondisconnected(e) {
tressa.assert(e.type === 'disconnected', 'grand child component disconnected');
}
render() {
return this.html`
<p class="grandchild" onconnected=${this} ondisconnected=${this}>I'm grand child</p>`;
}
}
class Child extends hyperHTML.Component {
onconnected(e) {
tressa.assert(e.type === 'connected', 'child component connected');
}
ondisconnected(e) {
tressa.assert(e.type === 'disconnected', 'child component disconnected');
}
render() {
return this.html`
<div class="child" onconnected=${this} ondisconnected=${this}>I'm child
${new GrandChild()}
</div>
`;
}
}
let connectedTimes = 0, disconnectedTimes = 0;
class Parent extends hyperHTML.Component {
onconnected(e) {
connectedTimes ++;
tressa.assert(e.type === 'connected', 'parent component connected');
tressa.assert(connectedTimes === 1, 'connected callback should only be triggered once');
}
ondisconnected(e) {
disconnectedTimes ++;
tressa.assert(e.type === 'disconnected', 'parent component disconnected');
tressa.assert(disconnectedTimes === 1, 'disconnected callback should only be triggered once');
done();
}
render() {
return this.html`
<div class="parent" onconnected=${this} ondisconnected=${this}>I'm parent
${new Child()}
</div>
`;
}
}
var p = new Parent().render();
document.body.appendChild(p);
setTimeout(function () {
if (p.parentNode) {
var e = document.createEvent('Event');
e.initEvent('DOMNodeInserted', false, false);
Object.defineProperty(e, 'target', {value: document.body});
document.dispatchEvent(e);
setTimeout(function () {
e = document.createEvent('Event');
e.initEvent('DOMNodeRemoved', false, false);
Object.defineProperty(e, 'target', {value: p});
document.dispatchEvent(e);
if (p.parentNode)
p.parentNode.removeChild(p);
}, 100);
}
}, 100);
});
})
.then(function () {
tressa.log('## Declarative Components');
class MenuSimple extends hyperHTML.Component {
render(props) {
return this.setState(props, false).html`
<div>A simple menu</div>
<ul>
${props.items.map(
(item, i) => MenuItem.for(this, i).render(item)
)}
</ul>
`;
}
}
class MenuWeakMap extends hyperHTML.Component {
render(props) {
return this.setState(props, false).html`
<div>A simple menu</div>
<ul>
${props.items.map(
item => MenuItem.for(this, item).render(item)
)}
</ul>
`;
}
}
class MenuItem extends hyperHTML.Component {
render(props) {
return this.setState(props, false).html`
<li>${props.name}</li>
`;
}
}
var a = document.createElement('div');
var b = document.createElement('div');
var method = hyperHTML.Component.for;
if (!MenuSimple.for) {
MenuSimple.for = method;
MenuWeakMap.for = method;
MenuItem.for = method;
}
hyperHTML.bind(a)`${MenuSimple.for(a).render({
items: [{name: 'item 1'}, {name: 'item 2'}, {name: 'item 3'}]
})}`;
tressa.assert(MenuSimple.for(a) === MenuSimple.for(a), 'same simple menu');
hyperHTML.bind(b)`${MenuWeakMap.for(b).render({
items: [{name: 'item 1'}, {name: 'item 2'}, {name: 'item 3'}]
})}`;
tressa.assert(MenuWeakMap.for(a) === MenuWeakMap.for(a), 'same weakmap menu');
tressa.assert(MenuSimple.for(a) !== MenuWeakMap.for(a), 'different from simple');
tressa.assert(MenuSimple.for(a) === MenuSimple.for(a), 'same as simple');
tressa.assert(a.outerHTML === b.outerHTML, 'same layout');
})
.then(function () {
tressa.log('## Component.dispatch');
class Pomponent extends hyperHTML.Component {
trigger() {
this.dispatch('event', 123);
}
render() {
return this.html`<p>a</p><p>b</p>`;
}
}
class Solonent extends hyperHTML.Component {
render() {
return this.html`<p>c</p>`;
}
}
var a = document.createElement('div');
var p = new Pomponent;
p.trigger();
var s = new Solonent;
var dispatched = false;
hyperHTML.bind(a)`${[p, s]}`;
a.addEventListener('event', event => {
tressa.assert(event.detail === 123, 'expected details');
tressa.assert(event.component === p, 'expected component');
dispatched = true;
});
p.trigger();
s.dispatch('test');
if (!dispatched) throw new Error('broken dispatch');
})
.then(function () {
tressa.log('## slotted callback');
var div = document.createElement('div');
var result = [];
function A() {
result.push(arguments);
return {html: '<b>a</b>'};
}
function B() {
result.push(arguments);
return {html: '<b>b</b>'};
}
function update() {
hyperHTML.bind(div)`${A} - ${B}`;
}
update();
tressa.assert(result[0][0].parentNode === div, 'expected parent node for A');
tressa.assert(result[1][0].parentNode === div, 'expected parent node for B');
})
.then(function () {
tressa.log('## define(hyper-attribute, callback)');
var a = document.createElement('div');
var random = Math.random().toPrecision(6); // IE < 11
var result = [];
hyperHTML.define('hyper-attribute', function (target, value) {
result.push(target, value);
return random;
});
hyperHTML.bind(a)`<p hyper-attribute=${random}/>`;
if (!result.length)
throw new Error('attributes intents failed');
else {
tressa.assert(result[0] === a.firstElementChild, 'expected target');
tressa.assert(result[1] === random, 'expected value');
tressa.assert(
a.firstElementChild.getAttribute('hyper-attribute') == random,
'expected attribute'
);
}
result.splice(0);
hyperHTML.define('other-attribute', function (target, value) {
result.push(target, value);
return '';
});
hyperHTML.define('disappeared-attribute', function (target, value) {
});
hyperHTML.define('whatever-attribute', function (target, value) {
return value;
});
hyperHTML.define('null-attribute', function (target, value) {
return null;
});
hyperHTML.bind(a)`<p
other-attribute=${random}
disappeared-attribute=${random}
whatever-attribute=${random}
null-attribute=${random}
/>`;
if (!result.length)
throw new Error('attributes intents failed');
else {
tressa.assert(result[0] === a.firstElementChild, 'expected other target');
tressa.assert(result[1] === random, 'expected other value');
tressa.assert(
a.firstElementChild.getAttribute('other-attribute') === '',
'expected other attribute'
);
tressa.assert(
!a.firstElementChild.hasAttribute('disappeared-attribute'),
'disappeared-attribute removed'
);
tressa.assert(
a.firstElementChild.getAttribute('whatever-attribute') == random,
'whatever-attribute set'
);
tressa.assert(
!a.firstElementChild.hasAttribute('null-attribute'),
'null-attribute removed'
);
}
})
// WARNING THESE TESTS MUST BE AT THE VERY END
// WARNING THESE TESTS MUST BE AT THE VERY END
// WARNING THESE TESTS MUST BE AT THE VERY END
.then(function () {
  // WARNING THESE TESTS MUST BE AT THE VERY END
tressa.log('## IE9 double viewBox 🌈 🌈');
var output = document.createElement('div');
try {
hyperHTML.bind(output)`<svg viewBox=${'0 0 50 50'}></svg>`;
tressa.assert(output.firstChild.getAttribute('viewBox') == '0 0 50 50', 'correct camelCase attribute');
} catch(o_O) {
tressa.assert(true, 'code coverage caveat');
}
})
.then(function () {
tressa.log('## A-Frame compatibility');
var output = hyperHTML.wire()`<a-scene></a-scene>`;
tressa.assert(output.nodeName.toLowerCase() === 'a-scene', 'correct element');
})
// */
.then(function () {
if (!tressa.exitCode) {
document.body.style.backgroundColor = '#0FA';
}
tressa.end();
});<|fim▁end|> | tressa.assert(/<div><!--.+?--><\/div>/.test(div.innerHTML), 'dropped all children');
}) |
<|file_name|>past-date-validator-widget-factory-spec.js<|end_file_name|><|fim▁begin|>describe('PastDateValidatorWidgetFactory', function() {
var Mock = {};
var factory;
  var whoAmI;
  var widget;
beforeEach(function() {
angular.mock.module('studio');
mockElement();
inject(function(_$injector_) {
mockWidgetScope(_$injector_);
factory = _$injector_.get('PastDateValidatorWidgetFactory');
});
widget = factory.create(Mock.scope, Mock.element);
});
describe('Start a PastDate Factory Object', function() {
it('should return a PastDate Validator Object', function() {
pending();<|fim▁hole|> expect(widget.data).toBeDefined();
expect(widget.data).toEqual(false);
});
});
describe('updates on data', function() {
    xit('should keep the model data value equal to the widget data', function() {
// expect(Mock.question.fillingRules.options['pastDate'].data.reference).toEqual(widget.data);
});
it('should call updateFillingRules from parente widget', function() {
spyOn(Mock.parentWidget, 'updateFillingRules');
widget.updateData();
expect(Mock.parentWidget.updateFillingRules).toHaveBeenCalled();
});
});
function mockElement() {
Mock.element = {};
}
function mockWidgetScope($injector) {
Mock.scope = {
class: '',
$parent: {
widget: mockParentWidget($injector)
}
};
return Mock.scope;
}
function mockParentWidget($injector) {
mockQuestion($injector);
Mock.parentWidget = {
getItem: function() {
return Mock.question;
},
updateFillingRules: function() {}
};
return Mock.parentWidget;
}
function mockQuestion($injector) {
Mock.question = $injector.get('SurveyItemFactory').create('IntegerQuestion', 'Q1');
Mock.question.fillingRules.options.pastDate = $injector.get('RulesFactory').create('pastDate');
return Mock.question;
}
function mockAdd($injector) {
Mock.add = $injector.get('FillingRulesEditorWidgetFactory').create();
}
});<|fim▁end|> | });
it('should start the data field as date', function() { |
<|file_name|>152_add_message_id_to_event.py<|end_file_name|><|fim▁begin|>"""add message column to event
Revision ID: 211e93aff1e1<|fim▁hole|>Revises: 2493281d621
Create Date: 2015-03-20 18:50:29.961734
"""
# revision identifiers, used by Alembic.
revision = '211e93aff1e1'
down_revision = '2f3c8fa3fc3a'
from alembic import op
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
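    # FK checks are switched off for the whole migration, presumably so that
    # existing rows cannot block adding the new foreign key constraint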
conn.execute(text("SET FOREIGN_KEY_CHECKS=0;"))
conn.execute(text("ALTER TABLE event ADD COLUMN message_id int(11) DEFAULT NULL"))
conn.execute(text("ALTER TABLE event ADD CONSTRAINT message_ifbk FOREIGN KEY "
"(`message_id`) REFERENCES `message` (`id`) ON DELETE CASCADE"))
def downgrade():
conn = op.get_bind()
conn.execute(text("SET FOREIGN_KEY_CHECKS=0;"))
conn.execute(text("ALTER TABLE event DROP FOREIGN KEY message_ifbk"))
conn.execute(text("ALTER TABLE event DROP COLUMN message_id"))<|fim▁end|> | |
<|file_name|>file_stream.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/base/file_stream.h"
#include "base/location.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/task_runner_util.h"
#include "base/threading/thread_restrictions.h"
#include "base/threading/worker_pool.h"
#include "net/base/file_stream_context.h"
#include "net/base/file_stream_net_log_parameters.h"
#include "net/base/net_errors.h"
namespace net {
FileStream::FileStream(NetLog* net_log,
const scoped_refptr<base::TaskRunner>& task_runner)
    /* To allow a never-opened stream to be destroyed on any thread, we set
       flags as if the stream was opened asynchronously. */
: bound_net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_FILESTREAM)),
context_(new Context(bound_net_log_, task_runner)) {
bound_net_log_.BeginEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
FileStream::FileStream(NetLog* net_log)
    /* To allow a never-opened stream to be destroyed on any thread, we set
       flags as if the stream was opened asynchronously. */
: bound_net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_FILESTREAM)),
context_(new Context(bound_net_log_,
base::WorkerPool::GetTaskRunner(true /* slow */))) {
bound_net_log_.BeginEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
FileStream::FileStream(base::PlatformFile file,
int flags,
NetLog* net_log,
const scoped_refptr<base::TaskRunner>& task_runner)
: bound_net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_FILESTREAM)),
context_(new Context(base::File(file), flags, bound_net_log_,
task_runner)) {
bound_net_log_.BeginEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
FileStream::FileStream(base::PlatformFile file, int flags, NetLog* net_log)
: bound_net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_FILESTREAM)),
context_(new Context(base::File(file), flags, bound_net_log_,
base::WorkerPool::GetTaskRunner(true /* slow */))) {
bound_net_log_.BeginEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
FileStream::FileStream(base::File file,
net::NetLog* net_log,
const scoped_refptr<base::TaskRunner>& task_runner)
: bound_net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_FILESTREAM)),
context_(new Context(file.Pass(), bound_net_log_, task_runner)) {
bound_net_log_.BeginEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
FileStream::FileStream(base::File file, net::NetLog* net_log)
: bound_net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_FILESTREAM)),
context_(new Context(file.Pass(), bound_net_log_,
base::WorkerPool::GetTaskRunner(true /* slow */))) {
bound_net_log_.BeginEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
FileStream::~FileStream() {
if (context_->async()) {
context_.release()->Orphan();
} else {
context_->CloseSync();
context_.reset();
}
bound_net_log_.EndEvent(NetLog::TYPE_FILE_STREAM_ALIVE);
}
int FileStream::Open(const base::FilePath& path, int open_flags,
const CompletionCallback& callback) {
if (IsOpen()) {
DLOG(FATAL) << "File is already open!";
return ERR_UNEXPECTED;
}
DCHECK(open_flags & base::File::FLAG_ASYNC);
context_->OpenAsync(path, open_flags, callback);
return ERR_IO_PENDING;
}
int FileStream::OpenSync(const base::FilePath& path, int open_flags) {
base::ThreadRestrictions::AssertIOAllowed();
if (IsOpen()) {
DLOG(FATAL) << "File is already open!";
return ERR_UNEXPECTED;
}
DCHECK(!context_->async());
return context_->OpenSync(path, open_flags);
}
int FileStream::Close(const CompletionCallback& callback) {
DCHECK(context_->async());
context_->CloseAsync(callback);
return ERR_IO_PENDING;
}
int FileStream::CloseSync() {
DCHECK(!context_->async());
base::ThreadRestrictions::AssertIOAllowed();
context_->CloseSync();
return OK;
}
bool FileStream::IsOpen() const {
return context_->file().IsValid();
}
int FileStream::Seek(Whence whence,
int64 offset,
const Int64CompletionCallback& callback) {
if (!IsOpen())
return ERR_UNEXPECTED;
// Make sure we're async.
DCHECK(context_->async());
context_->SeekAsync(whence, offset, callback);
return ERR_IO_PENDING;
}
int64 FileStream::SeekSync(Whence whence, int64 offset) {
base::ThreadRestrictions::AssertIOAllowed();
if (!IsOpen())
return ERR_UNEXPECTED;
// If we're in async, make sure we don't have a request in flight.
DCHECK(!context_->async() || !context_->async_in_progress());
return context_->SeekSync(whence, offset);
}
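// Number of bytes readable from the current position to end-of-file,
// or a negative net error code on failure.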
int64 FileStream::Available() {
base::ThreadRestrictions::AssertIOAllowed();
if (!IsOpen())
return ERR_UNEXPECTED;
int64 cur_pos = SeekSync(FROM_CURRENT, 0);
if (cur_pos < 0)
return cur_pos;
int64 size = context_->GetFileSize();
if (size < 0)
return size;
DCHECK_GE(size, cur_pos);
return size - cur_pos;
}
int FileStream::Read(IOBuffer* buf,
int buf_len,
const CompletionCallback& callback) {
if (!IsOpen())
return ERR_UNEXPECTED;
// read(..., 0) will return 0, which indicates end-of-file.
DCHECK_GT(buf_len, 0);
DCHECK(context_->async());
return context_->ReadAsync(buf, buf_len, callback);
}
int FileStream::ReadSync(char* buf, int buf_len) {
base::ThreadRestrictions::AssertIOAllowed();
if (!IsOpen())
return ERR_UNEXPECTED;
DCHECK(!context_->async());
// read(..., 0) will return 0, which indicates end-of-file.
DCHECK_GT(buf_len, 0);
return context_->ReadSync(buf, buf_len);
}
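// Repeatedly calls ReadSync() until |buf_len| bytes have been read; stops
// early on EOF or error, returning the byte count read so far (if any).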
int FileStream::ReadUntilComplete(char *buf, int buf_len) {
base::ThreadRestrictions::AssertIOAllowed();
int to_read = buf_len;
int bytes_total = 0;
do {
int bytes_read = ReadSync(buf, to_read);
if (bytes_read <= 0) {
if (bytes_total == 0)
return bytes_read;
return bytes_total;
}
bytes_total += bytes_read;
buf += bytes_read;
to_read -= bytes_read;
} while (bytes_total < buf_len);
return bytes_total;
}
int FileStream::Write(IOBuffer* buf,
int buf_len,
const CompletionCallback& callback) {
if (!IsOpen())
return ERR_UNEXPECTED;
DCHECK(context_->async());
// write(..., 0) will return 0, which indicates end-of-file.
DCHECK_GT(buf_len, 0);
return context_->WriteAsync(buf, buf_len, callback);
}<|fim▁hole|>
if (!IsOpen())
return ERR_UNEXPECTED;
DCHECK(!context_->async());
// write(..., 0) will return 0, which indicates end-of-file.
DCHECK_GT(buf_len, 0);
return context_->WriteSync(buf, buf_len);
}
int64 FileStream::Truncate(int64 bytes) {
base::ThreadRestrictions::AssertIOAllowed();
if (!IsOpen())
return ERR_UNEXPECTED;
// Seek to the position to truncate from.
int64 seek_position = SeekSync(FROM_BEGIN, bytes);
if (seek_position != bytes)
return ERR_UNEXPECTED;
// And truncate the file.
return context_->Truncate(bytes);
}
int FileStream::Flush(const CompletionCallback& callback) {
if (!IsOpen())
return ERR_UNEXPECTED;
// Make sure we're async.
DCHECK(context_->async());
context_->FlushAsync(callback);
return ERR_IO_PENDING;
}
int FileStream::FlushSync() {
base::ThreadRestrictions::AssertIOAllowed();
if (!IsOpen())
return ERR_UNEXPECTED;
return context_->FlushSync();
}
void FileStream::EnableErrorStatistics() {
context_->set_record_uma(true);
}
void FileStream::SetBoundNetLogSource(const BoundNetLog& owner_bound_net_log) {
if ((owner_bound_net_log.source().id == NetLog::Source::kInvalidId) &&
(bound_net_log_.source().id == NetLog::Source::kInvalidId)) {
// Both |BoundNetLog|s are invalid.
return;
}
// Should never connect to itself.
DCHECK_NE(bound_net_log_.source().id, owner_bound_net_log.source().id);
bound_net_log_.AddEvent(NetLog::TYPE_FILE_STREAM_BOUND_TO_OWNER,
owner_bound_net_log.source().ToEventParametersCallback());
owner_bound_net_log.AddEvent(NetLog::TYPE_FILE_STREAM_SOURCE,
bound_net_log_.source().ToEventParametersCallback());
}
const base::File& FileStream::GetFileForTesting() const {
return context_->file();
}
} // namespace net<|fim▁end|> |
int FileStream::WriteSync(const char* buf, int buf_len) {
base::ThreadRestrictions::AssertIOAllowed(); |
<|file_name|>ByteArrayTypeDescriptor.java<|end_file_name|><|fim▁begin|>/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.type.descriptor.java;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.BinaryStream;
import org.hibernate.engine.jdbc.internal.BinaryStreamImpl;
import org.hibernate.type.descriptor.WrapperOptions;
/**
* Descriptor for {@code Byte[]} handling.
*
* @author Steve Ebersole
*/
public class ByteArrayTypeDescriptor extends AbstractTypeDescriptor<Byte[]> {
public static final ByteArrayTypeDescriptor INSTANCE = new ByteArrayTypeDescriptor();
@SuppressWarnings({ "unchecked" })
public ByteArrayTypeDescriptor() {
super( Byte[].class, ArrayMutabilityPlan.INSTANCE );
}
@Override
public String toString(Byte[] bytes) {
final StringBuilder buf = new StringBuilder();
for ( Byte aByte : bytes ) {
final String hexStr = Integer.toHexString( aByte - Byte.MIN_VALUE );
if ( hexStr.length() == 1 ) {
buf.append( '0' );
}
buf.append( hexStr );
}
return buf.toString();
}
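	// toString()/fromString() encode each byte as two hex digits, offset by
	// Byte.MIN_VALUE so the intermediate value always falls within [0, 255]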
@Override
public Byte[] fromString(String string) {
if ( string == null ) {
return null;
}
if ( string.length() % 2 != 0 ) {
			throw new IllegalArgumentException( "The string is not a valid string representation of binary content." );
}
Byte[] bytes = new Byte[string.length() / 2];
for ( int i = 0; i < bytes.length; i++ ) {
final String hexStr = string.substring( i * 2, (i + 1) * 2 );
bytes[i] = (byte) ( Integer.parseInt( hexStr, 16 ) + Byte.MIN_VALUE );
}
return bytes;
}
@SuppressWarnings({ "unchecked" })
@Override
public <X> X unwrap(Byte[] value, Class<X> type, WrapperOptions options) {
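		// walk the supported representations in order: Byte[], byte[],
		// InputStream, BinaryStream, and finally Blob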
if ( value == null ) {
return null;
}
if ( Byte[].class.isAssignableFrom( type ) ) {
return (X) value;
}
if ( byte[].class.isAssignableFrom( type ) ) {<|fim▁hole|> return (X) new ByteArrayInputStream( unwrapBytes( value ) );
}
if ( BinaryStream.class.isAssignableFrom( type ) ) {
return (X) new BinaryStreamImpl( unwrapBytes( value ) );
}
if ( Blob.class.isAssignableFrom( type ) ) {
return (X) options.getLobCreator().createBlob( unwrapBytes( value ) );
}
throw unknownUnwrap( type );
}
@Override
public <X> Byte[] wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( Byte[].class.isInstance( value ) ) {
return (Byte[]) value;
}
if ( byte[].class.isInstance( value ) ) {
return wrapBytes( (byte[]) value );
}
if ( InputStream.class.isInstance( value ) ) {
return wrapBytes( DataHelper.extractBytes( (InputStream) value ) );
}
if ( Blob.class.isInstance( value ) || DataHelper.isNClob( value.getClass() ) ) {
try {
return wrapBytes( DataHelper.extractBytes( ( (Blob) value ).getBinaryStream() ) );
}
catch ( SQLException e ) {
throw new HibernateException( "Unable to access lob stream", e );
}
}
throw unknownWrap( value.getClass() );
}
private Byte[] wrapBytes(byte[] bytes) {
if ( bytes == null ) {
return null;
}
final Byte[] result = new Byte[bytes.length];
for ( int i = 0; i < bytes.length; i++ ) {
result[i] = bytes[i];
}
return result;
}
private byte[] unwrapBytes(Byte[] bytes) {
if ( bytes == null ) {
return null;
}
final byte[] result = new byte[bytes.length];
for ( int i = 0; i < bytes.length; i++ ) {
result[i] = bytes[i];
}
return result;
}
}<|fim▁end|> | return (X) unwrapBytes( value );
}
if ( InputStream.class.isAssignableFrom( type ) ) { |
<|file_name|>test_cube_to_pp.py<|end_file_name|><|fim▁begin|># (C) British Crown Copyright 2010 - 2017, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
# import iris tests first so that some things can be initialised before importing anything else
import iris.tests as tests
import os
import tempfile
import cf_units
import numpy as np
import iris.coords
import iris.coord_systems
import iris.fileformats.pp
from iris.fileformats.pp import PPField3
from iris.tests import mock
import iris.tests.pp as pp
import iris.util
import iris.tests.stock as stock
def itab_callback(cube, field, filename):
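    # Load callback: attaches each PP field's header release number (lbrel)
    # and experiment number (lbexp) as scalar auxiliary coordinates.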
cube.add_aux_coord(iris.coords.AuxCoord([field.lbrel], long_name='MOUMHeaderReleaseNumber', units='no_unit'))
cube.add_aux_coord(iris.coords.AuxCoord([field.lbexp], long_name='ExperimentNumber(ITAB)', units='no_unit'))
@tests.skip_data
class TestPPSave(tests.IrisTest, pp.PPTest):
def test_no_forecast_time(self):
cube = stock.lat_lon_cube()
coord = iris.coords.DimCoord(np.array([24], dtype=np.int64),
standard_name='time',
units='hours since epoch')
cube.add_aux_coord(coord)
self.assertCML(cube, ['cube_to_pp', 'no_forecast_time.cml'])
reference_txt_path = tests.get_result_path(('cube_to_pp', 'no_forecast_time.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=cube) as temp_pp_path:
iris.save(cube, temp_pp_path)
def test_no_forecast_period(self):
cube = stock.lat_lon_cube()
# Add a bounded scalar time coord and a forecast_reference_time.
time_coord = iris.coords.DimCoord(
10.958333, standard_name='time',
units='days since 2013-05-10 12:00',
bounds=[10.916667, 11.0])
cube.add_aux_coord(time_coord)
forecast_reference_time = iris.coords.DimCoord(
2.0, standard_name='forecast_reference_time',
units='weeks since 2013-05-07')
cube.add_aux_coord(forecast_reference_time)
self.assertCML(cube, ['cube_to_pp', 'no_forecast_period.cml'])
reference_txt_path = tests.get_result_path(('cube_to_pp',
'no_forecast_period.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=cube) as \
temp_pp_path:
iris.save(cube, temp_pp_path)
def test_pp_save_rules(self):
# Test pp save rules without user rules.
#read
in_filename = tests.get_data_path(('PP', 'simple_pp', 'global.pp'))
cubes = iris.load(in_filename, callback=itab_callback)
reference_txt_path = tests.get_result_path(('cube_to_pp', 'simple.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=cubes) as temp_pp_path:
iris.save(cubes, temp_pp_path)
def test_pp_append_singles(self):
# Test pp append saving - single cubes.
# load 2 arrays of >2D cubes
cube = stock.simple_pp()
reference_txt_path = tests.get_result_path(('cube_to_pp', 'append_single.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=[cube, cube]) as temp_pp_path:
iris.save(cube, temp_pp_path) # Create file
iris.save(cube, temp_pp_path, append=True) # Append to file
reference_txt_path = tests.get_result_path(('cube_to_pp', 'replace_single.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=cube) as temp_pp_path:
iris.save(cube, temp_pp_path) # Create file
iris.save(cube, temp_pp_path) # Replace file
def test_pp_append_lists(self):
# Test PP append saving - lists of cubes.
# For each of the first four time-steps in the 4D cube,
# pull out the bottom two levels.
cube_4d = stock.realistic_4d()
cubes = [cube_4d[i, :2, :, :] for i in range(4)]
reference_txt_path = tests.get_result_path(('cube_to_pp', 'append_multi.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=cubes) as temp_pp_path:
iris.save(cubes[:2], temp_pp_path)
iris.save(cubes[2:], temp_pp_path, append=True)
reference_txt_path = tests.get_result_path(('cube_to_pp', 'replace_multi.txt'))
with self.cube_save_test(reference_txt_path, reference_cubes=cubes[2:]) as temp_pp_path:
iris.save(cubes[:2], temp_pp_path)
iris.save(cubes[2:], temp_pp_path)
def add_coords_to_cube_and_test(self, coord1, coord2):
        # A wrapper for creating arbitrary 2D cross-sections and running PP-saving tests.
dataarray = np.arange(16, dtype='>f4').reshape(4, 4)
cm = iris.cube.Cube(data=dataarray)
cm.add_dim_coord(coord1, 0)
cm.add_dim_coord(coord2, 1)
# TODO: This is the desired line of code...
# reference_txt_path = tests.get_result_path(('cube_to_pp', '%s.%s.pp.txt' % (coord1.name(), coord2.name())))
# ...but this is required during the CF change, to maintain the original filename.
coord1_name = coord1.name().replace("air_", "")
coord2_name = coord2.name().replace("air_", "")
reference_txt_path = tests.get_result_path(('cube_to_pp', '%s.%s.pp.txt' % (coord1_name, coord2_name)))
# test with name
with self.cube_save_test(reference_txt_path, reference_cubes=cm,
field_coords=[coord1.name(), coord2.name()]) as temp_pp_path:
iris.save(cm, temp_pp_path, field_coords=[coord1.name(), coord2.name()])<|fim▁hole|> with self.cube_save_test(reference_txt_path, reference_cubes=cm,
field_coords=[coord1, coord2]) as temp_pp_path:
iris.save(cm, temp_pp_path, field_coords=[coord1, coord2])
def test_non_standard_cross_sections(self):
        # Ticket #1037: the five variants being dealt with are
# 'pressure.latitude',
# 'depth.latitude',
# 'eta.latitude',
# 'pressure.time',
# 'depth.time',
f = FakePPEnvironment()
self.add_coords_to_cube_and_test(
iris.coords.DimCoord(f.z, long_name='air_pressure', units='hPa', bounds=f.z_bounds),
iris.coords.DimCoord(f.y, standard_name='latitude', units='degrees', bounds=f.y_bounds, coord_system=f.geog_cs()))
self.add_coords_to_cube_and_test(
iris.coords.DimCoord(f.z, long_name='depth', units='m', bounds=f.z_bounds),
iris.coords.DimCoord(f.y, standard_name='latitude', units='degrees', bounds=f.y_bounds, coord_system=f.geog_cs()))
self.add_coords_to_cube_and_test(
iris.coords.DimCoord(f.z, long_name='eta', units='1', bounds=f.z_bounds),
iris.coords.DimCoord(f.y, standard_name='latitude', units='degrees', bounds=f.y_bounds, coord_system=f.geog_cs()))
self.add_coords_to_cube_and_test(
iris.coords.DimCoord(f.z, long_name='air_pressure', units='hPa', bounds=f.z_bounds),
iris.coords.DimCoord(f.y, standard_name='time', units=cf_units.Unit('days since 0000-01-01 00:00:00', calendar=cf_units.CALENDAR_360_DAY), bounds=f.y_bounds))
self.add_coords_to_cube_and_test(
iris.coords.DimCoord(f.z, standard_name='depth', units='m', bounds=f.z_bounds),
iris.coords.DimCoord(f.y, standard_name='time', units=cf_units.Unit('days since 0000-01-01 00:00:00', calendar=cf_units.CALENDAR_360_DAY), bounds=f.y_bounds))
def test_365_calendar_export(self):
# test for 365 day calendar export
cube = stock.simple_pp()
new_unit = cf_units.Unit('hours since 1970-01-01 00:00:00',
calendar=cf_units.CALENDAR_365_DAY)
cube.coord('time').units = new_unit
# Add an extra "fill_value" property, as used by the save rules.
cube.fill_value = None
pp_field = mock.MagicMock(spec=PPField3)
iris.fileformats.pp_save_rules.verify(cube, pp_field)
self.assertEqual(pp_field.lbtim.ic, 4)
class FakePPEnvironment(object):
    '''Fake a minimal PP environment for use in cross-section coords, as in PP save rules.'''
y = [1, 2, 3, 4]
z = [111, 222, 333, 444]
y_bounds = [[0.9, 1.1], [1.9, 2.1], [2.9, 3.1], [3.9, 4.1]]
z_bounds = [[110.9, 111.1], [221.9, 222.1], [332.9, 333.1], [443.9, 444.1]]
def geog_cs(self):
"""Return a GeogCS for this PPField.
Returns:
A GeogCS with the appropriate earth shape, meridian and pole position.
"""
return iris.coord_systems.GeogCS(6371229.0)
class TestPPSaveRules(tests.IrisTest, pp.PPTest):
def test_default_coord_system(self):
GeogCS = iris.coord_systems.GeogCS
cube = iris.tests.stock.lat_lon_cube()
reference_txt_path = tests.get_result_path(('cube_to_pp',
'default_coord_system.txt'))
# Remove all coordinate systems.
for coord in cube.coords():
coord.coord_system = None
# Ensure no coordinate systems available.
self.assertIsNone(cube.coord_system(GeogCS))
self.assertIsNone(cube.coord_system(None))
with self.cube_save_test(reference_txt_path, reference_cubes=cube) as \
temp_pp_path:
# Save cube to PP with no coordinate system.
iris.save(cube, temp_pp_path)
pp_cube = iris.load_cube(temp_pp_path)
# Ensure saved cube has the default coordinate system.
self.assertIsInstance(pp_cube.coord_system(GeogCS),
iris.coord_systems.GeogCS)
self.assertIsNotNone(pp_cube.coord_system(None))
self.assertIsInstance(pp_cube.coord_system(None),
iris.coord_systems.GeogCS)
self.assertIsNotNone(pp_cube.coord_system())
self.assertIsInstance(pp_cube.coord_system(),
iris.coord_systems.GeogCS)
def lbproc_from_pp(self, filename):
# Gets the lbproc field from the ppfile
pp_file = iris.fileformats.pp.load(filename)
field = next(pp_file)
return field.lbproc
def test_pp_save_rules(self):
# Test single process flags
for _, process_desc in iris.fileformats.pp.LBPROC_PAIRS[1:]:
# Get basic cube and set process flag manually
ll_cube = stock.lat_lon_cube()
ll_cube.attributes["ukmo__process_flags"] = (process_desc,)
# Save cube to pp
temp_filename = iris.util.create_temp_filename(".pp")
iris.save(ll_cube, temp_filename)
# Check the lbproc is what we expect
self.assertEqual(self.lbproc_from_pp(temp_filename),
iris.fileformats.pp.lbproc_map[process_desc])
os.remove(temp_filename)
        # Test multiple process flags.
multiple_bit_values = ((128, 64), (4096, 1024), (8192, 1024))
# Maps lbproc value to the process flags that should be created
multiple_map = {sum(bits) : [iris.fileformats.pp.lbproc_map[bit] for bit in bits] for bits in multiple_bit_values}
for lbproc, descriptions in six.iteritems(multiple_map):
ll_cube = stock.lat_lon_cube()
ll_cube.attributes["ukmo__process_flags"] = descriptions
# Save cube to pp
temp_filename = iris.util.create_temp_filename(".pp")
iris.save(ll_cube, temp_filename)
# Check the lbproc is what we expect
self.assertEqual(self.lbproc_from_pp(temp_filename), lbproc)
os.remove(temp_filename)
@tests.skip_data
def test_lbvc(self):
cube = stock.realistic_4d_no_derived()[0, :4, ...]
v_coord = iris.coords.DimCoord(standard_name='depth',
units='m', points=[-5, -10, -15, -20])
cube.remove_coord('level_height')
cube.remove_coord('sigma')
cube.remove_coord('surface_altitude')
cube.add_aux_coord(v_coord, 0)
expected = ([2, 1, -5.0],
[2, 2, -10.0],
[2, 3, -15.0],
[2, 4, -20.0])
for field, (lbvc, lblev, blev) in zip(fields_from_cube(cube), expected):
self.assertEqual(field.lbvc, lbvc)
self.assertEqual(field.lblev, lblev)
self.assertEqual(field.blev, blev)
def fields_from_cube(cubes):
"""
    Return an iterator of PP fields generated by saving the given cube(s)
    to a temporary file and then loading them back in.
"""
with tempfile.NamedTemporaryFile('w+b', suffix='.pp') as tmp_file:
if six.PY2:
fh = tmp_file.file
else:
fh = tmp_file
iris.save(cubes, fh, saver='pp')
# make sure the fh is written to disk, and move it back to the
# start of the file
fh.flush()
os.fsync(fh)
fh.seek(0)
# load in the saved pp fields and check the appropriate metadata
for field in iris.fileformats.pp.load(tmp_file.name):
yield field
if __name__ == "__main__":
tests.main()<|fim▁end|> | # test with coord |
<|file_name|>spacetodepth_op_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for SpacetoDepth op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
class SpaceToDepthTest(test.TestCase):
def _testOne(self, inputs, block_size, outputs, dtype=dtypes.float32):
input_nhwc = math_ops.cast(inputs, dtype)
with self.test_session(use_gpu=False):
# test NHWC (default) on CPU
x_tf = array_ops.space_to_depth(input_nhwc, block_size)
self.assertAllEqual(x_tf.eval(), outputs)
if test.is_gpu_available():
with self.test_session(use_gpu=True):
# test NHWC (default) on GPU
x_tf = array_ops.space_to_depth(input_nhwc, block_size)
self.assertAllEqual(x_tf.eval(), outputs)
# test NCHW on GPU
input_nchw = test_util.NHWCToNCHW(input_nhwc)
output_nchw = array_ops.space_to_depth(
input_nchw, block_size, data_format="NCHW")
output_nhwc = test_util.NCHWToNHWC(output_nchw)
self.assertAllEqual(output_nhwc.eval(), outputs)
def testBasic(self):
x_np = [[[[1], [2]], [[3], [4]]]]
block_size = 2
x_out = [[[[1, 2, 3, 4]]]]
self._testOne(x_np, block_size, x_out)<|fim▁hole|> x_np = [[[[1], [2]], [[3], [4]]]]
block_size = 2
x_out = [[[[1, 2, 3, 4]]]]
self._testOne(x_np, block_size, x_out, dtype=dtypes.float16)
# Tests for larger input dimensions. To make sure elements are
# correctly ordered spatially.
def testLargerInput2x2(self):
x_np = [[[[1], [2], [5], [6]], [[3], [4], [7], [8]],
[[9], [10], [13], [14]], [[11], [12], [15], [16]]]]
block_size = 2
x_out = [[[[1, 2, 3, 4], [5, 6, 7, 8]], [[9, 10, 11, 12],
[13, 14, 15, 16]]]]
self._testOne(x_np, block_size, x_out)
# Tests for larger input dimensions. To make sure elements are
# correctly ordered in depth. Here, larger block size.
def testLargerInput4x4(self):
x_np = [[[[1], [2], [5], [6]], [[3], [4], [7], [8]],
[[9], [10], [13], [14]], [[11], [12], [15], [16]]]]
block_size = 4
x_out = [[[[1, 2, 5, 6, 3, 4, 7, 8, 9, 10, 13, 14, 11, 12, 15, 16]]]]
self._testOne(x_np, block_size, x_out)
# Tests for larger input depths.
# To make sure elements are properly interleaved in depth.
def testDepthInterleaved(self):
x_np = [[[[1, 10], [2, 20]], [[3, 30], [4, 40]]]]
block_size = 2
x_out = [[[[1, 10, 2, 20, 3, 30, 4, 40]]]]
self._testOne(x_np, block_size, x_out)
# Tests for larger input depths. Here an odd depth.
# To make sure elements are properly interleaved in depth.
def testDepthInterleavedDepth3(self):
x_np = [[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]
block_size = 2
x_out = [[[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]]]]
self._testOne(x_np, block_size, x_out)
# Tests for larger input dimensions AND for larger input depths.
# To make sure elements are properly interleaved in depth and ordered
# spatially.
def testDepthInterleavedLarge(self):
x_np = [[[[1, 10], [2, 20], [5, 50], [6, 60]],
[[3, 30], [4, 40], [7, 70], [8, 80]],
[[9, 90], [10, 100], [13, 130], [14, 140]],
[[11, 110], [12, 120], [15, 150], [16, 160]]]]
block_size = 2
x_out = [[[[1, 10, 2, 20, 3, 30, 4, 40], [5, 50, 6, 60, 7, 70, 8, 80]],
[[9, 90, 10, 100, 11, 110, 12, 120],
[13, 130, 14, 140, 15, 150, 16, 160]]]]
self._testOne(x_np, block_size, x_out)
def testBlockSize2Batch10(self):
block_size = 2
def batch_input_elt(i):
return [[[1 * i], [2 * i], [5 * i], [6 * i]],
[[3 * i], [4 * i], [7 * i], [8 * i]],
[[9 * i], [10 * i], [13 * i], [14 * i]],
[[11 * i], [12 * i], [15 * i], [16 * i]]]
def batch_output_elt(i):
return [[[1 * i, 2 * i, 3 * i, 4 * i], [5 * i, 6 * i, 7 * i, 8 * i]],
[[9 * i, 10 * i, 11 * i, 12 * i],
[13 * i, 14 * i, 15 * i, 16 * i]]]
batch_size = 10
x_np = [batch_input_elt(i) for i in range(batch_size)]
x_out = [batch_output_elt(i) for i in range(batch_size)]
self._testOne(x_np, block_size, x_out)
def testBatchSize0(self):
block_size = 2
batch_size = 0
input_nhwc = array_ops.ones([batch_size, 4, 6, 3])
x_out = array_ops.ones([batch_size, 2, 3, 12])
with self.test_session(use_gpu=False):
# test NHWC (default) on CPU
x_tf = array_ops.space_to_depth(input_nhwc, block_size)
self.assertAllEqual(x_tf.shape, x_out.shape)
x_tf.eval()
if test.is_gpu_available():
with self.test_session(use_gpu=True):
# test NHWC (default) on GPU
x_tf = array_ops.space_to_depth(input_nhwc, block_size)
self.assertAllEqual(x_tf.shape, x_out.shape)
x_tf.eval()
# Tests for different width and height.
def testNonSquare(self):
x_np = [[[[1, 10], [2, 20]], [[3, 30], [4, 40]], [[5, 50], [6, 60]],
[[7, 70], [8, 80]], [[9, 90], [10, 100]], [[11, 110], [12, 120]]]]
block_size = 2
x_out = [[[[1, 10, 2, 20, 3, 30, 4, 40]], [[5, 50, 6, 60, 7, 70, 8, 80]],
[[9, 90, 10, 100, 11, 110, 12, 120]]]]
self._testOne(x_np, block_size, x_out)
# Error handling:
def testInputWrongDimMissingDepth(self):
# The input is missing the last dimension ("depth")
x_np = [[[1, 2], [3, 4]]]
block_size = 2
with self.assertRaises(ValueError):
out_tf = array_ops.space_to_depth(x_np, block_size)
out_tf.eval()
def testInputWrongDimMissingBatch(self):
# The input is missing the first dimension ("batch")
x_np = [[[1], [2]], [[3], [4]]]
block_size = 2
with self.assertRaises(ValueError):
_ = array_ops.space_to_depth(x_np, block_size)
def testBlockSize0(self):
# The block size is 0.
x_np = [[[[1], [2]], [[3], [4]]]]
block_size = 0
with self.assertRaises(ValueError):
out_tf = array_ops.space_to_depth(x_np, block_size)
out_tf.eval()
def testBlockSizeOne(self):
# The block size is 1. The block size needs to be > 1.
x_np = [[[[1], [2]], [[3], [4]]]]
block_size = 1
with self.assertRaises(ValueError):
out_tf = array_ops.space_to_depth(x_np, block_size)
out_tf.eval()
def testBlockSizeLarger(self):
# The block size is too large for this input.
x_np = [[[[1], [2]], [[3], [4]]]]
block_size = 10
with self.assertRaises(ValueError):
out_tf = array_ops.space_to_depth(x_np, block_size)
out_tf.eval()
def testBlockSizeNotDivisibleWidth(self):
# The block size divides width but not height.
x_np = [[[[1], [2], [3]], [[3], [4], [7]]]]
block_size = 3
with self.assertRaises(ValueError):
_ = array_ops.space_to_depth(x_np, block_size)
def testBlockSizeNotDivisibleHeight(self):
# The block size divides height but not width.
x_np = [[[[1], [2]], [[3], [4]], [[5], [6]]]]
block_size = 3
with self.assertRaises(ValueError):
_ = array_ops.space_to_depth(x_np, block_size)
def testBlockSizeNotDivisibleBoth(self):
    # The block size divides neither width nor height.
x_np = [[[[1], [2]], [[3], [4]]]]
block_size = 3
with self.assertRaises(ValueError):
_ = array_ops.space_to_depth(x_np, block_size)
def testUnknownShape(self):
t = array_ops.space_to_depth(
array_ops.placeholder(dtypes.float32), block_size=4)
self.assertEqual(4, t.get_shape().ndims)
def spaceToDepthUsingTranspose(self, tensor, block_size, data_format):
block_size_sq = block_size * block_size
if data_format == "NHWC":
b, ih, iw, ic = tensor.shape.as_list()
assert ih % block_size == 0, (ih, block_size)
assert iw % block_size == 0, (iw, block_size)
ow, oh, oc = iw // block_size, ih // block_size, ic * block_size_sq
tensor = array_ops.reshape(tensor,
[b, oh, block_size, ow, block_size, ic])
tensor = array_ops.transpose(tensor, [0, 1, 3, 2, 4, 5])
tensor = array_ops.reshape(tensor, [b, oh, ow, oc])
elif data_format == "NCHW":
b, ic, ih, iw = tensor.shape.as_list()
assert ih % block_size == 0, (ih, block_size)
assert iw % block_size == 0, (iw, block_size)
ow, oh, oc = iw // block_size, ih // block_size, ic * block_size_sq
tensor = array_ops.reshape(tensor,
[b, ic, oh, block_size, ow, block_size])
tensor = array_ops.transpose(tensor, [0, 3, 5, 1, 2, 4])
tensor = array_ops.reshape(tensor, [b, oc, oh, ow])
return tensor
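  # A minimal NumPy sketch (added for clarity, not part of the original test)
  # of the same reshape/transpose trick used in the NHWC branch above; it
  # assumes block_size evenly divides both spatial dimensions.
  def _numpySpaceToDepthSketch(self, x, block_size):
    b, h, w, c = x.shape
    oh, ow = h // block_size, w // block_size
    # Split each spatial dim into (outer, block) and gather each block's
    # pixels together into the depth dimension.
    x = x.reshape(b, oh, block_size, ow, block_size, c)
    x = x.transpose(0, 1, 3, 2, 4, 5)
    return x.reshape(b, oh, ow, c * block_size * block_size)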
def compareToTranspose(self, batch_size, out_height, out_width, in_channels,
block_size, data_format, use_gpu):
in_height = out_height * block_size
in_width = out_width * block_size
nhwc_input_shape = [batch_size, in_height, in_width, in_channels]
nchw_input_shape = [batch_size, in_channels, in_height, in_width]
total_size = np.prod(nhwc_input_shape)
if data_format == "NCHW_VECT_C":
      # Initialize the input tensor with qint8 values that cycle through -127..127.
x = [((f + 128) % 255) - 127 for f in range(total_size)]
t = constant_op.constant(x, shape=nhwc_input_shape, dtype=dtypes.float32)
expected = self.spaceToDepthUsingTranspose(t, block_size, "NHWC")
t = test_util.NHWCToNCHW_VECT_C(t)
t, _, _ = gen_array_ops.quantize_v2(t, -128.0, 127.0, dtypes.qint8)
t = array_ops.space_to_depth(t, block_size, data_format="NCHW_VECT_C")
t = gen_array_ops.dequantize(t, -128, 127)
actual = test_util.NCHW_VECT_CToNHWC(t)
else:
# Initialize the input tensor with ascending whole numbers as floats.
x = [f * 1.0 for f in range(total_size)]
shape = nchw_input_shape if data_format == "NCHW" else nhwc_input_shape
t = constant_op.constant(x, shape=shape, dtype=dtypes.float32)
expected = self.spaceToDepthUsingTranspose(t, block_size, data_format)
actual = array_ops.space_to_depth(t, block_size, data_format=data_format)
with self.test_session(use_gpu=use_gpu) as sess:
actual_vals, expected_vals = sess.run([actual, expected])
self.assertTrue(np.array_equal(actual_vals, expected_vals))
def testAgainstTranspose(self):
self.compareToTranspose(3, 2, 3, 1, 2, "NHWC", False)
self.compareToTranspose(1, 2, 3, 2, 2, "NHWC", False)
self.compareToTranspose(1, 2, 3, 2, 3, "NHWC", False)
if not test.is_gpu_available():
tf_logging.info("skipping gpu tests since gpu not available")
return
self.compareToTranspose(3, 2, 3, 1, 2, "NHWC", True)
self.compareToTranspose(3, 2, 3, 2, 2, "NHWC", True)
self.compareToTranspose(3, 2, 3, 1, 2, "NCHW", True)
self.compareToTranspose(3, 2, 3, 2, 3, "NCHW", True)
self.compareToTranspose(5, 7, 11, 3, 2, "NCHW", True)
self.compareToTranspose(3, 2, 3, 4, 2, "NCHW_VECT_C", True)
self.compareToTranspose(3, 2, 3, 8, 3, "NCHW_VECT_C", True)
self.compareToTranspose(5, 7, 11, 12, 2, "NCHW_VECT_C", True)
class SpaceToDepthGradientTest(test.TestCase):
# Check the gradients.
def _checkGrad(self, x, block_size, data_format):
# NCHW is implemented for only GPU.
if data_format == "NCHW" and not test.is_gpu_available():
return
assert 4 == x.ndim
with self.test_session(use_gpu=True):
tf_x = ops.convert_to_tensor(x)
tf_y = array_ops.space_to_depth(tf_x, block_size, data_format=data_format)
epsilon = 1e-2
((x_jacob_t, x_jacob_n)) = gradient_checker.compute_gradient(
tf_x,
x.shape,
tf_y,
tf_y.get_shape().as_list(),
x_init_value=x,
delta=epsilon)
self.assertAllClose(x_jacob_t, x_jacob_n, rtol=1e-2, atol=epsilon)
# Tests a gradient for space_to_depth of x which is a four dimensional
# tensor of shape [b, h * block_size, w * block_size, d].
def _compare(self, b, h, w, d, block_size, data_format):
block_size_sq = block_size * block_size
data = np.random.normal(0, 1, b * h * w * d * block_size_sq).astype(
np.float32)
if data_format == "NHWC":
x = data.reshape([b, h * block_size, w * block_size, d])
else:
x = data.reshape([b, d, h * block_size, w * block_size])
self._checkGrad(x, block_size, data_format)
# Don't use very large numbers as dimensions here as the result is tensor
# with cartesian product of the dimensions.
def testSmall(self):
block_size = 2
self._compare(1, 2, 3, 5, block_size, "NHWC")
self._compare(1, 2, 3, 5, block_size, "NCHW")
def testSmall2(self):
block_size = 2
self._compare(2, 4, 3, 2, block_size, "NHWC")
self._compare(2, 4, 3, 2, block_size, "NCHW")
if __name__ == "__main__":
test.main()<|fim▁end|> |
def testBasicFloat16(self): |
<|file_name|>neg.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn neg_1() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(DL)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 218], OperandSize::Word)
}
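// Decoding note (added for clarity, not in the original generator output):
// NEG r/m8 is encoded as F6 /3, so the expected bytes above are 0xF6 (246)
// followed by a ModRM byte with reg=3 and the target register in rm, e.g.
// 0xDA (218) = 0b11_011_010 selects DL in neg_1.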
fn neg_2() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectDisplaced(BP, 219, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 158, 219, 0], OperandSize::Word)
}
fn neg_3() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(BL)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 219], OperandSize::Dword)
}
fn neg_4() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectDisplaced(EBX, 970803269, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 155, 69, 72, 221, 57], OperandSize::Dword)
}
fn neg_5() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(CL)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 217], OperandSize::Qword)
}
fn neg_6() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledDisplaced(RDX, Eight, 1936396919, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 28, 213, 119, 18, 107, 115], OperandSize::Qword)
}
fn neg_7() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(BL)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 219], OperandSize::Qword)
}
fn neg_8() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledIndexedDisplaced(RSI, RDI, Four, 729260738, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[246, 156, 190, 194, 162, 119, 43], OperandSize::Qword)
}
fn neg_9() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(SI)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[247, 222], OperandSize::Word)
}
fn neg_10() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectDisplaced(BP, 168, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[247, 158, 168, 0], OperandSize::Word)
}
fn neg_11() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(BP)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 247, 221], OperandSize::Dword)
}
fn neg_12() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledIndexed(ESI, ESI, Eight, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 247, 28, 246], OperandSize::Dword)<|fim▁hole|>}
fn neg_14() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledIndexedDisplaced(RDI, RDI, Two, 1542514236, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 247, 156, 127, 60, 230, 240, 91], OperandSize::Qword)
}
fn neg_15() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(EDI)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 247, 223], OperandSize::Word)
}
fn neg_16() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledIndexed(BX, DI, One, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 247, 25], OperandSize::Word)
}
fn neg_17() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(ECX)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[247, 217], OperandSize::Dword)
}
fn neg_18() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledDisplaced(EDX, Four, 2087672817, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[247, 28, 149, 241, 91, 111, 124], OperandSize::Dword)
}
fn neg_19() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(ECX)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[247, 217], OperandSize::Qword)
}
fn neg_20() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectDisplaced(RDI, 1367383088, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[247, 159, 48, 156, 128, 81], OperandSize::Qword)
}
fn neg_21() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(RSP)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 247, 220], OperandSize::Qword)
}
fn neg_22() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(IndirectScaledDisplaced(RAX, Two, 982833472, Some(OperandSize::Qword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 247, 28, 69, 64, 217, 148, 58], OperandSize::Qword)
}<|fim▁end|> | }
fn neg_13() {
run_test(&Instruction { mnemonic: Mnemonic::NEG, operand1: Some(Direct(DI)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 247, 223], OperandSize::Qword) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from . import ir_filesystem_directory
from . import ir_filesystem_file<|fim▁end|> | # -*- coding: utf-8 -*-
# Copyright 2017 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
|
<|file_name|>test_bcrypt.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
from django import test
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.core.management import call_command
from mock import patch
from nose.tools import eq_
class BcryptTests(test.TestCase):
def setUp(self):
super(BcryptTests, self).setUp()
User.objects.create_user('john', '[email protected]',
password='123456')
User.objects.create_user('jane', '[email protected]',
password='abc')
User.objects.create_user('jude', '[email protected]',
password=u'abcéäêëôøà')
def test_bcrypt_used(self):
"""Make sure bcrypt was used as the hash."""
eq_(User.objects.get(username='john').password[:7], 'bcrypt$')
eq_(User.objects.get(username='jane').password[:7], 'bcrypt$')
eq_(User.objects.get(username='jude').password[:7], 'bcrypt$')
def test_bcrypt_auth(self):
"""Try authenticating."""
assert authenticate(username='john', password='123456')
assert authenticate(username='jane', password='abc')
assert not authenticate(username='jane', password='123456')
assert authenticate(username='jude', password=u'abcéäêëôøà')<|fim▁hole|> """With no HMAC key, no dice."""
assert not authenticate(username='john', password='123456')
assert not authenticate(username='jane', password='abc')
assert not authenticate(username='jane', password='123456')
assert not authenticate(username='jude', password=u'abcéäêëôøà')
assert not authenticate(username='jude', password=u'çççbbbààà')
def test_password_from_django14(self):
"""Test that a password generated by django_sha2 with django 1.4 is
recognized and changed to a 1.3 version"""
# We can't easily call 1.4's hashers so we hardcode the passwords as
# returned with the specific salts and hmac_key in 1.4.
prefix = 'bcrypt2011_01_01$2a$12$'
suffix = '$2011-01-01'
raw_hashes = {
'john': '02CfJWdVwLK80jlRe/Xx1u8sTHAR0JUmKV9YB4BS.Os4LK6nsoLie',
'jane': '.ipDt6gRL3CPkVH7FEyR6.8YXeQFXAMyiX3mXpDh4YDBonrdofrcG',
'jude': '6Ol.vgIFxMQw0LBhCLtv7OkV.oyJjen2GVMoiNcLnbsljSfYUkQqe',
}
u = User.objects.get(username="john")
django14_style_password = "%s%s%s" % (prefix, raw_hashes['john'],
suffix)
u.password = django14_style_password
assert u.check_password('123456')
eq_(u.password[:7], 'bcrypt$')
u = User.objects.get(username="jane")
django14_style_password = "%s%s%s" % (prefix, raw_hashes['jane'],
suffix)
u.password = django14_style_password
assert u.check_password('abc')
eq_(u.password[:7], 'bcrypt$')
u = User.objects.get(username="jude")
django14_style_password = "%s%s%s" % (prefix, raw_hashes['jude'],
suffix)
u.password = django14_style_password
assert u.check_password(u'abcéäêëôøà')
eq_(u.password[:7], 'bcrypt$')
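    # Hash-layout sketch (illustrative, based on the strings used above):
    #   1.4-style: 'bcrypt2011_01_01$' + <bcrypt hash> + '$' + <hmac key id>
    #   1.3-style: a plain 'bcrypt$' prefix, as asserted by the eq_() checks.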
def test_hmac_autoupdate(self):
"""Auto-update HMAC key if hash in DB is outdated."""
# Get HMAC key IDs to compare
old_key_id = max(settings.HMAC_KEYS.keys())
new_key_id = '2020-01-01'
# Add a new HMAC key
new_keys = settings.HMAC_KEYS.copy()
new_keys[new_key_id] = 'a_new_key'
with patch.object(settings._wrapped, 'HMAC_KEYS', new_keys):
# Make sure the database has the old key ID.
john = User.objects.get(username='john')
eq_(john.password.rsplit('$', 1)[1], old_key_id)
# Log in.
assert authenticate(username='john', password='123456')
# Make sure the DB now has a new password hash.
john = User.objects.get(username='john')
eq_(john.password.rsplit('$', 1)[1], new_key_id)
def test_rehash(self):
"""Auto-upgrade to stronger hash if needed."""
# Set a sha256 hash for a user. This one is "123".
john = User.objects.get(username='john')
john.password = ('sha256$7a49025f024ad3dcacad$aaff1abe5377ffeab6ccc68'
'709d94c1950edf11f02d8acb83c75d8fcac1ebeb1')
john.save()
# The hash should be sha256 now.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'sha256')
# Log in (should rehash transparently).
assert authenticate(username='john', password='123')
# Make sure the DB now has a bcrypt hash.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'bcrypt')
# Log in again with the new hash.
assert authenticate(username='john', password='123')
def test_management_command(self):
"""Test password update flow via management command, from default
Django hashes, to hardened hashes, to bcrypt on log in."""
john = User.objects.get(username='john')
john.password = 'sha1$3356f$9fd40318e1de9ecd3ab3a5fe944ceaf6a2897eef'
john.save()
# The hash should be sha1 now.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'sha1')
# Simulate calling management command
call_command('strengthen_user_passwords')
# The hash should be 'hh' now.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'hh')
# Logging in will convert the hardened hash to bcrypt.
assert authenticate(username='john', password='123')
# Make sure the DB now has a bcrypt hash.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'bcrypt')
# Log in again with the new hash.
assert authenticate(username='john', password='123')<|fim▁end|> | assert not authenticate(username='jude', password=u'çççbbbààà')
@patch.object(settings._wrapped, 'HMAC_KEYS', dict())
def test_nokey(self): |
<|file_name|>launcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env $PYTHON$
# Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
import os
import socket
import struct
import sys
import traceback
# See com.intellij.idea.SocketLock for the server side of this interface.
RUN_PATH = u'$RUN_PATH$'
CONFIG_PATH = u'$CONFIG_PATH$'
SYSTEM_PATH = u'$SYSTEM_PATH$'
def print_usage(cmd):
print(('Usage:\n' +
' {0} -h | -? | --help\n' +
' {0} [project_dir] [-w|--wait]\n' +
' {0} [-l|--line line] [project_dir|--temp-project] [-w|--wait] file[:line]\n' +
' {0} diff <left> <right>\n' +
' {0} merge <local> <remote> [base] <merged>').format(cmd))
def write_to_sock(sock, data):
if sys.version_info[0] >= 3:
data = data.encode('utf-8')
sock.send(struct.pack('>h', len(data)) + data)
def read_from_sock(sock):
length = struct.unpack('>h', sock.recv(2))[0]
return sock.recv(length).decode('utf-8')
def read_sequence_from_sock(sock):
result = []
while True:
try:
data = read_from_sock(sock)
if data == '---':
break
result.append(data)
except (socket.error, IOError) as e:
print("I/O error({0}): {1} ({2})".format(e.errno, e.strerror, e))
traceback.print_exception(*sys.exc_info())
break
return result
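# Wire-format note (illustrative, inferred from the helpers above): every
# message is a big-endian 16-bit length followed by that many UTF-8 bytes,
# and a sequence is terminated by the literal message '---'. For example,
# write_to_sock(sock, 'ok') transmits b'\x00\x02ok'.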
def process_args(argv):
args = []
skip_next = False
for i, arg in enumerate(argv[1:]):
if arg == '-h' or arg == '-?' or arg == '--help':<|fim▁hole|> elif i == 0 and (arg == 'diff' or arg == 'merge' or arg == '--temp-project'):
args.append(arg)
elif arg == '-l' or arg == '--line':
args.append(arg)
skip_next = True
elif arg == '-w' or arg == '--wait':
args.append('--wait')
elif arg == '-p' or arg == '--project':
args.append(arg)
elif arg == '-e' or arg == '--edit':
args.append(arg)
elif skip_next:
args.append(arg)
skip_next = False
else:
path = arg
if ':' in arg:
file_path, line_number = arg.rsplit(':', 1)
if line_number.isdigit():
args.append('-l')
args.append(line_number)
path = file_path
args.append(os.path.abspath(path))
return args
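# For example (illustrative): an argv of ['idea', 'src/main.py:42'] becomes
# ['-l', '42', '/abs/path/to/src/main.py'] via the file:line handling above.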
def try_activate_instance(args):
port_path = os.path.join(CONFIG_PATH, 'port')
token_path = os.path.join(SYSTEM_PATH, 'token')
if not (os.path.exists(port_path) and os.path.exists(token_path)):
return False
try:
with open(port_path) as pf:
port = int(pf.read())
with open(token_path) as tf:
token = tf.read()
except ValueError:
return False
s = socket.socket()
s.settimeout(1.0)
try:
s.connect(('127.0.0.1', port))
except (socket.error, IOError):
return False
paths = read_sequence_from_sock(s)
found = CONFIG_PATH in paths or os.path.realpath(CONFIG_PATH) in paths
if found:
write_to_sock(s, 'activate ' + token + '\0' + os.getcwd() + '\0' + '\0'.join(args))
s.settimeout(None)
response = read_sequence_from_sock(s)
if len(response) < 2 or response[0] != 'ok':
print('bad response: ' + str(response))
exit(1)
if len(response) > 2:
print(response[2])
exit(int(response[1]))
return False
def start_new_instance(args):
if sys.platform == 'darwin':
if len(args) > 0:
args.insert(0, '--args')
if '--wait' in args:
args.insert(0, '-W')
os.execv('/usr/bin/open', ['open', '-na', RUN_PATH] + args)
else:
bin_file = os.path.split(RUN_PATH)[1]
os.execv(RUN_PATH, [bin_file] + args)
ide_args = process_args(sys.argv)
if not try_activate_instance(ide_args):
start_new_instance(ide_args)<|fim▁end|> | print_usage(argv[0])
exit(0) |
<|file_name|>normalizer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2011 VoltDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import decimal
import re
# lame, but it matches at least up to 6 ORDER BY columns
__EXPR = re.compile(r"ORDER BY\s(\w+\.(?P<column_1>\w+)(\s+\w+)?)"
r"(,\s+\w+\.(?P<column_2>\w+)(\s+\w+)?)?"
r"(,\s+\w+\.(?P<column_3>\w+)(\s+\w+)?)?"
r"(,\s+\w+\.(?P<column_4>\w+)(\s+\w+)?)?"
r"(,\s+\w+\.(?P<column_5>\w+)(\s+\w+)?)?"
r"(,\s+\w+\.(?P<column_6>\w+)(\s+\w+)?)?")
VOLTTYPE_NULL = 1
VOLTTYPE_TINYINT = 3 # int8
VOLTTYPE_SMALLINT = 4 # int16
VOLTTYPE_INTEGER = 5 # int32
VOLTTYPE_BIGINT = 6 # int64
VOLTTYPE_FLOAT = 8 # float64
VOLTTYPE_STRING = 9
VOLTTYPE_TIMESTAMP = 11 # 8 byte long
VOLTTYPE_MONEY = 20 # 8 byte long
VOLTTYPE_DECIMAL = 22 # 9 byte long
__NULL = {VOLTTYPE_TINYINT: -128,
VOLTTYPE_SMALLINT: -32768,
VOLTTYPE_INTEGER: -2147483648,
VOLTTYPE_BIGINT: -9223372036854775808,
VOLTTYPE_FLOAT: -1.7E+308}
def normalize_value(v, type):
global __NULL
if type in __NULL and v == __NULL[type]:
return None
elif type == VOLTTYPE_FLOAT:
return round(v, 12)
elif type == VOLTTYPE_DECIMAL:
        # Use the public quantize() API rather than the private _rescale().
        return decimal.Decimal(v).quantize(decimal.Decimal("1e-12"),
                                           rounding=decimal.ROUND_HALF_EVEN)
else:
return v
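# Illustrative examples of the mapping above (not in the original script):
#   normalize_value(-128, VOLTTYPE_TINYINT) -> None (the NULL sentinel)
#   normalize_value(1.0 / 3, VOLTTYPE_FLOAT) -> 0.333333333333 (12 places)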
def normalize_values(tuples, columns):
    # 'columns[i]' here is a voltdbclient.VoltColumn and 'tuples' is
    # assumed to come from a voltdbclient.VoltTable.
if hasattr(tuples, "__iter__"):
for i in xrange(len(tuples)):
if hasattr(tuples[i], "__iter__"):
normalize_values(tuples[i], columns)
else:
tuples[i] = normalize_value(tuples[i], columns[i].type)
def filter_sorted(row, sorted_cols):
"""Extract the values in the ORDER BY columns from a row.
"""
ret = []
if not sorted_cols:
return ret
for i in sorted_cols:
ret.append(row[i])
return ret
def extract_key(sorted_cols, row):
"""Extract the values in the non-ORDERBY columns from a row.
"""
k = []
for i in xrange(len(row)):
if i not in sorted_cols:
k.append(row[i])
return k
def sort(l, sorted_cols):
"""Two steps:
1. find the subset of rows which have the same values in the ORDER BY
columns.<|fim▁hole|> begin = 0
end = 0 # exclusive
prev = None
    key = lambda x: extract_key(sorted_cols, x)
    if not sorted_cols:
        # No ORDER BY columns: every column participates in the sort.
        l[:] = sorted(l, cmp=cmp, key=key)
        return
    for i in xrange(len(l)):
        tmp = filter_sorted(l[i], sorted_cols)
if prev != tmp:
if prev is not None:
end = i
l[begin:end] = sorted(l[begin:end], cmp=cmp, key=key)
prev = tmp
begin = i
l[begin:] = sorted(l[begin:], cmp=cmp, key=key)
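# A minimal usage sketch (added for illustration): with ORDER BY on column 0,
# only rows that tie on that column are reordered, by the remaining columns:
#   rows = [[1, 'b'], [1, 'a'], [0, 'z']]
#   sort(rows, [0])  ->  [[1, 'a'], [1, 'b'], [0, 'z']]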
def parse_sql(x):
"""Finds if the SQL statement contains ORDER BY command.
"""
global __EXPR
result = __EXPR.search(x)
if result:
return filter(lambda x: x, result.groupdict().values())
else:
return None
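# For example (illustrative): parse_sql("SELECT * FROM T ORDER BY T.A, T.B")
# returns the matched column names (e.g. ['A', 'B']; dict ordering is not
# guaranteed), while a statement without ORDER BY returns None.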
def normalize(table, sql):
"""Normalizes the result tuples of ORDER BY statements.
"""
normalize_values(table.tuples, table.columns)
sort_cols = parse_sql(sql)
indices = []
if sort_cols:
for i in xrange(len(table.columns)):
if table.columns[i].name in sort_cols:
indices.append(i)
    # Make sure that, if there is an ORDER BY clause, the ORDER BY columns
    # appear in the result table. Otherwise all the columns will be sorted
    # by the normalizer.
sort(table.tuples, indices)
return table<|fim▁end|> | 2. sort them on the rest of the columns.
"""
|
<|file_name|>BullBenchThread.cpp<|end_file_name|><|fim▁begin|>#include <string.h><|fim▁hole|>#include <sys/socket.h>
#include <unistd.h>
#include <errno.h>
#include <cstdlib>
#include <queue>
#include <iostream>
#include <sstream>
#include "BullBenchThread.h"
#include "BullBench.h"
#define REQUEST_SIZE 2048
void BullBenchThread::run() {
int succ = 0;
for (;;) {
pthread_mutex_lock(&_settings.mutex);
if (succ > 0) _settings.totalSendSucc ++;
if (succ < 0) _settings.totalSendFail ++;
if (succ > 0 && _settings.totalSendSucc % 1000 == 0) {
uint64_t time = _settings.getTimeCost();
std::cout<<"send request url succ count:" << _settings.totalSendSucc
<<"\tfail count:" << _settings.totalSendFail
<<"\ttime cost: " << time / 1000000 <<" seconds, "
<< time % 1000000 << " microseconds" <<std::endl;
}
succ = 0;
if (_requestQueue.empty()) {
if (_settings.stop) {
pthread_mutex_unlock(&_settings.mutex);
                pthread_exit(NULL);
}
pthread_cond_broadcast(&_settings.emptyCond);
pthread_cond_wait(&_settings.fullCond, &_settings.mutex);
pthread_mutex_unlock(&_settings.mutex);
continue;
}
std::string url = _requestQueue.front();
_requestQueue.pop();
pthread_mutex_unlock(&_settings.mutex);
char request[REQUEST_SIZE];
buildRequest(request, url);
int rlen = strlen(request);
int sock = _getSocket();
if (sock < 0) {
std::stringstream ss;
ss << "Error: sock fail:"
<< _settings.domainName << ':' << _settings.port
<< "\t" << strerror(errno) << std::endl;
std::cerr << ss.str();
succ = -1;
continue;
}
if(rlen!=write(sock,request,rlen)) {
close(sock);
std::stringstream ss;
ss << "Error: write fail :" << std::endl;
std::cerr << ss.str();
succ = -1;
continue;
}
// Read all
char response[8192];
while (read(sock,response,8192) > 0) {
// do nothing;
}
close(sock);
succ = 1;
} // end for
}
void BullBenchThread::buildRequest(char* request,std::string& uri) {
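    // Illustrative example (not in the original source): for uri "/index.html"
    // against host "example.com" on port 80, this function produces roughly:
    //   GET /index.html HTTP/1.1\r\n
    //   User-Agent: <PROGRAM_NAME> <PROGRAM_VERSION>\r\n
    //   Host: example.com\r\n
    //   Pragma: no-cache\r\n
    //   Connection: close\r\n
    //   Keep-Alive: timeout=20\r\n
    //   \r\n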
    // Note: equivalent to memset-ing the buffer to zero ('\0').
bzero(request,REQUEST_SIZE);
strcpy(request,"GET");
strcat(request," ");
if (_settings.host.empty()) {
strcat(request, (char *)uri.c_str());
} else {
strcat(request, (char *)_settings.urlPrefix.c_str());
strcat(request, (char *)uri.c_str());
}
    // Determine which HTTP version to use.
strcat(request," HTTP/1.1");
strcat(request,"\r\n");
strcat(request,"User-Agent: "PROGRAM_NAME" "PROGRAM_VERSION"\r\n");
strcat(request,"Host: ");
if (_settings.host.empty()) {
if (80 == _settings.port) {
strcat(request, _settings.domainName.c_str());
} else {
strcat(request, (_settings.domainName + ":" + _settings.portString).c_str());
}
} else {
strcat(request, _settings.host.c_str());
}
strcat(request,"\r\n");
strcat(request,"Pragma: no-cache\r\n");
strcat(request,"Connection: close\r\n");
//strcat(request,"Connection: keep-alive\r\n");
strcat(request,"Keep-Alive: timeout=20\r\n");
strcat(request,"\r\n");
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export goCommand from './goCommand'<|fim▁hole|>export goReducer from './goReducer'
export parseBestmove from './parseBestmove'
export parseId from './parseId'
export parseInfo from './parseInfo'
export parseOption from './parseOption'
export initReducer from './initReducer'<|fim▁end|> | |
<|file_name|>localEulerDdtScheme.C<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | Website: https://openfoam.org
\\ / A nd | Copyright (C) 2011-2021 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "localEulerDdtScheme.H"
#include "surfaceInterpolate.H"
#include "fvMatrices.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace fv
{
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
template<class Type>
const volScalarField& localEulerDdtScheme<Type>::localRDeltaT() const
{
return localEulerDdt::localRDeltaT(mesh());
}
template<class Type>
const surfaceScalarField& localEulerDdtScheme<Type>::localRDeltaTf() const
{
return localEulerDdt::localRDeltaTf(mesh());
}
template<class Type>
tmp<GeometricField<Type, fvPatchField, volMesh>>
localEulerDdtScheme<Type>::fvcDdt
(
const dimensioned<Type>& dt
)
{
const word ddtName("ddt(" + dt.name() + ')');
return tmp<GeometricField<Type, fvPatchField, volMesh>>
(
GeometricField<Type, fvPatchField, volMesh>::New
(
ddtName,
mesh(),
dimensioned<Type>
(
"0",
dt.dimensions()/dimTime,
Zero
),
calculatedFvPatchField<Type>::typeName
)
);
}
template<class Type>
tmp<GeometricField<Type, fvPatchField, volMesh>>
localEulerDdtScheme<Type>::fvcDdt
(
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
const volScalarField& rDeltaT = localRDeltaT();
const word ddtName("ddt(" + vf.name() + ')');
return tmp<GeometricField<Type, fvPatchField, volMesh>>
(
GeometricField<Type, fvPatchField, volMesh>::New
(
ddtName,
rDeltaT*(vf - vf.oldTime())
)
);
}
<|fim▁hole|>(
const dimensionedScalar& rho,
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
const volScalarField& rDeltaT = localRDeltaT();
const word ddtName("ddt(" + rho.name() + ',' + vf.name() + ')');
return tmp<GeometricField<Type, fvPatchField, volMesh>>
(
GeometricField<Type, fvPatchField, volMesh>::New
(
ddtName,
rDeltaT*rho*(vf - vf.oldTime())
)
);
}
template<class Type>
tmp<GeometricField<Type, fvPatchField, volMesh>>
localEulerDdtScheme<Type>::fvcDdt
(
const volScalarField& rho,
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
const volScalarField& rDeltaT = localRDeltaT();
const word ddtName("ddt(" + rho.name() + ',' + vf.name() + ')');
return tmp<GeometricField<Type, fvPatchField, volMesh>>
(
GeometricField<Type, fvPatchField, volMesh>::New
(
ddtName,
rDeltaT*(rho*vf - rho.oldTime()*vf.oldTime())
)
);
}
template<class Type>
tmp<GeometricField<Type, fvPatchField, volMesh>>
localEulerDdtScheme<Type>::fvcDdt
(
const volScalarField& alpha,
const volScalarField& rho,
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
const volScalarField& rDeltaT = localRDeltaT();
const word ddtName("ddt("+alpha.name()+','+rho.name()+','+vf.name()+')');
return tmp<GeometricField<Type, fvPatchField, volMesh>>
(
GeometricField<Type, fvPatchField, volMesh>::New
(
ddtName,
rDeltaT
*(
alpha*rho*vf
- alpha.oldTime()*rho.oldTime()*vf.oldTime()
)
)
);
}
template<class Type>
tmp<GeometricField<Type, fvsPatchField, surfaceMesh>>
localEulerDdtScheme<Type>::fvcDdt
(
const GeometricField<Type, fvsPatchField, surfaceMesh>& sf
)
{
const surfaceScalarField& rDeltaT = localRDeltaTf();
const word ddtName("ddt("+sf.name()+')');
return GeometricField<Type, fvsPatchField, surfaceMesh>::New
(
ddtName,
rDeltaT*(sf - sf.oldTime())
);
}
template<class Type>
tmp<fvMatrix<Type>>
localEulerDdtScheme<Type>::fvmDdt
(
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
tmp<fvMatrix<Type>> tfvm
(
new fvMatrix<Type>
(
vf,
vf.dimensions()*dimVol/dimTime
)
);
fvMatrix<Type>& fvm = tfvm.ref();
const scalarField& rDeltaT = localRDeltaT();
fvm.diag() = rDeltaT*mesh().Vsc();
fvm.source() = rDeltaT*vf.oldTime().primitiveField()*mesh().Vsc();
return tfvm;
}
template<class Type>
tmp<fvMatrix<Type>>
localEulerDdtScheme<Type>::fvmDdt
(
const dimensionedScalar& rho,
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
tmp<fvMatrix<Type>> tfvm
(
new fvMatrix<Type>
(
vf,
rho.dimensions()*vf.dimensions()*dimVol/dimTime
)
);
fvMatrix<Type>& fvm = tfvm.ref();
const scalarField& rDeltaT = localRDeltaT();
fvm.diag() = rDeltaT*rho.value()*mesh().Vsc();
fvm.source() =
rDeltaT*rho.value()*vf.oldTime().primitiveField()*mesh().Vsc();
return tfvm;
}
template<class Type>
tmp<fvMatrix<Type>>
localEulerDdtScheme<Type>::fvmDdt
(
const volScalarField& rho,
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
tmp<fvMatrix<Type>> tfvm
(
new fvMatrix<Type>
(
vf,
rho.dimensions()*vf.dimensions()*dimVol/dimTime
)
);
fvMatrix<Type>& fvm = tfvm.ref();
const scalarField& rDeltaT = localRDeltaT();
fvm.diag() = rDeltaT*rho.primitiveField()*mesh().Vsc();
fvm.source() = rDeltaT
*rho.oldTime().primitiveField()
*vf.oldTime().primitiveField()*mesh().Vsc();
return tfvm;
}
template<class Type>
tmp<fvMatrix<Type>>
localEulerDdtScheme<Type>::fvmDdt
(
const volScalarField& alpha,
const volScalarField& rho,
const GeometricField<Type, fvPatchField, volMesh>& vf
)
{
tmp<fvMatrix<Type>> tfvm
(
new fvMatrix<Type>
(
vf,
alpha.dimensions()*rho.dimensions()*vf.dimensions()*dimVol/dimTime
)
);
fvMatrix<Type>& fvm = tfvm.ref();
const scalarField& rDeltaT = localRDeltaT();
fvm.diag() =
rDeltaT*alpha.primitiveField()*rho.primitiveField()*mesh().Vsc();
fvm.source() = rDeltaT
*alpha.oldTime().primitiveField()
*rho.oldTime().primitiveField()
*vf.oldTime().primitiveField()*mesh().Vsc();
return tfvm;
}
/*
// Courant number limited formulation
template<class Type>
tmp<surfaceScalarField> localEulerDdtScheme<Type>::fvcDdtPhiCoeff
(
const GeometricField<Type, fvPatchField, volMesh>& U,
const fluxFieldType& phi,
const fluxFieldType& phiCorr
)
{
// Courant number limited formulation
tmp<surfaceScalarField> tddtCouplingCoeff = scalar(1)
- min
(
mag(phiCorr)*mesh().deltaCoeffs()
/(fvc::interpolate(localRDeltaT())*mesh().magSf()),
scalar(1)
);
surfaceScalarField& ddtCouplingCoeff = tddtCouplingCoeff.ref();
surfaceScalarField::Boundary& ccbf = ddtCouplingCoeff.boundaryFieldRef();
forAll(U.boundaryField(), patchi)
{
if
(
U.boundaryField()[patchi].fixesValue()
|| isA<cyclicAMIFvPatch>(mesh().boundary()[patchi])
)
{
ccbf[patchi] = 0.0;
}
}
if (debug > 1)
{
InfoInFunction
<< "ddtCouplingCoeff mean max min = "
<< gAverage(ddtCouplingCoeff.primitiveField())
<< " " << gMax(ddtCouplingCoeff.primitiveField())
<< " " << gMin(ddtCouplingCoeff.primitiveField())
<< endl;
}
return tddtCouplingCoeff;
}
*/
template<class Type>
tmp<typename localEulerDdtScheme<Type>::fluxFieldType>
localEulerDdtScheme<Type>::fvcDdtUfCorr
(
const GeometricField<Type, fvPatchField, volMesh>& U,
const GeometricField<Type, fvsPatchField, surfaceMesh>& Uf
)
{
const surfaceScalarField rDeltaT(fvc::interpolate(localRDeltaT()));
fluxFieldType phiUf0(mesh().Sf() & Uf.oldTime());
fluxFieldType phiCorr
(
phiUf0 - fvc::dotInterpolate(mesh().Sf(), U.oldTime())
);
return tmp<fluxFieldType>
(
new fluxFieldType
(
IOobject
(
"ddtCorr(" + U.name() + ',' + Uf.name() + ')',
mesh().time().timeName(),
mesh()
),
this->fvcDdtPhiCoeff(U.oldTime(), phiUf0, phiCorr)
*rDeltaT*phiCorr
)
);
}
template<class Type>
tmp<typename localEulerDdtScheme<Type>::fluxFieldType>
localEulerDdtScheme<Type>::fvcDdtPhiCorr
(
const GeometricField<Type, fvPatchField, volMesh>& U,
const fluxFieldType& phi
)
{
const surfaceScalarField rDeltaT(fvc::interpolate(localRDeltaT()));
fluxFieldType phiCorr
(
phi.oldTime() - fvc::dotInterpolate(mesh().Sf(), U.oldTime())
);
return tmp<fluxFieldType>
(
new fluxFieldType
(
IOobject
(
"ddtCorr(" + U.name() + ',' + phi.name() + ')',
mesh().time().timeName(),
mesh()
),
this->fvcDdtPhiCoeff(U.oldTime(), phi.oldTime(), phiCorr)
*rDeltaT*phiCorr
)
);
}
template<class Type>
tmp<typename localEulerDdtScheme<Type>::fluxFieldType>
localEulerDdtScheme<Type>::fvcDdtUfCorr
(
const volScalarField& rho,
const GeometricField<Type, fvPatchField, volMesh>& U,
const GeometricField<Type, fvsPatchField, surfaceMesh>& Uf
)
{
const surfaceScalarField rDeltaT(fvc::interpolate(localRDeltaT()));
if
(
U.dimensions() == dimVelocity
&& Uf.dimensions() == dimDensity*dimVelocity
)
{
GeometricField<Type, fvPatchField, volMesh> rhoU0
(
rho.oldTime()*U.oldTime()
);
fluxFieldType phiUf0(mesh().Sf() & Uf.oldTime());
fluxFieldType phiCorr(phiUf0 - fvc::dotInterpolate(mesh().Sf(), rhoU0));
return tmp<fluxFieldType>
(
new fluxFieldType
(
IOobject
(
"ddtCorr("
+ rho.name() + ',' + U.name() + ',' + Uf.name() + ')',
mesh().time().timeName(),
mesh()
),
this->fvcDdtPhiCoeff(rhoU0, phiUf0, phiCorr, rho.oldTime())
*rDeltaT*phiCorr
)
);
}
else if
(
U.dimensions() == dimDensity*dimVelocity
&& Uf.dimensions() == dimDensity*dimVelocity
)
{
fluxFieldType phiUf0(mesh().Sf() & Uf.oldTime());
fluxFieldType phiCorr
(
phiUf0 - fvc::dotInterpolate(mesh().Sf(), U.oldTime())
);
return tmp<fluxFieldType>
(
new fluxFieldType
(
IOobject
(
"ddtCorr("
+ rho.name() + ',' + U.name() + ',' + Uf.name() + ')',
mesh().time().timeName(),
mesh()
),
this->fvcDdtPhiCoeff
(
U.oldTime(),
phiUf0,
phiCorr,
rho.oldTime()
)*rDeltaT*phiCorr
)
);
}
else
{
FatalErrorInFunction
<< "dimensions of Uf are not correct"
<< abort(FatalError);
return fluxFieldType::null();
}
}
template<class Type>
tmp<typename localEulerDdtScheme<Type>::fluxFieldType>
localEulerDdtScheme<Type>::fvcDdtPhiCorr
(
const volScalarField& rho,
const GeometricField<Type, fvPatchField, volMesh>& U,
const fluxFieldType& phi
)
{
const surfaceScalarField rDeltaT(fvc::interpolate(localRDeltaT()));
if
(
U.dimensions() == dimVelocity
&& phi.dimensions() == rho.dimensions()*dimFlux
)
{
GeometricField<Type, fvPatchField, volMesh> rhoU0
(
rho.oldTime()*U.oldTime()
);
fluxFieldType phiCorr
(
phi.oldTime() - fvc::dotInterpolate(mesh().Sf(), rhoU0)
);
return tmp<fluxFieldType>
(
new fluxFieldType
(
IOobject
(
"ddtCorr("
+ rho.name() + ',' + U.name() + ',' + phi.name() + ')',
mesh().time().timeName(),
mesh()
),
this->fvcDdtPhiCoeff
(
rhoU0,
phi.oldTime(),
phiCorr,
rho.oldTime()
)*rDeltaT*phiCorr
)
);
}
else if
(
U.dimensions() == rho.dimensions()*dimVelocity
&& phi.dimensions() == rho.dimensions()*dimFlux
)
{
fluxFieldType phiCorr
(
phi.oldTime() - fvc::dotInterpolate(mesh().Sf(), U.oldTime())
);
return tmp<fluxFieldType>
(
new fluxFieldType
(
IOobject
(
"ddtCorr("
+ rho.name() + ',' + U.name() + ',' + phi.name() + ')',
mesh().time().timeName(),
mesh()
),
this->fvcDdtPhiCoeff
(
U.oldTime(),
phi.oldTime(),
phiCorr,
rho.oldTime()
)*rDeltaT*phiCorr
)
);
}
else
{
FatalErrorInFunction
<< "dimensions of phi are not correct"
<< abort(FatalError);
return fluxFieldType::null();
}
}
template<class Type>
tmp<surfaceScalarField> localEulerDdtScheme<Type>::meshPhi
(
const GeometricField<Type, fvPatchField, volMesh>&
)
{
return surfaceScalarField::New
(
"meshPhi",
mesh(),
dimensionedScalar(dimVolume/dimTime, 0)
);
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace fv
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// ************************************************************************* //<|fim▁end|> | template<class Type>
tmp<GeometricField<Type, fvPatchField, volMesh>>
localEulerDdtScheme<Type>::fvcDdt |
<|file_name|>pull.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
This script can be used to automatically pull translations from Transifex,
commit, push, and merge them to their respective repos.
To use, export an environment variable `GITHUB_ACCESS_TOKEN`. The token requires
GitHub's "repo" scope.
Run the script from the root of this repo.
python transifex/pull.py [email protected]:edx/course-discovery.git
If you want to use a custom merge method pass the --merge-method option.
python transifex/pull.py [email protected]:edx/course-discovery.git --merge-method rebase
If you want to skip the compile messages step, pass the --skip-compilemessages option.
python transifex/pull.py [email protected]:edx/course-discovery.git --skip-compilemessages
"""
import os
import shutil
from argparse import ArgumentParser
from utils import DEFAULT_MERGE_METHOD, MERGE_METHODS, logger, repo_context
# The name of the branch to use.
BRANCH_NAME = 'transifex-bot-update-translations'
# The commit message to use.
MESSAGE = 'chore(i18n): update translations'
# Environment variable needed to run paver compilejsi18n command
os.environ['LMS_CFG'] = '../lms.yml'
os.environ['STUDIO_CFG'] = '../studio.yml'<|fim▁hole|># Configuration repo to fetch lms/studio settings
CONFIGURATION_REPO_URL = 'https://github.com/edx/configuration.git'
def pull(clone_url, repo_owner, merge_method=DEFAULT_MERGE_METHOD, skip_compilemessages=False,
skip_check_changes=False):
"""Pulls translations for the given repo.
If applicable, commits them, pushes them to GitHub, opens a PR, waits for
status checks to pass, then merges the PR and deletes the branch.
"""
with repo_context(CONFIGURATION_REPO_URL, repo_owner, BRANCH_NAME, MESSAGE, merge_method=merge_method) as config_repo:
logger.info('Pulling lms/studio settings from [%s].', config_repo.name)
shutil.copy('./docker/build/edxapp/lms.yml', '../')
shutil.copy('./docker/build/edxapp/studio.yml', '../')
with repo_context(clone_url, repo_owner, BRANCH_NAME, MESSAGE, merge_method=merge_method) as repo:
logger.info('Pulling translations for [%s].', repo.name)
repo.pull_translations()
if skip_compilemessages:
logger.info('Skipping compilemessages.')
else:
compilemessages_succeeded = repo.compilemessages()
repo.commit_push_and_open_pr(skip_check_changes)
if repo.pr:
if not (skip_compilemessages or compilemessages_succeeded):
# Notify the team that message compilation failed.
repo.pr.create_issue_comment(
'@{owner} failing message compilation prevents this PR from being automatically merged. '
'Refer to the build log for more details.'.format(
owner=repo.owner
)
)
# Fail job immediately, without trying to merge the PR. We don't
# want to merge PRs without compiled messages.
raise RuntimeError('Failed to compile messages.')
repo.merge_pr()
def parse_arguments():
parser = ArgumentParser()
parser.add_argument(
'clone_url',
help='URL to use to clone the repository.'
)
parser.add_argument(
'repo_owner',
help='This is the user/team that will be pinged when errors occur.'
)
parser.add_argument(
'--merge-method',
choices=MERGE_METHODS,
default=DEFAULT_MERGE_METHOD,
help='Method to use when merging the PR. See https://developer.github.com/v3/pulls/#merge-a-pull-request-merge-button for details.'
)
parser.add_argument(
'--skip-compilemessages',
action='store_true',
help='Skip the message compilation step.'
)
parser.add_argument(
'--skip-check-changes',
action='store_true',
help='Skip the check changes step.'
)
return parser.parse_args()
if __name__ == '__main__':
args = parse_arguments()
pull(
args.clone_url,
args.repo_owner,
merge_method=args.merge_method,
skip_compilemessages=args.skip_compilemessages,
skip_check_changes=args.skip_check_changes,
)<|fim▁end|> | os.environ['REVISION_CFG'] = ''
os.environ['SKIP_NPM_INSTALL'] = 'True'
os.environ['LANG'] = 'C.UTF-8'
|
<|file_name|>debug.py<|end_file_name|><|fim▁begin|>"""
External serialization for testing remote module loading.
"""
from tiddlyweb.serializations import SerializationInterface
class Serialization(SerializationInterface):
def list_recipes(self, recipes):
print recipes
def list_bags(self, bags):
print bags
def recipe_as(self, recipe):
print "r_as: %s" % recipe
def as_recipe(self, recipe, input):<|fim▁hole|>
def as_bag(self, bag, input):
print "as_b: %s" % input
def tiddler_as(self, tiddler):
print "t_as: %s" % tiddler
def as_tiddler(self, tiddler, input):
print "as_t: %s" % input<|fim▁end|> | print "as_r: %s" % input
def bag_as(self, bag):
print "b_as: %s" % bag |
<|file_name|>decider.go<|end_file_name|><|fim▁begin|>// Copyright © 2016 NAME HERE <EMAIL ADDRESS>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
<|fim▁hole|> "fmt"
"github.com/fanux/pbrain/common"
"github.com/fanux/pbrain/plugins/decider"
"github.com/spf13/cobra"
)
// deciderCmd represents the decider command
var deciderCmd = &cobra.Command{
Use: "decider",
	Short: "scale apps by app metrics",
	Long:  `app metrics are load-balancer info, or CPU/memory usage info`,
Run: func(cmd *cobra.Command, args []string) {
// Work your own magic here
fmt.Println("decider called")
basePlugin := common.GetBasePlugin(ManagerHost, ManagerPort, decider.PLUGIN_NAME)
RunPlugin(&decider.Decider{basePlugin, nil})
},
}
func init() {
RootCmd.AddCommand(deciderCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// deciderCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// deciderCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}<|fim▁end|> | package cmd
import ( |
<|file_name|>sql_util.py<|end_file_name|><|fim▁begin|>def table_exists(curs, table_name=''):
'''
    If table_name is a schema-qualified table name and it exists, it is
    returned. If it is not schema-qualified but the table exists in the
    search path, that schema-qualified table name is returned. Otherwise,
    None is returned.
'''
curs.execute('SELECT pgpartitioner.table_exists(%s)', (table_name,))
return curs.fetchone()
def get_column_type(curs, table_name, column_name):
'''
If column_name exists on table_name it's SQL type is returned. Else an
exception is raised.
'''
curs.execute('SELECT pgpartitioner.get_column_type(%s, %s);', (table_name, column_name))
return curs.fetchone()[0]
def get_constraint_defs(curs, table_name, fkeys=True):
'''
Returns a list of constraint definition fragments suitable for use
in SQL create table or alter table statements. fkeys are not included if
fkeys is false
'''
curs.execute('SELECT * FROM pgpartitioner.get_table_constraint_defs(%s, %s);', (table_name, fkeys))
return [res[0] for res in curs.fetchall()]
def get_index_defs(curs, table_name):
'''
Returns a list of 2-tuples consisting of each index creation def statement<|fim▁hole|> '''
curs.execute('SELECT * FROM pgpartitioner.get_table_index_defs(%s);', (table_name,))
return [res[0] for res in curs.fetchall()]
def table_attributes(curs, table_name):
'''
Returns a tuple of the given table's attributes
'''
curs.execute('SELECT * FROM pgpartitioner.get_table_attributes(%s);', (table_name,))
atts = tuple([res[0] for res in curs.fetchall()])
return atts
def normalize_date(curs, date_str, fmt, units='month', diff='0 months'):
'''
Takes a valid date string in any format and formats it according to fmt.
'''
normalize_date_sql = \
'''
SELECT to_char(date_trunc(%s, %s::timestamp + %s), %s);
'''
curs.execute(normalize_date_sql, (units, date_str, diff, fmt))
return curs.fetchone()[0]<|fim▁end|> | for any non-primary key or unique indexes on the given table and the
index name. |
<|file_name|>dispatcher_test.go<|end_file_name|><|fim▁begin|>package app
import (
"bytes"
"github.com/ahmedkamals/foo-protocol-proxy/analysis"
"github.com/ahmedkamals/foo-protocol-proxy/config"
"github.com/ahmedkamals/foo-protocol-proxy/handlers"
"github.com/ahmedkamals/foo-protocol-proxy/testingutil"
"github.com/stretchr/testify/assert"
"log"
"net/http"
"os"
"testing"
)<|fim▁hole|>func TestShouldGetDispatcherRoutesCorrectly(t *testing.T) {
t.Parallel()
analyzer := analysis.NewAnalyzer()
routes := getRoutes(analyzer)
saver := getMockedSaver()
dispatcher := NewDispatcher(config.Configuration{}, analyzer, saver)
testCases := []testingutil.TestCase{
{
ID: "Dispatcher routes",
Input: dispatcher,
Expected: routes,
},
}
for _, testCase := range testCases {
input := testCase.Input.(*Dispatcher)
expected := testCase.Expected.(map[string]http.Handler)
actual := input.getRoutes()
assert.Len(t, actual, len(expected))
for key, val := range actual {
assert.NotEmpty(t, actual[key])
assert.Implements(t, (*http.Handler)(nil), val)
}
}
}
func TestShouldBlockIndefinitely(t *testing.T) {
t.Parallel()
testCases := []testingutil.TestCase{
{
ID: "Connection forwarding",
Input: make(chan os.Signal, 1),
Expected: true,
},
}
configuration := config.Configuration{}
analyzer := analysis.NewAnalyzer()
saver := getMockedSaver()
dispatcher := NewDispatcher(configuration, analyzer, saver)
for _, testCase := range testCases {
input := testCase.Input.(chan os.Signal)
expected := testCase.Expected.(bool)
input <- os.Interrupt
actual := dispatcher.blockIndefinitely(input, true)
assert.Equal(t, expected, actual)
}
}
func getRoutes(analyzer *analysis.Analyzer) map[string]http.Handler {
var buf bytes.Buffer
eventLogger := log.New(&buf, "", log.Ldate)
return map[string]http.Handler{
"/metrics": handlers.NewMetricsHandler(analyzer, eventLogger),
"/stats": handlers.NewMetricsHandler(analyzer, eventLogger),
"/health": handlers.NewHealthHandler(eventLogger),
"/heartbeat": handlers.NewHealthHandler(eventLogger),
"/status": handlers.NewHealthHandler(eventLogger),
}
}<|fim▁end|> | |
<|file_name|>scalar.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use test::{black_box, Bencher};
use tipb::ScalarFuncSig;
fn get_scalar_args_with_match(sig: ScalarFuncSig) -> (usize, usize) {
// Only select some functions to benchmark
let (min_args, max_args) = match sig {
ScalarFuncSig::LtInt => (2, 2),
ScalarFuncSig::CastIntAsInt => (1, 1),
ScalarFuncSig::IfInt => (3, 3),
ScalarFuncSig::JsonArraySig => (0, usize::MAX),
ScalarFuncSig::CoalesceDecimal => (1, usize::MAX),
ScalarFuncSig::JsonExtractSig => (2, usize::MAX),
ScalarFuncSig::JsonSetSig => (3, usize::MAX),
_ => (0, 0),
};
(min_args, max_args)
}
fn init_scalar_args_map() -> HashMap<ScalarFuncSig, (usize, usize)> {
let mut m: HashMap<ScalarFuncSig, (usize, usize)> = HashMap::default();
let tbls = vec![
(ScalarFuncSig::LtInt, (2, 2)),
(ScalarFuncSig::CastIntAsInt, (1, 1)),
(ScalarFuncSig::IfInt, (3, 3)),
(ScalarFuncSig::JsonArraySig, (0, usize::MAX)),
(ScalarFuncSig::CoalesceDecimal, (1, usize::MAX)),
(ScalarFuncSig::JsonExtractSig, (2, usize::MAX)),
(ScalarFuncSig::JsonSetSig, (3, usize::MAX)),
(ScalarFuncSig::Acos, (0, 0)),
];
for tbl in tbls {
m.insert(tbl.0, tbl.1);
}
m
}
fn get_scalar_args_with_map(
m: &HashMap<ScalarFuncSig, (usize, usize)>,
sig: ScalarFuncSig,
) -> (usize, usize) {
if let Some((min_args, max_args)) = m.get(&sig).cloned() {
return (min_args, max_args);
}
(0, 0)
}
#[bench]
fn bench_get_scalar_args_with_match(b: &mut Bencher) {
b.iter(|| {
for _ in 0..1000 {
black_box(get_scalar_args_with_match(black_box(ScalarFuncSig::AbsInt)));
}
})
}
#[bench]
fn bench_get_scalar_args_with_map(b: &mut Bencher) {
let m = init_scalar_args_map();
b.iter(|| {
for _ in 0..1000 {
black_box(get_scalar_args_with_map(
black_box(&m),
black_box(ScalarFuncSig::AbsInt),
));
}
})
}<|fim▁end|> | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use collections::HashMap;
use std::usize; |
<|file_name|>layout.js<|end_file_name|><|fim▁begin|>import React from 'react'
import DocumentTitle from 'react-document-title'
import ReactHeight from 'react-height'
import Header from './header/header'
import Content from './content/content'
import Footer from './footer/footer'
import { APP_NAME } from '../constants'
class Layout extends React.Component {
<|fim▁hole|> flex: 'none'
}
const contentStyle = {
flex: 1
}
const containerStyle = {
display: 'flex',
minHeight: window.innerHeight,
flexDirection: 'column'
}
return (
<div style={containerStyle}>
<DocumentTitle title={APP_NAME}/>
<Header style={hfStyle}/>
<Content style={contentStyle}/>
<Footer style={hfStyle}/>
</div>
)
}
}
export default Layout<|fim▁end|> | render() {
const hfStyle = { |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import pkg_resources
from datetime import datetime
import re
from inbox.api.validation import (
get_recipients, get_attachments, get_thread, get_message)
from inbox.api.err import InputError
from inbox.contacts.process_mail import update_contacts_from_message
from inbox.models import Message, Part
from inbox.models.action_log import schedule_action
from inbox.sqlalchemy_ext.util import generate_public_id
VERSION = pkg_resources.get_distribution('inbox-sync').version
class SendMailException(Exception):
"""
Raised when sending fails.
Parameters
----------
message: string
A descriptive error message.
http_code: int
An appropriate HTTP error code for the particular type of failure.
server_error: string, optional
The error returned by the mail server.
failures: dict, optional
If sending only failed for some recipients, information on the specific
failures.
"""
def __init__(self, message, http_code, server_error=None, failures=None):
self.message = message
self.http_code = http_code
self.server_error = server_error
self.failures = failures
super(SendMailException, self).__init__(
message, http_code, server_error, failures)
def get_sendmail_client(account):
from inbox.sendmail import module_registry
sendmail_mod = module_registry.get(account.provider)
sendmail_cls = getattr(sendmail_mod, sendmail_mod.SENDMAIL_CLS)
sendmail_client = sendmail_cls(account)
return sendmail_client
def create_draft_from_mime(account, raw_mime, db_session):
our_uid = generate_public_id() # base-36 encoded string
new_headers = ('X-INBOX-ID: {0}-0\r\n'
'Message-Id: <{0}[email protected]>\r\n'
'User-Agent: NylasMailer/{1}\r\n').format(our_uid, VERSION)
new_body = new_headers + raw_mime
with db_session.no_autoflush:
msg = Message.create_from_synced(account, '', '',
datetime.utcnow(), new_body)
if msg.from_addr and len(msg.from_addr) > 1:
raise InputError("from_addr field can have at most one item")
if msg.reply_to and len(msg.reply_to) > 1:
raise InputError("reply_to field can have at most one item")
if msg.subject is not None and not \
isinstance(msg.subject, basestring):
raise InputError('"subject" should be a string')
if not isinstance(msg.body, basestring):
raise InputError('"body" should be a string')
if msg.references or msg.in_reply_to:
msg.is_reply = True
thread_cls = account.thread_cls
msg.thread = thread_cls(
subject=msg.subject,
recentdate=msg.received_date,
namespace=account.namespace,
subjectdate=msg.received_date)
msg.is_created = True
msg.is_sent = True
msg.is_draft = False
msg.is_read = True
db_session.add(msg)
db_session.flush()
return msg
def block_to_part(block, message, namespace):
inline_image_uri = r'cid:{}'.format(block.public_id)
is_inline = re.search(inline_image_uri, message.body) is not None
# Create a new Part object to associate to the message object.
# (You can't just set block.message, because if block is an
# attachment on an existing message, that would dissociate it from
# the existing message.)
part = Part(block=block)
part.content_id = block.public_id if is_inline else None
part.namespace_id = namespace.id
part.content_disposition = 'inline' if is_inline else 'attachment'
part.is_inboxapp_attachment = True
return part
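# Illustrative example of the inline-vs-attachment decision above (names are
# hypothetical): if message.body contains '<img src="cid:abc123">' and
# block.public_id == 'abc123', the cid: search matches, so the part is created
# with content_disposition 'inline' and a content_id; otherwise it becomes a
# plain 'attachment' with content_id set to None.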
def create_message_from_json(data, namespace, db_session, is_draft):
""" Construct a Message instance from `data`, a dictionary representing the
POST body of an API request. All new objects are added to the session, but
not committed."""
# Validate the input and get referenced objects (thread, attachments)
# as necessary.
to_addr = get_recipients(data.get('to'), 'to')
cc_addr = get_recipients(data.get('cc'), 'cc')
bcc_addr = get_recipients(data.get('bcc'), 'bcc')
from_addr = get_recipients(data.get('from'), 'from')
reply_to = get_recipients(data.get('reply_to'), 'reply_to')
if from_addr and len(from_addr) > 1:
raise InputError("from_addr field can have at most one item")
if reply_to and len(reply_to) > 1:
raise InputError("reply_to field can have at most one item")
subject = data.get('subject')
if subject is not None and not isinstance(subject, basestring):
raise InputError('"subject" should be a string')
body = data.get('body', '')
if not isinstance(body, basestring):
raise InputError('"body" should be a string')
blocks = get_attachments(data.get('file_ids'), namespace.id, db_session)
reply_to_thread = get_thread(data.get('thread_id'), namespace.id,
db_session)
reply_to_message = get_message(data.get('reply_to_message_id'),
namespace.id, db_session)
if reply_to_message is not None and reply_to_thread is not None:
if reply_to_message not in reply_to_thread.messages:
raise InputError('Message {} is not in thread {}'.
format(reply_to_message.public_id,
reply_to_thread.public_id))
with db_session.no_autoflush:
account = namespace.account
dt = datetime.utcnow()
uid = generate_public_id()
to_addr = to_addr or []
cc_addr = cc_addr or []
bcc_addr = bcc_addr or []
blocks = blocks or []
if subject is None:
# If this is a reply with no explicitly specified subject, set the
# subject from the prior message/thread by default.
# TODO(emfree): Do we want to allow changing the subject on a reply
# at all?
if reply_to_message is not None:
subject = reply_to_message.subject
elif reply_to_thread is not None:
subject = reply_to_thread.subject
subject = subject or ''
message = Message()
message.namespace = namespace
message.is_created = True
message.is_draft = is_draft
message.from_addr = from_addr if from_addr else \
[(account.name, account.email_address)]
# TODO(emfree): we should maybe make received_date nullable, so its
# value doesn't change in the case of a drafted-and-later-reconciled
# message.
message.received_date = dt
message.subject = subject
message.body = body
message.to_addr = to_addr
message.cc_addr = cc_addr
message.bcc_addr = bcc_addr
message.reply_to = reply_to
# TODO(emfree): this is different from the normal 'size' value of a
# message, which is the size of the entire MIME message.
message.size = len(body)
message.is_read = True
message.is_sent = False
message.public_id = uid
message.version = 0
message.regenerate_nylas_uid()
# Set the snippet
message.snippet = message.calculate_html_snippet(body)
# Associate attachments to the draft message
for block in blocks:
message.parts.append(block_to_part(block, message, namespace))
update_contacts_from_message(db_session, message, namespace)
if reply_to_message is not None:
message.is_reply = True
_set_reply_headers(message, reply_to_message)
thread = reply_to_message.thread
message.reply_to_message = reply_to_message
elif reply_to_thread is not None:
message.is_reply = True
thread = reply_to_thread
# Construct the in-reply-to and references headers from the last
# message currently in the thread.
previous_messages = [m for m in thread.messages if not m.is_draft]
if previous_messages:
last_message = previous_messages[-1]
message.reply_to_message = last_message
_set_reply_headers(message, last_message)
else:
# If this isn't a reply to anything, create a new thread object for
# the draft. We specialize the thread class so that we can, for
# example, add the g_thrid for Gmail later if we reconcile a synced
# message with this one. This is a huge hack, but works.
message.is_reply = False
thread_cls = account.thread_cls
thread = thread_cls(
subject=message.subject,
recentdate=message.received_date,
namespace=namespace,
subjectdate=message.received_date)
message.thread = thread
db_session.add(message)
if is_draft:
schedule_action('save_draft', message, namespace.id, db_session,
version=message.version)
db_session.flush()
return message
def update_draft(db_session, account, draft, to_addr=None,
subject=None, body=None, blocks=None, cc_addr=None,
bcc_addr=None, from_addr=None, reply_to=None):
"""
Update draft with new attributes.
"""
def update(attr, value=None):
if value is not None:
setattr(draft, attr, value)
if attr == 'body':
# Update size, snippet too
draft.size = len(value)
draft.snippet = draft.calculate_html_snippet(
value)
update('to_addr', to_addr)
update('cc_addr', cc_addr)
update('bcc_addr', bcc_addr)
update('reply_to', reply_to)
update('from_addr', from_addr)
update('subject', subject if subject else None)
update('body', body if body else None)
update('received_date', datetime.utcnow())
# Remove any attachments that aren't specified
new_block_ids = [b.id for b in blocks]
for part in filter(lambda x: x.block_id not in new_block_ids,
draft.parts):
draft.parts.remove(part)
db_session.delete(part)
# Parts require special handling
for block in blocks:
# Don't re-add attachments that are already attached
if block.id in [p.block_id for p in draft.parts]:
continue
draft.parts.append(block_to_part(block, draft, account.namespace))
thread = draft.thread
if len(thread.messages) == 1:
# If there are no prior messages on the thread, update its subject and
# dates to match the draft.
thread.subject = draft.subject
thread.subjectdate = draft.received_date
thread.recentdate = draft.received_date
# Remove previous message-contact associations, and create new ones.
draft.contacts = []
update_contacts_from_message(db_session, draft, account.namespace)
# The draft we're updating may or may not be one authored through the API:
# - Ours: is_created = True, Message-Id = public_id+version
# - Not Ours: is_created = False, Message-Id = ???
# Mark that the draft is now created by us
draft.is_created = True
# Save the current Message-Id so we know which draft to delete in syncback
old_message_id_header = draft.message_id_header
# Increment version and rebuild the message ID header.
draft.version += 1
draft.regenerate_nylas_uid()
# Sync to remote
schedule_action('update_draft', draft, draft.namespace.id, db_session,
version=draft.version,
old_message_id_header=old_message_id_header)
db_session.commit()
return draft
def delete_draft(db_session, account, draft):
""" Delete the given draft. """
thread = draft.thread
assert draft.is_draft
# Delete remotely.
schedule_action('delete_draft', draft, draft.namespace.id, db_session,
nylas_uid=draft.nylas_uid,
message_id_header=draft.message_id_header)
db_session.delete(draft)
# Delete the thread if it would now be empty.
if not thread.messages:
db_session.delete(thread)
db_session.commit()
def generate_attachments(message, blocks):
attachment_dicts = []
for block in blocks:
content_disposition = 'attachment'
for part in block.parts:
if part.message_id == message.id and part.content_disposition == 'inline':
content_disposition = 'inline'
break
attachment_dicts.append({
'block_id': block.public_id,
'filename': block.filename,
'data': block.data,
'content_type': block.content_type,
'content_disposition': content_disposition,
})<|fim▁hole|>
def _set_reply_headers(new_message, previous_message):
"""When creating a draft in reply to a thread, set the In-Reply-To and
References headers appropriately, if possible."""
if previous_message.message_id_header:
new_message.in_reply_to = previous_message.message_id_header
if previous_message.references:
new_message.references = (previous_message.references +
[previous_message.message_id_header])
else:
new_message.references = [previous_message.message_id_header]<|fim▁end|> | return attachment_dicts |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>jQuery(document).ready(function(){
jQuery('.carousel').carousel()
var FPS = 30;
var player = $('#player')
var pWidth = player.width();
    var $window = $(window)
var wWidth = $window.width();
setInterval(function() {
update();
}, 1000/FPS);
function update() {
if(keydown.space) {
player.shoot();
}
if(keydown.left) {
console.log('go left')
player.css('left', '-=10');
}
if(keydown.right) {
console.log('go right')
var x = player.position().left;
if(x + pWidth > wWidth)
{
player.css('left', '0')
}
else if(x < 0 )
{
var p = wWidth + x - pWidth;
var t = p + 'px'
player.css('left', t)
}
else {
player.css('left', '+=10');
}
}
}
$('')<|fim▁hole|><|fim▁end|> |
}) |
<|file_name|>linkedlist_2.py<|end_file_name|><|fim▁begin|>class Node:
def __init__(self,data):
self.data = data
self.next = None<|fim▁hole|> def __init__(self):
self.head = None
def push(self, newdata): #in front
newnode = Node(newdata)
newnode.next = self.head
self.head = newnode
def insertAfter(self, prevnode, newdata):
newnode = Node(newdata)
newnode.next = prevnode.next
prevnode.next = newnode
def append(self, newdata): #at end
newnode = Node(newdata)
#if list is empty
if self.head is None:
self.head = newnode
return
#if not
temp = self.head
while(temp.next):
temp = temp.next
temp.next = newnode
    def deleteNode(self, key): #first one found
        temp = self.head
        # if the head itself holds the key, unlink it directly
        if temp is not None and temp.data == key:
            self.head = temp.next
            return
        prev = None
        while (temp is not None):
            print 'Now at:', temp.data
            if temp.data == key:
                print 'Found', key, 'at', temp.data
                break
            prev = temp
            temp = temp.next
        if (temp == None): return
        prev.next = temp.next
        temp = None
def deletepos(self, pos): #specific position
temp = self.head
for i in xrange(0,pos-1):
prev = temp
temp = temp.next
if(temp == None): return
prev.next = temp.next
temp = None
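    # Example: for the list 5 -> 4 -> 3 -> 2 -> 1 built in main() below,
    # deletepos(3) advances two hops, unlinks the third node (value 3), and
    # leaves 5 -> 4 -> 2 -> 1; positions are 1-indexed.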
def printList(self):
temp = self.head
while(temp):
print " %d" %(temp.data)
temp = temp.next
def main():
print 'hello'
ll = LinkedList()
ll.push(1)
ll.push(2)
ll.push(3)
ll.push(4)
ll.push(5)
print "Created Linked List: "
ll.printList()
ll.deletepos(3)
print "\nLinked List after Deletion of 3:"
ll.printList()
if __name__ == '__main__':
main()<|fim▁end|> |
class LinkedList: |
<|file_name|>test_twelve_tone.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from twelve_tone.cli import main
def test_main():
runner = CliRunner()
result = runner.invoke(main, [])
assert result.exit_code == 0<|fim▁end|> | from click.testing import CliRunner
|
<|file_name|>eventSIPHeaderField.py<|end_file_name|><|fim▁begin|>try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from ...sipmessaging import SIPHeaderField
from ...sipmessaging import classproperty
class EventSIPHeaderField(SIPHeaderField):
# noinspection PyNestedDecorators
@classproperty
@classmethod
def canonical_field_name(cls):
return 'Event'
# noinspection PyNestedDecorators
@classproperty
@classmethod
def canonical_compact_field_name(cls):
return 'o'
@classmethod
def new_for_attributes(cls, field_name="Event", field_value_string=""):<|fim▁hole|>
@property
def is_event(self):
return True
# http://www.iana.org/assignments/sip-parameters/sip-parameters.xhtml#sip-parameters-2
@property
def adaptive_min_rate(self):
return self.parameter_named('adaptive-min-rate')
@adaptive_min_rate.setter
def adaptive_min_rate(self, a_string):
self.parameter_named_put('adaptive-min-rate', a_string)
@property
def body(self):
return self.parameter_named('body')
@body.setter
def body(self, a_string):
self.parameter_named_put('body', a_string)
@property
def call_id(self):
return self.parameter_named('call-id')
@call_id.setter
def call_id(self, a_string):
self.parameter_named_put('call-id', a_string)
@property
def effective_by(self):
return self.parameter_named('effective-by')
@effective_by.setter
def effective_by(self, a_string):
self.parameter_named_put('effective-by', a_string)
@property
def from_tag(self):
return self.parameter_named('from-tag')
@from_tag.setter
def from_tag(self, a_string):
self.parameter_named_put('from-tag', a_string)
@property
def id(self):
return self.parameter_named('id')
@id.setter
def id(self, a_string):
self.parameter_named_put('id', a_string)
@property
def include_session_description(self):
return self.parameter_named('include-session-description')
@include_session_description.setter
def include_session_description(self, a_string):
self.parameter_named_put('include-session-description', a_string)
@property
def max_rate(self):
return self.parameter_named('max-rate')
@max_rate.setter
def max_rate(self, a_string):
self.parameter_named_put('max-rate', a_string)
@property
def min_rate(self):
return self.parameter_named('min-rate')
@min_rate.setter
def min_rate(self, a_string):
self.parameter_named_put('min-rate', a_string)
@property
def model(self):
return self.parameter_named('model')
@model.setter
def model(self, a_string):
self.parameter_named_put('model', a_string)
@property
def profile_type(self):
return self.parameter_named('profile-type')
@profile_type.setter
def profile_type(self, a_string):
self.parameter_named_put('profile-type', a_string)
@property
def shared(self):
return self.parameter_named('shared')
@shared.setter
def shared(self, a_string):
self.parameter_named_put('shared', a_string)
@property
def to_tag(self):
return self.parameter_named('to-tag')
@to_tag.setter
def to_tag(self, a_string):
self.parameter_named_put('to-tag', a_string)
@property
def vendor(self):
return self.parameter_named('vendor')
@vendor.setter
def vendor(self, a_string):
self.parameter_named_put('vendor', a_string)
@property
def version(self):
return self.parameter_named('version')
@version.setter
def version(self, a_string):
self.parameter_named_put('version', a_string)<|fim▁end|> | return cls.new_for_field_name_and_value_string(field_name=field_name, field_value_string=field_value_string) |
<|file_name|>day_14.rs<|end_file_name|><|fim▁begin|>use tdd_kata::string_calc_kata::iter_1::day_14::evaluate;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_eval_simple_num() {
assert_eq!(evaluate("1"), Ok(1.0));
}
#[test]
fn test_eval_three_digit_num() {
assert_eq!(evaluate("256"), Ok(256.0));
}
#[test]
fn test_eval_real_num() {
assert_eq!(evaluate("125.256"), Ok(125.256));
}
#[test]
fn test_eval_add() {
assert_eq!(evaluate("1+2"), Ok(3.0));<|fim▁hole|> fn test_eval_sub() {
assert_eq!(evaluate("3-1"), Ok(2.0));
}
#[test]
fn test_eval_few_operations() {
assert_eq!(evaluate("2+3-1+4"), Ok(8.0));
}
#[test]
fn test_eval_mul() {
assert_eq!(evaluate("2×5"), Ok(10.0));
}
#[test]
fn test_eval_div() {
assert_eq!(evaluate("10÷2"), Ok(5.0));
}
#[test]
fn test_eval_operations_with_diff_priority() {
assert_eq!(evaluate("20+2×5-100÷4"), Ok(5.0));
}
#[test]
fn test_eval_operations_with_parentheses() {
assert_eq!(evaluate("2+(2-3+5×2)-8"), Ok(3.0));
}
#[test]
fn test_eval_operations_with_two_levels_of_parentheses() {
assert_eq!(evaluate("2+(2-3+5×2)-((1+1)×4)"), Ok(3.0));
}
}<|fim▁end|> | }
#[test] |
<|file_name|>intermediate_catch_event_type.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
Class used for representing tIntermediateCatchEvent of BPMN 2.0 graph
"""
import graph.classes.events.catch_event_type as catch_event
class IntermediateCatchEvent(catch_event.CatchEvent):
"""
Class used for representing tIntermediateCatchEvent of BPMN 2.0 graph
"""
def __init__(self):
"""
Default constructor, initializes object fields with new instances.<|fim▁hole|><|fim▁end|> | """
super(IntermediateCatchEvent, self).__init__() |
<|file_name|>template.go<|end_file_name|><|fim▁begin|>package main
import (
"compress/gzip"
"crypto/md5"
"encoding/hex"
"html/template"
"log"
"net/http"
"strings"
"time"
"github.com/dustin/go-humanize"
)
var tmpl = template.Must(template.New("").Funcs(template.FuncMap{
"User": UserByID,
"Comma": func(n interface{}) string {
if x, ok := n.(uint64); ok {
return humanize.Comma(int64(x))
}
return humanize.Comma(n.(int64))
},
"RelTime": humanize.Time,
"MD5": func(s string) string {
hash := md5.New()
hash.Write([]byte(s))
return hex.EncodeToString(hash.Sum(nil))
},
"ToLower": strings.ToLower,
"ToUpper": strings.ToUpper,
}).ParseGlob("tmpl/*.html"))
type TmplMeta struct {
SiteTitle string
Title string
URL string
LoggedIn *User
IsLoginPage bool
}
type TmplIndex struct {
Meta *TmplMeta
Forums []*Forum
}
type TmplLogin struct {
Meta *TmplMeta
User string
Error string
}
type TmplRegister struct {
Meta *TmplMeta
User string
Email string
Error string
}
type TmplForum struct {
Meta *TmplMeta
Forum *Forum
Topics []*Topic
}
type TmplTopic struct {
Meta *TmplMeta
Forum *Forum
Topic *Topic
Posts []*Post
}
func GetTmplMeta(r *http.Request) *TmplMeta {
m := &TmplMeta{}
if err := Bucket.Get("meta/siteTitle", &m.SiteTitle); err != nil {
m.SiteTitle = "Forum"
}
m.URL = r.URL.String()
m.LoggedIn, _ = UserByCookie(r)
return m
}
func (m *TmplMeta) SetTitle(title string) *TmplMeta {
m.Title = title
return m
}
func ShowTemplate(w http.ResponseWriter, r *http.Request, file string, data interface{}, status int) {
w, gzipClose := maybeGzip(w, r)
defer gzipClose()
w.Header().Set("Content-Type", "text/html; charset=utf-8")
w.WriteHeader(status)
err := tmpl.ExecuteTemplate(w, file, data)
if err != nil {
log.Println(r.URL, err)
}
}
func init() {
http.Handle("/favicon.ico", http.RedirectHandler("/static/favicon.ico", http.StatusMovedPermanently))
fs := http.FileServer(http.Dir("tmpl/"))
http.HandleFunc("/static/", func(w http.ResponseWriter, r *http.Request) {
// forbid directory indexes
if r.URL.Path[len(r.URL.Path)-1] == '/' {
http.Error(w, "", http.StatusForbidden)
return
}
// add expires a year in the future
w.Header().Add("Expires", time.Now().AddDate(1, 0, 0).Format(http.TimeFormat))
// gzip, perhaps?
w, gzipClose := maybeGzip(w, r)
defer gzipClose()
fs.ServeHTTP(w, r)
})
}
type gzipWriter struct {
http.ResponseWriter
w *gzip.Writer
}
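// gzipWriter keeps the wrapped ResponseWriter for headers and status codes
// but diverts the body through the gzip stream; maybeGzip below returns
// g.Close as the cleanup func so callers (see the deferred gzipClose calls
// above) flush the stream when the handler finishes.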
func (g *gzipWriter) Write(b []byte) (int, error) {
return g.w.Write(b)
}
func maybeGzip(w http.ResponseWriter, r *http.Request) (http.ResponseWriter, func() error) {
if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") && w.Header().Get("Content-Encoding") == "" {
g, err := gzip.NewWriterLevel(w, gzip.BestSpeed)
if err != nil {
panic(err)
}
w.Header().Set("Content-Encoding", "gzip")
return &gzipWriter{w, g}, g.Close<|fim▁hole|><|fim▁end|> | }
return w, func() error { return nil }
} |
<|file_name|>get_code_stats.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# $Id: get_code_stats.py 9318 2011-06-10 02:37:10Z nathan_george $
#
# Proprietary and confidential.
# Copyright $Date:: 2011#$ Perfect Search Corporation.
# All rights reserved.
#
import sys
import os
import re
import optparse
import math
buildscriptDir = os.path.dirname(__file__)
buildscriptDir = os.path.abspath(os.path.join(buildscriptDir, os.path.pardir))
sys.path.append(buildscriptDir)
import sandbox
import codescan<|fim▁hole|>import xmail
import metadata
from ioutil import *
EXT_PAT = metadata.INTERESTING_EXT_PAT
FROM = 'Code Stat Scanner <[email protected]>'
parser = optparse.OptionParser('Usage: %prog [options] [folder]\n\nCompiles stats about a code base; optionally emails report.')
xmail.addMailOptions(parser)
def getRelevantPaths(p):
relevant = []
if not p.endswith('/'):
relevant.append(p)
while p:
i = p.rfind('/')
if i == -1:
relevant.append('')
break
else:
p = p[0:i+1]
relevant.append(p)
p = p[0:-1]
return relevant
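# Illustrative trace (hypothetical path): getRelevantPaths('apps/core/util.py')
# returns ['apps/core/util.py', 'apps/core/', 'apps/', ''] - the file itself
# plus every ancestor folder up to the root - which is what lets addStat()
# aggregate a file's numbers into all of its parent components.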
def getValuesKeyName(key):
return '[' + key + ']'
def isValuesKeyName(key):
return key[0] == '['
class StatsHolder:
def __init__(self, rootPath):
rootPath = norm_folder(rootPath)
self.rootPath = rootPath
self.statsByPath = {}
self.statsByExtension = {}
def getSandboxName(self):
i = self.rootPath.find('/sandboxes/')
if i != -1:
x = self.rootPath[i + 11:]
i = x.find('/code')
if i > -1:
x = x[0:i]
i = x.rfind('/')
if i > -1:
x = x[0:i]
return x
else:
return self.rootPath
def getRelativePath(self, path):
endsWithSlash = path.endswith('/')
path = os.path.abspath(path).replace('\\', '/')
# abspath() removes trailing slash; undo
if endsWithSlash and path[-1] != '/':
path = path + '/'
return path[len(self.rootPath):]
def addStat(self, path, statName, number):
shouldAggregate = not path.endswith('/')
if shouldAggregate:
k = getValuesKeyName(statName)
dict = self.statsByExtension
ignored, ext = os.path.splitext(path)
#print('ext = %s' % ext)
#sys.exit(0)
if not ext in dict:
dict[ext] = {}
dict = dict[ext]
if not statName in dict:
dict[statName] = number
dict[k] = [number]
else:
dict[statName] = dict[statName] + number
dict[k].append(number)
relativePath = self.getRelativePath(path)
sbp = self.statsByPath
for p in getRelevantPaths(relativePath):
if not p in sbp:
sbp[p] = {}
dict = sbp[p]
if not statName in dict:
dict[statName] = number
if shouldAggregate:
#print('aggregating %s for %s', (k, p))
dict[k] = [number]
else:
dict[statName] = dict[statName] + number
if shouldAggregate:
dict[k].append(number)
_CPP_TESTNAME_PAT = re.compile(r'^\s*(SIMPLE_TEST\s*\(\s*(.*?)\s*\)|class\s+([a-zA-Z_0-9]+)\s*:\s*(public|protected|private)\s+[a-zA-Z_0-9]+Test)', re.MULTILINE | re.DOTALL)
_JAVA_TESTNAME_PAT = re.compile(r'^\s*public\s+void\s+([a-zA-Z_0-9]+)\s*\(', re.MULTILINE | re.DOTALL)
_PY_TESTNAME_PAT = re.compile(r'^\s*def test([a-zA-Z_0-9]+)\s*\(\s*self\s*\)\s*:', re.MULTILINE | re.DOTALL)
_CPP_CLASS_PAT = re.compile(r'^\s*(template\s*<.*?>\s*)?(class|struct|union)\s+([a-zA-Z_0-9]+)', re.MULTILINE | re.DOTALL)
_JAVA_CLASS_PAT = re.compile(r'^\s*((abstract|public|private|protected|static|final)\s+)*(class|interface)\s+([a-zA-Z_0-9]+)', re.MULTILINE | re.DOTALL)
_PY_CLASS_PAT = re.compile(r'^\s*class\s+([a-zA-Z_0-9]+).*?:', re.MULTILINE | re.DOTALL)
_TEST_FILE_PAT = re.compile(r'/test/', re.IGNORECASE)
_CLASS_PATS = [_CPP_CLASS_PAT, _JAVA_CLASS_PAT, _PY_CLASS_PAT]
_TESTNAME_PATS = [_CPP_TESTNAME_PAT, _JAVA_TESTNAME_PAT, _PY_TESTNAME_PAT]
def getFileTypeIndex(path):
path = path.lower()
if path.endswith('.cpp') or path.endswith('.h'):
return 0
elif path.endswith('.java'):
return 1
elif path.endswith('.py'):
return 2
return -1
def getClassPatForPath(path):
i = getFileTypeIndex(path)
if i != -1:
return _CLASS_PATS[i]
def getTestnamePatForPath(path):
i = getFileTypeIndex(path)
if i != -1:
return _TESTNAME_PATS[i]
def analyzeFile(fpath, stats):
fpath = os.path.abspath(fpath)
rel = stats.getRelativePath(fpath)
#print('analyzing %s' % rel)
txt = read_file(fpath)
byteCount = len(txt)
stats.addStat(fpath, 'byte count, impl + test', byteCount)
lineCount = codescan.getLineNumForOffset(txt, byteCount)
stats.addStat(fpath, 'line count, impl + test', lineCount)
isTest = bool(_TEST_FILE_PAT.search(fpath))
codeType = 'impl'
if isTest:
codeType = 'test'
stats.addStat(fpath, 'byte count, ' + codeType, byteCount)
stats.addStat(fpath, 'line count, ' + codeType, lineCount)
# See if we know how to do any further analysis on this file.
pat = getClassPatForPath(fpath)
if pat:
if isTest:
pat = getTestnamePatForPath(fpath)
if pat:
stats.addStat(fpath, 'unit test count', len(pat.findall(txt)))
else:
stats.addStat(fpath, 'class count', len(pat.findall(txt)))
def statPathIsFile(p):
i = p.rfind('.')
if i > -1:
return p[i+1:] in ['cpp','h','java','py']
return False
def statPathIsComponent(p):
return p == '' or (p.endswith('/') and p.find('/') == len(p) - 1)
_FLOAT_TYPE = type(0.1)
def getReportLine(key, number, showKB=False, formatSpecifier='%0.2f'):
numtxt = number
ntype = type(number)
if ntype == _FLOAT_TYPE:
numtxt = formatSpecifier % number
if numtxt.endswith('00'):
numtxt = numtxt[0:-3]
else:
numtxt = str(number)
line = '%s = %s' % (key, numtxt)
if showKB:
line += ' (%0.0f KB)' % (number / 1024.0)
return line
def getAggregateStats(dict, key):
values = dict.get(getValuesKeyName(key))
avg = mean(values)
stdev = stddev(values)
return avg, stdev
def describeTestRatio(ratio, multiplier = 1.0):
if ratio < 0.085 * multiplier:
lbl = 'POOR COVERAGE'
elif ratio < 0.20 * multiplier:
lbl = 'fair coverage'
elif ratio < 0.5 * multiplier:
lbl = 'good coverage'
else:
lbl = 'excellent coverage'
return '%0.2f (%s)' % (ratio, lbl)
def generateReport(stats):
#print(stats.statsByPath)
report = ''
components = [p for p in stats.statsByPath.keys() if statPathIsComponent(p)]
files = [p for p in stats.statsByPath.keys() if statPathIsFile(p)]
components.sort()
files.sort()
uberDict = stats.statsByPath['']
avg, stdev = getAggregateStats(uberDict, 'byte count, impl')
tooBigs = {'': max(avg + 2.5 * stdev, 20000)}
avg, stdev = getAggregateStats(uberDict, 'line count, impl')
tooLongs = {'': max(avg + 2.5 * stdev, 1000)}
for ext in stats.statsByExtension.keys():
dict = stats.statsByExtension[ext]
avg, stdev = getAggregateStats(dict, 'byte count, impl')
tooBigs[ext] = avg + 2.5 * stdev
avg, stdev = getAggregateStats(dict, 'line count, impl')
tooLongs[ext] = max(avg + 2.5 * stdev, 1000)
for path in components:
desc = path
if desc == '':
desc = 'entire folder tree'
report += '\nStats for %s' % desc
dict = stats.statsByPath[path]
keys = [k for k in dict.keys() if not isValuesKeyName(k)]
keys.sort()
for key in keys:
showKB = key.startswith('byte')
report += '\n ' + getReportLine(key, dict[key], showKB)
if showKB or key.startswith('line'):
values = dict[getValuesKeyName(key)]
avg = mean(values)
report += '; ' + getReportLine('mean', avg, showKB, formatSpecifier='%0.0f')
report += '; ' + getReportLine('std dev', stddev(values), False, formatSpecifier='%0.1f')
classCount = dict.get('class count', 0)
unitTestCount = dict.get('unit test count', 0)
if unitTestCount:
implLineCount = dict.get('line count, impl', 0)
testLineCount = dict.get('line count, test', 0)
if implLineCount:
ratio = describeTestRatio(testLineCount / float(implLineCount))
report += '\n ' + getReportLine('test lines per impl line', ratio)
implByteCount = dict.get('byte count, impl', 0)
testByteCount = dict.get('byte count, test', 0)
if implByteCount:
ratio = describeTestRatio(testByteCount / float(implByteCount))
report += '\n ' + getReportLine('test bytes per impl byte', ratio)
if classCount:
ratio = describeTestRatio(float(unitTestCount) / classCount, 2.5)
else:
ratio = '(undefined; no classes)'
else:
ratio = 'NO UNIT TESTS!'
report += '\n ' + getReportLine('tests per class', ratio)
if path:
myFiles = [f for f in files if f.startswith(path)]
#testFiles = [f for f in myFiles if _TEST_FILE_PAT.search(f)]
#implFiles = [f for f in myFiles if not _TEST_FILE_PAT.search(f)]
tooComplex = []
for implF in myFiles:
ignored, ext = os.path.splitext(implF)
size = stats.statsByPath[implF].get('byte count, impl')
length = stats.statsByPath[implF].get('line count, impl')
if size > tooBigs[''] or size > tooBigs[ext] or length > tooLongs[''] or length > tooLongs[ext]:
tooComplex.append((implF, size, length))
if tooComplex:
# Java doesn't support partial classes, so splitting classes into multiple
# files isn't always practical. In C++ and python, however, there are good
# ways to split into smaller files.
if tooComplex[0][0].endswith('.java'):
comment = 'refactor suggested'
else:
comment = 'REFACTOR NEEDED'
report += '\n unusually complex files (%s):' % comment
for tc in tooComplex:
report += '\n %s (%0.0f KB, %d lines)' % (tc[0], tc[1] / 1024.0, tc[2])
report += '\n'
return report
def sum(numbers):
n = 0
for x in numbers:
n += x
return n
def mean(numbers):
return sum(numbers) / float(len(numbers))
def variance(numbers):
avg = mean(numbers)
diffsFromMean = [n - avg for n in numbers]
squaredDfm = [n * n for n in diffsFromMean]
variance = sum(squaredDfm) / len(numbers)
return variance
def stddev(numbers):
# This is a *population* stddev, not a sample stddev.
# The difference is that we assume we have all possible
# values, not just a representative sample.
return math.sqrt(variance(numbers))
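# Quick sanity check of the population formulas above: for
# [2, 4, 4, 4, 5, 5, 7, 9] the mean is 5, variance() returns 4.0, and
# stddev() returns 2.0 (a sample stddev would divide by n - 1 instead).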
class StatsRecurser:
def __init__(self, stats):
self.stats = stats
def select(self, folder, dirs):
self.stats.addStat(folder, "scanned subdir count", len(dirs))
return dirs
class StatsVisitor:
def __init__(self, stats):
self.stats = stats
def visit(self, folder, item, relativePath):
analyzeFile(folder + item, self.stats)
self.stats.addStat(folder, "scanned file count", 1)
def analyze(path, prebuilt, options):
if not os.path.isdir(path):
sys.stderr.write('%s is not a valid folder.\n' % path)
return 1
path = norm_folder(path)
stats = StatsHolder(path)
print('\nCompiling stats for %s...' % metadata.get_friendly_name_for_path(path))
visitor = StatsVisitor(stats)
recurser = StatsRecurser(stats)
visitedFiles, visitedFolders = metadata.visit(path, visitor, recurser, excludePrograms=True)#, debug=True)
report = generateReport(stats)
print(report)
if xmail.hasDest(options):
xmail.sendmail(report, subject='code stats for %s' % metadata.get_friendly_name_for_path(path),
sender='Code Stat Scanner <[email protected]>', options=options)
if __name__ == '__main__':
options, args = parser.parse_args()
prebuilt = []
if args:
folder = args[0]
else:
folder = sandbox.current.get_code_root()
exitCode = analyze(folder, prebuilt, options)
sys.exit(exitCode)<|fim▁end|> | |
<|file_name|>typings.d.ts<|end_file_name|><|fim▁begin|>// Typings reference file, you can add your own global typings here
// https://www.typescriptlang.org/docs/handbook/writing-declaration-files.html
declare var System: any;<|fim▁hole|><|fim▁end|> | declare module "file-saver"; |
<|file_name|>retryhandler.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import random
import functools
import logging
from binascii import crc32
from botocore.vendored.requests import ConnectionError, Timeout
from botocore.vendored.requests.packages.urllib3.exceptions import ClosedPoolError
from botocore.exceptions import ChecksumError, EndpointConnectionError
logger = logging.getLogger(__name__)
# The only supported error for now is GENERAL_CONNECTION_ERROR
# which maps to requests generic ConnectionError. If we're able
# to get more specific exceptions from requests we can update
# this mapping with more specific exceptions.
EXCEPTION_MAP = {
'GENERAL_CONNECTION_ERROR': [
ConnectionError, ClosedPoolError, Timeout,
EndpointConnectionError
],
}
def delay_exponential(base, growth_factor, attempts):
"""Calculate time to sleep based on exponential function.
The format is::
base * growth_factor ^ (attempts - 1)
If ``base`` is set to 'rand' then a random number between
0 and 1 will be used as the base.
Base must be greater than 0, otherwise a ValueError will be
raised.
"""
if base == 'rand':
base = random.random()
elif base <= 0:
raise ValueError("The 'base' param must be greater than 0, "
"got: %s" % base)
time_to_sleep = base * (growth_factor ** (attempts - 1))
return time_to_sleep
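# For example (values chosen purely for illustration):
#   delay_exponential(1, 2, 1) -> 1, delay_exponential(1, 2, 2) -> 2,
#   delay_exponential(1, 2, 3) -> 4 (seconds); with base='rand' the whole
#   curve is scaled by a random factor in [0, 1).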
def create_exponential_delay_function(base, growth_factor):
"""Create an exponential delay function based on the attempts.
This is used so that you only have to pass it the attempts
parameter to calculate the delay.
"""
return functools.partial(
delay_exponential, base=base, growth_factor=growth_factor)
def create_retry_handler(config, operation_name=None):
checker = create_checker_from_retry_config(
config, operation_name=operation_name)
action = create_retry_action_from_config(
config, operation_name=operation_name)
return RetryHandler(checker=checker, action=action)
def create_retry_action_from_config(config, operation_name=None):
# The spec has the possibility of supporting per policy
# actions, but right now, we assume this comes from the
# default section, which means that delay functions apply
# for every policy in the retry config (per service).
delay_config = config['__default__']['delay']
if delay_config['type'] == 'exponential':
return create_exponential_delay_function(
base=delay_config['base'],
growth_factor=delay_config['growth_factor'])
def create_checker_from_retry_config(config, operation_name=None):
checkers = []
max_attempts = None
retryable_exceptions = []
if '__default__' in config:
policies = config['__default__'].get('policies', [])
max_attempts = config['__default__']['max_attempts']
for key in policies:
current_config = policies[key]
checkers.append(_create_single_checker(current_config))
retry_exception = _extract_retryable_exception(current_config)
if retry_exception is not None:
retryable_exceptions.extend(retry_exception)
if operation_name is not None and config.get(operation_name) is not None:
operation_policies = config[operation_name]['policies']
for key in operation_policies:
checkers.append(_create_single_checker(operation_policies[key]))
retry_exception = _extract_retryable_exception(
operation_policies[key])
if retry_exception is not None:
retryable_exceptions.extend(retry_exception)
if len(checkers) == 1:
# Don't need to use a MultiChecker
return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts)
else:
multi_checker = MultiChecker(checkers)
return MaxAttemptsDecorator(
multi_checker, max_attempts=max_attempts,
retryable_exceptions=tuple(retryable_exceptions))
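# A minimal sketch of the config shape this parser walks (keys inferred from
# the lookups above; the concrete values are illustrative, not from a real
# service model):
# {
#     '__default__': {
#         'max_attempts': 5,
#         'delay': {'type': 'exponential', 'base': 'rand', 'growth_factor': 2},
#         'policies': {
#             'general_socket_errors': {
#                 'applies_when': {'socket_errors': ['GENERAL_CONNECTION_ERROR']}},
#             'throttling': {
#                 'applies_when': {'response': {
#                     'http_status_code': 400,
#                     'service_error_code': 'Throttling'}}}}}}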
def _create_single_checker(config):
if 'response' in config['applies_when']:
return _create_single_response_checker(
config['applies_when']['response'])
elif 'socket_errors' in config['applies_when']:
return ExceptionRaiser()
def _create_single_response_checker(response):
if 'service_error_code' in response:
checker = ServiceErrorCodeChecker(
status_code=response['http_status_code'],
error_code=response['service_error_code'])
elif 'http_status_code' in response:
checker = HTTPStatusCodeChecker(
status_code=response['http_status_code'])
elif 'crc32body' in response:
checker = CRC32Checker(header=response['crc32body'])
else:
# TODO: send a signal.
raise ValueError("Unknown retry policy: %s" % config)
return checker
def _extract_retryable_exception(config):
applies_when = config['applies_when']
if 'crc32body' in applies_when.get('response', {}):
return [ChecksumError]
elif 'socket_errors' in applies_when:
exceptions = []
for name in applies_when['socket_errors']:
exceptions.extend(EXCEPTION_MAP[name])
return exceptions
class RetryHandler(object):
"""Retry handler.
The retry handler takes two params, ``checker`` object
and an ``action`` object.
The ``checker`` object must be a callable object and based on a response
and an attempt number, determines whether or not sufficient criteria for
a retry has been met. If this is the case then the ``action`` object
(which also is a callable) determines what needs to happen in the event
of a retry.
"""
def __init__(self, checker, action):
self._checker = checker
self._action = action
def __call__(self, attempts, response, caught_exception, **kwargs):
"""Handler for a retry.
Intended to be hooked up to an event handler (hence the **kwargs),
this will process retries appropriately.
"""
if self._checker(attempts, response, caught_exception):
result = self._action(attempts=attempts)
logger.debug("Retry needed, action of: %s", result)
return result
logger.debug("No retry needed.")
class BaseChecker(object):
"""Base class for retry checkers.
Each class is responsible for checking a single criteria that determines
whether or not a retry should not happen.
"""
def __call__(self, attempt_number, response, caught_exception):
"""Determine if retry criteria matches.
Note that either ``response`` is not None and ``caught_exception`` is
None or ``response`` is None and ``caught_exception`` is not None.
:type attempt_number: int
:param attempt_number: The total number of times we've attempted
to send the request.
:param response: The HTTP response (if one was received).
:type caught_exception: Exception
:param caught_exception: Any exception that was caught while trying to
send the HTTP response.
        :return: True, if the retry criteria matches (and therefore a retry
            should occur); False if the criteria does not match.
"""
# The default implementation allows subclasses to not have to check
# whether or not response is None or not.
if response is not None:
return self._check_response(attempt_number, response)
elif caught_exception is not None:
return self._check_caught_exception(
attempt_number, caught_exception)
else:
raise ValueError("Both response and caught_exception are None.")
def _check_response(self, attempt_number, response):
pass
def _check_caught_exception(self, attempt_number, caught_exception):
pass
class MaxAttemptsDecorator(BaseChecker):
"""Allow retries up to a maximum number of attempts.
This will pass through calls to the decorated retry checker, provided
that the number of attempts does not exceed max_attempts. It will
also catch any retryable_exceptions passed in. Once max_attempts has<|fim▁hole|> that was previously being caught will be raised.
"""
def __init__(self, checker, max_attempts, retryable_exceptions=None):
self._checker = checker
self._max_attempts = max_attempts
self._retryable_exceptions = retryable_exceptions
def __call__(self, attempt_number, response, caught_exception):
should_retry = self._should_retry(attempt_number, response,
caught_exception)
if should_retry:
if attempt_number >= self._max_attempts:
logger.debug("Reached the maximum number of retry "
"attempts: %s", attempt_number)
return False
else:
return should_retry
else:
return False
def _should_retry(self, attempt_number, response, caught_exception):
if self._retryable_exceptions and \
attempt_number < self._max_attempts:
try:
return self._checker(attempt_number, response, caught_exception)
except self._retryable_exceptions as e:
logger.debug("retry needed, retryable exception caught: %s",
e, exc_info=True)
return True
else:
# If we've exceeded the max attempts we just let the exception
            # propagate if one has occurred.
return self._checker(attempt_number, response, caught_exception)
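# Illustrative wiring (hypothetical values): wrap a checker so that it retries
# at most 5 times and treats connection errors as retryable:
#     decorated = MaxAttemptsDecorator(
#         checker, max_attempts=5,
#         retryable_exceptions=(ConnectionError,))
#     should_retry = decorated(attempt_number, response, caught_exception)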
class HTTPStatusCodeChecker(BaseChecker):
def __init__(self, status_code):
self._status_code = status_code
def _check_response(self, attempt_number, response):
if response[0].status_code == self._status_code:
logger.debug(
"retry needed: retryable HTTP status code received: %s",
self._status_code)
return True
else:
return False
class ServiceErrorCodeChecker(BaseChecker):
def __init__(self, status_code, error_code):
self._status_code = status_code
self._error_code = error_code
def _check_response(self, attempt_number, response):
if response[0].status_code == self._status_code:
actual_error_code = response[1].get('Error', {}).get('Code')
if actual_error_code == self._error_code:
logger.debug(
"retry needed: matching HTTP status and error code seen: "
"%s, %s", self._status_code, self._error_code)
return True
return False
class MultiChecker(BaseChecker):
def __init__(self, checkers):
self._checkers = checkers
def __call__(self, attempt_number, response, caught_exception):
for checker in self._checkers:
checker_response = checker(attempt_number, response,
caught_exception)
if checker_response:
return checker_response
return False
class CRC32Checker(BaseChecker):
def __init__(self, header):
# The header where the expected crc32 is located.
self._header_name = header
def _check_response(self, attempt_number, response):
http_response = response[0]
expected_crc = http_response.headers.get(self._header_name)
if expected_crc is None:
logger.debug("crc32 check skipped, the %s header is not "
"in the http response.", self._header_name)
else:
actual_crc32 = crc32(response[0].content) & 0xffffffff
if not actual_crc32 == int(expected_crc):
logger.debug(
"retry needed: crc32 check failed, expected != actual: "
"%s != %s", int(expected_crc), actual_crc32)
raise ChecksumError(checksum_type='crc32',
expected_checksum=int(expected_crc),
actual_checksum=actual_crc32)
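# Worked example (assuming zlib crc32 semantics): crc32(b'foo') & 0xffffffff
# is 2356372769, so a body of b'foo' passes only when the configured header
# (e.g. 'x-amz-crc32') carries '2356372769'; any other value raises
# ChecksumError above.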
class ExceptionRaiser(BaseChecker):
"""Raise any caught exceptions.
This class will raise any non None ``caught_exception``.
"""
def _check_caught_exception(self, attempt_number, caught_exception):
# This is implementation specific, but this class is useful by
# coordinating with the MaxAttemptsDecorator.
# The MaxAttemptsDecorator has a list of exceptions it should catch
# and retry, but something needs to come along and actually raise the
# caught_exception. That's what this class is being used for. If
# the MaxAttemptsDecorator is not interested in retrying the exception
        # then this exception just propagates out past the retry code.
raise caught_exception<|fim▁end|> | been exceeded, then False will be returned or the retryable_exceptions |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>import os
import os.path
import sys
ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Add apps and lib directories to PYTHONPATH
sys.path = [
ROOT,
os.path.join(ROOT, 'apps'),
] + sys.path
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "koedquiz.settings")
# This application object is used by the development server
# as well as any WSGI server configured to use this file.
from django.core.wsgi import get_wsgi_application<|fim▁hole|><|fim▁end|> | application = get_wsgi_application() |
<|file_name|>CurrencyFairURI.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package com.nilsonmassarenti.app.currencyfair.model;
/**
 * This class manages the REST endpoint URL mappings.
* @author nilsonmassarenti - [email protected]
* @version 0.1
* Last update: 03-Mar-2015 12:20 am
*/
public class CurrencyFairURI {
public static final String DUMMY_BP = "/rest/currencyfair/dummy";
public static final String GET_CURRENCY_FAIR = "/rest/currencyfair/get/{id}";
public static final String GET_ALL_CURRENCY_FAIR = "/rest/currencyfairs";
public static final String CREATE_CURRENCY_FAIR = "/rest/currencyfair/create";
public static final String DELETE_CURRENCY_FAIR = "/rest/currencyfair/delete/{id}";
}<|fim▁end|> | |
<|file_name|>test_generators.py<|end_file_name|><|fim▁begin|>tutorial_tests = """
Let's try a simple generator:
>>> def f():
... yield 1
... yield 2
>>> for i in f():
... print(i)
1
2
>>> g = f()
>>> next(g)
1
>>> next(g)
2
"Falling off the end" stops the generator:
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
StopIteration
"return" also stops the generator:
>>> def f():
... yield 1
... return
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 3, in f
StopIteration
>>> next(g) # once stopped, can't be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
"raise StopIteration" stops the generator too:
>>> def f():
... yield 1
... raise StopIteration
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
However, they are not exactly equivalent:
>>> def g1():
... try:
... return
... except:
... yield 1
...
>>> list(g1())
[]
>>> def g2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(g2()))
[42]
This may be surprising at first:
>>> def g3():
... try:
... return
... finally:
... yield 1
...
>>> list(g3())
[1]
Let's create an alternate range() function implemented as a generator:
>>> def yrange(n):
... for i in range(n):
... yield i
...
>>> list(yrange(5))
[0, 1, 2, 3, 4]
Generators always return to the most recent caller:
>>> def creator():
... r = yrange(5)
... print("creator", next(r))
... return r
...
>>> def caller():
... r = creator()
... for i in r:
... print("caller", i)
...
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4
Generators can call other generators:
>>> def zrange(n):
... for i in yrange(n):
... yield i
...
>>> list(zrange(5))
[0, 1, 2, 3, 4]
"""
# The examples from PEP 255.
pep_tests = """
Specification: Yield
Restriction: A generator cannot be resumed while it is actively
running:
>>> def g():
... i = next(me)
... yield i
>>> me = g()
>>> next(me)
Traceback (most recent call last):
...
File "<string>", line 2, in g
ValueError: generator already executing
Specification: Return
Note that return isn't always equivalent to raising StopIteration: the
difference lies in how enclosing try/except constructs are treated.
For example,
>>> def f1():
... try:
... return
... except:
... yield 1
>>> print(list(f1()))
[]
because, as in any function, return simply exits, but
>>> def f2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(f2()))
[42]
because StopIteration is captured by a bare "except", as is any
exception.
Specification: Generators and Exception Propagation
>>> def f():
... return 1//0
>>> def g():
... yield f() # the zero division exception propagates
... yield 42 # and we'll never get here
>>> k = g()
>>> next(k)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
File "<stdin>", line 2, in f
ZeroDivisionError: integer division or modulo by zero
>>> next(k) # and the generator cannot be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>>
Specification: Try/Except/Finally
>>> def f():
... try:
... yield 1
... try:
... yield 2
... 1//0
... yield 3 # never get here
... except ZeroDivisionError:
... yield 4
... yield 5
... raise
... except:
... yield 6
... yield 7 # the "raise" above stops this
... except:
... yield 8
... yield 9
... try:
... x = 12
... finally:
... yield 10
... yield 11
>>> print(list(f()))
[1, 2, 4, 5, 8, 9, 10, 11]
>>>
Guido's binary tree example.
>>> # A binary tree class.
>>> class Tree:
...
... def __init__(self, label, left=None, right=None):
... self.label = label
... self.left = left
... self.right = right
...
... def __repr__(self, level=0, indent=" "):
... s = level*indent + repr(self.label)
... if self.left:
... s = s + "\\n" + self.left.__repr__(level+1, indent)
... if self.right:
... s = s + "\\n" + self.right.__repr__(level+1, indent)
... return s
...
... def __iter__(self):
... return inorder(self)
>>> # Create a Tree from a list.
>>> def tree(list):
... n = len(list)
... if n == 0:
... return []
... i = n // 2
... return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # A recursive generator that generates Tree labels in in-order.
>>> def inorder(t):
... if t:
... for x in inorder(t.left):
... yield x
... yield t.label
... for x in inorder(t.right):
... yield x
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # Print the nodes of the tree in in-order.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
>>> # A non-recursive generator.
>>> def inorder(node):
... stack = []
... while node:
... while node.left:
... stack.append(node)
... node = node.left
... yield node.label
... while not node.right:
... try:
... node = stack.pop()
... except IndexError:
... return
... yield node.label
... node = node.right
>>> # Exercise the non-recursive generator.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
"""
# Examples from Iterator-List and Python-Dev and c.l.py.
email_tests = """
The difference between yielding None and returning it.
>>> def g():
... for i in range(3):
... yield None
... yield None
... return
>>> list(g())
[None, None, None, None]
Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.
>>> def g():
... yield 1
... try:
... raise StopIteration
... except:
... yield 2
... yield 3
>>> list(g())
[1, 2, 3]
Next one was posted to c.l.py.
>>> def gcomb(x, k):
... "Generate all combinations of k elements from list x."
...
... if k > len(x):
... return
... if k == 0:
... yield []
... else:
... first, rest = x[0], x[1:]
... # A combination does or doesn't contain first.
... # If it does, the remainder is a k-1 comb of rest.
... for c in gcomb(rest, k-1):
... c.insert(0, first)
... yield c
... # If it doesn't contain first, it's a k comb of rest.
... for c in gcomb(rest, k):
... yield c
>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
... print("%d-combs of %s:" % (k, seq))
... for c in gcomb(seq, k):
... print(" ", c)
0-combs of [1, 2, 3, 4]:
[]
1-combs of [1, 2, 3, 4]:
[1]
[2]
[3]
[4]
2-combs of [1, 2, 3, 4]:
[1, 2]
[1, 3]
[1, 4]
[2, 3]
[2, 4]
[3, 4]
3-combs of [1, 2, 3, 4]:
[1, 2, 3]
[1, 2, 4]
[1, 3, 4]
[2, 3, 4]
4-combs of [1, 2, 3, 4]:
[1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:
From the Iterators list, about the types of these things.
>>> def g():
... yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'x.__next__() <==> next(x)')
x.__next__() <==> next(x)
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True
And more, added later.
>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
...
AttributeError: readonly attribute
>>> def g():
... yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0
A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: [email protected]
Subject: Re: PEP 255: Simple Generators
>>> class disjointSet:
... def __init__(self, name):
... self.name = name
... self.parent = None
... self.generator = self.generate()
...
... def generate(self):
... while not self.parent:
... yield self
... for x in self.parent.generator:
... yield x
...
... def find(self):
... return next(self.generator)
...
... def union(self, parent):
... if self.parent:
... raise ValueError("Sorry, I'm not a root!")
... self.parent = parent
...
... def __str__(self):
... return self.name
>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]
>>> import random
>>> gen = random.Random(42)
>>> while 1:
... for s in sets:
... print(" %s->%s" % (s, s.find()), end='')
... print()
... if len(roots) > 1:
... s1 = gen.choice(roots)
... roots.remove(s1)
... s2 = gen.choice(roots)
... s1.union(s2)
... print("merged", s1, "into", s2)
... else:
... break
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
"""
# Emacs turd '
# Fun tests (for sufficiently warped notions of "fun").
fun_tests = """
Build up to a recursive Sieve of Eratosthenes generator.
>>> def firstn(g, n):
... return [next(g) for i in range(n)]
>>> def intsfrom(i):
... while 1:
... yield i
... i += 1
>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]
>>> def exclude_multiples(n, ints):
... for i in ints:
... if i % n:
... yield i
>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]
>>> def sieve(ints):
... prime = next(ints)
... yield prime
... not_divisible_by_prime = exclude_multiples(prime, ints)
... for p in sieve(not_divisible_by_prime):
... yield p
>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
Another famous problem: generate all integers of the form
2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0. Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.
>>> def times(n, g):
... for i in g:
... yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
>>> def merge(g, h):
... ng = next(g)
... nh = next(h)
... while 1:
... if ng < nh:
... yield ng
... ng = next(g)
... elif ng > nh:
... yield nh
... nh = next(h)
... else:
... yield ng
... ng = next(g)
... nh = next(h)
The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator. Note that me_times2 (etc) each need to see every element in the
result sequence. So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).
>>> def m235():
... yield 1
... me_times2 = times(2, m235())
... me_times3 = times(3, m235())
... me_times5 = times(5, m235())
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Don't print "too many" of these -- the implementation above is extremely
inefficient: each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts. Very odd: when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.
>>> result = m235()
>>> for i in range(3):
... print(firstn(result, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
Heh. Here's one way to get a shared list, complete with an excruciating
namespace renaming trick. The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().
>>> class LazyList:
... def __init__(self, g):
... self.sofar = []
... self.fetch = g.__next__
...
... def __getitem__(self, i):
... sofar, fetch = self.sofar, self.fetch
... while i >= len(sofar):
... sofar.append(fetch())
... return sofar[i]
>>> def m235():
... yield 1
... # Gack: m235 below actually refers to a LazyList.
... me_times2 = times(2, m235)
... me_times3 = times(3, m235)
... me_times5 = times(5, m235)
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Print as many of these as you like -- *this* implementation is memory-
efficient.
>>> m235 = LazyList(m235())
>>> for i in range(5):
... print([m235[j] for j in range(15*i, 15*(i+1))])
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
Ye olde Fibonacci generator, LazyList style.
>>> def fibgen(a, b):
...
... def sum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def tail(g):
... next(g) # throw first away
... for x in g:
... yield x
...
... yield a
... yield b
... for s in sum(iter(fib),
... tail(iter(fib))):
... yield s
>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
Running after your tail with itertools.tee (new in version 2.4)
The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
supposes the list has already been produced by recursively calling itself.
For these algorithms to work, they must:
- produce at least a first element without presupposing the existence of
the rest of the list
- produce their elements in a lazy manner
To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In Python, we have to explicitly maintain a list of already computed results
and abandon genuine recursion.
This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.
Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".
>>> from itertools import tee
>>> def m235():
... def _m235():
... yield 1
... for n in merge(times(2, m2),
... merge(times(3, m3),
... times(5, m5))):
... yield n
... m1 = _m235()
... m2, m3, m5, mRes = tee(m1, 4)
... return mRes
>>> it = m235()
>>> for i in range(5):
... print(firstn(it, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the Hamming
sequence for hours without increasing memory usage, or only very little.
The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.
Ye olde Fibonacci generator, tee style.
>>> def fib():
...
... def _isum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def _fib():
... yield 1
... yield 2
... next(fibTail) # throw first away
... for res in _isum(fibHead, fibTail):
... yield res
...
... realfib = _fib()
... fibHead, fibTail, fibRes = tee(realfib, 3)
... return fibRes
>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
"""
# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0
# hackery.
syntax_tests = """
>>> def f():
... return 22
... yield 1
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator
>>> def f():
... yield 1
... return 22
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator
"return None" is not the same as "return" in a generator:
>>> def f():
... yield 1
... return None
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator
These are fine:
>>> def f():
... yield 1
... return
>>> def f():
... try:
... yield 1
... finally:
... pass
>>> def f():
... try:
... try:
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... pass
... finally:
... pass
>>> def f():
... try:
... try:
... yield 12
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... try:
... x = 12
... finally:
... yield 12
... except:
... return
>>> list(f())
[12, 666]
>>> def f():
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield 1
>>> type(f())
<class 'generator'>
>>> def f():
... if "":
... yield None
>>> type(f())
<class 'generator'>
>>> def f():
... return
... try:
... if x==4:
... pass
... elif 0:
... try:
... 1//0
... except SyntaxError:
... pass
... else:
... if 0:
... while 12:
... x += 1
... yield 2 # don't blink
... f(a, b, c, d, e)
... else:
... pass
... except:
... x = 1
... return
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... def g():
... yield 1
...
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... class C:
... def __init__(self):
... yield 1
... def f(self):
... yield 2
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... return
... if 0:
... yield 2
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... lambda x: x # shouldn't trigger here
... return # or here
... def f(i):
... return 2*i # or here
... if 0:
... return 3 # but *this* sucks (line 8)
... if 0:
... yield 2 # because it's a generator (line 10)
Traceback (most recent call last):
SyntaxError: 'return' with argument inside generator
<|fim▁hole|>... for i in range(3):
... try:
... continue
... finally:
... yield i
...
>>> g = f()
>>> print(next(g))
0
>>> print(next(g))
1
>>> print(next(g))
2
>>> print(next(g))
Traceback (most recent call last):
StopIteration
Test the gi_code attribute
>>> def f():
... yield 5
...
>>> g = f()
>>> g.gi_code is f.__code__
True
>>> next(g)
5
>>> next(g)
Traceback (most recent call last):
StopIteration
>>> g.gi_code is f.__code__
True
Test the __name__ attribute and the repr()
>>> def f():
... yield 5
...
>>> g = f()
>>> g.__name__
'f'
>>> repr(g) # doctest: +ELLIPSIS
'<generator object f at ...>'
Lambdas shouldn't have their usual return behavior.
>>> x = lambda: (yield 1)
>>> list(x())
[1]
>>> x = lambda: ((yield 1), (yield 2))
>>> list(x())
[1, 2]
"""
# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure. Pass a list of no-argument functions
# that return iterable objects. Easiest to explain by example: assume the
# function list [x, y, z] is passed. Then conjoin acts like:
#
# def g():
# values = [None] * 3
# for values[0] in x():
# for values[1] in y():
# for values[2] in z():
# yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop. If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator). Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.
def simple_conjoin(gs):
values = [None] * len(gs)
def gen(i):
if i >= len(gs):
yield values
else:
for values[i] in gs[i]():
for x in gen(i+1):
yield x
for x in gen(0):
yield x
# That works fine, but recursing a level and checking i against len(gs) for
# each item produced is inefficient. By doing manual loop unrolling across
# generator boundaries, it's possible to eliminate most of that overhead.
# This isn't worth the bother *in general* for generators, but conjoin() is
# a core building block for some CPU-intensive generator applications.
def conjoin(gs):
n = len(gs)
values = [None] * n
# Do one loop nest at time recursively, until the # of loop nests
# remaining is divisible by 3.
def gen(i):
if i >= n:
yield values
elif (n-i) % 3:
ip1 = i+1
for values[i] in gs[i]():
for x in gen(ip1):
yield x
else:
for x in _gen3(i):
yield x
# Do three loop nests at a time, recursing only if at least three more
# remain. Don't call directly: this is an internal optimization for
# gen's use.
def _gen3(i):
assert i < n and (n-i) % 3 == 0
ip1, ip2, ip3 = i+1, i+2, i+3
g, g1, g2 = gs[i : ip3]
if ip3 >= n:
# These are the last three, so we can yield values directly.
for values[i] in g():
for values[ip1] in g1():
for values[ip2] in g2():
yield values
else:
# At least 6 loop nests remain; peel off 3 and recurse for the
# rest.
for values[i] in g():
for values[ip1] in g1():
for values[ip2] in g2():
for x in _gen3(ip3):
yield x
for x in gen(0):
yield x
# And one more approach: For backtracking apps like the Knight's Tour
# solver below, the number of backtracking levels can be enormous (one
# level per square, for the Knight's Tour, so that e.g. a 100x100 board
# needs 10,000 levels). In such cases Python is likely to run out of
# stack space due to recursion. So here's a recursion-free version of
# conjoin too.
# NOTE WELL: This allows large problems to be solved with only trivial
# demands on stack space. Without explicitly resumable generators, this is
# much harder to achieve. OTOH, this is much slower (up to a factor of 2)
# than the fancy unrolled recursive conjoin.
def flat_conjoin(gs): # rename to conjoin to run tests with this instead
n = len(gs)
values = [None] * n
iters = [None] * n
_StopIteration = StopIteration # make local because caught a *lot*
i = 0
while 1:
# Descend.
try:
while i < n:
it = iters[i] = gs[i]().__next__
values[i] = it()
i += 1
except _StopIteration:
pass
else:
assert i == n
yield values
# Backtrack until an older iterator can be resumed.
i -= 1
while i >= 0:
try:
values[i] = iters[i]()
# Success! Start fresh at next level.
i += 1
break
except _StopIteration:
# Continue backtracking.
i -= 1
else:
assert i < 0
break
# A conjoin-based N-Queens solver.
class Queens:
def __init__(self, n):
self.n = n
rangen = range(n)
# Assign a unique int to each column and diagonal.
# columns: n of those, range(n).
# NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
# each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
# based.
# NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
# each, smallest i+j is 0, largest is 2n-2.
# For each square, compute a bit vector of the columns and
# diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
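        # Worked example for n=8: square (i=2, j=5) sets column bit 5,
        # NW-SE bit 8 + (2-5) + 7 = 12, and NE-SW bit 8 + 15 + (2+5) = 30.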
self.rowgenerators = []
for i in rangen:
rowuses = [(1 << j) | # column ordinal
(1 << (n + i-j + n-1)) | # NW-SE ordinal
(1 << (n + 2*n-1 + i+j)) # NE-SW ordinal
for j in rangen]
def rowgen(rowuses=rowuses):
for j in rangen:
uses = rowuses[j]
if uses & self.used == 0:
self.used |= uses
yield j
self.used &= ~uses
self.rowgenerators.append(rowgen)
# Generate solutions.
def solve(self):
self.used = 0
for row2col in conjoin(self.rowgenerators):
yield row2col
def printsolution(self, row2col):
n = self.n
assert n == len(row2col)
sep = "+" + "-+" * n
print(sep)
for i in range(n):
squares = [" " for j in range(n)]
squares[row2col[i]] = "Q"
print("|" + "|".join(squares) + "|")
print(sep)
# A conjoin-based Knight's Tour solver. This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.
class Knights:
def __init__(self, m, n, hard=0):
self.m, self.n = m, n
# solve() will set up succs[i] to be a list of square #i's
# successors.
succs = self.succs = []
# Remove i0 from each of its successor's successor lists, i.e.
# successors can't go back to i0 again. Return 0 if we can
# detect this makes a solution impossible, else return 1.
def remove_from_successors(i0, len=len):
# If we remove all exits from a free square, we're dead:
# even if we move to it next, we can't leave it again.
# If we create a square with one exit, we must visit it next;
# else somebody else will have to visit it, and since there's
# only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
# single exit, we can only move to one of them next, leaving
# the other one a dead end.
ne0 = ne1 = 0
for i in succs[i0]:
s = succs[i]
s.remove(i0)
e = len(s)
if e == 0:
ne0 += 1
elif e == 1:
ne1 += 1
return ne0 == 0 and ne1 < 2
# Put i0 back in each of its successor's successor lists.
def add_to_successors(i0):
for i in succs[i0]:
succs[i].append(i0)
# Generate the first move.
def first():
if m < 1 or n < 1:
return
# Since we're looking for a cycle, it doesn't matter where we
# start. Starting in a corner makes the 2nd move easy.
corner = self.coords2index(0, 0)
remove_from_successors(corner)
self.lastij = corner
yield corner
add_to_successors(corner)
# Generate the second moves.
def second():
corner = self.coords2index(0, 0)
assert self.lastij == corner # i.e., we started in the corner
if m < 3 or n < 3:
return
assert len(succs[corner]) == 2
assert self.coords2index(1, 2) in succs[corner]
assert self.coords2index(2, 1) in succs[corner]
# Only two choices. Whichever we pick, the other must be the
# square picked on move m*n, as it's the only way to get back
# to (0, 0). Save its index in self.final so that moves before
# the last know it must be kept free.
for i, j in (1, 2), (2, 1):
this = self.coords2index(i, j)
final = self.coords2index(3-i, 3-j)
self.final = final
remove_from_successors(this)
succs[final].append(corner)
self.lastij = this
yield this
succs[final].remove(corner)
add_to_successors(this)
# Generate moves 3 thru m*n-1.
def advance(len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
candidates = []
for i in succs[self.lastij]:
e = len(succs[i])
assert e > 0, "else remove_from_successors() pruning flawed"
if e == 1:
candidates = [(e, i)]
break
candidates.append((e, i))
else:
candidates.sort()
for e, i in candidates:
if i != self.final:
if remove_from_successors(i):
self.lastij = i
yield i
add_to_successors(i)
# Generate moves 3 thru m*n-1. Alternative version using a
# stronger (but more expensive) heuristic to order successors.
# Since the # of backtracking levels is m*n, a poor move early on
# can take eons to undo. Smallest square board for which this
# matters a lot is 52x52.
def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
# Break ties via max distance from board centerpoint (favor
# corners and edges whenever possible).
candidates = []
for i in succs[self.lastij]:
e = len(succs[i])
assert e > 0, "else remove_from_successors() pruning flawed"
if e == 1:
candidates = [(e, 0, i)]
break
i1, j1 = self.index2coords(i)
d = (i1 - vmid)**2 + (j1 - hmid)**2
candidates.append((e, -d, i))
else:
candidates.sort()
for e, d, i in candidates:
if i != self.final:
if remove_from_successors(i):
self.lastij = i
yield i
add_to_successors(i)
# Generate the last move.
def last():
assert self.final in succs[self.lastij]
yield self.final
if m*n < 4:
self.squaregenerators = [first]
else:
self.squaregenerators = [first, second] + \
[hard and advance_hard or advance] * (m*n - 3) + \
[last]
def coords2index(self, i, j):
assert 0 <= i < self.m
assert 0 <= j < self.n
return i * self.n + j
def index2coords(self, index):
assert 0 <= index < self.m * self.n
return divmod(index, self.n)
def _init_board(self):
succs = self.succs
del succs[:]
m, n = self.m, self.n
c2i = self.coords2index
offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2),
(-1, -2), (-2, -1), (-2, 1), (-1, 2)]
rangen = range(n)
for i in range(m):
for j in rangen:
s = [c2i(i+io, j+jo) for io, jo in offsets
if 0 <= i+io < m and
0 <= j+jo < n]
succs.append(s)
# Generate solutions.
def solve(self):
self._init_board()
for x in conjoin(self.squaregenerators):
yield x
def printsolution(self, x):
m, n = self.m, self.n
assert len(x) == m*n
w = len(str(m*n))
format = "%" + str(w) + "d"
squares = [[None] * n for i in range(m)]
k = 1
for i in x:
i1, j1 = self.index2coords(i)
squares[i1][j1] = format % k
k += 1
sep = "+" + ("-" * w + "+") * n
print(sep)
for i in range(m):
row = squares[i]
print("|" + "|".join(row) + "|")
print(sep)
conjoin_tests = """
Generate the 3-bit binary numbers in order. This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.
>>> for c in conjoin([lambda: iter((0, 1))] * 3):
... print(c)
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]
For efficiency in typical backtracking apps, conjoin() yields the same list
object each time. So if you want to save away a full account of its
generated sequence, you need to copy its results.
>>> def gencopy(iterator):
... for x in iterator:
... yield x[:]
>>> for n in range(10):
... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
... print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True
And run an 8-queens solver.
>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
>>> print(count, "solutions in all.")
92 solutions in all.
And run a Knight's Tour on a 10x10 board. Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... k.printsolution(x)
... else:
... break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""
weakref_tests = """\
Generators are weakly referencable:
>>> import weakref
>>> def gen():
... yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)
Generator-iterators are weakly referencable as well:
>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']
"""
coroutine_tests = """\
Sending a value into a started generator:
>>> def f():
... print((yield 1))
... yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2
Sending a value into a new generator produces a TypeError:
>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator
Yield by itself yields None:
>>> def f(): yield
>>> list(f())
[None]
An obscene abuse of a yield expression within a generator expression:
>>> list((yield 21) for i in range(4))
[21, None, 21, None, 21, None, 21, None]
And a more sane, but still weird usage:
>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>
A yield expression with augmented assignment.
>>> def coroutine(seq):
... count = 0
... while count < 200:
... count += yield
... seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]
Check some syntax errors for yield expressions:
>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
...
SyntaxError: 'yield' outside function
>>> def f(): return lambda x=(yield): 1
Traceback (most recent call last):
...
SyntaxError: 'return' with argument inside generator
>>> def f(): x = yield = y
Traceback (most recent call last):
...
SyntaxError: assignment to yield expression not possible
>>> def f(): (yield bar) = y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
>>> def f(): (yield bar) += y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
Now check some throw() conditions:
>>> def f():
... while True:
... try:
... print((yield))
... except ValueError as v:
... print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)
>>> g.throw(ValueError) # type only
caught ValueError ()
>>> g.throw(ValueError("xyz")) # value only
caught ValueError (xyz)
>>> g.throw(ValueError, ValueError(1)) # value+matching type
caught ValueError (1)
>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped
caught ValueError (1)
>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback
caught ValueError (1)
>>> g.throw(ValueError(1), "foo") # bad args
Traceback (most recent call last):
...
TypeError: instance exception may not have a separate value
>>> g.throw(ValueError, "foo", 23) # bad args
Traceback (most recent call last):
...
TypeError: throw() third argument must be a traceback object
>>> g.throw("abc")
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not str
>>> g.throw(0)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not int
>>> g.throw(list)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not type
>>> def throw(g,exc):
... try:
... raise exc
... except:
... g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()
>>> g.send(1)
1
>>> throw(g,TypeError) # terminate the generator
Traceback (most recent call last):
...
TypeError
>>> print(g.gi_frame)
None
>>> g.send(2)
Traceback (most recent call last):
...
StopIteration
>>> g.throw(ValueError,6) # throw on closed generator
Traceback (most recent call last):
...
ValueError: 6
>>> f().throw(ValueError,7) # throw on just-opened generator
Traceback (most recent call last):
...
ValueError: 7
Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0
>>> def f():
... try:
... yield
... except:
... raise
>>> g = f()
>>> try:
... 1/0
... except ZeroDivisionError as v:
... try:
... g.throw(v)
... except Exception as w:
... tb = w.__traceback__
>>> levels = 0
>>> while tb:
... levels += 1
... tb = tb.tb_next
>>> levels
3
Now let's try closing a generator:
>>> def f():
... try: yield
... except GeneratorExit:
... print("exiting")
>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close() # should be no-op now
>>> f().close() # close on just-opened generator should be fine
>>> def f(): yield # an even simpler generator
>>> f().close() # close before opening
>>> g = f()
>>> next(g)
>>> g.close() # close normally
And finalization:
>>> def f():
... try: yield
... finally:
... print("exiting")
>>> g = f()
>>> next(g)
>>> del g
exiting
GeneratorExit is not caught by except Exception:
>>> def f():
... try: yield
... except Exception:
... print('except')
... finally:
... print('finally')
>>> g = f()
>>> next(g)
>>> del g
finally
Now let's try some ill-behaved generators:
>>> def f():
... try: yield
... except GeneratorExit:
... yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
RuntimeError: generator ignored GeneratorExit
>>> g.close()
Our ill-behaved code should be invoked during GC:
>>> import sys, io
>>> old, sys.stderr = sys.stderr, io.StringIO()
>>> g = f()
>>> next(g)
>>> del g
>>> sys.stderr.getvalue().startswith(
... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
... )
True
>>> sys.stderr = old
And errors thrown during closing should propagate:
>>> def f():
... try: yield
... except GeneratorExit:
... raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
TypeError: fie!
Ensure that various yield expression constructs make their
enclosing function a generator:
>>> def f(): x += yield
>>> type(f())
<class 'generator'>
>>> def f(): x = yield
>>> type(f())
<class 'generator'>
>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>
>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<class 'generator'>
>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]
"""
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.
>>> import itertools
>>> def leak():
... class gen:
... def __iter__(self):
... return self
... def __next__(self):
... return self.item
... g = gen()
... head, tail = itertools.tee(g)
... g.item = head
... return head
>>> it = leak()
Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.
>>> item = next(it)
This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.
>>> def leak():
... def gen():
... while True:
... yield g
... g = gen()
>>> leak()
This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.
>>> import sys, io
>>> old = sys.stderr
>>> try:
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
... raise RuntimeError
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
... err.startswith(
... "Exception RuntimeError: RuntimeError() in <"
... )
... err.endswith("> ignored")
... len(err.splitlines())
... finally:
... sys.stderr = old
True
True
1
These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.
"""
__test__ = {"tut": tutorial_tests,
"pep": pep_tests,
"email": email_tests,
"fun": fun_tests,
"syntax": syntax_tests,
"conjoin": conjoin_tests,
"weakref": weakref_tests,
"coroutine": coroutine_tests,
"refleaks": refleaks_tests,
}
# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
from test import support, test_generators
support.run_doctest(test_generators, verbose)
# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
test_main(1)<|fim▁end|> | This one caused a crash (see SF bug 567538):
>>> def f(): |
<|file_name|>CatAllocationTestCase.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;<|fim▁hole|>import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
/**
 * A base test case that allows running tests based on the output of the CAT API.
 * The input is a line-based cat/shards output like:
* kibana-int 0 p STARTED 2 24.8kb 10.202.245.2 r5-9-35
*
 * the test builds up a cluster state from the cat input and optionally runs a full balance on it.
* This can be used to debug cluster allocation decisions.
*/
public abstract class CatAllocationTestCase extends ESAllocationTestCase {
protected abstract Path getCatPath() throws IOException;
public void testRun() throws IOException {
Set<String> nodes = new HashSet<>();
Map<String, Idx> indices = new HashMap<>();
try (BufferedReader reader = Files.newBufferedReader(getCatPath(), StandardCharsets.UTF_8)) {
String line = null;
// regexp FTW
Pattern pattern = Pattern.compile("^(.+)\\s+(\\d)\\s+([rp])\\s+(STARTED|RELOCATING|INITIALIZING|UNASSIGNED)" +
"\\s+\\d+\\s+[0-9.a-z]+\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+).*$");
while((line = reader.readLine()) != null) {
final Matcher matcher;
if ((matcher = pattern.matcher(line)).matches()) {
final String index = matcher.group(1);
Idx idx = indices.get(index);
if (idx == null) {
idx = new Idx(index);
indices.put(index, idx);
}
final int shard = Integer.parseInt(matcher.group(2));
final boolean primary = matcher.group(3).equals("p");
ShardRoutingState state = ShardRoutingState.valueOf(matcher.group(4));
String ip = matcher.group(5);
nodes.add(ip);
ShardRouting routing = TestShardRouting.newShardRouting(index, shard, ip, null, primary, state);
idx.add(routing);
logger.debug("Add routing {}", routing);
} else {
fail("can't read line: " + line);
}
}
}
logger.info("Building initial routing table");
MetaData.Builder builder = MetaData.builder();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
for(Idx idx : indices.values()) {
IndexMetaData.Builder idxMetaBuilder = IndexMetaData.builder(idx.name).settings(settings(Version.CURRENT))
.numberOfShards(idx.numShards()).numberOfReplicas(idx.numReplicas());
for (ShardRouting shardRouting : idx.routing) {
if (shardRouting.active()) {
Set<String> allocationIds = idxMetaBuilder.getInSyncAllocationIds(shardRouting.id());
if (allocationIds == null) {
allocationIds = new HashSet<>();
} else {
allocationIds = new HashSet<>(allocationIds);
}
allocationIds.add(shardRouting.allocationId().getId());
idxMetaBuilder.putInSyncAllocationIds(shardRouting.id(), allocationIds);
}
}
IndexMetaData idxMeta = idxMetaBuilder.build();
builder.put(idxMeta, false);
IndexRoutingTable.Builder tableBuilder = new IndexRoutingTable.Builder(idxMeta.getIndex()).initializeAsRecovery(idxMeta);
Map<Integer, IndexShardRoutingTable> shardIdToRouting = new HashMap<>();
for (ShardRouting r : idx.routing) {
IndexShardRoutingTable refData = new IndexShardRoutingTable.Builder(r.shardId()).addShard(r).build();
if (shardIdToRouting.containsKey(r.getId())) {
refData = new IndexShardRoutingTable.Builder(shardIdToRouting.get(r.getId())).addShard(r).build();
}
shardIdToRouting.put(r.getId(), refData);
}
for (IndexShardRoutingTable t: shardIdToRouting.values()) {
tableBuilder.addIndexShard(t);
}
IndexRoutingTable table = tableBuilder.build();
routingTableBuilder.add(table);
}
MetaData metaData = builder.build();
RoutingTable routingTable = routingTableBuilder.build();
DiscoveryNodes.Builder builderDiscoNodes = DiscoveryNodes.builder();
for (String node : nodes) {
builderDiscoNodes.add(newNode(node));
}
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING
.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).nodes(builderDiscoNodes.build()).build();
if (balanceFirst()) {
clusterState = rebalance(clusterState);
}
clusterState = allocateNew(clusterState);
}
protected abstract ClusterState allocateNew(ClusterState clusterState);
protected boolean balanceFirst() {
return true;
}
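    /**
     * Reroutes the cluster, then repeatedly starts all INITIALIZING shards
     * until none remain, i.e. the cluster has reached a balanced steady state.
     */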
private ClusterState rebalance(ClusterState clusterState) {
        AllocationService strategy = createAllocationService(Settings.builder()
.build());
RoutingAllocation.Result routingResult = strategy.reroute(clusterState, "reroute");
clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();
int numRelocations = 0;
while (true) {
List<ShardRouting> initializing = clusterState.routingTable().shardsWithState(INITIALIZING);
if (initializing.isEmpty()) {
break;
}
logger.debug("Initializing shards: {}", initializing);
numRelocations += initializing.size();
routingResult = strategy.applyStartedShards(clusterState, initializing);
clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();
}
logger.debug("--> num relocations to get balance: {}", numRelocations);
return clusterState;
}
public class Idx {
final String name;
final List<ShardRouting> routing = new ArrayList<>();
public Idx(String name) {
this.name = name;
}
public void add(ShardRouting r) {
routing.add(r);
}
public int numReplicas() {
int count = 0;
for (ShardRouting msr : routing) {
            if (msr.primary() == false && msr.id() == 0) {
count++;
}
}
return count;
}
public int numShards() {
int max = 0;
for (ShardRouting msr : routing) {
if (msr.primary()) {
max = Math.max(msr.getId()+1, max);
}
}
return max;
}
}
}<|fim▁end|> | import java.util.Map; |
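A minimal sketch of a concrete subclass (the class name and resource file are illustrative; getDataPath comes from the ES test framework):

public class ExampleCatAllocationTests extends CatAllocationTestCase {
    @Override
    protected Path getCatPath() throws IOException {
        // illustrative resource holding saved `_cat/shards` output
        return getDataPath("example_shards.txt");
    }

    @Override
    protected ClusterState allocateNew(ClusterState clusterState) {
        // nothing new to allocate in this sketch; return the balanced state
        return clusterState;
    }
}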
<|file_name|>features.ts<|end_file_name|><|fim▁begin|>import * as vscode from 'vscode';
import { workspace } from 'vscode';
import * as gitflowUtils from '../helpers/gitflowUtils';
import * as gitUtils from '../helpers/gitUtils';
import * as path from 'path';
import { InitConfigSettings } from '../settings/configSettings';
import { BranchSetting } from '../settings/branchSettings';
const config = workspace.getConfiguration();
const initValues = config.get('gitflow4code.init') as InitConfigSettings;
const askForDeletion = config.get('gitflow4code.askBeforeDeletion') as boolean;
const deleteByDefault = config.get('gitflow4code.deleteBranchByDefault') as boolean;
const pushAfterFinishing = config.get('gitflow4code.pushAfterFinishing') as boolean;
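/**
 * Entry point for the feature commands. `action` is either 'start' (prompts
 * for a base branch and a feature name) or 'finish' (optionally deletes the
 * branch afterwards, depending on the user's gitflow4code settings).
 */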
export function run(outChannel, action) {
if(action === 'start') {
var itemPickList = [
{
label: 'Start Feature from ' + initValues.develop,
description: ''
},
{
label: 'Start Feature from another base branch',
description: ''
}
];
vscode.window.showQuickPick(itemPickList).then(function(item) {
if(!item) return;
outChannel.clear();
if(item.label === itemPickList[0].label)
vscode.window.showInputBox({ prompt: 'Name of Feature: ', ignoreFocusOut: true }).then(val => startFeature(outChannel, val, initValues.develop));
else {
gitUtils.getBranchList(workspace.rootPath).then((features) => {
var branchPickList = [];
features.forEach(branchName => {
branchPickList.push( { label: branchName, description: 'create feature branch using ' + branchName + ' as your base'});
});
vscode.window.showQuickPick(branchPickList).then(function(item) {
if(!item) return;
outChannel.clear();
vscode.window.showInputBox({ prompt: 'Name of Feature: ', ignoreFocusOut: true }).then(val => startFeature(outChannel, val, item.label));
});
});
}
});
}
else if (action === 'finish') {
if(askForDeletion)
vscode.window.showInputBox({ prompt: 'Would you like this feature branch deleted after finishing? (y/n)', ignoreFocusOut: true }).then(function(val) {
if(val !== undefined && (val.toLowerCase() === 'y' || val.toLowerCase() === 'n')) {
var deleteBranch = val.toLowerCase() === 'y';
finishFeature(outChannel, deleteBranch);
}
});
else
finishFeature(outChannel, deleteByDefault);
}
}
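// Creates a gitflow feature branch named `featureName` off `baseBranch`,
// records it in the `gitflow4code.features` setting, and streams the git
// output to `outChannel`.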
function startFeature(outChannel, featureName, baseBranch) {
if(featureName !== undefined) // User chose to Cancel/Esc operation
if(featureName !== '') {
featureName = featureName.trim().replace(/ /g, '_');
gitUtils.getGitRepositoryPath(vscode.workspace.rootPath).then(function (gitRepositoryPath) {
gitflowUtils.startFeature(gitRepositoryPath, featureName, baseBranch)
.then(startFeature, genericErrorHandler)
.catch(genericErrorHandler)
}).catch(genericErrorHandler);
}
else
genericErrorHandler('Name of feature cannot be blank');
function startFeature(log) {
if(log.length === 0) {
vscode.window.showInformationMessage('Nothing to show');
return;
}
let featuresConfig = config.get('gitflow4code.features') as BranchSetting[];
featuresConfig.push(new BranchSetting(initValues.features + featureName, baseBranch, pushAfterFinishing));
config.update('gitflow4code.features', featuresConfig);
outChannel.append(log);
outChannel.show();
}
function genericErrorHandler(error) {
if(error.code && error.syscall && error.code === 'ENOENT' && error.syscall === 'spawn git')
vscode.window.showErrorMessage('Cannot find git installation');
else {
outChannel.appendLine(error);
outChannel.show();
vscode.window.showErrorMessage('There was an error, please view details in output log');
}
}
}
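// Finishes the current feature branch: merges it back into its recorded base
// branch, optionally deletes it, and drops it from the saved feature list.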
function finishFeature(outChannel, deleteBranch) {
gitUtils.getGitRepositoryPath(vscode.workspace.rootPath).then(function (gitRepositoryPath) {
gitUtils.getCurrentBranchName(vscode.workspace.rootPath).then((branchName) => {
if(branchName.toString().startsWith(initValues.features)) {
let featuresConfig = config.get('gitflow4code.features') as BranchSetting[];
let featureSetting = featuresConfig.find((feature) => feature.name === branchName.toString());
if(!featureSetting)
featureSetting = new BranchSetting(branchName.toString(), initValues.develop, pushAfterFinishing);
let options = {
pushToOrigin: pushAfterFinishing
};
gitflowUtils.finishFeature(gitRepositoryPath, branchName.toString(), featureSetting.base, deleteBranch, options).then(finishFeature, genericErrorHandler);
function finishFeature(log) {
if(log.length === 0) {
vscode.window.showInformationMessage('Nothing to show');
return;
}
if(deleteBranch) {
let featureIndex = featuresConfig.indexOf(featureSetting);
featuresConfig.splice(featureIndex, 1);
config.update('gitflow4code.features', featuresConfig);
}
outChannel.append(log);
outChannel.show();
}
}
else
vscode.window.showErrorMessage('Not currently on a Feature branch');
})
}).catch(genericErrorHandler);
function genericErrorHandler(error) {
if(error.code && error.syscall && error.code === 'ENOENT' && error.syscall === 'spawn git')
vscode.window.showErrorMessage('Cannot find git installation');
else {
outChannel.appendLine(error);
outChannel.show();
vscode.window.showErrorMessage('There was an error, please view details in output log');
}
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>constant.js<|end_file_name|><|fim▁begin|>/*
<|fim▁hole|> * @namespace hifive.pitalium.explorer.constant
*/
h5.u.obj.expose('hifive.pitalium.explorer.constant', {});
})();<|fim▁end|> | * Copyright (C) 2015-2017 NS Solutions Corporation, All Rights Reserved.
*/
(function() {
/**
|
<|file_name|>database_admin_client_config.py<|end_file_name|><|fim▁begin|>config = {
"interfaces": {
"google.spanner.admin.database.v1.DatabaseAdmin": {
"retry_codes": {
"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
"non_idempotent": [],
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 1000,
"retry_delay_multiplier": 1.3,<|fim▁hole|> "initial_rpc_timeout_millis": 60000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 60000,
"total_timeout_millis": 600000,
}
},
"methods": {
"ListDatabases": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"CreateDatabase": {
"timeout_millis": 3600000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetDatabase": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"UpdateDatabaseDdl": {
"timeout_millis": 3600000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"DropDatabase": {
"timeout_millis": 3600000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"GetDatabaseDdl": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"SetIamPolicy": {
"timeout_millis": 30000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetIamPolicy": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"TestIamPermissions": {
"timeout_millis": 30000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
},
}
}
}<|fim▁end|> | "max_retry_delay_millis": 32000, |
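For reference, the "default" retry_params above encode a capped exponential backoff. A minimal sketch of the delay sequence they imply (illustration only — real clients typically also apply jitter; this is not part of the generated client):

def retry_delays(initial_ms=1000, multiplier=1.3, max_ms=32000):
    """Yield successive retry delays in milliseconds: 1000, 1300, 1690, ...
    capped at max_retry_delay_millis."""
    delay = float(initial_ms)
    while True:
        yield min(delay, max_ms)
        delay *= multiplier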