File: Group.js

import { setPropertiesFromJSON } from '../../json-helper';
import Entity from './Entity';
/**
* Generated class for shr.entity.Group.
* @extends Entity
*/
class Group extends Entity {
/**
* Get the entry information.
* @returns {Entry} The shr.base.Entry
*/
get entryInfo() {
return this._entryInfo;
}
/**
* Set the entry information.
* @param {Entry} entryInfo - The shr.base.Entry
*/
set entryInfo(entryInfo) {
this._entryInfo = entryInfo;
}
/**
* Get the Type.
* @returns {Type} The shr.entity.Type
*/
get type() {
return this._type;
}
/**
* Set the Type.
* @param {Type} type - The shr.entity.Type
*/
set type(type) {
this._type = type;
}
/**
* Get the ActiveFlag.
* @returns {ActiveFlag} The shr.entity.ActiveFlag
*/
get activeFlag() {
return this._activeFlag;
}
/**
* Set the ActiveFlag.
* @param {ActiveFlag} activeFlag - The shr.entity.ActiveFlag
*/
set activeFlag(activeFlag) {
this._activeFlag = activeFlag;
}
/**
* Get the Title.
* @returns {Title} The shr.core.Title
*/
get title() {
return this._title;
}
/**
* Set the Title.
* @param {Title} title - The shr.core.Title
*/
set title(title) {
this._title = title;
}
/**
* Get the Definitional.
* @returns {Definitional} The shr.core.Definitional
*/
get definitional() {
return this._definitional;
}
/**
* Set the Definitional.
* @param {Definitional} definitional - The shr.core.Definitional
*/
set definitional(definitional) {
this._definitional = definitional;
}
/**
* Get the MembershipCriterion array.
* @returns {Array<MembershipCriterion>} The shr.entity.MembershipCriterion array
*/
get membershipCriterion() {
return this._membershipCriterion;
}
/**
* Set the MembershipCriterion array.
* @param {Array<MembershipCriterion>} membershipCriterion - The shr.entity.MembershipCriterion array
*/
set membershipCriterion(membershipCriterion) {
this._membershipCriterion = membershipCriterion;
}
/**
* Get the Member array.
* @returns {Array<Member>} The shr.entity.Member array
*/
get member() {
return this._member;
}
/**
* Set the Member array.
* @param {Array<Member>} member - The shr.entity.Member array
*/
set member(member) {
this._member = member;
}
/**
* Get the Count.
* @returns {Count} The shr.core.Count
*/
get count() {
return this._count;
}
/**
* Set the Count.
* @param {Count} count - The shr.core.Count
*/
set count(count) {
this._count = count;
}
/**
* Deserializes JSON data to an instance of the Group class.
* The JSON must be valid against the Group JSON schema, although this is not validated by the function.
* @param {object} json - the JSON data to deserialize
* @returns {Group} An instance of Group populated with the JSON data
*/
static fromJSON(json={}) {
const inst = new Group();
setPropertiesFromJSON(inst, json);
return inst;
}
}
export default Group;
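
Editor's example (not part of Group.js): a minimal use of the deserializer above; the import path and payload are hypothetical.

import Group from './shr/entity/Group'; // hypothetical path
// An empty payload yields an empty instance; real payloads must follow
// the Group JSON schema mentioned in fromJSON's doc comment.
const group = Group.fromJSON({});
console.log(group.entryInfo); // undefined until the setter is used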

File: all_7.js

var searchData=
[
['index',['index',['../d8/d33/structsvm__sample.html#a008f6b24c7c76af103e84245fb271506',1,'svm_sample']]],
['initlibirwls',['initLIBIRWLS',['../d4/d54/pythonmodule_8c.html#abe9b02ef8b3dff171684f855d6819d13',1,'initLIBIRWLS(void): pythonmodule.c'],['../d0/da7/pythonmodule_8h.html#abe9b02ef8b3dff171684f855d6819d13',1,'initLIBIRWLS(void): pythonmodule.c']]],
['initmemory',['initMemory',['../d0/d98/ParallelAlgorithms_8h.html#aa6df8c3f4f455d5692b3cb220fc205c7',1,'ParallelAlgorithms.h']]],
['iostructures_2ec',['IOStructures.c',['../dc/dfc/IOStructures_8c.html',1,'']]],
['iostructures_2eh',['IOStructures.h',['../de/d79/IOStructures_8h.html',1,'']]],
['irwlspar',['IRWLSpar',['../d4/d49/budgeted-train_8h.html#ad51d9a46645ad0b0bedb1113a3807d24',1,'budgeted-train.h']]]
];

File: mod.rs

// Legato - Statsd Server in Rust
//
// Copyright 2016 TSH Labs
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Parse and validate CLI and file configuration sources
use log;
use toml;
use clap::ArgMatches;
use std::error::{self, Error};
use std::fmt;
use std::net::SocketAddr;
use ::types::MetricResult;
pub use self::validate::{validate_addr, validate_percentiles};
use self::parse::{parse_addr, parse_log_level, parse_flush_period_from_str,
parse_prefix, parse_percentiles_from_str,
parse_percentiles_from_values, parse_flush_period_from_int64,
parse_ident, parse_as_str, parse_as_integer, parse_as_bool,
parse_as_slice, parse_as_table};
mod validate;
mod parse;
const DEFAULT_ENABLE_CONSOLE: bool = true;
const DEFAULT_ENABLE_GRAPHITE: bool = false;
const DEFAULT_FLUSH_PERIOD_MILLIS: u64 = 10_000;
const DEFAULT_GRAPHITE_ADDR: &'static str = "127.0.0.1:2003";
const DEFAULT_LOCAL_ADDR: &'static str = "127.0.0.1:8125";
const DEFAULT_LOG_LEVEL: log::LogLevelFilter = log::LogLevelFilter::Warn;
const DEFAULT_PREFIX_GLOBAL: &'static str = "statsd";
const DEFAULT_PREFIX_COUNTERS: &'static str = "counters";
const DEFAULT_PREFIX_TIMERS: &'static str = "timers";
const DEFAULT_PREFIX_GAUGES: &'static str = "gauges";
const DEFAULT_TIMER_PERCENTILES: &'static [u8] = &[75, 90, 95, 98, 99];
///
pub type ConfigResult<T> = Result<T, ConfigError>;
///
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ConfigErrorKind {
InvalidAddress,
InvalidLogLevel,
InvalidPercentiles,
InvalidFlushPeriod,
InvalidPrefix,
InvalidType,
InvalidSection,
}
///
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ConfigError {
kind: ConfigErrorKind,
field: String,
value: Option<String>,
}
impl ConfigError {
///
pub fn kind(&self) -> ConfigErrorKind {
self.kind
}
///
pub fn field(&self) -> &str {
&self.field
}
///
pub fn value(&self) -> &Option<String> {
&self.value
}
}
impl<T: Into<String>> From<(ConfigErrorKind, T)> for ConfigError {
fn from((kind, field): (ConfigErrorKind, T)) -> ConfigError {
ConfigError {
kind: kind,
field: field.into(),
value: None,
}
}
}
impl<T: Into<String>> From<(ConfigErrorKind, T, T)> for ConfigError {
fn from((kind, field, value): (ConfigErrorKind, T, T)) -> ConfigError {
ConfigError {
kind: kind,
field: field.into(),
value: Some(value.into()),
}
}
}
impl error::Error for ConfigError {
fn description(&self) -> &str {
match self.kind {
ConfigErrorKind::InvalidAddress => "invalid address",
ConfigErrorKind::InvalidLogLevel => "invalid log level",
ConfigErrorKind::InvalidPercentiles => "invalid percentiles",
ConfigErrorKind::InvalidFlushPeriod => "invalid flush period",
ConfigErrorKind::InvalidPrefix => "invalid prefix",
ConfigErrorKind::InvalidType => "invalid type",
ConfigErrorKind::InvalidSection => "missing or invalid section",
}
}
fn cause(&self) -> Option<&error::Error> {
None
}
}
impl fmt::Display for ConfigError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "{} for {}", self.description(), self.field));
if let Some(ref v) = self.value {
try!(write!(f, ": '{}'", v));
}
Ok(())
}
}
///
///
///
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct ServerConfig {
pub enable_console: bool,
pub enable_graphite: bool,
pub flush_period_ms: u64,
pub graphite_addr: SocketAddr,
pub local_addr: SocketAddr,
pub log_level: log::LogLevelFilter,
pub prefix_global: String,
pub prefix_counters: String,
pub prefix_timers: String,
pub prefix_gauges: String,
pub timer_percentiles: Vec<u8>,
pub config_file: Option<String>,
}
impl Default for ServerConfig {
fn default() -> ServerConfig {
ServerConfig {
enable_console: DEFAULT_ENABLE_CONSOLE,
enable_graphite: DEFAULT_ENABLE_GRAPHITE,
flush_period_ms: DEFAULT_FLUSH_PERIOD_MILLIS,
graphite_addr: DEFAULT_GRAPHITE_ADDR.parse().unwrap(),
local_addr: DEFAULT_LOCAL_ADDR.parse().unwrap(),
log_level: DEFAULT_LOG_LEVEL,
prefix_global: DEFAULT_PREFIX_GLOBAL.to_string(),
prefix_counters: DEFAULT_PREFIX_COUNTERS.to_string(),
prefix_timers: DEFAULT_PREFIX_TIMERS.to_string(),
prefix_gauges: DEFAULT_PREFIX_GAUGES.to_string(),
timer_percentiles: Vec::from(DEFAULT_TIMER_PERCENTILES),
config_file: None,
}
}
}
///
///
///
pub trait MergeConfig {
fn merge_to(self, conf: &ServerConfig) -> ServerConfig;
}
///
///
///
#[derive(Eq, PartialEq, Debug)]
pub struct FileConfig {
pub enable_console: Option<bool>,
pub enable_graphite: Option<bool>,
pub flush_period_ms: Option<u64>,
pub graphite_addr: Option<SocketAddr>,
pub local_addr: Option<SocketAddr>,
pub log_level: Option<log::LogLevelFilter>,
pub prefix_global: Option<String>,
pub prefix_counters: Option<String>,
pub prefix_timers: Option<String>,
pub prefix_gauges: Option<String>,
pub timer_percentiles: Option<Vec<u8>>,
}
impl FileConfig {
pub fn from(table: &toml::Table) -> MetricResult<FileConfig> {
let server = try!(table.get("legato")
.ok_or(ConfigError::from((ConfigErrorKind::InvalidSection, "legato")))
.and_then(|v| parse_as_table(v, "legato", parse_ident)));
let enable_console = if let Some(v) = server.get("enable_console") {
Some(try!(parse_as_bool(v, "enabled_console", parse_ident)))
} else {
None
};
let enable_graphite = if let Some(v) = server.get("enable_graphite") {
Some(try!(parse_as_bool(v, "enable_graphite", parse_ident)))
} else {
None
};
let flush_period_ms = if let Some(v) = server.get("flush_period_ms") {
Some(try!(parse_as_integer(v, "flush_period_ms", parse_flush_period_from_int64)))
} else {
None
};
let graphite_addr = if let Some(v) = server.get("graphite_addr") {
Some(try!(parse_as_str(v, "graphite_addr", parse_addr)))
} else {
None
};
let local_addr = if let Some(v) = server.get("local_addr") {
Some(try!(parse_as_str(v, "local_addr", parse_addr)))
} else {
None
};
let log_level = if let Some(v) = server.get("log_level") {
Some(try!(parse_as_str(v, "log_level", parse_log_level)))
} else {
None
};
let prefix_global = if let Some(v) = server.get("prefix_global") {
Some(try!(parse_as_str(v, "prefix_global", parse_prefix)))
} else {
None
};
let prefix_counters = if let Some(v) = server.get("prefix_counters") {
Some(try!(parse_as_str(v, "prefix_counters", parse_prefix)))
} else {
None
};
let prefix_timers = if let Some(v) = server.get("prefix_timers") {
Some(try!(parse_as_str(v, "prefix_timers", parse_prefix)))
} else {
None
};
let prefix_gauges = if let Some(v) = server.get("prefix_gauges") {
Some(try!(parse_as_str(v, "prefix_gauges", parse_prefix)))
} else {
None
};
let timer_percentiles = if let Some(v) = server.get("timer_percentiles") {
Some(try!(parse_as_slice(v, "timer_percentiles", parse_percentiles_from_values)))
} else {
None
};
Ok(FileConfig {
enable_console: enable_console,
enable_graphite: enable_graphite,
flush_period_ms: flush_period_ms,
graphite_addr: graphite_addr,
local_addr: local_addr,
log_level: log_level,
prefix_global: prefix_global,
prefix_counters: prefix_counters,
prefix_timers: prefix_timers,
prefix_gauges: prefix_gauges,
timer_percentiles: timer_percentiles,
})
}
}
impl MergeConfig for FileConfig {
fn merge_to(self, conf: &ServerConfig) -> ServerConfig {
let mut merged = conf.clone();
if let Some(v) = self.enable_console {
merged.enable_console = v;
}
if let Some(v) = self.enable_graphite {
merged.enable_graphite = v;
}
if let Some(v) = self.flush_period_ms {
merged.flush_period_ms = v;
}
if let Some(v) = self.graphite_addr {
merged.graphite_addr = v;
}
if let Some(v) = self.local_addr {
merged.local_addr = v;
}
if let Some(v) = self.log_level {
merged.log_level = v;
}
if let Some(v) = self.prefix_global {
merged.prefix_global = v;
}
if let Some(v) = self.prefix_counters {
merged.prefix_counters = v;
}
if let Some(v) = self.prefix_timers {
merged.prefix_timers = v;
}
if let Some(v) = self.prefix_gauges {
merged.prefix_gauges = v;
}
if let Some(v) = self.timer_percentiles {
merged.timer_percentiles = v;
}
merged
}
}
///
///
///
#[derive(Eq, PartialEq, Debug)]
pub struct CliConfig {
pub enable_console: Option<bool>,
pub enable_graphite: Option<bool>,
pub flush_period_ms: Option<u64>,
pub graphite_addr: Option<SocketAddr>,
pub local_addr: Option<SocketAddr>,
pub log_level: Option<log::LogLevelFilter>,
pub prefix_global: Option<String>,
pub prefix_counters: Option<String>,
pub prefix_timers: Option<String>,
pub prefix_gauges: Option<String>,
pub timer_percentiles: Option<Vec<u8>>,
pub config_file: Option<String>,
}
impl CliConfig {
pub fn from<'a>(matches: ArgMatches<'a>) -> MetricResult<CliConfig> {
let enable_console = if matches.is_present("enable-console") {
Some(true)
} else if matches.is_present("disable-console") {
Some(false)
} else {
None
};
let enable_graphite = if matches.is_present("enable-graphite") {
Some(true)
} else if matches.is_present("disable-graphite") {
Some(false)
} else {
None
};
let flush_period_ms = if let Some(v) = matches.value_of("flush-period-ms") {
Some(try!(parse_flush_period_from_str(v, "flush-period-ms")))
} else {
None
};
let graphite_addr = if let Some(v) = matches.value_of("graphite-addr") {
Some(try!(parse_addr(v, "graphite-addr")))
} else {
None
};
let local_addr = if let Some(v) = matches.value_of("local-addr") {
Some(try!(parse_addr(v, "local-addr")))
} else {
None
};
let log_level = if matches.is_present("debug") {
Some(log::LogLevelFilter::Debug)
} else if matches.is_present("verbose") {
Some(log::LogLevelFilter::Info)
} else if matches.is_present("quiet") {
Some(log::LogLevelFilter::Error)
} else {
None
};
let prefix_global = matches.value_of("prefix-global").map(|v| v.to_string());
let prefix_counters = matches.value_of("prefix-counters").map(|v| v.to_string());
let prefix_timers = matches.value_of("prefix-timers").map(|v| v.to_string());
let prefix_gauges = matches.value_of("prefix-gauges").map(|v| v.to_string());
let timer_percentiles = if let Some(v) = matches.value_of("timer-percentiles") {
Some(try!(parse_percentiles_from_str(v, "timer-percentiles")))
} else {
None
};
let config_file = matches.value_of("config").map(|v| v.to_string());
Ok(CliConfig {
enable_console: enable_console,
enable_graphite: enable_graphite,
flush_period_ms: flush_period_ms,
graphite_addr: graphite_addr,
local_addr: local_addr,
log_level: log_level,
prefix_global: prefix_global,
prefix_counters: prefix_counters,
prefix_timers: prefix_timers,
prefix_gauges: prefix_gauges,
timer_percentiles: timer_percentiles,
config_file: config_file,
})
}
}
impl MergeConfig for CliConfig {
fn merge_to(self, conf: &ServerConfig) -> ServerConfig {
let mut merged = conf.clone();
if let Some(v) = self.enable_console {
merged.enable_console = v;
}
if let Some(v) = self.enable_graphite {
merged.enable_graphite = v;
}
if let Some(v) = self.flush_period_ms {
merged.flush_period_ms = v;
}
if let Some(v) = self.graphite_addr {
merged.graphite_addr = v;
}
if let Some(v) = self.local_addr {
merged.local_addr = v;
}
if let Some(v) = self.log_level {
merged.log_level = v;
}
if let Some(v) = self.prefix_global {
merged.prefix_global = v;
}
if let Some(v) = self.prefix_counters {
merged.prefix_counters = v;
}
if let Some(v) = self.prefix_timers {
merged.prefix_timers = v;
}
if let Some(v) = self.prefix_gauges {
merged.prefix_gauges = v;
}
if let Some(v) = self.timer_percentiles {
merged.timer_percentiles = v;
}
merged
}
}
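
Editor's sketch (not part of the module above): the three sources compose through MergeConfig, later layers overriding earlier ones. The function name and the way `table` and `matches` are obtained are assumptions.

fn effective_config(table: &toml::Table, matches: ArgMatches) -> MetricResult<ServerConfig> {
    // Later layers win: CLI flags over file values over built-in defaults.
    let defaults = ServerConfig::default();
    let with_file = FileConfig::from(table)?.merge_to(&defaults);
    Ok(CliConfig::from(matches)?.merge_to(&with_file))
}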

File: denominations.rs

// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::*;
#[inline]
/// 1 Ether in Wei
pub fn ether() -> U256 { U256::exp10(18) }
#[inline]
/// 1 Finney in Wei
pub fn finney() -> U256 { U256::exp10(15) }
#[inline]
/// 1 Szabo in Wei
pub fn szabo() -> U256 { U256::exp10(12) }
#[inline]
/// 1 Shannon in Wei
pub fn shannon() -> U256 { U256::exp10(9) }
#[inline]
/// 1 Wei in Wei
pub fn wei() -> U256 { U256::exp10(0) }
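
Editor's sketch (not part of the original file): each denomination is a power of ten in wei, so mixed amounts are plain U256 arithmetic. Assumes the From<u64> impl on U256.

fn one_and_a_half_ether() -> U256 {
    // 1.5 ether = 1 ether + 500 finney = 15 * 10^17 wei
    ether() + U256::from(500u64) * finney()
}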

File: sensor.py

"""Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
    attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration

File: vue-clipboard.d.ts

declare module 'vue-clipboard' {
import Vue, { PluginFunction } from 'vue'
module "vue/types/vue" {
interface Vue {
$copy(text: string): boolean;
}
}
class VueClipboard {
static install: PluginFunction<never>
}
}
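
Editor's sketch (not from the original file): with the module augmentation above, installing the plugin makes $copy available on component instances. The import path is an assumption.

import Vue from 'vue'
import VueClipboard from 'vue-clipboard'

Vue.use(VueClipboard)
// Inside a component method the augmented type checks:
// const copied: boolean = this.$copy('hello')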

File: main.go

/*
Command lmdb_copy is a clone of mdb_copy that copies an LMDB environment. A
consistent copy is made even if the source database is in use.
Command line flags mirror the flags for the original program. For information
about, run lmdb_copy with the -h flag.
lmdb_copy -h
*/
package main
import (
"flag"
"log"
"os"
"github.com/bmatsuo/lmdb-go/internal/lmdbcmd"
"github.com/bmatsuo/lmdb-go/lmdb"
)
func main() {
opt := &Options{}
flag.BoolVar(&opt.Compact, "c", false, "Compact while copying.")
flag.Parse()
lmdbcmd.PrintVersion()
if flag.NArg() > 2 {
log.Fatalf("too many arguments specified")<|fim▁hole|> if flag.NArg() == 0 {
log.Fatalf("at least one argument must be specified")
}
var srcpath, dstpath string
srcpath = flag.Arg(0)
if flag.NArg() > 1 {
dstpath = flag.Arg(1)
}
copyEnv(srcpath, dstpath, opt)
}
// Options contain the command line options for an lmdb_copy command.
type Options struct {
Compact bool
}
func copyEnv(srcpath, dstpath string, opt *Options) error {
env, err := lmdb.NewEnv()
if err != nil {
return err
}
err = env.Open(srcpath, lmdbcmd.OpenFlag(), 0644)
if err != nil {
return err
}
var flags uint
if opt != nil && opt.Compact {
flags |= lmdb.CopyCompact
}
if dstpath != "" {
return env.CopyFlag(dstpath, flags)
}
fd := os.Stdout.Fd()
return env.CopyFDFlag(fd, flags)
}
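
Editor's note (not in the original source): following the flags defined above, a compacting copy is invoked as `lmdb_copy -c src-dir dst-dir` (paths are placeholders); with only a source argument, the copy is written to standard output via CopyFDFlag.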

File: wsgi.py

"""
WSGI config for ShroomsAPI project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ShroomsAPI.settings")
application = get_wsgi_application()
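
Editor's sketch (not part of the project): any WSGI server can serve this callable; for a local smoke test the standard library's wsgiref works, assuming the Django settings module is importable.

from wsgiref.simple_server import make_server

from ShroomsAPI.wsgi import application

# Serves on http://localhost:8000/ until interrupted.
with make_server("", 8000, application) as httpd:
    httpd.serve_forever()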

File: _familysrc.py

import _plotly_utils.basevalidators
class FamilysrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="familysrc", parent_name="barpolar.hoverlabel.font", **kwargs
):
super(FamilysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)

File: __MIDL___MIDL_itf_shobjidl_0212_0001.hpp

// This file was auto-generated by a plugin for IDA Pro (x64 only). Please don't change it manually.
#pragma once
#include <common/common.h>
START_ATF_NAMESPACE
typedef SLR_FLAGS __MIDL___MIDL_itf_shobjidl_0212_0001;
END_ATF_NAMESPACE

File: _implicit_psolver1.py

from pycp2k.inputsection import InputSection
from ._dielectric_cube1 import _dielectric_cube1
from ._dirichlet_bc_cube1 import _dirichlet_bc_cube1
from ._dirichlet_cstr_charge_cube1 import _dirichlet_cstr_charge_cube1

class _implicit_psolver1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.DIELECTRIC_CUBE = _dielectric_cube1()
self.DIRICHLET_BC_CUBE = _dirichlet_bc_cube1()
self.DIRICHLET_CSTR_CHARGE_CUBE = _dirichlet_cstr_charge_cube1()
self._name = "IMPLICIT_PSOLVER"
self._subsections = {'DIRICHLET_BC_CUBE': 'DIRICHLET_BC_CUBE', 'DIRICHLET_CSTR_CHARGE_CUBE': 'DIRICHLET_CSTR_CHARGE_CUBE', 'DIELECTRIC_CUBE': 'DIELECTRIC_CUBE'}
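
Editor's sketch (not part of the generated file): pycp2k exposes subsections as attributes on the section object, so nested settings are reached directly. The import path is an assumption.

from pycp2k.classes._implicit_psolver1 import _implicit_psolver1  # hypothetical path

sec = _implicit_psolver1()
print(sec._name)                 # "IMPLICIT_PSOLVER"
print(sorted(sec._subsections))  # the three *_CUBE subsection keys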

File: files.rs

use std::ffi;
use std::fs;
use std::io::Read;
use std::io::Write;
use std::path;
use crate::error::Result;
use failure::ResultExt;
use ignore::gitignore::{Gitignore, GitignoreBuilder};
use ignore::Match;
use log::debug;
use log::trace;
use normalize_line_endings::normalized;
use walkdir::{DirEntry, WalkDir};
pub struct FilesBuilder {
root_dir: path::PathBuf,
subtree: Option<path::PathBuf>,
ignore: Vec<String>,
ignore_hidden: bool,
extensions: Vec<ffi::OsString>,
}
impl FilesBuilder {
pub fn new<R: Into<path::PathBuf>>(root_dir: R) -> Result<Self> {
Self::new_from_path(root_dir.into())
}
fn new_from_path(root_dir: path::PathBuf) -> Result<Self> {
let builder = FilesBuilder {
root_dir,
subtree: Default::default(),
ignore: Default::default(),
ignore_hidden: true,
extensions: Default::default(),
};
Ok(builder)
}
pub fn add_ignore(&mut self, line: &str) -> Result<&mut Self> {
trace!("{:?}: adding '{}' ignore pattern", self.root_dir, line);
self.ignore.push(line.to_owned());
Ok(self)
}
pub fn ignore_hidden(&mut self, ignore: bool) -> Result<&mut Self> {
self.ignore_hidden = ignore;
Ok(self)
}
pub fn limit(&mut self, subtree: path::PathBuf) -> Result<&mut Self> {
self.subtree = Some(subtree);
Ok(self)
}
pub fn add_extension(&mut self, ext: &str) -> Result<&mut FilesBuilder> {
trace!("{:?}: adding '{}' extension", self.root_dir, ext);
self.extensions.push(ext.into());
Ok(self)
}
pub fn build(&self) -> Result<Files> {
let mut ignore = GitignoreBuilder::new(&self.root_dir);
if self.ignore_hidden {
ignore.add_line(None, ".*")?;
ignore.add_line(None, "_*")?;
}
for line in &self.ignore {
ignore.add_line(None, line)?;
}
let ignore = ignore.build()?;
let files = Files {
root_dir: self.root_dir.clone(),
subtree: self
.subtree
.as_ref()
.map(|subtree| self.root_dir.join(subtree)),
ignore,
extensions: self.extensions.clone(),
};
Ok(files)
}
}
pub struct FilesIterator<'a> {
inner: Box<dyn Iterator<Item = path::PathBuf> + 'a>,
}
impl<'a> FilesIterator<'a> {
fn new(files: &'a Files) -> FilesIterator<'a> {
let walker = WalkDir::new(files.root_dir.as_path())
.min_depth(1)
.follow_links(false)
.sort_by(|a, b| a.file_name().cmp(b.file_name()))
.into_iter()
.filter_entry(move |e| files.includes_entry(e))
.filter_map(|e| e.ok())
.filter(|e| e.file_type().is_file())
.map(move |e| e.path().to_path_buf());
FilesIterator {
inner: Box::new(walker),
}
}
}
impl<'a> Iterator for FilesIterator<'a> {
type Item = path::PathBuf;
fn next(&mut self) -> Option<path::PathBuf> {
self.inner.next()
}
}
#[derive(Debug, Clone)]
pub struct Files {
root_dir: path::PathBuf,
subtree: Option<path::PathBuf>,
ignore: Gitignore,
extensions: Vec<ffi::OsString>,
}
impl Files {
pub fn root(&self) -> &path::Path {
&self.root_dir
}
pub fn subtree(&self) -> &path::Path {
self.subtree
.as_deref()
.unwrap_or_else(|| self.root_dir.as_path())
}
pub fn includes_file(&self, file: &path::Path) -> bool {
if !self.ext_contains(file) {
return false;
}
let is_dir = false;
if let Some(ref subtree) = self.subtree {
if !file.starts_with(subtree) {
return false;
}
}
self.includes_path(file, is_dir)
}
#[cfg(test)]
pub fn includes_dir(&self, dir: &path::Path) -> bool {
let is_dir = true;
if let Some(ref subtree) = self.subtree {
if !dir.starts_with(subtree) {
return false;
}
}
self.includes_path(dir, is_dir)
}
pub fn files(&self) -> FilesIterator<'_> {
FilesIterator::new(self)
}
fn ext_contains(&self, file: &path::Path) -> bool {
if self.extensions.is_empty() {
return true;
}
file.extension()
.map(|ext| self.extensions.iter().any(|e| e == ext))
.unwrap_or(false)
}
fn includes_entry(&self, entry: &DirEntry) -> bool {
let file = entry.path();
let is_dir = entry.file_type().is_dir();
if !is_dir && !self.ext_contains(file) {
return false;
}
if let Some(ref subtree) = self.subtree {
if !file.starts_with(subtree) {
return false;
}
}
// Assumption: The parent paths will have been checked before we even get to this point.
self.includes_path_leaf(file, is_dir)
}
fn includes_path(&self, path: &path::Path, is_dir: bool) -> bool {
if path == self.root_dir {
return true;
}
let parent = path.parent();
if let Some(mut parent) = parent {
if parent.starts_with(&self.root_dir) {
// HACK: Gitignore seems to act differently on Windows/Linux, so putting this in to
// get them to act the same
if parent == path::Path::new(".") {
parent = path::Path::new("./");
}
if !self.includes_path(parent, parent.is_dir()) {
return false;
}
}
}
self.includes_path_leaf(path, is_dir)
}
fn includes_path_leaf(&self, path: &path::Path, is_dir: bool) -> bool {
match self.ignore.matched(path, is_dir) {
Match::None => true,
Match::Ignore(glob) => {
trace!("{:?}: ignored {:?}", path, glob.original());
false
}
Match::Whitelist(glob) => {
trace!("{:?}: allowed {:?}", path, glob.original());
true
}
}
}
}
impl<'a> IntoIterator for &'a Files {
type Item = path::PathBuf;
type IntoIter = FilesIterator<'a>;
fn into_iter(self) -> FilesIterator<'a> {
self.files()
}
}
pub fn find_project_file<P: Into<path::PathBuf>>(dir: P, name: &str) -> Option<path::PathBuf> {
find_project_file_internal(dir.into(), name)
}
fn find_project_file_internal(dir: path::PathBuf, name: &str) -> Option<path::PathBuf> {
let mut file_path = dir;
file_path.push(name);
while !file_path.exists() {
file_path.pop(); // filename
let hit_bottom = !file_path.pop();
if hit_bottom {
return None;
}
file_path.push(name);
}
Some(file_path)
}
pub fn cleanup_path(path: &str) -> String {
let stripped = path.trim_start_matches("./");
if stripped == "." {
String::new()
} else {
stripped.to_owned()
}
}
pub fn read_file<P: AsRef<path::Path>>(path: P) -> Result<String> {
let mut file = fs::File::open(path.as_ref())?;
let mut text = String::new();
file.read_to_string(&mut text)?;
let text: String = normalized(text.chars()).collect();
Ok(text)
}
pub fn copy_file(src_file: &path::Path, dest_file: &path::Path) -> Result<()> {
// create target directories if any exist
if let Some(parent) = dest_file.parent() {
fs::create_dir_all(parent)
.with_context(|_| failure::format_err!("Could not create {}", parent.display()))?;
}
debug!(
"Copying `{}` to `{}`",
src_file.display(),
dest_file.display()
);
fs::copy(src_file, dest_file).with_context(|_| {
failure::format_err!(
"Could not copy {} into {}",
src_file.display(),
dest_file.display()
)
})?;
Ok(())
}
pub fn write_document_file<S: AsRef<str>, P: AsRef<path::Path>>(
content: S,
dest_file: P,
) -> Result<()> {
write_document_file_internal(content.as_ref(), dest_file.as_ref())
}
fn write_document_file_internal(content: &str, dest_file: &path::Path) -> Result<()> {
// create target directories if any exist
if let Some(parent) = dest_file.parent() {
fs::create_dir_all(parent)
.with_context(|_| failure::format_err!("Could not create {}", parent.display()))?;
}
let mut file = fs::File::create(dest_file)
.with_context(|_| failure::format_err!("Could not create {}", dest_file.display()))?;
file.write_all(content.as_bytes())?;
trace!("Wrote {}", dest_file.display());
Ok(())
}
#[cfg(test)]
mod tests {
#![allow(clippy::bool_assert_comparison)]
use super::*;
macro_rules! assert_includes_dir {
($root:expr, $ignores:expr, $test:expr, $included:expr) => {
let mut files = FilesBuilder::new(path::Path::new($root)).unwrap();
let ignores: &[&str] = $ignores;
for ignore in ignores {
files.add_ignore(ignore).unwrap();
}
let files = files.build().unwrap();
assert_eq!(files.includes_dir(path::Path::new($test)), $included);
};
}
macro_rules! assert_includes_file {
($root:expr, $ignores:expr, $test:expr, $included:expr) => {
let mut files = FilesBuilder::new(path::Path::new($root)).unwrap();
let ignores: &[&str] = $ignores;
for ignore in ignores {
files.add_ignore(ignore).unwrap();
}
let files = files.build().unwrap();
assert_eq!(files.includes_file(path::Path::new($test)), $included);
};
}
#[test]
fn files_includes_root_dir() {
assert_includes_dir!("/usr/cobalt/site", &[], "/usr/cobalt/site", true);
assert_includes_dir!("./", &[], "./", true);
}
#[test]
fn files_includes_child_dir() {
assert_includes_dir!("/usr/cobalt/site", &[], "/usr/cobalt/site/child", true);
assert_includes_dir!("./", &[], "./child", true);
}
#[test]
fn files_excludes_hidden_dir() {
assert_includes_dir!("/usr/cobalt/site", &[], "/usr/cobalt/site/_child", false);
assert_includes_dir!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/_child",
false
);
assert_includes_dir!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/_child/child",
false
);
assert_includes_dir!("./", &[], "./_child", false);
assert_includes_dir!("./", &[], "./child/_child", false);
assert_includes_dir!("./", &[], "./_child/child", false);
}
#[test]
fn files_excludes_dot_dir() {
assert_includes_dir!("/usr/cobalt/site", &[], "/usr/cobalt/site/.child", false);
assert_includes_dir!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/.child",
false
);
assert_includes_dir!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/.child/child",
false
);
assert_includes_dir!("./", &[], "./.child", false);
assert_includes_dir!("./", &[], "./child/.child", false);
assert_includes_dir!("./", &[], "./.child/child", false);
}
#[test]
fn files_includes_file() {
assert_includes_file!("/usr/cobalt/site", &[], "/usr/cobalt/site/child.txt", true);
assert_includes_file!("./", &[], "./child.txt", true);
}
#[test]
fn files_includes_child_dir_file() {
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/child.txt",
true
);
assert_includes_file!("./", &[], "./child/child.txt", true);
}
#[test]
fn files_excludes_hidden_file() {
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/_child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/_child.txt",
false
);
assert_includes_file!("./", &[], "./_child.txt", false);
assert_includes_file!("./", &[], "./child/_child.txt", false);
}
#[test]
fn files_excludes_hidden_dir_file() {
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/_child/child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/_child/child.txt",
false
);
assert_includes_file!("./", &[], "./_child/child.txt", false);
assert_includes_file!("./", &[], "./child/_child/child.txt", false);
}
#[test]
fn files_excludes_dot_file() {
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/.child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/.child.txt",
false
);
assert_includes_file!("./", &[], "./.child.txt", false);
assert_includes_file!("./", &[], "./child/.child.txt", false);
}
#[test]
fn files_excludes_dot_dir_file() {
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/.child/child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
&[],
"/usr/cobalt/site/child/.child/child.txt",
false
);
assert_includes_file!("./", &[], "./.child/child.txt", false);
assert_includes_file!("./", &[], "./child/.child/child.txt", false);
}
#[test]
fn files_excludes_ignored_file() {
let ignores = &["README", "**/*.scss"];
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/README",
false
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/README",
false
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/blog.scss",
false
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/blog.scss",
false
);
assert_includes_file!("./", ignores, "./README", false);
assert_includes_file!("./", ignores, "./child/README", false);
assert_includes_file!("./", ignores, "./blog.scss", false);
assert_includes_file!("./", ignores, "./child/blog.scss", false);
}
#[test]
fn files_includes_overridden_file() {
let ignores = &["!.htaccess"];
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/.htaccess",
true
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/.htaccess",
true
);
assert_includes_file!("./", ignores, "./.htaccess", true);
assert_includes_file!("./", ignores, "./child/.htaccess", true);
}
#[test]
fn files_includes_overridden_dir() {
let ignores = &[
"!/_posts",
"!/_posts/**",
"/_posts/**/_*",
"/_posts/**/_*/**",
];
assert_includes_dir!("/usr/cobalt/site", ignores, "/usr/cobalt/site/_posts", true);
assert_includes_dir!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child",
true
);
assert_includes_dir!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/_posts",
false
);
assert_includes_dir!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/_posts/child",
false
);
assert_includes_dir!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child/_child",
false
);
assert_includes_dir!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child/_child/child",
false
);
assert_includes_dir!("./", ignores, "./_posts", true);
assert_includes_dir!("./", ignores, "./_posts/child", true);
assert_includes_dir!("./", ignores, "./child/_posts", false);
assert_includes_dir!("./", ignores, "./child/_posts/child", false);
assert_includes_dir!("./", ignores, "./_posts/child/_child", false);
assert_includes_dir!("./", ignores, "./_posts/child/_child/child", false);
}
#[test]
fn files_includes_overridden_dir_file() {
let ignores = &[
"!/_posts",
"!/_posts/**",
"/_posts/**/_*",
"/_posts/**/_*/**",
];
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child.txt",
true
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child/child.txt",
true
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/_posts/child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/child/_posts/child/child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child/_child.txt",
false
);
assert_includes_file!(
"/usr/cobalt/site",
ignores,
"/usr/cobalt/site/_posts/child/_child/child.txt",
false
);
assert_includes_file!("./", ignores, "./_posts/child.txt", true);
assert_includes_file!("./", ignores, "./_posts/child/child.txt", true);
assert_includes_file!("./", ignores, "./child/_posts/child.txt", false);
assert_includes_file!("./", ignores, "./child/_posts/child/child.txt", false);
assert_includes_file!("./", ignores, "./_posts/child/_child.txt", false);
assert_includes_file!("./", ignores, "./_posts/child/_child/child.txt", false);
}
#[test]
fn files_includes_limit() {
let root = "/usr/cobalt/site";
let limit = "limit";
let files = FilesBuilder::new(path::Path::new(root))
.unwrap()
.limit(limit.into())
.unwrap()
.build()
.unwrap();
assert!(files.includes_file(path::Path::new("/usr/cobalt/site/limit")));
assert!(files.includes_dir(path::Path::new("/usr/cobalt/site/limit")));
assert!(files.includes_file(path::Path::new("/usr/cobalt/site/limit/child")));
assert!(files.includes_dir(path::Path::new("/usr/cobalt/site/limit/child")));
}
#[test]
fn files_includes_limit_outside() {
let root = "/usr/cobalt/site";
let limit = "limit";
let files = FilesBuilder::new(path::Path::new(root))
.unwrap()
.limit(limit.into())
.unwrap()
.build()
.unwrap();
assert!(!files.includes_dir(path::Path::new("/usr/cobalt/site/limit_foo")));
assert!(!files.includes_file(path::Path::new("/usr/cobalt/site/limit_foo")));
assert!(!files.includes_dir(path::Path::new("/usr/cobalt/site/bird")));
assert!(!files.includes_file(path::Path::new("/usr/cobalt/site/bird")));
assert!(!files.includes_dir(path::Path::new("/usr/cobalt/site/bird/limit")));
assert!(!files.includes_file(path::Path::new("/usr/cobalt/site/bird/limit")));
}
#[test]
fn files_iter_matches_include() {
let root_dir = path::Path::new("tests/fixtures/hidden_files");
let files = FilesBuilder::new(root_dir).unwrap().build().unwrap();
let mut actual: Vec<_> = files
.files()
.map(|f| f.strip_prefix(root_dir).unwrap().to_owned())
.collect();
actual.sort();
let expected = vec![
path::Path::new("child/child.txt").to_path_buf(),
path::Path::new("child.txt").to_path_buf(),
];
assert_eq!(expected, actual);
}
#[test]
fn find_project_file_same_dir() {
let actual = find_project_file("tests/fixtures/config", "_cobalt.yml").unwrap();
let expected = path::Path::new("tests/fixtures/config/_cobalt.yml");
assert_eq!(actual, expected);
}
#[test]
fn find_project_file_parent_dir() {
let actual = find_project_file("tests/fixtures/config/child", "_cobalt.yml").unwrap();
let expected = path::Path::new("tests/fixtures/config/_cobalt.yml");
assert_eq!(actual, expected);
}
#[test]
fn find_project_file_doesnt_exist() {
let expected = path::Path::new("<NOT FOUND>");
let actual =
find_project_file("tests/fixtures/", "_cobalt.yml").unwrap_or_else(|| expected.into());
assert_eq!(actual, expected);
}
#[test]
fn cleanup_path_empty() {
assert_eq!(cleanup_path(""), "");
}
#[test]
fn cleanup_path_dot() {
assert_eq!(cleanup_path("."), "");
}
#[test]
fn cleanup_path_current_dir() {
assert_eq!(cleanup_path("./"), "");
}
#[test]
fn cleanup_path_current_dir_extreme() {
assert_eq!(cleanup_path("././././."), "");
}
#[test]
fn cleanup_path_current_dir_child() {
assert_eq!(cleanup_path("./build/file.txt"), "build/file.txt");
}
}
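
Editor's sketch (not part of the original module): a typical caller layers extra ignore patterns and an extension filter on top of the hidden-file defaults, then iterates the walker. The root path and patterns are placeholders.

fn list_markdown(root: &std::path::Path) -> Result<()> {
    let mut builder = FilesBuilder::new(root)?;
    builder.add_ignore("_drafts/**")?;
    builder.add_extension("md")?;
    let files = builder.build()?;
    for path in files.files() {
        println!("{}", path.display());
    }
    Ok(())
}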

File: mod.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic types that share their serialization implementations
//! for both specified and computed values.
use counter_style::{Symbols, parse_counter_style_name};
use cssparser::Parser;
use parser::{Parse, ParserContext};
use std::fmt;
use style_traits::{Comma, OneOrMoreSeparated, ParseError, StyleParseError, ToCss};
use super::CustomIdent;
use values::specified::url::SpecifiedUrl;
pub mod background;
pub mod basic_shape;
pub mod border;
pub mod effects;
pub mod flex;
#[cfg(feature = "gecko")]
pub mod gecko;
pub mod grid;
pub mod image;
pub mod position;
pub mod rect;
pub mod text;
pub mod transform;
// https://drafts.csswg.org/css-counter-styles/#typedef-symbols-type
define_css_keyword_enum! { SymbolsType:
"cyclic" => Cyclic,
"numeric" => Numeric,
"alphabetic" => Alphabetic,
"symbolic" => Symbolic,
"fixed" => Fixed,
}
add_impls_for_keyword_enum!(SymbolsType);
#[cfg(feature = "gecko")]
impl SymbolsType {
/// Convert symbols type to their corresponding Gecko values.
pub fn to_gecko_keyword(self) -> u8 {
use gecko_bindings::structs;
match self {
SymbolsType::Cyclic => structs::NS_STYLE_COUNTER_SYSTEM_CYCLIC as u8,
SymbolsType::Numeric => structs::NS_STYLE_COUNTER_SYSTEM_NUMERIC as u8,
SymbolsType::Alphabetic => structs::NS_STYLE_COUNTER_SYSTEM_ALPHABETIC as u8,
SymbolsType::Symbolic => structs::NS_STYLE_COUNTER_SYSTEM_SYMBOLIC as u8,
SymbolsType::Fixed => structs::NS_STYLE_COUNTER_SYSTEM_FIXED as u8,
}
}
}
/// https://drafts.csswg.org/css-counter-styles/#typedef-counter-style
///
/// Since wherever <counter-style> is used, 'none' is a valid value as
/// well, we combine them into one type to make code simpler.
#[derive(Clone, Debug, Eq, PartialEq, ToCss)]
pub enum CounterStyleOrNone {
/// `none`
None,
/// `<counter-style-name>`
Name(CustomIdent),
/// `symbols()`
#[css(function)]
Symbols(SymbolsType, Symbols),
}
impl CounterStyleOrNone {
/// disc value
pub fn disc() -> Self {
CounterStyleOrNone::Name(CustomIdent(atom!("disc")))
}
/// decimal value
pub fn decimal() -> Self {
CounterStyleOrNone::Name(CustomIdent(atom!("decimal")))
}
}
no_viewport_percentage!(CounterStyleOrNone);
impl Parse for CounterStyleOrNone {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if let Ok(name) = input.try(|i| parse_counter_style_name(i)) {
return Ok(CounterStyleOrNone::Name(name));
}
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(CounterStyleOrNone::None);
}
if input.try(|i| i.expect_function_matching("symbols")).is_ok() {
return input.parse_nested_block(|input| {
let symbols_type = input.try(|i| SymbolsType::parse(i))
.unwrap_or(SymbolsType::Symbolic);
let symbols = Symbols::parse(context, input)?;
// There must be at least two symbols for alphabetic or
// numeric system.
if (symbols_type == SymbolsType::Alphabetic ||
symbols_type == SymbolsType::Numeric) && symbols.0.len() < 2 {
return Err(StyleParseError::UnspecifiedError.into());
}
// Identifier is not allowed in symbols() function.
if symbols.0.iter().any(|sym| !sym.is_allowed_in_symbols()) {
return Err(StyleParseError::UnspecifiedError.into());
}
Ok(CounterStyleOrNone::Symbols(symbols_type, symbols))
});
}
Err(StyleParseError::UnspecifiedError.into())
}
}
/// A settings tag, defined by a four-character tag and a setting value
///
/// For font-feature-settings, this is a tag and an integer,
/// for font-variation-settings this is a tag and a float
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct FontSettingTag<T> {
/// A four-character tag, packed into a u32 (one byte per character)
pub tag: u32,
/// The value
pub value: T,
}
impl<T> OneOrMoreSeparated for FontSettingTag<T> {
type S = Comma;
}
impl<T: ToCss> ToCss for FontSettingTag<T> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
use byteorder::{BigEndian, ByteOrder};
use std::str;
let mut raw = [0u8; 4];
BigEndian::write_u32(&mut raw, self.tag);
str::from_utf8(&raw).unwrap_or_default().to_css(dest)?;
self.value.to_css(dest)
}
}
impl<T: Parse> Parse for FontSettingTag<T> {
/// https://www.w3.org/TR/css-fonts-3/#propdef-font-feature-settings
/// https://drafts.csswg.org/css-fonts-4/#low-level-font-variation-
/// settings-control-the-font-variation-settings-property
/// <string> [ on | off | <integer> ]
/// <string> <number>
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
use byteorder::{ReadBytesExt, BigEndian};
use std::io::Cursor;
let u_tag;
{
let tag = input.expect_string()?;
// allowed strings of length 4 containing chars: <U+20, U+7E>
if tag.len() != 4 ||
tag.chars().any(|c| c < ' ' || c > '~')
{
return Err(StyleParseError::UnspecifiedError.into())
}
let mut raw = Cursor::new(tag.as_bytes());
u_tag = raw.read_u32::<BigEndian>().unwrap();
}
Ok(FontSettingTag { tag: u_tag, value: T::parse(context, input)? })
}
}
/// A font settings value for font-variation-settings or font-feature-settings
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Debug, Eq, PartialEq, ToCss)]
pub enum FontSettings<T> {
/// No settings (default)
Normal,
/// Set of settings
Tag(Vec<FontSettingTag<T>>)
}
impl<T: Parse> Parse for FontSettings<T> {
/// https://www.w3.org/TR/css-fonts-3/#propdef-font-feature-settings
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if input.try(|i| i.expect_ident_matching("normal")).is_ok() {
return Ok(FontSettings::Normal);
}
Vec::parse(context, input).map(FontSettings::Tag)
}
}
/// An integer that can also parse "on" and "off",<|fim▁hole|>/// because it serializes with the preceding space
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct FontSettingTagInt(pub u32);
/// A number value to be used for font-variation-settings
///
/// Do not use this type anywhere except within FontSettings
/// because it serializes with the preceding space
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct FontSettingTagFloat(pub f32);
impl ToCss for FontSettingTagInt {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match self.0 {
1 => Ok(()),
0 => dest.write_str(" off"),
x => write!(dest, " {}", x)
}
}
}
impl Parse for FontSettingTagInt {
fn parse<'i, 't>(_context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if let Ok(value) = input.try(|input| input.expect_integer()) {
// handle integer, throw if it is negative
if value >= 0 {
Ok(FontSettingTagInt(value as u32))
} else {
Err(StyleParseError::UnspecifiedError.into())
}
} else if let Ok(_) = input.try(|input| input.expect_ident_matching("on")) {
// on is an alias for '1'
Ok(FontSettingTagInt(1))
} else if let Ok(_) = input.try(|input| input.expect_ident_matching("off")) {
// off is an alias for '0'
Ok(FontSettingTagInt(0))
} else {
// empty value is an alias for '1'
Ok(FontSettingTagInt(1))
}
}
}
impl Parse for FontSettingTagFloat {
fn parse<'i, 't>(_: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
input.expect_number().map(FontSettingTagFloat).map_err(|e| e.into())
}
}
impl ToCss for FontSettingTagFloat {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_str(" ")?;
self.0.to_css(dest)
}
}
/// An SVG paint value
///
/// https://www.w3.org/TR/SVG2/painting.html#SpecifyingPaint
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Debug, PartialEq, ToAnimatedValue, ToComputedValue, ToCss)]
pub struct SVGPaint<ColorType> {
/// The paint source
pub kind: SVGPaintKind<ColorType>,
/// The fallback color
pub fallback: Option<ColorType>,
}
/// An SVG paint value without the fallback
///
/// Whereas the spec only allows PaintServer
/// to have a fallback, Gecko lets the context
/// properties have a fallback as well.
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Debug, PartialEq, ToAnimatedValue, ToComputedValue, ToCss)]
pub enum SVGPaintKind<ColorType> {
/// `none`
None,
/// `<color>`
Color(ColorType),
/// `url(...)`
PaintServer(SpecifiedUrl),
/// `context-fill`
ContextFill,
/// `context-stroke`
ContextStroke,
}
impl<ColorType> SVGPaintKind<ColorType> {
/// Parse a keyword value only
fn parse_ident<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
try_match_ident_ignore_ascii_case! { input.expect_ident()?,
"none" => Ok(SVGPaintKind::None),
"context-fill" => Ok(SVGPaintKind::ContextFill),
"context-stroke" => Ok(SVGPaintKind::ContextStroke),
}
}
}
/// Parse SVGPaint's fallback.
/// fallback is keyword(none) or Color.
/// https://svgwg.org/svg2-draft/painting.html#SpecifyingPaint
fn parse_fallback<'i, 't, ColorType: Parse>(context: &ParserContext,
input: &mut Parser<'i, 't>)
-> Option<ColorType> {
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
None
} else {
input.try(|i| ColorType::parse(context, i)).ok()
}
}
impl<ColorType: Parse> Parse for SVGPaint<ColorType> {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if let Ok(url) = input.try(|i| SpecifiedUrl::parse(context, i)) {
Ok(SVGPaint {
kind: SVGPaintKind::PaintServer(url),
fallback: parse_fallback(context, input),
})
} else if let Ok(kind) = input.try(SVGPaintKind::parse_ident) {
if let SVGPaintKind::None = kind {
Ok(SVGPaint {
kind: kind,
fallback: None,
})
} else {
Ok(SVGPaint {
kind: kind,
fallback: parse_fallback(context, input),
})
}
} else if let Ok(color) = input.try(|i| ColorType::parse(context, i)) {
Ok(SVGPaint {
kind: SVGPaintKind::Color(color),
fallback: None,
})
} else {
Err(StyleParseError::UnspecifiedError.into())
}
}
}

File: cliente.py

'''
Created on 20/02/2009
@author: Chuidiang
Example socket client.
Connects to the server, sends "hola", receives and prints the
reply, waits 2 seconds, sends "adios", receives and prints the
reply, and closes the connection.
'''
import socket
#import time
if __name__ == '__main__':
# Establish the connection
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("127.0.0.1", 8000))
cond = True
while cond:
#s.send(str(665))
#a=s.recv(1000)
#print a
b = raw_input(">. ")
#print type(b)
if (b != ''):
b = int(b)
if (b <= 255) and (b >= 0):
s.send(str(b))
elif (b == 666):
cond = False
s.send(str(b))
s.close()
print "nojoda"
s.close()
elif (b == 665):
s.send(str(b))
a = s.recv(1000)
print a
else:
print "el rango debe ser entre 0 y 255, 666 es para cerrar"
# Se envia "hola"
#s.send("hola")
# Se recibe la respuesta y se escribe en pantalla
#datos = s.recv(1000)
#print datos
#
## Espera de 2 segundos
#time.sleep(2)
#
## Se envia "adios"
#s.send("adios")
#
## Se espera respuesta, se escribe en pantalla y se cierra la
## conexion
#datos = s.recv(1000)
#print datos
#s.close()
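
Editor's sketch (not part of the original code): the matching server is not included; a minimal counterpart honoring the same ad-hoc protocol (values 0-255 forwarded as text, 666 ends the session, 665 expects a reply) could look like this in Python 3.

import socket

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(("127.0.0.1", 8000))
srv.listen(1)
conn, _addr = srv.accept()
while True:
    data = conn.recv(1000).decode()
    if not data or data == "666":   # client asked to close
        break
    if data == "665":               # client expects a reply
        conn.send(b"ack")           # hypothetical reply payload
    # values 0-255 would be handled here
conn.close()
srv.close()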

File: ConditionMonitors.cpp

/*
Copyright_License {
Top Hat Soaring Glide Computer - http://www.tophatsoaring.org/
Copyright (C) 2000-2016 The Top Hat Soaring Project
A detailed list of copyright holders can be found in the file "AUTHORS".
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
}
*/
#include "ConditionMonitors.hpp"
#include "ConditionMonitorAATTime.hpp"
#include "ConditionMonitorFinalGlide.hpp"
#include "ConditionMonitorGlideTerrain.hpp"
#include "ConditionMonitorLandableReachable.hpp"
#include "ConditionMonitorSunset.hpp"
#include "ConditionMonitorWind.hpp"
static ConditionMonitorWind cm_wind;
static ConditionMonitorFinalGlide cm_finalglide;
static ConditionMonitorSunset cm_sunset;
static ConditionMonitorAATTime cm_aattime;
static ConditionMonitorGlideTerrain cm_glideterrain;
static ConditionMonitorLandableReachable cm_landablereachable;
void
ConditionMonitorsUpdate(const NMEAInfo &basic, const DerivedInfo &calculated,
const ComputerSettings &settings)
{
cm_wind.Update(basic, calculated, settings);
cm_finalglide.Update(basic, calculated, settings);
cm_sunset.Update(basic, calculated, settings);<|fim▁hole|><|fim▁end|> | cm_aattime.Update(basic, calculated, settings);
cm_glideterrain.Update(basic, calculated, settings);
cm_landablereachable.Update(basic, calculated, settings);
} |
<|file_name|>AccelerometerProxy.js<|end_file_name|><|fim▁begin|>/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
(function (win) {<|fim▁hole|> module.exports = {
start: function (successCallback, errorCallback) {
if (accelerometerCallback) {
win.removeEventListener("devicemotion", accelerometerCallback, true);
}
accelerometerCallback = function (motion) {
successCallback({
x: motion.accelerationIncludingGravity.x,
y: motion.accelerationIncludingGravity.y,
z: motion.accelerationIncludingGravity.z,
timestamp: new Date().getTime()
});
};
win.addEventListener("devicemotion", accelerometerCallback, true);
},
stop: function (successCallback, errorCallback) {
win.removeEventListener("devicemotion", accelerometerCallback, true);
accelerometerCallback = null;
}
};
require("cordova/tizen/commandProxy").add("Accelerometer", module.exports);
}(window));<|fim▁end|> | var cordova = require('cordova'),
Acceleration = require('cordova-plugin-device-motion.Acceleration'),
accelerometerCallback = null;
|
<|file_name|>rootfindpack.py<|end_file_name|><|fim▁begin|>"""
Root finding methods
====================
Routines in this module:
bisection(f, a, b, eps=1e-5)
newton1(f, df, eps=1e-5)
newtonn(f, J, x0, eps=1e-5)
secant(f, x0, x1, eps=1e-5)
inv_cuadratic_interp(f, a, b, c, eps=1e-5)
lin_fracc_interp(f, a, b, c, eps=1e-5)
broyden(f, x0, B0, eps=1e-5)
"""
import numpy as np
'''
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Copyright (C) 4/24/17 Carlos Brito
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
'''
__all__ = ['bisection', 'newton1', 'secant', 'newtonn',
'inv_cuadratic_interp', 'lin_fracc_interp',
           'brent', 'broyden']
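# Quick usage sketch (illustrative, not part of the original module): each
# routine returns a (root, iterations) pair, e.g.
#   root, iters = bisection(lambda x: x ** 2 - 2., 0., 2.)   # root ~ 1.41421
#   root, iters = secant(lambda x: x ** 2 - 2., 0., 2.)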
def bisection(f, a, b, eps=1e-5, display=False):
"""
Find root of f.
This function computes a root of the function f using the bisection method.
Parameters
----------
f : function
Function we want to find the root of.
a : float
Lower bound.
b : float
High bound.
eps : float
Tolerance.
Returns
-------
m : float
Root of f.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
if a > b:
a, b = b, a
while((b - a) > eps):
        m = a + (b - a) / 2.  # bisect in full precision (np.float32 rounding could stall the loop)
if (np.sign(f(a)) == np.sign(f(m))):
a = m
else:
b = m
if display:
print 'iteration ', iterations
print 'm: ', m
iterations += 1
return m, iterations
def newton1(f, df, x0, eps=1e-5, display=False):
"""
Find root of f.
This method computes the root of f using Newton's method.
Parameters
----------
f : function
Function we want to find the root of.
df : function
Derivative of f.
x0 : float
This is the starting point for the method.
eps : float
Tolerance.
Returns
-------
    root : float
        Root of f.
    iterations : int
        Number of iterations taken to find root.
"""
iterations = 0
x_old = np.float(x0)
x_new = x_old
while(True):
try:
x_old = x_new
x_new = x_old - f(x_old) / df(x_old)
if display:
print 'iteration ', iterations
print 'x: ', x_new
iterations += 1
if(abs(x_old - x_new) <= eps):
break
        except ZeroDivisionError:
            # keep the (root, iterations) return shape on failure
            return np.nan, iterations
root = x_new
return root, iterations
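# Illustrative check (not part of the original module): Newton's method on
# f(x) = x**2 - 2 with df(x) = 2*x converges to sqrt(2) from x0 = 1:
#   root, iters = newton1(lambda x: x ** 2 - 2., lambda x: 2. * x, 1.)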
def secant(f, x0, x1, eps=1e-5, display=False):
"""
    Find root of f.
    This method computes the root of f using the secant method.
    Parameters
----------
f : function
Function we want to find the root of.
x0 : float
First initial value "close" to the root of f.
    x1 : float
Second initial value "close" to the root of f.
eps : float
Tolerance.
Returns
-------
root : float
Root of f.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
x_old_0 = x0
x_old_1 = x1
x_new = x0 - f(x0) * (x1 - x0) / (f(x1) - f(x0))<|fim▁hole|> x_old_0 = x_old_1
x_old_1 = x_new
x_new = x_old_1 - f(x_old_1) * \
((x_old_1 - x_old_0) / (f(x_old_1) - f(x_old_0)))
if display:
print 'iteration ', iterations
print 'x: ', x_new
iterations += 1
if(abs(x_old_1 - x_new) < eps):
break
root = x_new
return root, iterations
def inv_cuadratic_interp(f, a, b, c, eps=1e-5, display=False):
"""
Find root of f.
    This method finds the root of f using the inverse quadratic
interpolation method.
Parameters
----------
f : function
Function we want to find the root of.
a : float
First initial value.
b : float
Second initial value.
c : float
        Third initial value.
    eps : float
        Tolerance.
Returns
-------
root : float
Root of f.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
while True:
u = f(b) / f(c)
v = f(b) / f(a)
w = f(a) / f(c)
p = v * (w * (u - w) * (c - b) - (1 - u) * (b - a))
q = (w - 1) * (u - 1) * (v - 1)
x_new = b + p / q
a = b
b = c
c = x_new
if display:
print 'iteration ', iterations
print 'x: ', x_new
iterations += 1
if(abs(f(x_new)) < eps):
break
root = x_new
return root, iterations
def lin_fracc_interp(f, a, b, c, eps=1e-5, display=False):
"""
Find root of f.
This method finds the root of f using the linear fractional
interpolation method.
Parameters
----------
f : function
Function we want to find the root of.
a : float
First initial value.
b : float
Second initial value.
c : float
        Third initial value.
    eps : float
        Tolerance.
Returns
-------
root : float
Root of f.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
while True:
numerator = (a - c) * (b - c) * (f(a) - f(b)) * f(c)
denominator = (a - c) * (f(c) - f(b)) * f(a) - \
(b - c) * (f(c) - f(a)) * f(b)
h = numerator / denominator
x_new = c + h
a = b
b = c
c = x_new
if display:
print 'iteration ', iterations
print 'x: ', x_new
iterations += 1
if(abs(f(x_new)) < eps):
break
root = x_new
return root, iterations
def broyden(f, x0, B0, eps=1e-5, display=False):
"""
Finds roots for functions of k-variables.
This function utilizes Broyden's method to find roots in a
k-dimensional function f utilizing the initial Jacobian B0
at x0.
Parameters
----------
f : function which takes an array_like matrix and
returns an array_like matrix
Function we want to find the root of.
x0 : array_like
Initial point.
B0 : array_like
Jacobian of function at x0.
eps : float
Error tolerance.
Returns
-------
root : array_like
Root of function.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
x_new = x0
B_new = B0
while True:
x_old = x_new
B_old = B_new
        s = np.dot(np.linalg.inv(B_old), -f(x_old))  # solve B s = -f(x) for the step s
x_new = x_old + s
y = f(x_new) - f(x_old)
        # rank-one Broyden update: B+ = B + ((y - B s) s^T) / (s^T s)
        B_new = B_old + np.outer(y - np.dot(B_old, s), s) / np.dot(s, s)
if display:
print 'iteration ', iterations
print 'x:', x_new
print 'B', B_new
iterations += 1
# convergence check
if(np.all(np.abs(x_old - x_new) <= eps)):
break
root = x_new
return root, iterations
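# Illustrative sketch (not part of the original module): Broyden's method on a
# 2-D system, seeding B0 with the identity matrix as a crude Jacobian guess:
#   F = lambda x: np.array([x[0] + 2. * x[1] - 2.,
#                           x[0] ** 2 + 4. * x[1] ** 2 - 4.])
#   root, iters = broyden(F, np.array([1., 2.]), np.eye(2))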
def newtonn(f, J, x0, eps=1e-5, display=False):
"""
Finds roots for functions of k-variables.
This function utilizes Newton's method for root finding
to find roots in a k-dimensional function. To do this,
it takes the Jacobian of the function and an initial
point.
Parameters
----------
f : function which takes an array_like matrix and
returns an array_like matrix
J : function returning an array_like matrix
Jacobian of function.
x0 : array_like
Initial point.
eps : float
Error tolerance.
Returns
-------
root : array_like
Root of function.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
x_new = x0
try:
while True:
x_old = x_new
x_new = x_old - np.dot(np.linalg.inv(J(x_old)), f(x_old))
if display:
print 'iteration ', iterations
print 'x: ', x_new
iterations += 1
# convergence check
if(np.all(np.abs(x_old - x_new) <= eps)):
break
except np.linalg.LinAlgError:
print 'Error during iteration. Matrix is probably singular'
        return None, iterations  # keep the (root, iterations) return shape
root = x_new
return root, iterations
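# Illustrative sketch (not part of the original module): Newton's method on the
# same 2-D system as above, with its exact Jacobian supplied:
#   F = lambda x: np.array([x[0] + 2. * x[1] - 2.,
#                           x[0] ** 2 + 4. * x[1] ** 2 - 4.])
#   J = lambda x: np.array([[1., 2.],
#                           [2. * x[0], 8. * x[1]]])
#   root, iters = newtonn(F, J, np.array([1., 2.]))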
def brent(f, a, b, eps=1e-5, display=False):
"""
Finds root of a one dimensional function.
This function utilizes Brent's method for root finding
to find roots in a one dimensional function. To do this,
it needs a function and an interval which contains the
root.
Parameters
----------
f : function
Function we want to find the root of.
a : float
Low bound of interval
b : float
High bound of interval
eps : float
Tolerance.
Returns
-------
root : float
Root of function.
iterations : int
Number of iterations taken to find root.
"""
iterations = 0
mflag = False
d = 0.
if f(a) * f(b) >= 0:
raise ValueError('root is not bracketed')
if(abs(f(a)) < abs(f(b))):
        a, b = b, a  # swap values
c = a
mflag = True
while (True):
if f(a) != f(c) and f(b) != f(c):
# inverse quadratic interpolation
s = (a * f(b) * f(c)) / ((f(a) - f(b)) * (f(a) - f(c))) + \
(b * f(a) * f(c)) / ((f(b) - f(a)) * (f(b) - f(c))) + \
(c * f(a) * f(b)) / ((f(c) - f(a)) * (f(c) - f(b)))
else:
# secant method
s = b - f(b) * (b - a) / (f(b) - f(a))
tmp1 = (3. * a + b) / 4.
tmp2 = b
if tmp1 > tmp2:
tmp1, tmp2 = tmp2, tmp1
if not (tmp1 < s < tmp2) or \
            mflag and (abs(s - b)) >= (abs(b - c) / 2.) or \
not mflag and (abs(s - b)) >= (abs(c - d) / 2.) or \
mflag and (abs(b - c)) < abs(eps) or \
not mflag and (abs(c - d)) < abs(eps):
# bisection method
s = (a + b) / 2.
mflag = True
else:
mflag = False
d = c
c = b
if f(a) * f(s) < 0:
b = s
else:
a = s
if abs(f(a)) < abs(f(b)):
a, b = b, a
if display:
print 'iteration: ', iterations
print 'x: ', s
iterations += 1
# convergence check
if f(b) == 0 or f(s) == 0 or (abs(b - a) < eps):
break
root = s
return root, iterations<|fim▁end|> |
while True: |
<|file_name|>test_mincost.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import networkx as nx
from nose.tools import assert_equal, assert_raises
class TestNetworkSimplex:
def test_simple_digraph(self):
G = nx.DiGraph()
G.add_node('a', demand = -5)
G.add_node('d', demand = 5)
G.add_edge('a', 'b', weight = 3, capacity = 4)
G.add_edge('a', 'c', weight = 6, capacity = 10)
G.add_edge('b', 'd', weight = 1, capacity = 9)
G.add_edge('c', 'd', weight = 2, capacity = 5)
flowCost, H = nx.network_simplex(G)
soln = {'a': {'b': 4, 'c': 1},
'b': {'d': 4},
'c': {'d': 1},
'd': {}}
assert_equal(flowCost, 24)
assert_equal(nx.min_cost_flow_cost(G), 24)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 24)
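        # The pattern above is the general recipe (illustrative note): build a
        # DiGraph with node 'demand' and edge 'capacity'/'weight' attributes,
        # then nx.network_simplex(G) returns the (flowCost, flowDict) pair.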
def test_negcycle_infcap(self):
G = nx.DiGraph()
G.add_node('s', demand = -5)
G.add_node('t', demand = 5)
G.add_edge('s', 'a', weight = 1, capacity = 3)
G.add_edge('a', 'b', weight = 3)
G.add_edge('c', 'a', weight = -6)
G.add_edge('b', 'd', weight = 1)
G.add_edge('d', 'c', weight = -2)
G.add_edge('d', 't', weight = 1, capacity = 3)
assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G)
def test_sum_demands_not_zero(self):
G = nx.DiGraph()
G.add_node('s', demand = -5)
G.add_node('t', demand = 4)
G.add_edge('s', 'a', weight = 1, capacity = 3)
G.add_edge('a', 'b', weight = 3)
G.add_edge('a', 'c', weight = -6)
G.add_edge('b', 'd', weight = 1)
G.add_edge('c', 'd', weight = -2)
G.add_edge('d', 't', weight = 1, capacity = 3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
def test_no_flow_satisfying_demands(self):
G = nx.DiGraph()
G.add_node('s', demand = -5)
G.add_node('t', demand = 5)
G.add_edge('s', 'a', weight = 1, capacity = 3)
G.add_edge('a', 'b', weight = 3)
G.add_edge('a', 'c', weight = -6)
G.add_edge('b', 'd', weight = 1)
G.add_edge('c', 'd', weight = -2)
G.add_edge('d', 't', weight = 1, capacity = 3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
def test_transshipment(self):
G = nx.DiGraph()
G.add_node('a', demand = 1)
G.add_node('b', demand = -2)
G.add_node('c', demand = -2)
G.add_node('d', demand = 3)
G.add_node('e', demand = -4)
G.add_node('f', demand = -4)
G.add_node('g', demand = 3)
G.add_node('h', demand = 2)
G.add_node('r', demand = 3)
G.add_edge('a', 'c', weight = 3)
G.add_edge('r', 'a', weight = 2)
G.add_edge('b', 'a', weight = 9)
G.add_edge('r', 'c', weight = 0)
G.add_edge('b', 'r', weight = -6)
G.add_edge('c', 'd', weight = 5)
G.add_edge('e', 'r', weight = 4)
G.add_edge('e', 'f', weight = 3)
G.add_edge('h', 'b', weight = 4)
G.add_edge('f', 'd', weight = 7)
G.add_edge('f', 'h', weight = 12)
G.add_edge('g', 'd', weight = 12)
G.add_edge('f', 'g', weight = -1)
G.add_edge('h', 'g', weight = -10)
flowCost, H = nx.network_simplex(G)
soln = {'a': {'c': 0},
'b': {'a': 0, 'r': 2},
'c': {'d': 3},
'd': {},
'e': {'r': 3, 'f': 1},<|fim▁hole|> 'g': {'d': 0},
'h': {'b': 0, 'g': 0},
'r': {'a': 1, 'c': 1}}
assert_equal(flowCost, 41)
assert_equal(nx.min_cost_flow_cost(G), 41)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 41)
def test_max_flow_min_cost(self):
G = nx.DiGraph()
G.add_edge('s', 'a', bandwidth = 6)
G.add_edge('s', 'c', bandwidth = 10, cost = 10)
G.add_edge('a', 'b', cost = 6)
G.add_edge('b', 'd', bandwidth = 8, cost = 7)
G.add_edge('c', 'd', cost = 10)
G.add_edge('d', 't', bandwidth = 5, cost = 5)
soln = {'s': {'a': 5, 'c': 0},
'a': {'b': 5},
'b': {'d': 5},
'c': {'d': 0},
'd': {'t': 5},
't': {}}
flow = nx.max_flow_min_cost(G, 's', 't', capacity = 'bandwidth',
weight = 'cost')
assert_equal(flow, soln)
assert_equal(nx.cost_of_flow(G, flow, weight = 'cost'), 90)
def test_digraph1(self):
# From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied
# Mathematical Programming. Addison-Wesley, 1977.
G = nx.DiGraph()
G.add_node(1, demand = -20)
G.add_node(4, demand = 5)
G.add_node(5, demand = 15)
G.add_edges_from([(1, 2, {'capacity': 15, 'weight': 4}),
(1, 3, {'capacity': 8, 'weight': 4}),
(2, 3, {'weight': 2}),
(2, 4, {'capacity': 4, 'weight': 2}),
(2, 5, {'capacity': 10, 'weight': 6}),
(3, 4, {'capacity': 15, 'weight': 1}),
(3, 5, {'capacity': 5, 'weight': 3}),
(4, 5, {'weight': 2}),
(5, 3, {'capacity': 4, 'weight': 1})])
flowCost, H = nx.network_simplex(G)
soln = {1: {2: 12, 3: 8},
2: {3: 8, 4: 4, 5: 0},
3: {4: 11, 5: 5},
4: {5: 10},
5: {3: 0}}
assert_equal(flowCost, 150)
assert_equal(nx.min_cost_flow_cost(G), 150)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 150)
def test_digraph2(self):
# Example from ticket #430 from mfrasca. Original source:
# http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11.
G = nx.DiGraph()
G.add_edge('s', 1, capacity=12)
G.add_edge('s', 2, capacity=6)
G.add_edge('s', 3, capacity=14)
G.add_edge(1, 2, capacity=11)
G.add_edge(2, 3, capacity=9)
G.add_edge(1, 4, capacity=5)
G.add_edge(1, 5, capacity=2)
G.add_edge(2, 5, capacity=4)
G.add_edge(2, 6, capacity=2)
G.add_edge(3, 6, capacity=31)
G.add_edge(4, 5, capacity=18)
G.add_edge(5, 5, capacity=9)
G.add_edge(4, 't', capacity=3)
G.add_edge(5, 't', capacity=7)
G.add_edge(6, 't', capacity=22)
flow = nx.max_flow_min_cost(G, 's', 't')
soln = {1: {2: 5, 4: 5, 5: 2},
2: {3: 6, 5: 3, 6: 2},
3: {6: 20},
4: {5: 2, 't': 3},
5: {5: 0, 't': 7},
6: {'t': 22},
's': {1: 12, 2: 6, 3: 14},
't': {}}
assert_equal(flow, soln)
def test_digraph3(self):
"""Combinatorial Optimization: Algorithms and Complexity,
Papadimitriou Steiglitz at page 140 has an example, 7.1, but that
admits multiple solutions, so I alter it a bit. From ticket #430
by mfrasca."""
G = nx.DiGraph()
G.add_edge('s', 'a', {0: 2, 1: 4})
G.add_edge('s', 'b', {0: 2, 1: 1})
G.add_edge('a', 'b', {0: 5, 1: 2})
G.add_edge('a', 't', {0: 1, 1: 5})
G.add_edge('b', 'a', {0: 1, 1: 3})
G.add_edge('b', 't', {0: 3, 1: 2})
"PS.ex.7.1: testing main function"
sol = nx.max_flow_min_cost(G, 's', 't', capacity=0, weight=1)
flow = sum(v for v in sol['s'].values())
assert_equal(4, flow)
assert_equal(23, nx.cost_of_flow(G, sol, weight=1))
assert_equal(sol['s'], {'a': 2, 'b': 2})
assert_equal(sol['a'], {'b': 1, 't': 1})
assert_equal(sol['b'], {'a': 0, 't': 3})
assert_equal(sol['t'], {})
def test_zero_capacity_edges(self):
"""Address issue raised in ticket #617 by arv."""
G = nx.DiGraph()
G.add_edges_from([(1, 2, {'capacity': 1, 'weight': 1}),
(1, 5, {'capacity': 1, 'weight': 1}),
(2, 3, {'capacity': 0, 'weight': 1}),
(2, 5, {'capacity': 1, 'weight': 1}),
(5, 3, {'capacity': 2, 'weight': 1}),
(5, 4, {'capacity': 0, 'weight': 1}),
(3, 4, {'capacity': 2, 'weight': 1})])
G.node[1]['demand'] = -1
G.node[2]['demand'] = -1
G.node[4]['demand'] = 2
flowCost, H = nx.network_simplex(G)
soln = {1: {2: 0, 5: 1},
2: {3: 0, 5: 1},
3: {4: 2},
4: {},
5: {3: 2, 4: 0}}
assert_equal(flowCost, 6)
assert_equal(nx.min_cost_flow_cost(G), 6)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 6)
def test_digon(self):
"""Check if digons are handled properly. Taken from ticket
#618 by arv."""
nodes = [(1, {}),
(2, {'demand': -4}),
(3, {'demand': 4}),
]
edges = [(1, 2, {'capacity': 3, 'weight': 600000}),
(2, 1, {'capacity': 2, 'weight': 0}),
(2, 3, {'capacity': 5, 'weight': 714285}),
(3, 2, {'capacity': 2, 'weight': 0}),
]
G = nx.DiGraph(edges)
G.add_nodes_from(nodes)
flowCost, H = nx.network_simplex(G)
soln = {1: {2: 0},
2: {1: 0, 3: 4},
3: {2: 0}}
assert_equal(flowCost, 2857140)
assert_equal(nx.min_cost_flow_cost(G), 2857140)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 2857140)
def test_multidigraph(self):
"""Raise an exception for multidigraph."""
G = nx.MultiDiGraph()
G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight='capacity')
assert_raises(nx.NetworkXError, nx.network_simplex, G)<|fim▁end|> | 'f': {'d': 0, 'g': 3, 'h': 2}, |
<|file_name|>SupervisedLearning.py<|end_file_name|><|fim▁begin|>import numpy as np
from sklearn.grid_search import GridSearchCV
import sklearn.metrics as metrics
from sklearn import preprocessing as prep
from tr_utils import merge_two_dicts, isEmpty
class SKSupervisedLearning (object):
"""
Thin wrapper around some learning methods
"""
def __init__(self, classifier, X_train, Y_train, X_test, Y_test):
"""
X_train, Y_train - training data: examples + corresponding class labels
X_test, Y_test - validation data: examples + corresponding class labels
"""
self.X_train = X_train
self.X_test = X_test
self.Y_train = Y_train
self.Y_test = Y_test
self.X_train_scaled = np.array([])
self.X_test_scaled = np.array([])
self._classifier = classifier
self._clf = None
self._proba_train = None
self._proba_test = None
self._train_params = None
self._estimation_params = None
self._scaler = None
# parameters for sklearn grid search
self._jobs = -1
self._cv = 10
self._verbose = 0
self._scoring = "log_loss"
@property
def scaler(self):
return self._scaler
@property
def clf(self):
if self._clf == None:
self._clf = self._classifier(**self.train_params) if self.train_params != None else self._classifier()
return self._clf
@property
def proba_train(self):
return self._proba_train
@property
def proba_test(self):
return self._proba_test
@property
def train_params(self):
"""
Training parameter dictionary specific to each learner
"""
return self._train_params
@train_params.setter
def train_params(self, val):
self._train_params = val
@property
def estimation_params(self):
"""
        Dictionary of parameters to estimate, specific to each learner:
e.g.:
{'gamma': [0.001, 0.1, 1], 'C': [1, 10, 100]}
"""
return self._estimation_params
@estimation_params.setter
def estimation_params(self, val):
self._estimation_params = val
@property
def jobs(self):
return self._jobs
@jobs.setter
def jobs(self, val):
self._jobs = val
@property
def cv(self):
return self._cv
@cv.setter
def cv(self, val):
self._cv = val
@property
def scoring(self):
return self._scoring
<|fim▁hole|> self._scoring = val
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, val):
self._verbose = val
def _pick_examples(self):
'''
If we have scaled examples - pick them, else pick X_train, X_test
'''
return (self.X_train, self.X_test) \
if isEmpty(self.X_train_scaled) or isEmpty(self.X_test_scaled) \
else (self.X_train_scaled, self.X_test_scaled)
def remove_scaling(self):
self.X_test_scaled = None
self.X_train_scaled = None
    def grid_search_classifier(self):
"""
Grid search for the best classifier, given parameters.
Returns best score
Sets the classifier to the best classifier given training and estimation parameters
See sklearn GridSearchCV for details
"""
gs = False
if self.train_params == None and self.estimation_params == None:
raise AttributeError("Cannot have train_params and estimation_params both absent")
# first - grid-search for the best parameters
if self.estimation_params:
X_train, X_test = self._pick_examples()
Y_train = self.Y_train
clf = self._classifier(**self.train_params) if self.train_params != None else self._classifier()
gs = GridSearchCV(clf, self.estimation_params, scoring = self.scoring, cv = self.cv, n_jobs=self.jobs, verbose = self.verbose)
gs.fit(X_train, Y_train)
print gs.best_params_
print gs.best_score_
# if we have specified parameters of our own - we need to add those
if gs:
self.train_params = merge_two_dicts(gs.best_params_, self.train_params) if self.train_params != None else gs.best_params_
self._clf = self._classifier(**self.train_params)
return gs.best_score_
def _fit_scaler(self, scaler_class, X):
return scaler_class().fit(X)
# TODO: other scalers?
def fit_standard_scaler(self):
"""
Standard scaler scales samples 'vertically', (by feature), by removing the mean and reducing to unit std.
Computes a scaler and transforms both train and validation sets based upon it
"""
self._scaler = self._fit_scaler(prep.StandardScaler, self.X_train)
self.X_train_scaled = self._scaler.transform(self.X_train)
self.X_test_scaled = self._scaler.transform(self.X_test)
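    # Illustrative usage sketch (the classifier and data names here are
    # assumptions, not part of the original file):
    #   learner = SKSupervisedLearning(SVC, X_train, Y_train, X_test, Y_test)
    #   learner.train_params = {'probability': True}
    #   learner.fit_standard_scaler()
    #   ll_train, ll_test = learner.fit_and_validate()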
def fit_and_validate(self):
'''
Returns training & testing log loss
'''
X_train, X_test = self._pick_examples()
# shorthand
Y_train = self.Y_train
Y_test = self.Y_test
self.clf.fit(X_train, Y_train)
# get probabilities
self._proba_train = self.clf.predict_proba(X_train)
self._proba_test = self.clf.predict_proba(X_test)
return metrics.log_loss(Y_train, self.proba_train), np.array([]) if isEmpty(Y_test) else metrics.log_loss(Y_test, self.proba_test)
def predict_actual(self, X_actual_test):
'''
Return actual prediction on a set where we don't have labels
'''
return self.clf.predict_proba(X_actual_test)<|fim▁end|> | @scoring.setter
def scoring(self, val): |
<|file_name|>test_learner_profile.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
End-to-end tests for Student's Profile Page.
"""
from contextlib import contextmanager
from datetime import datetime
from bok_choy.web_app_test import WebAppTest
from nose.plugins.attrib import attr
from ...pages.common.logout import LogoutPage
from ...pages.lms.account_settings import AccountSettingsPage
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.learner_profile import LearnerProfilePage
from ...pages.lms.dashboard import DashboardPage
from ..helpers import EventsTestMixin
class LearnerProfileTestMixin(EventsTestMixin):
"""
Mixin with helper methods for testing learner profile pages.
"""
PRIVACY_PUBLIC = u'all_users'
PRIVACY_PRIVATE = u'private'
PUBLIC_PROFILE_FIELDS = ['username', 'country', 'language_proficiencies', 'bio']
PRIVATE_PROFILE_FIELDS = ['username']
PUBLIC_PROFILE_EDITABLE_FIELDS = ['country', 'language_proficiencies', 'bio']
USER_SETTINGS_CHANGED_EVENT_NAME = u"edx.user.settings.changed"
def log_in_as_unique_user(self):
"""
Create a unique user and return the account's username and id.
"""
username = "test_{uuid}".format(uuid=self.unique_id[0:6])
auto_auth_page = AutoAuthPage(self.browser, username=username).visit()
user_id = auto_auth_page.get_user_id()
return username, user_id
def set_public_profile_fields_data(self, profile_page):
"""
Fill in the public profile fields of a user.
"""
profile_page.value_for_dropdown_field('language_proficiencies', 'English')
profile_page.value_for_dropdown_field('country', 'United Arab Emirates')
profile_page.set_value_for_textarea_field('bio', 'Nothing Special')
def visit_profile_page(self, username, privacy=None):
"""
Visit a user's profile page and if a privacy is specified and
is different from the displayed value, then set the privacy to that value.
"""
profile_page = LearnerProfilePage(self.browser, username)
# Change the privacy if requested by loading the page and
# changing the drop down
if privacy is not None:
profile_page.visit()
# Change the privacy setting if it is not the desired one already
profile_page.privacy = privacy
# Verify the current setting is as expected
if privacy == self.PRIVACY_PUBLIC:
self.assertEqual(profile_page.privacy, 'all_users')
else:
self.assertEqual(profile_page.privacy, 'private')
if privacy == self.PRIVACY_PUBLIC:
self.set_public_profile_fields_data(profile_page)
# Reset event tracking so that the tests only see events from
# loading the profile page.
self.start_time = datetime.now() # pylint: disable=attribute-defined-outside-init
# Load the page
profile_page.visit()
return profile_page
def set_birth_year(self, birth_year):
"""
Set birth year for the current user to the specified value.
"""
account_settings_page = AccountSettingsPage(self.browser)
account_settings_page.visit()
account_settings_page.wait_for_page()
self.assertEqual(
account_settings_page.value_for_dropdown_field('year_of_birth', str(birth_year)),
str(birth_year)
)
def verify_profile_page_is_public(self, profile_page, is_editable=True):
"""
Verify that the profile page is currently public.
"""
self.assertEqual(profile_page.visible_fields, self.PUBLIC_PROFILE_FIELDS)
if is_editable:
self.assertTrue(profile_page.privacy_field_visible)
self.assertEqual(profile_page.editable_fields, self.PUBLIC_PROFILE_EDITABLE_FIELDS)
else:
self.assertEqual(profile_page.editable_fields, [])
def verify_profile_page_is_private(self, profile_page, is_editable=True):
"""
Verify that the profile page is currently private.
"""
if is_editable:
self.assertTrue(profile_page.privacy_field_visible)
self.assertEqual(profile_page.visible_fields, self.PRIVATE_PROFILE_FIELDS)
def verify_profile_page_view_event(self, requesting_username, profile_user_id, visibility=None):
"""
Verifies that the correct view event was captured for the profile page.
"""
actual_events = self.wait_for_events(
start_time=self.start_time,
event_filter={'event_type': 'edx.user.settings.viewed', 'username': requesting_username},
number_of_matches=1)
self.assert_events_match(
[
{
'username': requesting_username,
'event': {
'user_id': int(profile_user_id),
'page': 'profile',
'visibility': unicode(visibility)
}
}
],
actual_events
)
@contextmanager
def verify_pref_change_event_during(self, username, user_id, setting, **kwargs):
"""Assert that a single setting changed event is emitted for the user_api_userpreference table."""
expected_event = {
'username': username,
'event': {
'setting': setting,
'user_id': int(user_id),
'table': 'user_api_userpreference',
'truncated': []
}
}
expected_event['event'].update(kwargs)
event_filter = {
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'username': username,
}
with self.assert_events_match_during(event_filter=event_filter, expected_events=[expected_event]):
yield
def initialize_different_user(self, privacy=None, birth_year=None):
"""
Initialize the profile page for a different test user
"""
username, user_id = self.log_in_as_unique_user()
# Set the privacy for the new user
if privacy is None:
privacy = self.PRIVACY_PUBLIC
self.visit_profile_page(username, privacy=privacy)
# Set the user's year of birth
if birth_year:
self.set_birth_year(birth_year)
# Log the user out
LogoutPage(self.browser).visit()
return username, user_id
@attr('shard_4')
class OwnLearnerProfilePageTest(LearnerProfileTestMixin, WebAppTest):
"""
Tests that verify a student's own profile page.
"""
def verify_profile_forced_private_message(self, username, birth_year, message=None):
"""
Verify age limit messages for a user.
"""
if birth_year is None:
birth_year = ""
self.set_birth_year(birth_year=birth_year)
profile_page = self.visit_profile_page(username)
self.assertTrue(profile_page.privacy_field_visible)
if message:
self.assertTrue(profile_page.age_limit_message_present)
else:
self.assertFalse(profile_page.age_limit_message_present)
self.assertIn(message, profile_page.profile_forced_private_message)
def test_profile_defaults_to_public(self):
"""
Scenario: Verify that a new user's profile defaults to public.
Given that I am a new user.
When I go to my profile page.
Then I see that the profile visibility is set to public.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username)
self.verify_profile_page_is_public(profile_page)
def assert_default_image_has_public_access(self, profile_page):
"""
Assert that profile image has public access.
"""
self.assertTrue(profile_page.profile_has_default_image)
self.assertTrue(profile_page.profile_has_image_with_public_access())
def test_make_profile_public(self):
"""
Scenario: Verify that the user can change their privacy.
Given that I am a registered user
And I visit my private profile page
And I set the profile visibility to public
Then a user preference changed event should be recorded
When I reload the page
Then the profile visibility should be shown as public
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
with self.verify_pref_change_event_during(
username, user_id, 'account_privacy', old=self.PRIVACY_PRIVATE, new=self.PRIVACY_PUBLIC
):
profile_page.privacy = self.PRIVACY_PUBLIC
# Reload the page and verify that the profile is now public
self.browser.refresh()
profile_page.wait_for_page()
self.verify_profile_page_is_public(profile_page)
def test_make_profile_private(self):
"""
Scenario: Verify that the user can change their privacy.
Given that I am a registered user
And I visit my public profile page
And I set the profile visibility to private
Then a user preference changed event should be recorded
When I reload the page
Then the profile visibility should be shown as private
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
with self.verify_pref_change_event_during(
username, user_id, 'account_privacy', old=None, new=self.PRIVACY_PRIVATE
):
profile_page.privacy = self.PRIVACY_PRIVATE
# Reload the page and verify that the profile is now private
self.browser.refresh()
profile_page.wait_for_page()
self.verify_profile_page_is_private(profile_page)
def test_dashboard_learner_profile_link(self):
"""
Scenario: Verify that my profile link is present on dashboard page and we can navigate to correct page.
Given that I am a registered user.
When I go to Dashboard page.
And I click on username dropdown.
Then I see Profile link in the dropdown menu.
When I click on Profile link.
Then I will be navigated to Profile page.
"""
username, user_id = self.log_in_as_unique_user()
dashboard_page = DashboardPage(self.browser)
dashboard_page.visit()
dashboard_page.click_username_dropdown()
self.assertIn('Profile', dashboard_page.username_dropdown_link_text)
dashboard_page.click_my_profile_link()
my_profile_page = LearnerProfilePage(self.browser, username)
my_profile_page.wait_for_page()
def test_fields_on_my_private_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at her own private profile.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to private.
And I reload the page.
Then I should see the profile visibility selector dropdown.
Then I see some of the profile fields are shown.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
self.verify_profile_page_is_private(profile_page)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_fields_on_my_public_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at her own public profile.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public.
And I reload the page.
Then I should see the profile visibility selector dropdown.
Then I see all the profile fields are shown.
And `location`, `language` and `about me` fields are editable.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.verify_profile_page_is_public(profile_page)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PUBLIC)
def _test_dropdown_field(self, profile_page, field_id, new_value, displayed_value, mode):
"""
Test behaviour of a dropdown field.
"""
profile_page.value_for_dropdown_field(field_id, new_value)
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
self.browser.refresh()
profile_page.wait_for_page()
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
def _test_textarea_field(self, profile_page, field_id, new_value, displayed_value, mode):
"""
Test behaviour of a textarea field.
"""
profile_page.set_value_for_textarea_field(field_id, new_value)
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
self.browser.refresh()
profile_page.wait_for_page()
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
def test_country_field(self):
"""
Test behaviour of `Country` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set country value to `Pakistan`.
Then displayed country should be `Pakistan` and country field mode should be `display`
And I reload the page.
Then displayed country should be `Pakistan` and country field mode should be `display`
And I make `country` field editable
Then `country` field mode should be `edit`
And `country` field icon should be visible.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_dropdown_field(profile_page, 'country', 'Pakistan', 'Pakistan', 'display')
profile_page.make_field_editable('country')
self.assertEqual(profile_page.mode_for_field('country'), 'edit')
self.assertTrue(profile_page.field_icon_present('country'))
def test_language_field(self):
"""
Test behaviour of `Language` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set language value to `Urdu`.
Then displayed language should be `Urdu` and language field mode should be `display`
And I reload the page.
Then displayed language should be `Urdu` and language field mode should be `display`
Then I set empty value for language.
Then displayed language should be `Add language` and language field mode should be `placeholder`
And I reload the page.
Then displayed language should be `Add language` and language field mode should be `placeholder`
And I make `language` field editable
Then `language` field mode should be `edit`
And `language` field icon should be visible.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_dropdown_field(profile_page, 'language_proficiencies', 'Urdu', 'Urdu', 'display')
self._test_dropdown_field(profile_page, 'language_proficiencies', '', 'Add language', 'placeholder')
profile_page.make_field_editable('language_proficiencies')
        self.assertEqual(profile_page.mode_for_field('language_proficiencies'), 'edit')
self.assertTrue(profile_page.field_icon_present('language_proficiencies'))
def test_about_me_field(self):
"""
Test behaviour of `About Me` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set about me value to `ThisIsIt`.
Then displayed about me should be `ThisIsIt` and about me field mode should be `display`
And I reload the page.
Then displayed about me should be `ThisIsIt` and about me field mode should be `display`
Then I set empty value for about me.
Then displayed about me should be `Tell other edX learners a little about yourself: where you live,
what your interests are, why you're taking courses on edX, or what you hope to learn.` and about me
field mode should be `placeholder`
And I reload the page.
Then displayed about me should be `Tell other edX learners a little about yourself: where you live,
what your interests are, why you're taking courses on edX, or what you hope to learn.` and about me
field mode should be `placeholder`
And I make `about me` field editable
Then `about me` field mode should be `edit`
"""
placeholder_value = (
"Tell other learners a little about yourself: where you live, what your interests are, "
"why you're taking courses, or what you hope to learn."
)
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_textarea_field(profile_page, 'bio', 'ThisIsIt', 'ThisIsIt', 'display')
self._test_textarea_field(profile_page, 'bio', '', placeholder_value, 'placeholder')
profile_page.make_field_editable('bio')
        self.assertEqual(profile_page.mode_for_field('bio'), 'edit')
def test_birth_year_not_set(self):
"""
Verify message if birth year is not set.
Given that I am a registered user.
And birth year is not set for the user.
And I visit my profile page.
Then I should see a message that the profile is private until the year of birth is set.
"""
username, user_id = self.log_in_as_unique_user()
message = "You must specify your birth year before you can share your full profile."
self.verify_profile_forced_private_message(username, birth_year=None, message=message)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_user_is_under_age(self):
"""
Verify message if user is under age.
Given that I am a registered user.
And birth year is set so that age is less than 13.
And I visit my profile page.
Then I should see a message that the profile is private as I am under thirteen.
"""
username, user_id = self.log_in_as_unique_user()
under_age_birth_year = datetime.now().year - 10
self.verify_profile_forced_private_message(
username,
birth_year=under_age_birth_year,
message='You must be over 13 to share a full profile.'
)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_user_can_only_see_default_image_for_private_profile(self):
"""
Scenario: Default profile image behaves correctly for under age user.
Given that I am on my profile page with private access
And I can see default image
When I move my cursor to the image
        Then I cannot see the upload/remove image text
        And I cannot upload/remove the image.
"""
year_of_birth = datetime.now().year - 5
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
self.verify_profile_forced_private_message(
username,
year_of_birth,
message='You must be over 13 to share a full profile.'
)
self.assertTrue(profile_page.profile_has_default_image)
self.assertFalse(profile_page.profile_has_image_with_private_access())
def test_user_can_see_default_image_for_public_profile(self):
"""
Scenario: Default profile image behaves correctly for public profile.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        And I am able to upload a new image
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
def test_user_can_upload_the_profile_image_with_success(self):
"""
Scenario: Upload profile image works correctly.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new image via the file uploader
        Then I can see the changed image
        And I can also see the latest image after reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
profile_page.visit()
self.assertTrue(profile_page.image_upload_success)
def test_user_can_see_error_for_exceeding_max_file_size_limit(self):
"""
Scenario: Upload profile image does not work for > 1MB image file.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new > 1MB image via the file uploader
        Then I can see the error message for the file size limit
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='larger_image.jpg')
self.assertEqual(profile_page.profile_image_message, "The file must be smaller than 1 MB in size.")
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_see_error_for_file_size_below_the_min_limit(self):
"""
Scenario: Upload profile image does not work for < 100 Bytes image file.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new < 100 bytes image via the file uploader
        Then I can see the error message for the minimum file size limit
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='list-icon-visited.png')
self.assertEqual(profile_page.profile_image_message, "The file must be at least 100 bytes in size.")
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_see_error_for_wrong_file_type(self):
"""
Scenario: Upload profile image does not work for wrong file types.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new csv file via the file uploader
        Then I can see the error message for a wrong/unsupported file type
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='generic_csv.csv')
self.assertEqual(
profile_page.profile_image_message,
"The file must be one of the following types: .gif, .png, .jpeg, .jpg."
)
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_remove_profile_image(self):
"""
Scenario: Remove profile image works correctly.
<|fim▁hole|> When I move my cursor to the image
        Then I can see the upload/remove image text
        When I click on the remove image link
        Then I can see the default image
        And I can still see the default image after page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
self.assertTrue(profile_page.remove_profile_image())
self.assertTrue(profile_page.profile_has_default_image)
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
def test_user_cannot_remove_default_image(self):
"""
        Scenario: Remove profile image does not work for default images.
        Given that I am on my profile page with public access
        And I can see default image
        When I move my cursor to the image
        Then I can see only the upload image text
        And I cannot see the remove image text
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
self.assertFalse(profile_page.remove_link_present)
def test_eventing_after_multiple_uploads(self):
"""
Scenario: An event is fired when a user with a profile image uploads another image
Given that I am on my profile page with public access
And I upload a new image via file uploader
When I upload another image via the file uploader
Then two upload events have been emitted
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg', wait_for_upload_button=False)
@attr('shard_4')
class DifferentUserLearnerProfilePageTest(LearnerProfileTestMixin, WebAppTest):
"""
Tests that verify viewing the profile page of a different user.
"""
def test_different_user_private_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at a different user's private profile.
Given that I am a registered user.
And I visit a different user's private profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then I see some of the profile fields are shown.
"""
different_username, different_user_id = self.initialize_different_user(privacy=self.PRIVACY_PRIVATE)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
self.verify_profile_page_is_private(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PRIVATE)
def test_different_user_under_age(self):
"""
Scenario: Verify that an under age user's profile is private to others.
Given that I am a registered user.
And I visit an under age user's profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then I see that only the private fields are shown.
"""
under_age_birth_year = datetime.now().year - 10
different_username, different_user_id = self.initialize_different_user(
privacy=self.PRIVACY_PUBLIC,
birth_year=under_age_birth_year
)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
self.verify_profile_page_is_private(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PRIVATE)
def test_different_user_public_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at a different user's public profile.
Given that I am a registered user.
And I visit a different user's public profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then all the profile fields are shown.
Then I shouldn't see the profile visibility selector dropdown.
Also `location`, `language` and `about me` fields are not editable.
"""
different_username, different_user_id = self.initialize_different_user(privacy=self.PRIVACY_PUBLIC)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
profile_page.wait_for_public_fields()
self.verify_profile_page_is_public(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PUBLIC)
@attr('a11y')
class LearnerProfileA11yTest(LearnerProfileTestMixin, WebAppTest):
"""
Class to test learner profile accessibility.
"""
def test_editable_learner_profile_a11y(self):
"""
Test the accessibility of the editable version of the profile page
(user viewing her own public profile).
"""
username, _ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'skip-link', # TODO: AC-179
'link-href', # TODO: AC-231
],
})
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.make_field_editable('language_proficiencies')
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.make_field_editable('bio')
profile_page.a11y_audit.check_for_accessibility_errors()
def test_read_only_learner_profile_a11y(self):
"""
Test the accessibility of the read-only version of a public profile page
(user viewing someone else's profile page).
"""
# initialize_different_user should cause country, language, and bio to be filled out (since
# privacy is public). It doesn't appear that this is happening, although the method
# works in regular bokchoy tests. Perhaps a problem with phantomjs? So this test is currently
# only looking at a read-only profile page with a username.
different_username, _ = self.initialize_different_user(privacy=self.PRIVACY_PUBLIC)
self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'skip-link', # TODO: AC-179
'link-href', # TODO: AC-231
],
})
profile_page.a11y_audit.check_for_accessibility_errors()<|fim▁end|> | Given that I am on my profile page with public access
And I can see default image |
<|file_name|>cxx_vector.rs<|end_file_name|><|fim▁begin|>//! Less used details of `CxxVector` are exposed in this module. `CxxVector`
//! itself is exposed at the crate root.
use crate::extern_type::ExternType;
use crate::kind::Trivial;
use crate::string::CxxString;
use core::ffi::c_void;
use core::fmt::{self, Debug};
use core::iter::FusedIterator;
use core::marker::{PhantomData, PhantomPinned};
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::pin::Pin;
use core::slice;
/// Binding to C++ `std::vector<T, std::allocator<T>>`.
///
/// # Invariants
///
/// As an invariant of this API and the static analysis of the cxx::bridge
/// macro, in Rust code we can never obtain a `CxxVector` by value. Instead in
/// Rust code we will only ever look at a vector behind a reference or smart
/// pointer, as in `&CxxVector<T>` or `UniquePtr<CxxVector<T>>`.
#[repr(C, packed)]
pub struct CxxVector<T> {
// A thing, because repr(C) structs are not allowed to consist exclusively
// of PhantomData fields.
_void: [c_void; 0],
// The conceptual vector elements to ensure that autotraits are propagated
// correctly, e.g. CxxVector is UnwindSafe iff T is.
_elements: PhantomData<[T]>,
// Prevent unpin operation from Pin<&mut CxxVector<T>> to &mut CxxVector<T>.
_pinned: PhantomData<PhantomPinned>,
}
impl<T> CxxVector<T>
where
T: VectorElement,
{
/// Returns the number of elements in the vector.
///
/// Matches the behavior of C++ [std::vector\<T\>::size][size].
///
/// [size]: https://en.cppreference.com/w/cpp/container/vector/size
pub fn len(&self) -> usize {
T::__vector_size(self)
}
/// Returns true if the vector contains no elements.
///
/// Matches the behavior of C++ [std::vector\<T\>::empty][empty].
///
/// [empty]: https://en.cppreference.com/w/cpp/container/vector/empty
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns a reference to an element at the given position, or `None` if
/// out of bounds.
pub fn get(&self, pos: usize) -> Option<&T> {
if pos < self.len() {
Some(unsafe { self.get_unchecked(pos) })
} else {
None
}
}
/// Returns a pinned mutable reference to an element at the given position,
/// or `None` if out of bounds.
pub fn index_mut(self: Pin<&mut Self>, pos: usize) -> Option<Pin<&mut T>> {
if pos < self.len() {
Some(unsafe { self.index_unchecked_mut(pos) })
} else {
None
}
}
/// Returns a reference to an element without doing bounds checking.
///
/// This is generally not recommended, use with caution! Calling this method
/// with an out-of-bounds index is undefined behavior even if the resulting
/// reference is not used.
///
/// Matches the behavior of C++
/// [std::vector\<T\>::operator\[\] const][operator_at].
///
/// [operator_at]: https://en.cppreference.com/w/cpp/container/vector/operator_at
pub unsafe fn get_unchecked(&self, pos: usize) -> &T {
let this = self as *const CxxVector<T> as *mut CxxVector<T>;
unsafe {
let ptr = T::__get_unchecked(this, pos) as *const T;
&*ptr
}
}
/// Returns a pinned mutable reference to an element without doing bounds
/// checking.
///
/// This is generally not recommended, use with caution! Calling this method
/// with an out-of-bounds index is undefined behavior even if the resulting
/// reference is not used.
///
/// Matches the behavior of C++
/// [std::vector\<T\>::operator\[\]][operator_at].
///
/// [operator_at]: https://en.cppreference.com/w/cpp/container/vector/operator_at
pub unsafe fn index_unchecked_mut(self: Pin<&mut Self>, pos: usize) -> Pin<&mut T> {
unsafe {
let ptr = T::__get_unchecked(self.get_unchecked_mut(), pos);
Pin::new_unchecked(&mut *ptr)
}
}
/// Returns a slice to the underlying contiguous array of elements.
pub fn as_slice(&self) -> &[T]
where
T: ExternType<Kind = Trivial>,
{
let len = self.len();
if len == 0 {
// The slice::from_raw_parts in the other branch requires a nonnull
// and properly aligned data ptr. C++ standard does not guarantee
// that data() on a vector with size 0 would return a nonnull
// pointer or sufficiently aligned pointer, so using it would be
// undefined behavior. Create our own empty slice in Rust instead
// which upholds the invariants.
&[]
} else {
let this = self as *const CxxVector<T> as *mut CxxVector<T>;
let ptr = unsafe { T::__get_unchecked(this, 0) };
unsafe { slice::from_raw_parts(ptr, len) }
}
}
/// Returns a slice to the underlying contiguous array of elements by
/// mutable reference.
pub fn as_mut_slice(self: Pin<&mut Self>) -> &mut [T]
where
T: ExternType<Kind = Trivial>,
{
let len = self.len();
if len == 0 {
&mut []
} else {
let ptr = unsafe { T::__get_unchecked(self.get_unchecked_mut(), 0) };
unsafe { slice::from_raw_parts_mut(ptr, len) }
}
}
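// Illustrative use of `as_slice` (a sketch; assumes a `&CxxVector<u8>`
// received from C++ through a cxx bridge):
//
// fn sum(v: &CxxVector<u8>) -> u32 {
// v.as_slice().iter().map(|&b| u32::from(b)).sum()
// }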
/// Returns an iterator over elements of type `&T`.
pub fn iter(&self) -> Iter<T> {
Iter { v: self, index: 0 }
}
/// Returns an iterator over elements of type `Pin<&mut T>`.
pub fn iter_mut(self: Pin<&mut Self>) -> IterMut<T> {
IterMut { v: self, index: 0 }
}
/// Appends an element to the back of the vector.
///
/// Matches the behavior of C++ [std::vector\<T\>::push_back][push_back].
///
/// [push_back]: https://en.cppreference.com/w/cpp/container/vector/push_back
pub fn push(self: Pin<&mut Self>, value: T)
where
T: ExternType<Kind = Trivial>,
{
let mut value = ManuallyDrop::new(value);
unsafe {
// C++ calls move constructor followed by destructor on `value`.
T::__push_back(self, &mut value);
}
}
/// Removes the last element from a vector and returns it, or `None` if the
/// vector is empty.
pub fn pop(self: Pin<&mut Self>) -> Option<T>
where
T: ExternType<Kind = Trivial>,
{
if self.is_empty() {
None
} else {
let mut value = MaybeUninit::uninit();
Some(unsafe {
T::__pop_back(self, &mut value);
value.assume_init()
})
}
}
}
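// Illustrative use of `push`/`pop` (a sketch; assumes a
// `UniquePtr<CxxVector<u8>>` obtained from C++, e.g. via a bridge function —
// `ffi::make_bytes` below is hypothetical):
//
// let mut v: cxx::UniquePtr<cxx::CxxVector<u8>> = ffi::make_bytes();
// v.pin_mut().push(42);
// assert_eq!(v.len(), 1);
// assert_eq!(v.pin_mut().pop(), Some(42));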
/// Iterator over elements of a `CxxVector` by shared reference.
///
/// The iterator element type is `&'a T`.
pub struct Iter<'a, T> {
v: &'a CxxVector<T>,
index: usize,
}
impl<'a, T> IntoIterator for &'a CxxVector<T>
where
T: VectorElement,
{
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, T> Iterator for Iter<'a, T>
where
T: VectorElement,
{
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
let next = self.v.get(self.index)?;
self.index += 1;
Some(next)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
}
impl<'a, T> ExactSizeIterator for Iter<'a, T>
where
T: VectorElement,
{
fn len(&self) -> usize {
self.v.len() - self.index
}
}
impl<'a, T> FusedIterator for Iter<'a, T> where T: VectorElement {}
/// Iterator over elements of a `CxxVector` by pinned mutable reference.
///
/// The iterator element type is `Pin<&'a mut T>`.
pub struct IterMut<'a, T> {<|fim▁hole|>impl<'a, T> IntoIterator for Pin<&'a mut CxxVector<T>>
where
T: VectorElement,
{
type Item = Pin<&'a mut T>;
type IntoIter = IterMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<'a, T> Iterator for IterMut<'a, T>
where
T: VectorElement,
{
type Item = Pin<&'a mut T>;
fn next(&mut self) -> Option<Self::Item> {
let next = self.v.as_mut().index_mut(self.index)?;
self.index += 1;
// Extend lifetime to allow simultaneous holding of nonoverlapping
// elements, analogous to slice::split_first_mut.
unsafe {
let ptr = Pin::into_inner_unchecked(next) as *mut T;
Some(Pin::new_unchecked(&mut *ptr))
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
}
impl<'a, T> ExactSizeIterator for IterMut<'a, T>
where
T: VectorElement,
{
fn len(&self) -> usize {
self.v.len() - self.index
}
}
impl<'a, T> FusedIterator for IterMut<'a, T> where T: VectorElement {}
impl<T> Debug for CxxVector<T>
where
T: VectorElement + Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.debug_list().entries(self).finish()
}
}
/// Trait bound for types which may be used as the `T` inside of a
/// `CxxVector<T>` in generic code.
///
/// This trait has no publicly callable or implementable methods. Implementing
/// it outside of the CXX codebase is not supported.
///
/// # Example
///
/// A bound `T: VectorElement` may be necessary when manipulating [`CxxVector`]
/// in generic code.
///
/// ```
/// use cxx::vector::{CxxVector, VectorElement};
/// use std::fmt::Display;
///
/// pub fn take_generic_vector<T>(vector: &CxxVector<T>)
/// where
/// T: VectorElement + Display,
/// {
/// println!("the vector elements are:");
/// for element in vector {
/// println!(" • {}", element);
/// }
/// }
/// ```
///
/// Writing the same generic function without a `VectorElement` trait bound
/// would not compile.
pub unsafe trait VectorElement: Sized {
#[doc(hidden)]
fn __typename(f: &mut fmt::Formatter) -> fmt::Result;
#[doc(hidden)]
fn __vector_size(v: &CxxVector<Self>) -> usize;
#[doc(hidden)]
unsafe fn __get_unchecked(v: *mut CxxVector<Self>, pos: usize) -> *mut Self;
#[doc(hidden)]
unsafe fn __push_back(v: Pin<&mut CxxVector<Self>>, value: &mut ManuallyDrop<Self>) {
// Opaque C type vector elements do not get this method because they can
// never exist by value on the Rust side of the bridge.
let _ = v;
let _ = value;
unreachable!()
}
#[doc(hidden)]
unsafe fn __pop_back(v: Pin<&mut CxxVector<Self>>, out: &mut MaybeUninit<Self>) {
// Opaque C type vector elements do not get this method because they can
// never exist by value on the Rust side of the bridge.
let _ = v;
let _ = out;
unreachable!()
}
#[doc(hidden)]
fn __unique_ptr_null() -> MaybeUninit<*mut c_void>;
#[doc(hidden)]
unsafe fn __unique_ptr_raw(raw: *mut CxxVector<Self>) -> MaybeUninit<*mut c_void>;
#[doc(hidden)]
unsafe fn __unique_ptr_get(repr: MaybeUninit<*mut c_void>) -> *const CxxVector<Self>;
#[doc(hidden)]
unsafe fn __unique_ptr_release(repr: MaybeUninit<*mut c_void>) -> *mut CxxVector<Self>;
#[doc(hidden)]
unsafe fn __unique_ptr_drop(repr: MaybeUninit<*mut c_void>);
}
macro_rules! vector_element_by_value_methods {
(opaque, $segment:expr, $ty:ty) => {};
(trivial, $segment:expr, $ty:ty) => {
#[doc(hidden)]
unsafe fn __push_back(v: Pin<&mut CxxVector<$ty>>, value: &mut ManuallyDrop<$ty>) {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$std$vector$", $segment, "$push_back")]
fn __push_back(_: Pin<&mut CxxVector<$ty>>, _: &mut ManuallyDrop<$ty>);
}
}
unsafe { __push_back(v, value) }
}
#[doc(hidden)]
unsafe fn __pop_back(v: Pin<&mut CxxVector<$ty>>, out: &mut MaybeUninit<$ty>) {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$std$vector$", $segment, "$pop_back")]
fn __pop_back(_: Pin<&mut CxxVector<$ty>>, _: &mut MaybeUninit<$ty>);
}
}
unsafe { __pop_back(v, out) }
}
};
}
macro_rules! impl_vector_element {
($kind:ident, $segment:expr, $name:expr, $ty:ty) => {
const_assert_eq!(0, mem::size_of::<CxxVector<$ty>>());
const_assert_eq!(1, mem::align_of::<CxxVector<$ty>>());
unsafe impl VectorElement for $ty {
#[doc(hidden)]
fn __typename(f: &mut fmt::Formatter) -> fmt::Result {
f.write_str($name)
}
#[doc(hidden)]
fn __vector_size(v: &CxxVector<$ty>) -> usize {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$std$vector$", $segment, "$size")]
fn __vector_size(_: &CxxVector<$ty>) -> usize;
}
}
unsafe { __vector_size(v) }
}
#[doc(hidden)]
unsafe fn __get_unchecked(v: *mut CxxVector<$ty>, pos: usize) -> *mut $ty {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$std$vector$", $segment, "$get_unchecked")]
fn __get_unchecked(_: *mut CxxVector<$ty>, _: usize) -> *mut $ty;
}
}
unsafe { __get_unchecked(v, pos) }
}
vector_element_by_value_methods!($kind, $segment, $ty);
#[doc(hidden)]
fn __unique_ptr_null() -> MaybeUninit<*mut c_void> {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$unique_ptr$std$vector$", $segment, "$null")]
fn __unique_ptr_null(this: *mut MaybeUninit<*mut c_void>);
}
}
let mut repr = MaybeUninit::uninit();
unsafe { __unique_ptr_null(&mut repr) }
repr
}
#[doc(hidden)]
unsafe fn __unique_ptr_raw(raw: *mut CxxVector<Self>) -> MaybeUninit<*mut c_void> {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$unique_ptr$std$vector$", $segment, "$raw")]
fn __unique_ptr_raw(this: *mut MaybeUninit<*mut c_void>, raw: *mut CxxVector<$ty>);
}
}
let mut repr = MaybeUninit::uninit();
unsafe { __unique_ptr_raw(&mut repr, raw) }
repr
}
#[doc(hidden)]
unsafe fn __unique_ptr_get(repr: MaybeUninit<*mut c_void>) -> *const CxxVector<Self> {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$unique_ptr$std$vector$", $segment, "$get")]
fn __unique_ptr_get(this: *const MaybeUninit<*mut c_void>) -> *const CxxVector<$ty>;
}
}
unsafe { __unique_ptr_get(&repr) }
}
#[doc(hidden)]
unsafe fn __unique_ptr_release(mut repr: MaybeUninit<*mut c_void>) -> *mut CxxVector<Self> {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$unique_ptr$std$vector$", $segment, "$release")]
fn __unique_ptr_release(this: *mut MaybeUninit<*mut c_void>) -> *mut CxxVector<$ty>;
}
}
unsafe { __unique_ptr_release(&mut repr) }
}
#[doc(hidden)]
unsafe fn __unique_ptr_drop(mut repr: MaybeUninit<*mut c_void>) {
extern "C" {
attr! {
#[link_name = concat!("cxxbridge1$unique_ptr$std$vector$", $segment, "$drop")]
fn __unique_ptr_drop(this: *mut MaybeUninit<*mut c_void>);
}
}
unsafe { __unique_ptr_drop(&mut repr) }
}
}
};
}
macro_rules! impl_vector_element_for_primitive {
($ty:ident) => {
impl_vector_element!(trivial, stringify!($ty), stringify!($ty), $ty);
};
}
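// For example, `impl_vector_element_for_primitive!(u8)` implements
// `VectorElement for u8` against extern symbols such as
// "cxxbridge1$std$vector$u8$size", matching the definitions emitted on the
// C++ side of the bridge.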
impl_vector_element_for_primitive!(u8);
impl_vector_element_for_primitive!(u16);
impl_vector_element_for_primitive!(u32);
impl_vector_element_for_primitive!(u64);
impl_vector_element_for_primitive!(usize);
impl_vector_element_for_primitive!(i8);
impl_vector_element_for_primitive!(i16);
impl_vector_element_for_primitive!(i32);
impl_vector_element_for_primitive!(i64);
impl_vector_element_for_primitive!(isize);
impl_vector_element_for_primitive!(f32);
impl_vector_element_for_primitive!(f64);
impl_vector_element!(opaque, "string", "CxxString", CxxString);<|fim▁end|> | v: Pin<&'a mut CxxVector<T>>,
index: usize,
}
|
<|file_name|>scenarios.js<|end_file_name|><|fim▁begin|>'use strict';
/* https://github.com/angular/protractor/blob/master/docs/toc.md */
describe('my app', function() {
it('should automatically redirect to /home when location hash/fragment is empty', function() {
browser.get('index.html');
expect(browser.getLocationAbsUrl()).toMatch("/home");
});
describe('view1', function() {
beforeEach(function() {
browser.get('index.html#/home');
});
it('should render home when user navigates to /home', function() {
expect(element.all(by.css('[ng-view] p')).first().getText()).
toMatch(/partial for view 1/);
});<|fim▁hole|>
describe('view2', function() {
beforeEach(function() {
browser.get('index.html#/view2');
});
it('should render view2 when user navigates to /view2', function() {
expect(element.all(by.css('[ng-view] p')).first().getText()).
toMatch(/partial for view 2/);
});
});
});<|fim▁end|> |
});
|
<|file_name|>LockingGridView.js<|end_file_name|><|fim▁begin|>/*!
* Ext JS Library 3.3.1
* Copyright(c) 2006-2010 Sencha Inc.
* [email protected]
* http://www.sencha.com/license
*/
Ext.ns('Ext.ux.grid');
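// Illustrative usage (a sketch; the store and column definitions are
// hypothetical):
//
// var grid = new Ext.grid.GridPanel({
//     store: store,
//     colModel: new Ext.ux.grid.LockingColumnModel([
//         {header: 'Name', dataIndex: 'name', width: 120, locked: true},
//         {header: 'Notes', dataIndex: 'notes', width: 300}
//     ]),
//     view: new Ext.ux.grid.LockingGridView()
// });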
Ext.ux.grid.LockingGridView = Ext.extend(Ext.grid.GridView, {
lockText : 'Lock',
unlockText : 'Unlock',
rowBorderWidth : 1,
lockedBorderWidth : 1,
/*
* This option ensures that row heights stay synchronized between the
* locked and unlocked sides. It only needs to be enabled when the row
* heights aren't predictable.
*/
syncHeights: false,
initTemplates : function(){
var ts = this.templates || {};
if (!ts.masterTpl) {
ts.masterTpl = new Ext.Template(
'<div class="x-grid3" hidefocus="true">',
'<div class="x-grid3-locked">',
'<div class="x-grid3-header"><div class="x-grid3-header-inner"><div class="x-grid3-header-offset" style="{lstyle}">{lockedHeader}</div></div><div class="x-clear"></div></div>',
'<div class="x-grid3-scroller"><div class="x-grid3-body" style="{lstyle}">{lockedBody}</div><div class="x-grid3-scroll-spacer"></div></div>',
'</div>',
'<div class="x-grid3-viewport x-grid3-unlocked">',
'<div class="x-grid3-header"><div class="x-grid3-header-inner"><div class="x-grid3-header-offset" style="{ostyle}">{header}</div></div><div class="x-clear"></div></div>',
'<div class="x-grid3-scroller"><div class="x-grid3-body" style="{bstyle}">{body}</div><a href="#" class="x-grid3-focus" tabIndex="-1"></a></div>',
'</div>',
'<div class="x-grid3-resize-marker"> </div>',
'<div class="x-grid3-resize-proxy"> </div>',
'</div>'
);
}
this.templates = ts;
Ext.ux.grid.LockingGridView.superclass.initTemplates.call(this);
},
getEditorParent : function(ed){
return this.el.dom;
},
initElements : function(){
var el = Ext.get(this.grid.getGridEl().dom.firstChild),
lockedWrap = el.child('div.x-grid3-locked'),
lockedHd = lockedWrap.child('div.x-grid3-header'),
lockedScroller = lockedWrap.child('div.x-grid3-scroller'),
mainWrap = el.child('div.x-grid3-viewport'),
mainHd = mainWrap.child('div.x-grid3-header'),
scroller = mainWrap.child('div.x-grid3-scroller');
if (this.grid.hideHeaders) {
lockedHd.setDisplayed(false);
mainHd.setDisplayed(false);
}
if(this.forceFit){
scroller.setStyle('overflow-x', 'hidden');
}
Ext.apply(this, {
el : el,
mainWrap: mainWrap,
mainHd : mainHd,
innerHd : mainHd.dom.firstChild,
scroller: scroller,
mainBody: scroller.child('div.x-grid3-body'),
focusEl : scroller.child('a'),
resizeMarker: el.child('div.x-grid3-resize-marker'),
resizeProxy : el.child('div.x-grid3-resize-proxy'),
lockedWrap: lockedWrap,
lockedHd: lockedHd,
lockedScroller: lockedScroller,
lockedBody: lockedScroller.child('div.x-grid3-body'),
lockedInnerHd: lockedHd.child('div.x-grid3-header-inner', true)
});
this.focusEl.swallowEvent('click', true);
},
getLockedRows : function(){
return this.hasRows() ? this.lockedBody.dom.childNodes : [];
},
getLockedRow : function(row){
return this.getLockedRows()[row];
},
getCell : function(row, col){
var lockedLen = this.cm.getLockedCount();
if(col < lockedLen){
return this.getLockedRow(row).getElementsByTagName('td')[col];
}
return Ext.ux.grid.LockingGridView.superclass.getCell.call(this, row, col - lockedLen);
},
getHeaderCell : function(index){
var lockedLen = this.cm.getLockedCount();
if(index < lockedLen){
return this.lockedHd.dom.getElementsByTagName('td')[index];
}
return Ext.ux.grid.LockingGridView.superclass.getHeaderCell.call(this, index - lockedLen);
},
addRowClass : function(row, cls){
var lockedRow = this.getLockedRow(row);
if(lockedRow){
this.fly(lockedRow).addClass(cls);
}
Ext.ux.grid.LockingGridView.superclass.addRowClass.call(this, row, cls);
},
removeRowClass : function(row, cls){
var lockedRow = this.getLockedRow(row);
if(lockedRow){
this.fly(lockedRow).removeClass(cls);
}
Ext.ux.grid.LockingGridView.superclass.removeRowClass.call(this, row, cls);
},
removeRow : function(row) {
Ext.removeNode(this.getLockedRow(row));
Ext.ux.grid.LockingGridView.superclass.removeRow.call(this, row);
},
removeRows : function(firstRow, lastRow){
var lockedBody = this.lockedBody.dom,
rowIndex = firstRow;
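// Always remove the node at firstRow: the live childNodes list reindexes
// as nodes are removed, so the fixed index walks firstRow..lastRow.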
for(; rowIndex <= lastRow; rowIndex++){
Ext.removeNode(lockedBody.childNodes[firstRow]);
}
Ext.ux.grid.LockingGridView.superclass.removeRows.call(this, firstRow, lastRow);
},
syncScroll : function(e){
this.lockedScroller.dom.scrollTop = this.scroller.dom.scrollTop;
Ext.ux.grid.LockingGridView.superclass.syncScroll.call(this, e);
},
updateSortIcon : function(col, dir){
var sortClasses = this.sortClasses,
lockedHeaders = this.lockedHd.select('td').removeClass(sortClasses),
headers = this.mainHd.select('td').removeClass(sortClasses),
lockedLen = this.cm.getLockedCount(),
cls = sortClasses[dir == 'DESC' ? 1 : 0];
if(col < lockedLen){
lockedHeaders.item(col).addClass(cls);
}else{
headers.item(col - lockedLen).addClass(cls);
}
},
updateAllColumnWidths : function(){
var tw = this.getTotalWidth(),
clen = this.cm.getColumnCount(),
lw = this.getLockedWidth(),
llen = this.cm.getLockedCount(),
ws = [], len, i;
this.updateLockedWidth();
for(i = 0; i < clen; i++){
ws[i] = this.getColumnWidth(i);
var hd = this.getHeaderCell(i);
hd.style.width = ws[i];
}
var lns = this.getLockedRows(), ns = this.getRows(), row, trow, j;
for(i = 0, len = ns.length; i < len; i++){
row = lns[i];
row.style.width = lw;
if(row.firstChild){
row.firstChild.style.width = lw;
trow = row.firstChild.rows[0];
for (j = 0; j < llen; j++) {
trow.childNodes[j].style.width = ws[j];
}
}
row = ns[i];
row.style.width = tw;
if(row.firstChild){
row.firstChild.style.width = tw;
trow = row.firstChild.rows[0];
for (j = llen; j < clen; j++) {
trow.childNodes[j - llen].style.width = ws[j];
}
}
}
this.onAllColumnWidthsUpdated(ws, tw);
this.syncHeaderHeight();
},
updateColumnWidth : function(col, width){
var w = this.getColumnWidth(col),
llen = this.cm.getLockedCount(),
ns, rw, c, row;
this.updateLockedWidth();
if(col < llen){
ns = this.getLockedRows();
rw = this.getLockedWidth();
c = col;
}else{
ns = this.getRows();
rw = this.getTotalWidth();
c = col - llen;
}
var hd = this.getHeaderCell(col);
hd.style.width = w;
for(var i = 0, len = ns.length; i < len; i++){
row = ns[i];
row.style.width = rw;
if(row.firstChild){
row.firstChild.style.width = rw;
row.firstChild.rows[0].childNodes[c].style.width = w;
}
}
this.onColumnWidthUpdated(col, w, this.getTotalWidth());
this.syncHeaderHeight();
},
updateColumnHidden : function(col, hidden){
var llen = this.cm.getLockedCount(),
ns, rw, c, row,
display = hidden ? 'none' : '';
this.updateLockedWidth();
if(col < llen){
ns = this.getLockedRows();
rw = this.getLockedWidth();
c = col;
}else{
ns = this.getRows();
rw = this.getTotalWidth();
c = col - llen;
}
var hd = this.getHeaderCell(col);
hd.style.display = display;
for(var i = 0, len = ns.length; i < len; i++){
row = ns[i];
row.style.width = rw;
if(row.firstChild){
row.firstChild.style.width = rw;
row.firstChild.rows[0].childNodes[c].style.display = display;
}
}
this.onColumnHiddenUpdated(col, hidden, this.getTotalWidth());
delete this.lastViewWidth;
this.layout();
},
doRender : function(cs, rs, ds, startRow, colCount, stripe){
var ts = this.templates, ct = ts.cell, rt = ts.row, last = colCount-1,
tstyle = 'width:'+this.getTotalWidth()+';',
lstyle = 'width:'+this.getLockedWidth()+';',
buf = [], lbuf = [], cb, lcb, c, p = {}, rp = {}, r;
for(var j = 0, len = rs.length; j < len; j++){
r = rs[j]; cb = []; lcb = [];
var rowIndex = (j+startRow);
for(var i = 0; i < colCount; i++){
c = cs[i];
p.id = c.id;
p.css = (i === 0 ? 'x-grid3-cell-first ' : (i == last ? 'x-grid3-cell-last ' : '')) +
(this.cm.config[i].cellCls ? ' ' + this.cm.config[i].cellCls : '');
p.attr = p.cellAttr = '';
p.value = c.renderer(r.data[c.name], p, r, rowIndex, i, ds);
p.style = c.style;
if(Ext.isEmpty(p.value)){
p.value = ' ';
}
if(this.markDirty && r.dirty && Ext.isDefined(r.modified[c.name])){
p.css += ' x-grid3-dirty-cell';
}
if(c.locked){
lcb[lcb.length] = ct.apply(p);
}else{
cb[cb.length] = ct.apply(p);
}
}
var alt = [];
if(stripe && ((rowIndex+1) % 2 === 0)){
alt[0] = 'x-grid3-row-alt';
}
if(r.dirty){
alt[1] = ' x-grid3-dirty-row';
}
rp.cols = colCount;
if(this.getRowClass){
alt[2] = this.getRowClass(r, rowIndex, rp, ds);
}
rp.alt = alt.join(' ');
rp.cells = cb.join('');
rp.tstyle = tstyle;
buf[buf.length] = rt.apply(rp);
rp.cells = lcb.join('');
rp.tstyle = lstyle;
lbuf[lbuf.length] = rt.apply(rp);
}
return [buf.join(''), lbuf.join('')];
},
processRows : function(startRow, skipStripe){
if(!this.ds || this.ds.getCount() < 1){
return;
}
var rows = this.getRows(),
lrows = this.getLockedRows(),
row, lrow;
skipStripe = skipStripe || !this.grid.stripeRows;
startRow = startRow || 0;
for(var i = 0, len = rows.length; i < len; ++i){
row = rows[i];
lrow = lrows[i];
row.rowIndex = i;
lrow.rowIndex = i;
if(!skipStripe){
row.className = row.className.replace(this.rowClsRe, ' ');
lrow.className = lrow.className.replace(this.rowClsRe, ' ');
if ((i + 1) % 2 === 0){
row.className += ' x-grid3-row-alt';
lrow.className += ' x-grid3-row-alt';
}
}
this.syncRowHeights(row, lrow);
}
if(startRow === 0){
Ext.fly(rows[0]).addClass(this.firstRowCls);
Ext.fly(lrows[0]).addClass(this.firstRowCls);
}
Ext.fly(rows[rows.length - 1]).addClass(this.lastRowCls);
Ext.fly(lrows[lrows.length - 1]).addClass(this.lastRowCls);
},
syncRowHeights: function(row1, row2){
if(this.syncHeights){
var el1 = Ext.get(row1),
el2 = Ext.get(row2),
h1 = el1.getHeight(),
h2 = el2.getHeight();
if(h1 > h2){
el2.setHeight(h1);
}else if(h2 > h1){
el1.setHeight(h2);
}
}
},
afterRender : function(){
if(!this.ds || !this.cm){
return;
}
var bd = this.renderRows() || [' ', ' '];
this.mainBody.dom.innerHTML = bd[0];
this.lockedBody.dom.innerHTML = bd[1];
this.processRows(0, true);
if(this.deferEmptyText !== true){
this.applyEmptyText();
}
this.grid.fireEvent('viewready', this.grid);
},
renderUI : function(){
var templates = this.templates,
header = this.renderHeaders(),
body = templates.body.apply({rows:' '});
return templates.masterTpl.apply({
body : body,
header: header[0],
ostyle: 'width:' + this.getOffsetWidth() + ';',
bstyle: 'width:' + this.getTotalWidth() + ';',
lockedBody: body,
lockedHeader: header[1],
lstyle: 'width:'+this.getLockedWidth()+';'
});
},
afterRenderUI: function(){
var g = this.grid;
this.initElements();
Ext.fly(this.innerHd).on('click', this.handleHdDown, this);
Ext.fly(this.lockedInnerHd).on('click', this.handleHdDown, this);
this.mainHd.on({
scope: this,
mouseover: this.handleHdOver,
mouseout: this.handleHdOut,
mousemove: this.handleHdMove
});
this.lockedHd.on({
scope: this,
mouseover: this.handleHdOver,
mouseout: this.handleHdOut,
mousemove: this.handleHdMove
});
this.scroller.on('scroll', this.syncScroll, this);
if(g.enableColumnResize !== false){
this.splitZone = new Ext.grid.GridView.SplitDragZone(g, this.mainHd.dom);
this.splitZone.setOuterHandleElId(Ext.id(this.lockedHd.dom));
this.splitZone.setOuterHandleElId(Ext.id(this.mainHd.dom));
}
if(g.enableColumnMove){
this.columnDrag = new Ext.grid.GridView.ColumnDragZone(g, this.innerHd);
this.columnDrag.setOuterHandleElId(Ext.id(this.lockedInnerHd));
this.columnDrag.setOuterHandleElId(Ext.id(this.innerHd));
this.columnDrop = new Ext.grid.HeaderDropZone(g, this.mainHd.dom);
}<|fim▁hole|> if(g.enableHdMenu !== false){
this.hmenu = new Ext.menu.Menu({id: g.id + '-hctx'});
this.hmenu.add(
{itemId: 'asc', text: this.sortAscText, cls: 'xg-hmenu-sort-asc'},
{itemId: 'desc', text: this.sortDescText, cls: 'xg-hmenu-sort-desc'}
);
if(this.grid.enableColLock !== false){
this.hmenu.add('-',
{itemId: 'lock', text: this.lockText, cls: 'xg-hmenu-lock'},
{itemId: 'unlock', text: this.unlockText, cls: 'xg-hmenu-unlock'}
);
}
if(g.enableColumnHide !== false){
this.colMenu = new Ext.menu.Menu({id:g.id + '-hcols-menu'});
this.colMenu.on({
scope: this,
beforeshow: this.beforeColMenuShow,
itemclick: this.handleHdMenuClick
});
this.hmenu.add('-', {
itemId:'columns',
hideOnClick: false,
text: this.columnsText,
menu: this.colMenu,
iconCls: 'x-cols-icon'
});
}
this.hmenu.on('itemclick', this.handleHdMenuClick, this);
}
if(g.trackMouseOver){
this.mainBody.on({
scope: this,
mouseover: this.onRowOver,
mouseout: this.onRowOut
});
this.lockedBody.on({
scope: this,
mouseover: this.onRowOver,
mouseout: this.onRowOut
});
}
if(g.enableDragDrop || g.enableDrag){
this.dragZone = new Ext.grid.GridDragZone(g, {
ddGroup : g.ddGroup || 'GridDD'
});
}
this.updateHeaderSortState();
},
layout : function(){
if(!this.mainBody){
return;
}
var g = this.grid;
var c = g.getGridEl();
var csize = c.getSize(true);
var vw = csize.width;
if(!g.hideHeaders && (vw < 20 || csize.height < 20)){
return;
}
this.syncHeaderHeight();
if(g.autoHeight){
this.scroller.dom.style.overflow = 'visible';
this.lockedScroller.dom.style.overflow = 'visible';
if(Ext.isWebKit){
this.scroller.dom.style.position = 'static';
this.lockedScroller.dom.style.position = 'static';
}
}else{
this.el.setSize(csize.width, csize.height);
var hdHeight = this.mainHd.getHeight();
var vh = csize.height - (hdHeight);
}
this.updateLockedWidth();
if(this.forceFit){
if(this.lastViewWidth != vw){
this.fitColumns(false, false);
this.lastViewWidth = vw;
}
}else {
this.autoExpand();
this.syncHeaderScroll();
}
this.onLayout(vw, vh);
},
getOffsetWidth : function() {
return (this.cm.getTotalWidth() - this.cm.getTotalLockedWidth() + this.getScrollOffset()) + 'px';
},
renderHeaders : function(){
var cm = this.cm,
ts = this.templates,
ct = ts.hcell,
cb = [], lcb = [],
p = {},
len = cm.getColumnCount(),
last = len - 1;
for(var i = 0; i < len; i++){
p.id = cm.getColumnId(i);
p.value = cm.getColumnHeader(i) || '';
p.style = this.getColumnStyle(i, true);
p.tooltip = this.getColumnTooltip(i);
p.css = (i === 0 ? 'x-grid3-cell-first ' : (i == last ? 'x-grid3-cell-last ' : '')) +
(cm.config[i].headerCls ? ' ' + cm.config[i].headerCls : '');
if(cm.config[i].align == 'right'){
p.istyle = 'padding-right:16px';
} else {
delete p.istyle;
}
if(cm.isLocked(i)){
lcb[lcb.length] = ct.apply(p);
}else{
cb[cb.length] = ct.apply(p);
}
}
return [ts.header.apply({cells: cb.join(''), tstyle:'width:'+this.getTotalWidth()+';'}),
ts.header.apply({cells: lcb.join(''), tstyle:'width:'+this.getLockedWidth()+';'})];
},
updateHeaders : function(){
var hd = this.renderHeaders();
this.innerHd.firstChild.innerHTML = hd[0];
this.innerHd.firstChild.style.width = this.getOffsetWidth();
this.innerHd.firstChild.firstChild.style.width = this.getTotalWidth();
this.lockedInnerHd.firstChild.innerHTML = hd[1];
var lw = this.getLockedWidth();
this.lockedInnerHd.firstChild.style.width = lw;
this.lockedInnerHd.firstChild.firstChild.style.width = lw;
},
getResolvedXY : function(resolved){
if(!resolved){
return null;
}
var c = resolved.cell, r = resolved.row;
return c ? Ext.fly(c).getXY() : [this.scroller.getX(), Ext.fly(r).getY()];
},
syncFocusEl : function(row, col, hscroll){
Ext.ux.grid.LockingGridView.superclass.syncFocusEl.call(this, row, col, col < this.cm.getLockedCount() ? false : hscroll);
},
ensureVisible : function(row, col, hscroll){
return Ext.ux.grid.LockingGridView.superclass.ensureVisible.call(this, row, col, col < this.cm.getLockedCount() ? false : hscroll);
},
insertRows : function(dm, firstRow, lastRow, isUpdate){
var last = dm.getCount() - 1;
if(!isUpdate && firstRow === 0 && lastRow >= last){
this.refresh();
}else{
if(!isUpdate){
this.fireEvent('beforerowsinserted', this, firstRow, lastRow);
}
var html = this.renderRows(firstRow, lastRow),
before = this.getRow(firstRow);
if(before){
if(firstRow === 0){
this.removeRowClass(0, this.firstRowCls);
}
Ext.DomHelper.insertHtml('beforeBegin', before, html[0]);
before = this.getLockedRow(firstRow);
Ext.DomHelper.insertHtml('beforeBegin', before, html[1]);
}else{
this.removeRowClass(last - 1, this.lastRowCls);
Ext.DomHelper.insertHtml('beforeEnd', this.mainBody.dom, html[0]);
Ext.DomHelper.insertHtml('beforeEnd', this.lockedBody.dom, html[1]);
}
if(!isUpdate){
this.fireEvent('rowsinserted', this, firstRow, lastRow);
this.processRows(firstRow);
}else if(firstRow === 0 || firstRow >= last){
this.addRowClass(firstRow, firstRow === 0 ? this.firstRowCls : this.lastRowCls);
}
}
this.syncFocusEl(firstRow);
},
getColumnStyle : function(col, isHeader){
var style = !isHeader ? this.cm.config[col].cellStyle || this.cm.config[col].css || '' : this.cm.config[col].headerStyle || '';
style += 'width:'+this.getColumnWidth(col)+';';
if(this.cm.isHidden(col)){
style += 'display:none;';
}
var align = this.cm.config[col].align;
if(align){
style += 'text-align:'+align+';';
}
return style;
},
getLockedWidth : function() {
return this.cm.getTotalLockedWidth() + 'px';
},
getTotalWidth : function() {
return (this.cm.getTotalWidth() - this.cm.getTotalLockedWidth()) + 'px';
},
getColumnData : function(){
var cs = [], cm = this.cm, colCount = cm.getColumnCount();
for(var i = 0; i < colCount; i++){
var name = cm.getDataIndex(i);
cs[i] = {
name : (!Ext.isDefined(name) ? this.ds.fields.get(i).name : name),
renderer : cm.getRenderer(i),
id : cm.getColumnId(i),
style : this.getColumnStyle(i),
locked : cm.isLocked(i)
};
}
return cs;
},
renderBody : function(){
var markup = this.renderRows() || [' ', ' '];
return [this.templates.body.apply({rows: markup[0]}), this.templates.body.apply({rows: markup[1]})];
},
refreshRow: function(record){
var store = this.ds,
colCount = this.cm.getColumnCount(),
columns = this.getColumnData(),
last = colCount - 1,
cls = ['x-grid3-row'],
rowParams = {
tstyle: String.format("width: {0};", this.getTotalWidth())
},
lockedRowParams = {
tstyle: String.format("width: {0};", this.getLockedWidth())
},
colBuffer = [],
lockedColBuffer = [],
cellTpl = this.templates.cell,
rowIndex,
row,
lockedRow,
column,
meta,
css,
i;
if (Ext.isNumber(record)) {
rowIndex = record;
record = store.getAt(rowIndex);
} else {
rowIndex = store.indexOf(record);
}
if (!record || rowIndex < 0) {
return;
}
for (i = 0; i < colCount; i++) {
column = columns[i];
if (i == 0) {
css = 'x-grid3-cell-first';
} else {
css = (i == last) ? 'x-grid3-cell-last ' : '';
}
meta = {
id: column.id,
style: column.style,
css: css,
attr: "",
cellAttr: ""
};
meta.value = column.renderer.call(column.scope, record.data[column.name], meta, record, rowIndex, i, store);
if (Ext.isEmpty(meta.value)) {
meta.value = ' ';
}
if (this.markDirty && record.dirty && typeof record.modified[column.name] != 'undefined') {
meta.css += ' x-grid3-dirty-cell';
}
if (column.locked) {
lockedColBuffer[i] = cellTpl.apply(meta);
} else {
colBuffer[i] = cellTpl.apply(meta);
}
}
row = this.getRow(rowIndex);
row.className = '';
lockedRow = this.getLockedRow(rowIndex);
lockedRow.className = '';
if (this.grid.stripeRows && ((rowIndex + 1) % 2 === 0)) {
cls.push('x-grid3-row-alt');
}
if (this.getRowClass) {
rowParams.cols = colCount;
cls.push(this.getRowClass(record, rowIndex, rowParams, store));
}
// Unlocked rows
this.fly(row).addClass(cls).setStyle(rowParams.tstyle);
rowParams.cells = colBuffer.join("");
row.innerHTML = this.templates.rowInner.apply(rowParams);
// Locked rows
this.fly(lockedRow).addClass(cls).setStyle(lockedRowParams.tstyle);
lockedRowParams.cells = lockedColBuffer.join("");
lockedRow.innerHTML = this.templates.rowInner.apply(lockedRowParams);
lockedRow.rowIndex = rowIndex;
this.syncRowHeights(row, lockedRow);
this.fireEvent('rowupdated', this, rowIndex, record);
},
refresh : function(headersToo){
this.fireEvent('beforerefresh', this);
this.grid.stopEditing(true);
var result = this.renderBody();
this.mainBody.update(result[0]).setWidth(this.getTotalWidth());
this.lockedBody.update(result[1]).setWidth(this.getLockedWidth());
if(headersToo === true){
this.updateHeaders();
this.updateHeaderSortState();
}
this.processRows(0, true);
this.layout();
this.applyEmptyText();
this.fireEvent('refresh', this);
},
onDenyColumnLock : function(){
},
initData : function(ds, cm){
if(this.cm){
this.cm.un('columnlockchange', this.onColumnLock, this);
}
Ext.ux.grid.LockingGridView.superclass.initData.call(this, ds, cm);
if(this.cm){
this.cm.on('columnlockchange', this.onColumnLock, this);
}
},
onColumnLock : function(){
this.refresh(true);
},
handleHdMenuClick : function(item){
var index = this.hdCtxIndex,
cm = this.cm,
id = item.getItemId(),
llen = cm.getLockedCount();
switch(id){
case 'lock':
if(cm.getColumnCount(true) <= llen + 1){
this.onDenyColumnLock();
return undefined;
}
cm.setLocked(index, true);
if(llen != index){
cm.moveColumn(index, llen);
this.grid.fireEvent('columnmove', index, llen);
}
break;
case 'unlock':
if(llen - 1 != index){
cm.setLocked(index, false, true);
cm.moveColumn(index, llen - 1);
this.grid.fireEvent('columnmove', index, llen - 1);
}else{
cm.setLocked(index, false);
}
break;
default:
return Ext.ux.grid.LockingGridView.superclass.handleHdMenuClick.call(this, item);
}
return true;
},
handleHdDown : function(e, t){
Ext.ux.grid.LockingGridView.superclass.handleHdDown.call(this, e, t);
if(this.grid.enableColLock !== false){
if(Ext.fly(t).hasClass('x-grid3-hd-btn')){
var hd = this.findHeaderCell(t),
index = this.getCellIndex(hd),
ms = this.hmenu.items, cm = this.cm;
ms.get('lock').setDisabled(cm.isLocked(index));
ms.get('unlock').setDisabled(!cm.isLocked(index));
}
}
},
syncHeaderHeight: function(){
var hrow = Ext.fly(this.innerHd).child('tr', true),
lhrow = Ext.fly(this.lockedInnerHd).child('tr', true);
hrow.style.height = 'auto';
lhrow.style.height = 'auto';
var hd = hrow.offsetHeight,
lhd = lhrow.offsetHeight,
height = Math.max(lhd, hd) + 'px';
hrow.style.height = height;
lhrow.style.height = height;
},
updateLockedWidth: function(){
var lw = this.cm.getTotalLockedWidth(),
tw = this.cm.getTotalWidth() - lw,
csize = this.grid.getGridEl().getSize(true),
lp = Ext.isBorderBox ? 0 : this.lockedBorderWidth,
rp = Ext.isBorderBox ? 0 : this.rowBorderWidth,
vw = (csize.width - lw - lp - rp) + 'px',
so = this.getScrollOffset();
if(!this.grid.autoHeight){
var vh = (csize.height - this.mainHd.getHeight()) + 'px';
this.lockedScroller.dom.style.height = vh;
this.scroller.dom.style.height = vh;
}
this.lockedWrap.dom.style.width = (lw + rp) + 'px';
this.scroller.dom.style.width = vw;
this.mainWrap.dom.style.left = (lw + lp + rp) + 'px';
if(this.innerHd){
this.lockedInnerHd.firstChild.style.width = lw + 'px';
this.lockedInnerHd.firstChild.firstChild.style.width = lw + 'px';
this.innerHd.style.width = vw;
this.innerHd.firstChild.style.width = (tw + rp + so) + 'px';
this.innerHd.firstChild.firstChild.style.width = tw + 'px';
}
if(this.mainBody){
this.lockedBody.dom.style.width = (lw + rp) + 'px';
this.mainBody.dom.style.width = (tw + rp) + 'px';
}
}
});
Ext.ux.grid.LockingColumnModel = Ext.extend(Ext.grid.ColumnModel, {
/**
* Returns true if the given column index is currently locked
* @param {Number} colIndex The column index
* @return {Boolean} True if the column is locked
*/
isLocked : function(colIndex){
return this.config[colIndex].locked === true;
},
/**
* Locks or unlocks a given column
* @param {Number} colIndex The column index
* @param {Boolean} value True to lock, false to unlock
* @param {Boolean} suppressEvent Pass true to prevent the columnlockchange event from firing
*/
setLocked : function(colIndex, value, suppressEvent){
if (this.isLocked(colIndex) == value) {
return;
}
this.config[colIndex].locked = value;
if (!suppressEvent) {
this.fireEvent('columnlockchange', this, colIndex, value);
}
},
/**
* Returns the total width of all locked columns
* @return {Number} The width of all locked columns
*/
getTotalLockedWidth : function(){
var totalWidth = 0;
for (var i = 0, len = this.config.length; i < len; i++) {
if (this.isLocked(i) && !this.isHidden(i)) {
totalWidth += this.getColumnWidth(i);
}
}
return totalWidth;
},
/**
* Returns the total number of locked columns
* @return {Number} The number of locked columns
*/
getLockedCount : function() {
var len = this.config.length;
for (var i = 0; i < len; i++) {
if (!this.isLocked(i)) {
return i;
}
}
//if we get to this point all of the columns are locked so we return the total
return len;
},
/**
* Moves a column from one position to another
* @param {Number} oldIndex The current column index
* @param {Number} newIndex The destination column index
*/
moveColumn : function(oldIndex, newIndex){
var oldLocked = this.isLocked(oldIndex),
newLocked = this.isLocked(newIndex);
if (oldIndex < newIndex && oldLocked && !newLocked) {
this.setLocked(oldIndex, false, true);
} else if (oldIndex > newIndex && !oldLocked && newLocked) {
this.setLocked(oldIndex, true, true);
}
Ext.ux.grid.LockingColumnModel.superclass.moveColumn.apply(this, arguments);
}
});<|fim▁end|> | |
<|file_name|>grpc_error_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package tabletconn
import (
"testing"
vtrpcpb "github.com/youtube/vitess/go/vt/proto/vtrpc"
"github.com/youtube/vitess/go/vt/vterrors"
)
func TestTabletErrorFromRPCError(t *testing.T) {
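// Each case must map to Code_INVALID_ARGUMENT, whether the error carries
// only the legacy code, both the legacy and the new codes, or only the
// new code.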
testcases := []struct {
in *vtrpcpb.RPCError
want vtrpcpb.Code
}{{
in: &vtrpcpb.RPCError{
LegacyCode: vtrpcpb.LegacyErrorCode_BAD_INPUT_LEGACY,
Message: "bad input",
},
want: vtrpcpb.Code_INVALID_ARGUMENT,
}, {
in: &vtrpcpb.RPCError{
LegacyCode: vtrpcpb.LegacyErrorCode_BAD_INPUT_LEGACY,
Message: "bad input",
Code: vtrpcpb.Code_INVALID_ARGUMENT,<|fim▁hole|> want: vtrpcpb.Code_INVALID_ARGUMENT,
}, {
in: &vtrpcpb.RPCError{
Message: "bad input",
Code: vtrpcpb.Code_INVALID_ARGUMENT,
},
want: vtrpcpb.Code_INVALID_ARGUMENT,
}}
for _, tcase := range testcases {
got := vterrors.Code(ErrorFromVTRPC(tcase.in))
if got != tcase.want {
t.Errorf("FromVtRPCError(%v):\n%v, want\n%v", tcase.in, got, tcase.want)
}
}
}<|fim▁end|> | }, |
<|file_name|>is-finite-number.js<|end_file_name|><|fim▁begin|>/**
* The MIT License (MIT)
*
* Copyright (c) 2014-2022 Mickael Jeanroy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell<|fim▁hole|> * copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import {isNumber} from './is-number.js';
/**
* Check that a given value is a finite number.
*
* @param {*} a Value to check.
* @return {boolean} `true` if parameter is a finite number.
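*
* Illustrative results (assuming `isNumber` rejects non-number values such
* as strings): `isFiniteNumber(1) === true`, `isFiniteNumber(NaN) === false`,
* `isFiniteNumber(Infinity) === false`, `isFiniteNumber('1') === false`.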
*/
export function isFiniteNumber(a) {
return isNumber(a) && isFinite(a);
}<|fim▁end|> | |
<|file_name|>bootstrap.js<|end_file_name|><|fim▁begin|>/* Bootstrap v3.3.1 (http://getbootstrap.com)
* Copyright 2011-2014 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* Modifications from Andoni M. Garcia.
*/
if (typeof jQuery === 'undefined') {
throw new Error('Bootstrap\'s JavaScript requires jQuery')
}
+function ($) {
var version = $.fn.jquery.split(' ')[0].split('.')
if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1)) {
throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher')
}
}(jQuery);
/* ========================================================================
* Bootstrap: transition.js v3.3.1
* http://getbootstrap.com/javascript/#transitions
* ========================================================================
* Copyright 2011-2014 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
// ============================================================
function transitionEnd() {
var el = document.createElement('bootstrap')
var transEndEventNames = {
WebkitTransition : 'webkitTransitionEnd',
MozTransition : 'transitionend',
OTransition : 'oTransitionEnd otransitionend',
transition : 'transitionend'
}
for (var name in transEndEventNames) {
if (el.style[name] !== undefined) {
return { end: transEndEventNames[name] }
}
}
return false // explicit for ie8 ( ._.)
}
// http://blog.alexmaccaw.com/css-transitions
$.fn.emulateTransitionEnd = function (duration) {
var called = false
var $el = this
$(this).one('bsTransitionEnd', function () { called = true })
var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
setTimeout(callback, duration)
return this
}
$(function () {
$.support.transition = transitionEnd()
if (!$.support.transition) return
$.event.special.bsTransitionEnd = {
bindType: $.support.transition.end,
delegateType: $.support.transition.end,
handle: function (e) {
if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments)
}
}
})
}(jQuery);
/* ========================================================================
* Bootstrap: collapse.js v3.3.1
* http://getbootstrap.com/javascript/#collapse
* ========================================================================
* Copyright 2011-2014 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// COLLAPSE PUBLIC CLASS DEFINITION
// ================================
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Collapse.DEFAULTS, options)
this.$trigger = $(this.options.trigger).filter('[href="#' + element.id + '"], [data-target="#' + element.id + '"]')
this.transitioning = null
if (this.options.parent) {
this.$parent = this.getParent()
} else {
this.addAriaAndCollapsedClass(this.$element, this.$trigger)
}
if (this.options.toggle) this.toggle()
}
Collapse.VERSION = '3.3.1'
Collapse.TRANSITION_DURATION = 350
Collapse.DEFAULTS = {
toggle: true,
trigger: '[data-toggle="collapse"]'
}
Collapse.prototype.dimension = function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
Collapse.prototype.show = function () {
if (this.transitioning || this.$element.hasClass('in')) return
var activesData
var actives = this.$parent && this.$parent.find('> .panel').children('.in, .collapsing')
if (actives && actives.length) {
activesData = actives.data('bs.collapse')
if (activesData && activesData.transitioning) return
}
var startEvent = $.Event('show.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
if (actives && actives.length) {
Plugin.call(actives, 'hide')
activesData || actives.data('bs.collapse', null)
}
var dimension = this.dimension()
this.$element
.removeClass('collapse')
.addClass('collapsing')[dimension](0)
.attr('aria-expanded', true)
this.$trigger
.removeClass('collapsed')
.attr('aria-expanded', true)
this.transitioning = 1
var complete = function () {
this.$element
.removeClass('collapsing')
.addClass('collapse in')[dimension]('')
this.transitioning = 0
this.$element
.trigger('shown.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
var scrollSize = $.camelCase(['scroll', dimension].join('-'))
this.$element
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
}
Collapse.prototype.hide = function () {
if (this.transitioning || !this.$element.hasClass('in')) return
var startEvent = $.Event('hide.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
var dimension = this.dimension()
this.$element[dimension](this.$element[dimension]())[0].offsetHeight
this.$element
.addClass('collapsing')
.removeClass('collapse in')
.attr('aria-expanded', false)
this.$trigger
.addClass('collapsed')
.attr('aria-expanded', false)
this.transitioning = 1
var complete = function () {
this.transitioning = 0
this.$element
.removeClass('collapsing')
.addClass('collapse')
.trigger('hidden.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
this.$element
[dimension](0)
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)
}
Collapse.prototype.toggle = function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
Collapse.prototype.getParent = function () {
return $(this.options.parent)
.find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
.each($.proxy(function (i, element) {
var $element = $(element)
this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
}, this))
.end()
}
Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
var isOpen = $element.hasClass('in')
$element.attr('aria-expanded', isOpen)
$trigger
.toggleClass('collapsed', !isOpen)
.attr('aria-expanded', isOpen)
}
function getTargetFromTrigger($trigger) {
var href
var target = $trigger.attr('data-target')
|| (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
return $(target)
}
// COLLAPSE PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.collapse')
var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data && options.toggle && option == 'show') options.toggle = false
if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.collapse
$.fn.collapse = Plugin
$.fn.collapse.Constructor = Collapse
// COLLAPSE NO CONFLICT
// ====================
$.fn.collapse.noConflict = function () {
$.fn.collapse = old
return this
}
// COLLAPSE DATA-API
// =================
$(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
var $this = $(this)
if (!$this.attr('data-target')) e.preventDefault()
var $target = getTargetFromTrigger($this)
var data = $target.data('bs.collapse')
var option = data ? 'toggle' : $.extend({}, $this.data(), { trigger: this })
Plugin.call($target, option)
})
}(jQuery);
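// Illustrative data-api markup for the collapse plugin (a sketch; the id and
// link text are hypothetical):
//
// <a data-toggle="collapse" href="#demo">Toggle</a>
// <div id="demo" class="collapse">...</div>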
/* ========================================================================
* Bootstrap: tab.js v3.3.1
* http://getbootstrap.com/javascript/#tabs
* ========================================================================
* Copyright 2011-2014 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// TAB CLASS DEFINITION
// ====================
var Tab = function (element) {
this.element = $(element)
}
Tab.VERSION = '3.3.1'
Tab.TRANSITION_DURATION = 150
Tab.prototype.show = function () {
var $this = this.element
var $ul = $this.closest('ul:not(.dropdown-menu)')
var selector = $this.data('target')
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
if ($this.parent('li').hasClass('active')) return
var $previous = $ul.find('.active:last a')
var hideEvent = $.Event('hide.bs.tab', {
relatedTarget: $this[0]
})
var showEvent = $.Event('show.bs.tab', {<|fim▁hole|> })
$previous.trigger(hideEvent)
$this.trigger(showEvent)
if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
var $target = $(selector)
this.activate($this.closest('li'), $ul)
this.activate($target, $target.parent(), function () {
$previous.trigger({
type: 'hidden.bs.tab',
relatedTarget: $this[0]
})
$this.trigger({
type: 'shown.bs.tab',
relatedTarget: $previous[0]
})
})
}
Tab.prototype.activate = function (element, container, callback) {
var $active = container.find('> .active')
var transition = callback
&& $.support.transition
&& (($active.length && $active.hasClass('fade')) || !!container.find('> .fade').length)
function next() {
$active
.removeClass('active')
.find('> .dropdown-menu > .active')
.removeClass('active')
.end()
.find('[data-toggle="tab"]')
.attr('aria-expanded', false)
element
.addClass('active')
.find('[data-toggle="tab"]')
.attr('aria-expanded', true)
if (transition) {
element[0].offsetWidth // reflow for transition
element.addClass('in')
} else {
element.removeClass('fade')
}
if (element.parent('.dropdown-menu')) {
element
.closest('li.dropdown')
.addClass('active')
.end()
.find('[data-toggle="tab"]')
.attr('aria-expanded', true)
}
callback && callback()
}
$active.length && transition ?
$active
.one('bsTransitionEnd', next)
.emulateTransitionEnd(Tab.TRANSITION_DURATION) :
next()
$active.removeClass('in')
}
// TAB PLUGIN DEFINITION
// =====================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.tab')
if (!data) $this.data('bs.tab', (data = new Tab(this)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.tab
$.fn.tab = Plugin
$.fn.tab.Constructor = Tab
// TAB NO CONFLICT
// ===============
$.fn.tab.noConflict = function () {
$.fn.tab = old
return this
}
// TAB DATA-API
// ============
var clickHandler = function (e) {
e.preventDefault()
Plugin.call($(this), 'show')
}
$(document)
.on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler)
.on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler)
}(jQuery);<|fim▁end|> | relatedTarget: $previous[0] |
<|file_name|>adaptor.go<|end_file_name|><|fim▁begin|>// Package fasthttpadaptor provides helper functions for converting net/http
// request handlers to fasthttp request handlers.
package fasthttpadaptor
import (
"io"
"net/http"
"net/url"
"github.com/valyala/fasthttp"
)
// NewFastHTTPHandlerFunc wraps net/http handler func to fasthttp
// request handler, so it can be passed to fasthttp server.
//
// While this function may be used for easy switching from net/http to fasthttp,<|fim▁hole|>// from net/http handler.
// * net/http -> fasthttp handler conversion has some overhead,
// so the returned handler will be always slower than manually written
// fasthttp handler.
//
// So it is advisable using this function only for quick net/http -> fasthttp
// switching. Then manually convert net/http handlers to fasthttp handlers
// according to https://github.com/valyala/fasthttp#switching-from-nethttp-to-fasthttp .
func NewFastHTTPHandlerFunc(h http.HandlerFunc) fasthttp.RequestHandler {
return NewFastHTTPHandler(h)
}
// NewFastHTTPHandler wraps net/http handler to fasthttp request handler,
// so it can be passed to fasthttp server.
//
// While this function may be used for easy switching from net/http to fasthttp,
// it has the following drawbacks comparing to using manually written fasthttp
// request handler:
//
// * A lot of useful functionality provided by fasthttp is missing
// from net/http handler.
// * net/http -> fasthttp handler conversion has some overhead,
// so the returned handler will be always slower than manually written
// fasthttp handler.
//
// So it is advisable using this function only for quick net/http -> fasthttp
// switching. Then manually convert net/http handlers to fasthttp handlers
// according to https://github.com/valyala/fasthttp#switching-from-nethttp-to-fasthttp .
func NewFastHTTPHandler(h http.Handler) fasthttp.RequestHandler {
return func(ctx *fasthttp.RequestCtx) {
var r http.Request
body := ctx.PostBody()
r.Method = string(ctx.Method())
r.Proto = "HTTP/1.1"
r.ProtoMajor = 1
r.ProtoMinor = 1
r.RequestURI = string(ctx.RequestURI())
r.ContentLength = int64(len(body))
r.Host = string(ctx.Host())
r.RemoteAddr = ctx.RemoteAddr().String()
hdr := make(http.Header)
ctx.Request.Header.VisitAll(func(k, v []byte) {
sk := string(k)
sv := string(v)
switch sk {
case "Transfer-Encoding":
r.TransferEncoding = append(r.TransferEncoding, sv)
default:
hdr.Set(sk, sv)
}
})
r.Header = hdr
r.Body = &netHTTPBody{body}
rURL, err := url.ParseRequestURI(r.RequestURI)
if err != nil {
ctx.Logger().Printf("cannot parse requestURI %q: %s", r.RequestURI, err)
ctx.Error("Internal Server Error", fasthttp.StatusInternalServerError)
return
}
r.URL = rURL
var w netHTTPResponseWriter
h.ServeHTTP(&w, &r)
ctx.SetStatusCode(w.StatusCode())
for k, vv := range w.Header() {
for _, v := range vv {
ctx.Response.Header.Set(k, v)
}
}
ctx.Write(w.body)
}
}
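// Illustrative usage (a sketch; the handler body and listen address are
// hypothetical):
//
//	helloHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
//		w.Write([]byte("hello"))
//	})
//	fasthttp.ListenAndServe(":8080", NewFastHTTPHandler(helloHandler))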
type netHTTPBody struct {
b []byte
}
func (r *netHTTPBody) Read(p []byte) (int, error) {
if len(r.b) == 0 {
return 0, io.EOF
}
n := copy(p, r.b)
r.b = r.b[n:]
return n, nil
}
func (r *netHTTPBody) Close() error {
r.b = r.b[:0]
return nil
}
type netHTTPResponseWriter struct {
statusCode int
h http.Header
body []byte
}
func (w *netHTTPResponseWriter) StatusCode() int {
if w.statusCode == 0 {
return http.StatusOK
}
return w.statusCode
}
func (w *netHTTPResponseWriter) Header() http.Header {
if w.h == nil {
w.h = make(http.Header)
}
return w.h
}
func (w *netHTTPResponseWriter) WriteHeader(statusCode int) {
w.statusCode = statusCode
}
func (w *netHTTPResponseWriter) Write(p []byte) (int, error) {
w.body = append(w.body, p...)
return len(p), nil
}<|fim▁end|> | // it has the following drawbacks comparing to using manually written fasthttp
// request handler:
//
// * A lot of useful functionality provided by fasthttp is missing |
<|file_name|>ROGUE.js<|end_file_name|><|fim▁begin|>import ITEM_QUALITIES from '../ITEM_QUALITIES';
export default {
// Shared legendaries
SOUL_OF_THE_SHADOWBLADE: {
id: 150936,
name: 'Soul of the Shadowblade',
icon: 'inv_jewelry_ring_56',
quality: ITEM_QUALITIES.LEGENDARY,
},
MANTLE_OF_THE_MASTER_ASSASSIN: {
id: 144236,
name: 'Mantle of the Master Assassin',
icon: 'inv_shoulder_leather_raidrogue_k_01',
quality: ITEM_QUALITIES.LEGENDARY,
},
INSIGNIA_OF_RAVENHOLDT: {
id: 137049,
name: 'Insignia of Ravenholdt',
icon: 'inv_misc_epicring_a2',
quality: ITEM_QUALITIES.LEGENDARY,
},
WILL_OF_VALEERA: {
id: 137069,
name: 'Will of Valeera',
icon: 'inv_pants_cloth_02',
quality: ITEM_QUALITIES.LEGENDARY,
},
THE_DREADLORDS_DECEIT: {
id: 137021,
name: 'The Dreadlord\'s Deceit',
icon: 'inv_cape_pandaria_d_03',
quality: ITEM_QUALITIES.LEGENDARY,
},
// Assassination legendaries
DUSKWALKERS_FOOTPADS: {
id: 137030,
name: 'Duskwalker\'s Footpads',
icon: 'inv_boots_leather_8',
quality: ITEM_QUALITIES.LEGENDARY,
},
ZOLDYCK_FAMILY_TRAINING_SHACKLES: {<|fim▁hole|> name: 'Zoldyck Family Training Shackles',
icon: 'inv_bracer_leather_raiddruid_i_01',
quality: ITEM_QUALITIES.LEGENDARY,
},
THE_EMPTY_CROWN: {
id: 151815,
name: 'The Empty Crown',
icon: 'inv_crown_02',
quality: ITEM_QUALITIES.LEGENDARY,
},
// Outlaw legendaries
THRAXIS_TRICKSY_TREADS: {
id: 137031,
name: 'Thraxi\'s Tricksy Treads',
icon: 'inv_boots_leather_03a',
quality: ITEM_QUALITIES.LEGENDARY,
},
GREENSKINS_WATERLOGGED_WRISTCUFFS: {
id: 137099,
name: 'Greenskin\'s Waterlogged Wristcuffs',
icon: 'inv_bracer_leather_raidrogue_k_01',
quality: ITEM_QUALITIES.LEGENDARY,
},
SHIVARRAN_SYMMETRY: {
id: 141321,
name: 'Shivarran Symmetry',
icon: 'inv_gauntlets_83',
quality: ITEM_QUALITIES.LEGENDARY,
},
THE_CURSE_OF_RESTLESSNESS: {
id: 151817,
name: 'The Curse of Restlessness',
icon: 'inv_qiraj_draperegal',
quality: ITEM_QUALITIES.LEGENDARY,
},
// Subtlety legendaries
SHADOW_SATYRS_WALK: {
id: 137032,
name: 'Shadow Satyr\'s Walk',
icon: 'inv_boots_mail_dungeonmail_c_04',
quality: ITEM_QUALITIES.LEGENDARY,
},
DENIAL_OF_THE_HALF_GIANTS: {
id: 137100,
name: 'Denial of the Half-Giants',
icon: 'inv_bracer_leather_panda_b_02_crimson',
quality: ITEM_QUALITIES.LEGENDARY,
},
THE_FIRST_OF_THE_DEAD: {
id: 151818,
name: 'The First of the Dead',
icon: 'inv_glove_cloth_raidwarlockmythic_q_01',
quality: ITEM_QUALITIES.LEGENDARY,
},
};<|fim▁end|> | id: 137098, |
<|file_name|>atom.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
This source file is part of the MolCore project.
Copyright 2011 Kitware, Inc.
This source code is released under the New BSD License, (the "License").
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
<|fim▁hole|>******************************************************************************/
#include "atom.h"
#include "molecule.h"
namespace MolCore {
// === Atom ================================================================ //
/// \class Atom
/// \brief The Atom class represents an atom in a molecule.
/// Creates a new, invalid atom object.
Atom::Atom()
: m_molecule(0),
m_index(-1)
{
}
/// Creates a new atom object representing the atom at index \p i in
/// molecule \p m.
Atom::Atom(Molecule *m, size_t i)
: m_molecule(m),
m_index(i)
{
}
// --- Properties ---------------------------------------------------------- //
/// Returns \c true if the atom is valid.
bool Atom::isValid() const
{
return m_molecule != 0;
}
/// Returns the molecule the atom is a part of.
Molecule* Atom::molecule() const
{
return m_molecule;
}
/// Returns the atom's index in the molecule.
size_t Atom::index() const
{
return m_index;
}
/// Sets the atom's atomic number to \p number.
void Atom::setAtomicNumber(unsigned char number)
{
m_molecule->atomicNumbers()[m_index] = number;
}
/// Returns the atom's atomic number.
unsigned char Atom::atomicNumber() const
{
return m_molecule->atomicNumbers()[m_index];
}
} // end MolCore namespace<|fim▁end|> | |
<|file_name|>waveform_with_overlay.rs<|end_file_name|><|fim▁begin|>use gtk::{cairo, pango, prelude::*};
use log::debug;
use std::{
cell::RefCell,
collections::Bound::Included,
rc::Rc,
sync::{Arc, Mutex},
};
use metadata::Duration;
use renderers::{ImagePositions, SampleIndexRange, Timestamp, WaveformRenderer, BACKGROUND_COLOR};
use crate::info::{self, ChaptersBoundaries};
// Use this text to compute the largest text box for the waveform limits
// This is required to position the labels in such a way they don't
// move constantly depending on the digits width
const LIMIT_TEXT_MN: &str = "00:00.000";
const LIMIT_TEXT_H: &str = "00:00:00.000";
const CURSOR_TEXT_MN: &str = "00:00.000.000";
const CURSOR_TEXT_H: &str = "00:00:00.000.000";
// Other UI components refresh period
const OTHER_UI_REFRESH_PERIOD: Duration = Duration::from_millis(50);
const ONE_HOUR: Duration = Duration::from_secs(60 * 60);
#[derive(Default)]<|fim▁hole|>struct TextMetrics {
font_family: Option<String>,
font_size: f64,
twice_font_size: f64,
half_font_size: f64,
limit_mn_width: f64,
limit_h_width: f64,
limit_y: f64,
cursor_mn_width: f64,
cursor_h_width: f64,
cursor_y: f64,
ref_lbl: Option<gtk::Label>,
}
impl TextMetrics {
fn new(ref_lbl: gtk::Label) -> Self {
TextMetrics {
ref_lbl: Some(ref_lbl),
..Default::default()
}
}
fn set_text_metrics(&mut self, cr: &cairo::Context) {
// FIXME use Once for this
match self.font_family {
Some(ref family) => {
cr.select_font_face(family, cairo::FontSlant::Normal, cairo::FontWeight::Normal);
cr.set_font_size(self.font_size);
}
None => {
// Get font specs from the reference label
let ref_layout = self.ref_lbl.as_ref().unwrap().layout().unwrap();
let ref_ctx = ref_layout.context().unwrap();
let font_desc = ref_ctx.font_description().unwrap();
let family = font_desc.family().unwrap();
cr.select_font_face(&family, cairo::FontSlant::Normal, cairo::FontWeight::Normal);
let font_size = f64::from(ref_layout.baseline() / pango::SCALE);
cr.set_font_size(font_size);
self.font_family = Some(family.to_string());
self.font_size = font_size;
self.twice_font_size = 2f64 * font_size;
self.half_font_size = 0.5f64 * font_size;
self.limit_mn_width = cr.text_extents(LIMIT_TEXT_MN).unwrap().width;
self.limit_h_width = cr.text_extents(LIMIT_TEXT_H).unwrap().width;
self.limit_y = 2f64 * font_size;
self.cursor_mn_width = cr.text_extents(CURSOR_TEXT_MN).unwrap().width;
self.cursor_h_width = cr.text_extents(CURSOR_TEXT_H).unwrap().width;
self.cursor_y = font_size;
}
}
}
}
pub struct WaveformWithOverlay {
waveform_renderer_mtx: Arc<Mutex<Box<WaveformRenderer>>>,
text_metrics: TextMetrics,
boundaries: Rc<RefCell<ChaptersBoundaries>>,
positions: Rc<RefCell<ImagePositions>>,
last_other_ui_refresh: Timestamp,
}
impl WaveformWithOverlay {
pub fn new(
waveform_renderer_mtx: &Arc<Mutex<Box<WaveformRenderer>>>,
positions: &Rc<RefCell<ImagePositions>>,
boundaries: &Rc<RefCell<ChaptersBoundaries>>,
ref_lbl: >k::Label,
) -> Self {
WaveformWithOverlay {
waveform_renderer_mtx: Arc::clone(waveform_renderer_mtx),
text_metrics: TextMetrics::new(ref_lbl.clone()),
boundaries: Rc::clone(boundaries),
positions: Rc::clone(positions),
last_other_ui_refresh: Timestamp::default(),
}
}
pub fn draw(&mut self, da: >k::DrawingArea, cr: &cairo::Context) {
cr.set_source_rgb(BACKGROUND_COLOR.0, BACKGROUND_COLOR.1, BACKGROUND_COLOR.2);
cr.paint().unwrap();
let (positions, state) = {
let waveform_renderer = &mut *self.waveform_renderer_mtx.lock().unwrap();
// FIXME send an event?
//self.playback_needs_refresh = waveform_renderer.playback_needs_refresh();
if let Err(err) = waveform_renderer.refresh() {
if err.is_not_ready() {
return;
} else {
panic!("{}", err);
}
}
let (image, positions, state) = match waveform_renderer.image() {
Some(image_and_positions) => image_and_positions,
None => {
debug!("draw got no image");
return;
}
};
image.with_surface_external_context(cr, |cr, surface| {
cr.set_source_surface(surface, -positions.offset.x, 0f64)
.unwrap();
cr.paint().unwrap();
});
(positions, state)
};
cr.scale(1f64, 1f64);
cr.set_source_rgb(1f64, 1f64, 0f64);
self.text_metrics.set_text_metrics(cr);
// first position
let first_text = positions.offset.ts.for_humans().to_string();
let first_text_end = if positions.offset.ts < ONE_HOUR {
2f64 + self.text_metrics.limit_mn_width
} else {
2f64 + self.text_metrics.limit_h_width
};
cr.move_to(2f64, self.text_metrics.twice_font_size);
cr.show_text(&first_text).unwrap();
// last position
let last_text = positions.last.ts.for_humans().to_string();
let last_text_start = if positions.last.ts < ONE_HOUR {
2f64 + self.text_metrics.limit_mn_width
} else {
2f64 + self.text_metrics.limit_h_width
};
if positions.last.x - last_text_start > first_text_end + 5f64 {
// last text won't overlap with first text
cr.move_to(
positions.last.x - last_text_start,
self.text_metrics.twice_font_size,
);
cr.show_text(&last_text).unwrap();
}
// Draw in-range chapters boundaries
let boundaries = self.boundaries.borrow();
let chapter_range =
boundaries.range((Included(&positions.offset.ts), Included(&positions.last.ts)));
let allocation = da.allocation();
let (area_width, area_height) = (allocation.width() as f64, allocation.height() as f64);
cr.set_source_rgb(0.5f64, 0.6f64, 1f64);
cr.set_line_width(1f64);
let boundary_y0 = self.text_metrics.twice_font_size + 5f64;
let text_base = allocation.height() as f64 - self.text_metrics.half_font_size;
for (boundary, chapters) in chapter_range {
if *boundary >= positions.offset.ts {
let x = SampleIndexRange::from_duration(
*boundary - positions.offset.ts,
positions.sample_duration,
)
.as_f64()
/ positions.sample_step;
cr.move_to(x, boundary_y0);
cr.line_to(x, area_height);
cr.stroke().unwrap();
if let Some(ref prev_chapter) = chapters.prev {
cr.move_to(
x - 5f64 - cr.text_extents(&prev_chapter.title).unwrap().width,
text_base,
);
cr.show_text(&prev_chapter.title).unwrap();
}
if let Some(ref next_chapter) = chapters.next {
cr.move_to(x + 5f64, text_base);
cr.show_text(&next_chapter.title).unwrap();
}
}
}
if let Some(cursor) = &positions.cursor {
// draw current pos
cr.set_source_rgb(1f64, 1f64, 0f64);
let cursor_text = cursor.ts.for_humans().with_micro().to_string();
let cursor_text_end = if cursor.ts < ONE_HOUR {
5f64 + self.text_metrics.cursor_mn_width
} else {
5f64 + self.text_metrics.cursor_h_width
};
let cursor_text_x = if cursor.x + cursor_text_end < area_width {
cursor.x + 5f64
} else {
cursor.x - cursor_text_end
};
cr.move_to(cursor_text_x, self.text_metrics.font_size);
cr.show_text(&cursor_text).unwrap();
cr.set_line_width(1f64);
cr.move_to(cursor.x, 0f64);
cr.line_to(cursor.x, area_height - self.text_metrics.twice_font_size);
cr.stroke().unwrap();
let cursor_ts = cursor.ts;
// update other UI position
// Note: we go through the audio controller here in order
// to reduce position queries on the ref gst element
if !state.is_playing()
|| cursor.ts < self.last_other_ui_refresh
|| cursor.ts > self.last_other_ui_refresh + OTHER_UI_REFRESH_PERIOD
{
info::refresh(cursor_ts);
self.last_other_ui_refresh = cursor_ts;
}
}
*self.positions.borrow_mut() = positions;
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# coding=utf-8
# region License
# Findeco is dually licensed under GPLv3 or later and MPLv2.
#
################################################################################
# Copyright (c) 2012 Klaus Greff <[email protected]>
# This file is part of Findeco.
#
# Findeco is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# Findeco is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Findeco. If not, see <http://www.gnu.org/licenses/>.
################################################################################<|fim▁hole|>################################################################################
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#endregion #####################################################################
from __future__ import division, print_function, unicode_literals<|fim▁end|> | # |
<|file_name|>first.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
current = u'b'<|fim▁hole|>print ord('a')
print ord('1')
print ord('\n')
./first.py<|fim▁end|> | print ord(current) |
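# A minimal Python 3 sketch of the same script (a hypothetical port; the
# original above targets Python 2, where `print` is a statement):
#
# current = 'b'
# print(ord(current))  # 98
# print(ord('a'))      # 97
# print(ord('1'))      # 49
# print(ord('\n'))     # 10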
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from cms.admin.pageadmin import PageAdmin
from cms.models import Page<|fim▁hole|>
admin.site.register(page)
admin.site.register(Sub_Pages)<|fim▁end|> |
from .models import page, Sub_Pages |
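# A sketch of a richer registration using a ModelAdmin subclass (hypothetical
# admin class and field names; adapt to the real models in .models):
#
# class PageModelAdmin(admin.ModelAdmin):
#     search_fields = ('title',)  # assumes the model defines a `title` field
#
# admin.site.register(page, PageModelAdmin)  # instead of the bare register above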
<|file_name|>jquery.sentinel.js<|end_file_name|><|fim▁begin|>/*
* sentinel
* https://github.com/jaredhoyt/jquery-sentinel
*
* Copyright (c) 2015 Jared Hoyt
* Licensed under the MIT license.
*/
(function($) {
// Collection method.
$.fn.sentinel = function() {
return this.each(function(i) {
// Do something awesome to each selected element.
$(this).html('awesome' + i);
});
};
// Static method.
$.sentinel = function(options) {<|fim▁hole|> options = $.extend({}, $.sentinel.options, options);
// Return something awesome.
return 'awesome' + options.punctuation;
};
// Static method default options.
$.sentinel.options = {
punctuation: '.'
};
// Custom selector.
$.expr[':'].sentinel = function(elem) {
// Is this element awesome?
return $(elem).text().indexOf('awesome') !== -1;
};
}(jQuery));<|fim▁end|> | // Override default options with passed-in options. |
<|file_name|>e09asynctwostage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
./e09asynctwostage.py http://camlistore.org 1 6
Found 10 urls
http://camlistore.org/ frequencies: [('camlistore', 13), ...]
...
First integer arg is depth, second is minimum word count.
"""
import re
from sys import argv
import asyncio
from e01extract import canonicalize
from e04twostage import print_popular_words
from e06asyncextract import extract_async
@asyncio.coroutine
def wordcount_async(data, word_length):
counts = {}
for match in re.finditer('\w{%d,100}' % word_length, data):
word = match.group(0).lower()
counts[word] = counts.get(word, 0) + 1
return counts
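# Note: @asyncio.coroutine and `yield from` are legacy; a sketch of the same
# helper as a native coroutine (Python 3.5+), using a raw regex string:
#
# async def wordcount_async(data, word_length):
#     counts = {}
#     for match in re.finditer(r'\w{%d,100}' % word_length, data):
#         word = match.group(0).lower()
#         counts[word] = counts.get(word, 0) + 1
#     return counts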
@asyncio.coroutine
def extract_count_async(url, word_length):
_, data, found_urls = yield from extract_async(url)<|fim▁hole|>
@asyncio.coroutine
def twostage_async(to_fetch, seen_urls, word_length):
futures, results = [], []
for url in to_fetch:
if url in seen_urls: continue
seen_urls.add(url)
futures.append(extract_count_async(url, word_length))
for future in asyncio.as_completed(futures):
try:
results.append((yield from future))
except Exception:
continue
return results
@asyncio.coroutine
def crawl_async(start_url, max_depth, word_length):
seen_urls = set()
to_fetch = [canonicalize(start_url)]
results = []
for depth in range(max_depth + 1):
batch = yield from twostage_async(to_fetch, seen_urls, word_length)
to_fetch = []
for url, data, found_urls in batch:
results.append((url, data))
to_fetch.extend(found_urls)
return results
def main():
# Bridge the gap between sync and async
future = asyncio.Task(crawl_async(argv[1], int(argv[2]), int(argv[3])))
loop = asyncio.get_event_loop()
loop.run_until_complete(future)
loop.close()
result = future.result()
print_popular_words(result)
if __name__ == '__main__':
main()<|fim▁end|> | top_word = yield from wordcount_async(data, word_length)
return url, top_word, found_urls
|
<|file_name|>pipelines.py<|end_file_name|><|fim▁begin|># Define your item pipelines here
#<|fim▁hole|> def process_item(self, item, spider):
return item<|fim▁end|> | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class MyuniversityscraperPipeline(object): |
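# A minimal sketch of a pipeline that actually filters items (the 'name'
# field is hypothetical; adapt it to the real item schema):
#
# from scrapy.exceptions import DropItem
#
# class RequiredFieldPipeline(object):
#     def process_item(self, item, spider):
#         if not item.get('name'):
#             raise DropItem('missing name in %r' % item)
#         return item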
<|file_name|>guessnameclass.py<|end_file_name|><|fim▁begin|># A program that has a list of six colors and chooses one at random. The user then has three chances to guess the right color. After the third attempt the program outputs "Nope. The color I was thinking of was..."
import random
# This is the function that runs the program.
def program():
# These are the constants declaring what the colors are.
RED = 'red'
BLUE = 'blue'
GREEN = 'green'
ORANGE = 'orange'
PURPLE = 'purple'
PINK = 'pink'
class Color:
pass
c1 = Color()
c2 = Color()
c3 = Color()
guesses_made = 0
# This input lets the program address you by your name.
c1.name = input('Hello! What is your name?\n')
c2.color = [BLUE, GREEN, RED, ORANGE, PURPLE, PINK]
# This randomizes what color is chosen
c2.color = random.choice(c2.color)
print ('Well, {0}, I am thinking of a color between blue, green, red, orange, purple and pink.'.format(c1.name))
while guesses_made < 3:
c3.guess = input('Take a guess: ')
guesses_made += 1
if c3.guess != c2.color:
print ('Your guess is wrong.')
if c3.guess == c2.color:
break
if c3.guess == c2.color:
print ('Good job, {0}! You guessed my color in {1} guesses!'.format(c1.name, guesses_made))<|fim▁hole|>
if __name__ == "__main__":
program()<|fim▁end|> | else:
print ('Nope. The color I was thinking of was {0}'.format(c2.color))
|
<|file_name|>ProductServiceImpl.java<|end_file_name|><|fim▁begin|>/**
* This file is part of mycollab-services.
*
* mycollab-services is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-services is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-services. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.crm.service.ibatis;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.esofthead.mycollab.common.interceptor.aspect.Auditable;
import com.esofthead.mycollab.common.interceptor.aspect.Traceable;
import com.esofthead.mycollab.core.persistence.ICrudGenericDAO;
import com.esofthead.mycollab.core.persistence.ISearchableDAO;
import com.esofthead.mycollab.core.persistence.service.DefaultService;
import com.esofthead.mycollab.module.crm.dao.ProductMapper;
import com.esofthead.mycollab.module.crm.dao.ProductMapperExt;
import com.esofthead.mycollab.module.crm.domain.Product;
import com.esofthead.mycollab.module.crm.domain.criteria.ProductSearchCriteria;
import com.esofthead.mycollab.module.crm.service.ProductService;
@Service
@Transactional
public class ProductServiceImpl extends DefaultService<Integer, Product, ProductSearchCriteria>
implements ProductService {
@Autowired
private ProductMapper productMapper;
@Autowired
private ProductMapperExt productMapperExt;
@Override<|fim▁hole|>
@Override
public ISearchableDAO<ProductSearchCriteria> getSearchMapper() {
return productMapperExt;
}
}<|fim▁end|> | public ICrudGenericDAO<Integer, Product> getCrudMapper() {
return productMapper;
} |
<|file_name|>types.ts<|end_file_name|><|fim▁begin|>import { Command, App } from './';
export interface NorthbrookConfig extends Object<any> {
plugins?: Array<string | App | Command>;
packages?: Array<string>;<|fim▁hole|>}
export interface Object<T> {
[key: string]: T;
}
export interface STDIO {
stdout?: NodeJS.WritableStream;
stderr?: NodeJS.WritableStream;
stdin?: NodeJS.ReadableStream;
}
export interface Stdio {
stdout: NodeJS.WritableStream;
stderr: NodeJS.WritableStream;
stdin: NodeJS.ReadableStream;
}<|fim▁end|> | }
export interface Plugin {
plugin: App | Command; |
<|file_name|>_GroupLock.qunit.js<|end_file_name|><|fim▁begin|>/*!
* ${copyright}
*/
sap.ui.define([
"sap/base/Log",
"sap/ui/model/odata/v4/lib/_GroupLock"
], function (Log, _GroupLock) {
"use strict";
//*********************************************************************************************
QUnit.module("sap.ui.model.odata.v4.lib._GroupLock", {
beforeEach : function () {
this.oLogMock = this.mock(Log);
this.oLogMock.expects("warning").never();
this.oLogMock.expects("error").never();
}
});
//*********************************************************************************************
QUnit.test("unlocked, initialized", function (assert) {
var oOwner = {/*owner*/},
oGroupLock = new _GroupLock("foo", oOwner);
assert.strictEqual(oGroupLock.isCanceled(), false);
assert.strictEqual(oGroupLock.getGroupId(), "foo");
assert.strictEqual(oGroupLock.oOwner, oOwner);
assert.strictEqual(oGroupLock.isLocked(), false);
assert.strictEqual(oGroupLock.waitFor("foo"), undefined);
assert.strictEqual(oGroupLock.waitFor("bar"), undefined);
});
//*********************************************************************************************
QUnit.test("owner is mandatory", function (assert) {
assert.throws(function () {
return new _GroupLock("group");
}, new Error("Missing owner"));
});
//*********************************************************************************************
QUnit.test("locked", function (assert) {
var oGroupLock,
oOwner = {/*owner*/},
oPromise1,
oPromise2;
// code under test
oGroupLock = new _GroupLock("foo", oOwner, true);
assert.strictEqual(oGroupLock.getGroupId(), "foo");
assert.strictEqual(oGroupLock.oOwner, oOwner);
assert.strictEqual(oGroupLock.isLocked(), true);
<|fim▁hole|>
assert.ok(oPromise1.isPending());
assert.ok(oPromise2.isPending());
// code under test
assert.strictEqual(oGroupLock.waitFor("bar"), undefined);
// code under test
oGroupLock.unlock();
assert.ok(oPromise1.isFulfilled());
assert.ok(oPromise2.isFulfilled());
assert.notOk(oGroupLock.isLocked());
});
//*********************************************************************************************
QUnit.test("multiple unlocks", function (assert) {
var oGroupLock = new _GroupLock("group", {/*owner*/});
oGroupLock.unlock();
assert.throws(function () {
oGroupLock.unlock();
}, new Error("GroupLock unlocked twice"));
oGroupLock.unlock(true); // no error!
});
//*********************************************************************************************
QUnit.test("getUnlockedCopy", function (assert) {
var oGroupLock1 = new _GroupLock("group", {/*owner*/}, true, true, 42),
oGroupLock2;
// code under test
oGroupLock2 = oGroupLock1.getUnlockedCopy();
assert.strictEqual(oGroupLock2.getGroupId(), oGroupLock1.getGroupId());
assert.strictEqual(oGroupLock2.oOwner, oGroupLock1.oOwner);
assert.strictEqual(oGroupLock2.isLocked(), false);
assert.strictEqual(oGroupLock2.isModifying(), false);
assert.strictEqual(oGroupLock2.getSerialNumber(), oGroupLock1.getSerialNumber());
});
//*********************************************************************************************
QUnit.test("owner & toString", function (assert) {
var oGroupLock,
oOwner = {
toString : function () {
return "owner";
}
};
oGroupLock = new _GroupLock("group", oOwner, true);
assert.strictEqual(oGroupLock.toString(),
"sap.ui.model.odata.v4.lib._GroupLock(group=group, owner=owner, locked)");
oGroupLock = new _GroupLock("group", oOwner, true, true);
assert.strictEqual(oGroupLock.toString(),
"sap.ui.model.odata.v4.lib._GroupLock(group=group, owner=owner, locked, modifying)");
oGroupLock = new _GroupLock("group", oOwner, false);
assert.strictEqual(oGroupLock.oOwner, oOwner);
assert.strictEqual(oGroupLock.toString(),
"sap.ui.model.odata.v4.lib._GroupLock(group=group, owner=owner)");
oGroupLock = new _GroupLock("group", oOwner, false, undefined, 0);
assert.strictEqual(oGroupLock.toString(),
"sap.ui.model.odata.v4.lib._GroupLock(group=group, owner=owner, serialNumber=0)");
oGroupLock = new _GroupLock("group", oOwner, true, true, 0);
assert.strictEqual(oGroupLock.toString(),
"sap.ui.model.odata.v4.lib._GroupLock(group=group, owner=owner, locked, modifying,"
+ " serialNumber=0)");
});
//*********************************************************************************************
QUnit.test("constants", function (assert) {
assert.strictEqual(_GroupLock.$cached.getGroupId(), "$cached");
assert.strictEqual(_GroupLock.$cached.isLocked(), false);
assert.strictEqual(_GroupLock.$cached.isModifying(), false);
assert.strictEqual(_GroupLock.$cached.oOwner, "sap.ui.model.odata.v4.lib._GroupLock");
// ensure that $cached can be unlocked several times
_GroupLock.$cached.unlock();
_GroupLock.$cached.unlock();
});
//*********************************************************************************************
QUnit.test("serial number", function (assert) {
var oOwner = {/*owner*/};
assert.strictEqual(new _GroupLock("group", oOwner, true, true, 42).getSerialNumber(), 42);
assert.strictEqual(new _GroupLock("group", oOwner, true).getSerialNumber(), Infinity);
assert.strictEqual(new _GroupLock("group", oOwner, true, false, 0).getSerialNumber(), 0);
});
//*********************************************************************************************
[undefined, false, true].forEach(function (bModifying, i) {
QUnit.test("modifying: " + bModifying, function (assert) {
assert.strictEqual(new _GroupLock("group", {/*owner*/}, true, bModifying, 42).isModifying(),
i === 2);
});
});
//*********************************************************************************************
QUnit.test("modifying: throws if not locked", function (assert) {
assert.throws(function () {
return new _GroupLock("group", {/*owner*/}, false, true, 42);
}, new Error("A modifying group lock has to be locked"));
});
//*********************************************************************************************
QUnit.test("cancel w/o function", function (assert) {
var oGroupLock = new _GroupLock("group", {/*owner*/}, true);
this.mock(oGroupLock).expects("unlock").withExactArgs(true);
// code under test
oGroupLock.cancel();
assert.ok(oGroupLock.isCanceled());
});
//*********************************************************************************************
QUnit.test("cancel w/ function", function (assert) {
var fnCancel = sinon.spy(),
oGroupLock = new _GroupLock("group", {/*owner*/}, true, false, undefined, fnCancel);
assert.strictEqual(oGroupLock.fnCancel, fnCancel);
sinon.assert.notCalled(fnCancel);
this.mock(oGroupLock).expects("unlock").withExactArgs(true);
// code under test
oGroupLock.cancel();
assert.ok(oGroupLock.isCanceled());
sinon.assert.calledOnce(fnCancel);
sinon.assert.calledWithExactly(fnCancel);
oGroupLock.cancel();
sinon.assert.calledOnce(fnCancel); // cancel function must not be called again
});
});<|fim▁end|> | // code under test
oPromise1 = oGroupLock.waitFor("foo");
oPromise2 = oGroupLock.waitFor("foo"); |
<|file_name|>watchpid_test.go<|end_file_name|><|fim▁begin|>package immortal
import (
"os/exec"
"syscall"
"testing"
"time"
)
func TestWatchPidGetpid(t *testing.T) {
ch := make(chan error, 1)
d := &Daemon{}
cmd := exec.Command("go", "version")
cmd.Start()
pid := cmd.Process.Pid
go func() {
d.WatchPid(pid, ch)
ch <- cmd.Wait()
}()
select {
case <-time.After(time.Millisecond):
syscall.Kill(pid, syscall.SIGTERM)
case err := <-ch:
if err != nil {
if err.Error() != "EXIT" {
t.Error(err)
}
}
}
}
func TestWatchPidGetpidKill(t *testing.T) {
d := &Daemon{}
ch := make(chan error, 1)
cmd := exec.Command("sleep", "100")
cmd.Start()<|fim▁hole|> ch <- cmd.Wait()
}()
select {
case err := <-ch:
if err != nil {
if err.Error() != "EXIT" {
t.Error(err)
}
}
case <-time.After(1 * time.Millisecond):
if err := cmd.Process.Kill(); err != nil {
t.Errorf("failed to kill: %s", err)
}
}
}<|fim▁end|> | pid := cmd.Process.Pid
go func() {
d.WatchPid(pid, ch) |
<|file_name|>messages.py<|end_file_name|><|fim▁begin|>NOT_GIT_REPO_MSG = "#{red}Not a git repository (or any of the parent directories)"
HOOK_ALREADY_INSTALLED_MSG = "The pre-commit hook has already been installed."
EXISTING_HOOK_MSG = (
"#{yellow}There is an existing pre-commit hook.\n"
"#{reset_all}Therapist can preserve this legacy hook and run it before the Therapist "
"pre-commit hook."
)
CONFIRM_PRESERVE_LEGACY_HOOK_MSG = "Would you like to preserve this legacy hook?"
COPYING_HOOK_MSG = "Copying `pre-commit` to `pre-commit.legacy`...\t"
DONE_COPYING_HOOK_MSG = "#{green}#{bright}DONE"
CONFIRM_REPLACE_HOOK_MSG = "Do you want to replace this hook?"
INSTALL_ABORTED_MSG = "Installation aborted."
INSTALLING_HOOK_MSG = "Installing pre-commit hook...\t"
DONE_INSTALLING_HOOK_MSG = "#{green}#{bright}DONE"
NO_HOOK_INSTALLED_MSG = "There is no pre-commit hook currently installed."
UNINSTALL_ABORTED_MSG = "Uninstallation aborted."
CONFIRM_UNINSTALL_HOOK_MSG = "Are you sure you want to uninstall the current pre-commit hook?"
CURRENT_HOOK_NOT_THERAPIST_MSG = (
"#{yellow}The current pre-commit hook is not the Therapist pre-commit hook.\n"
"#{reset_all}Uninstallation aborted."
)
LEGACY_HOOK_EXISTS_MSG = "#{yellow}There is a legacy pre-commit hook present."<|fim▁hole|>CONFIRM_RESTORE_LEGACY_HOOK_MSG = "Would you like to restore the legacy hook?"
COPYING_LEGACY_HOOK_MSG = "Copying `pre-commit.legacy` to `pre-commit`...\t"
DONE_COPYING_LEGACY_HOOK_MSG = "#{green}#{bright}DONE"
REMOVING_LEGACY_HOOK_MSG = "Removing `pre-commit.legacy`...\t"
DONE_REMOVING_LEGACY_HOOK_MSG = "#{green}#{bright}DONE"
UNINSTALLING_HOOK_MSG = "Uninstalling pre-commit hook...\t"
DONE_UNINSTALLING_HOOK_MSG = "#{green}#{bright}DONE"
MISCONFIGURED_MSG = "#{{red}}Misconfigured: {}"
UNSTAGED_CHANGES_MSG = "#{yellow}You have unstaged changes."
NO_THERAPIST_CONFIG_FILE_MSG = "#{red}No Therapist configuration file was found."
UPGRADE_HOOK_MSG = (
"#{red}The installed pre-commit hook is incompatible with the current version of Therapist.\n"
"#{reset_all}Install the latest pre-commit hook by running `therapist install`."
)<|fim▁end|> | |
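# The "#{...}" tokens above are color placeholders expanded by Therapist's
# printer. A rough sketch of such an expansion (assumed token mapping, using
# raw ANSI escapes rather than Therapist's actual implementation):
#
# COLORS = {'red': '\033[31m', 'yellow': '\033[33m', 'green': '\033[32m',
#           'bright': '\033[1m', 'reset_all': '\033[0m'}
#
# def render(message):
#     for name, code in COLORS.items():
#         message = message.replace('#{%s}' % name, code)
#     return message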
<|file_name|>properties.py<|end_file_name|><|fim▁begin|>from bokeh.util.deprecate import deprecated_module
deprecated_module('bokeh.properties', '0.11', 'use bokeh.core.properties instead')<|fim▁hole|>
from .core.properties import * # NOQA<|fim▁end|> | del deprecated_module |
<|file_name|>transform.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic types for CSS values that are related to transformations.
use app_units::Au;
use euclid::{self, Rect, Transform3D};
use num_traits::Zero;
use values::{computed, CSSFloat};
use values::computed::length::Length as ComputedLength;
use values::computed::length::LengthOrPercentage as ComputedLengthOrPercentage;
use values::specified::length::Length as SpecifiedLength;
use values::specified::length::LengthOrPercentage as SpecifiedLengthOrPercentage;
/// A generic 2D transformation matrix.
#[allow(missing_docs)]
#[derive(
Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
#[css(comma, function)]
pub struct Matrix<T> {
pub a: T,
pub b: T,
pub c: T,
pub d: T,
pub e: T,
pub f: T,
}
#[allow(missing_docs)]
#[cfg_attr(rustfmt, rustfmt_skip)]
#[css(comma, function = "matrix3d")]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo,
ToComputedValue, ToCss)]
pub struct Matrix3D<T> {
pub m11: T, pub m12: T, pub m13: T, pub m14: T,
pub m21: T, pub m22: T, pub m23: T, pub m24: T,
pub m31: T, pub m32: T, pub m33: T, pub m34: T,
pub m41: T, pub m42: T, pub m43: T, pub m44: T,
}
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<T: Into<f64>> From<Matrix<T>> for Transform3D<f64> {
#[inline]
fn from(m: Matrix<T>) -> Self {
Transform3D::row_major(
m.a.into(), m.b.into(), 0.0, 0.0,
m.c.into(), m.d.into(), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
m.e.into(), m.f.into(), 0.0, 1.0,
)
}
}
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<T: Into<f64>> From<Matrix3D<T>> for Transform3D<f64> {
#[inline]
fn from(m: Matrix3D<T>) -> Self {
Transform3D::row_major(
m.m11.into(), m.m12.into(), m.m13.into(), m.m14.into(),
m.m21.into(), m.m22.into(), m.m23.into(), m.m24.into(),
m.m31.into(), m.m32.into(), m.m33.into(), m.m34.into(),
m.m41.into(), m.m42.into(), m.m43.into(), m.m44.into(),
)
}
}
/// A generic transform origin.
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
pub struct TransformOrigin<H, V, Depth> {
/// The horizontal origin.
pub horizontal: H,
/// The vertical origin.
pub vertical: V,
/// The depth.
pub depth: Depth,
}
/// A generic timing function.
///
/// <https://drafts.csswg.org/css-timing-1/#single-timing-function-production>
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss)]
#[value_info(ty = "TIMING_FUNCTION")]
pub enum TimingFunction<Integer, Number> {
/// `linear | ease | ease-in | ease-out | ease-in-out`
Keyword(TimingKeyword),
/// `cubic-bezier(<number>, <number>, <number>, <number>)`
#[allow(missing_docs)]
#[css(comma, function)]
CubicBezier {
x1: Number,
y1: Number,
x2: Number,
y2: Number,
},
/// `step-start | step-end | steps(<integer>, [ start | end ]?)`
#[css(comma, function)]
#[value_info(other_values = "step-start,step-end")]
Steps(Integer, #[css(skip_if = "is_end")] StepPosition),
/// `frames(<integer>)`
#[css(comma, function)]
Frames(Integer),
}
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum TimingKeyword {
Linear,
Ease,
EaseIn,
EaseOut,
EaseInOut,
}
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, Parse, PartialEq, ToComputedValue, ToCss)]
pub enum StepPosition {
Start,
End,
}
#[inline]
fn is_end(position: &StepPosition) -> bool {
*position == StepPosition::End
}
impl<H, V, D> TransformOrigin<H, V, D> {
/// Returns a new transform origin.
pub fn new(horizontal: H, vertical: V, depth: D) -> Self {
Self {
horizontal: horizontal,
vertical: vertical,
depth: depth,
}
}
}
impl<Integer, Number> TimingFunction<Integer, Number> {
/// `ease`
#[inline]
pub fn ease() -> Self {
TimingFunction::Keyword(TimingKeyword::Ease)
}
}
impl TimingKeyword {
/// Returns the keyword as a quadruplet of Bezier point coordinates
/// `(x1, y1, x2, y2)`.
#[inline]
pub fn to_bezier(self) -> (CSSFloat, CSSFloat, CSSFloat, CSSFloat) {
match self {
TimingKeyword::Linear => (0., 0., 1., 1.),
TimingKeyword::Ease => (0.25, 0.1, 0.25, 1.),
TimingKeyword::EaseIn => (0.42, 0., 1., 1.),
TimingKeyword::EaseOut => (0., 0., 0.58, 1.),
TimingKeyword::EaseInOut => (0.42, 0., 0.58, 1.),
}
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
/// A single operation in the list of a `transform` value
pub enum TransformOperation<Angle, Number, Length, Integer, LengthOrPercentage> {
/// Represents a 2D 2x3 matrix.
Matrix(Matrix<Number>),
/// Represents a 3D 4x4 matrix.
Matrix3D(Matrix3D<Number>),
/// A 2D skew.
///
/// If the second angle is not provided it is assumed zero.
///
/// Syntax can be skew(angle) or skew(angle, angle)
#[css(comma, function)]
Skew(Angle, Option<Angle>),
/// skewX(angle)
#[css(function = "skewX")]
SkewX(Angle),
/// skewY(angle)
#[css(function = "skewY")]
SkewY(Angle),
/// translate(x, y) or translate(x)
#[css(comma, function)]
Translate(LengthOrPercentage, Option<LengthOrPercentage>),
/// translateX(x)
#[css(function = "translateX")]
TranslateX(LengthOrPercentage),
/// translateY(y)
#[css(function = "translateY")]
TranslateY(LengthOrPercentage),
/// translateZ(z)
#[css(function = "translateZ")]
TranslateZ(Length),
/// translate3d(x, y, z)
#[css(comma, function = "translate3d")]
Translate3D(LengthOrPercentage, LengthOrPercentage, Length),
/// A 2D scaling factor.
///
/// `scale(2)` is parsed as `Scale(Number::new(2.0), None)` and is equivalent to
/// writing `scale(2, 2)` (`Scale(Number::new(2.0), Some(Number::new(2.0)))`).
///
/// Negative values are allowed and flip the element.
///
/// Syntax can be scale(factor) or scale(factor, factor)
#[css(comma, function)]
Scale(Number, Option<Number>),
/// scaleX(factor)
#[css(function = "scaleX")]
ScaleX(Number),
/// scaleY(factor)
#[css(function = "scaleY")]
ScaleY(Number),
/// scaleZ(factor)
#[css(function = "scaleZ")]
ScaleZ(Number),
/// scale3D(factorX, factorY, factorZ)
#[css(comma, function = "scale3d")]
Scale3D(Number, Number, Number),
/// Describes a 2D Rotation.
///
/// In a 3D scene `rotate(angle)` is equivalent to `rotateZ(angle)`.
#[css(function)]
Rotate(Angle),
/// Rotation in 3D space around the x-axis.
#[css(function = "rotateX")]
RotateX(Angle),
/// Rotation in 3D space around the y-axis.
#[css(function = "rotateY")]
RotateY(Angle),
/// Rotation in 3D space around the z-axis.
#[css(function = "rotateZ")]
RotateZ(Angle),
/// Rotation in 3D space.
///
/// Generalization of rotateX, rotateY and rotateZ.
#[css(comma, function = "rotate3d")]
Rotate3D(Number, Number, Number, Angle),
/// Specifies a perspective projection matrix.
///
/// Part of CSS Transform Module Level 2 and defined at
/// [§ 13.1. 3D Transform Function](https://drafts.csswg.org/css-transforms-2/#funcdef-perspective).
///
/// The value must be greater than or equal to zero.
#[css(function)]
Perspective(Length),
/// An intermediate type for interpolation of mismatched transform lists.
#[allow(missing_docs)]
#[css(comma, function = "interpolatematrix")]
InterpolateMatrix {
from_list:
Transform<TransformOperation<Angle, Number, Length, Integer, LengthOrPercentage>>,
to_list: Transform<TransformOperation<Angle, Number, Length, Integer, LengthOrPercentage>>,
progress: computed::Percentage,
},
/// An intermediate type for accumulation of mismatched transform lists.
#[allow(missing_docs)]
#[css(comma, function = "accumulatematrix")]
AccumulateMatrix {
from_list:
Transform<TransformOperation<Angle, Number, Length, Integer, LengthOrPercentage>>,
to_list: Transform<TransformOperation<Angle, Number, Length, Integer, LengthOrPercentage>>,
count: Integer,
},
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
/// A value of the `transform` property
pub struct Transform<T>(#[css(if_empty = "none", iterable)] pub Vec<T>);
impl<Angle, Number, Length, Integer, LengthOrPercentage>
TransformOperation<Angle, Number, Length, Integer, LengthOrPercentage>
{
/// Check if it is any rotate function.
pub fn is_rotate(&self) -> bool {
use self::TransformOperation::*;
matches!(
*self,
Rotate(..) | Rotate3D(..) | RotateX(..) | RotateY(..) | RotateZ(..)
)
}
/// Check if it is any translate function
pub fn is_translate(&self) -> bool {
use self::TransformOperation::*;
match *self {
Translate(..) | Translate3D(..) | TranslateX(..) | TranslateY(..) | TranslateZ(..) => {
true
},
_ => false,
}
}
/// Check if it is any scale function
pub fn is_scale(&self) -> bool {
use self::TransformOperation::*;
match *self {
Scale(..) | Scale3D(..) | ScaleX(..) | ScaleY(..) | ScaleZ(..) => true,
_ => false,
}
}
}
/// Convert a length type into the absolute lengths.
pub trait ToAbsoluteLength {
/// Returns the absolute length as pixel value.
fn to_pixel_length(&self, containing_len: Option<Au>) -> Result<CSSFloat, ()>;
}
impl ToAbsoluteLength for SpecifiedLength {
// This returns Err(()) if there is any relative length or percentage. We use this when
// parsing a transform list of DOMMatrix because we want to return a DOM Exception
// if there is relative length.
#[inline]
fn to_pixel_length(&self, _containing_len: Option<Au>) -> Result<CSSFloat, ()> {
match *self {
SpecifiedLength::NoCalc(len) => len.to_computed_pixel_length_without_context(),
SpecifiedLength::Calc(ref calc) => calc.to_computed_pixel_length_without_context(),
}
}
}
impl ToAbsoluteLength for SpecifiedLengthOrPercentage {
// This returns Err(()) if there is any relative length or percentage. We use this when
// parsing a transform list of DOMMatrix because we want to return a DOM Exception
// if there is relative length.
#[inline]
fn to_pixel_length(&self, _containing_len: Option<Au>) -> Result<CSSFloat, ()> {
use self::SpecifiedLengthOrPercentage::*;
match *self {
Length(len) => len.to_computed_pixel_length_without_context(),
Calc(ref calc) => calc.to_computed_pixel_length_without_context(),
_ => Err(()),
}
}
}
impl ToAbsoluteLength for ComputedLength {
#[inline]
fn to_pixel_length(&self, _containing_len: Option<Au>) -> Result<CSSFloat, ()> {
Ok(self.px())
}
}
impl ToAbsoluteLength for ComputedLengthOrPercentage {
#[inline]
fn to_pixel_length(&self, containing_len: Option<Au>) -> Result<CSSFloat, ()> {
let extract_pixel_length = |lop: &ComputedLengthOrPercentage| match *lop {
ComputedLengthOrPercentage::Length(px) => px.px(),
ComputedLengthOrPercentage::Percentage(_) => 0.,
ComputedLengthOrPercentage::Calc(calc) => calc.length().px(),
};
match containing_len {
Some(relative_len) => Ok(self.to_pixel_length(relative_len).px()),
// If we don't have reference box, we cannot resolve the used value,
// so only retrieve the length part. This will be used for computing
// distance without any layout info.
None => Ok(extract_pixel_length(self)),
}
}
}
/// Support the conversion to a 3d matrix.
pub trait ToMatrix {
/// Check if it is a 3d transform function.
fn is_3d(&self) -> bool;
/// Return the equivalent 3d matrix.
fn to_3d_matrix(&self, reference_box: Option<&Rect<Au>>) -> Result<Transform3D<f64>, ()>;
}
impl<Angle, Number, Length, Integer, LoP> ToMatrix
for TransformOperation<Angle, Number, Length, Integer, LoP>
where
Angle: Copy + AsRef<computed::angle::Angle>,
Number: Copy + Into<f32> + Into<f64>,
Length: ToAbsoluteLength,
LoP: ToAbsoluteLength,
{
#[inline]
fn is_3d(&self) -> bool {
use self::TransformOperation::*;
match *self {
Translate3D(..) | TranslateZ(..) | Rotate3D(..) | RotateX(..) | RotateY(..) |
RotateZ(..) | Scale3D(..) | ScaleZ(..) | Perspective(..) | Matrix3D(..) => true,
_ => false,
}
}
/// If |reference_box| is None, we will drop the percent part from translate because
/// we cannot resolve it without the layout info, for computed TransformOperation.
/// However, for specified TransformOperation, we will return Err(()) if there is any relative
/// lengths because the only caller, DOMMatrix, doesn't accept relative lengths.
#[inline]
fn to_3d_matrix(&self, reference_box: Option<&Rect<Au>>) -> Result<Transform3D<f64>, ()> {
use self::TransformOperation::*;
use std::f64;
const TWO_PI: f64 = 2.0f64 * f64::consts::PI;
let reference_width = reference_box.map(|v| v.size.width);
let reference_height = reference_box.map(|v| v.size.height);
let matrix = match *self {
Rotate3D(ax, ay, az, theta) => {
let theta = TWO_PI - theta.as_ref().radians64();
let (ax, ay, az, theta) =
get_normalized_vector_and_angle(ax.into(), ay.into(), az.into(), theta);
Transform3D::create_rotation(
ax as f64,
ay as f64,
az as f64,
euclid::Angle::radians(theta),
)
},
RotateX(theta) => {
let theta = euclid::Angle::radians(TWO_PI - theta.as_ref().radians64());
Transform3D::create_rotation(1., 0., 0., theta)
},
RotateY(theta) => {
let theta = euclid::Angle::radians(TWO_PI - theta.as_ref().radians64());
Transform3D::create_rotation(0., 1., 0., theta)
},
RotateZ(theta) | Rotate(theta) => {
let theta = euclid::Angle::radians(TWO_PI - theta.as_ref().radians64());
Transform3D::create_rotation(0., 0., 1., theta)
},
Perspective(ref d) => {
let m = create_perspective_matrix(d.to_pixel_length(None)?);
m.cast()
},
Scale3D(sx, sy, sz) => Transform3D::create_scale(sx.into(), sy.into(), sz.into()),
Scale(sx, sy) => Transform3D::create_scale(sx.into(), sy.unwrap_or(sx).into(), 1.),
ScaleX(s) => Transform3D::create_scale(s.into(), 1., 1.),
ScaleY(s) => Transform3D::create_scale(1., s.into(), 1.),
ScaleZ(s) => Transform3D::create_scale(1., 1., s.into()),
Translate3D(ref tx, ref ty, ref tz) => {
let tx = tx.to_pixel_length(reference_width)? as f64;
let ty = ty.to_pixel_length(reference_height)? as f64;
Transform3D::create_translation(tx, ty, tz.to_pixel_length(None)? as f64)
},
Translate(ref tx, Some(ref ty)) => {
let tx = tx.to_pixel_length(reference_width)? as f64;
let ty = ty.to_pixel_length(reference_height)? as f64;
Transform3D::create_translation(tx, ty, 0.)
},
TranslateX(ref t) | Translate(ref t, None) => {
let t = t.to_pixel_length(reference_width)? as f64;
Transform3D::create_translation(t, 0., 0.)
},
TranslateY(ref t) => {
let t = t.to_pixel_length(reference_height)? as f64;
Transform3D::create_translation(0., t, 0.)
},
TranslateZ(ref z) => {
Transform3D::create_translation(0., 0., z.to_pixel_length(None)? as f64)
},
Skew(theta_x, theta_y) => Transform3D::create_skew(
euclid::Angle::radians(theta_x.as_ref().radians64()),
euclid::Angle::radians(theta_y.map_or(0., |a| a.as_ref().radians64())),
),
SkewX(theta) => Transform3D::create_skew(
euclid::Angle::radians(theta.as_ref().radians64()),
euclid::Angle::radians(0.),
),
SkewY(theta) => Transform3D::create_skew(
euclid::Angle::radians(0.),
euclid::Angle::radians(theta.as_ref().radians64()),
),
Matrix3D(m) => m.into(),
Matrix(m) => m.into(),
InterpolateMatrix { .. } | AccumulateMatrix { .. } => {
// TODO: Convert InterpolateMatrix/AccumulateMatrix into a valid Transform3D by
// the reference box and do interpolation on these two Transform3D matrices.
// Both Gecko and Servo don't support this for computing distance, and Servo
// doesn't support animations on InterpolateMatrix/AccumulateMatrix, so
// return an identity matrix.
// Note: DOMMatrix doesn't go into this arm.
Transform3D::identity()
},
};
Ok(matrix)
}
}
impl<T> Transform<T> {
/// `none`
pub fn none() -> Self {
Transform(vec![])
}
}
impl<T: ToMatrix> Transform<T> {
/// Return the equivalent 3d matrix of this transform list.
/// We return a pair: the first one is the transform matrix, and the second one
/// indicates if there is any 3d transform function in this transform list.
#[cfg_attr(rustfmt, rustfmt_skip)]
pub fn to_transform_3d_matrix(
&self,
reference_box: Option<&Rect<Au>>
) -> Result<(Transform3D<CSSFloat>, bool), ()> {
let cast_3d_transform = |m: Transform3D<f64>| -> Transform3D<CSSFloat> {
use std::{f32, f64};
let cast = |v: f64| { v.min(f32::MAX as f64).max(f32::MIN as f64) as f32 };
Transform3D::row_major(
cast(m.m11), cast(m.m12), cast(m.m13), cast(m.m14),
cast(m.m21), cast(m.m22), cast(m.m23), cast(m.m24),
cast(m.m31), cast(m.m32), cast(m.m33), cast(m.m34),
cast(m.m41), cast(m.m42), cast(m.m43), cast(m.m44),
)
};
// We intentionally use Transform3D<f64> during computation to avoid error propagation
// because using f32 to compute trigonometric functions (e.g. in create_rotation()) is not
// accurate enough. In Gecko, we also use "double" to compute the trigonometric functions.
// Therefore, let's use Transform3D<f64> during matrix computation and cast it into f32
// in the end.
let mut transform = Transform3D::<f64>::identity();
let mut contain_3d = false;
for operation in &self.0 {
let matrix = operation.to_3d_matrix(reference_box)?;
contain_3d |= operation.is_3d();
transform = transform.pre_mul(&matrix);
}
Ok((cast_3d_transform(transform), contain_3d))
}
}
/// Return the transform matrix from a perspective length.
#[inline]
pub fn create_perspective_matrix(d: CSSFloat) -> Transform3D<CSSFloat> {
// TODO(gw): The transforms spec says that perspective length must
// be positive. However, there is some confusion between the spec
// and browser implementations as to handling the case of 0 for the
// perspective value. Until the spec bug is resolved, at least ensure
// that a provided perspective value of <= 0.0 doesn't cause panics
// and behaves as it does in other browsers.
// See https://lists.w3.org/Archives/Public/www-style/2016Jan/0020.html for more details.
if d <= 0.0 {
Transform3D::identity()
} else {
Transform3D::create_perspective(d)
}
}
/// Return the normalized direction vector and its angle for Rotate3D.
pub fn get_normalized_vector_and_angle<T: Zero>(
x: CSSFloat,
y: CSSFloat,
z: CSSFloat,
angle: T,
) -> (CSSFloat, CSSFloat, CSSFloat, T) {
use euclid::approxeq::ApproxEq;
use values::computed::transform::DirectionVector;
let vector = DirectionVector::new(x, y, z);
if vector.square_length().approx_eq(&f32::zero()) {
// https://www.w3.org/TR/css-transforms-1/#funcdef-rotate3d
// A direction vector that cannot be normalized, such as [0, 0, 0], will cause the
// rotation to not be applied, so we use identity matrix (i.e. rotate3d(0, 0, 1, 0)).
(0., 0., 1., T::zero())
} else {
let vector = vector.robust_normalize();
(vector.x, vector.y, vector.z, angle)
}
}
#[derive(
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
/// A value of the `Rotate` property
///
/// <https://drafts.csswg.org/css-transforms-2/#individual-transforms>
pub enum Rotate<Number, Angle> {
/// 'none'
None,
/// '<angle>'
Rotate(Angle),
/// '<number>{3} <angle>'
Rotate3D(Number, Number, Number, Angle),
}
#[derive(
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
/// A value of the `Scale` property
///
/// <https://drafts.csswg.org/css-transforms-2/#individual-transforms>
pub enum Scale<Number> {
/// 'none'
None,
/// '<number>'
ScaleX(Number),<|fim▁hole|> /// '<number>{2}'
Scale(Number, Number),
/// '<number>{3}'
Scale3D(Number, Number, Number),
}
#[derive(
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
/// A value of the `Translate` property
///
/// <https://drafts.csswg.org/css-transforms-2/#individual-transforms>
pub enum Translate<LengthOrPercentage, Length> {
/// 'none'
None,
/// '<length-percentage>'
TranslateX(LengthOrPercentage),
/// '<length-percentage> <length-percentage>'
Translate(LengthOrPercentage, LengthOrPercentage),
/// '<length-percentage> <length-percentage> <length>'
Translate3D(LengthOrPercentage, LengthOrPercentage, Length),
}
#[allow(missing_docs)]
#[derive(
Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum TransformStyle {
#[cfg(feature = "servo")]
Auto,
Flat,
#[css(keyword = "preserve-3d")]
Preserve3d,
}<|fim▁end|> | |
<|file_name|>run_bot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from mattermost_bot.bot import Bot, PluginsManager
from mattermost_bot.mattermost import MattermostClient
from mattermost_bot.dispatcher import MessageDispatcher
import bot_settings
class LocalBot(Bot):
def __init__(self):
self._client = MattermostClient(
bot_settings.BOT_URL, bot_settings.BOT_TEAM,
bot_settings.BOT_LOGIN, bot_settings.BOT_PASSWORD,
bot_settings.SSL_VERIFY
)
self._plugins = PluginsManager()
self._plugins.init_plugins()
self._dispatcher = MessageDispatcher(self._client, self._plugins)
def main():
bot = LocalBot()
bot.run()<|fim▁hole|> main()<|fim▁end|> |
if __name__ == '__main__': |
<|file_name|>xiaoi.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
# created by: Han Feng (https://github.com/hanx11)
import collections
import hashlib
import logging
import requests
from wxpy.api.messages import Message
from wxpy.ext.talk_bot_utils import get_context_user_id, next_topic
from wxpy.utils.misc import get_text_without_at_bot
from wxpy.utils import enhance_connection
logger = logging.getLogger(__name__)
from wxpy.compatible import *
class XiaoI(object):
"""
The XiaoI chat bot, deeply integrated with wxpy
"""
# noinspection SpellCheckingInspection
def __init__(self, key, secret):
"""
| A key and a secret are required; obtain them by registering.
| Free sign-up: http://cloud.xiaoi.com/
:param key: the key you applied for
:param secret: the secret you applied for
"""
self.key = key
self.secret = secret
self.realm = "xiaoi.com"
self.http_method = "POST"
self.uri = "/ask.do"
self.url = "http://nlp.xiaoi.com/ask.do?platform=custom"
xauth = self._make_http_header_xauth()
headers = {
"Content-type": "application/x-www-form-urlencoded",
"Accept": "text/plain",
}
headers.update(xauth)
self.session = requests.Session()
self.session.headers.update(headers)
enhance_connection(self.session)
def _make_signature(self):
"""
Generate the request signature
"""
# 40 random digits (a fixed nonce is used instead of the commented-out version)
# nonce = "".join([str(randint(0, 9)) for _ in range(40)])
nonce = "4103657107305326101203516108016101205331"
sha1 = "{0}:{1}:{2}".format(self.key, self.realm, self.secret).encode("utf-8")
sha1 = hashlib.sha1(sha1).hexdigest()
sha2 = "{0}:{1}".format(self.http_method, self.uri).encode("utf-8")
sha2 = hashlib.sha1(sha2).hexdigest()
signature = "{0}:{1}:{2}".format(sha1, nonce, sha2).encode("utf-8")
signature = hashlib.sha1(signature).hexdigest()
ret = collections.namedtuple("signature_return", "signature nonce")
ret.signature = signature
ret.nonce = nonce
return ret
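# The signature computed above is, in short:
#   sha1( sha1(key:realm:secret) + ":" + nonce + ":" + sha1(METHOD:URI) )
# with realm "xiaoi.com", METHOD "POST" and URI "/ask.do" as set in __init__.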
def _make_http_header_xauth(self):
"""
Generate the request authentication header
"""
sign = self._make_signature()
ret = {<|fim▁hole|> }
return ret
def do_reply(self, msg):
"""
回复消息,并返回答复文本
:param msg: Message 对象
:return: 答复文本
"""
ret = self.reply_text(msg)
msg.reply(ret)
return ret
def reply_text(self, msg):
"""
仅返回答复文本
:param msg: Message 对象,或消息文本
:return: 答复文本
"""
error_response = (
"主人还没给我设置这类话题的回复",
)
if isinstance(msg, Message):
user_id = get_context_user_id(msg)
question = get_text_without_at_bot(msg)
else:
user_id = "abc"
question = msg or ""
params = {
"question": question,
"format": "json",
"platform": "custom",
"userId": user_id,
}
resp = self.session.post(self.url, data=params)
text = resp.text
for err in error_response:
if err in text:
return next_topic()
return text<|fim▁end|> | "X-Auth": "app_key=\"{0}\",nonce=\"{1}\",signature=\"{2}\"".format(
self.key, sign.nonce, sign.signature) |
<|file_name|>test_kinesis.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import uuid
from airflow.providers.amazon.aws.hooks.kinesis import AwsFirehoseHook
try:
from moto import mock_kinesis
except ImportError:
mock_kinesis = None
class TestAwsFirehoseHook(unittest.TestCase):
@unittest.skipIf(mock_kinesis is None, 'mock_kinesis package not present')
@mock_kinesis
def test_get_conn_returns_a_boto3_connection(self):
hook = AwsFirehoseHook(
aws_conn_id='aws_default', delivery_stream="test_airflow", region_name="us-east-1"
)
self.assertIsNotNone(hook.get_conn())
<|fim▁hole|> @unittest.skipIf(mock_kinesis is None, 'mock_kinesis package not present')
@mock_kinesis
def test_insert_batch_records_kinesis_firehose(self):
hook = AwsFirehoseHook(
aws_conn_id='aws_default', delivery_stream="test_airflow", region_name="us-east-1"
)
response = hook.get_conn().create_delivery_stream(
DeliveryStreamName="test_airflow",
S3DestinationConfiguration={
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
'BucketARN': 'arn:aws:s3:::kinesis-test',
'Prefix': 'airflow/',
'BufferingHints': {'SizeInMBs': 123, 'IntervalInSeconds': 124},
'CompressionFormat': 'UNCOMPRESSED',
},
)
stream_arn = response['DeliveryStreamARN']
self.assertEqual(stream_arn, "arn:aws:firehose:us-east-1:123456789012:deliverystream/test_airflow")
records = [{"Data": str(uuid.uuid4())} for _ in range(100)]
response = hook.put_records(records)
self.assertEqual(response['FailedPutCount'], 0)
self.assertEqual(response['ResponseMetadata']['HTTPStatusCode'], 200)<|fim▁end|> | |
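# moto's @mock_kinesis also works as a context manager; a sketch of the same
# setup outside of unittest (the stream name here is hypothetical):
#
# with mock_kinesis():
#     hook = AwsFirehoseHook(aws_conn_id='aws_default',
#                            delivery_stream='demo', region_name='us-east-1')
#     assert hook.get_conn() is not None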
<|file_name|>gandi_live.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Gandi Live driver base classes
"""
import json
from libcloud.common.base import ConnectionKey, JsonResponse
from libcloud.common.types import ProviderError
from libcloud.utils.py3 import httplib
__all__ = [
"API_HOST",
"GandiLiveBaseError",
"JsonParseError",
"ResourceNotFoundError",
"InvalidRequestError",
"ResourceConflictError",
"GandiLiveResponse",
"GandiLiveConnection",
"BaseGandiLiveDriver",
]
API_HOST = "dns.api.gandi.net"
class GandiLiveBaseError(ProviderError):
"""
Exception class for Gandi Live driver
"""
pass
class JsonParseError(GandiLiveBaseError):
pass
# Example:
# {
# "code": 404,
# "message": "Unknown zone",
# "object": "LocalizedHTTPNotFound",
# "cause": "Not Found"
# }
class ResourceNotFoundError(GandiLiveBaseError):
pass
# Example:<|fim▁hole|># "message": "zone or zone_uuid must be set",
# "object": "HTTPBadRequest",
# "cause": "No zone set.",
# "errors": [
# {
# "location": "body",
# "name": "zone_uuid",
# "description": "\"FAKEUUID\" is not a UUID"
# }
# ]
# }
class InvalidRequestError(GandiLiveBaseError):
pass
# Examples:
# {
# "code": 409,
# "message": "Zone Testing already exists",
# "object": "HTTPConflict",
# "cause": "Duplicate Entry"
# }
# {
# "code": 409,
# "message": "The domain example.org already exists",
# "object": "HTTPConflict",
# "cause": "Duplicate Entry"
# }
# {
# "code": 409,
# "message": "This zone is still used by 1 domains",
# "object": "HTTPConflict",
# "cause": "In use"
# }
class ResourceConflictError(GandiLiveBaseError):
pass
class GandiLiveResponse(JsonResponse):
"""
A Base Gandi Live Response class to derive from.
"""
def success(self):
"""
Determine if our request was successful.
For the Gandi Live response class, tag all responses as successful and
raise appropriate Exceptions from parse_body.
:return: C{True}
"""
return True
def parse_body(self):
"""
Parse the JSON response body, or raise exceptions as appropriate.
:return: JSON dictionary
:rtype: ``dict``
"""
json_error = False
try:
body = json.loads(self.body)
except Exception:
# If there is both a JSON parsing error and an unsuccessful http
# response (like a 404), we want to raise the http error and not
# the JSON one, so don't raise JsonParseError here.
body = self.body
json_error = True
# Service does not appear to return HTTP 202 Accepted for anything.
valid_http_codes = [
httplib.OK,
httplib.CREATED,
]
if self.status in valid_http_codes:
if json_error:
raise JsonParseError(body, self.status)
else:
return body
elif self.status == httplib.NO_CONTENT:
# Parse error for empty body is acceptable, but a non-empty body
# is not.
if len(body) > 0:
msg = '"No Content" response contained content'
raise GandiLiveBaseError(msg, self.status)
else:
return {}
elif self.status == httplib.NOT_FOUND:
message = self._get_error(body, json_error)
raise ResourceNotFoundError(message, self.status)
elif self.status == httplib.BAD_REQUEST:
message = self._get_error(body, json_error)
raise InvalidRequestError(message, self.status)
elif self.status == httplib.CONFLICT:
message = self._get_error(body, json_error)
raise ResourceConflictError(message, self.status)
else:
message = self._get_error(body, json_error)
raise GandiLiveBaseError(message, self.status)
# Errors are not described at all in Gandi's official documentation.
# It appears when an error arises, a JSON object is returned along with
# an HTTP 4xx class code. The object is structured as:
# {
# code: <code>,
# object: <object>,
# message: <message>,
# cause: <cause>,
# errors: [
# {
# location: <error-location>,
# name: <error-name>,
# description: <error-description>
# }
# ]
# }
# where
# <code> is a number equal to the HTTP response status code
# <object> is a string with some internal name for the status code
# <message> is a string detailing what the problem is
# <cause> is a string that comes from a set of succinct problem summaries
# errors is optional; if present:
# <error-location> is a string for which part of the request to look in
# <error-name> is a string naming the parameter
# <error-description> is a string detailing what the problem is
# Here we ignore object and combine message and cause along with an error
# if one or more exists.
def _get_error(self, body, json_error):
"""
Get the error code and message from a JSON response.
Incorporate the first error if there are multiple errors.
:param body: The body of the JSON response dictionary
:type body: ``dict``
:param json_error: Whether the body could not be parsed as JSON
:type json_error: ``bool``
:return: String containing error message
:rtype: ``str``
"""
if not json_error and "cause" in body:
message = "%s: %s" % (body["cause"], body["message"])
if "errors" in body:
err = body["errors"][0]
message = "%s (%s in %s: %s)" % (
message,
err.get("location"),
err.get("name"),
err.get("description"),
)
else:
message = body
return message
class GandiLiveConnection(ConnectionKey):
"""
Connection class for the Gandi Live driver
"""
responseCls = GandiLiveResponse
host = API_HOST
def add_default_headers(self, headers):
"""
Returns default headers as a dictionary.
"""
headers["Content-Type"] = "application/json"
headers["X-Api-Key"] = self.key
return headers
def encode_data(self, data):
"""Encode data to JSON"""
return json.dumps(data)
class BaseGandiLiveDriver(object):
"""
Gandi Live base driver
"""
connectionCls = GandiLiveConnection
name = "GandiLive"<|fim▁end|> | # {
# "code": 400, |
<|file_name|>keyboard.rs<|end_file_name|><|fim▁begin|>use crate::platform::*;
use crate::types::InputList;
use registry::{Data, Hive, RegKey, Security};
use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::io;
use std::path::Path;
#[cfg(feature = "legacy")]
pub use crate::keyboard_legacy::*;
#[cfg(not(feature = "legacy"))]
pub use crate::keyboard_win8::*;
pub struct KeyboardRegKey {
id: String,
regkey: RegKey,
}
#[derive(Debug)]
pub enum Error {
AlreadyExists,
NotFound,
IoError(io::Error),
RegErr(registry::key::Error),
}
pub fn install(
tag: &str,
layout_name: &str,
product_code: &str,
layout_file: &str,
display_name: Option<&str>,
) -> Result<(), Error> {
log::info!("Checking if already installed");
if let Some(_) = KeyboardRegKey::find_by_product_code(product_code) {
return Err(Error::AlreadyExists);
}
log::info!("Checking language name is valid");
let lang_name = match display_name {
Some(v) => v.to_owned(),
#[cfg(not(feature = "legacy"))]
None => winlangdb::get_language_names(tag).unwrap().name,
#[cfg(feature = "legacy")]
None => layout_name.to_owned(),
};
log::info!("Creating registry key");
KeyboardRegKey::create(tag, &lang_name, product_code, layout_file, layout_name);
Ok(())
}
#[cfg(feature = "legacy")]
fn enabled_input_methods() -> InputList {
InputList::try_from("".to_owned()).unwrap()
}
fn delete_keyboard_regkey(record: KeyboardRegKey) -> Result<(), Error> {
let klrk = keyboard_layouts_regkey_write();
match klrk.delete(record.regkey_id(), true) {
Ok(_) => Ok(()),
Err(e) => Err(Error::RegErr(e)),
}
}
pub fn uninstall(product_code: &str) -> Result<(), Error> {
if let Some(record) = KeyboardRegKey::find_by_product_code(product_code) {
delete_keyboard_regkey(record)?;
crate::clean().unwrap();
return Ok(());
}
Err(Error::NotFound)
}
pub fn installed() -> Vec<KeyboardRegKey> {
KeyboardRegKey::installed()
}
fn keyboard_layouts_regkey_readonly() -> RegKey {
Hive::LocalMachine
.open(
r"SYSTEM\CurrentControlSet\Control\Keyboard Layouts",
Security::Read,
)
.unwrap()
}
fn keyboard_layouts_regkey_write() -> RegKey {
Hive::LocalMachine
.open(
r"SYSTEM\CurrentControlSet\Control\Keyboard Layouts",
Security::Write | Security::Read,
)
.unwrap()
}
pub fn remove_invalid() {
remove_duplicate_guids();
remove_invalid_dlls();
#[cfg(not(feature = "legacy"))]
remove_invalid_kbids();
}
fn remove_duplicate_guids() {
// Find duplicate GUIDs, clear all but first
let mut guids = vec![];
let keys = KeyboardRegKey::installed();
for key in keys {
let guid = match key.product_code() {
Some(v) => v,
None => continue,
};
if guids.contains(&guid) {
delete_keyboard_regkey(key).unwrap();
} else {
guids.push(guid);
}
}
}
fn remove_invalid_dlls() {
let keys = KeyboardRegKey::installed();
for key in keys {
let layout_file = match key.layout_file() {
Some(v) => v,
None => continue,
};
if !Path::new(r"C:\Windows\System32").join(layout_file).exists() {
delete_keyboard_regkey(key).unwrap();
}
}
}
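// Installed layout subkeys created by this tool are named "<4-hex-digit
// prefix><lcid>" (e.g. "a0000409" for the en-US lcid); scan the existing
// keys and return the next free id for this lcid, starting at "a000<lcid>".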
fn first_available_keyboard_regkey_id(lcid: &str) -> String {
let regkey = keyboard_layouts_regkey_readonly();
let mut kbd_keys: Vec<u16> = regkey
.keys()
.map(|x| x.unwrap().to_string())
.filter(|x| x.starts_with(&"a") && x.ends_with(&lcid))
.map(|x| {
let n = u32::from_str_radix(&x, 16).unwrap_or(0u32);
(n >> 16) as u16
})
.collect();
kbd_keys.sort();
if let Some(last) = kbd_keys.last() {
format!("{:04x}{}", last + 1, lcid)
} else {
format!("a000{}", lcid)
}
}
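// Pick the next unused "Layout Id" value across all installed layouts,
// returned as a four-hex-digit string.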
fn first_available_layout_id() -> String {
let regkey = keyboard_layouts_regkey_readonly();
let kbd_keys: Vec<String> = regkey.keys().map(|x| x.unwrap().to_string()).collect();
let mut layout_ids: Vec<u32> = kbd_keys
.into_iter()
.map(|key| {
let kbdkey = ®key.open(key, Security::Read | Security::Write).unwrap();
let layout_idstr: String = match kbdkey.value("Layout Id") {
Ok(Data::String(v)) => v.to_string_lossy(),
_ => "0".to_string(),
};
u32::from_str_radix(&layout_idstr, 16).unwrap_or(0u32)
})
.collect();
layout_ids.sort();
format!("{:04x}", layout_ids.last().unwrap() + 1)
}
impl KeyboardRegKey {
pub fn find_by_product_code(product_code: &str) -> Option<KeyboardRegKey> {
let regkey = keyboard_layouts_regkey_readonly();
let keys: Vec<String> = regkey.keys().map(|x| x.unwrap().to_string()).collect();
for key in keys.into_iter() {
let kl_key = regkey.open(&key, Security::Read).unwrap();
let ret: Result<Data, registry::value::Error> = kl_key.value("Layout Product Code");
match ret {
Ok(Data::String(s)) if s.to_string_lossy() == product_code => {
return Some(KeyboardRegKey {
id: key.clone(),
regkey: kl_key,
})
}
_ => continue,
}
}
None
}
pub fn installed() -> Vec<KeyboardRegKey> {
let regkey = keyboard_layouts_regkey_readonly();
regkey
.keys()
.map(|x| x.unwrap().to_string())
.filter(|x| x.starts_with("a"))
.map(|x| {
let k = regkey.open(&x, Security::Read | Security::Write).unwrap();
KeyboardRegKey {
id: x.to_owned(),
regkey: k,
}
})
.collect()
}
pub fn regkey_id(&self) -> &str {
&self.id
}
pub fn id(&self) -> Option<String> {
match self.regkey.value("Layout Id") {
Ok(Data::String(v)) => Some(v.to_string_lossy()),
_ => None,
}
}
pub fn product_code(&self) -> Option<String> {
match self.regkey.value("Layout Product Code") {
Ok(Data::String(v)) => Some(v.to_string_lossy()),
_ => None,
}
}
pub fn language_name(&self) -> Option<String> {
match self.regkey.value("Custom Language Name") {
Ok(Data::String(v)) => Some(v.to_string_lossy()),
_ => None,
}
}
pub fn layout_file(&self) -> Option<String> {
match self.regkey.value("Layout File") {
Ok(Data::String(v)) => Some(v.to_string_lossy()),
_ => None,
}
}
pub fn layout_name(&self) -> Option<String> {
match self.regkey.value("Layout Text") {
Ok(Data::String(v)) => Some(v.to_string_lossy()),
_ => None,
}
}
pub fn create(
tag: &str,
display_name: &str,
product_code: &str,
layout_file: &str,
layout_name: &str,
) -> KeyboardRegKey {
info!("Locale name to lcid");
let lcid = format!("{:04x}", crate::lcid(&tag) as u16);
info!("Using lcid '{}'", lcid);
info!("D: Get first available reg ids");
let key_name = first_available_keyboard_regkey_id(&lcid);
let layout_id = first_available_layout_id();
info!("D: open regkey");
let regkey = keyboard_layouts_regkey_write()
.create(&key_name, Security::Read | Security::Write)
.unwrap();
info!("D: set regkey vals");
regkey
.set_value(
"Custom Language Display Name",
&Data::String(
format!("@%SystemRoot%\\system32\\{},-1100", &layout_file)
.try_into()
.unwrap(),
),
)
.unwrap();
regkey
.set_value(
"Custom Language Name",
&Data::String(display_name.try_into().unwrap()),
)
.unwrap();
regkey
.set_value(
"Layout Display Name",
&Data::String(
format!("@%SystemRoot%\\system32\\{},-1000", &layout_file)
.try_into()
.unwrap(),
),
)
.unwrap();
regkey
.set_value(
"Layout File",
&Data::String(layout_file.try_into().unwrap()),
)
.unwrap();
regkey
.set_value("Layout Id", &Data::String(layout_id.try_into().unwrap()))
.unwrap();
regkey
.set_value("Layout Locale Name", &Data::String(tag.try_into().unwrap()))
.unwrap();
regkey
.set_value(
"Layout Product Code",
&Data::String(product_code.try_into().unwrap()),
)
.unwrap();
regkey
.set_value(
"Layout Text",
&Data::String(layout_name.try_into().unwrap()),
)
.unwrap();
KeyboardRegKey {
id: key_name.clone(),
regkey,
}
}
}
impl fmt::Display for KeyboardRegKey {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Registry Key: {}", self.regkey_id())?;
writeln!(
f,
"Layout Name: {}",<|fim▁hole|> "Language Name: {}",
self.language_name().unwrap_or("".to_string())
)?;
writeln!(
f,
"Layout File: {}",
self.layout_file().unwrap_or("".to_string())
)?;
writeln!(f, "Layout Id: {}", self.id().unwrap_or("".to_string()))?;
writeln!(
f,
"Product Code: {}",
self.product_code().unwrap_or("".to_string())
)?;
Ok(())
}
}<|fim▁end|> | self.layout_name().unwrap_or("".to_string())
)?;
writeln!(
f, |
<|file_name|>backends.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from moto.acm import acm_backends
from moto.apigateway import apigateway_backends
from moto.autoscaling import autoscaling_backends
from moto.awslambda import lambda_backends
from moto.cloudformation import cloudformation_backends
from moto.cloudwatch import cloudwatch_backends
from moto.cognitoidentity import cognitoidentity_backends
from moto.cognitoidp import cognitoidp_backends
from moto.core import moto_api_backends
from moto.datapipeline import datapipeline_backends
from moto.dynamodb import dynamodb_backends
from moto.dynamodb2 import dynamodb_backends2
from moto.dynamodbstreams import dynamodbstreams_backends
from moto.ec2 import ec2_backends
from moto.ecr import ecr_backends
from moto.ecs import ecs_backends
from moto.elb import elb_backends
from moto.elbv2 import elbv2_backends
from moto.emr import emr_backends
from moto.events import events_backends
from moto.glacier import glacier_backends
from moto.glue import glue_backends
from moto.iam import iam_backends
from moto.instance_metadata import instance_metadata_backends
from moto.kinesis import kinesis_backends
from moto.kms import kms_backends
from moto.logs import logs_backends
from moto.opsworks import opsworks_backends
from moto.organizations import organizations_backends
from moto.polly import polly_backends
from moto.rds2 import rds2_backends
from moto.redshift import redshift_backends
from moto.resourcegroups import resourcegroups_backends
from moto.route53 import route53_backends
from moto.s3 import s3_backends
from moto.ses import ses_backends
from moto.secretsmanager import secretsmanager_backends
from moto.sns import sns_backends
from moto.sqs import sqs_backends
from moto.ssm import ssm_backends
from moto.sts import sts_backends
from moto.swf import swf_backends
from moto.xray import xray_backends
from moto.iot import iot_backends
from moto.iotdata import iotdata_backends
from moto.batch import batch_backends
from moto.resourcegroupstaggingapi import resourcegroupstaggingapi_backends
from moto.config import config_backends
BACKENDS = {
'acm': acm_backends,
'apigateway': apigateway_backends,
'autoscaling': autoscaling_backends,
'batch': batch_backends,
'cloudformation': cloudformation_backends,
'cloudwatch': cloudwatch_backends,
'cognito-identity': cognitoidentity_backends,
'cognito-idp': cognitoidp_backends,
'config': config_backends,
'datapipeline': datapipeline_backends,
'dynamodb': dynamodb_backends,
'dynamodb2': dynamodb_backends2,
'dynamodbstreams': dynamodbstreams_backends,
'ec2': ec2_backends,
'ecr': ecr_backends,
'ecs': ecs_backends,
'elb': elb_backends,
'elbv2': elbv2_backends,
'events': events_backends,
'emr': emr_backends,
'glacier': glacier_backends,
'glue': glue_backends,
'iam': iam_backends,
'moto_api': moto_api_backends,
'instance_metadata': instance_metadata_backends,
'logs': logs_backends,
'kinesis': kinesis_backends,
'kms': kms_backends,
'opsworks': opsworks_backends,
'organizations': organizations_backends,
'polly': polly_backends,
'redshift': redshift_backends,
'resource-groups': resourcegroups_backends,
'rds': rds2_backends,
's3': s3_backends,
's3bucket_path': s3_backends,
'ses': ses_backends,
'secretsmanager': secretsmanager_backends,
'sns': sns_backends,
'sqs': sqs_backends,
'ssm': ssm_backends,
'sts': sts_backends,
'swf': swf_backends,
'route53': route53_backends,
'lambda': lambda_backends,
'xray': xray_backends,
'resourcegroupstaggingapi': resourcegroupstaggingapi_backends,
'iot': iot_backends,
'iot-data': iotdata_backends,
}
def get_model(name, region_name):
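"""Best-effort lookup sketch: return instances of the model ``name``
from whichever backend serves ``region_name`` and declares it in its
``__models__`` mapping."""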
for backends in BACKENDS.values():
for region, backend in backends.items():
if region == region_name:
models = getattr(backend.__class__, '__models__', {})<|fim▁hole|><|fim▁end|> | if name in models:
return list(getattr(backend, models[name])()) |
<|file_name|>htmlformelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::AttrValue;
use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HTMLButtonElementBinding::HTMLButtonElementMethods;
use dom::bindings::codegen::Bindings::HTMLFormElementBinding;
use dom::bindings::codegen::Bindings::HTMLFormElementBinding::HTMLFormElementMethods;
use dom::bindings::codegen::Bindings::HTMLInputElementBinding::HTMLInputElementMethods;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{JS, MutNullableHeap, Root};
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::Reflectable;
use dom::blob::Blob;
use dom::document::Document;
use dom::element::Element;
use dom::event::{EventBubbles, EventCancelable};
use dom::eventtarget::EventTarget;
use dom::file::File;
use dom::htmlbuttonelement::HTMLButtonElement;
use dom::htmlcollection::CollectionFilter;
use dom::htmldatalistelement::HTMLDataListElement;
use dom::htmlelement::HTMLElement;
use dom::htmlfieldsetelement::HTMLFieldSetElement;
use dom::htmlformcontrolscollection::HTMLFormControlsCollection;
use dom::htmlinputelement::HTMLInputElement;
use dom::htmlobjectelement::HTMLObjectElement;
use dom::htmloutputelement::HTMLOutputElement;
use dom::htmlselectelement::HTMLSelectElement;
use dom::htmltextareaelement::HTMLTextAreaElement;
use dom::node::{Node, document_from_node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::Window;
use encoding::EncodingRef;
use encoding::all::UTF_8;
use encoding::label::encoding_from_whatwg_label;
use hyper::header::{Charset, ContentDisposition, ContentType, DispositionParam, DispositionType};
use hyper::method::Method;
use msg::constellation_msg::{LoadData, PipelineId};
use rand::random;
use script_runtime::ScriptChan;
use script_thread::{MainThreadScriptMsg, Runnable};
use std::borrow::ToOwned;
use std::cell::Cell;
use std::str::from_utf8;
use std::sync::mpsc::Sender;
use string_cache::Atom;
use task_source::dom_manipulation::DOMManipulationTask;
use url::form_urlencoded;
use util::str::{DOMString, split_html_space_chars};
#[derive(JSTraceable, PartialEq, Clone, Copy, HeapSizeOf)]
pub struct GenerationId(u32);
#[dom_struct]
pub struct HTMLFormElement {
htmlelement: HTMLElement,
marked_for_reset: Cell<bool>,
elements: MutNullableHeap<JS<HTMLFormControlsCollection>>,
generation_id: Cell<GenerationId>
}
impl HTMLFormElement {
fn new_inherited(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLFormElement {
HTMLFormElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document),
marked_for_reset: Cell::new(false),
elements: Default::default(),
generation_id: Cell::new(GenerationId(0))
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLFormElement> {
let element = HTMLFormElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLFormElementBinding::Wrap)
}
pub fn generation_id(&self) -> GenerationId {
self.generation_id.get()
}
}
impl HTMLFormElementMethods for HTMLFormElement {
// https://html.spec.whatwg.org/multipage/#dom-form-acceptcharset
make_getter!(AcceptCharset, "accept-charset");
// https://html.spec.whatwg.org/multipage/#dom-form-acceptcharset
make_setter!(SetAcceptCharset, "accept-charset");
// https://html.spec.whatwg.org/multipage/#dom-fs-action
make_url_or_base_getter!(Action, "action");
// https://html.spec.whatwg.org/multipage/#dom-fs-action
make_setter!(SetAction, "action");
// https://html.spec.whatwg.org/multipage/#dom-form-autocomplete
make_enumerated_getter!(Autocomplete, "autocomplete", "on", ("off"));
// https://html.spec.whatwg.org/multipage/#dom-form-autocomplete
make_setter!(SetAutocomplete, "autocomplete");
// https://html.spec.whatwg.org/multipage/#dom-fs-enctype
make_enumerated_getter!(Enctype,
"enctype",
"application/x-www-form-urlencoded",
("text/plain") | ("multipart/form-data"));
// https://html.spec.whatwg.org/multipage/#dom-fs-enctype
make_setter!(SetEnctype, "enctype");
// https://html.spec.whatwg.org/multipage/#dom-fs-encoding
fn Encoding(&self) -> DOMString {
self.Enctype()
}
// https://html.spec.whatwg.org/multipage/#dom-fs-encoding
fn SetEncoding(&self, value: DOMString) {
self.SetEnctype(value)
}
// https://html.spec.whatwg.org/multipage/#dom-fs-method
make_enumerated_getter!(Method, "method", "get", ("post") | ("dialog"));
// https://html.spec.whatwg.org/multipage/#dom-fs-method
make_setter!(SetMethod, "method");
// https://html.spec.whatwg.org/multipage/#dom-form-name
make_getter!(Name, "name");
// https://html.spec.whatwg.org/multipage/#dom-form-name
make_atomic_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-fs-novalidate
make_bool_getter!(NoValidate, "novalidate");
// https://html.spec.whatwg.org/multipage/#dom-fs-novalidate
make_bool_setter!(SetNoValidate, "novalidate");
// https://html.spec.whatwg.org/multipage/#dom-fs-target
make_getter!(Target, "target");
// https://html.spec.whatwg.org/multipage/#dom-fs-target
make_setter!(SetTarget, "target");
// https://html.spec.whatwg.org/multipage/#the-form-element:concept-form-submit
fn Submit(&self) {
self.submit(SubmittedFrom::FromFormSubmitMethod, FormSubmitter::FormElement(self));
}
// https://html.spec.whatwg.org/multipage/#dom-form-reset
fn Reset(&self) {
self.reset(ResetFrom::FromFormResetMethod);
}
// https://html.spec.whatwg.org/multipage/#dom-form-elements
fn Elements(&self) -> Root<HTMLFormControlsCollection> {
if let Some(elements) = self.elements.get() {
return elements;
}
#[derive(JSTraceable, HeapSizeOf)]
struct ElementsFilter {
form: Root<HTMLFormElement>
}
impl CollectionFilter for ElementsFilter {
fn filter<'a>(&self, elem: &'a Element, _root: &'a Node) -> bool {
let form_owner = match elem.upcast::<Node>().type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(t)) => {
match t {
HTMLElementTypeId::HTMLButtonElement => {
elem.downcast::<HTMLButtonElement>().unwrap().form_owner()
}
HTMLElementTypeId::HTMLFieldSetElement => {
elem.downcast::<HTMLFieldSetElement>().unwrap().form_owner()
}
HTMLElementTypeId::HTMLInputElement => {
let input_elem = elem.downcast::<HTMLInputElement>().unwrap();
if input_elem.type_() == atom!("image") {
return false;
}
input_elem.form_owner()
}
HTMLElementTypeId::HTMLObjectElement => {
elem.downcast::<HTMLObjectElement>().unwrap().form_owner()
}
HTMLElementTypeId::HTMLOutputElement => {
elem.downcast::<HTMLOutputElement>().unwrap().form_owner()
}
HTMLElementTypeId::HTMLSelectElement => {
elem.downcast::<HTMLSelectElement>().unwrap().form_owner()
}
HTMLElementTypeId::HTMLTextAreaElement => {
elem.downcast::<HTMLTextAreaElement>().unwrap().form_owner()
}
_ => {
debug_assert!(!elem.downcast::<HTMLElement>().unwrap().is_listed_element());
return false;
}
}
}
_ => return false,
};
match form_owner {
Some(form_owner) => form_owner == self.form,
None => false,<|fim▁hole|> let window = window_from_node(self);
let elements = HTMLFormControlsCollection::new(window.r(), self.upcast(), filter);
self.elements.set(Some(&elements));
elements
}
// https://html.spec.whatwg.org/multipage/#dom-form-length
fn Length(&self) -> u32 {
self.Elements().Length() as u32
}
}
#[derive(Copy, Clone, HeapSizeOf, PartialEq)]
pub enum SubmittedFrom {
FromFormSubmitMethod,
NotFromFormSubmitMethod
}
#[derive(Copy, Clone, HeapSizeOf)]
pub enum ResetFrom {
FromFormResetMethod,
NotFromFormResetMethod
}
impl HTMLFormElement {
fn generate_boundary(&self) -> String {
let i1 = random::<u32>();
let i2 = random::<u32>();
format!("---------------------------{0}{1}", i1, i2)
}
// https://html.spec.whatwg.org/multipage/#picking-an-encoding-for-the-form
fn pick_encoding(&self) -> EncodingRef {
// Step 2
if self.upcast::<Element>().has_attribute(&atom!("accept-charset")) {
// Substep 1
let input = self.upcast::<Element>().get_string_attribute(&atom!("accept-charset"));
// Substep 2, 3, 4
let mut candidate_encodings = split_html_space_chars(&*input).filter_map(encoding_from_whatwg_label);
// Substep 5, 6
return candidate_encodings.next().unwrap_or(UTF_8);
}
// Step 1, 3
document_from_node(self).encoding()
}
// https://html.spec.whatwg.org/multipage/#multipart/form-data-encoding-algorithm
fn encode_form_data(&self, form_data: &mut Vec<FormDatum>,
encoding: Option<EncodingRef>,
boundary: String) -> String {
// Step 1
let mut result = "".to_owned();
// Step 2
// (maybe take encoding as input)
let encoding = encoding.unwrap_or(self.pick_encoding());
// Step 3
let charset = &*encoding.whatwg_name().unwrap();
// Step 4
for entry in form_data.iter_mut() {
// Substep 1
if entry.name == "_charset_" && entry.ty == "hidden" {
entry.value = FormDatumValue::String(DOMString::from(charset.clone()));
}
// TODO: Substep 2
// Step 5
// https://tools.ietf.org/html/rfc7578#section-4
result.push_str(&*format!("\r\n--{}\r\n", boundary));
let mut content_disposition = ContentDisposition {
disposition: DispositionType::Ext("form-data".to_owned()),
parameters: vec![DispositionParam::Ext("name".to_owned(), String::from(entry.name.clone()))]
};
match entry.value {
FormDatumValue::String(ref s) =>
result.push_str(&*format!("Content-Disposition: {}\r\n\r\n{}",
content_disposition,
s)),
FormDatumValue::File(ref f) => {
content_disposition.parameters.push(
DispositionParam::Filename(Charset::Ext(String::from(charset.clone())),
None,
f.name().clone().into()));
let content_type = ContentType(f.upcast::<Blob>().Type().parse().unwrap());
result.push_str(&*format!("Content-Disposition: {}\r\n{}\r\n\r\n",
content_disposition,
content_type));
result.push_str(from_utf8(&f.upcast::<Blob>().get_data().get_bytes()).unwrap());
}
}
}
result.push_str(&*format!("\r\n--{}--", boundary));
return result;
}
/// [Form submission](https://html.spec.whatwg.org/multipage/#concept-form-submit)
pub fn submit(&self, submit_method_flag: SubmittedFrom, submitter: FormSubmitter) {
// Step 1
let doc = document_from_node(self);
let base = doc.url();
// TODO: Handle browsing contexts
// Step 4
if submit_method_flag == SubmittedFrom::NotFromFormSubmitMethod &&
!submitter.no_validate(self)
{
if self.interactive_validation().is_err() {
// TODO: Implement event handlers on all form control elements
self.upcast::<EventTarget>().fire_simple_event("invalid");
return;
}
}
// Step 5
if submit_method_flag == SubmittedFrom::NotFromFormSubmitMethod {
let event = self.upcast::<EventTarget>()
.fire_event("submit",
EventBubbles::Bubbles,
EventCancelable::Cancelable);
if event.DefaultPrevented() {
return;
}
}
// Step 6
let mut form_data = self.get_form_dataset(Some(submitter));
// Step 7
let mut action = submitter.action();
// Step 8
if action.is_empty() {
action = DOMString::from(base.as_str());
}
// Step 9-11
let action_components = match base.join(&action) {
Ok(url) => url,
Err(_) => return
};
// Step 12-15
let scheme = action_components.scheme().to_owned();
let enctype = submitter.enctype();
let method = submitter.method();
let _target = submitter.target();
// TODO: Handle browsing contexts, partially loaded documents (step 16-17)
let mut load_data = LoadData::new(action_components, doc.get_referrer_policy(), Some(doc.url().clone()));
let parsed_data = match enctype {
FormEncType::UrlEncoded => {
load_data.headers.set(ContentType::form_url_encoded());
form_urlencoded::Serializer::new(String::new())
.extend_pairs(form_data.into_iter().map(|field| (field.name.clone(), field.value_str())))
.finish()
}
FormEncType::FormDataEncoded => {
let boundary = self.generate_boundary();
let mime = mime!(Multipart / FormData; Boundary =(&boundary));
load_data.headers.set(ContentType(mime));
self.encode_form_data(&mut form_data, None, boundary)
}
// TODO: Support plain text encoding
FormEncType::TextPlainEncoded => "".to_owned()
};
// Step 18
let win = window_from_node(self);
match (&*scheme, method) {
// https://html.spec.whatwg.org/multipage/#submit-dialog
(_, FormMethod::FormDialog) => return, // Unimplemented
// https://html.spec.whatwg.org/multipage/#submit-mutate-action
("http", FormMethod::FormGet) | ("https", FormMethod::FormGet) => {
// FIXME(SimonSapin): use url.query_pairs_mut() here.
load_data.url.set_query(Some(&*parsed_data));
self.plan_to_navigate(load_data, &win);
}
// https://html.spec.whatwg.org/multipage/#submit-body
("http", FormMethod::FormPost) | ("https", FormMethod::FormPost) => {
load_data.method = Method::Post;
load_data.data = Some(parsed_data.into_bytes());
self.plan_to_navigate(load_data, &win);
}
// https://html.spec.whatwg.org/multipage/#submit-get-action
("file", _) | ("about", _) | ("data", FormMethod::FormGet) |
("ftp", _) | ("javascript", _) => {
self.plan_to_navigate(load_data, &win);
}
_ => return // Unimplemented (data and mailto)
}
}
/// [Planned navigation](https://html.spec.whatwg.org/multipage/#planned-navigation)
fn plan_to_navigate(&self, load_data: LoadData, window: &Window) {
// Step 1
// Each planned navigation runnable is tagged with a generation ID, and
// before the runnable is handled, it first checks whether the HTMLFormElement's
// generation ID is the same as its own generation ID.
let GenerationId(prev_id) = self.generation_id.get();
self.generation_id.set(GenerationId(prev_id + 1));
// Step 2
let nav = box PlannedNavigation {
load_data: load_data,
pipeline_id: window.pipeline(),
script_chan: window.main_thread_script_chan().clone(),
generation_id: self.generation_id.get(),
form: Trusted::new(self)
};
// Step 3
window.dom_manipulation_task_source().queue(
DOMManipulationTask::PlannedNavigation(nav)).unwrap();
}
/// Interactively validate the constraints of form elements
/// https://html.spec.whatwg.org/multipage/#interactively-validate-the-constraints
fn interactive_validation(&self) -> Result<(), ()> {
// Step 1-3
let _unhandled_invalid_controls = match self.static_validation() {
Ok(()) => return Ok(()),
Err(err) => err
};
// TODO: Report the problems with the constraints of at least one of
// the elements given in unhandled invalid controls to the user
// Step 4
Err(())
}
/// Statically validate the constraints of form elements
/// https://html.spec.whatwg.org/multipage/#statically-validate-the-constraints
fn static_validation(&self) -> Result<(), Vec<FormSubmittableElement>> {
let node = self.upcast::<Node>();
// FIXME(#3553): This is an incorrect way of getting controls owned by the
// form, refactor this when html5ever's form owner PR lands
// Step 1-3
let invalid_controls = node.traverse_preorder().filter_map(|field| {
if let Some(_el) = field.downcast::<Element>() {
None // Remove this line if you decide to refactor
// XXXKiChjang: Form control elements should each have a candidate_for_validation
// and satisfies_constraints methods
} else {
None
}
}).collect::<Vec<FormSubmittableElement>>();
// Step 4
if invalid_controls.is_empty() { return Ok(()); }
// Step 5-6
let unhandled_invalid_controls = invalid_controls.into_iter().filter_map(|field| {
let event = field.as_event_target()
.fire_event("invalid",
EventBubbles::DoesNotBubble,
EventCancelable::Cancelable);
if !event.DefaultPrevented() { return Some(field); }
None
}).collect::<Vec<FormSubmittableElement>>();
// Step 7
Err(unhandled_invalid_controls)
}
/// https://html.spec.whatwg.org/multipage/#constructing-the-form-data-set
/// Steps range from 1 to 3
fn get_unclean_dataset(&self, submitter: Option<FormSubmitter>) -> Vec<FormDatum> {
let node = self.upcast::<Node>();
// FIXME(#3553): This is an incorrect way of getting controls owned
// by the form, but good enough until html5ever lands
let mut data_set = Vec::new();
for child in node.traverse_preorder() {
// Step 3.1: The field element is disabled.
match child.downcast::<Element>() {
Some(el) if !el.disabled_state() => (),
_ => continue,
}
// Step 3.1: The field element has a datalist element ancestor.
if child.ancestors()
.any(|a| Root::downcast::<HTMLDataListElement>(a).is_some()) {
continue;
}
if let NodeTypeId::Element(ElementTypeId::HTMLElement(element)) = child.type_id() {
match element {
HTMLElementTypeId::HTMLInputElement => {
let input = child.downcast::<HTMLInputElement>().unwrap();
// Step 3.2-3.7
if let Some(datum) = input.form_datum(submitter) {
data_set.push(datum);
}
}
HTMLElementTypeId::HTMLButtonElement |
HTMLElementTypeId::HTMLObjectElement => {
// Unimplemented
()
}
HTMLElementTypeId::HTMLSelectElement => {
let select = child.downcast::<HTMLSelectElement>().unwrap();
select.push_form_data(&mut data_set);
}
HTMLElementTypeId::HTMLTextAreaElement => {
let textarea = child.downcast::<HTMLTextAreaElement>().unwrap();
let name = textarea.Name();
if !name.is_empty() {
data_set.push(FormDatum {
ty: textarea.Type(),
name: name,
value: FormDatumValue::String(textarea.Value())
});
}
}
_ => ()
}
}
}
data_set
// TODO: Handle `dirnames` (needs directionality support)
// https://html.spec.whatwg.org/multipage/#the-directionality
}
/// https://html.spec.whatwg.org/multipage/#constructing-the-form-data-set
pub fn get_form_dataset(&self, submitter: Option<FormSubmitter>) -> Vec<FormDatum> {
fn clean_crlf(s: &str) -> DOMString {
// Step 4
let mut buf = "".to_owned();
let mut prev = ' ';
for ch in s.chars() {
match ch {
'\n' if prev != '\r' => {
buf.push('\r');
buf.push('\n');
},
'\n' => {
buf.push('\n');
},
// This character isn't LF but is
// preceded by CR
_ if prev == '\r' => {
// the CR itself was already pushed by the catch-all arm below
buf.push('\n');
buf.push(ch);
},
_ => buf.push(ch)
};
prev = ch;
}
// In case the last character was CR
if prev == '\r' {
buf.push('\n');
}
DOMString::from(buf)
}
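// A sketch of the normalization: "a\nb\r" becomes "a\r\nb\r\n", so every
// bare CR or LF ends up as a CRLF pair.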
// Step 1-3
let mut ret = self.get_unclean_dataset(submitter);
// Step 4
for datum in &mut ret {
match &*datum.ty {
"file" | "textarea" => (),
_ => {
datum.name = clean_crlf(&datum.name);
datum.value = FormDatumValue::String(clean_crlf( match datum.value {
FormDatumValue::String(ref s) => s,
FormDatumValue::File(_) => unreachable!()
}));
}
}
};
// Step 5
ret
}
pub fn reset(&self, _reset_method_flag: ResetFrom) {
// https://html.spec.whatwg.org/multipage/#locked-for-reset
if self.marked_for_reset.get() {
return;
} else {
self.marked_for_reset.set(true);
}
let event = self.upcast::<EventTarget>()
.fire_event("reset",
EventBubbles::Bubbles,
EventCancelable::Cancelable);
if event.DefaultPrevented() {
return;
}
// TODO: This is an incorrect way of getting controls owned
// by the form, but good enough until html5ever lands
for child in self.upcast::<Node>().traverse_preorder() {
match child.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) => {
child.downcast::<HTMLInputElement>().unwrap().reset();
}
// TODO HTMLKeygenElement unimplemented
//NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLKeygenElement)) => {
// // Unimplemented
// {}
//}
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) => {
// Unimplemented
{}
}
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) => {
child.downcast::<HTMLTextAreaElement>().unwrap().reset();
}
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLOutputElement)) => {
// Unimplemented
{}
}
_ => {}
}
};
self.marked_for_reset.set(false);
}
}
pub enum FormDatumValue {
File(Root<File>),
String(DOMString)
}
// #[derive(HeapSizeOf)]
pub struct FormDatum {
pub ty: DOMString,
pub name: DOMString,
pub value: FormDatumValue
}
impl FormDatum {
pub fn value_str(&self) -> String {
match self.value {
FormDatumValue::String(ref s) => String::from(s.clone()),
FormDatumValue::File(ref f) => String::from(f.name().clone())
}
}
}
#[derive(Copy, Clone, HeapSizeOf)]
pub enum FormEncType {
TextPlainEncoded,
UrlEncoded,
FormDataEncoded
}
#[derive(Copy, Clone, HeapSizeOf)]
pub enum FormMethod {
FormGet,
FormPost,
FormDialog
}
#[derive(HeapSizeOf)]
pub enum FormSubmittableElement {
ButtonElement(Root<HTMLButtonElement>),
InputElement(Root<HTMLInputElement>),
// TODO: HTMLKeygenElement unimplemented
// KeygenElement(&'a HTMLKeygenElement),
ObjectElement(Root<HTMLObjectElement>),
SelectElement(Root<HTMLSelectElement>),
TextAreaElement(Root<HTMLTextAreaElement>)
}
impl FormSubmittableElement {
fn as_event_target(&self) -> &EventTarget {
match *self {
FormSubmittableElement::ButtonElement(ref button) => button.r().upcast(),
FormSubmittableElement::InputElement(ref input) => input.r().upcast(),
FormSubmittableElement::ObjectElement(ref object) => object.r().upcast(),
FormSubmittableElement::SelectElement(ref select) => select.r().upcast(),
FormSubmittableElement::TextAreaElement(ref textarea) => textarea.r().upcast()
}
}
}
#[derive(Copy, Clone, HeapSizeOf)]
pub enum FormSubmitter<'a> {
FormElement(&'a HTMLFormElement),
InputElement(&'a HTMLInputElement),
ButtonElement(&'a HTMLButtonElement)
// TODO: image submit, etc etc
}
impl<'a> FormSubmitter<'a> {
fn action(&self) -> DOMString {
match *self {
FormSubmitter::FormElement(form) => form.Action(),
FormSubmitter::InputElement(input_element) => {
input_element.get_form_attribute(&atom!("formaction"),
|i| i.FormAction(),
|f| f.Action())
},
FormSubmitter::ButtonElement(button_element) => {
button_element.get_form_attribute(&atom!("formaction"),
|i| i.FormAction(),
|f| f.Action())
}
}
}
fn enctype(&self) -> FormEncType {
let attr = match *self {
FormSubmitter::FormElement(form) => form.Enctype(),
FormSubmitter::InputElement(input_element) => {
input_element.get_form_attribute(&atom!("formenctype"),
|i| i.FormEnctype(),
|f| f.Enctype())
},
FormSubmitter::ButtonElement(button_element) => {
button_element.get_form_attribute(&atom!("formenctype"),
|i| i.FormEnctype(),
|f| f.Enctype())
}
};
match &*attr {
"multipart/form-data" => FormEncType::FormDataEncoded,
"text/plain" => FormEncType::TextPlainEncoded,
// https://html.spec.whatwg.org/multipage/#attr-fs-enctype
// urlencoded is the default
_ => FormEncType::UrlEncoded
}
}
fn method(&self) -> FormMethod {
let attr = match *self {
FormSubmitter::FormElement(form) => form.Method(),
FormSubmitter::InputElement(input_element) => {
input_element.get_form_attribute(&atom!("formmethod"),
|i| i.FormMethod(),
|f| f.Method())
},
FormSubmitter::ButtonElement(button_element) => {
button_element.get_form_attribute(&atom!("formmethod"),
|i| i.FormMethod(),
|f| f.Method())
}
};
match &*attr {
"dialog" => FormMethod::FormDialog,
"post" => FormMethod::FormPost,
_ => FormMethod::FormGet
}
}
fn target(&self) -> DOMString {
match *self {
FormSubmitter::FormElement(form) => form.Target(),
FormSubmitter::InputElement(input_element) => {
input_element.get_form_attribute(&atom!("formtarget"),
|i| i.FormTarget(),
|f| f.Target())
},
FormSubmitter::ButtonElement(button_element) => {
button_element.get_form_attribute(&atom!("formtarget"),
|i| i.FormTarget(),
|f| f.Target())
}
}
}
fn no_validate(&self, _form_owner: &HTMLFormElement) -> bool {
match *self {
FormSubmitter::FormElement(form) => form.NoValidate(),
FormSubmitter::InputElement(input_element) => {
input_element.get_form_boolean_attribute(&atom!("formnovalidate"),
|i| i.FormNoValidate(),
|f| f.NoValidate())
}
FormSubmitter::ButtonElement(button_element) => {
button_element.get_form_boolean_attribute(&atom!("formnovalidate"),
|i| i.FormNoValidate(),
|f| f.NoValidate())
}
}
}
}
pub trait FormControl: DerivedFrom<Element> + Reflectable {
// FIXME: This is wrong (https://github.com/servo/servo/issues/3553)
// but we need html5ever to do it correctly
fn form_owner(&self) -> Option<Root<HTMLFormElement>> {
// https://html.spec.whatwg.org/multipage/#reset-the-form-owner
let elem = self.to_element();
let owner = elem.get_string_attribute(&atom!("form"));
if !owner.is_empty() {
let doc = document_from_node(elem);
let owner = doc.GetElementById(owner);
if let Some(ref o) = owner {
let maybe_form = o.downcast::<HTMLFormElement>();
if maybe_form.is_some() {
return maybe_form.map(Root::from_ref);
}
}
}
elem.upcast::<Node>().ancestors().filter_map(Root::downcast).next()
}
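// Prefer the control's own attribute (e.g. formaction on the button or
// input) and fall back to the owning form's value when it is absent.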
fn get_form_attribute<InputFn, OwnerFn>(&self,
attr: &Atom,
input: InputFn,
owner: OwnerFn)
-> DOMString
where InputFn: Fn(&Self) -> DOMString,
OwnerFn: Fn(&HTMLFormElement) -> DOMString
{
if self.to_element().has_attribute(attr) {
input(self)
} else {
self.form_owner().map_or(DOMString::new(), |t| owner(t.r()))
}
}
fn get_form_boolean_attribute<InputFn, OwnerFn>(&self,
attr: &Atom,
input: InputFn,
owner: OwnerFn)
-> bool
where InputFn: Fn(&Self) -> bool,
OwnerFn: Fn(&HTMLFormElement) -> bool
{
if self.to_element().has_attribute(attr) {
input(self)
} else {
self.form_owner().map_or(false, |t| owner(t.r()))
}
}
fn to_element(&self) -> &Element {
self.upcast()
}
// XXXKiChjang: Implement these on inheritors
// fn candidate_for_validation(&self) -> bool;
// fn satisfies_constraints(&self) -> bool;
}
impl VirtualMethods for HTMLFormElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("name") => AttrValue::from_atomic(value),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
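/// Runnable queued by `plan_to_navigate`: it only performs the navigation
/// if the form's generation id still matches the one captured when it was
/// queued, so a newer submission invalidates older pending navigations.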
struct PlannedNavigation {
load_data: LoadData,
pipeline_id: PipelineId,
script_chan: Sender<MainThreadScriptMsg>,
generation_id: GenerationId,
form: Trusted<HTMLFormElement>
}
impl Runnable for PlannedNavigation {
fn handler(self: Box<PlannedNavigation>) {
if self.generation_id == self.form.root().generation_id.get() {
let script_chan = self.script_chan.clone();
script_chan.send(MainThreadScriptMsg::Navigate(self.pipeline_id, self.load_data)).unwrap();
}
}
}<|fim▁end|> | }
}
}
let filter = box ElementsFilter { form: Root::from_ref(self) }; |
<|file_name|>Type.java<|end_file_name|><|fim▁begin|>package com.fordprog.matrix.interpreter.type;
public enum Type {
RATIONAL,
MATRIX,
FUNCTION,<|fim▁hole|><|fim▁end|> |
VOID
} |
<|file_name|>parameter_tests_edges.py<|end_file_name|><|fim▁begin|># coding: utf-8
# In[1]:
import os
from shutil import copyfile
import subprocess
from save_embedded_graph27 import main_binary as embed_main
from spearmint_ghsom import main as ghsom_main
import numpy as np
import pickle
from time import time
def save_obj(obj, name):
with open(name + '.pkl', 'wb') as f:
pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def load_obj(name):
with open(name + '.pkl', 'rb') as f:
return pickle.load(f)
#root dir
os.chdir("C:\Miniconda3\Jupyter\GHSOM_simplex_dsd")
#save directory
dir = os.path.abspath("parameter_tests_edges")
#number of times to repeat
num_repeats = 30
#number of nodes in the graph
N = 64
#make save directory
if not os.path.isdir(dir):
os.mkdir(dir)
#change to dir
os.chdir(dir)
#network file names -- output of network generator
network = "network.dat"
first_level = "community.dat"
#community labels
labels = 'firstlevelcommunity'
#mixing factors
mu = 0.1
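#candidate edge counts and e_sg settings to sweep (descending order)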
num_edges_ls = [256, 512, 1024]
parameter_settings = [0.5, 0.6, 0.7, 0.8, 0.9, 1][::-1]
overall_nmi_scores = np.zeros((len(num_edges_ls), len(parameter_settings)))
for i in range(len(num_edges_ls)):
#number of edges
num_edges = num_edges_ls[i]
#create directory
dir_string = os.path.join(dir, str(num_edges))
if not os.path.isdir(dir_string):
os.mkdir(dir_string)
#change working directory
os.chdir(dir_string)
for j in range(len(parameter_settings)):
<|fim▁hole|>
#ghsom parameters
params = {'w': 0.0001,
'eta': 0.0001,
'sigma': 1,
'e_sg': p,
'e_en': 0.8}
#create directory
dir_string_p = os.path.join(dir_string, str(p))
if not os.path.isdir(dir_string_p):
os.mkdir(dir_string_p)
#change working directory
os.chdir(dir_string_p)
if os.path.isfile('nmi_scores.csv'):
print 'already completed {}/{}, loading scores and continuing'.format(num_edges, p)
nmi_scores = np.genfromtxt('nmi_scores.csv', delimiter=',')
overall_nmi_scores[i,j] = np.mean(nmi_scores, axis=0)
continue
#copy executable
ex = "benchmark.exe"
if not os.path.isfile(ex):
source = "C:\\Users\\davem\\Documents\\PhD\\Benchmark Graph Generators\\binary_networks\\benchmark.exe"
copyfile(source, ex)
#record NMI scores
if not os.path.isfile('nmi_scores.pkl'):
print 'creating new nmi scores array'
nmi_scores = np.zeros(num_repeats)
else:
print 'loading nmi score progress'
nmi_scores = load_obj('nmi_scores')
#record running times
if not os.path.isfile('running_times.pkl'):
print 'creating new running time array'
running_times = np.zeros(num_repeats)
else:
print 'loading running time progress'
running_times = load_obj('running_times')
print
#generate networks
for r in range(1, num_repeats+1):
#number of communities
num_communities = np.random.randint(1,5)
#number of nodes in micro community
minc = np.floor(float(N) / num_communities)
maxc = np.ceil(float(N) / num_communities)
#average number of edges per node
k = float(num_edges) / N
#max number of edges
maxk = 2 * k
#make benchmark parameter file
filename = "benchmark_flags_{}_{}_{}.dat".format(num_edges,p,r)
if not os.path.isfile(filename):
print 'number of edges: {}'.format(num_edges)
print 'number of communities: {}'.format(num_communities)
print '-N {} -k {} -maxk {} -minc {} -maxc {} -mu {}'.format(N, k, maxk, minc, maxc, mu)
with open(filename,"w") as f:
f.write("-N {} -k {} -maxk {} -minc {} -maxc {} -mu {}".format(N, k, maxk, minc, maxc, mu))
print 'written flag file: {}'.format(filename)
#cmd strings
change_dir_cmd = "cd {}".format(dir_string_p)
generate_network_cmd = "benchmark -f {}".format(filename)
#output of cmd
output_file = open("cmd_output.out", 'w')
network_rename = "{}_{}".format(r,network)
first_level_rename = "{}_{}".format(r,first_level)
gml_filename = 'embedded_network_{}.gml'.format(r)
if not os.path.isfile(network_rename):
process = subprocess.Popen(change_dir_cmd + " && " + generate_network_cmd,
stdout=output_file,
stderr=output_file,
shell=True)
process.wait()
print 'generated graph {}'.format(r)
os.rename(network, network_rename)
os.rename(first_level, first_level_rename)
print 'renamed graph {}'.format(r)
if not os.path.isfile(gml_filename):
##embed graph
embed_main(network_rename, first_level_rename)
print 'embedded graph {} as {} in {}'.format(r, gml_filename, os.getcwd())
##score for this network
if not np.all(nmi_scores[r-1]):
start_time = time()
print 'starting ghsom for: {}/{}/{}'.format(num_edges, p, gml_filename)
nmi_score, communities_detected = ghsom_main(params, gml_filename, labels)
nmi_scores[r-1] = nmi_score
running_time = time() - start_time
print 'running time of algorithm: {}'.format(running_time)
running_times[r-1] = running_time
#save
save_obj(nmi_scores, 'nmi_scores')
save_obj(running_times, 'running_times')
print 'saved nmi score for network {}: {}'.format(gml_filename, nmi_score)
print
##output nmi scores to csv file
print 'writing nmi scores and running times to file'
np.savetxt('nmi_scores.csv',nmi_scores,delimiter=',')
np.savetxt('running_times.csv',running_times,delimiter=',')
print
#add to overall list
overall_nmi_scores[i,j] = np.mean(nmi_scores, axis=0)
print 'DONE'
print 'OVERALL NMI SCORES'
print overall_nmi_scores
# In[3]:
for scores in overall_nmi_scores:
print scores
idx = np.argsort(scores)[::-1]
print parameter_settings[idx[0]]<|fim▁end|> |
#setting for e_sg
p = parameter_settings[j]
|
<|file_name|>aria_combobox_dojo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""Test of Dojo combo box presentation."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(PauseAction(5000))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Tab to the first combo box",
["BRAILLE LINE: 'US State test 1 (200% Courier font): California $l'",
" VISIBLE: '(200% Courier font): California ', cursor=32",
"BRAILLE LINE: 'Focus mode'",
" VISIBLE: 'Focus mode', cursor=0",
"BRAILLE LINE: 'US State test 1 (200% Courier font): California $l'",
" VISIBLE: '(200% Courier font): California ', cursor=32",
"SPEECH OUTPUT: 'collapsed'",
"SPEECH OUTPUT: 'US State test 1 (200% Courier font): entry California selected'",
"SPEECH OUTPUT: 'Focus mode' voice=system"]))
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction("C"))
sequence.append(utils.AssertPresentationAction(
"2. Replace existing text with a 'C'",
["KNOWN ISSUE: The braille line is not quite right",
"BRAILLE LINE: 'US State test 1 (200% Courier font): C $l'",
" VISIBLE: '(200% Courier font): C $l', cursor=23",
"BRAILLE LINE: 'US State test 1 (200% Courier font): US State test 1 (200% Courier font): combo box'",
" VISIBLE: 'te test 1 (200% Courier font): U', cursor=32",
"SPEECH OUTPUT: 'expanded'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"3. Down Arrow",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'California menu'",
"SPEECH OUTPUT: 'C alifornia (CA).'"]))
<|fim▁hole|>sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"4. Down Arrow",
["BRAILLE LINE: 'C olorado (CO)'",
" VISIBLE: 'C olorado (CO)', cursor=1",
"SPEECH OUTPUT: 'C olorado (CO).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"5. Down Arrow",
["BRAILLE LINE: 'C onnecticut (CT)'",
" VISIBLE: 'C onnecticut (CT)', cursor=1",
"SPEECH OUTPUT: 'C onnecticut (CT).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"6. Down Arrow",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'C alifornia (CA).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"7. Up Arrow",
["BRAILLE LINE: 'C onnecticut (CT)'",
" VISIBLE: 'C onnecticut (CT)', cursor=1",
"SPEECH OUTPUT: 'C onnecticut (CT).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"8. Up Arrow",
["BRAILLE LINE: 'C olorado (CO)'",
" VISIBLE: 'C olorado (CO)', cursor=1",
"SPEECH OUTPUT: 'C olorado (CO).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"9. Up Arrow",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'C alifornia (CA).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"10. Basic Where Am I - Combo box expanded",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'California menu'",
"SPEECH OUTPUT: 'C alifornia (CA).'",
"SPEECH OUTPUT: '1 of 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Escape"))
sequence.append(utils.AssertPresentationAction(
"11. Escape",
["BRAILLE LINE: 'US State test 1 (200% Courier font): US State test 1 (200% Courier font): combo box'",
" VISIBLE: 'te test 1 (200% Courier font): U', cursor=32",
"BRAILLE LINE: 'US State test 1 (200% Courier font): California $l'",
" VISIBLE: '(200% Courier font): California ', cursor=32",
"SPEECH OUTPUT: 'collapsed'",
"SPEECH OUTPUT: 'US State test 1 (200% Courier font): entry California selected'"]))
sequence.append(utils.AssertionSummaryAction())
sequence.start()<|fim▁end|> | |
<|file_name|>auto-complete-list.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
export default Ember.Component.extend({
tagName: 'ul',
classNames: 'ember-autocomplete-list',<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Nikita Pekin and the smexybot contributors
// See the README.md file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hyper;
use serde_json;
use std::error::Error as StdError;
use std::fmt;
use std::io;
use std::result::Result as StdResult;
use url;
/// A convenient alias for the `Result` type used throughout `smexybot`.
pub type Result<T> = StdResult<T, Error>;
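// A hypothetical caller can then write `fn fetch() -> Result<String>` and
// use `?` on hyper/io/serde_json/url results via the `From` impls below.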
/// Represents errors which occur while using Smexybot.
#[derive(Debug)]
pub enum Error {
/// A `hyper` crate error.
Hyper(hyper::Error),
/// An IO error was encountered.
Io(io::Error),
/// A `serde` crate error.
Serde(serde_json::Error),
/// Error while parsing a URL.
UrlParse(url::ParseError),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
match *self {
Hyper(ref e) => e.fmt(f),<|fim▁hole|> Io(ref e) => e.fmt(f),
Serde(ref e) => e.fmt(f),
UrlParse(ref e) => e.fmt(f),
}
}
}
impl StdError for Error {
fn description(&self) -> &str {
use self::Error::*;
match *self {
Hyper(ref e) => e.description(),
Io(ref e) => e.description(),
Serde(ref e) => e.description(),
UrlParse(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&StdError> {
use self::Error::*;
match *self {
Hyper(ref e) => e.cause(),
Io(ref e) => e.cause(),
Serde(ref e) => e.cause(),
UrlParse(ref e) => e.cause(),
}
}
}
impl From<hyper::Error> for Error {
fn from(error: hyper::Error) -> Error {
Error::Hyper(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Error {
Error::Io(error)
}
}
impl From<serde_json::Error> for Error {
fn from(error: serde_json::Error) -> Error {
Error::Serde(error)
}
}
impl From<url::ParseError> for Error {
fn from(error: url::ParseError) -> Error {
Error::UrlParse(error)
}
}<|fim▁end|> | |
<|file_name|>Test_MonitoringSystem.py<|end_file_name|><|fim▁begin|>"""
It is used to test client->db-> service.
It requires the Monitoring service to be running and installed (so discoverable in the .cfg),
and this monitoring service should be connecting to an ElasticSeach instance
"""
# pylint: disable=invalid-name,wrong-import-position
import unittest
import tempfile
import time
from datetime import datetime
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC import gLogger
from DIRAC.MonitoringSystem.Client.MonitoringClient import MonitoringClient
from DIRAC.Core.DISET.TransferClient import TransferClient
class MonitoringTestCase(unittest.TestCase):
def setUp(self):
gLogger.setLevel('DEBUG')
self.client = MonitoringClient()
self.data = [{u'Status': u'Waiting', 'Jobs': 2, u'timestamp': 1458130176, u'JobSplitType': u'MCStripping',
u'MinorStatus': u'unset', u'Site': u'LCG.GRIDKA.de', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049848', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458130176, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'LCG.PIC.es', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'olupton', u'JobGroup': u'lhcb', u'UserGroup': u'lhcb_user', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458130176, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'LCG.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'olupton', u'JobGroup': u'lhcb', u'UserGroup': u'lhcb_user', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458130176, u'JobSplitType': u'MCStripping',
u'MinorStatus': u'unset', u'Site': u'LCG.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049845', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 34, u'timestamp': 1458141578, u'JobSplitType': u'DataStripping',
u'MinorStatus': u'unset', u'Site': u'Group.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050299', u'UserGroup': u'lhcb_data', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 120, u'timestamp': 1458141578, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'LCG.CERN.ch', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'mvesteri', u'JobGroup': u'lhcb', u'UserGroup': u'lhcb_user', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458141578, u'JobSplitType': u'MCStripping',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049845', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 2, u'timestamp': 1458141578, u'JobSplitType': u'MCStripping',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049848', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458141578, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050286', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 95, u'timestamp': 1458199202, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'Multiple', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'mamartin', u'JobGroup': u'lhcb', u'UserGroup': u'lhcb_user', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 3, u'timestamp': 1458199202, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'Multiple', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'olupton', u'JobGroup': u'lhcb', u'UserGroup': u'lhcb_user', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 129, u'timestamp': 1458199202, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'Multiple', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049844', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 5, u'timestamp': 1458217812, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.IHEP.su', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050232', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 7, u'timestamp': 1458217812, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.IHEP.su', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050234', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 1, u'timestamp': 1458217812, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.IHEP.su', u'Reschedules': 1, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050236', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 3, u'timestamp': 1458217812, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.IHEP.su', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050238', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 2, u'timestamp': 1458217812, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.IHEP.su', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050248', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 12, u'timestamp': 1458218413, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050248', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 5, u'timestamp': 1458218413, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050250', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 4, u'timestamp': 1458218413, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050251', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 1, u'timestamp': 1458218413, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.CNAF.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',<|fim▁hole|> u'MinorStatus': u'unset', u'Site': u'LCG.NIKHEF.nl', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050248', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 3, u'timestamp': 1458219012, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.NIKHEF.nl', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050251', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 1, u'timestamp': 1458222013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.Bologna.it', u'Reschedules': 0,
u'ApplicationStatus': u'unset', u'User': u'phicharp', u'JobGroup': u'00050303',
u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 7, u'timestamp': 1458222013, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'LCG.Bristol.uk', u'Reschedules': 0,
u'ApplicationStatus': u'unset', u'User': u'clangenb', u'JobGroup': u'lhcb',
u'UserGroup': u'lhcb_user', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 2, u'timestamp': 1458222013, u'JobSplitType': u'User',
u'MinorStatus': u'unset', u'Site': u'LCG.Bristol.uk', u'Reschedules': 0,
u'ApplicationStatus': u'unset', u'User': u'mrwillia', u'JobGroup': u'lhcb',
u'UserGroup': u'lhcb_user',
u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 1, u'timestamp': 1458222013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.Bari.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050244', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 11, u'timestamp': 1458222013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.Bari.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050246', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 22, u'timestamp': 1458222013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.Bari.it', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050248', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 23, u'timestamp': 1458225013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.DESYZN.de', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049844', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 18, u'timestamp': 1458225013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.DESYZN.de', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00049847', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 1, u'timestamp': 1458225013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.DESYZN.de', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050238', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 1, u'timestamp': 1458225013, u'JobSplitType': u'MCSimulation',
u'MinorStatus': u'unset', u'Site': u'LCG.DESYZN.de', u'Reschedules': 0,
u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050246', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458226213, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.RRCKI.ru', u'Reschedules': 0,
u'ApplicationStatus': u'unset', u'User': u'phicharp', u'JobGroup': u'00050243',
u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458226213, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.RRCKI.ru', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050251', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458226213, u'JobSplitType': u'MCStripping',
u'MinorStatus': u'unset', u'Site': u'LCG.RRCKI.ru', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050256', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458226213, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050229', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458226213, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050241', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 1, u'timestamp': 1458226213, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050243', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Waiting', 'Jobs': 2, u'timestamp': 1458226213, u'JobSplitType': u'MCReconstruction',
u'MinorStatus': u'unset', u'Site': u'LCG.RAL.uk', u'Reschedules': 0, u'ApplicationStatus': u'unset',
u'User': u'phicharp', u'JobGroup': u'00050247', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'}]
def tearDown(self):
pass
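# Write-side tests: addMonitoringRecords is called with an apparently invalid
# monitoring type ('moni'), so an error result (one carrying a 'Message' key) is
# expected, while addRecords bulk-inserts the fixture into the wmshistory_index.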
class MonitoringInsertData(MonitoringTestCase):
def test_addMonitoringRecords(self):
result = self.client.addMonitoringRecords('moni', 'WMSHistory', self.data)
self.assertTrue(result['Message'])
def test_bulkinsert(self):
result = self.client.addRecords("wmshistory_index", "WMSHistory", self.data)
self.assertTrue(result['OK'])
self.assertEqual(result['Value'], len(self.data))
time.sleep(10)
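# The sleep above gives Elasticsearch time to index the freshly inserted documents
# before the read-side tests below (reports, plots and raw-data retrieval) query them.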
class MonitoringTestChain(MonitoringTestCase):
def test_listReports(self):
result = self.client.listReports('WMSHistory')
self.assertTrue(result['OK'])
self.assertEqual(result['Value'], ['AverageNumberOfJobs', 'NumberOfJobs', 'NumberOfReschedules'])
def test_listUniqueKeyValues(self):
result = self.client.listUniqueKeyValues('WMSHistory')
self.assertTrue(result['OK'])
self.assertTrue('Status' in result['Value'])
self.assertTrue('JobSplitType' in result['Value'])
self.assertTrue('MinorStatus' in result['Value'])
self.assertTrue('Site' in result['Value'])
self.assertTrue('ApplicationStatus' in result['Value'])
self.assertTrue('User' in result['Value'])
self.assertTrue('JobGroup' in result['Value'])
self.assertTrue('UserGroup' in result['Value'])
self.assertDictEqual(result['Value'], {u'Status': [],
u'JobSplitType': [],
u'MinorStatus': [],
u'Site': [],
u'ApplicationStatus': [],
u'User': [],
u'JobGroup': [],
u'UserGroup': []})
def test_generatePlot(self):
params = (
'WMSHistory', 'NumberOfJobs', datetime(
2016, 3, 16, 12, 30, 0, 0), datetime(
2016, 3, 17, 19, 29, 0, 0), {
'grouping': ['Site']}, 'Site', {})
result = self.client.generateDelayedPlot(*params)
self.assertTrue(result['OK'])
# self.assertEqual(
# result['Value'],
# {
# plot = 'Z:eNpljcEKwjAQRH8piWLbvQkeRLAeKnhOm7Us2CTsbsH69UYUFIQZZvawb4LUMKQYdjRoKH3kNGeK403W0JEiolSAMZ\
# xpwodXcsZukFZItipukFyxeSmiNIB3Zb_lUQL-wD4ssQYYc2Jt_VQuB-089cin6yH1Ur5FPev_\
# UgnrSjXfpRp0yfjGGLgcuz2JJl7wCYg6Slo='
# 'plot': plot,
# 'thumbnail': False})
def test_getPlot(self):
tempFile = tempfile.TemporaryFile()
transferClient = TransferClient('Monitoring/Monitoring')
params = (
'WMSHistory', 'NumberOfJobs', datetime(
2016, 3, 16, 12, 30, 0, 0), datetime(
2016, 3, 17, 19, 29, 0, 0), {
'grouping': ['Site']}, 'Site', {})
result = self.client.generateDelayedPlot(*params)
self.assertTrue(result['OK'])
result = transferClient.receiveFile(tempFile, result['Value']['plot'])
self.assertTrue(result['OK'])
def test_getReport(self):
params = (
'WMSHistory', 'NumberOfJobs', datetime(
2016, 3, 16, 12, 30, 0, 0), datetime(
2016, 3, 17, 19, 29, 0, 0), {
'grouping': ['Site']}, 'Site', {})
result = self.client.getReport(*params)
self.assertTrue(result['OK'])
self.assertDictEqual(result['Value'],
{'data': {u'Multiple': {1458198000: 227.0},
u'LCG.RRCKI.ru': {1458225000: 3.0},
u'LCG.IHEP.su': {1458217800: 18.0},
u'LCG.CNAF.it': {1458144000: None,
1458172800: None,
1458194400: None,
1458145800: None,
1458189000: None,
1458147600: None,
1458178200: None,
1458183600: None,
1458212400: None,
1458149400: None,
1458207000: None,
1458151200: None,
1458169200: None,
1458201600: None,
1458153000: None,
1458196200: None,
1458154800: None,
1458174600: None,
1458190800: None,
1458156600: None,
1458185400: None,
1458214200: None,
1458158400: None,
1458180000: None,
1458216000: None,
1458208800: None,
1458160200: None,
1458203400: None,
1458162000: None,
1458142200: None,
1458198000: None,
1458163800: None,
1458192600: None,
1458165600: None,
1458176400: None,
1458187200: None,
1458167400: None,
1458210600: None,
1458140400: 4.0,
1458181800: None,
1458205200: None,
1458171000: None,
1458217800: 22.0,
1458199800: None},
u'LCG.NIKHEF.nl': {1458217800: 27.0},
u'LCG.Bari.it': {1458221400: 34.0},
u'Group.RAL.uk': {1458140400: 34.0},
u'LCG.DESYZN.de': {1458225000: 43.0},
u'LCG.RAL.uk': {1458144000: None,
1458158400: None,
1458194400: None,
1458145800: None,
1458223200: None,
1458189000: None,
1458221400: None,
1458225000: 5.0,
1458147600: None,
1458135000: None,
1458183600: None,
1458212400: None,
1458149400: None,
1458178200: None,
1458207000: None,
1458151200: None,
1458169200: None,
1458172800: None,
1458219600: None,
1458201600: None,
1458153000: None,
1458196200: None,
1458154800: None,
1458160200: None,
1458190800: None,
1458156600: None,
1458185400: None,
1458214200: None,
1458129600: 2.0,
1458165600: None,
1458180000: None,
1458216000: None,
1458208800: None,
1458131400: None,
1458174600: None,
1458203400: None,
1458162000: None,
1458171000: None,
1458198000: None,
1458163800: None,
1458192600: None,
1458136800: None,
1458133200: None,
1458187200: None,
1458167400: None,
1458181800: None,
1458210600: None,
1458140400: None,
1458138600: None,
1458176400: None,
1458205200: None,
1458142200: None,
1458217800: None,
1458199800: None},
u'LCG.PIC.es': {1458129600: 1.0},
u'LCG.GRIDKA.de': {1458129600: 2.0},
u'LCG.Bristol.uk': {1458221400: 9.0},
u'LCG.CERN.ch': {1458140400: 120.0},
u'LCG.Bologna.it': {1458221400: 1.0}},
'granularity': 1800})
def test_getLastDayData(self):
params = {'Status': 'Running', 'Site': 'LCG.NIKHEF.nl'}
result = self.client.getLastDayData('WMSHistory', params)
self.assertTrue(result['OK'])
self.assertEqual(len(result['Value']), 2)
self.assertEqual(sorted(result['Value'][0].keys()), sorted([u'Status',
u'Jobs',
u'JobSplitType',
u'timestamp',
u'MinorStatus',
u'Site',
u'Reschedules',
u'ApplicationStatus',
u'User',
u'JobGroup',
u'UserGroup']))
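# Cleanup: drops the per-day index created above, plus a negative test verifying
# that deleting a non-existing index returns an error.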
class MonitoringDeleteChain(MonitoringTestCase):
def test_deleteNonExistingIndex(self):
res = self.client.deleteIndex("alllaaaaa")
self.assertTrue(res['Message'])
def test_deleteIndex(self):
today = datetime.today().strftime("%Y-%m-%d")
result = "%s-%s" % ('wmshistory_index', today)
res = self.client.deleteIndex(result)
self.assertTrue(res['OK'])
self.assertTrue('_index-%s' % today in res['Value'])
if __name__ == '__main__':
testSuite = unittest.defaultTestLoader.loadTestsFromTestCase(MonitoringTestCase)
testSuite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(MonitoringInsertData))
testSuite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(MonitoringTestChain))
testSuite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(MonitoringDeleteChain))
unittest.TextTestRunner(verbosity=2).run(testSuite)<|fim▁end|> | u'User': u'phicharp', u'JobGroup': u'00050280', u'UserGroup': u'lhcb_mc', u'metric': u'WMSHistory'},
{u'Status': u'Running', 'Jobs': 24, u'timestamp': 1458219012, u'JobSplitType': u'MCSimulation', |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for myproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
DJ_PROJECT_DIR = os.path.dirname(__file__)
BASE_DIR = os.path.dirname(DJ_PROJECT_DIR)<|fim▁hole|>
import sys
sys.path.append(os.path.join(REPO_DIR, 'libs'))
import secrets
SECRETS = secrets.getter(os.path.join(DATA_DIR, 'secrets.json'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = SECRETS['secret_key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DEBUG') == 'True'
from socket import gethostname
ALLOWED_HOSTS = [
gethostname(), # For internal OpenShift load balancer security purposes.
os.environ.get('OPENSHIFT_APP_DNS'), # Dynamically map to the OpenShift gear name.
#'example.com', # First DNS alias (set up in the app)
#'www.example.com', # Second DNS alias (set up in the app)
]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'corsheaders',
'devices',
'devices.bpm'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# GETTING-STARTED: change 'project' to your project name:
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'api', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': os.environ.get('OPENSHIFT_POSTGRESQL_DB_USERNAME'),
'PASSWORD': os.environ.get('OPENSHIFT_POSTGRESQL_DB_PASSWORD'),
'HOST': os.environ.get('OPENSHIFT_POSTGRESQL_DB_HOST'), # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': os.environ.get('OPENSHIFT_POSTGRESQL_DB_PORT'), # Set to empty string for default.
}
}
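# Note: the credentials above come from the OpenShift gear's environment; outside
# OpenShift they resolve to None, so for local development one would typically
# override DATABASES (e.g. with a local sqlite3 backend) before running the project.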
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(WSGI_DIR, 'static')
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
CORS_ORIGIN_ALLOW_ALL = True
FIREBASE_URL = 'https://ubervest.firebaseio.com'<|fim▁end|> | WSGI_DIR = os.path.dirname(BASE_DIR)
REPO_DIR = os.path.dirname(WSGI_DIR)
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR', BASE_DIR) |
<|file_name|>filter-projects.pipe.ts<|end_file_name|><|fim▁begin|>import { Pipe, PipeTransform } from '@angular/core';
import { Project } from '../models/Project';
/**
 * Filters an array of projects by name (case-insensitive substring match)
*/
@Pipe({
name: 'filterProjects',<|fim▁hole|> pure: true
})
export class FilterProjectsPipe implements PipeTransform {
  transform(items: Project[], filter: string): Project[] {
    return items.filter(project => project.name.toLowerCase().indexOf(filter.toLowerCase()) !== -1);
}
}<|fim▁end|> | |
<|file_name|>DateUtils.ts<|end_file_name|><|fim▁begin|>/*
* @license
* Copyright Hôpitaux Universitaires de Genève. All Rights Reserved.
*
* Use of this source code is governed by an Apache-2.0 license that can be
* found in the LICENSE file at https://github.com/DSI-HUG/dejajs-components/blob/master/LICENSE
*/
import { DatePipe } from '@angular/common';
<|fim▁hole|> */
export class DateUtils {
public static SYSTEM_DATE_FORMAT = 'yyyy-MM-dd HH:mm:ss';
private static datePipe = new DatePipe('fr-CH');
/**
     * Format a JavaScript Date object as a 'yyyy-MM-dd HH:mm:ss' string
* @param date
* @returns {string}
*/
public static formatSystem(date: Date): string {
return DateUtils.datePipe.transform(date, DateUtils.SYSTEM_DATE_FORMAT);
}
}<|fim▁end|> | /**
* Date conversion for DPI standards |
<|file_name|>myconfig.py<|end_file_name|><|fim▁begin|>#These settings must be replaced with the parameters of your own server: database users, passwords, mail account, host and port
userDb = "userDb"
passDb = "passDb"
mail = "*********@gmail.com"
passMail = "passMail"
nameDb = "domotics_db"
urlDb = "urlDb"
serverPort = 8080
#Security Code Device
updateCode = "UPDATE device SET code = '%s' WHERE id = '%s' AND (code = '%s' OR connectionStatus = 0)"
updateCodeRemote = "UPDATE device SET code = '%s' WHERE idDevice = '%s'"
#manage Port
selectGetPort = "SELECT port FROM device WHERE id = '%s' AND code ='%s'"
#Remotes
selectGetDevicesRemote = "SELECT deviceRemote.id AS id, deviceRemote.pipeSend AS pipeSend, deviceRemote.pipeRecv AS pipeRecv, deviceRemote.type AS type FROM device deviceRemote, device deviceCentral WHERE deviceRemote.idDevice = deviceCentral.id AND deviceCentral.id = '%s' AND deviceCentral.code = '%s'"
#Type device
selectGetTypeDevice = "SELECT type FROM device WHERE id = '%s' AND code ='%s'"
#Get User id
selectUserId = "SELECT id FROM user WHERE login = '%s' AND password = '%s'"
#Check users and mails
selectUserExists = "SELECT login FROM user WHERE login = '%s'"
selectMailExists = "SELECT login FROM user WHERE mail = '%s'"
selectUserExistsCheck = "SELECT login FROM user WHERE login = '%s' AND active = '1'"
selectMailExistsWithoutCheck = "SELECT login FROM user WHERE mail = '%s' AND active != '1'"
#SignIn user
insertSignIn = "INSERT INTO user (login, name, mail, password, active) VALUES ('%s', '%s', '%s', '%s', '%d')"
updateSignIn = "UPDATE user SET login = '%s', name = '%s', password = '%s', active = '%d' WHERE mail = '%s'"
#Check SignIn
updateCheckSignIn = "UPDATE user SET active = 1 WHERE login = '%s' AND password = '%s' AND active = '%s'"
#LogIn
selectLogIn = "SELECT id, name, active FROM user WHERE login = '%s' AND password = '%s' AND active = '1'"
#List locations of user
selectLocationsUser = "SELECT location.id AS id, location.name AS name, location.security AS security FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s'"
#Check Device User
checkDeviceUser = "SELECT device.id AS idDevice FROM user, device, userLocation, locationDevice WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = user.id AND user.id = '%s' AND device.id = '%s'"
#Check Location User
checkLocationUser = "SELECT userLocation.idLocation AS idLocation FROM userLocation WHERE userLocation.idUser = '%s' AND userLocation.idLocation = '%s'"
#list devices of locations and user
selectDevicesLocation = "SELECT device.id AS id, device.name AS name, device.publicIp AS publicIp, device.privateIp AS privateIp, device.port AS port, DATE_FORMAT(device.timeStamp,'%%d/%%m/%%Y %%H:%%i:%%s') AS timeStamp, device.connectionStatus AS connectionStatus, device.RIPMotion AS RIPMotion, device.alarm AS alarm, device.type AS type, device.idDevice AS idDevice, device.pipeSend AS pipeSend, device.pipeRecv AS pipeRecv, device.code AS code, device.connectionMode AS connectionMode, device.version AS version FROM user, location, device, userLocation, locationDevice WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = location.id AND location.id = '%s' AND location.id = userLocation.idLocation AND userLocation.idUser = user.id AND user.id = '%s'"
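#Note: these templates are filled with Python's '%' operator, which is why literal
#percent signs destined for MySQL (the DATE_FORMAT patterns above) are doubled as '%%'.
#A minimal usage sketch, assuming a hypothetical MySQLdb cursor named `cur`:
#   cur.execute(selectDevicesLocation % (locationId, userId))
#Values are spliced directly into the SQL string, so callers must sanitise them first;
#parameterised queries would be the safer alternative.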
#create new location
selectCheckLocationUser = "SELECT location.name AS name FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s' AND location.name = '%s'"
insertLocation = "INSERT INTO location (name, security) VALUES ('%s','1')"
insertLocationUser = "INSERT INTO userLocation (idUser, idLocation) VALUES ('%s','%s')"
#edit location
selectCheckUpdateLocationUser = "SELECT location.name AS name FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s' AND location.name = '%s' AND location.id != '%s'"
updateLocation = "UPDATE location SET name = '%s' WHERE id = '%s'"
updateLocationSecurity = "UPDATE location SET security = '%s' WHERE id = '%s'"
#delete location
deleteUserLocation = "DELETE FROM userLocation WHERE idLocation = '%s'"
deleteLocation = "DELETE FROM location WHERE id = '%s'"
#insert device
insertDeviceServer = "INSERT INTO device (name, port, timeStamp, type, idDevice) VALUES ('%s', '%s', NOW(), '%s', '%s')"
insertLocationDevice = "INSERT INTO locationDevice (idLocation, idDevice) VALUES ('%s', '%s')"
<|fim▁hole|>#Update Devices
updateDevice = "UPDATE device SET name = '%s', port = '%s', connectionMode = '%s', RIPMotion = '%s' WHERE id = '%s'"
updateDevicePipes = "UPDATE device SET pipeSend = '%s', pipeRecv = '%s' WHERE id = '%s'"
updateIpDevice = "UPDATE device SET publicIp = '%s', privateIp = '%s' WHERE id = '%s' AND code = '%s'"
updateNotOnline = "UPDATE device SET connectionStatus = '0' WHERE connectionStatus != '0' AND TIMEDIFF(NOW(), device.timeStamp) > TIME('00:01:00')"
updateOnline = "UPDATE device SET connectionStatus = '%s', device.timeStamp = NOW() WHERE id = '%s' AND code = '%s'"
#Check Device Remote for Delete
checkDeviceRemote = "SELECT id FROM device WHERE idDevice = '%s'"
#Delete devices
deleteTimerDevice = "DELETE FROM timer WHERE idDevice = '%s'"
deleteAlertDevice = "DELETE FROM alert WHERE idDevice = '%s'"
deleteSensorsData = "DELETE FROM sensors WHERE idDevice = '%s'"
deleteLocationDevice = "DELETE FROM locationDevice WHERE idDevice = '%s'"
deleteDevice = "DELETE FROM device WHERE id = '%s'"
#Security
selectLocationSecurity = "SELECT user.mail AS email, user.name AS nameUser, location.id AS idLocation, location.security AS security, location.name AS nameLocation, device.name AS nameDevice, device.RIPMotion AS RIPMotion, device.alarm AS alarm FROM location, device, locationDevice, userLocation, user WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = location.id AND device.id ='%s' AND device.code = '%s' AND userLocation.idLocation = location.id AND userLocation.idUser = user.id"
updateAlarm = "UPDATE device SET alarm = '%s' WHERE id = '%s'"
selectDevicesLocationOpenPort = "SELECT device.id AS id, device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND device.RIPMotion = '1'"
selectDevicesLocationUserOpenPort = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice, userLocation WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND userLocation.idLocation = locationDevice.idLocation AND userLocation.idUser = '%s'"
selectDevicesOtherLocationOpenPort = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation <> '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1'"
selectDevicesLocationOpenPortCameras = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND device.type = '2'"
checkDeviceAlarmStatus = "SELECT alarm FROM device WHERE id = '%s' AND code ='%s'"
#Alert
insertAlert = "INSERT INTO alert (date, time, type, idDevice) VALUES (CURRENT_DATE(), CURRENT_TIME(), '%s', '%s')"
checkInsertAlert = "SELECT id FROM alert WHERE alert.type = '%s' AND alert.idDevice = '%s' AND alert.date = CURRENT_DATE() AND CURRENT_TIME()-alert.time < TIME('00:02:00')"
selectAlert = "SELECT DATE_FORMAT(alert.date,'%%d/%%m/%%Y') AS date, DATE_FORMAT(alert.time,'%%H:%%i') AS time, alert.type AS type FROM device, alert, locationDevice, userLocation WHERE device.id = alert.idDevice AND device.id = '%s' AND alert.date = STR_TO_DATE('%s','%%d/%%m/%%Y') AND locationDevice.idDevice = device.id AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = '%s' ORDER BY alert.id DESC"
#Sensors
insertSensors = "INSERT INTO sensors (temperature, humidity, pressure, brightness, date, time, idDevice) VALUES ('%s', '%s', '%s', '%s', CURRENT_DATE(), CURRENT_TIME(), '%s')"
selectSensors = "SELECT temperature, humidity, pressure, brightness, DATE_FORMAT(sensors.time,'%%H:%%i') AS time FROM device, sensors, locationDevice, userLocation WHERE device.id = sensors.idDevice AND device.id = '%s' AND sensors.date = STR_TO_DATE('%s','%%d/%%m/%%Y') AND locationDevice.idDevice = device.id AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = '%s' ORDER BY sensors.id DESC"
#Timer
selectTimer = "SELECT id, name, active, DATE_FORMAT(time,'%%H:%%i') AS time, action FROM timer WHERE idDevice = '%s' ORDER BY time"
insertTimer = "INSERT INTO timer (name, active, time, action, idDevice) VALUES ('%s', '1', '%s', '%s', '%s')"
updateTimer = "UPDATE timer SET name = '%s', active = '%s', time = '%s', action = '%s' WHERE id = '%s' and idDevice = '%s'"
deleteTimer = "DELETE FROM timer WHERE id = '%s' and idDevice = '%s'"
selectTimerAutomation = "SELECT timer.action AS action, CURRENT_TIME()-timer.time AS diff FROM timer, device WHERE timer.idDevice = '%s' AND timer.idDevice = device.id AND device.code = '%s' AND timer.active = '1' AND CURRENT_TIME()-timer.time < TIME('00:01:00') AND CURRENT_TIME > timer.time ORDER BY 1"
#SoftwareUpdate
selectDeviceVersion = "SELECT version FROM device WHERE id = '%s' AND code ='%s'"
updateVersionDevice = "UPDATE device SET version = '%s' WHERE id = '%s' AND code = '%s'"<|fim▁end|> | |
<|file_name|>pgbouncer.py<|end_file_name|><|fim▁begin|>"""Pgbouncer check
Collects metrics from the pgbouncer database.
"""
# 3p
import psycopg2 as pg
# project
from checks import AgentCheck, CheckException
class ShouldRestartException(Exception):
pass
class PgBouncer(AgentCheck):
"""Collects metrics from pgbouncer
"""
RATE = AgentCheck.rate
GAUGE = AgentCheck.gauge
DB_NAME = 'pgbouncer'
SERVICE_CHECK_NAME = 'pgbouncer.can_connect'
STATS_METRICS = {
'descriptors': [
('database', 'db'),
],
'metrics': [
('total_requests', ('pgbouncer.stats.requests_per_second', RATE)),
('total_received', ('pgbouncer.stats.bytes_received_per_second', RATE)),
('total_sent', ('pgbouncer.stats.bytes_sent_per_second', RATE)),
('total_query_time', ('pgbouncer.stats.total_query_time', GAUGE)),
('avg_req', ('pgbouncer.stats.avg_req', GAUGE)),
('avg_recv', ('pgbouncer.stats.avg_recv', GAUGE)),
('avg_sent', ('pgbouncer.stats.avg_sent', GAUGE)),
('avg_query', ('pgbouncer.stats.avg_query', GAUGE)),
],
'query': """SHOW STATS""",
}
POOLS_METRICS = {
'descriptors': [
('database', 'db'),
('user', 'user'),
],
'metrics': [
('cl_active', ('pgbouncer.pools.cl_active', GAUGE)),
('cl_waiting', ('pgbouncer.pools.cl_waiting', GAUGE)),
('sv_active', ('pgbouncer.pools.sv_active', GAUGE)),
('sv_idle', ('pgbouncer.pools.sv_idle', GAUGE)),
('sv_used', ('pgbouncer.pools.sv_used', GAUGE)),
('sv_tested', ('pgbouncer.pools.sv_tested', GAUGE)),
('sv_login', ('pgbouncer.pools.sv_login', GAUGE)),
('maxwait', ('pgbouncer.pools.maxwait', GAUGE)),
],
'query': """SHOW POOLS""",
}
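    # Each *_METRICS scope pairs a pgbouncer admin-console query with a column mapping:
    # for every result row, the first len('descriptors') columns become tags and each
    # remaining column is submitted under the metric name/type pairs listed above.
    # Illustrative (hypothetical) SHOW POOLS row:
    #   ('mydb', 'bob', 3, 0, 2, 1, 0, 0, 0, 0)
    #   -> tags ['db:mydb', 'user:bob'], gauge pgbouncer.pools.cl_active = 3, and so on.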
def __init__(self, name, init_config, agentConfig, instances=None):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
self.dbs = {}
def _get_service_checks_tags(self, host, port):
service_checks_tags = [
"host:%s" % host,
"port:%s" % port,
"db:%s" % self.DB_NAME
]
return service_checks_tags
def _collect_stats(self, db, instance_tags):
"""Query pgbouncer for various metrics
"""
metric_scope = [self.STATS_METRICS, self.POOLS_METRICS]
try:
cursor = db.cursor()
results = None
for scope in metric_scope:
metrics = scope['metrics']
cols = [m[0] for m in metrics]
try:
query = scope['query']
self.log.debug("Running query: %s" % query)
cursor.execute(query)
results = cursor.fetchall()
except pg.Error, e:
self.log.warning("Not all metrics may be available: %s" % str(e))
continue
<|fim▁hole|> if row[0] == self.DB_NAME:
continue
desc = scope['descriptors']
if len(row) == len(cols) + len(desc) + 1:
# Some versions of pgbouncer have an extra field at the end of show pools
row = row[:-1]
assert len(row) == len(cols) + len(desc)
tags = list(instance_tags)
tags += ["%s:%s" % (d[0][1], d[1]) for d in zip(desc, row[:len(desc)])]
for i, (key_name, (mname, mtype)) in enumerate(metrics):
value = row[i + len(desc)]
mtype(self, mname, value, tags)
if not results:
self.warning('No results were found for query: "%s"' % query)
cursor.close()
except pg.Error, e:
self.log.error("Connection error: %s" % str(e))
raise ShouldRestartException
def _get_connection(self, key, host, port, user, password, use_cached=True):
"Get and memoize connections to instances"
if key in self.dbs and use_cached:
return self.dbs[key]
elif host != "" and user != "":
try:
if host == 'localhost' and password == '':
# Use ident method
connection = pg.connect("user=%s dbname=%s" % (user, self.DB_NAME))
elif port != '':
connection = pg.connect(host=host, port=port, user=user,
password=password, database=self.DB_NAME)
else:
connection = pg.connect(host=host, user=user, password=password,
database=self.DB_NAME)
connection.set_isolation_level(pg.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
self.log.debug('pgbouncer status: %s' % AgentCheck.OK)
except Exception:
message = u'Cannot establish connection to pgbouncer://%s:%s/%s' % (host, port, self.DB_NAME)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=self._get_service_checks_tags(host, port),
message=message)
self.log.debug('pgbouncer status: %s' % AgentCheck.CRITICAL)
raise
else:
if not host:
raise CheckException("Please specify a PgBouncer host to connect to.")
elif not user:
raise CheckException("Please specify a user to connect to PgBouncer as.")
self.dbs[key] = connection
return connection
def check(self, instance):
host = instance.get('host', '')
port = instance.get('port', '')
user = instance.get('username', '')
password = instance.get('password', '')
tags = instance.get('tags', [])
key = '%s:%s' % (host, port)
if tags is None:
tags = []
else:
tags = list(set(tags))
try:
db = self._get_connection(key, host, port, user, password)
self._collect_stats(db, tags)
except ShouldRestartException:
self.log.info("Resetting the connection")
db = self._get_connection(key, host, port, user, password, use_cached=False)
self._collect_stats(db, tags)
message = u'Established connection to pgbouncer://%s:%s/%s' % (host, port, self.DB_NAME)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,
tags=self._get_service_checks_tags(host, port),
message=message)
self.log.debug('pgbouncer status: %s' % AgentCheck.OK)<|fim▁end|> | for row in results: |
<|file_name|>resnet_3d.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Network definition of 3D ResNet for Action Recognition (CVPR 2018)
Reference : https://github.com/kenshohara/3D-ResNets-PyTorch
"""
# pylint: disable=unused-argument
from tvm import relay
from .init import create_workload
from . import layers
def residual_unit(
data,
num_filter,
stride,
dim_match,
name,
bottle_neck=True,
data_layout="NCDHW",
kernel_layout="OIDHW",
):
"""Return ResNet Unit symbol for building ResNet
Parameters
----------
data : str
Input data
num_filter : int
Number of output channels
bnf : int
Bottle neck channels factor with regard to num_filter
stride : tuple
Stride used in convolution
dim_match : bool
True means channel number between input and output is the same,
otherwise means differ
name : str
Base name of the operators
"""
if bottle_neck:
bn1 = layers.batch_norm_infer(data=data, epsilon=2e-5, name=name + "_bn1")
act1 = relay.nn.relu(data=bn1)
conv1 = layers.conv3d(
data=act1,
channels=int(num_filter * 0.25),
kernel_size=(1, 1, 1),
strides=stride,
padding=(0, 0, 0),
name=name + "_conv1",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
bn2 = layers.batch_norm_infer(data=conv1, epsilon=2e-5, name=name + "_bn2")
act2 = relay.nn.relu(data=bn2)
conv2 = layers.conv3d(
data=act2,
channels=int(num_filter * 0.25),
kernel_size=(3, 3, 3),
strides=(1, 1, 1),
padding=(1, 1, 1),
name=name + "_conv2",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
bn3 = layers.batch_norm_infer(data=conv2, epsilon=2e-5, name=name + "_bn3")
act3 = relay.nn.relu(data=bn3)
conv3 = layers.conv3d(
data=act3,
channels=num_filter,
kernel_size=(1, 1, 1),
strides=(1, 1, 1),
padding=(0, 0, 0),
name=name + "_conv3",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
if dim_match:
shortcut = data
else:
shortcut = layers.conv3d(
data=act1,
channels=num_filter,
kernel_size=(1, 1, 1),
strides=stride,
name=name + "_sc",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
return relay.add(conv3, shortcut)
bn1 = layers.batch_norm_infer(data=data, epsilon=2e-5, name=name + "_bn1")
act1 = relay.nn.relu(data=bn1)
conv1 = layers.conv3d(
data=act1,
channels=num_filter,
kernel_size=(3, 3, 3),
strides=stride,
padding=(1, 1, 1),
name=name + "_conv1",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
bn2 = layers.batch_norm_infer(data=conv1, epsilon=2e-5, name=name + "_bn2")
act2 = relay.nn.relu(data=bn2)
conv2 = layers.conv3d(
data=act2,
channels=num_filter,
kernel_size=(3, 3, 3),
strides=(1, 1, 1),
padding=(1, 1, 1),
name=name + "_conv2",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
if dim_match:
shortcut = data
else:
shortcut = layers.conv3d(
data=act1,
channels=num_filter,
kernel_size=(1, 1, 1),
strides=stride,
name=name + "_sc",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
return relay.add(conv2, shortcut)
def resnet(
units,
num_stages,
filter_list,
num_classes,
data_shape,
bottle_neck=True,
layout="NCDHW",
dtype="float32",
):
"""Return ResNet Program.
Parameters
----------
units : list
Number of units in each stage
num_stages : int
Number of stages
filter_list : list
Channel size of each stage
num_classes : int
        Output size of symbol
data_shape : tuple of int.
The shape of input data.
bottle_neck : bool
Whether apply bottleneck transformation.
layout: str
The data layout for conv3d
dtype : str
The global data type.
"""
data_layout = layout
kernel_layout = "OIDHW" if layout == "NCDHW" else "DHWIO"
num_unit = len(units)
assert num_unit == num_stages
data = relay.var("data", shape=data_shape, dtype=dtype)
data = layers.batch_norm_infer(data=data, epsilon=2e-5, scale=False, name="bn_data")
if layout == "NCDHW":
(_, _, _, height, _) = data_shape
else:
(_, _, height, _, _) = data_shape
if height <= 32: # such as cifar10
body = layers.conv3d(
data=data,
channels=filter_list[0],
kernel_size=(3, 3, 3),
strides=(1, 1, 1),
padding=(1, 1, 1),
name="conv0",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
else: # often expected to be 224 such as imagenet
body = layers.conv3d(
data=data,
channels=filter_list[0],
kernel_size=(3, 7, 7),
strides=(1, 2, 2),
padding=(1, 3, 3),
name="conv0",
data_layout=data_layout,
kernel_layout=kernel_layout,
)
body = layers.batch_norm_infer(data=body, epsilon=2e-5, name="bn0")
body = relay.nn.relu(data=body)
# body = relay.nn.max_pool3d(data=body, pool_size=(3, 3), strides=(2, 2), padding=(1, 1),
# layout=data_layout)
for i in range(num_stages):
body = residual_unit(
body,
filter_list[i + 1],
(1 if i == 0 else 2, 1 if i == 0 else 2, 1 if i == 0 else 2),
False,
name="stage%d_unit%d" % (i + 1, 1),
bottle_neck=bottle_neck,
data_layout=data_layout,
kernel_layout=kernel_layout,
)
for j in range(units[i] - 1):
body = residual_unit(
body,
filter_list[i + 1],
(1, 1, 1),<|fim▁hole|> True,
name="stage%d_unit%d" % (i + 1, j + 2),
bottle_neck=bottle_neck,
data_layout=data_layout,
kernel_layout=kernel_layout,
)
bn1 = layers.batch_norm_infer(data=body, epsilon=2e-5, name="bn1")
relu1 = relay.nn.relu(data=bn1)
# Although kernel is not used here when global_pool=True, we should put one
pool1 = relay.nn.global_avg_pool3d(data=relu1, layout=data_layout)
flat = relay.nn.batch_flatten(data=pool1)
fc1 = layers.dense_add_bias(data=flat, units=num_classes, name="fc1")
net = relay.nn.softmax(data=fc1)
return relay.Function(relay.analysis.free_vars(net), net)
def get_net(
batch_size,
num_classes,
num_layers=50,
image_shape=(3, 16, 112, 112),
layout="NCDHW",
dtype="float32",
**kwargs,
):
"""
Adapted from https://github.com/tornadomeet/ResNet/blob/master/train_resnet.py
Original author Wei Wu
"""
if layout == "NCDHW":
(_, _, height, _) = image_shape
else:
(_, height, _, _) = image_shape
data_shape = (batch_size,) + image_shape
if height <= 28:
num_stages = 3
if (num_layers - 2) % 9 == 0 and num_layers >= 164:
per_unit = [(num_layers - 2) // 9]
filter_list = [16, 64, 128, 256]
bottle_neck = True
elif (num_layers - 2) % 6 == 0 and num_layers < 164:
per_unit = [(num_layers - 2) // 6]
filter_list = [16, 16, 32, 64]
bottle_neck = False
else:
raise ValueError("no experiments done on num_layers {}".format(num_layers))
units = per_unit * num_stages
else:
if num_layers >= 50:
filter_list = [64, 256, 512, 1024, 2048]
bottle_neck = True
else:
filter_list = [64, 64, 128, 256, 512]
bottle_neck = False
num_stages = 4
if num_layers == 18:
units = [2, 2, 2, 2]
elif num_layers == 34:
units = [3, 4, 6, 3]
elif num_layers == 50:
units = [3, 4, 6, 3]
elif num_layers == 101:
units = [3, 4, 23, 3]
elif num_layers == 152:
units = [3, 8, 36, 3]
elif num_layers == 200:
units = [3, 24, 36, 3]
elif num_layers == 269:
units = [3, 30, 48, 8]
else:
raise ValueError("no experiments done on num_layers {}".format(num_layers))
return resnet(
units=units,
num_stages=num_stages,
filter_list=filter_list,
num_classes=num_classes,
data_shape=data_shape,
bottle_neck=bottle_neck,
layout=layout,
dtype=dtype,
)
def get_workload(
batch_size=1,
num_classes=1000,
num_layers=18,
image_shape=(3, 16, 112, 112),
layout="NCDHW",
dtype="float32",
**kwargs,
):
"""Get benchmark workload for resnet
Parameters
----------
batch_size : int
The batch size used in the model
num_classes : int, optional
Number of classes
num_layers : int, optional
Number of layers
image_shape : tuple, optional
The input image shape
layout: str
The data layout for conv3d
dtype : str, optional
The data type
kwargs : dict
Extra arguments
Returns
-------
mod : tvm.IRModule
The relay module that contains a ResNet network.
params : dict of str to NDArray
The parameters.
"""
net = get_net(
batch_size=batch_size,
num_classes=num_classes,
num_layers=num_layers,
image_shape=image_shape,
dtype=dtype,
layout=layout,
**kwargs,
)
return create_workload(net)<|fim▁end|> | |
<|file_name|>sciblox.py<|end_file_name|><|fim▁begin|>#------------- Daniel Han-Chen 2017
#------------- https://github.com/danielhanchen/sciblox
#------------- SciBlox v0.02
#-------------
maxcats = 15
import warnings
warnings.filterwarnings("ignore")
true = True; TRUE = True
false = False; FALSE = False
import pip
def install(package): pip.main(['install', package])
#-----------------------------
try:
import pandas as pd, numpy as np, scipy, sklearn as sk, seaborn as sb
from copy import copy
from jupyterthemes import jtplot
import matplotlib.pyplot as plt
jtplot.style()
except:
try:
print("Installing packages... Please wait...")
if __name__ == '__main__':
install('pandas'); install('numpy'); install('scipy'); install('scikit-learn');
install('matplotlib'); install('seaborn'); install('lightgbm');
try: install('jupyterthemes');
except: pass;
try: install('sympy');
except: pass;
try:
install('libpython'); install('theano'); install('fancyimpute');
except: pass;
except: pass;
import pandas as pd, numpy as np, scipy, sklearn as sk, seaborn as sb
from copy import copy
import matplotlib.pyplot as plt
try:
from jupyterthemes import jtplot;
jtplot.style()
except: pass;
#-----------------------------
np.set_printoptions(suppress = True)
pd.set_option('display.max_rows', 10)
pd_colour = '#302f2f'
#-------------
#-------------
#-------------
#-------------
#------------------------------------ DATAFRAME METHODS ------------------------------------#
#-------------------- Display options and pandas methods --------------------#
def maxrows(x = 10): pd.set_option('display.max_rows', x)
def maxcat(x = 15): global maxcats; maxcats = x
def tabcolour(x = '#302f2f'): global pd_colour; pd_colour = x
#-----------------------------
def percent(x):
if x <= 1: return x
else: return x/100
#-----------------------------
def table(x):
try: return pd.DataFrame(x)
except: return pd.DataFrame(list(x.items()))
def series(x):
try: return pd.Series(x)
except:
first = pd.Series(x[0])
if len(first)!=len(x): return pd.Series(T(x)[0])
else: return first
#-----------------------------
def istable(x): return (type(x) in [pd.DataFrame,pd.Series])*1
def isarray(x): return (type(x) in [np.array,np.ndarray,np.matrix])*1
#-----------------------------
def shape(x):
try: return x.shape
except: return len(x)
#-----------------------------
def head(x, n = 5):
if istable(x)==1: return x.head(n)
else:
if len(x) > n: return x[:n]
else: return x
def tail(x, n = 5):
if istable(x)==1: return x.tail(n)
else:
if len(x) > n: return x[-n:]
else: return x
#-----------------------------
def sample(x, n = 5, ordered = False):
if n > len(x): g = len(x)
else: g = n
if istable(x)==1:
if ordered == False: return x.sample(g)
else: return x.iloc[[int(y*(len(x)/g)) for y in range(g)]]
else:
if ordered == False: return np.random.choice(x, g)
else: return np.array(x)[[int(y*(len(x)/g)) for y in range(g)]]
#-----------------------------
def columns(x):
try: return x.columns.tolist()
except: pass;
def index(x):
try: return x.index.tolist()
except: pass;
#-----------------------------
def reset(x, index = True, column = False, string = False, drop = False):
if index == True and column == False:
if drop == False: return x.reset_index()
else: return x.reset_index()[columns(x)]
else:
y = copy(x)
if type(x)==pd.Series: ss = 0
else: ss = shape(x)[1]
if string == True: y.columns = ["col"+str(y) for y in range(ss)]
else: y.columns = [y for y in range(ss)]
return y
#-----------------------------
def hcat(*args):
a = args[0]
if type(a)==pd.Series: a = table(a)
for b in args[1:]:
if type(a)==list:
if type(b)!=list: b = list(b)
a = a + b
elif isarray(a)==1:
if isarray(b)==0: b = array(b)
a = np.hstack((a,b))
else:
if type(b)!=pd.DataFrame: b = table(b)
a = pd.concat([a,b],1)
del b
return a
def vcat(*args):
a = args[0]
if type(a)==pd.Series: a = table(a)
elif type(a)==list: a = array(a)
for b in args[1:]:
if isarray(a)==1:
if isarray(b)==0: b = array(b)
a = np.vstack((a,b))
else:
if type(b)!=pd.DataFrame: b = table(b)
a = pd.concat([a,b],0)
del b
return a
#-----------------------------
def dtypes(x):
if type(x)==pd.Series:
        types = str(x.dtype)
        if types=="object" or "str" in types or "unicode" in types: return 'obj'
        elif "int" in types: return 'int'
        elif "float" in types: return 'float'
        elif "bool" in types: return 'bool'
        else: return 'date'
else:
dfs = x.dtypes
for f in (dfs.index.tolist()):
dfs[f] = str(dfs[f])
if "int" in dfs[f]: dfs[f] = 'int'
elif "float" in dfs[f]: dfs[f] = "float"
elif "bool" in dfs[f]: dfs[f] = "bool"
elif "O" in dfs[f] or "obj" in dfs[f]: dfs[f] = "obj"
elif "date" in dfs[f]: dfs[f] = "date"
else: dfs[f] = "obj"
return dfs
def dtype(x): return dtypes(x)
def contcol(x):
try: return ((dtypes(x)=="int")|(dtypes(x)=="float")).index[(dtypes(x)=="int")|(dtypes(x)=="float")].tolist()
except: return np.nan
def conts(x):
if type(x) == pd.Series:
if dtype(x) in ["int","float"]: return x
else: return np.nan
else: return x[contcol(x)]
def objcol(x):
try: return (dtypes(x)=="obj").index[dtypes(x)=="obj"].tolist()
except: return np.nan
def objects(x):
if type(x) == pd.Series:
if dtype(x) == "obj": return x
else: return np.nan
else: return x[objcol(x)]
def objs(x): return objects(x)
def notobj(x): return exc(x, objcol(x))
def catcol(x):
if type(x) == pd.Series:
if iscat(x) == True: return x
else: return np.nan
else: return (iscat(x).index[iscat(x)]).tolist()
def classcol(x): return cats(x)
def cats(x): return x[catcol(x)]
def classes(x): return x[catcol(x)]
def iscat(x, cat = maxcats):
return ((dtypes(x)!='float')|(dtypes(x)!='int'))&(nunique(x)<=cat)
#-----------------------------
def nullcol(x): return (count(x)!=len(x)).index[count(x)!=len(x)].tolist()
def nacol(x): return nullcol(x)
def missingcol(x): return nullcol(x)
def notnull(x, row = 1, keep = None, col = 0):
if row!=1: axis = 1
elif col!=0: axis = 0
else: axis = 0
if keep is None:
try: return x.dropna(axis = axis)
except: return x.dropna()
else:
if keep < 1:
if axis==1: keep = len(x)*keep
else: keep = shape(x)[1]*keep
return x.dropna(axis = axis, thresh = keep)
def isnull(x, row = 1, keep = None, col = 0):
if row!=1 or col!=0: axis = 0
else: axis = 1
if keep is None: miss = missing(x, row = axis)!=0
else:
if axis == 1:
if keep < 1: miss = missing(x, row = axis)<=shape(x)[1]*keep
else: miss = missing(x, row = axis)<=keep
else:
if keep < 1: miss = missing(x, row = axis)<=len(x)*keep
else: miss = missing(x, row = axis)<=keep
try: return x.iloc[miss.index[miss]]
except: return x[pd.isnull(x)==True]
def dropna(x, col = None):
if col is None: return x.dropna()
else:
if type(col)!=list: col = list(col)
return x.dropna(subset = col)
#-----------------------------
def diff(want, rem):
w = copy(want)
for j in w:
if j in rem: w.remove(j)
for j in rem:
if j in w: w.remove(j)
return w
def exc(x, l):
if type(l) == str: l = [l]
return x[diff(columns(x),l)]
def drop(x, l): return exc(x, l), x[l]
def pop(x, l): return exc(x, l), x[l]
def append(l, r):
g = copy(l);
if type(g)!= list: g = [g]
if type(r) == list:
for a in r: g.append(a)
else: g.append(r)
return g
#-------------
#-------------
#-------------
#-------------
#------------------------------------ OTHER ANALYTICAL METHODS ------------------------------------#
#-------------------- Uniques and counting and sorting --------------------#
def count(x):
try: return x.count()
except: return len(x)
def missing(x, row = 0, col = 1):
if row!=0 or col!=1: x = x.T
try: return (pd.isnull(x)).sum()
except: return (np.isnan(x)).sum()
#-----------------------------
def unique(x, dropna = False):
if dropna == True: x = notnull(x)
if type(x) == pd.Series: return list(x.unique())
elif type(x) == pd.DataFrame: return {col:list(x[col].unique()) for col in columns(x)}
else:
u = []
for a in x:
if dropna == True:
if a not in u and a!=np.nan: u.append(a)
else:
if a not in u: u.append(a)
del a
return u
def nunique(x, dropna = False):
if istable(x)==True: return x.nunique()
else:
u = []; n = 0
for a in x:
if dropna == True:
if a not in u and a!=np.nan: u.append(a); n += 1
else:
if a not in u: u.append(a); n += 1
del u,a
return n
def cunique(x, dropna = False):
if type(x) == pd.Series: return x.value_counts(dropna = dropna)
elif type(x) == pd.DataFrame: return {col:x[col].value_counts() for col in columns(x)}
else:
u = {}
for a in x:
if dropna == True:
if a not in u and a!=np.nan: u[a]=1
else: u[a]+=1
else:
if a not in u: u[a]=1
else: u[a]+=1
del a
return u
def punique(x, dropna = False):
return round(nunique(x, dropna = dropna)/(count(x)+missing(x)*(dropna==False)*1)*100,4)
#-----------------------------
def reverse(x):
if type(x) == pd.Series and dtype(x) == 'bool': return x == False
elif istable(x)==1: return x.iloc[::-1]
elif type(x) == list: return x[::-1]
elif type(x) == dict: return {i[1]:i[0] for i in x.items()}
#-----------------------------
def sort(x, by = None, asc = True, ascending = True, des = False, descending = False):
if type(x) == list:
if asc == ascending == True and des == descending == False: return sorted(x)
else: return reverse(sorted(x))
else:
if type(x) == pd.Series:
if asc == ascending == True and des == descending == False: return x.sort_values(ascending = True)
else: return x.sort_values(ascending = False)
else:
if by is None: col = columns(x)
else: col = by
if asc == ascending == True and des == descending == False: return x.sort_values(ascending = True, by = col)
else: return x.sort_values(ascending = False, by = col)
def fsort(x, by = None, keep = False, asc = True, ascending = True, des = False, descending = False):
if type(x)==pd.Series: x = table(x); x = reset(x, column = True, string = True); by = columns(x)[0];
if type(by)==list: by = by[0]
if type(x) == list:
from collections import Counter
c = copy(x)
if asc == ascending == True and des == descending == False: c.sort(key=Counter(sort(c, asc = True)).get, reverse = True); return c
else: c.sort(key=Counter(sort(c, asc = False)).get, reverse = False); return c
elif by is None: print("Please specify column to sort by: fsort(x, by = 'Name')")
else:
f = by; fg = reset(table(x[f].value_counts()))
ff = f+"_Freq"; fg.columns = [f,ff]
del ff
try: fg[f+"_Length"] = fg[f].str.len()
except: fg[f+"_Length"] = fg[f]
df = x.merge(fg, how = "outer")
if asc == ascending == True and des == descending == False: df = sort(df, [f+"_Freq",f+"_Length"], asc = True)
else: df = sort(df, [f+"_Freq",f+"_Length"], asc = False)
if keep == True: return df
else: l = columns(df); l.remove(f+"_Freq"); l.remove(f+"_Length")
return df[l]
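# Example: fsort(df, by = "Name") reorders rows by how common each Name value is
# (frequency ascending by default, ties broken by string length); keep = True
# retains the helper "_Freq"/"_Length" columns used for the sort.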
#-------------
#-------------
#-------------
#-------------
#------------------------------------ BASIC ANALYSIS METHODS ------------------------------------#
#-------------------- Ratios and detections --------------------#
def freqratio(x):
counted = cunique(x)
if type(x) == pd.Series:
try: return counted[0]/counted[1]
except: return 1
else:
empty = []
for col in columns(x):
try: empty.append(counted[col].iloc[0]/counted[col].iloc[1])
except: empty.append(1)
tab = table(empty); tab.index = columns(x); return tab[0]
def isid(x):
for col in columns(x):
if (nunique(x[col]) == len(x)) or "id" in col.lower() or "index" in col.lower(): return col
else: return ''
def pzero(x): return sum(x==0, axis = 0)/count(x)*100
#-------------
#-------------
#-------------
#-------------
#------------------------------------ MATHEMATICAL METHODS ------------------------------------#
#-------------------- Statistical methods --------------------#
def var(x, axis = 0, dof = 1):
try: return x.var(axis = axis, ddof = dof)
except: return np.nanvar(x, axis = axis, ddof = dof)
def std(x, axis = 0, dof = 1):
try: return x.std(axis = axis, ddof = dof)
except: return np.nanstd(x, axis = axis, ddof = dof)
#-------------
def mean(x, axis = 0):
try: return x.mean(axis = axis)
except: return np.nanmean(x, axis = axis)
def median(x, axis = 0):
try: return x.median(axis = axis)
except: return np.nanmedian(x, axis = axis)
def mode(x, axis = 0):
try: return series(x).mode()[0]
except: return x.mode(axis = axis).iloc[0]
def rng(x, axis = 0):
try: return conts(x).max(axis = axis) - conts(x).min(axis = axis)
except:
try: return max(x)-min(x)
except: return np.nan
#-------------
def percentile(x, p, axis = 0):
if p > 1: p = p/100
try: return x.quantile(p, axis = axis)
except: return np.nanpercentile(x, p, axis = axis)
def iqr(x, axis = 0):
return percentile(x, 0.75, axis = axis) - percentile(x, 0.25, axis = axis)
#-------------
def skewness(x, axis = 0):
try: return x.skew(axis = axis)
except: return scipy.stats.skew(x, axis = axis, nan_policy='omit')
def skew(x, axis = 0): return skewness(x, axis)
def kurtosis(x, axis = 0):
try: return scipy.stats.kurtosis(x, axis = axis, nan_policy='omit')
except: return x.kurt(axis = axis)
def kurt(x, axis = 0): return kurtosis(x, axis)
#-------------
def pnorm(p, mean = 0, var = 1):
if p > 1: p = p/100
return scipy.stats.norm.cdf(p, loc=mean, scale=var)
def qnorm(q, mean = 0, var = 1):
if q > 1: q = q/100
return scipy.stats.norm.ppf(q, loc=mean, scale=var)
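# Caution: scipy's `scale` argument is a standard deviation, so despite the parameter
# name, `var` here should be passed as a standard deviation (not a variance).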
def CI(q, data, method = "mean",U = True, L = True):
if q > 1: q = q/100
norms = qnorm(q+(1-q)/2)*(std(data) / sqrt(len(data)) )
if method == "mean": u = mean(data) + norms; l = mean(data) - norms
if U == L == True: return (l,u)
elif U == True: return u
else: return l
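# Example: CI(95, data) returns the (lower, upper) 95% normal-approximation interval
# for the mean of `data`; CI(95, data, L = False) returns only the upper bound.
# Only method = "mean" is implemented, so other values would leave u/l undefined.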
#-------------
#-------------
#-------------
#-------------
#------------------------------------ TYPES METHODS ------------------------------------#
#-------------------- Changing case --------------------#
def lower(x):
j = copy(x)
if type(x) == list:
for k in range(len(j)):
try: j[k] = j[k].lower()
except: pass;
return j
def upper(x):
j = copy(x)
if type(x) == list:
for k in range(len(j)):
try: j[k] = j[k].upper()
except: pass;
return j
#-------------------- Other types and conversions --------------------#
def int(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = x[y].astype("int64")
except:
try: x[y] = np.floor(x[y])
except: pass
return x
else:
try: return np.int64(df)
except:
try: return np.floor(df)
except: return df
def float(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = x[y].astype("float64")
except: pass
return x
else:
try: return np.float64(df)
except: return df
#-------------
def max(x, axis = 0):
if istable(x)==1: return conts(x).max()
else:
if shape(matrix(x))[0] == 1: return np.amax(x,axis=axis)
else: return np.amax(x)
def min(x, axis = 0):
if istable(x)==1: return conts(x).min()
else:
if shape(matrix(x))[0] == 1: return np.amin(x)
else: return np.amin(x,axis=axis)
#-------------
def argcheck(df, args):
if len(args)==0: col = columns(df)
elif type(args[0])!=list: col = list(args)
else: col = args[0]
return copy(df), col
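# argcheck lets the elementwise wrappers (int, float, abs, log, exp, ...) accept
# target columns either as varargs or as one list: log(df, "a", "b") and
# log(df, ["a", "b"]) both resolve to col = ["a", "b"]; with no columns given,
# every column is transformed.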
#-------------
def abs(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.abs(x[y])
except: pass
return x
else:
try: return np.abs(df)
except: return df
#-------------
def log(df, *args, shift = 0):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.log(x[y]+shift)
except: pass;
return x
else:
try: return np.log(df+shift)
except: return df
#-------------
def exp(df, *args, shift = 0):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.exp(x[y])+shift
except: pass;
return x
else:
try: return np.exp(df)+shift
except: return df
#-------------
def sin(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.sin(x[y])
except: pass;
return x
else:
try: return np.sin(df)
except: return df
#-------------
def cos(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.cos(x[y])
except: pass;
return x
else:
try: return np.cos(df)
except: return df
#-------------
def sqrt(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.sqrt(x[y])
except: pass;
return x
else:
try: return np.sqrt(df)
except: return df
#-------------
def floor(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.floor(x[y])
except: pass;
return x
else:
try: return np.floor(df)
except: return df
#-------------
def ceiling(df, *args):
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
try: x[y] = np.ceil(x[y])
except: pass;
return x
else:
try: return np.ceil(df)
except: return df
def ceil(df, *args): return ceiling(df, *args)
#-------------
def sum(x, axis = 1):
try: return x.sum(axis = axis)
except: return np.nansum(x, axis = 0)
#-------------
#-------------
#-------------
#-------------
#------------------------------------ MATHEMATICAL METHODS ------------------------------------#
#-------------------- Linear Algebra --------------------#
from numpy import dot, multiply, multiply as mult
def array(*args):
if len(args)==1:
arrs = np.array(args[0])
try:
if shape(arrs)[1]==1: arrs = arrs.T[0]
except: pass;
return arrs
else:
try: return np.array(args)
except: return np.array([args])
def matrix(*args): return np.matrix(array(args))
def T(x):
    if type(x)==np.ndarray: return matrix(x).T
else:
try: return x.T
except: return array(x).T
def inv(x):
try: return np.linalg.inv(x)
except: print("Either det(x)=0 or not square matrix")
def det(x):
try: return np.linalg.det(x)
except: print("Not square matrix")
#-------------
def eye(x): return np.eye(x)
def I(x): return np.eye(x)
#-------------
def ones(x, s = 1):
if s == 1: return np.ones((x,x))
else: return np.ones(x)
def J(x, s = 1): return ones(x, s)
#-------------
def zeros(x, s = 1):
if s == 1: return np.zeros((x,x))
else: return np.zeros(x)
def zeroes(x, s = 1): return zeros(x, s)
def Z(x, s = 1): return zeros(x, s)
#-------------
def triu(matrix): return np.triu(matrix)
def tril(matrix): return np.tril(matrix)
#-------------
def trace(A): return np.trace(A)
def tr(A): return trace(A)
def diag(A): return np.diagonal(A)
#-------------
def repmat(A, *args):
if len(args) == 2: return np.tile(A, (args[0],args[1]))
elif len(args) == 1: return np.tile(A, args[0])
else: print("Error")
def tile(A, *args): return repmat(A, *args)
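# Illustrative usage sketch (invented shapes, for exposition only):
#   repmat(eye(2), 2, 3)   # tiles the 2x2 identity into a 4x6 block matrix
#   repmat(ones(2), 2)     # repeats the 2x2 block of ones along the last axis -> 2x4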
#-------------
#-------------
#-------------
#-------------
#------------------------------------ TABLE METHODS ------------------------------------#
#-------------------- Opening and editing --------------------#
def read(x):
if type(x) == list:
for y in x:
if "csv" in y: return clean(pd.read_csv(y))
else:
if "csv" in x: return clean(pd.read_csv(x))
#-------------
def string(dfx, *args):
x = copy(dfx); df = copy(dfx)
if type(df) == pd.DataFrame:
x, col = argcheck(df, args)
for y in col:
x[y] = x[y].astype("str")+"*"
return x
elif type(df) == pd.Series:
df = df.astype("str")+"*"
return df
else: return str(df)
#-------------
def clean(x, *args):
def cleancol(x):
if dtypes(x) == 'obj':
c = x.str.replace(",","").str.replace(" ","").str.replace("-","").str.replace("%","").str.replace("#","")
else: c = x
try:
if ((sum(int(c)) - sum(float(c)) == 0) or sum(int(c)-float(c))==0) and count(c) == len(c): return int(c)
else: return float(c)
except:
return x
x = x.replace(np.inf, np.nan).replace(-np.inf, np.nan).replace("NaN",np.nan)
df = copy(x)
if type(x) == pd.Series: return cleancol(x)
else:
if len(args)==0: col = columns(df)
elif type(args[0]) != list: col = list(args)
else: col = args[0]
for y in col: df[y] = cleancol(df[y])
return df
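# Illustrative usage sketch (hypothetical frame, for exposition only):
#   df = pd.DataFrame({"price": ["1,000", "2,500", "3,250"], "tag": ["a", "b", "c"]})
#   clean(df)           # strips ", -%#" noise and casts "price" to a numeric column
#   clean(df, "price")  # same, restricted to the named column(s)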
#-------------
#-------------
#-------------
#-------------
#------------------------------------ DATA ANALYTICS ------------------------------------#
#-------------------- Analyse --------------------#
def analyse(c, y = None, extra = ["skew"], dec = 2, colour = True, limit = True, graph = True):
x = copy(c)
if y is not None:
if type(y) == str: x, y = drop(x, y)
first = describe(x, extra = extra, clean = False); cols = columns(first)
df = hcat(guess_importance(x,y), first)
df.columns = append("Importance", cols)
df = round(sort(df, by = ["Importance","FreqRatio","%Unique"], des = True),dec)
if limit == True: df = df[df["Importance"]>0]
if graph == True: plot(x = index(df)[0], y = index(df)[1], z = index(df)[2], hue = y, data = c)
if colour == True: df = df.style.bar(align='mid', color=pd_colour, width = 80).set_properties(**{'max-width': '90px'})
return df
def describe(x, extra = ["skew"], clean = True):
normal = hcat(mean(x), median(x), rng(x), freqratio(x), mode(x), punique(x))
normal.columns = ["Mean","Median","Range", "FreqRatio", "Mode","%Unique"]
    if type(extra)!=list: extra = [extra]
    extra = lower(extra)
for j in extra:
before = columns(normal)
if "skew" in j: normal = hcat(normal, skew(x)); normal.columns = append(before, "Skewness")
elif "cat" in j: normal = hcat(normal, iscat(x)); normal.columns = append(before, "IsCategorical")
elif "iqr" in j: normal = hcat(normal, iqr(x)); normal.columns = append(before, "InterQuartileRng")
elif "var" in j: normal = hcat(normal, var(x)); normal.columns = append(before, "Variance")
elif "std" in j or "sd" in j: normal = hcat(normal, std(x)); normal.columns = append(before, "SD")
elif "min" in j: normal = hcat(normal, np.min(x)); normal.columns = append(before, "Min")
elif "kurt" in j: normal = hcat(normal, kurtosis(x)); normal.columns = append(before, "Kurt")
elif "max" in j: normal = hcat(normal, np.max(x)); normal.columns = append(before, "Max")
elif "punq" in j: normal = hcat(normal, punique(x)); normal.columns = append(before, "%Unique")
elif "nunq" in j: normal = hcat(normal, nunique(x)); normal.columns = append(before, "No.Unique")
df = sort(normal, by = "FreqRatio")
if clean == True: return df.replace(np.nan,"")
else: return df
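# Illustrative usage sketch (assumes a DataFrame `df` with a target column "y";
# both names are hypothetical):
#   describe(df, extra = ["skew", "iqr", "std"])  # summary table sorted by FreqRatio
#   analyse(df, y = "y", graph = False)           # prepends guessed importances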
#-------------------- Var-Check and FreqRatio Check --------------------#
def varcheck(x, freq = "mean", unq = 0.1, colour = True, limit = True, output = False):
freqs = freqratio(x); unqs = punique(x)
if freq == "mean": fd = (freqs>=CI(q=0.99,data =freqs,L=False))*1
else: fd = (freqs>freq)*1
df = hcat(freqs,fd,unqs,(unqs<=unq)*1,var(x))
df.columns = ["FreqRatio","BadFreq?","%Unique","BadUnq?","Var"]
df["BadVar?"] = (df["Var"].fillna(1000)<=0.1)*1
df["BAD?"] = (df["BadFreq?"]+df["BadUnq?"]+df["BadVar?"])>0
df = round(sort(df, by =["BAD?","BadVar?","BadFreq?","BadUnq?","FreqRatio","%Unique","Var"], des = True),2)
    if limit == True: df = T(T(df)[((df["BAD?"]==True).index[df["BAD?"]==True]).tolist()])
    bad_cols = index(df)    # capture before styling, since a Styler object has no index
    if colour == True:
        df = df.style.bar(align='zero', color=pd_colour, width = 80, subset=["FreqRatio","%Unique","Var"])
        df = df.apply(highlight_one, subset = ["BadFreq?","BadUnq?","BadVar?"]).apply(highlight_true, subset=["BAD?"])
        df = df.set_properties(**{'max-width': '90px'})
    if output == True: return exc(x, bad_cols)
    else: return df
#-------------------- Correlations --------------------#
def corr(x, table = False, limit = 20):
if table == False:
corrs = round(x.corr()*100)
sortby = sort(sum(abs(corrs)-100),des=False)
corrs = corrs[index(sortby)]
corrs = T(T(corrs)[index(sortby)])
if shape(corrs)[0]>limit: corrs = T(T(corrs.iloc[0:limit]).iloc[0:limit])
corrs = T(reverse(T(reverse(corrs))))
cmap = sb.light_palette("black", as_cmap=True)
show = abs(corrs).style.background_gradient(cmap).set_properties(**{'max-width': '50px', 'font-size': '8pt'
,'color':'black'})
return show
else:
try: return conts(x).corr()
except: print("Error. No continuous data")
def correlation(x, table = False): return corr(x, table)
def correlation_matrix(x, table = False): return corr(x, table)
def cor(x, table = False): return corr(x, table)
#-------------------- Feature Importance --------------------#
def guess_importance(df, y):
x = copy(df)
if type(y) == str:
try: y = x[y]
except:
print("No column for y")
x = dummies(x)
x_train, x_test, y_train, y_test = holdout(x, y, info = False);
def lightmodel(x_train, x_test, y_train, y_test, reg, seed = 1234):
try: import lightgbm as lgb
        except: print("Cannot import lightgbm"); raise
x_train = array(x_train); y_train = array(y_train); x_test = array(x_test); y_test = array(y_test)
if reg == True:
model = lgb.LGBMRegressor(objective='regression', num_leaves = 5, learning_rate = 0.1, n_estimators = 100, seed = seed)
model.fit(x_train, y_train, early_stopping_rounds = 10, eval_metric='l2', eval_set=[(x_test, y_test)],verbose=False)
return model
imps = lightmodel(x_train, x_test, y_train, y_test, reg = True).feature_importances_
tab = table(imps); tab.index = columns(x)
imps = dict(tab)[0]*100; cols = columns(df)
imp = {k:0 for k in cols}
for j in imps.keys():
for c in cols:
if c in j: imp[c] += imps[j]
return series(imp)
def guess_imp(df, y): return guess_importance(df, y)
#-------------------- Data Reduction --------------------#
## https://stackoverflow.com/questions/29294983/how-to-calculate-correlation-between-all-columns-and-remove-highly-correlated-on
def remcor(x, limit = 0.9):
dataset = copy(x)
col_corr = set(); corr_matrix = dataset.corr()
for i in range(len(corr_matrix.columns)):
for j in range(i):
            if abs(corr_matrix.iloc[i, j]) >= limit:
colname = corr_matrix.columns[i]
col_corr.add(colname)
if colname in dataset.columns: del dataset[colname]
return dataset
def remcorr(x,limit=0.9): return remcor(x,limit)
#-------------
#https://stackoverflow.com/questions/28816627/how-to-find-linearly-independent-rows-from-a-matrix
def independent(A):
try: import sympy
except:
        print("Cannot import sympy"); raise
_, inds = sympy.Matrix(A).T.rref()
print("Lin Indp rows are: "+str(inds))
return A[list(inds)]
#-------------
#-------------
#-------------
#-------------
#------------------------------------ DUMMIFICATION ------------------------------------#
#-------------------- Dummies method --------------------#
def dummies(x, dummies = True, codes = False, freq = True, na = "inplace", nanew = True, col = None, ascending = True, cat = True, drop = True,
ids = False):
try:
        if dtypes(x)[0] in ('int','float') and type(x)==pd.Series: return x
    except:
        if dtypes(x) in ('int','float') and type(x)==pd.Series: return x
if type(x)!=pd.DataFrame: x = table(x)
df = copy(x)
if ids == False: df = exc(df, isid(df))
if col is None:
if cat == True: col = catcol(df)
else: col = objcol(df)
elif type(col)!=list: col = [col]
if dummies == True:
if "in" in na:
for j in col:
dummified = pd.get_dummies(x[j], dummy_na = nanew)
dummified.columns = [str(j)+"_"+str(c) for c in columns(dummified)]
if j in nacol(x): dummified.iloc[isnull(x[j]).index]=np.nan
df = hcat(df, dummified)
else: df = pd.get_dummies(x, dummy_na = nanew, columns = col)
if drop == True: return notobj(zerodrop(df))
else: return zerodrop(df)
else:
if freq == True:
code = {}
for j in col:
part = {};
try: i = min(df[j]);
except: i = 0;
                if dtype(df[j]) not in ('int','float'): d = fsort(df, by = j)[j]
else: d = sort(df, by = j)[j]
for k in d:
if pd.isnull(k)==False:
try: part[k]
except: part[k] = i; i+=1
code[j] = part
df[j]=df[j].replace(part)
del part,i,d,k
else:
code = {}
for j in col:
code[j] = reverse(dict(enumerate(df[j].astype("category").cat.categories)))
df[j]=df[j].replace(code[j])
if drop == True: df = notobj(df)
if shape(df)[1]==1: df = df[columns(df)[0]]
if codes == True: return df,code
else: return df
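# Illustrative usage sketch (hypothetical frame):
#   dummies(df)                                    # one-hot encode categorical columns
#   enc, codes = dummies(df, dummies = False, codes = True)
#   # label-encodes instead, also returning per-column {level: code} mappings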
#-------------------- Quantile conversion --------------------#
def discretise(x, n = 4, smooth = True, codes = False):
if codes == False: codes = None
else: codes = False
if smooth == True:
try: return pd.qcut(x, q = n, duplicates = 'drop', labels = codes)
        except: return pd.cut(x, bins = n, labels = codes)
else:
return pd.cut(x, bins = n, labels = codes)
def qcut(x, n = 4, smooth = True, codes = False): return discretise(x, n, smooth, codes)
#-------------
#-------------
#-------------
#-------------
#------------------------------------ ADVANCED DATA ANALYTICS ------------------------------------#
#-------------------- Skewness Analysis --------------------#
def topositive(y, info = False):
x = copy(y); d = conts(x)
notgood = ((np.min(d)<=0).index[np.min(d)<=0]).tolist()
add = np.abs(np.min(d[notgood]))+1
d[notgood] = d[notgood]+add
x[columns(d)] = d
if info == False: return x
else: return x,add
#-------------
def boxcox(x):
if type(x) == pd.Series:
k = (conts(x)+abs(min(conts(x)))+1)
lm = scipy.stats.boxcox(k)[1]
if lm == 0: return log(x), lm
else: return ((k**lm)-1)/lm, lm
else:
df = []; lms = []
for col in contcol(x):
k = (x[col]+abs(min(x[col]))+1)
lm = scipy.stats.boxcox(k)[1]
if lm == 0: df.append(log(x[col])); lms.append(lm)
else: df.append(((k**lm)-1)/lm); lms.append(lm)
return T(table(df)), array(lms)
#-------------
def unskew(x, info = False):
def show(q, df):
if q == 0: return (df, "normal")
elif q == 1: return (sqrt(df), "sqrt")
else: return (boxcox(df)[0], "boxcox")
original = copy(x)
df = topositive(conts(x))
skews = np.abs(skew(df))
sqrted = sqrt(df)
boxcoxed = boxcox(df)[0]
comp = hcat(skew(df),skew(sqrted),skew(boxcoxed)); comp.columns = ["norm","sqrt","box"]
res = np.abs(comp.T)
r = []; out = []
for col in res:
p = 0
for i in res[col]:
if i == np.min(res[col]):
f = show(p, df[col]); r.append(f[1]); out.append(f[0]); break
else: p += 1
first = out[0]
for c in out[1:]: first = hcat(first, c)
del c, out, res, comp, sqrted, skews, boxcoxed, show
original[columns(first)] = first
res = table(r); res.index = columns(first)
if info == True: return original, res[0]
else: return original
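# Illustrative usage sketch:
#   fixed, how = unskew(df, info = True)
#   # `how` records, per column, which of normal / sqrt / boxcox minimised |skewness|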
#-------------
def outlier(df, method = "forest", poutlier = 0.025, sd = 3.5, iqr = 1.5, indicate = True, n_estimators = 100):
x = copy(df)
if "for" in method or "tree" in method:
from sklearn.ensemble import IsolationForest
        df = dummies(x, na = "clear"); df = df.fillna(df[missingcol(df)].median())
model = IsolationForest(n_estimators = n_estimators, n_jobs=-1, bootstrap = True, contamination = poutlier)
model.fit(df); preds = model.predict(df)
res = x.iloc[np.where(preds==-1)[0]]
else:
f = dummies(x, na = "clear"); df = topositive(f.fillna(f.median()))
if "std" in method or "sd" in method:
#https://stackoverflow.com/questions/22354094/pythonic-way-of-detecting-outliers-in-one-dimensional-observation-data
if len(shape(df)) == 1: df = df[:,None]
df = unskew(df)
meds = median(df, axis=0)
diff = sum((df - meds)**2, axis=1)
diff = sqrt(diff); mad = median(diff)
z = 0.6745 * diff / mad
out = (z>sd)==True
where = out.index[out].tolist()
res = x.iloc[where]
elif "iqr" in method:
            first = percentile(df, p = 0.25)
            last = percentile(df, p = 0.75)
            iqrred = last-first
            where = sum((df>(last+iqr*iqrred))|(df<(first-iqr*iqrred)))!=0
res = x.iloc[where.index[where].tolist()]
print("No. outliers = "+str(len(res)))
if indicate == True:
x["IsOutlier"] = 0
try: x["IsOutlier"].iloc[[res.index.tolist()]] = 1
except: pass;
return x
else: return res
def isoutlier(df, method = "forest", poutlier = 0.025, sd = 3.5, iqr = 1.5, indicate = False, n_estimators = 100):
d = outlier(df, method = method, poutlier = poutlier, sd = sd, iqr = iqr, indicate = True, n_estimators = n_estimators)
if indicate == False: return exc(d.iloc[(d["IsOutlier"]==1).index[d["IsOutlier"]==1]], "IsOutlier")
else: return d.iloc[(d["IsOutlier"]==1).index[d["IsOutlier"]==1]]
def notoutlier(df, method = "forest", poutlier = 0.025, sd = 3.5, iqr = 1.5, indicate = False, n_estimators = 100):
d = outlier(df, method = method, poutlier = poutlier, sd = sd, iqr = iqr, indicate = True, n_estimators = n_estimators)
if indicate == False: return exc(d.iloc[(d["IsOutlier"]==0).index[d["IsOutlier"]==0]], "IsOutlier")
else: return d.iloc[(d["IsOutlier"]==0).index[d["IsOutlier"]==0]]
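# Illustrative usage sketch (method strings as handled above):
#   outlier(df, method = "forest", poutlier = 0.05)  # flag ~5% of rows via IsolationForest
#   notoutlier(df, method = "iqr", iqr = 1.5)        # keep rows inside the 1.5*IQR fences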
#-------------
def zerodrop(x): return exc(x, (pzero(x)==100).index[pzero(x)==100].tolist())
#-------------
#-------------
#-------------
#-------------
#------------------------------------ DATA CLEANING AND CONVERSION ------------------------------------#
#-------------------- Normal statistic filling --------------------#
def fillobj(x, method):
data = copy(clean(x))
missed = nacol(data[objcol(data)]); missdf = data[missed]
if method in ["mode","freq","frequency"]: data[missed] = data[missed].fillna(mode(missdf))
elif method in ["zero","missing","none"]: data[missed] = data[missed].fillna("Missing_Data")
elif method in ["mix","half","halved"]:
ins = (count(x)<0.75*len(x)).index[count(x)<0.75*len(x)]
data[ins] = data[ins].fillna("Missing_Data")
other = diff(columns(x), ins)
data[other] = data[other].fillna(mode(x[other]))
return data
#-------------
def fillcont(x, method):
data = copy(clean(x))
missed = nacol(conts(data)); missdf = data[missed]
if method in ["mean","avg","average"]: data[missed] = data[missed].fillna(mean(missdf))
elif method in ["median"]: data[missed] = data[missed].fillna(median(missdf))
elif method in ["mode","freq","frequency"]: data[missed] = data[missed].fillna(mode(missdf))
return data
#-------------------- Full methods --------------------#
def complete(df, method = None, objects = None, continuous = None, knn = 5, max_unique = 20, epoch = 100, mice = "forest", ids = False):
x = copy(df); imputation = ["bpca","pca","knn","mice","svd"]; imped = 0
if ids == False: x = exc(x, isid(x))
if method is not None: meth = method.lower()
else: meth = "a"
if method is None and objects is None and continuous is None: meth = 'knn'
if meth in imputation or objects in imputation or continuous in imputation:
imped = 1
try: import fancyimpute
except:
print("Cannot import"); raise
def matching(method, objects, continuous, thingo):
if method is not None:
if thingo in method: return 1
else: return 0
else:
            if (objects is not None and thingo in objects) or (continuous is not None and thingo in continuous): return 1
else: return 0
res,codes = dummies(x, codes = True, dummies = False)
intcols = (dtypes(res)=='int').index[dtypes(res)=='int'].tolist()
if matching(meth, objects, continuous, "knn") == 1: dfilled = fancyimpute.KNN(k=knn, verbose = 0).complete(res)
elif matching(meth, objects, continuous, "svd") == 1: dfilled = fancyimpute.SoftImpute(verbose = 0).complete(res)
elif matching(meth, objects, continuous, "mice") == 1:
print("Please wait...")
dfilled = mice_complete(res, epochs = int(epoch/10), impute_method = mice, strings = objcol(x))
print("Done")
else:
print("Please wait...")
dfilled = bpca_complete(res, epochs = epoch)
print("Done")
dfilled = table(dfilled); dfilled.columns = columns(res)
for col in codes: x[col] = squeeze(series(int(round(dfilled[col],0))), upper = len(codes[col])-1, lower = 0).replace(reverse(codes[col]))
for col in contcol(x): x[col] = dfilled[col]
for col in contcol(x): x[col] = squeeze(x[col], lower = np.min(df[col]), upper = np.max(df[col]))
if (missingcol(x) != [] and objects in imputation) or meth in imputation: x = fillobj(x, "mix")
elif objects is not None: x[objcol(x)] = fillobj(df[objcol(df)], objects)
if continuous not in imputation and continuous is not None: x[contcol(x)] = fillcont(df[contcol(df)], continuous)
x = round(x, 4)
x[intcols] = int(round(x[intcols]))
return x
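# Illustrative usage sketch:
#   complete(df)                                                # defaults to KNN imputation (k = 5)
#   complete(df, method = "mice", mice = "boost", epoch = 50)   # chained-equation filling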
#-------------------- BPCA --------------------#
#http://ishiilab.jp/member/oba/tools/BPCAFill.html
def bpca_complete(x, epochs = 100):
decimals = 4
y = copy(x); cols = y.columns.tolist()
maximum = np.int(np.max(y.max())*999)
means = round(y.mean(),decimals); sd = round(y.std(),decimals); y = round((y-means)/sd,decimals)
y[missingcol(y)] = y[missingcol(y)].fillna(maximum)
mat = float(np.matrix(y))
N,d = mat.shape; q = d-1
yest = np.copy(mat); yest[yest==maximum]=0
missidx = {}; bad = np.where(mat==maximum)
for a in bad[0]: missidx[a] = []
for a in range(len(bad[0])): missidx[bad[0][a]].append(bad[1][a])
nomissidx = {}; good = np.where(mat!=maximum)
for a in good[0]: nomissidx[a] = []
for a in range(len(good[0])): nomissidx[good[0][a]].append(good[1][a])
gmiss = list(set(bad[0]))
gnomiss = list(set(good[0]))
covy = np.cov(yest.T)
U, S, V = np.linalg.svd(np.matrix(covy))
U = (U.T[0:q]).T; S = S[0:q]*np.eye(q); V = (V.T[0:q]).T
mu = np.copy(mat); mu[mu==maximum]=np.nan; mu = np.nanmean(mu, 0)
W = U*np.sqrt(S); tau = 1/ (np.trace(covy)-np.trace(S)); taumax = 1e20; taumin = 1e-20; tau = np.amax([np.amin([tau,taumax]),taumin])
galpha0 = 1e-10; balpha0 = 1; alpha = (2*galpha0 + d)/(tau*np.diag(W.T*W)+2*galpha0/balpha0)
gmu0 = 0.001; btau0 = 1; gtau0 = 1e-10; SigW = eye(q)
tauold = 1000
for epoch in range(epochs):
Rx = np.eye(q)+tau*W.T*W+SigW; Rxinv = np.linalg.inv(Rx)
idx = gnomiss; n = len(idx)
dy = mat[idx,:] - np.tile(mu,(n,1)); x = tau * Rxinv * W.T * dy.T
Td = dy.T*x.T; trS = np.sum(np.multiply(dy,dy))
for n in range(len(gmiss)):
i = gmiss[n]
dyo = np.copy(mat)[i,nomissidx[i]] - mu[nomissidx[i]]
Wm = W[missidx[i],:]; Wo = W[nomissidx[i],:]
Rxinv = np.linalg.inv( Rx - tau*Wm.T*Wm ); ex = tau * Wo.T * np.matrix(dyo).T; x = Rxinv * ex
dym = Wm * x; dy = np.copy(mat)[i,:]
dy[nomissidx[i]] = dyo; dy[missidx[i]] = dym.T
yest[i,:] = dy + mu
Td = Td + np.matrix(dy).T*x.T; Td[missidx[i],:] = Td[missidx[i],:] + Wm * Rxinv
trS = trS + dy*np.matrix(dy).T + len(missidx[i])/tau + np.trace( Wm * Rxinv * Wm.T )
Td = Td/N; trS = trS/N; Rxinv = np.linalg.inv(Rx);
Dw = Rxinv + tau*Td.T*W*Rxinv + np.diag(alpha)/N; Dwinv = np.linalg.inv(Dw);
W = Td * Dwinv;
tau = (d+2*gtau0/N)/(trS-np.trace(Td.T*W) + (mu*np.matrix(mu).T*gmu0+2*gtau0/btau0)/N)[0,0];
SigW = Dwinv*(d/N);
alpha = (2*galpha0 + d)/ (tau*np.diag(W.T*W)+np.diag(SigW)+2*galpha0/balpha0).T
if np.abs(np.log10(tau)-np.log10(tauold)) < 1e-4: break;
tauold = tau
out = table(yest)
out.columns = cols
out = (out*sd)+means
return out
#-------------------- MICE --------------------#
#https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3074241/
def mice_complete(res, strings, epochs = 10, impute_method = "forest"):
x = copy(clean(res)); original = copy(x)
filled = fillcont(original, method = "median")
    from sklearn.model_selection import train_test_split
for epoch in range(epochs):
for missing_col in missingcol(original):
null_data = isnull(original[missing_col]).index
not_null = filled.iloc[notnull(original[missing_col]).index]
y = not_null.pop(missing_col)
if "forest" in impute_method or "tree" in impute_method or "bag" in impute_method:
from sklearn.ensemble import RandomForestRegressor as rfr
from sklearn.ensemble import RandomForestClassifier as rfc
if missing_col in strings: model = rfc(n_jobs = -1, n_estimators=epochs*4)
else: model = rfr(n_jobs = -1, n_estimators=epochs*4)
elif "linear" in impute_method or "log" in impute_method:
from sklearn.linear_model import LinearRegression as linreg
from sklearn.linear_model import LogisticRegression as logreg
if missing_col in strings: model = logreg(n_jobs = -1, solver = 'sag', multi_class = "multinomial")
else: model = linreg(n_jobs = -1)
elif "boost" in impute_method:
from lightgbm import LGBMRegressor as xgbr
from lightgbm import LGBMClassifier as xgbc
if missing_col in strings: model = xgbc(learning_rate = 10/epochs, n_estimators=epochs*4, nthread =-1)
else: model = xgbr(learning_rate = 10/epochs, n_estimators=epochs*4, nthread=-1)
train_x, test_x, train_y, test_y = train_test_split(not_null, y, test_size=0.33, random_state=42)
model.fit(train_x, train_y)
filled[missing_col].iloc[null_data] = model.predict(exc(filled.iloc[null_data], missing_col))
return filled
#-------------------- Squeeze or round functions --------------------#
def squeeze(df, lower = 0, upper = 1):
x = copy(df)
x[x<lower] = lower; x[x>upper] = upper
return x
#-------------
#-------------
#-------------
#-------------
#------------------------------------ MACHINE LEARNING ------------------------------------#
#-------------------- Boosting --------------------#
def lightgbm(x_train, x_test, y_train, y_test, noclass = None, lr = 0.05, method = "dart", gpu = False, trees = 100, metric = None,
depth = -1, splits=2, leaves=31.123, min_weight=20.123, features=1, bins=5.123, impurity=1e-3+0.000001, jobs=-1, state=None, bagging = 0.1,
stop = 10, l1 = 0, l2 = 1, dropout = 0.1, skipdrop = 0.5, verbose = False, info = True):
if noclass is None:
try: noclass = nunique(array(hcat(y_train,y_test)))
except: noclass = nunique(array(vcat(y_train,y_test)))
if gpu == True: gpu = "gpu"
else: gpu = "cpu"
if min_weight <1: min_weight = int(min_weight*(len(vcat(x_train,y_train))))
if bagging != False: bagged = 1;
else: bagged = 0;
if verbose == True: verbose = 1;
else: verbose = 0;
leaves = int(leaves); min_weight = int(min_weight); bins = int(bins)
try: import lightgbm as lgb
except:
print("Cannot import"); raise
x_train = array(x_train); y_train = array(y_train); x_test = array(x_test); y_test = array(y_test)
train_data = lgb.Dataset(x_train,label=y_train)
mets = metrics(noclass,"lightgbm")
    param = {'num_leaves':leaves, 'application':mets[0],'max_depth':depth,'learning_rate':lr,'num_iterations':trees, 'device':gpu,
            'metric':mets[1],'min_sum_hessian_in_leaf':impurity,'feature_fraction':features,
            'min_data_in_bin':bins,'bagging_fraction':bagging,'bagging_freq':bagged,'early_stopping_round':stop,'lambda_l1':l1,
            'lambda_l2':l2,'verbose':verbose,'nthread':jobs}
    if method == "dart": param['drop_rate'] = dropout; param['skip_drop'] = skipdrop
    if mets[0] == 'multiclass': param['num_class'] = noclass
print("--------------------------------\nLightGBM: Training...")
modeller=lgb.train(param,train_data,trees)
print("Finished")
if info == True:
        if mets[0] in ('binary', 'multiclass'): preds = toclasses(modeller.predict(x_test), unique(hcat(y_train,y_test)))
else: preds = modeller.predict(x_test)
for k in list(mets[2].keys()):
if k != 'rmse': print("Score = "+str(k)+" = "+str(mets[2][k](y_test, preds)))
else: print("Score = "+str(k)+" = "+str(mets[2][k](y_test, preds)**0.5))
return modeller
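# Illustrative usage sketch (uses holdout() defined further below; `x` and `y`
# are a hypothetical feature table and target):
#   x_train, x_test, y_train, y_test = holdout(x, y)
#   model = lightgbm(x_train, x_test, y_train, y_test, trees = 200, lr = 0.05)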
#-------------------- RF --------------------#
def randomforest(x_train, x_test, y_train, y_test, noclass = None, lr = 0.05, method = "dart", gpu = False, trees = 100, metric = None,
                depth = -1, splits=2, leaves=31.123, min_weight=20.123, features=1, bins=5.123, impurity=1e-3+0.000001, jobs=-1, state=None, bagging = 0.1,
stop = 10, l1 = 0, l2 = 1, dropout = 0.1, skipdrop = 0.5, verbose = False, info = True, addon = False):
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
if noclass is None:
try: noclass = nunique(array(hcat(y_train,y_test)))
except: noclass = nunique(array(vcat(y_train,y_test)))
    if depth == -1: depth = None
    if method not in ["gini","entropy"]: method = "gini"
    if features == 1: features = "auto"
    if impurity == (1e-3+0.000001): impurity = 1e-07
    if leaves == 31.123: leaves = None
    if min_weight == 20.123: min_weight = 0
    if bins == 5.123: bins = 1
    if leaves is not None: leaves = int(leaves)    # avoid int(None) on the default
    bins = int(bins)
x_train = array(x_train); y_train = array(y_train); x_test = array(x_test); y_test = array(y_test)
mets = metrics(noclass,"randomforest")
if mets[0] != 'regression':
modeller = RandomForestClassifier(n_estimators=trees, criterion=method, max_depth=depth, min_samples_split=splits, min_samples_leaf=bins,
min_weight_fraction_leaf=0.0, max_features=features, max_leaf_nodes=leaves, min_impurity_split=impurity,
bootstrap=True, oob_score=info, n_jobs=jobs, random_state=state, verbose=verbose, warm_start=addon)
else:
modeller = RandomForestRegressor(n_estimators=trees, criterion="mse", max_depth=depth, min_samples_split=splits, min_samples_leaf=bins,
min_weight_fraction_leaf=0.0, max_features=features, max_leaf_nodes=leaves, min_impurity_split=impurity,
bootstrap=True, oob_score=info, n_jobs=jobs, random_state=state, verbose=verbose, warm_start=addon)
print("--------------------------------\nRandomForest: Training...")
modeller.fit(x_train,y_train)
print("Finished")
if info == True:
preds = modeller.predict(x_test)
for k in list(mets[1].keys()):
if k != 'rmse': print("Score = "+str(k)+" = "+str(mets[1][k](y_test, preds)))
else: print("Score = "+str(k)+" = "+str(mets[1][k](y_test, preds)**0.5))
print("Score = "+"OOB"+" = "+str(modeller.oob_score_))
return modeller
#-------------
#-------------
#-------------
#-------------
#------------------------------------ SCALING AND NORMALISING ------------------------------------#
#-------------------- Standardise --------------------#
def standardise(data, output = True, method = "robust"):
if method == "robust": from sklearn.preprocessing import RobustScaler as scaler
elif method == "standard": from sklearn.preprocessing import StandardScaler as scaler
elif "min" in method or "max" in method: from sklearn.preprocessing import MinMaxScaler as scaler
elif "abs" in method: from sklearn.preprocessing import MaxAbsScaler as scaler
if type(data)==pd.DataFrame: cols = columns(data)
scaler = scaler()
res = scaler.fit(data)
res = scaler.transform(data)
if type(data)==pd.DataFrame:
res = table(res)
res.columns = cols
if output == True: return res, scaler
else: return res
#-------------------- Normalise --------------------#
def normalise(data, output = True, method = "l2"):
from sklearn.preprocessing import Normalizer
if type(data)==pd.DataFrame: cols = columns(data)
scaler = Normalizer(norm=method).fit(data)
res = scaler.transform(data)
if type(data)==pd.DataFrame:
res = table(res)
res.columns = cols
if output == True: return res, scaler
else: return res
#-------------
#-------------
#-------------
#-------------
#------------------------------------ PREPROCESS FUNCTION ------------------------------------#
#-------------------- :) --------------------#
def preprocess(train, target, hold = 0.2, dummy = True, impute = "bpca", mice = "boost",remove_outlier = 0, scale = "robust", transform = 0,
norm = False, output = True):
processor = {'dummies':-1, 'impute':-1, 'scale':-1, 'transform':-1, 'norm':-1, 'columns':-1}
if remove_outlier == 1: train = notoutlier(train)
if type(target)==str: x = exc(train, target); y = train[target]
if nunique(y)<=15: processor['target'] = unique(y)
else: processor['target'] = -1
x = complete(x, method = impute, mice = mice)
if transform == (1 or True): x, unskewer = unskew(x, info = True)
if dummy == False: x, codes = dummies(x, dummies = dummy, codes = True, ids = True)
else: x = dummies(x, dummies = dummy, ids = True); codes = -2
x = conts(x)
if scale is not None and scale != False:
if scale == True: x, scaler = standardise(x, method = "robust")
else: x, scaler = standardise(x, method = scale)
if norm is not None and norm != False:
if norm == True: x, normer = normalise(x, method = "l2")
else: x, normer = normalise(x, method = norm)
if hold != (0 or False) and hold is not None: x_train, x_test, y_train, y_test = holdout(x, y = y)
print("Processing finished :)")
if output == True:
try: processor['dummies'] = codes
except: pass;
try: processor['impute'] = [impute,train,mice]
except: pass;
try: processor['scale'] = scaler
except: pass;
try: processor['norm'] = normer
except: pass;
try: processor['transform'] = unskewer
except: pass;
processor['columns'] = columns(x_train)
return x_train, x_test, y_train, y_test, processor
else: return x_train, x_test, y_train, y_test
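# Illustrative usage sketch (hypothetical training frame with target column "y"):
#   x_train, x_test, y_train, y_test, proc = preprocess(train, target = "y")
#   model = lightgbm(x_train, x_test, y_train, y_test)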
#-------------------- :) Transform the test data --------------------#
def prefit(test, processor):
alldf = reset(vcat(processor['impute'][1],test), drop = True)
df = complete(alldf, method = processor['impute'][0], ids = True, mice = processor['impute'][2])
test = df[len(processor['impute'][1]):]
if processor['dummies'] == -2: test = dummies(test, dummies = True, ids = True)
a = set(processor['columns'])
b = set(columns(test))
matching = set.intersection(a,b)
not_matching = a.symmetric_difference(matching)
test = test[list(matching)]
if processor['dummies'] == -2:
try:
tabs = int(table(np.zeros((len(test),len(not_matching)))))
tabs.columns = list(not_matching)
test[columns(tabs)] = tabs
except: pass;
test = test[processor['columns']]
else:
for key in list(processor['dummies'].keys()):
try: test[key] = test[key].replace(processor['dummies'][key])
except: pass;
test = conts(test)
if processor['scale']!=-1: test = processor['scale'].transform(test)
if processor['norm']!=-1: test = processor['norm'].transform(test)
df = table(test)
df.columns = processor['columns']
return df
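# Illustrative usage sketch, continuing from preprocess() above:
#   test_ready = prefit(test, proc)            # re-applies imputation, dummies, scaling
#   preds = predict(test_ready, model, proc)   # predict() is defined further below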
#-------------
#-------------
#-------------
#-------------
#------------------------------------ METRICS AND HOLDOUT ------------------------------------#
def holdout(x, y, test = 0.2, seed = 1234, info = True):
from sklearn.model_selection import train_test_split
if info == True: print("--------------------------------\nx_train, x_test, y_train, y_test")
return train_test_split(x, y, test_size = test, random_state = seed)
#--------------------
def metrics(noclass, model = "lightgbm"):
from sklearn.metrics import mean_squared_error, cohen_kappa_score, r2_score
if model == "lightgbm":
if noclass == 2: return ['binary', ['binary_logloss','auc'], {'kappa':cohen_kappa_score,'rmse':mean_squared_error}]
elif noclass < 15: return ['multiclass', ['multi_logloss','multi_error'], {'kappa':cohen_kappa_score,'rmse':mean_squared_error}]
else: return ['regression_l2', ['l2_root'], {'r2':r2_score,'rmse':mean_squared_error}]
elif model == "randomforest":
if noclass == 2: return ['binary', {'kappa':cohen_kappa_score,'rmse':mean_squared_error}]
elif noclass < 15: return ['multiclass', {'kappa':cohen_kappa_score,'rmse':mean_squared_error}]
else: return ['regression', {'r2':r2_score,'rmse':mean_squared_error}]
#--------------------
def toclasses(preds, classes):
preds = np.round(preds)
preds = int(squeeze(preds, lower = min(classes), upper = max(classes)))
return preds
#--------------------
def predict(test, model, processor):
preds = model.predict(array(test))
if processor['target'] != -1: return toclasses(preds, classes = processor['target'])
else: return preds
#-------------
#-------------
#-------------
#-------------
#------------------------------------ GRAPHING ------------------------------------#
def plot(x = None, y = None, z = None, hue = None, size = 8, data = None, color = 'afmhot', smooth = True, n = 4):
dfdf = copy(data)
import matplotlib.pyplot as plt
if data is None and x is not None: print("Need to specify data"); return
try:
if type(x)==str: xlabel = x; x = dfdf[xlabel]; x = dummies(x, dummies = False)
except: pass;
try:
if type(y)==str: ylabel = y; y = dfdf[ylabel]; y = dummies(y, dummies = False)
except: pass;
try:
if type(z)==str: zlabel = z; z = dfdf[zlabel]; z = dummies(z, dummies = False)
except: pass;
try:
if type(hue)==str: huelabel = hue; hue = dfdf[huelabel]; hue = dummies(hue, dummies = False)
except: pass;
try:
xlabel = columns(x)[0];
if xlabel is None: xlabel = "X"
except: pass;
try:
ylabel = columns(y)[0];
if ylabel is None: ylabel = "Y"
except: pass;
try:
zlabel = columns(z)[0];
if zlabel is None: zlabel = "Z"
except: pass;
try: <|fim▁hole|> except: pass;
if x is not None and y is not None and z is not None:
from mpl_toolkits.mplot3d import Axes3D
import matplotlib
fig = plt.figure(figsize=(size,size))
ax = Axes3D(fig)
if hue is not None:
cm = plt.get_cmap(color)
try: cNorm = matplotlib.colors.Normalize(vmin=np.min(hue)[0], vmax=np.max(hue)[0])
except: cNorm = matplotlib.colors.Normalize(vmin=np.min(hue), vmax=np.max(hue))
scalarMap = matplotlib.cm.ScalarMappable(norm=cNorm, cmap=cm)
ax.scatter(array(x),array(y),array(z),c=scalarMap.to_rgba(array(hue)),s=size*5)
ax.set_xlabel(xlabel); ax.set_ylabel(ylabel); ax.set_zlabel(zlabel)
scalarMap.set_array(hue)
            fig.colorbar(scalarMap, pad=0, orientation = "horizontal", shrink = .8)
plt.show()
else:
import matplotlib
ax.scatter(x,y,z,s=size*5)
ax.set_xlabel(xlabel); ax.set_ylabel(ylabel); ax.set_zlabel(zlabel)
plt.show()
else:
import seaborn as sb
try:
if check_type(dfdf[xlabel]) == 'cat': dfdf[xlabel] = string(dfdf[xlabel])
except: pass;
try:
if check_type(dfdf[ylabel]) == 'cat': dfdf[ylabel] = string(dfdf[ylabel])
except: pass;
try:
if check_type(dfdf[huelabel]) == 'cat': dfdf[huelabel] = string(dfdf[huelabel])
except: pass;
if y is None and hue is None:
fig = plt.figure(figsize=(size,size))
if check_type(dfdf[xlabel]) == 'cont':
fig = sb.kdeplot(data = dfdf[xlabel], linewidth = 3,clip = [min(dfdf[xlabel]),max(dfdf[xlabel])])
mean_line(dfdf[xlabel])
plt.ylabel("Frequency"); plt.xlabel(xlabel); plt.title("Kernel Density graph"); plt.show()
elif check_type(dfdf[xlabel]) == 'cat':
fig = sb.countplot(dfdf[xlabel].fillna("Missing"))
plt.title("Count graph for "+xlabel); plt.show()
elif y is None:
if check_type(dfdf[xlabel]) == 'cont': sort_by = xlabel
else: sort_by = huelabel
if dtypes(dfdf[huelabel])[0] != 'obj':
                dfdf = sort(dfdf, by = sort_by)
dfdf[sort_by+"_Q"] = qcut(dfdf[sort_by], smooth = smooth, n = n)
dfdf[sort_by+"_Q"] = string(dfdf[sort_by+"_Q"])
fig = plt.figure(figsize=(size,size))
if check_type(dfdf[xlabel]) == 'cont':
if check_type(dfdf[huelabel]) == "cont":
fig = sb.violinplot(x=xlabel+"_Q", y=huelabel, bw='scott' ,scale="width",
cut=min(dfdf[huelabel]), inner = None, linewidth =4, data = dfdf)
plt.setp(fig.get_xticklabels(), rotation=45); plt.title("Violin graph for "+xlabel+" & "+huelabel)
plt.show()
elif check_type(dfdf[huelabel]) == 'cat':
fig = sb.countplot(x = xlabel+"_Q", hue = huelabel, data = dfdf)
plt.title("Count graph for "+xlabel+" & "+huelabel); plt.setp(fig.get_xticklabels(), rotation=45)
plt.show()
elif check_type(dfdf[xlabel]) == 'cat':
if check_type(dfdf[huelabel]) == "cont":
fig = sb.countplot(x = xlabel, hue = huelabel+"_Q", data = dfdf)
plt.title("Count graph for "+xlabel+" & "+huelabel); plt.setp(fig.get_xticklabels(), rotation=45)
plt.show()
if check_type(dfdf[huelabel]) == "cat":
fig = sb.countplot(x = xlabel, hue = huelabel, data = dfdf)
plt.title("Count graph for "+xlabel+" & "+huelabel); plt.setp(fig.get_xticklabels(), rotation=45)
plt.show()
elif hue is None:
if check_type(dfdf[xlabel]) == 'cont':
if check_type(dfdf[ylabel]) == 'cont':
fig = plt.figure(figsize=(size,size))
dfdf = notnull(dfdf)
dfdf[xlabel+"_Q"] = qcut(dfdf[xlabel], n = 30, smooth = True)
dfdf = (dfdf.groupby(by = xlabel+"_Q").median()+dfdf.groupby(by = xlabel+"_Q").mean())/2
sb.regplot(x = xlabel, y = ylabel, data = dfdf, ci = None, truncate=True, order=2, color = 'black')
plt.title("Regression graph for "+xlabel+" & "+ylabel); plt.show()
elif check_type(dfdf[ylabel]) == 'cat':
fig, (ax1,ax2) = plt.subplots(1,2, sharey = True, figsize = (size*1.5,size))
sb.boxplot(x = xlabel, y = ylabel, data = dfdf, palette="Set3", linewidth = 3, whis = 1, ax = ax1)
sb.pointplot(x = xlabel, y = ylabel, data = dfdf, lw=5, ax = ax2, ci = 50, capsize = .1, palette = 'Set1')
plt.title("Mean PointPlot graph for "+xlabel+" & "+ylabel); plt.show()
elif check_type(dfdf[xlabel]) == 'cat':
if check_type(dfdf[ylabel]) == 'cont':
fig, (ax1,ax2) = plt.subplots(1,2, sharey = False, figsize = (size*1.5,size))
sb.boxplot(x = xlabel, y = ylabel, data = dfdf, palette="Set3", linewidth = 3, whis = 1, ax = ax1)
plt.setp(ax1.get_xticklabels(), rotation=45)
plt.setp(ax2.get_xticklabels(), rotation=45)
sb.pointplot(x = xlabel, y = ylabel, data = dfdf, lw=5, ax = ax2, ci = 50, capsize = .1, palette = 'Set1')
plt.title("Mean PointPlot graph for "+xlabel+" & "+ylabel); plt.show()
elif check_type(dfdf[ylabel]) == 'cat':
fig = sb.factorplot(x = xlabel, col = ylabel, data = dfdf, size = 5, palette="Set2", col_wrap = 4, kind = "count")
plt.show()
else:
if check_type(dfdf[huelabel]) == 'cont':
dfdf = notnull(sort(dfdf, by = huelabel))
dfdf[huelabel] = string(qcut(dfdf[huelabel], smooth = False, n = 4))
elif check_type(dfdf[huelabel]) == 'cat':
dfdf = notnull(dfdf)
if check_type(dfdf[xlabel]) == 'cat':
if check_type(dfdf[ylabel]) == 'cont':
try:
fig = plt.figure(figsize=(size,size))
fig = sb.barplot(x = xlabel, y = ylabel, hue = huelabel, data = dfdf)
plt.setp(fig.get_xticklabels(), rotation=45)
plt.show()
except:
fig = sb.factorplot(x = xlabel, y = ylabel, data = dfdf, col = huelabel, size = 5, capsize=.1, palette="Set2", ci = 70)
plt.show()
elif check_type(dfdf[ylabel]) == 'cat':
fig = sb.factorplot(x = xlabel, hue = ylabel, data = dfdf, col = huelabel, kind = "count", size = 5)
plt.show()
elif check_type(dfdf[xlabel]) == 'cont':
if check_type(dfdf[ylabel]) == 'cont':
fig = plt.figure(figsize=(size,size))
fig = sb.lmplot(x = xlabel, y = ylabel, hue = huelabel, data = dfdf,robust = True, n_boot = 50, scatter = False, ci = None)
plt.show()
elif check_type(dfdf[ylabel]) == 'cat':
fig = sb.factorplot(x = xlabel, y = ylabel, col = huelabel, data = dfdf, palette = "Set3", dodge=True, ci = 70,
estimator = special_statistic, capsize=.2, n_boot = 100, size = 5)
plt.show()
def highlight_larger(s):
is_max = s > CI(99,s,L=False); return ['background-color: '+pd_colour if v else '' for v in is_max]
def highlight_smaller(s):
is_min = s < CI(99,s,U=False); return ['background-color: '+pd_colour if v else '' for v in is_min]
def highlight_one(s):
is_true = s == 1; return ['background-color: '+pd_colour if v else '' for v in is_true]
def highlight_true(s):
is_true = s == True; return ['background-color: '+pd_colour if v else '' for v in is_true]
#-------------
def mean_line(x, **kwargs):
ls = {"0":"--"}
plt.axvline(mean(x), linestyle =ls[kwargs.get("label","0")],
color = kwargs.get("color", "brown"), linewidth=2)
txkw = dict(size=12, color = kwargs.get("color", "brown"))
plt.text(mean(x),0.03, "MEAN", **txkw)
#-------------
def special_statistic(x): return (2*np.nanmedian(x)+np.nanmean(x))/3
#-------------
def check_type(x):
ctd = nunique(x); parts = (((ctd<=15)&(len(x)>15))|((ctd<len(x)*0.01)&(ctd<=20)&(dtypes(x)=='int'))|((dtypes(x)=='str')&(ctd<=15)))
if dtypes(x) != 'str':
if parts == True: return 'cat'
else: return 'cont'
else:
if parts == False: return 'str'
else: return 'cat'
#-------------
#-------------
#-------------
#-------------
#------------------------------------ DATA MINING AND NLP ------------------------------------#
#-------------------- String extracting --------------------#
def getfunction(c, args, now):
if "split" in c:
if "ex" in c: expanding = True;
else: expanding = False
if "letter" in args[c].lower() or "word" in args[c].lower() or "digit" in args[c].lower() or "number" in args[c].lower():
how = ''
for j in args[c].split(","):
if "letter" in j: how = how+"([a-z])"
elif "Letter" in j: how = how+"([a-zA-Z])"
elif "LETTER" in j: how = how+"([A-Z])"
elif "word" in j: how = how+"([a-z]+)"
elif "Word" in j: how = how+"([a-zA-Z]+)"
elif "WORD" in j: how = how+"([A-Z]+)"
elif "digit" in j.lower(): how = how+"([0-9])"
elif "number" in j.lower(): how = how+"([0-9]+)"
                elif "symbol" in j.lower(): how = how+'[^\w]+'
now = now.str.extract(how, expand = expanding)
else: now = now.str.split(args[c], expand = expanding)
elif "col" in c or "loc" in c:
try:
if "le" in args[c]: now = now.str[0:-1]
elif "ri" in args[c]: now = now.str[-1:1]
except:
if type(now) == pd.Series: now = now.str[args[c]]
else: now = now[args[c]]
    elif "not" in c: now = ~now.str.contains(args[c])
elif "has" in c: now = now.str.contains(args[c])
elif "rep" in c:
        if "symbol" in args[c]: now = now.str.replace(r'[^\w]',args[c][1])
else: now = now.str.replace(args[c][0], args[c][1])
elif "rem" in c or "strip" in c:
if "all" in args[c]:
for j in [".",",","+","=","-","_","(",")","[","]","*","$","?","<",">",'"',"'","/","<",">","%"]:
now = now.str.replace(j,"")
        elif "symbol" in args[c]: now = now.str.replace(r'[^\w]','')
else: now = now.str.replace(args[c][0], "")
elif "len" in c:
if args[c] == 1: now = now.str.len()
elif "low" in c:
if args[c] == 1: now = now.str.lower()
elif "up" in c:
if args[c] == 1: now = now.str.upper()
elif "count" in c:
if args[c] == ".": now = now.str.count(r"(\.)")
elif args[c] == "(": now = now.str.count(r"(\()")
elif args[c] == ")": now = now.str.count(r"(\))")
elif args[c] == "[": now = now.str.count(r"(\[)")
elif args[c] == "]": now = now.str.count(r"(\])")
elif args[c] == "{": now = now.str.count(r"(\{)")
elif args[c] == "}": now = now.str.count(r"(\})")
elif 'symbol' in args[c]: now = now.str.count(r'[^\w]')
elif 'sym' in args[c]: now = now.str.count(r'[\w]')
elif 'num' in args[c] or 'dig' in args[c]: now = now.str.count(r'[\d]')
else: now = now.str.count(args[c])
elif "df" in c or "table" in c or "series" in c: now = now.apply(pd.Series)
return now
def get(x, **args):
import re
now = copy(x)
for c in args:
now = getfunction(c, args, now)
return now
def extract(x, **args): return get(x, **args)
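# Illustrative usage sketch (keyword names as interpreted by getfunction above;
# the column names are hypothetical):
#   get(df["name"], lower = 1, rem = "all")     # lowercase, strip punctuation
#   get(df["code"], split = "Letter,number")    # extract ([a-zA-Z])([0-9]+) groups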
#-------------------- Word Frequency --------------------#
def flatten(y, split = " ", dropna = True, symbols = False, lower = True):
def col_split(x,split,dropna,symbols,lower):
if split is not None:
if symbols == False:
if lower == True: f = list(get(x, lower = True, rem = "all", splitex = split).fillna(np.nan).values.flatten())
else: f = list(get(x, rem = "all", splitex = split).fillna(np.nan).values.flatten())
else: f = list(get(x, splitex = split).fillna(np.nan).values.flatten())
else: f = list(x.fillna(np.nan).values.flatten())
return f
if type(y)==pd.Series: flattened = col_split(y,split,dropna,symbols,lower)
else:
flattened = []
for col in strs(y):
flattened += col_split(y[col],split,dropna,symbols,lower)
if dropna == True: return list(array(flattened)[array(flattened)!='nan'])
else: return flattened
#-------------
def wordfreq(x, hist = True, first = 15, separate = True):
if separate == False or type(x) == pd.Series:
df = reset(table(cunique(flatten(x))))[0:first]
df.columns = ["Word","Count"]
else:
first = int(first/len(strs(x)))
df = reset(table(cunique(flatten(x[strs(x)[0]]))))[0:first]
df.columns = ["Word","Count"]
df["Column"] = objcol(x)[0]
for col in objcol(x)[1:]:
dfx = reset(table(cunique(flatten(x[col]))))[0:first]
dfx.columns = ["Word","Count"]
dfx["Column"] = col
df = vcat(df,dfx)
if hist == True:
if separate == True and type(x) != pd.Series:
k = first*1.25
if k < 10: k = 8
fig = plt.figure(figsize=(k,k))
fig = sb.barplot(x = "Word", y = "Count", hue = "Column", data = df)
plt.setp(fig.get_xticklabels(), rotation=45, size = 16)
else:
fig = plt.figure(figsize=(first*0.5,first*0.35))
fig = sb.barplot(x = "Word", y = "Count", data = df)
plt.setp(fig.get_xticklabels(), rotation=45, size = 16)
plt.show()
else:
return df
#-------------
def getwords(y, first = 10):
x = copy(y)
df = wordfreq(x, first = first, hist = False)
for col in objcol(x):
cols = get(x[col], lower = True, rem = "all", table = True)
for j in df[df["Column"]==col]["Word"]:
x["Count="+str(j)] = get(cols[0], count = j)
return x
#-------------
#------------- Daniel Han-Chen 2017
#------------- https://github.com/danielhanchen/sciblox
#------------- SciBlox v0.02
#-------------<|fim▁end|> | huelabel = columns(hue)[0];
if huelabel is None: huelabel = "Hue" |
<|file_name|>set_volume_option.py<|end_file_name|><|fim▁begin|>import json<|fim▁hole|>
class SetVolumeOption(object):
def __init__(self, api_job):
super(SetVolumeOption, self).__init__()
self.api_job = api_job
self.atom = SetVolumeOption
def start(self):
attributes = json.loads(self.api_job['attributes'].decode('utf-8'))
vol_name = attributes['volname']
option = attributes['option_name']
option_value = attributes['option_value']
self.atom().start(vol_name, option, option_value)
self.api_job['status'] = "finished"
etcd.Client().write(self.api_job['request_id'],
json.dumps(self.api_job))<|fim▁end|> |
import etcd
from tendrl.gluster_bridge.atoms.volume.set import Set
|
<|file_name|>derives-span-Clone-enum-struct-variant.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
<|fim▁hole|>
#[derive(Clone)]
enum Enum {
A {
x: Error //~ ERROR
}
}
fn main() {}<|fim▁end|> |
struct Error; |
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>/**
* <|fim▁hole|> * @author root
*
*/
package com.amanaje.activities;<|fim▁end|> | */
/** |
<|file_name|>top_block.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Top Block
# Generated: Tue Dec 27 19:28:14 2016
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from gnuradio import analog
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import gr<|fim▁hole|>from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.wxgui import forms
from gnuradio.wxgui import scopesink2
from grc_gnuradio import wxgui as grc_wxgui
from optparse import OptionParser
import wx
class top_block(grc_wxgui.top_block_gui):
def __init__(self):
grc_wxgui.top_block_gui.__init__(self, title="Top Block")
##################################################
# Variables
##################################################
self.var = var = 11
self.samp_rate = samp_rate = 1e6
self.freq = freq = 1e3
##################################################
# Blocks
##################################################
_freq_sizer = wx.BoxSizer(wx.VERTICAL)
self._freq_text_box = forms.text_box(
parent=self.GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
label='freq',
converter=forms.float_converter(),
proportion=0,
)
self._freq_slider = forms.slider(
parent=self.GetWin(),
sizer=_freq_sizer,
value=self.freq,
callback=self.set_freq,
minimum=0,
maximum=16e3,
num_steps=100,
style=wx.SL_HORIZONTAL,
cast=float,
proportion=1,
)
self.Add(_freq_sizer)
self.wxgui_scopesink2_0 = scopesink2.scope_sink_c(
self.GetWin(),
title='Scope Plot',
sample_rate=samp_rate,
v_scale=0,
v_offset=0,
t_scale=0,
ac_couple=False,
xy_mode=False,
num_inputs=1,
trig_mode=wxgui.TRIG_MODE_AUTO,
y_axis_label='Counts',
)
self.Add(self.wxgui_scopesink2_0.win)
self.blocks_throttle_0 = blocks.throttle(gr.sizeof_gr_complex*1, samp_rate,True)
self.analog_sig_source_x_0 = analog.sig_source_c(samp_rate, analog.GR_COS_WAVE, freq, 1, 0)
##################################################
# Connections
##################################################
self.connect((self.analog_sig_source_x_0, 0), (self.blocks_throttle_0, 0))
self.connect((self.blocks_throttle_0, 0), (self.wxgui_scopesink2_0, 0))
def get_var(self):
return self.var
def set_var(self, var):
self.var = var
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.wxgui_scopesink2_0.set_sample_rate(self.samp_rate)
self.blocks_throttle_0.set_sample_rate(self.samp_rate)
self.analog_sig_source_x_0.set_sampling_freq(self.samp_rate)
def get_freq(self):
return self.freq
def set_freq(self, freq):
self.freq = freq
self._freq_slider.set_value(self.freq)
self._freq_text_box.set_value(self.freq)
self.analog_sig_source_x_0.set_frequency(self.freq)
def main(top_block_cls=top_block, options=None):
tb = top_block_cls()
tb.Start(True)
tb.Wait()
if __name__ == '__main__':
main()<|fim▁end|> | from gnuradio import wxgui |
<|file_name|>built-in-glamorous-components.d.ts<|end_file_name|><|fim▁begin|>import { ExtraGlamorousProps } from './glamorous-component'
import {
ViewProperties,
TextStyle,
ViewStyle,
ImageStyle,
TextInputProperties,
ImageProperties,
ScrollViewProps,
TextProperties,
TouchableHighlightProperties,
TouchableNativeFeedbackProperties,
TouchableOpacityProperties,
TouchableWithoutFeedbackProps,
FlatListProperties,
SectionListProperties
} from 'react-native'
export interface NativeComponent {
Image: React.StatelessComponent<
ImageProperties & ExtraGlamorousProps & ImageStyle
>
ScrollView: React.StatelessComponent<
ScrollViewProps & ExtraGlamorousProps & ViewStyle
>
Text: React.StatelessComponent<
TextProperties & ExtraGlamorousProps & TextStyle
><|fim▁hole|> TouchableHighlight: React.StatelessComponent<
TouchableHighlightProperties & ExtraGlamorousProps & ViewStyle
>
TouchableNativeFeedback: React.StatelessComponent<
TouchableNativeFeedbackProperties & ExtraGlamorousProps & ViewStyle
>
TouchableOpacity: React.StatelessComponent<
TouchableOpacityProperties & ExtraGlamorousProps & ViewStyle
>
TouchableWithoutFeedback: React.StatelessComponent<
TouchableWithoutFeedbackProps & ExtraGlamorousProps & ViewStyle
>
View: React.StatelessComponent<
ViewProperties & ExtraGlamorousProps & ViewStyle
>
FlatList: React.StatelessComponent<
FlatListProperties<any> & ExtraGlamorousProps & ViewStyle
>
SectionList: React.StatelessComponent<
SectionListProperties<any> & ExtraGlamorousProps & ViewStyle
>
}<|fim▁end|> | TextInput: React.StatelessComponent<
TextInputProperties & ExtraGlamorousProps & TextStyle
> |
<|file_name|>filter_actions.js<|end_file_name|><|fim▁begin|>"use strict";
var FilterDispatcher = require("../dispatcher/filter_dispatcher");
module.exports = {
changeDate: function changeDate(key, date) {
var pos = arguments.length <= 2 || arguments[2] === undefined ? "start" : arguments[2];
FilterDispatcher.handleChangeDate({
type: "CHANGE_DATE",
date: date,
key: key,
pos: pos
});
},
checkFilter: function checkFilter(filterBy, id, value) {
FilterDispatcher.handleCheckFilter({
type: "CHECK_FILTER",
filterBy: filterBy,
id: id,
value: value
});
},
changeKey: function changeKey(data) {
FilterDispatcher.handleKeyUpdate({
type: "CHANGE_KEY",
data: data
});
},
fetchFilters: function fetchFilters(api) {
FilterDispatcher.handleFetchFilters({
type: "FETCH",
api: api
});
},
receiveAll: function receiveAll(data) {
FilterDispatcher.handleServerAction({
type: "RECEIVE_DATA",
data: data
});
},<|fim▁hole|> filterBy: filterBy,
id: id
});
},
setKeys: function setKeys(data) {
FilterDispatcher.setAllKeysUpdate({
type: "SET_KEYS",
data: data
});
},
setTab: function setTab(tab) {
FilterDispatcher.setTab({
type: "SET_TAB",
tab: tab
});
}
};
//# sourceMappingURL=filter_actions.js.map<|fim▁end|> |
selectFilter: function selectFilter(filterBy, id) {
FilterDispatcher.handleSelectFilter({
type: "SELECT_FILTER", |
<|file_name|>basic_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package integration
import (
"reflect"
"sort"
"testing"
"time"
apiextensionsv1beta1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1"
"k8s.io/apiextensions-apiserver/test/integration/testserver"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/dynamic"
)
func TestServerUp(t *testing.T) {
stopCh, _, _, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
}
func TestNamespaceScopedCRUD(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns := "not-the-default"
testSimpleCRUD(t, ns, noxuDefinition, noxuVersionClient)
testFieldSelector(t, ns, noxuDefinition, noxuVersionClient)
}
func TestClusterScopedCRUD(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.ClusterScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns := ""
testSimpleCRUD(t, ns, noxuDefinition, noxuVersionClient)
testFieldSelector(t, ns, noxuDefinition, noxuVersionClient)
}
func testSimpleCRUD(t *testing.T, ns string, noxuDefinition *apiextensionsv1beta1.CustomResourceDefinition, noxuVersionClient dynamic.Interface) {
noxuResourceClient := NewNamespacedCustomResourceClient(ns, noxuVersionClient, noxuDefinition)
initialList, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 0, len(initialList.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
initialListTypeMeta, err := meta.TypeAccessor(initialList)
if err != nil {
t.Fatal(err)
}
if e, a := noxuDefinition.Spec.Group+"/"+noxuDefinition.Spec.Version, initialListTypeMeta.GetAPIVersion(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := noxuDefinition.Spec.Names.ListKind, initialListTypeMeta.GetKind(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
initialListListMeta, err := meta.ListAccessor(initialList)
if err != nil {
t.Fatal(err)
}
noxuWatch, err := noxuResourceClient.Watch(metav1.ListOptions{ResourceVersion: initialListListMeta.GetResourceVersion()})
if err != nil {
t.Fatal(err)
}
defer noxuWatch.Stop()
createdNoxuInstance, err := instantiateCustomResource(t, testserver.NewNoxuInstance(ns, "foo"), noxuResourceClient, noxuDefinition)
if err != nil {
t.Fatalf("unable to create noxu Instance:%v", err)
}
select {
case watchEvent := <-noxuWatch.ResultChan():
if e, a := watch.Added, watchEvent.Type; e != a {
t.Errorf("expected %v, got %v", e, a)
break
}
createdObjectMeta, err := meta.Accessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
// it should have a UUID
if len(createdObjectMeta.GetUID()) == 0 {
t.Errorf("missing uuid: %#v", watchEvent.Object)
}
if e, a := ns, createdObjectMeta.GetNamespace(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
createdTypeMeta, err := meta.TypeAccessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
if e, a := noxuDefinition.Spec.Group+"/"+noxuDefinition.Spec.Version, createdTypeMeta.GetAPIVersion(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := noxuDefinition.Spec.Names.Kind, createdTypeMeta.GetKind(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
case <-time.After(5 * time.Second):
t.Errorf("missing watch event")
}
gottenNoxuInstance, err := noxuResourceClient.Get("foo", metav1.GetOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := createdNoxuInstance, gottenNoxuInstance; !reflect.DeepEqual(e, a) {
t.Errorf("expected %v, got %v", e, a)
}
listWithItem, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 1, len(listWithItem.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := *createdNoxuInstance, listWithItem.(*unstructured.UnstructuredList).Items[0]; !reflect.DeepEqual(e, a) {
t.Errorf("expected %v, got %v", e, a)
}
if err := noxuResourceClient.Delete("foo", nil); err != nil {
t.Fatal(err)
}
listWithoutItem, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 0, len(listWithoutItem.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
select {
case watchEvent := <-noxuWatch.ResultChan():
if e, a := watch.Deleted, watchEvent.Type; e != a {
t.Errorf("expected %v, got %v", e, a)
break
}
deletedObjectMeta, err := meta.Accessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
// it should have a UUID
createdObjectMeta, err := meta.Accessor(createdNoxuInstance)
if err != nil {
t.Fatal(err)
}
if e, a := createdObjectMeta.GetUID(), deletedObjectMeta.GetUID(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
case <-time.After(5 * time.Second):
t.Errorf("missing watch event")
}
}
func testFieldSelector(t *testing.T, ns string, noxuDefinition *apiextensionsv1beta1.CustomResourceDefinition, noxuVersionClient dynamic.Interface) {
noxuResourceClient := NewNamespacedCustomResourceClient(ns, noxuVersionClient, noxuDefinition)
initialList, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 0, len(initialList.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
initialListTypeMeta, err := meta.TypeAccessor(initialList)
if err != nil {
t.Fatal(err)
}
if e, a := noxuDefinition.Spec.Group+"/"+noxuDefinition.Spec.Version, initialListTypeMeta.GetAPIVersion(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := noxuDefinition.Spec.Names.ListKind, initialListTypeMeta.GetKind(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
initialListListMeta, err := meta.ListAccessor(initialList)
if err != nil {
t.Fatal(err)
}
noxuWatch, err := noxuResourceClient.Watch(
metav1.ListOptions{
ResourceVersion: initialListListMeta.GetResourceVersion(),
FieldSelector: "metadata.name=foo",
},
)
if err != nil {
t.Fatal(err)
}
defer noxuWatch.Stop()
_, err = instantiateCustomResource(t, testserver.NewNoxuInstance(ns, "bar"), noxuResourceClient, noxuDefinition)
if err != nil {
t.Fatalf("unable to create noxu Instance:%v", err)
}
createdNoxuInstanceFoo, err := instantiateCustomResource(t, testserver.NewNoxuInstance(ns, "foo"), noxuResourceClient, noxuDefinition)
if err != nil {
t.Fatalf("unable to create noxu Instance:%v", err)
}
select {
case watchEvent := <-noxuWatch.ResultChan():
if e, a := watch.Added, watchEvent.Type; e != a {
t.Errorf("expected %v, got %v", e, a)
break
}
createdObjectMeta, err := meta.Accessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
// it should have a UUID
if len(createdObjectMeta.GetUID()) == 0 {
t.Errorf("missing uuid: %#v", watchEvent.Object)
}
if e, a := ns, createdObjectMeta.GetNamespace(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := "foo", createdObjectMeta.GetName(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
createdTypeMeta, err := meta.TypeAccessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
if e, a := noxuDefinition.Spec.Group+"/"+noxuDefinition.Spec.Version, createdTypeMeta.GetAPIVersion(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := noxuDefinition.Spec.Names.Kind, createdTypeMeta.GetKind(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
case <-time.After(5 * time.Second):
t.Errorf("missing watch event")
}
gottenNoxuInstance, err := noxuResourceClient.Get("foo", metav1.GetOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := createdNoxuInstanceFoo, gottenNoxuInstance; !reflect.DeepEqual(e, a) {
t.Errorf("expected %v, got %v", e, a)
}
listWithItem, err := noxuResourceClient.List(metav1.ListOptions{FieldSelector: "metadata.name=foo"})
if err != nil {
t.Fatal(err)
}
if e, a := 1, len(listWithItem.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := *createdNoxuInstanceFoo, listWithItem.(*unstructured.UnstructuredList).Items[0]; !reflect.DeepEqual(e, a) {<|fim▁hole|> if err := noxuResourceClient.Delete("bar", nil); err != nil {
t.Fatal(err)
}
if err := noxuResourceClient.Delete("foo", nil); err != nil {
t.Fatal(err)
}
listWithoutItem, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 0, len(listWithoutItem.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
select {
case watchEvent := <-noxuWatch.ResultChan():
if e, a := watch.Deleted, watchEvent.Type; e != a {
t.Errorf("expected %v, got %v", e, a)
break
}
deletedObjectMeta, err := meta.Accessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
// it should have a UUID
createdObjectMeta, err := meta.Accessor(createdNoxuInstanceFoo)
if err != nil {
t.Fatal(err)
}
if e, a := createdObjectMeta.GetUID(), deletedObjectMeta.GetUID(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := ns, createdObjectMeta.GetNamespace(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := "foo", createdObjectMeta.GetName(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
case <-time.After(5 * time.Second):
t.Errorf("missing watch event")
}
}
func TestDiscovery(t *testing.T) {
group := "mygroup.example.com"
version := "v1beta1"
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
scope := apiextensionsv1beta1.NamespaceScoped
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(scope)
_, err = testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
// check whether it shows up in discovery properly
resources, err := apiExtensionClient.Discovery().ServerResourcesForGroupVersion(group + "/" + version)
if err != nil {
t.Fatal(err)
}
if len(resources.APIResources) != 1 {
t.Fatalf("Expected exactly the resource \"noxus\" in group version %v/%v via discovery, got: %v", group, version, resources.APIResources)
}
r := resources.APIResources[0]
if r.Name != "noxus" {
t.Fatalf("Expected exactly the resource \"noxus\" in group version %v/%v via discovery, got: %v", group, version, r.Name)
}
if r.Kind != "WishIHadChosenNoxu" {
t.Fatalf("Expected exactly the kind \"WishIHadChosenNoxu\" in group version %v/%v via discovery, got: %v", group, version, r.Kind)
}
s := []string{"foo", "bar", "abc", "def"}
if !reflect.DeepEqual(r.ShortNames, s) {
t.Fatalf("Expected exactly the shortnames `foo, bar, abc, def` in group version %v/%v via discovery, got: %v", group, version, r.ShortNames)
}
sort.Strings(r.Verbs)
expectedVerbs := []string{"create", "delete", "deletecollection", "get", "list", "patch", "update", "watch"}
if !reflect.DeepEqual([]string(r.Verbs), expectedVerbs) {
t.Fatalf("Unexpected verbs for resource \"noxus\" in group version %v/%v via discovery: expected=%v got=%v", group, version, expectedVerbs, r.Verbs)
}
}
func TestNoNamespaceReject(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns := ""
noxuResourceClient := NewNamespacedCustomResourceClient(ns, noxuVersionClient, noxuDefinition)
initialList, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 0, len(initialList.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
initialListTypeMeta, err := meta.TypeAccessor(initialList)
if err != nil {
t.Fatal(err)
}
if e, a := noxuDefinition.Spec.Group+"/"+noxuDefinition.Spec.Version, initialListTypeMeta.GetAPIVersion(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := noxuDefinition.Spec.Names.ListKind, initialListTypeMeta.GetKind(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
createdNoxuInstance, err := instantiateCustomResource(t, testserver.NewNoxuInstance(ns, "foo"), noxuResourceClient, noxuDefinition)
if err == nil {
t.Fatalf("unexpected non-error: an empty namespace may not be set during creation while creating noxu instance: %v ", createdNoxuInstance)
}
}
func TestSameNameDiffNamespace(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns1 := "namespace-1"
testSimpleCRUD(t, ns1, noxuDefinition, noxuVersionClient)
ns2 := "namespace-2"
testSimpleCRUD(t, ns2, noxuDefinition, noxuVersionClient)
}
func TestSelfLink(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
// namespace scoped
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns := "not-the-default"
noxuNamespacedResourceClient := noxuVersionClient.Resource(&metav1.APIResource{
Name: noxuDefinition.Spec.Names.Plural,
Namespaced: noxuDefinition.Spec.Scope == apiextensionsv1beta1.NamespaceScoped,
}, ns)
noxuInstanceToCreate := testserver.NewNoxuInstance(ns, "foo")
createdNoxuInstance, err := noxuNamespacedResourceClient.Create(noxuInstanceToCreate)
if err != nil {
t.Fatal(err)
}
if e, a := "/apis/mygroup.example.com/v1beta1/namespaces/not-the-default/noxus/foo", createdNoxuInstance.GetSelfLink(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
// cluster scoped
curletDefinition := testserver.NewCurletCustomResourceDefinition(apiextensionsv1beta1.ClusterScoped)
curletVersionClient, err := testserver.CreateNewCustomResourceDefinition(curletDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
curletResourceClient := curletVersionClient.Resource(&metav1.APIResource{
Name: curletDefinition.Spec.Names.Plural,
Namespaced: curletDefinition.Spec.Scope == apiextensionsv1beta1.NamespaceScoped,
}, ns)
curletInstanceToCreate := testserver.NewCurletInstance(ns, "foo")
createdCurletInstance, err := curletResourceClient.Create(curletInstanceToCreate)
if err != nil {
t.Fatal(err)
}
if e, a := "/apis/mygroup.example.com/v1beta1/foo", createdCurletInstance.GetSelfLink(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
}
func TestPreserveInt(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.ClusterScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns := "not-the-default"
noxuNamespacedResourceClient := noxuVersionClient.Resource(&metav1.APIResource{
Name: noxuDefinition.Spec.Names.Plural,
Namespaced: true,
}, ns)
noxuInstanceToCreate := testserver.NewNoxuInstance(ns, "foo")
createdNoxuInstance, err := noxuNamespacedResourceClient.Create(noxuInstanceToCreate)
if err != nil {
t.Fatal(err)
}
originalJSON, err := runtime.Encode(unstructured.UnstructuredJSONScheme, createdNoxuInstance)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
gottenNoxuInstance, err := runtime.Decode(unstructured.UnstructuredJSONScheme, originalJSON)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
// Check if int is preserved.
unstructuredObj := gottenNoxuInstance.(*unstructured.Unstructured).Object
num := unstructuredObj["num"].(map[string]interface{})
num1 := num["num1"].(int64)
num2 := num["num2"].(int64)
if num1 != 9223372036854775807 || num2 != 1000000 {
t.Errorf("Expected %v, got %v, %v", `9223372036854775807, 1000000`, num1, num2)
}
}
func TestCrossNamespaceListWatch(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
noxuVersionClient, err := testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
ns := ""
noxuResourceClient := NewNamespacedCustomResourceClient(ns, noxuVersionClient, noxuDefinition)
initialList, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 0, len(initialList.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
initialListListMeta, err := meta.ListAccessor(initialList)
if err != nil {
t.Fatal(err)
}
noxuWatch, err := noxuResourceClient.Watch(metav1.ListOptions{ResourceVersion: initialListListMeta.GetResourceVersion()})
if err != nil {
t.Fatal(err)
}
defer noxuWatch.Stop()
instances := make(map[string]*unstructured.Unstructured)
ns1 := "namespace-1"
noxuNamespacedResourceClient1 := NewNamespacedCustomResourceClient(ns1, noxuVersionClient, noxuDefinition)
instances[ns1] = createInstanceWithNamespaceHelper(t, ns1, "foo1", noxuNamespacedResourceClient1, noxuDefinition)
	noxuNamespacesWatch1, err := noxuNamespacedResourceClient1.Watch(metav1.ListOptions{ResourceVersion: initialListListMeta.GetResourceVersion()})
	if err != nil {
		t.Fatal(err)
	}
	defer noxuNamespacesWatch1.Stop()
ns2 := "namespace-2"
noxuNamespacedResourceClient2 := NewNamespacedCustomResourceClient(ns2, noxuVersionClient, noxuDefinition)
instances[ns2] = createInstanceWithNamespaceHelper(t, ns2, "foo2", noxuNamespacedResourceClient2, noxuDefinition)
	noxuNamespacesWatch2, err := noxuNamespacedResourceClient2.Watch(metav1.ListOptions{ResourceVersion: initialListListMeta.GetResourceVersion()})
	if err != nil {
		t.Fatal(err)
	}
	defer noxuNamespacesWatch2.Stop()
createdList, err := noxuResourceClient.List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if e, a := 2, len(createdList.(*unstructured.UnstructuredList).Items); e != a {
t.Errorf("expected %v, got %v", e, a)
}
for _, a := range createdList.(*unstructured.UnstructuredList).Items {
if e := instances[a.GetNamespace()]; !reflect.DeepEqual(e, &a) {
t.Errorf("expected %v, got %v", e, a)
}
}
addEvents := 0
for addEvents < 2 {
select {
case watchEvent := <-noxuWatch.ResultChan():
if e, a := watch.Added, watchEvent.Type; e != a {
t.Fatalf("expected %v, got %v", e, a)
}
createdObjectMeta, err := meta.Accessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
if len(createdObjectMeta.GetUID()) == 0 {
t.Errorf("missing uuid: %#v", watchEvent.Object)
}
createdTypeMeta, err := meta.TypeAccessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
if e, a := noxuDefinition.Spec.Group+"/"+noxuDefinition.Spec.Version, createdTypeMeta.GetAPIVersion(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
if e, a := noxuDefinition.Spec.Names.Kind, createdTypeMeta.GetKind(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
delete(instances, createdObjectMeta.GetNamespace())
addEvents++
case <-time.After(5 * time.Second):
t.Fatalf("missing watch event")
}
}
if e, a := 0, len(instances); e != a {
t.Errorf("expected %v, got %v", e, a)
}
checkNamespacesWatchHelper(t, ns1, noxuNamespacesWatch1)
checkNamespacesWatchHelper(t, ns2, noxuNamespacesWatch2)
}
func createInstanceWithNamespaceHelper(t *testing.T, ns string, name string, noxuNamespacedResourceClient dynamic.ResourceInterface, noxuDefinition *apiextensionsv1beta1.CustomResourceDefinition) *unstructured.Unstructured {
createdInstance, err := instantiateCustomResource(t, testserver.NewNoxuInstance(ns, name), noxuNamespacedResourceClient, noxuDefinition)
if err != nil {
t.Fatalf("unable to create noxu Instance:%v", err)
}
return createdInstance
}
func checkNamespacesWatchHelper(t *testing.T, ns string, namespacedwatch watch.Interface) {
namespacedAddEvent := 0
for namespacedAddEvent < 2 {
select {
case watchEvent := <-namespacedwatch.ResultChan():
// Check that the namespaced watch only has one result
if namespacedAddEvent > 0 {
t.Fatalf("extra watch event")
}
if e, a := watch.Added, watchEvent.Type; e != a {
t.Fatalf("expected %v, got %v", e, a)
}
createdObjectMeta, err := meta.Accessor(watchEvent.Object)
if err != nil {
t.Fatal(err)
}
if e, a := ns, createdObjectMeta.GetNamespace(); e != a {
t.Errorf("expected %v, got %v", e, a)
}
case <-time.After(5 * time.Second):
if namespacedAddEvent != 1 {
t.Fatalf("missing watch event")
}
}
namespacedAddEvent++
}
}
func TestNameConflict(t *testing.T) {
stopCh, apiExtensionClient, clientPool, err := testserver.StartDefaultServer()
if err != nil {
t.Fatal(err)
}
defer close(stopCh)
noxuDefinition := testserver.NewNoxuCustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
_, err = testserver.CreateNewCustomResourceDefinition(noxuDefinition, apiExtensionClient, clientPool)
if err != nil {
t.Fatal(err)
}
noxu2Definition := testserver.NewNoxu2CustomResourceDefinition(apiextensionsv1beta1.NamespaceScoped)
_, err = apiExtensionClient.Apiextensions().CustomResourceDefinitions().Create(noxu2Definition)
if err != nil {
t.Fatal(err)
}
// A NameConflict occurs
err = wait.Poll(500*time.Millisecond, wait.ForeverTestTimeout, func() (bool, error) {
crd, err := testserver.GetCustomResourceDefinition(noxu2Definition, apiExtensionClient)
if err != nil {
return false, err
}
for _, condition := range crd.Status.Conditions {
if condition.Type == apiextensionsv1beta1.NamesAccepted && condition.Status == apiextensionsv1beta1.ConditionFalse {
return true, nil
}
}
return false, nil
})
if err != nil {
t.Fatal(err)
}
err = testserver.DeleteCustomResourceDefinition(noxuDefinition, apiExtensionClient)
if err != nil {
t.Fatal(err)
}
// Names are now accepted
err = wait.Poll(500*time.Millisecond, wait.ForeverTestTimeout, func() (bool, error) {
crd, err := testserver.GetCustomResourceDefinition(noxu2Definition, apiExtensionClient)
if err != nil {
return false, err
}
for _, condition := range crd.Status.Conditions {
if condition.Type == apiextensionsv1beta1.NamesAccepted && condition.Status == apiextensionsv1beta1.ConditionTrue {
return true, nil
}
}
return false, nil
})
if err != nil {
t.Fatal(err)
}
}<|fim▁end|> | t.Errorf("expected %v, got %v", e, a)
}
|
<|file_name|>synchronicity_tests.py<|end_file_name|><|fim▁begin|>from nose.tools import eq_, with_setup
from threading import Thread
from Queue import Queue
from time import sleep
def setup():
global Person, neo4django, gdb, neo4jrestclient, neo_constants, settings, models
from neo4django.tests import Person, neo4django, gdb, neo4jrestclient, \
neo_constants, settings
from neo4django.db import models
def teardown():
gdb.cleandb()
@with_setup(None, teardown)
def test_typenode_transactionality():
class RaceModel(models.NodeModel):
pass
exc_queue = Queue()
def race():
r = RaceModel()
try:
r.save()
except Exception, e:
exc_queue.put(str(e))
else:
exc_queue.put(True)
num_threads = 5
for i in xrange(num_threads):
thread = Thread(target=race)
thread.start()
for i in xrange(num_threads):
val = exc_queue.get()
if val is not True:<|fim▁hole|> typenode_script = "g.v(0).outE('<<TYPE>>').inV.filter{it.model_name=='%s'}"
typenode_script %= RaceModel.__name__
typenodes = gdb.extensions.GremlinPlugin.execute_script(typenode_script)
eq_(len(typenodes), 1)
def race(func, num_threads):
"""
Run a multi-threaded race on func. Func should accept a single argument-
a Queue. If func succeeds, it should `q.put(True)`- if it fails, it should
`q.put('error message')`.
"""
exc_queue = Queue()
for i in xrange(num_threads):
thread = Thread(target=func, args=(exc_queue,))
thread.start()
for i in xrange(num_threads):
val = exc_queue.get()
if val is not True:
raise AssertionError('There was an error running race (#%d) - "%s"'
% (i, val))
@with_setup(None, teardown)
def test_autoproperty_transactionality():
class AutoRaceModel(models.NodeModel):
some_id = models.AutoProperty()
def autorace(queue):
r = AutoRaceModel()
try:
r.save()
except Exception, e:
queue.put(str(e))
else:
queue.put(True)
race(autorace, 3)
eq_(len(set(m.some_id for m in AutoRaceModel.objects.all())), 3)<|fim▁end|> | raise AssertionError('There was an error saving one of the '
'RaceModels (#%d) - "%s"' % (i, val))
#check the number of typenodes |
<|file_name|>iterateArcMapDocsByFolder.py<|end_file_name|><|fim▁begin|># Copyright 2012 majgis Contributors
#
# Individuals comprising majgis Contributors are identified in
# the NOTICE file found in the root directory of this project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# or
# in the file named LICENSE in the root directory of this project.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" From a list of folders, export msd documents from existing mxds, ArcGIS 10
"""
import os
from glob import glob
from arcpy import mapping
from symbologyFromArcMapDoc import MxdExtras
folders = [r'F:\Projects\NationalAtlas\ArcGIS_Server\Server', r'F:\Projects\NationalAtlas\ArcGIS_Server\Server\biodiversity']
searchPattern = '*.mxd'
ignores = ['Overlap']
tempMsg = "{0:>90} -> {1}"
newMsg = "TABLE: {0} FIELD: {1}"
mxdSuffix = ".mxd"
msdSuffix = ".msd"
for folder in folders:
mxdPaths = glob(os.path.join(folder, searchPattern))
for mxdPath in mxdPaths:
mxd = mapping.MapDocument(mxdPath)
lyrs = mapping.ListLayers(mxd)
mxde = MxdExtras(mxdPath)
msdPath = mxdPath.replace(mxdSuffix, msdSuffix)
for lyr in lyrs:
lyre = mxde[lyr.name]
joinTable = lyre.joinedTableName
joinField = lyre.symbologyShortFieldName
if joinTable:
newName = newMsg.format(joinTable, joinField)
else:
newName = lyr.name
#print tempMsg.format(lyr.name, newName)
lyr.name = newName
mxd.save()
<|fim▁hole|> #delete existing msd
if os.path.exists(msdPath):
os.remove(msdPath)
#export msd
mapping.ConvertToMSD(mxd,msdPath)
print msdPath<|fim▁end|> | |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>//! A stand-in for `std::error`
use core::any::TypeId;
use core::fmt::{Debug, Display};
/// A stand-in for `std::error::Error`, which requires no allocation.
#[cfg(feature = "unstable")]
pub trait Error: Debug + Display + ::core::marker::Reflect {
/// A short description of the error.
///
/// The description should not contain newlines or sentence-ending
/// punctuation, to facilitate embedding in larger user-facing
/// strings.
fn description(&self) -> &str;
/// The lower-level cause of this error, if any.
fn cause(&self) -> Option<&Error> { None }
/// Get the `TypeId` of `self`
#[doc(hidden)]
fn type_id(&self) -> TypeId where Self: 'static {
TypeId::of::<Self>()
}
}
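// Illustrative implementation sketch for the trait above (the `ParseError`
// type is hypothetical, not part of this crate):
//
//     use core::fmt;
//
//     #[derive(Debug)]
//     struct ParseError;
//
//     impl fmt::Display for ParseError {
//         fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
//             f.write_str("invalid input")
//         }
//     }
//
//     impl Error for ParseError {
//         // `cause` and `type_id` fall back to their default implementations.
//         fn description(&self) -> &str { "invalid input" }
//     }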
/// A stand-in for `std::error::Error`, which requires no allocation.
#[cfg(not(feature = "unstable"))]
pub trait Error: Debug + Display {
/// A short description of the error.
///<|fim▁hole|> /// punctuation, to facilitate embedding in larger user-facing
/// strings.
fn description(&self) -> &str;
/// The lower-level cause of this error, if any.
fn cause(&self) -> Option<&Error> { None }
/// Stubbed! Returns type_id of `()`
#[doc(hidden)]
fn type_id(&self) -> TypeId where Self: 'static {
TypeId::of::<()>()
}
}<|fim▁end|> | /// The description should not contain newlines or sentence-ending |
<|file_name|>ajax.js<|end_file_name|><|fim▁begin|>// $Id: ajax.js,v 1.26.2.2 2009/11/30 22:47:05 merlinofchaos Exp $
/**
* @file ajax_admin.js
*
* Handles AJAX submission and response in Views UI.
*/
Drupal.Views.Ajax = Drupal.Views.Ajax || {};
/**
* Handles the simple process of setting the ajax form area with new data.
*/
Drupal.Views.Ajax.setForm = function(title, output) {
$(Drupal.settings.views.ajax.title).html(title);
$(Drupal.settings.views.ajax.id).html(output);
}
/**
* An ajax responder that accepts a packet of JSON data and acts appropriately.
*
* The following fields control behavior.
* - 'display': Display the associated data in the form area; bind the new
* form to 'url' if appropriate. The 'title' field may also be used.
* - 'add': This is a keyed array of HTML output to add via append. The key is
* the id to append via $(key).append(value)
 * - 'replace': This is a keyed array of HTML output to set via replace. The key is
 *   the id whose contents are replaced via $(key).html(value)
*
*/
Drupal.Views.Ajax.ajaxResponse = function(data) {
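  // Illustrative packet shape based on the fields documented above (the
  // selectors, markup and URL are made-up examples, not values produced here):
  //   {"display": "<form>...</form>", "title": "Edit display",
  //    "url": "/admin/build/views/ajax/...",
  //    "add": {"#views-tabset": "<div>...</div>"},
  //    "replace": {"div.views-basic-info": "<div>...</div>"},
  //    "js": {"viewsAjax": {}}, "hilite": "#views-tab-default", "changed": true}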
$('a.views-throbbing').removeClass('views-throbbing');
$('span.views-throbbing').remove();
if (data.debug) {
alert(data.debug);
}
// See if we have any settings to extend. Do this first so that behaviors
// can access the new settings easily.
if (Drupal.settings.viewsAjax) {
Drupal.settings.viewsAjax = {};
}
if (data.js) {
$.extend(Drupal.settings, data.js);
}
// Check the 'display' for data.
if (data.display) {
Drupal.Views.Ajax.setForm(data.title, data.display);
    var ajax_area = Drupal.settings.views.ajax.id;
    // If a URL was supplied, bind the form to it.
    if (data.url) {
// Bind a click to the button to set the value for the button.
$('input[type=submit], button', ajax_area).unbind('click');
$('input[type=submit], button', ajax_area).click(function() {
$('form', ajax_area).append('<input type="hidden" name="'
+ $(this).attr('name') + '" value="' + $(this).val() + '">');
$(this).after('<span class="views-throbbing"> </span>');
});
// Bind forms to ajax submit.
$('form', ajax_area).unbind('submit'); // be safe here.
$('form', ajax_area).submit(function(arg) {
$(this).ajaxSubmit({
url: data.url,
data: { 'js': 1 },
type: 'POST',
success: Drupal.Views.Ajax.ajaxResponse,
error: function() { $('span.views-throbbing').remove(); alert(Drupal.t("An error occurred at @path.", {'@path': data.url})); },
dataType: 'json'
});
return false;
});
}
Drupal.attachBehaviors(ajax_area);
}
else {
// If no display, reset the form.
Drupal.Views.Ajax.setForm('', Drupal.settings.views.ajax.defaultForm);
//Enable the save button.
$('#edit-save').removeAttr('disabled');<|fim▁hole|> // Trigger an update for the live preview when we reach this state:
$('#views-ui-preview-form').trigger('submit');
}
// Go through the 'add' array and add any new content we're instructed to add.
if (data.add) {
for (id in data.add) {
var newContent = $(id).append(data.add[id]);
Drupal.attachBehaviors(newContent);
}
}
// Go through the 'replace' array and replace any content we're instructed to.
if (data.replace) {
for (id in data.replace) {
$(id).html(data.replace[id]);
Drupal.attachBehaviors(id);
}
}
// Go through and add any requested tabs
if (data.tab) {
for (id in data.tab) {
$('#views-tabset').addTab(id, data.tab[id]['title'], 0);
$(id).html(data.tab[id]['body']);
$(id).addClass('views-tab');
Drupal.attachBehaviors(id);
// This is kind of annoying, but we have to actually to find where the new
// tab is.
var instance = $.ui.tabs.instances[$('#views-tabset').get(0).UI_TABS_UUID];
$('#views-tabset').clickTab(instance.$tabs.length);
}
}
if (data.hilite) {
$('.hilited').removeClass('hilited');
$(data.hilite).addClass('hilited');
}
if (data.changed) {
$('div.views-basic-info').addClass('changed');
}
}
/**
* An ajax responder that accepts a packet of JSON data and acts appropriately.
* This one specifically responds to the Views live preview area, so it's
* hardcoded and specialized.
*/
Drupal.Views.Ajax.previewResponse = function(data) {
$('a.views-throbbing').removeClass('views-throbbing');
$('span.views-throbbing').remove();
if (data.debug) {
alert(data.debug);
}
// See if we have any settings to extend. Do this first so that behaviors
// can access the new settings easily.
// Clear any previous viewsAjax settings.
if (Drupal.settings.viewsAjax) {
Drupal.settings.viewsAjax = {};
}
if (data.js) {
$.extend(Drupal.settings, data.js);
}
// Check the 'display' for data.
if (data.display) {
var ajax_area = 'div#views-live-preview';
$(ajax_area).html(data.display);
var url = $(ajax_area, 'form').attr('action');
// if a URL was supplied, bind the form to it.
if (url) {
// Bind a click to the button to set the value for the button.
$('input[type=submit], button', ajax_area).unbind('click');
$('input[type=submit], button', ajax_area).click(function() {
$('form', ajax_area).append('<input type="hidden" name="'
+ $(this).attr('name') + '" value="' + $(this).val() + '">');
$(this).after('<span class="views-throbbing"> </span>');
});
// Bind forms to ajax submit.
$('form', ajax_area).unbind('submit'); // be safe here.
$('form', ajax_area).submit(function() {
$(this).ajaxSubmit({
url: url,
data: { 'js': 1 },
type: 'POST',
success: Drupal.Views.Ajax.previewResponse,
error: function() { $('span.views-throbbing').remove(); alert(Drupal.t("An error occurred at @path.", {'@path': url})); },
dataType: 'json'
});
return false;
});
}
Drupal.attachBehaviors(ajax_area);
}
}
Drupal.Views.updatePreviewForm = function() {
var url = $(this).attr('action');
url = url.replace('nojs', 'ajax');
$('input[type=submit], button', this).after('<span class="views-throbbing"> </span>');
$(this).ajaxSubmit({
url: url,
data: { 'js': 1 },
type: 'POST',
success: Drupal.Views.Ajax.previewResponse,
error: function() { $('span.views-throbbing').remove(); alert(Drupal.t("An error occurred at @path.", {'@path': url})); },
dataType: 'json'
});
return false;
}
Drupal.Views.updatePreviewFilterForm = function() {
var url = $(this).attr('action');
url = url.replace('nojs', 'ajax');
$('input[type=submit], button', this).after('<span class="views-throbbing"> </span>');
$('input[name=q]', this).remove(); // remove 'q' for live preview.
$(this).ajaxSubmit({
url: url,
data: { 'js': 1 },
type: 'GET',
success: Drupal.Views.Ajax.previewResponse,
error: function() { $('span.views-throbbing').remove(); alert(Drupal.t("An error occurred at @path.", {'@path': url})); },
dataType: 'json'
});
return false;
}
Drupal.Views.updatePreviewLink = function() {
var url = $(this).attr('href');
url = url.replace('nojs', 'ajax');
if (url.substring(0, 18) != '/admin/build/views') {
return true;
}
$(this).addClass('views-throbbing');
$.ajax({
url: url,
data: 'js=1',
type: 'POST',
success: Drupal.Views.Ajax.previewResponse,
error: function() { $(this).removeClass('views-throbbing'); alert(Drupal.t("An error occurred at @path.", {'@path': url})); },
dataType: 'json'
});
return false;
}
Drupal.behaviors.ViewsAjaxLinks = function() {
// Make specified links ajaxy.
$('a.views-ajax-link:not(.views-processed)').addClass('views-processed').click(function() {
// Translate the href on the link to the ajax href. That way this degrades
// into a nice, normal link.
var url = $(this).attr('href');
url = url.replace('nojs', 'ajax');
// Turn on the hilite to indicate this is in use.
$(this).addClass('hilite');
// Disable the save button.
$('#edit-save').attr('disabled', 'true');
$(this).addClass('views-throbbing');
$.ajax({
type: "POST",
url: url,
data: 'js=1',
success: Drupal.Views.Ajax.ajaxResponse,
error: function() { $(this).removeClass('views-throbbing'); alert(Drupal.t("An error occurred at @path.", {'@path': url})); },
dataType: 'json'
});
return false;
});
$('form.views-ajax-form:not(.views-processed)').addClass('views-processed').submit(function(arg) {
// Translate the href on the link to the ajax href. That way this degrades
// into a nice, normal link.
var url = $(this).attr('action');
url = url.replace('nojs', 'ajax');
// $('input[@type=submit]', this).after('<span class="views-throbbing"> </span>');
$(this).ajaxSubmit({
url: url,
data: { 'js': 1 },
type: 'POST',
success: Drupal.Views.Ajax.ajaxResponse,
error: function() { $('span.views-throbbing').remove(); alert(Drupal.t("An error occurred at @path.", {'@path': url})); },
dataType: 'json'
});
return false;
});
// Bind the live preview to where it's supposed to go.
$('form#views-ui-preview-form:not(.views-processed)')
.addClass('views-processed')
.submit(Drupal.Views.updatePreviewForm);
$('div#views-live-preview form:not(.views-processed)')
.addClass('views-processed')
.submit(Drupal.Views.updatePreviewFilterForm);
$('div#views-live-preview a:not(.views-processed)')
.addClass('views-processed')
.click(Drupal.Views.updatePreviewLink);
}
/**
* Get rid of irritating tabledrag messages
*/
Drupal.theme.tableDragChangedWarning = function () {
return '<div></div>';
}<|fim▁end|> | |
<|file_name|>autojoinSteamQueue.js<|end_file_name|><|fim▁begin|>// Most of it was made by xPaw: https://gist.github.com/xPaw/73f8ae2031b4e528abf7
// Add following lines into manifest.json under content_scripts section (and tweak or remove date below):
// {
// "js": [ "js/autojoinSteamQueue.js" ],
// "matches": [ "*://store.steampowered.com/*" ]
// }
const summer2018 = new Date(2018, 6, 6);
if (Date.now() < summer2018) {
// I will update extension after the sale and remove this injection but.. in case I die you're not stuck with useless button :)
// We have to inject it like this to access global functions and variables
const scriptToInject = `var DiscoveryQueueModal, GenerateQueue = function(queueNumber) {
DiscoveryQueueModal = ShowBlockingWaitDialog('Exploring queue...', 'Generating new discovery queue #' + ++queueNumber);
jQuery.post('//store.steampowered.com/explore/generatenewdiscoveryqueue', {
sessionid: g_sessionID,
queuetype: 0
}).done(function(data) {<|fim▁hole|> done = 0,
errorShown;
for (var i = 0; i < data.queue.length; i++) {
var request = jQuery.post('//store.steampowered.com/app/10', {
appid_to_clear_from_queue: data.queue[i],
sessionid: g_sessionID
});
request.done(function() {
if (errorShown) {
return;
}
DiscoveryQueueModal.Dismiss();
DiscoveryQueueModal = ShowBlockingWaitDialog('Exploring the queue...', 'Request ' + ++done + ' of ' + data.queue.length);
});
request.fail(function() {
errorShown = true;
DiscoveryQueueModal.Dismiss();
DiscoveryQueueModal = ShowConfirmDialog('Error', 'Failed to clear queue item #' + ++done, 'Try again').done(function() {
GenerateQueue(queueNumber - 1);
});
});
requests.push(request);
}
jQuery.when.apply(jQuery, requests).done(function() {
DiscoveryQueueModal.Dismiss();
if (queueNumber < 3) {
GenerateQueue(queueNumber);
} else {
DiscoveryQueueModal = ShowConfirmDialog('Done', 'Queue has been explored ' + queueNumber + ' times', 'Reload the page').done(function() {
ShowBlockingWaitDialog('Reloading the page');
window.location.reload();
});
}
});
}).fail(function() {
DiscoveryQueueModal.Dismiss();
DiscoveryQueueModal = ShowConfirmDialog('Error', 'Failed to generate new queue #' + queueNumber, 'Try again').done(function() {
GenerateQueue(queueNumber - 1);
});
});
};`;
const script = document.createElement('script');
script.innerHTML = scriptToInject;
document.body.appendChild(script);
document.querySelector('.supernav_container')
.insertAdjacentHTML('beforeend', '<a class="menuitem supernav" style="cursor: pointer; color: #FFD700" title="This button will be removed after the sale. Visit AutoJoin Steam group for more details." onclick="GenerateQueue(0)">AutoJoin Queue</a>');
}<|fim▁end|> | var requests = [], |
<|file_name|>media_operations.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import os
import arrow
import magic
import hashlib
import logging
import requests
from io import BytesIO
from PIL import Image
from flask import json
from .image import get_meta
from .video import get_meta as video_meta
import base64
from superdesk.errors import SuperdeskApiError
logger = logging.getLogger(__name__)
def hash_file(afile, hasher, blocksize=65536):
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
return hasher.hexdigest()
def get_file_name(file):
return hash_file(file, hashlib.sha256())
def download_file_from_url(url):
rv = requests.get(url, timeout=15)
if rv.status_code not in (200, 201):
raise SuperdeskApiError.internalError('Failed to retrieve file from URL: %s' % url)
mime = magic.from_buffer(rv.content, mime=True).decode('UTF-8')
ext = mime.split('/')[1]
name = 'stub.' + ext
return BytesIO(rv.content), name, mime
def download_file_from_encoded_str(encoded_str):
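    # Expects a data URI such as "data:image/png;base64,iVBORw0..." (the value
    # shown is illustrative): the mime type and extension are parsed from the
    # header, and the base64 payload is decoded into an in-memory stream.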
content = encoded_str.split(';base64,')
mime = content[0].split(':')[1]
ext = content[0].split('/')[1]
name = 'web_capture.' + ext
content = base64.b64decode(content[1])
return BytesIO(content), name, mime
def process_file_from_stream(content, content_type=None):
content_type = content_type or content.content_type
content = BytesIO(content.read())
if 'application/' in content_type:
content_type = magic.from_buffer(content.getvalue(), mime=True).decode('UTF-8')
content.seek(0)
file_type, ext = content_type.split('/')
try:
metadata = process_file(content, file_type)
    except OSError:  # raised by PIL when the file claims to be an image but is not
raise SuperdeskApiError.internalError('Failed to process file')
file_name = get_file_name(content)
content.seek(0)
metadata = encode_metadata(metadata)
metadata.update({'length': json.dumps(len(content.getvalue()))})
return file_name, content_type, metadata
def encode_metadata(metadata):
return dict((k.lower(), json.dumps(v)) for k, v in metadata.items())
def decode_metadata(metadata):
return dict((k.lower(), decode_val(v)) for k, v in metadata.items())
def decode_val(string_val):
"""Format dates that elastic will try to convert automatically."""
val = json.loads(string_val)
try:
arrow.get(val, 'YYYY-MM-DD') # test if it will get matched by elastic
return str(arrow.get(val))
    except Exception:
return val
def process_file(content, type):
if type == 'image':
return process_image(content, type)
if type in ('audio', 'video'):
return process_video(content, type)
return {}
def process_video(content, type):
content.seek(0)
meta = video_meta(content)
content.seek(0)
return meta
def process_image(content, type):
content.seek(0)
meta = get_meta(content)
content.seek(0)
return meta
def crop_image(content, file_name, cropping_data):
if cropping_data:
file_ext = os.path.splitext(file_name)[1][1:]
if file_ext in ('JPG', 'jpg'):
file_ext = 'jpeg'
        logger.debug('Opened image from stream, going to crop it')
content.seek(0)
img = Image.open(content)
cropped = img.crop(cropping_data)
logger.debug('Cropped image from stream, going to save it')
try:
out = BytesIO()
cropped.save(out, file_ext)
out.seek(0)
return (True, out)
except Exception as io:
logger.exception(io)<|fim▁hole|><|fim▁end|> | return (False, content) |
<|file_name|>incubate_eggs.py<|end_file_name|><|fim▁begin|>from pokemongo_bot.human_behaviour import sleep
from pokemongo_bot.base_task import BaseTask
class IncubateEggs(BaseTask):
SUPPORTED_TASK_API_VERSION = 1
last_km_walked = 0
def initialize(self):
self.ready_incubators = []
self.used_incubators = []
self.eggs = []
self.km_walked = 0
self.hatching_animation_delay = 4.20
self.max_iv = 45.0
self._process_config()
def _process_config(self):
self.longer_eggs_first = self.config.get("longer_eggs_first", True)
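        # Example task config sketch (the "longer_eggs_first" key is read
        # above; the surrounding structure is illustrative of the bot's task
        # config format): {"type": "IncubateEggs", "config": {"longer_eggs_first": true}}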
def work(self):
try:
self._check_inventory()
except:
return
if self.used_incubators and IncubateEggs.last_km_walked != self.km_walked:
self.used_incubators.sort(key=lambda x: x.get("km"))
km_left = self.used_incubators[0]['km']-self.km_walked
if km_left <= 0:
self._hatch_eggs()
else:
self.emit_event(
'next_egg_incubates',
formatted='Next egg ({km_needed} km) incubates in {distance_in_km:.2f} km',
data={
'km_needed': self.used_incubators[0]['km_needed'],
'distance_in_km': km_left
}
)
IncubateEggs.last_km_walked = self.km_walked
sorting = self.longer_eggs_first
self.eggs.sort(key=lambda x: x.get("km"), reverse=sorting)
if self.ready_incubators:
self._apply_incubators()
def _apply_incubators(self):
for incubator in self.ready_incubators:
if incubator.get('used', False):
continue
for egg in self.eggs:
if egg["used"] or egg["km"] == -1:
continue
self.emit_event(
'incubate_try',
level='debug',
formatted="Attempting to apply incubator {incubator_id} to egg {egg_id}",
data={
'incubator_id': incubator['id'],
'egg_id': egg['id']
}
)
ret = self.bot.api.use_item_egg_incubator(
item_id=incubator["id"],
pokemon_id=egg["id"]
)
if ret:
code = ret.get("responses", {}).get("USE_ITEM_EGG_INCUBATOR", {}).get("result", 0)
if code == 1:
self.emit_event(
'incubate',
formatted='Incubating a {distance_in_km} egg.',
data={
'distance_in_km': str(egg['km'])
}
)
egg["used"] = True
incubator["used"] = True
break
elif code == 5 or code == 7:
self.emit_event(
'incubator_already_used',
level='debug',
formatted='Incubator in use.',
)
incubator["used"] = True
break
elif code == 6:
self.emit_event(
'egg_already_incubating',
level='debug',
formatted='Egg already incubating',
)
egg["used"] = True
def _check_inventory(self, lookup_ids=[]):
inv = {}
response_dict = self.bot.get_inventory()
matched_pokemon = []
temp_eggs = []
temp_used_incubators = []
temp_ready_incubators = []
inv = reduce(
dict.__getitem__,
["responses", "GET_INVENTORY", "inventory_delta", "inventory_items"],
response_dict
)
for inv_data in inv:
inv_data = inv_data.get("inventory_item_data", {})
if "egg_incubators" in inv_data:
temp_used_incubators = []
temp_ready_incubators = []
incubators = inv_data.get("egg_incubators", {}).get("egg_incubator",[])
if isinstance(incubators, basestring): # checking for old response
incubators = [incubators]
for incubator in incubators:
if 'pokemon_id' in incubator:
start_km = incubator.get('start_km_walked', 9001)
km_walked = incubator.get('target_km_walked', 9001)
temp_used_incubators.append({
"id": incubator.get('id', -1),
"km": km_walked,
"km_needed": (km_walked - start_km)
})
else:
temp_ready_incubators.append({
"id": incubator.get('id', -1)
})
continue
if "pokemon_data" in inv_data:
pokemon = inv_data.get("pokemon_data", {})
if pokemon.get("is_egg", False) and "egg_incubator_id" not in pokemon:
temp_eggs.append({
"id": pokemon.get("id", -1),
"km": pokemon.get("egg_km_walked_target", -1),
"used": False
})<|fim▁hole|> pokemon.get('individual_defense', 0),
pokemon.get('individual_stamina', 0)
]})
matched_pokemon.append(pokemon)
continue
if "player_stats" in inv_data:
self.km_walked = inv_data.get("player_stats", {}).get("km_walked", 0)
if temp_used_incubators:
self.used_incubators = temp_used_incubators
if temp_ready_incubators:
self.ready_incubators = temp_ready_incubators
if temp_eggs:
self.eggs = temp_eggs
return matched_pokemon
def _hatch_eggs(self):
response_dict = self.bot.api.get_hatched_eggs()
log_color = 'green'
try:
result = reduce(dict.__getitem__, ["responses", "GET_HATCHED_EGGS"], response_dict)
except KeyError:
return
pokemon_ids = []
if 'pokemon_id' in result:
pokemon_ids = [id for id in result['pokemon_id']]
stardust = result.get('stardust_awarded', "error")
candy = result.get('candy_awarded', "error")
xp = result.get('experience_awarded', "error")
sleep(self.hatching_animation_delay)
self.bot.latest_inventory = None
try:
pokemon_data = self._check_inventory(pokemon_ids)
for pokemon in pokemon_data:
# pokemon ids seem to be offset by one
if pokemon['pokemon_id']!=-1:
pokemon['name'] = self.bot.pokemon_list[(pokemon.get('pokemon_id')-1)]['Name']
else:
pokemon['name'] = "error"
except:
pokemon_data = [{"name":"error","cp":"error","iv":"error"}]
if not pokemon_ids or pokemon_data[0]['name'] == "error":
self.emit_event(
'egg_hatched',
data={
'pokemon': 'error',
'cp': 'error',
'iv': 'error',
'exp': 'error',
'stardust': 'error',
'candy': 'error',
}
)
return
for i in range(len(pokemon_data)):
msg = "Egg hatched with a {pokemon} (CP {cp} - IV {iv}), {exp} exp, {stardust} stardust and {candy} candies."
self.emit_event(
'egg_hatched',
formatted=msg,
data={
'pokemon': pokemon_data[i]['name'],
'cp': pokemon_data[i]['cp'],
'iv': "{} {}".format(
"/".join(map(str, pokemon_data[i]['iv'])),
sum(pokemon_data[i]['iv'])/self.max_iv
),
'exp': xp[i],
'stardust': stardust[i],
'candy': candy[i],
}
)<|fim▁end|> | elif 'is_egg' not in pokemon and pokemon['id'] in lookup_ids:
pokemon.update({
"iv": [
pokemon.get('individual_attack', 0), |
<|file_name|>run_plink_reformat.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
import glob
import os
import subprocess
'''
Convert 23andMe files to
PLINK format
'''
def twenty3_and_me_files():
"""Return the opensnp files that are 23 and me format"""
    all_twenty3_and_me_files = glob.glob('../opensnp_datadump.current/*.23andme.txt')
fifteen_mb = 15 * 1000 * 1000
non_junk_files = [path for path in all_twenty3_and_me_files if os.path.getsize(path) > fifteen_mb]
return non_junk_files
def run_plink_format(usable_files):
"""Reformat the 23andMe files into plink binary stuff"""
for f in usable_files:
gid = f.split("/")[-1].split("_")[1].replace("file","")
call = "../plink_v190/plink --23file "+ f + " F" + gid + "ID" + gid + "I 1"
call += " --out ../plink_binaries/" + gid
print "convert gid " + gid<|fim▁hole|><|fim▁end|> | subprocess.call(call,shell=True)
usable_files = twenty3_and_me_files()
run_plink_format(usable_files) |
<|file_name|>isLeapYearSpec.js<|end_file_name|><|fim▁begin|>import { isLeapYear } from "../../Source/Cesium.js";
describe("Core/isLeapYear", function () {
it("Check for valid leap years", function () {
expect(isLeapYear(2000)).toEqual(true);
expect(isLeapYear(2004)).toEqual(true);
expect(isLeapYear(2003)).toEqual(false);
expect(isLeapYear(2300)).toEqual(false);
expect(isLeapYear(2400)).toEqual(true);
expect(isLeapYear(-1)).toEqual(false);
expect(isLeapYear(-2000)).toEqual(true);
});
it("Fail with null value", function () {
expect(function () {
isLeapYear(null);
}).toThrowDeveloperError();
});
it("Fail with undefined value", function () {
expect(function () {
isLeapYear(undefined);
}).toThrowDeveloperError();
});<|fim▁hole|> }).toThrowDeveloperError();
});
});<|fim▁end|> |
it("Fail with non-numerical value", function () {
expect(function () {
isLeapYear("asd"); |
<|file_name|>RateLimit.js<|end_file_name|><|fim▁begin|>/*!
* Redback
* Copyright(c) 2011 Chris O'Hara <[email protected]>
* MIT Licensed
*/
/**
* Module dependencies.
*/
var Structure = require('../Structure');
/**
* See https://gist.github.com/chriso/54dd46b03155fcf555adccea822193da
*
* Count the number of times a subject performs an action over an interval
* in the immediate past - this can be used to rate limit the subject if
* the count goes over a certain threshold. For example, you could track
* how many times an IP (the subject) has viewed a page (the action) over
* a certain time frame and limit them accordingly.
*
* Usage:
* `redback.createRateLimit(action [, options]);`
*
* Options:
* `bucket_interval` - default is 5 seconds
* `bucket_span` - default is 10 minutes
* `subject_expiry` - default is 20 minutes
*
* Reference:
* https://gist.github.com/chriso/54dd46b03155fcf555adccea822193da
* http://redis.io/topics/data-types#hash
*
* Redis Structure:
* `(namespace:)action:<subject1> = hash(bucket => count)`
* `(namespace:)action:<subject2> = hash(bucket => count)`
* `(namespace:)action:<subjectN> = hash(bucket => count)`
*/
var RateLimit = exports.RateLimit = Structure.new();
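/*
 * Minimal usage sketch (assumes a connected Redback client created elsewhere,
 * e.g. `var redback = require('redback').createClient()`; the subject and
 * threshold below are illustrative):
 *
 *     var hits = redback.createRateLimit('requests');
 *     hits.addCount(ip, 60, function (err, count) {
 *         if (err) return callback(err);
 *         if (count > 100) {
 *             // The subject performed "requests" > 100 times in the last minute
 *         }
 *     });
 */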
/**
* Setup the RateLimit structure.
*
* @param {Object} options (optional)
* @api private
*/
RateLimit.prototype.init = function (options) {
options = options || {};
this.bucket_span = options.bucket_span || 600;
this.bucket_interval = options.bucket_interval || 5;
this.subject_expiry = options.subject_expiry || 1200;
this.bucket_count = Math.round(this.bucket_span / this.bucket_interval);
}
/**
* Get the bucket associated with the current time.
*
* @param {int} time (optional) - default is the current time (ms since epoch)
* @return {int} bucket
* @api private
*/
RateLimit.prototype.getBucket = function (time) {
time = (time || new Date().getTime()) / 1000;
return Math.floor((time % this.bucket_span) / this.bucket_interval);
}
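// Worked example with the defaults (bucket_span = 600, bucket_interval = 5):
// getBucket(1234000) works on 1234 seconds, and floor((1234 % 600) / 5) =
// floor(34 / 5) = 6, so the count lands in bucket 6 of 120 wrapping buckets.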
/**
* Increment the count for the specified subject.
*
* @param {string} subject
* @param {Function} callback (optional)
* @return this
* @api public
*/
RateLimit.prototype.add = function (subject, callback) {
if (Array.isArray(subject)) {
return this.addAll(subject, callback);
}<|fim▁hole|>
//Increment the current bucket
multi.hincrby(subject, bucket, 1)
//Clear the buckets ahead
multi.hdel(subject, (bucket + 1) % this.bucket_count)
.hdel(subject, (bucket + 2) % this.bucket_count)
//Renew the key TTL
multi.expire(subject, this.subject_expiry);
multi.exec(function (err) {
if (!callback) return;
if (err) return callback(err);
callback(null);
});
return this;
}
/**
* Count the number of times the subject has performed an action
* in the last `interval` seconds.
*
* @param {string} subject
* @param {int} interval
* @param {Function} callback
* @return this
* @api public
*/
RateLimit.prototype.count = function (subject, interval, callback) {
var bucket = this.getBucket(),
multi = this.client.multi(),
count = Math.floor(interval / this.bucket_interval);
subject = this.key + ':' + subject;
//Get the counts from the previous `count` buckets
multi.hget(subject, bucket);
while (count--) {
multi.hget(subject, (--bucket + this.bucket_count) % this.bucket_count);
}
//Add up the counts from each bucket
multi.exec(function (err, counts) {
if (err) return callback(err, null);
for (var count = 0, i = 0, l = counts.length; i < l; i++) {
if (counts[i]) {
count += parseInt(counts[i], 10);
}
}
callback(null, count);
});
return this;
}
/**
* An alias for `ratelimit.add(subject).count(subject, interval);`
*
* @param {string} subject
* @param {int} interval
* @param {Function} callback
* @return this
* @api public
*/
RateLimit.prototype.addCount = function (subject, interval, callback) {
var bucket = this.getBucket(),
multi = this.client.multi(),
count = Math.floor(interval / this.bucket_interval);
subject = this.key + ':' + subject;
//Increment the current bucket
multi.hincrby(subject, bucket, 1)
//Clear the buckets ahead
multi.hdel(subject, (bucket + 1) % this.bucket_count)
.hdel(subject, (bucket + 2) % this.bucket_count)
//Renew the key TTL
multi.expire(subject, this.subject_expiry);
//Get the counts from the previous `count` buckets
multi.hget(subject, bucket);
while (count--) {
multi.hget(subject, (--bucket + this.bucket_count) % this.bucket_count);
}
//Add up the counts from each bucket
multi.exec(function (err, counts) {
if (err) return callback(err, null);
for (var count = 0, i = 4, l = counts.length; i < l; i++) {
if (counts[i]) {
count += parseInt(counts[i], 10);
}
}
callback(null, count);
});
return this;
}<|fim▁end|> | var bucket = this.getBucket(), multi = this.client.multi();
subject = this.key + ':' + subject; |
<|file_name|>tree-openable.js<|end_file_name|><|fim▁begin|>/*
YUI 3.11.0 (build d549e5c)
Copyright 2013 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('tree-openable', function (Y, NAME) {
/*jshint expr:true, onevar:false */
/**
Extension for `Tree` that adds the concept of open/closed state for nodes.
@module tree
@submodule tree-openable
@main tree-openable
**/
/**
Extension for `Tree` that adds the concept of open/closed state for nodes.
@class Tree.Openable
@constructor
@extensionfor Tree
**/
/**
Fired when a node is closed.
@event close
@param {Tree.Node} node Node being closed.
@param {String} src Source of the event.
@preventable _defCloseFn
**/
var EVT_CLOSE = 'close';
/**
Fired when a node is opened.
@event open
@param {Tree.Node} node Node being opened.
@param {String} src Source of the event.
@preventable _defOpenFn
**/
var EVT_OPEN = 'open';
function Openable() {}
Openable.prototype = {
// -- Lifecycle ------------------------------------------------------------
initializer: function () {
this.nodeExtensions = this.nodeExtensions.concat(Y.Tree.Node.Openable);
},
// -- Public Methods -------------------------------------------------------
/**
Closes the specified node if it isn't already closed.
@method closeNode
@param {Tree.Node} node Node to close.
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `close` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
closeNode: function (node, options) {
if (node.canHaveChildren && node.isOpen()) {
this._fireTreeEvent(EVT_CLOSE, {
node: node,
src : options && options.src
}, {
defaultFn: this._defCloseFn,
silent : options && options.silent
});
}
return this;
},
/**
Opens the specified node if it isn't already open.
@method openNode
@param {Tree.Node} node Node to open.
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `open` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
openNode: function (node, options) {
if (node.canHaveChildren && !node.isOpen()) {
this._fireTreeEvent(EVT_OPEN, {
node: node,
src : options && options.src
}, {
defaultFn: this._defOpenFn,
silent : options && options.silent
});
}
return this;
},
/**
Toggles the open/closed state of the specified node, closing it if it's
currently open or opening it if it's currently closed.
@method toggleOpenNode
@param {Tree.Node} node Node to toggle.
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, events will be
suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
toggleOpenNode: function (node, options) {
return node.isOpen() ? this.closeNode(node, options) :
this.openNode(node, options);
},
// -- Default Event Handlers -----------------------------------------------
/**
Default handler for the `close` event.
@method _defCloseFn
@param {EventFacade} e
@protected
**/
_defCloseFn: function (e) {
delete e.node.state.open;
},
/**
Default handler for the `open` event.
@method _defOpenFn
@param {EventFacade} e
@protected
**/
_defOpenFn: function (e) {
e.node.state.open = true;
}
};
Y.Tree.Openable = Openable;
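/*
Usage sketch (illustrative, not part of the YUI 3.11.0 source): composing
`Tree.Openable` into a tree class with `Y.Base.create()` and driving it
through the API above. The class name 'my-tree', the node layout, and the
'example' src string are hypothetical.

    YUI().use('base-build', 'tree', 'tree-openable', function (Y) {
        var MyTree = Y.Base.create('my-tree', Y.Tree, [Y.Tree.Openable]),
            tree   = new MyTree({nodes: [{children: [{}, {}]}]});

        // `e.node` is the node being opened; `e.src` carries the optional
        // source string passed through the options object.
        tree.on('open', function (e) {
            Y.log('opening ' + e.node.id + ' (src: ' + e.src + ')');
        });

        tree.openNode(tree.rootNode.children[0], {src: 'example'});
        tree.toggleOpenNode(tree.rootNode.children[0], {silent: true});
    });
*/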
/**
@module tree
@submodule tree-openable
**/
/**
`Tree.Node` extension that adds methods useful for nodes in trees that use the
`Tree.Openable` extension.
@class Tree.Node.Openable
@constructor
@extensionfor Tree.Node
**/
function NodeOpenable() {}
NodeOpenable.prototype = {
/**
Closes this node if it's currently open.
@method close
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `close` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
close: function (options) {
this.tree.closeNode(this, options);
return this;
},
/**
Returns `true` if this node is currently open.
<|fim▁hole|> Note: the root node of a tree is always considered to be open.
@method isOpen
@return {Boolean} `true` if this node is currently open, `false` otherwise.
**/
isOpen: function () {
return !!this.state.open || this.isRoot();
},
/**
Opens this node if it's currently closed.
@method open
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `open` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
open: function (options) {
this.tree.openNode(this, options);
return this;
},
/**
Toggles the open/closed state of this node, closing it if it's currently
open or opening it if it's currently closed.
@method toggleOpen
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, events will be
suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
toggleOpen: function (options) {
this.tree.toggleOpenNode(this, options);
return this;
}
};
Y.Tree.Node.Openable = NodeOpenable;
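/*
Node-level sketch (illustrative): the same operations expressed through a
`Tree.Node`, assuming `tree` was built with Tree.Openable mixed in as above.

    var node = tree.rootNode.children[0];

    node.isOpen();               // root nodes always report true; others
                                 // reflect their `state.open` flag
    node.open({src: 'example'}); // delegates to tree.openNode()
    node.toggleOpen();           // closes the node again
*/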
}, '3.11.0', {"requires": ["tree"]});<|fim▁end|> | |
<|file_name|>gsmlogger.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Tools for logging in GSM package.
"""
import logging
import os
import platform
LOGGERS = {}
if platform.node() == 'ldb001':
BASE_LOG_DIR = os.path.join(os.getenv('HOME'), 'GSM')
else:
BASE_LOG_DIR = os.path.join(os.getenv('HOME'), 'prog', 'GSM')
USE_CONSOLE = False
LOG_LEVEL = logging.DEBUG
def get_gsm_logger(log_name, file_name, use_console=USE_CONSOLE):
"""
    Return a logger that writes to a file under BASE_LOG_DIR and,
    optionally, to the console.
"""
if log_name in LOGGERS:
return LOGGERS[log_name]
logger = logging.getLogger(log_name)
logger.setLevel(LOG_LEVEL)
formatter = logging.Formatter(
'%(asctime)-6s: %(name)s - %(levelname)s - %(message)s')
file_handler = logging.FileHandler(filename=os.path.join(BASE_LOG_DIR,
file_name))
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
if use_console:
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
LOGGERS[log_name] = logger
    global USE_CONSOLE
    USE_CONSOLE = use_console
return logger
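# Usage sketch (illustrative; the 'pipeline' logger name and file name are
# hypothetical): create a logger that writes to BASE_LOG_DIR/pipeline.log
# and also echoes to the console.
#
#     log = get_gsm_logger('pipeline', 'pipeline.log', use_console=True)
#     log.info('GSM pipeline started')
#
# A second call with the same log_name returns the cached instance.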
def switch_console(use_console=False):
"""
Switch console output on/off for all loggers.
"""
    global USE_CONSOLE
    USE_CONSOLE = use_console
for logger in LOGGERS.values():<|fim▁hole|> '%(asctime)-6s: %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(console_handler)
elif not use_console and len(logger.handlers) == 2:
logger.removeHandler(logger.handlers[1])
def set_all_levels(level):
"""
Set output level for all loggers.
"""
    global LOG_LEVEL
    LOG_LEVEL = level
for logger in LOGGERS.values():
logger.setLevel(level)<|fim▁end|> | if use_console and len(logger.handlers) == 1:
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter( |
<|file_name|>PySrt.py<|end_file_name|><|fim▁begin|>import re
import sys
class SrtSection:
"""
    This class is used to store a section of an srt file (a subtitle frame).
- self.beginning is the time (in seconds) where the subframe begins
- self.duration is the duration (in seconds) of the subframe
- self.content is the content of the subframe
"""
def __init__(self, beginning, duration, content):
self.beginning = beginning
self.duration = duration
self.content = content
def __repr__(self):
return '({0}, {1}), "{2}"'.format(self.beginning, self.duration, self.content.encode("unicode_escape").decode())
def export(self):
"""
Exports the section to a formatted string
"""
return self.__export_tdata() + '\n' + self.content
def __export_tdata(self):
"""
        Formats the time line in srt syntax
        ("HH:MM:SS,mmm --> HH:MM:SS,mmm") from (beginning, duration).
"""
        # Compute the start and end timestamps
beginning, end = self.beginning, self.beginning + self.duration
times = []
for temps in beginning, end:
hours = int(temps // 3600)
temps %= 3600
minutes = int(temps // 60)
temps %= 60
seconds = int(temps)
miliseconds = int(round(temps - seconds, 3)*1000)
            times.append('{0:02d}:{1:02d}:{2:02d},{3:03d}'.format(hours, minutes, seconds,
miliseconds))<|fim▁hole|>
class SrtSubs:
"""
    This class is used to store and manipulate sections from an srt file.
    self.sections, where all the data is stored, is a list of SrtSections.
"""
def __init__(self, string):
"""
string is the content of the srt file.
"""
self.rawsections = [s.strip() for s in string.split("\n\n") if s != '']
self.sections = self.__extract_sections()
def __extract_sections(self):
"""
        Extracts all the information from the raw sections of the file
        and returns it as a list of SrtSection objects, with:
            beginning and duration in seconds
            content the subtitle to show at that time
"""
sections = []
for section in self.rawsections:
lines = section.split('\n')
beginning, duration = self.__extract_tdata(lines[1])
content = "\n".join(lines[2:])
sections.append(SrtSection(beginning, duration, content))
return sections
def export_sections(self):
"""
        Serializes all sections into a single srt-formatted string.
"""
secs = []
for number, section in enumerate(self.sections):
sec = str(number+1)+'\n'
sec += section.export()
secs.append(sec)
return '\n\n'.join(secs)
def __extract_tdata(self, timesection):
"""
Returns a tuple (beginning, duration) from
the %H:%M:%S --> %H:%M:%S line.
"""
tparts = timesection.split(" --> ")
beginning_end = []
for sec in tparts:
hours, minutes, seconds, miliseconds = tuple(map(int, re.split("[:,]", sec)))
beginning_end.append(3600 * hours + 60 * minutes + seconds + miliseconds/1000)
beginning, end = tuple(beginning_end)
duration = end - beginning
        return beginning, round(duration, 3)<|fim▁end|> |
return ' --> '.join(times)
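# Usage sketch (illustrative; 'movie.srt' is a hypothetical path): parse an
# srt file, shift every subtitle two seconds later, and re-export it.
#
#     with open('movie.srt', encoding='utf-8') as f:
#         subs = SrtSubs(f.read())
#     for section in subs.sections:
#         section.beginning += 2
#     srt_text = subs.export_sections()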
|
<|file_name|>bitcoin_pl.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="pl" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="14"/>
<source>About Kickasscoin</source>
<translation>O Kickasscoin</translation>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="53"/>
<source><b>Kickasscoin</b> version</source>
<translation>Wersja <b>Kickasscoin</b></translation>
</message>
<message>
<location filename="../forms/aboutdialog.ui" line="97"/>
<source>Copyright © 2009-2012 Bitcoin Developers
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file license.txt or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>Copyright © 2009-2012 Bitcoin Developers
Oprogramowanie eksperymentalne.
Distributed under the MIT/X11 software license, see the accompanying file license.txt or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.
Pomóż w tłumaczeniu:
www.transifex.net/projects/p/bitcoin/</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="14"/>
<source>Address Book</source>
<translation>Książka Adresowa</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="20"/>
<source>These are your Kickasscoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Tutaj znajdują się twoje adresy Kickasscoin do odbioru płatności. Możesz nadać oddzielne adresy dla każdego z wysyłających monety, żeby śledzić oddzielnie ich opłaty.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="36"/>
<source>Double-click to edit address or label</source>
<translation>Kliknij dwukrotnie, aby edytować adres lub etykietę</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="63"/>
<source>Create a new address</source>
<translation>Utwórz nowy adres</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="77"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Skopiuj aktualnie wybrany adres do schowka</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="66"/>
<source>&New Address</source>
<translation>&Nowy Adres</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="80"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="91"/>
<source>Show &QR Code</source>
<translation>Pokaż Kod &QR</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="102"/>
<source>Sign a message to prove you own this address</source>
<translation>Podpisz wiadomość aby dowieść, że ten adres jest twój</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="105"/>
<source>&Sign Message</source>
<translation>Podpi&sz Wiadomość</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="116"/>
<source>Delete the currently selected address from the list. Only sending addresses can be deleted.</source>
<translation>Usuń aktualnie wybrany adres z listy. Tylko adresy nadawcze mogą być usunięte.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="119"/>
<source>&Delete</source>
<translation>&Usuń</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="63"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="65"/>
<source>&Edit</source>
<translation>&Edytuj</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="292"/>
<source>Export Address Book Data</source>
<translation>Eksportuj książkę adresową</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="293"/>
<source>Comma separated file (*.csv)</source>
<translation>Plik *.CSV (rozdzielany przecinkami)</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="306"/>
<source>Error exporting</source>
<translation>Błąd podczas eksportowania</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="306"/>
<source>Could not write to file %1.</source>
<translation>Błąd zapisu do pliku %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="142"/>
<source>Label</source>
<translation>Etykieta</translation>
</message>
<message>
<location filename="../addresstablemodel.cpp" line="142"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../addresstablemodel.cpp" line="178"/>
<source>(no label)</source>
<translation>(bez etykiety)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="47"/>
<source>Enter passphrase</source>
<translation>Wpisz hasło</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="61"/>
<source>New passphrase</source>
<translation>Nowe hasło</translation>
</message>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="75"/>
<source>Repeat new passphrase</source>
<translation>Powtórz nowe hasło</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Wprowadź nowe hasło dla portfela.<br/>Proszę użyć hasła składającego się z <b>10 lub więcej losowych znaków</b> lub <b>ośmiu lub więcej słów</b>.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="34"/>
<source>Encrypt wallet</source>
<translation>Zaszyfruj portfel</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="37"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ta operacja wymaga hasła do portfela ażeby odblokować portfel.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="42"/>
<source>Unlock wallet</source>
<translation>Odblokuj portfel</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="45"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ta operacja wymaga hasła do portfela ażeby odszyfrować portfel.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="50"/>
<source>Decrypt wallet</source>
<translation>Odszyfruj portfel</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="53"/>
<source>Change passphrase</source>
<translation>Zmień hasło</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="54"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Podaj stare i nowe hasło do portfela.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="100"/>
<source>Confirm wallet encryption</source>
<translation>Potwierdź szyfrowanie portfela</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="101"/>
<source>WARNING: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!
Are you sure you wish to encrypt your wallet?</source>
<translation>OSTRZEŻENIE: Jeśli zaszyfrujesz portfel i zgubisz hasło, wtedy <b>STRACISZ WSZYSTKIE SWOJE BITMONETY</b>
Czy na pewno chcesz zaszyfrować swój portfel?</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="110"/>
<location filename="../askpassphrasedialog.cpp" line="159"/>
<source>Wallet encrypted</source>
<translation>Portfel zaszyfrowany</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="111"/>
<source>Kickasscoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your bitcoins from being stolen by malware infecting your computer.</source>
<translation>Program Kickasscoin zamknie się aby dokończyć proces szyfrowania. Pamiętaj, że szyfrowanie portfela nie zabezpiecza w pełni Twoich bitcoinów przed kradzieżą przez wirusy lub trojany mogące zainfekować Twój komputer.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="207"/>
<location filename="../askpassphrasedialog.cpp" line="231"/>
<source>Warning: The Caps Lock key is on.</source>
<translation>Ostrzeżenie: Caps Lock jest włączony.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="116"/>
<location filename="../askpassphrasedialog.cpp" line="123"/>
<location filename="../askpassphrasedialog.cpp" line="165"/>
<location filename="../askpassphrasedialog.cpp" line="171"/>
<source>Wallet encryption failed</source>
<translation>Szyfrowanie portfela nie powiodło się</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="117"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Szyfrowanie portfela nie powiodło się z powodu wewnętrznego błędu. Twój portfel nie został zaszyfrowany.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="124"/>
<location filename="../askpassphrasedialog.cpp" line="172"/>
<source>The supplied passphrases do not match.</source>
<translation>Podane hasła nie są takie same.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="135"/>
<source>Wallet unlock failed</source>
<translation>Odblokowanie portfela nie powiodło się</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="136"/>
<location filename="../askpassphrasedialog.cpp" line="147"/>
<location filename="../askpassphrasedialog.cpp" line="166"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Wprowadzone hasło do odszyfrowania portfela jest niepoprawne.</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="146"/>
<source>Wallet decryption failed</source>
<translation>Odszyfrowywanie portfela nie powiodło się</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="160"/>
<source>Wallet passphrase was succesfully changed.</source>
<translation>Hasło do portfela zostało pomyślnie zmienione.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="73"/>
<source>Kickasscoin Wallet</source>
<translation>Portfel Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="215"/>
<source>Sign &message...</source>
<translation>Podpisz wiado&mość...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="248"/>
<source>Show/Hide &Kickasscoin</source>
<translation>Pokaż/Ukryj &Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="515"/>
<source>Synchronizing with network...</source>
<translation>Synchronizacja z siecią...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="185"/>
<source>&Overview</source>
<translation>P&odsumowanie</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="186"/>
<source>Show general overview of wallet</source>
<translation>Pokazuje ogólny zarys portfela</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="191"/>
<source>&Transactions</source>
<translation>&Transakcje</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="192"/>
<source>Browse transaction history</source>
<translation>Przeglądaj historię transakcji</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="197"/>
<source>&Address Book</source>
<translation>Książka &adresowa</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="198"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Edytuj listę zapisanych adresów i i etykiet</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="203"/>
<source>&Receive coins</source>
<translation>Odbie&rz monety</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="204"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Pokaż listę adresów do otrzymywania płatności</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="209"/>
<source>&Send coins</source>
<translation>Wy&syłka monet</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="216"/>
<source>Prove you control an address</source>
<translation>Udowodnij, że kontrolujesz adres</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="235"/>
<source>E&xit</source>
<translation>&Zakończ</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="236"/>
<source>Quit application</source>
<translation>Zamknij program</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="239"/>
<source>&About %1</source>
<translation>&O %1</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="240"/>
<source>Show information about Kickasscoin</source>
<translation>Pokaż informację o Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="242"/>
<source>About &Qt</source>
<translation>O &Qt</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="243"/>
<source>Show information about Qt</source>
<translation>Pokazuje informacje o Qt</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="245"/>
<source>&Options...</source>
<translation>&Opcje...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="252"/>
<source>&Encrypt Wallet...</source>
<translation>Zaszyfruj Portf&el</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="255"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="257"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="517"/>
<source>~%n block(s) remaining</source>
<translation><numerusform>pozostał ~%n blok</numerusform><numerusform>pozostało ~%n bloki</numerusform><numerusform>pozostało ~%n bloków</numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="528"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="250"/>
<source>&Export...</source>
<translation>&Eksportuj...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="210"/>
<source>Send coins to a Kickasscoin address</source>
<translation>Wyślij monety na adres Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="246"/>
<source>Modify configuration options for Kickasscoin</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="249"/>
<source>Show or hide the Kickasscoin window</source>
<translation>Pokaż lub ukryj okno Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="251"/>
<source>Export the data in the current tab to a file</source>
<translation>Eksportuj dane z aktywnej karty do pliku</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="253"/>
<source>Encrypt or decrypt wallet</source>
<translation>Zaszyfruj lub odszyfruj portfel</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="256"/>
<source>Backup wallet to another location</source>
<translation>Zapasowy portfel w innej lokalizacji</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="258"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Zmień hasło użyte do szyfrowania portfela</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="259"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="260"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="261"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="262"/>
<source>Verify a message signature</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoingui.cpp" line="286"/>
<source>&File</source>
<translation>&Plik</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="296"/>
<source>&Settings</source>
<translation>P&referencje</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="302"/>
<source>&Help</source>
<translation>Pomo&c</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="311"/>
<source>Tabs toolbar</source>
<translation>Pasek zakładek</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="322"/>
<source>Actions toolbar</source>
<translation>Pasek akcji</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="334"/>
<location filename="../bitcoingui.cpp" line="343"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="343"/>
<location filename="../bitcoingui.cpp" line="399"/>
<source>Kickasscoin client</source>
<translation>Kickasscoin klient</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="492"/>
<source>%n active connection(s) to Kickasscoin network</source>
<translation><numerusform>%n aktywne połączenie do sieci Kickasscoin</numerusform><numerusform>%n aktywne połączenia do sieci Kickasscoin</numerusform><numerusform>%n aktywnych połączeń do sieci Kickasscoin</numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="540"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation>Pobrano %1 bloków z historią transakcji.</translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="555"/>
<source>%n second(s) ago</source>
<translation><numerusform>%n sekundę temu</numerusform><numerusform>%n sekundy temu</numerusform><numerusform>%n sekund temu</numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="559"/>
<source>%n minute(s) ago</source>
<translation><numerusform>%n minutę temu</numerusform><numerusform>%n minuty temu</numerusform><numerusform>%n minut temu</numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="563"/>
<source>%n hour(s) ago</source>
<translation><numerusform>%n godzinę temu</numerusform><numerusform>%n godziny temu</numerusform><numerusform>%n godzin temu</numerusform></translation>
</message>
<message numerus="yes">
<location filename="../bitcoingui.cpp" line="567"/>
<source>%n day(s) ago</source>
<translation><numerusform>%n dzień temu</numerusform><numerusform>%n dni temu</numerusform><numerusform>%n dni temu</numerusform></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="573"/>
<source>Up to date</source>
<translation>Aktualny</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="580"/>
<source>Catching up...</source>
<translation>Łapanie bloków...</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="590"/>
<source>Last received block was generated %1.</source>
<translation>Ostatnio otrzymany blok została wygenerowany %1.</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="649"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Transakcja przekracza limit. Możesz wysłać ją płacąc prowizję %1, która zostaje przekazana do węzłów, które ją prześlą i pomoże wspierać sieć Kickasscoin. Czy chcesz zapłacić prowizję?</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="654"/>
<source>Confirm transaction fee</source>
<translation>Potwierdź prowizję transakcyjną</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="681"/>
<source>Sent transaction</source>
<translation>Transakcja wysłana</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="682"/>
<source>Incoming transaction</source>
<translation>Transakcja przychodząca</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="683"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Kwota: %2
Typ: %3
Adres: %4
</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="804"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Portfel jest <b>zaszyfrowany</b> i obecnie <b>niezablokowany</b></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="812"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Portfel jest <b>zaszyfrowany</b> i obecnie <b>zablokowany</b></translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="835"/>
<source>Backup Wallet</source>
<translation>Kopia Zapasowa Portfela</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="835"/>
<source>Wallet Data (*.dat)</source>
<translation>Dane Portfela (*.dat)</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="838"/>
<source>Backup Failed</source>
<translation>Kopia Zapasowa Nie Została Wykonana</translation>
</message>
<message>
<location filename="../bitcoingui.cpp" line="838"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Wystąpił błąd podczas próby zapisu portfela do nowej lokalizacji.</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="112"/>
<source>A fatal error occured. Kickasscoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="84"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>DisplayOptionsPage</name>
<message>
<location filename="../optionsdialog.cpp" line="246"/>
<source>Display</source>
<translation>Wyświetlanie</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="257"/>
<source>default</source>
<translation>domyślny</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="263"/>
<source>The user interface language can be set here. This setting will only take effect after restarting Kickasscoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="252"/>
<source>User Interface &Language:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="273"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="277"/>
<source>Choose the default subdivision unit to show in the interface, and when sending coins</source>
<translation>Wybierz podział jednostki pokazywany w interfejsie oraz podczas wysyłania monet</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="284"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="285"/>
<source>Whether to show Kickasscoin addresses in the transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="303"/>
<source>Warning</source>
<translation>Ostrzeżenie</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="303"/>
<source>This setting will take effect after restarting Kickasscoin.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="14"/>
<source>Edit Address</source>
<translation>Edytuj adres</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="25"/>
<source>&Label</source>
<translation>&Etykieta</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="35"/>
<source>The label associated with this address book entry</source>
<translation>Etykieta skojarzona z tym wpisem w książce adresowej</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="42"/>
<source>&Address</source>
<translation>&Adres</translation>
</message>
<message>
<location filename="../forms/editaddressdialog.ui" line="52"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Ten adres jest skojarzony z wpisem w książce adresowej. Może być zmodyfikowany jedynie dla adresów wysyłających.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="20"/>
<source>New receiving address</source>
<translation>Nowy adres odbiorczy</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="24"/>
<source>New sending address</source>
<translation>Nowy adres wysyłania</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="27"/>
<source>Edit receiving address</source>
<translation>Edytuj adres odbioru</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="31"/>
<source>Edit sending address</source>
<translation>Edytuj adres wysyłania</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="91"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Wprowadzony adres "%1" już istnieje w książce adresowej.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="96"/>
<source>The entered address "%1" is not a valid Kickasscoin address.</source>
<translation>Wprowadzony adres "%1" nie jest poprawnym adresem Kickasscoin.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="101"/>
<source>Could not unlock wallet.</source>
<translation>Nie można było odblokować portfela.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="106"/>
<source>New key generation failed.</source>
<translation>Tworzenie nowego klucza nie powiodło się.</translation>
</message>
</context>
<context>
<name>HelpMessageBox</name>
<message>
<location filename="../bitcoin.cpp" line="133"/>
<location filename="../bitcoin.cpp" line="143"/>
<source>Kickasscoin-Qt</source>
<translation>Kickasscoin-Qt</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="133"/>
<source>version</source>
<translation>wersja</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="135"/>
<source>Usage:</source>
<translation>Użycie:</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="136"/>
<source>options</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="138"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="139"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Ustaw Język, na przykład "pl_PL" (domyślnie: systemowy)</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="140"/>
<source>Start minimized</source>
<translation>Uruchom zminimalizowany</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="141"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Pokazuj okno powitalne przy starcie (domyślnie: 1)</translation>
</message>
</context>
<context>
<name>MainOptionsPage</name>
<message>
<location filename="../optionsdialog.cpp" line="227"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="212"/>
<source>Pay transaction &fee</source>
<translation>Płać prowizję za t&ransakcje</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="204"/>
<source>Main</source>
<translation>Główny</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="206"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation>Opcjonalna prowizja za transakcje za kB, wspomaga ona szybkość przebiegu transakcji. Większość transakcji jest 1 kB. Zalecana prowizja 0.01 .</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="222"/>
<source>&Start Kickasscoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="223"/>
<source>Automatically start Kickasscoin after logging in to the system</source>
<translation>Automatycznie uruchom Kickasscoin po zalogowaniu do systemu</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="226"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MessagePage</name>
<message>
<location filename="../forms/messagepage.ui" line="14"/>
<source>Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/messagepage.ui" line="20"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Możesz podpisywać wiadomości swoimi adresami aby udowodnić, że jesteś ich właścicielem. Uważaj, aby nie podpisywać niczego co wzbudza Twoje podejrzenia, ponieważ ktoś może stosować phishing próbując nakłonić Cię do ich podpisania. Akceptuj i podpisuj tylko w pełni zrozumiałe komunikaty i wiadomości.</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="38"/>
<source>The address to sign the message with (e.g. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/messagepage.ui" line="48"/>
<source>Choose adress from address book</source>
<translation>Wybierz adres z książki adresowej</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="58"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="71"/>
<source>Paste address from clipboard</source>
<translation>Wklej adres ze schowka</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="81"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="93"/>
<source>Enter the message you want to sign here</source>
<translation>Wprowadź wiadomość, którą chcesz podpisać, tutaj</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="128"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/messagepage.ui" line="131"/>
<source>&Copy Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/messagepage.ui" line="142"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/messagepage.ui" line="145"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../messagepage.cpp" line="31"/>
<source>Click "Sign Message" to get signature</source>
<translation>Kliknij "Podpisz Wiadomość" żeby uzyskać podpis</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="114"/>
<source>Sign a message to prove you own this address</source>
<translation>Podpisz wiadomość aby dowieść, że ten adres jest twój</translation>
</message>
<message>
<location filename="../forms/messagepage.ui" line="117"/>
<source>&Sign Message</source>
<translation>Podpi&sz Wiadomość</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="30"/>
<source>Enter a Kickasscoin address (e.g. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</source>
<translation>Wprowadź adres Kickasscoin (np. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="83"/>
<location filename="../messagepage.cpp" line="90"/>
<location filename="../messagepage.cpp" line="105"/>
<location filename="../messagepage.cpp" line="117"/>
<source>Error signing</source>
<translation>Błąd podpisywania</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="83"/>
<source>%1 is not a valid address.</source>
<translation>%1 nie jest poprawnym adresem.</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="90"/>
<source>%1 does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../messagepage.cpp" line="105"/>
<source>Private key for %1 is not available.</source>
<translation>Klucz prywatny dla %1 jest niedostępny.</translation>
</message>
<message>
<location filename="../messagepage.cpp" line="117"/>
<source>Sign failed</source>
<translation>Podpisywanie nie powiodło się.</translation>
</message>
</context>
<context>
<name>NetworkOptionsPage</name>
<message>
<location filename="../optionsdialog.cpp" line="345"/>
<source>Network</source>
<translation>Sieć</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="347"/>
<source>Map port using &UPnP</source>
<translation>Mapuj port używając &UPnP</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="348"/>
<source>Automatically open the Kickasscoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Automatycznie otwiera port klienta Kickasscoin na routerze. Ta opcja dzieła tylko jeśli twój router wspiera UPnP i jest ono włączone.</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="351"/>
<source>&Connect through SOCKS4 proxy:</source>
<translation>Połącz przez proxy SO&CKS4:</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="352"/>
<source>Connect to the Bitcon network through a SOCKS4 proxy (e.g. when connecting through Tor)</source>
<translation>Łączy się z siecią Kickasscoin przez proxy SOCKS4 (np. kiedy łączysz się przez Tor)</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="357"/>
<source>Proxy &IP:</source>
<translation>Proxy &IP: </translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="366"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="363"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Adres IP serwera proxy (np. 127.0.0.1)</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="372"/>
<source>Port of the proxy (e.g. 1234)</source>
<translation>Port proxy (np. 1234)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../optionsdialog.cpp" line="135"/>
<source>Options</source>
<translation>Opcje</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="14"/>
<source>Form</source>
<translation>Formularz</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="47"/>
<location filename="../forms/overviewpage.ui" line="204"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Kickasscoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="89"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="147"/>
<source>Number of transactions:</source>
<translation>Liczba transakcji:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="118"/>
<source>Unconfirmed:</source>
<translation>Niepotwierdzony:</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="40"/>
<source>Wallet</source>
<translation>Portfel</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="197"/>
<source><b>Recent transactions</b></source>
<translation><b>Ostatnie transakcje</b></translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="105"/>
<source>Your current balance</source>
<translation>Twoje obecne saldo</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="134"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Suma transakcji, które nie zostały jeszcze potwierdzone, i które nie zostały wliczone do twojego obecnego salda</translation>
</message>
<message>
<location filename="../forms/overviewpage.ui" line="154"/>
<source>Total number of transactions in wallet</source>
<translation>Całkowita liczba transakcji w portfelu</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="110"/>
<location filename="../overviewpage.cpp" line="111"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="14"/>
<source>QR Code Dialog</source>
<translation>Okno Dialogowe Kodu QR</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="32"/>
<source>QR Code</source>
<translation>Kod QR</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="55"/>
<source>Request Payment</source>
<translation>Prośba o płatność</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="70"/>
<source>Amount:</source>
<translation>Kwota:</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="105"/>
<source>KIK</source>
<translation>KIK</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="121"/>
<source>Label:</source>
<translation>Etykieta:</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="144"/>
<source>Message:</source>
<translation>Wiadomość:</translation>
</message>
<message>
<location filename="../forms/qrcodedialog.ui" line="186"/>
<source>&Save As...</source>
<translation>Zapi&sz jako...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="45"/>
<source>Error encoding URI into QR Code.</source>
<translation>Błąd kodowania URI w Kodzie QR.</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="63"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="120"/>
<source>Save QR Code</source>
<translation>Zapisz Kod QR</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="120"/>
<source>PNG Images (*.png)</source>
<translation>Obraz PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="14"/>
<source>Kickasscoin debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="46"/>
<source>Client name</source>
<translation>Nazwa klienta</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="56"/>
<location filename="../forms/rpcconsole.ui" line="79"/>
<location filename="../forms/rpcconsole.ui" line="102"/>
<location filename="../forms/rpcconsole.ui" line="125"/>
<location filename="../forms/rpcconsole.ui" line="161"/>
<location filename="../forms/rpcconsole.ui" line="214"/>
<location filename="../forms/rpcconsole.ui" line="237"/>
<location filename="../forms/rpcconsole.ui" line="260"/>
<location filename="../rpcconsole.cpp" line="245"/>
<source>N/A</source>
<translation>NIEDOSTĘPNE</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="69"/>
<source>Client version</source>
<translation>Wersja klienta</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="24"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="39"/>
<source>Client</source>
<translation>Klient</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="115"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="144"/>
<source>Network</source>
<translation>Sieć</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="151"/>
<source>Number of connections</source>
<translation>Liczba połączeń</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="174"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="197"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="204"/>
<source>Current number of blocks</source>
<translation>Aktualna liczba bloków</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="227"/>
<source>Estimated total blocks</source>
<translation>Szacowana ilość bloków</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="250"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="292"/>
<source>Debug logfile</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="299"/>
<source>Open the Kickasscoin debug logfile from the current data directory. This can take a few seconds for large logfiles.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="302"/>
<source>&Open</source>
<translation>&Otwórz</translation>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="323"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="92"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/rpcconsole.ui" line="372"/>
<source>Clear console</source>
<translation>Wyczyść konsole</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="212"/>
<source>Welcome to the Kickasscoin RPC console.</source>
<translation>Witam w konsoli Kickasscoin RPC</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="213"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="214"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="14"/>
<location filename="../sendcoinsdialog.cpp" line="122"/>
<location filename="../sendcoinsdialog.cpp" line="127"/>
<location filename="../sendcoinsdialog.cpp" line="132"/>
<location filename="../sendcoinsdialog.cpp" line="137"/>
<location filename="../sendcoinsdialog.cpp" line="143"/>
<location filename="../sendcoinsdialog.cpp" line="148"/>
<location filename="../sendcoinsdialog.cpp" line="153"/>
<source>Send Coins</source>
<translation>Wyślij płatność</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="64"/>
<source>Send to multiple recipients at once</source>
<translation>Wyślij do wielu odbiorców na raz</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="67"/>
<source>&Add Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="84"/>
<source>Remove all transaction fields</source>
<translation>Wyczyść wszystkie pola transakcji</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="87"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="106"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="113"/>
<source>123.456 KIK</source>
<translation>123.456 KIK</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="144"/>
<source>Confirm the send action</source>
<translation>Potwierdź akcję wysyłania</translation>
</message>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="147"/>
<source>&Send</source>
<translation>Wy&syłka</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="94"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> do %2 (%3)</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="99"/>
<source>Confirm send coins</source>
<translation>Potwierdź wysyłanie monet</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="100"/>
<source>Are you sure you want to send %1?</source>
<translation>Czy na pewno chcesz wysłać %1?</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="100"/>
<source> and </source>
<translation> i </translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="123"/>
<source>The recepient address is not valid, please recheck.</source>
<translation>Adres odbiorcy jest niepoprawny, proszę go sprawdzić.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="128"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Kwota do zapłacenie musi być większa od 0.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="133"/>
<source>The amount exceeds your balance.</source>
<translation>Kwota przekracza twoje saldo.</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="138"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="144"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="149"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="154"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="14"/>
<source>Form</source>
<translation>Formularz</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="29"/>
<source>A&mount:</source>
<translation>Su&ma:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="42"/>
<source>Pay &To:</source>
<translation>Płać &Do:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="66"/>
<location filename="../sendcoinsentry.cpp" line="25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Wprowadź etykietę dla tego adresu by dodać go do książki adresowej</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="75"/>
<source>&Label:</source>
<translation>&Etykieta:</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="93"/>
<source>The address to send the payment to (e.g. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</source>
        <translation>Adres, na który zostanie wysłana płatność (np. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="103"/>
<source>Choose address from address book</source>
<translation>Wybierz adres z książki adresowej</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="113"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="120"/>
<source>Paste address from clipboard</source>
<translation>Wklej adres ze schowka</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="130"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location filename="../forms/sendcoinsentry.ui" line="137"/>
<source>Remove this recipient</source>
        <translation>Usuń tego odbiorcę</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="26"/>
<source>Enter a Kickasscoin address (e.g. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</source>
<translation>Wprowadź adres Kickasscoin (np. FShM3UpgFZnXzJ5zux1AWQvAE36rFJKMs7)</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="21"/>
<source>Open for %1 blocks</source>
<translation>Otwórz dla %1 bloków</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="23"/>
<source>Open until %1</source>
<translation>Otwórz do %1</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="29"/>
<source>%1/offline?</source>
<translation>%1/offline?</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="31"/>
<source>%1/unconfirmed</source>
<translation>%1/niezatwierdzone</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="33"/>
<source>%1 confirmations</source>
<translation>%1 potwierdzeń</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="51"/>
<source><b>Status:</b> </source>
<translation><b>Status:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="56"/>
<source>, has not been successfully broadcast yet</source>
<translation>, nie został jeszcze pomyślnie wyemitowany</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="58"/>
<source>, broadcast through %1 node</source>
<translation>, emitowany przez %1 węzeł</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="60"/>
<source>, broadcast through %1 nodes</source>
<translation>, emitowany przez %1 węzły</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="64"/>
<source><b>Date:</b> </source>
<translation><b>Data:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="71"/>
<source><b>Source:</b> Generated<br></source>
<translation><b>Źródło:</b> Wygenerowano<br></translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="77"/>
<location filename="../transactiondesc.cpp" line="94"/>
<source><b>From:</b> </source>
<translation><b>Od:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="94"/>
<source>unknown</source>
<translation>nieznany</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="95"/>
<location filename="../transactiondesc.cpp" line="118"/>
<location filename="../transactiondesc.cpp" line="178"/>
<source><b>To:</b> </source>
<translation><b>Do:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="98"/>
<source> (yours, label: </source>
<translation> (twoje, etykieta: </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="100"/>
<source> (yours)</source>
<translation> (twoje)</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="136"/>
<location filename="../transactiondesc.cpp" line="150"/>
<location filename="../transactiondesc.cpp" line="195"/>
<location filename="../transactiondesc.cpp" line="212"/>
<source><b>Credit:</b> </source>
        <translation><b>Uznanie:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="138"/>
<source>(%1 matures in %2 more blocks)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../transactiondesc.cpp" line="142"/>
<source>(not accepted)</source>
<translation>(niezaakceptowane)</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="186"/>
<location filename="../transactiondesc.cpp" line="194"/>
<location filename="../transactiondesc.cpp" line="209"/>
<source><b>Debit:</b> </source>
<translation><b>Debet:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="200"/>
<source><b>Transaction fee:</b> </source>
<translation><b>Prowizja transakcyjna:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="216"/>
<source><b>Net amount:</b> </source>
<translation><b>Kwota netto:</b> </translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="222"/>
<source>Message:</source>
<translation>Wiadomość:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="224"/>
<source>Comment:</source>
<translation>Komentarz:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="226"/>
<source>Transaction ID:</source>
<translation>ID transakcji:</translation>
</message>
<message>
<location filename="../transactiondesc.cpp" line="229"/>
<source>Generated coins must wait 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, it will change to "not accepted" and not be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
        <translation>Wygenerowane monety muszą zaczekać 120 bloków, zanim będzie można je wydać. Kiedy wygenerowałeś ten blok, został on wyemitowany do sieci, aby dodać go do łańcucha bloków. Jeśli to się nie powiedzie, nie zostanie on zaakceptowany i wygenerowanych monet nie będzie można wydać. Może się to czasami zdarzyć, jeśli inny węzeł wygeneruje blok tuż przed tobą.</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="14"/>
<source>Transaction details</source>
<translation>Szczegóły transakcji</translation>
</message>
<message>
<location filename="../forms/transactiondescdialog.ui" line="20"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ten panel pokazuje szczegółowy opis transakcji</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="226"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="226"/>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="226"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="226"/>
<source>Amount</source>
<translation>Kwota</translation>
</message>
<message numerus="yes">
<location filename="../transactiontablemodel.cpp" line="281"/>
<source>Open for %n block(s)</source>
<translation><numerusform>Otwórz dla %n bloku</numerusform><numerusform>Otwórz dla %n bloków</numerusform><numerusform>Otwórz dla %n bloków</numerusform></translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="284"/>
<source>Open until %1</source>
<translation>Otwórz do %1</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="287"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 potwierdzeń)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="290"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Niezatwierdzony (%1 z %2 potwierdzeń)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="293"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Zatwierdzony (%1 potwierdzeń)</translation>
</message>
<message numerus="yes">
<location filename="../transactiontablemodel.cpp" line="301"/>
<source>Mined balance will be available in %n more blocks</source>
        <translation><numerusform>Wydobyta kwota będzie dostępna za %n blok</numerusform><numerusform>Wydobyta kwota będzie dostępna za %n bloki</numerusform><numerusform>Wydobyta kwota będzie dostępna za %n bloków</numerusform></translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="307"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Ten blok nie został odebrany przez jakikolwiek inny węzeł i prawdopodobnie nie zostanie zaakceptowany!</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="310"/>
<source>Generated but not accepted</source>
<translation>Wygenerowano ale nie zaakceptowano</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="353"/>
<source>Received with</source>
<translation>Otrzymane przez</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="355"/>
<source>Received from</source>
<translation>Odebrano od</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="358"/>
<source>Sent to</source>
<translation>Wysłano do</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="360"/>
<source>Payment to yourself</source>
<translation>Płatność do siebie</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="362"/>
<source>Mined</source>
<translation>Wydobyto</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="400"/>
<source>(n/a)</source>
<translation>(brak)</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="599"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status transakcji. Najedź na pole, aby zobaczyć liczbę potwierdzeń.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="601"/>
<source>Date and time that the transaction was received.</source>
<translation>Data i czas odebrania transakcji.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="603"/>
<source>Type of transaction.</source>
<translation>Rodzaj transakcji.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="605"/>
<source>Destination address of transaction.</source>
<translation>Adres docelowy transakcji.</translation>
</message>
<message>
<location filename="../transactiontablemodel.cpp" line="607"/>
<source>Amount removed from or added to balance.</source>
        <translation>Kwota odjęta od salda lub dodana do niego.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="55"/>
<location filename="../transactionview.cpp" line="71"/>
<source>All</source>
<translation>Wszystko</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="56"/>
<source>Today</source>
<translation>Dzisiaj</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="57"/>
<source>This week</source>
<translation>W tym tygodniu</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="58"/>
<source>This month</source>
<translation>W tym miesiącu</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="59"/>
<source>Last month</source>
<translation>W zeszłym miesiącu</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="60"/>
<source>This year</source>
<translation>W tym roku</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="61"/>
<source>Range...</source>
<translation>Zakres...</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="72"/>
<source>Received with</source>
<translation>Otrzymane przez</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="74"/>
<source>Sent to</source>
<translation>Wysłano do</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="76"/>
<source>To yourself</source>
<translation>Do siebie</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="77"/>
<source>Mined</source>
<translation>Wydobyto</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="78"/>
<source>Other</source>
<translation>Inne</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="85"/>
<source>Enter address or label to search</source>
<translation>Wprowadź adres albo etykietę żeby wyszukać</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="92"/>
<source>Min amount</source>
        <translation>Min. kwota</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="126"/>
<source>Copy address</source>
<translation>Kopiuj adres</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="127"/>
<source>Copy label</source>
<translation>Kopiuj etykietę</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="128"/>
<source>Copy amount</source>
<translation>Kopiuj kwotę</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="129"/>
<source>Edit label</source>
<translation>Edytuj etykietę</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="130"/>
<source>Show transaction details</source>
<translation>Pokaż szczegóły transakcji</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="270"/>
<source>Export Transaction Data</source>
<translation>Eksportuj Dane Transakcyjne</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="271"/>
<source>Comma separated file (*.csv)</source>
        <translation>Plik rozdzielany przecinkami (*.csv)</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="279"/>
<source>Confirmed</source>
<translation>Potwierdzony</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="280"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="281"/>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="282"/>
<source>Label</source>
<translation>Etykieta</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="283"/>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="284"/>
<source>Amount</source>
<translation>Kwota</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="285"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="289"/>
<source>Error exporting</source>
<translation>Błąd podczas eksportowania</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="289"/>
<source>Could not write to file %1.</source>
<translation>Błąd zapisu do pliku %1.</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="384"/>
<source>Range:</source>
<translation>Zakres:</translation>
</message>
<message>
<location filename="../transactionview.cpp" line="392"/>
<source>to</source>
<translation>do</translation>
</message>
</context>
<context>
<name>VerifyMessageDialog</name>
<message>
<location filename="../forms/verifymessagedialog.ui" line="14"/>
<source>Verify Signed Message</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="20"/>
<source>Enter the message and signature below (be careful to correctly copy newlines, spaces, tabs and other invisible characters) to obtain the Kickasscoin address used to sign the message.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="62"/>
<source>Verify a message and obtain the Kickasscoin address used to sign the message</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="65"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="79"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Skopiuj aktualnie wybrany adres do schowka</translation>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="82"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="93"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/verifymessagedialog.ui" line="96"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="28"/>
<source>Enter Kickasscoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="29"/>
<source>Click "Verify Message" to obtain address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="55"/>
<location filename="../verifymessagedialog.cpp" line="62"/>
<source>Invalid Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="55"/>
<source>The signature could not be decoded. Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="62"/>
<source>The signature did not match the message digest. Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="72"/>
<source>Address not found in address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../verifymessagedialog.cpp" line="72"/>
<source>Address found in address book: %1</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="158"/>
<source>Sending...</source>
<translation>Wysyłanie...</translation>
</message>
</context>
<context>
<name>WindowOptionsPage</name>
<message>
<location filename="../optionsdialog.cpp" line="313"/>
<source>Window</source>
<translation>Okno</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="316"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimalizuj do paska przy zegarku zamiast do paska zadań</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="317"/>
<source>Show only a tray icon after minimizing the window</source>
<translation>Pokazuje tylko ikonę przy zegarku po zminimalizowaniu okna</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="320"/>
<source>M&inimize on close</source>
<translation>M&inimalizuj przy zamknięciu</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="321"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
        <translation>Minimalizuje zamiast zakończyć działanie programu przy zamykaniu okna. Kiedy ta opcja jest włączona, program zakończy działanie tylko po wybraniu Zamknij w menu.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="43"/>
<source>Kickasscoin version</source>
<translation>Wersja Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="44"/>
<source>Usage:</source>
<translation>Użycie:</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="45"/>
<source>Send command to -server or bitcoind</source>
<translation>Wyślij polecenie do -server lub bitcoind</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="46"/>
<source>List commands</source>
<translation>Lista poleceń</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="47"/>
<source>Get help for a command</source>
<translation>Uzyskaj pomoc do polecenia</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="49"/>
<source>Options:</source>
<translation>Opcje:</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="50"/>
<source>Specify configuration file (default: bitcoin.conf)</source>
<translation>Wskaż plik konfiguracyjny (domyślnie: bitcoin.conf)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="51"/>
<source>Specify pid file (default: bitcoind.pid)</source>
        <translation>Wskaż plik pid (domyślnie: bitcoind.pid)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="52"/>
<source>Generate coins</source>
<translation>Generuj monety</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="53"/>
<source>Don't generate coins</source>
<translation>Nie generuj monet</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="54"/>
<source>Specify data directory</source>
<translation>Wskaż folder danych</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="55"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="56"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="57"/>
<source>Specify connection timeout (in milliseconds)</source>
        <translation>Określ limit czasu połączenia (w milisekundach)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="63"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation>Nasłuchuj połączeń na <port> (domyślnie: 8333 lub testnet: 18333)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="64"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Utrzymuj maksymalnie <n> połączeń z peerami (domyślnie: 125)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="66"/>
<source>Connect only to the specified node</source>
<translation>Łącz tylko do wskazanego węzła</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="67"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="68"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="69"/>
<source>Only connect to nodes in network <net> (IPv4 or IPv6)</source>
<translation>Łącz tylko z węzłami w sieci <net> (IPv4 lub IPv6)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="70"/>
<source>Try to discover public IP address (default: 1)</source>
        <translation>Spróbuj wykryć publiczny adres IP (domyślnie: 1)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="73"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="75"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="76"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="79"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 10000)</source>
<translation>Maksymalny bufor odbioru na połączenie, <n>*1000 bajtów (domyślnie: 10000)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="80"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 10000)</source>
<translation>Maksymalny bufor wysyłu na połączenie, <n>*1000 bajtów (domyślnie: 10000)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="83"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="86"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Akceptuj linię poleceń oraz polecenia JSON-RPC</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="87"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Uruchom w tle jako daemon i przyjmuj polecenia</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="88"/>
<source>Use the test network</source>
<translation>Użyj sieci testowej</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="89"/>
<source>Output extra debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="90"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="91"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Wyślij informację/raport do konsoli zamiast do pliku debug.log.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="92"/>
<source>Send trace/debug info to debugger</source>
<translation>Wyślij informację/raport do debuggera.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="93"/>
<source>Username for JSON-RPC connections</source>
<translation>Nazwa użytkownika dla połączeń JSON-RPC</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="94"/>
<source>Password for JSON-RPC connections</source>
<translation>Hasło do połączeń JSON-RPC</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="95"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332)</source>
<translation>Nasłuchuj połączeń JSON-RPC na <port> (domyślnie: 8332)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="96"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Przyjmuj połączenia JSON-RPC ze wskazanego adresu IP</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="97"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Wysyłaj polecenia do węzła działającego na <ip> (domyślnie: 127.0.0.1)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="98"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="101"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="102"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Ustaw rozmiar puli kluczy na <n> (domyślnie: 100)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="103"/>
<source>Rescan the block chain for missing wallet transactions</source>
        <translation>Przeskanuj łańcuch bloków, aby znaleźć brakujące transakcje portfela</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="104"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="105"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="106"/>
        <source>Imports blocks from external blk000?.dat file</source>
        <translation type="unfinished"/>
    </message>
    <message>
        <location filename="../bitcoinstrings.cpp" line="108"/>
<source>
SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>
opcje SSL: (instrukcje konfiguracji SSL znajdziesz na Bitcoin Wiki)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="111"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Użyj OpenSSL (https) do połączeń JSON-RPC</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="112"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Plik certyfikatu serwera (domyślnie: server.cert)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="113"/>
<source>Server private key (default: server.pem)</source>
<translation>Klucz prywatny serwera (domyślnie: server.pem)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="114"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
        <translation>Akceptowalne szyfry (domyślnie: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="145"/>
<source>Warning: Disk space is low</source>
<translation>Ostrzeżenie: mało miejsca na dysku</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="107"/>
<source>This help message</source>
<translation>Ta wiadomość pomocy</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="121"/>
<source>Cannot obtain a lock on data directory %s. Kickasscoin is probably already running.</source>
<translation>Nie można zablokować folderu danych %s. Kickasscoin prawdopodobnie już działa.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="48"/>
<source>Kickasscoin</source>
<translation>Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="30"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="58"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="59"/>
<source>Select the version of socks proxy to use (4 or 5, 5 is default)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="60"/>
<source>Do not use proxy for connections to network <net> (IPv4 or IPv6)</source>
<translation>Nie używaj proxy do połączeń z siecią <net> (IPv4 lub IPv6)</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="61"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="62"/>
<source>Pass DNS requests to (SOCKS5) proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="142"/>
<source>Loading addresses...</source>
<translation>Wczytywanie adresów...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="132"/>
<source>Error loading blkindex.dat</source>
        <translation>Błąd ładowania blkindex.dat</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="134"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Błąd ładowania wallet.dat: Uszkodzony portfel</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="135"/>
<source>Error loading wallet.dat: Wallet requires newer version of Kickasscoin</source>
<translation>Błąd ładowania wallet.dat: Portfel wymaga nowszej wersji Kickasscoin</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="136"/>
<source>Wallet needed to be rewritten: restart Kickasscoin to complete</source>
<translation>Portfel wymaga przepisania: zrestartuj Kickasscoina żeby ukończyć</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="137"/>
<source>Error loading wallet.dat</source>
<translation>Błąd ładowania wallet.dat</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="124"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="125"/>
<source>Unknown network specified in -noproxy: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="127"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="126"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="128"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="129"/>
<source>Not listening on any port</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="130"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="117"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Nieprawidłowa kwota dla -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="143"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="31"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="32"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="35"/>
<source>Error: Transaction creation failed </source>
<translation>Błąd: Tworzenie transakcji nie powiodło się </translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="36"/>
<source>Sending...</source>
<translation>Wysyłanie...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="37"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
        <translation>Błąd: transakcja została odrzucona. Może się to zdarzyć, gdy monety z Twojego portfela zostały już wydane, na przykład gdy używałeś kopii wallet.dat i monety, które tam wydałeś, nie zostały jeszcze oznaczone jako wydane w portfelu, z którego teraz korzystasz.</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="41"/>
<source>Invalid amount</source>
<translation>Nieprawidłowa kwota</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="42"/>
<source>Insufficient funds</source>
<translation>Niewystarczające środki</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="131"/>
<source>Loading block index...</source>
        <translation>Ładowanie indeksu bloków...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="65"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="28"/>
<source>Unable to bind to %s on this computer. Kickasscoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="71"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="72"/>
<source>Accept connections from outside (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="74"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="81"/>
<source>Use Universal Plug and Play to map the listening port (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="82"/>
<source>Use Universal Plug and Play to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="85"/>
<source>Fee per KB to add to transactions you send</source>
        <translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="118"/>
<source>Warning: -paytxfee is set very high. This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="133"/>
<source>Loading wallet...</source>
<translation>Wczytywanie portfela...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="138"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="139"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="140"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="141"/>
<source>Rescanning...</source>
<translation>Ponowne skanowanie...</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="144"/>
<source>Done loading</source>
<translation>Wczytywanie zakończone</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="8"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="9"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=bitcoinrpc
rpcpassword=%s
(you do not need to remember this password)
If the file does not exist, create it with owner-readable-only file permissions.
</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="18"/>
<source>Error</source>
<translation>Błąd</translation>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="19"/>
<source>An error occured while setting up the RPC port %i for listening: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="20"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoinstrings.cpp" line="25"/>
<source>Warning: Please check that your computer's date and time are correct. If your clock is wrong Kickasscoin will not work properly.</source>
        <translation>Ostrzeżenie: Proszę sprawdzić, czy data i czas na tym komputerze są poprawne. Jeśli zegar jest źle ustawiony, Kickasscoin nie będzie działać prawidłowo.</translation>
</message>
</context>
</TS>
</message>
<message> |
// build.rs
use std::io::Error;
// Grpc related files used by tonic are generated here. Those files re-generate for each build
// so it's up to date.
//
// Grpc related files used by grpcio are maintained at src/proto/grpcio. tests/grpc_build.rs makes
// sure they are up to date.
fn main() -> Result<(), Error> {
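    // The proto compilation below only runs when the `gen-tonic` feature is
    // enabled; `build-server`/`build-client` choose which stubs are generated.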
#[cfg(feature = "gen-tonic")]
tonic_build::configure()
.build_server(cfg!(feature = "build-server"))
.build_client(cfg!(feature = "build-client"))
.format(false)
.compile(
&[
"src/proto/opentelemetry-proto/opentelemetry/proto/common/v1/common.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/trace/v1/trace_config.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/collector/trace/v1/trace_service.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/metrics/v1/metrics.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/collector/metrics/v1/metrics_service.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/logs/v1/logs.proto",
"src/proto/opentelemetry-proto/opentelemetry/proto/collector/logs/v1/logs_service.proto",
],
&["src/proto/opentelemetry-proto"],
)?;
Ok(())
}
# main.py
from kivy.core.window import Window
from kivy.uix.textinput import TextInput
__author__ = 'woolly_sammoth'
from kivy.config import Config
Config.set('graphics', 'borderless', '1')
Config.set('graphics', 'resizable', '0')
Config.set('graphics', 'fullscreen', '1')
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.uix.button import Button
from kivy.uix.screenmanager import ScreenManager
from kivy.uix.actionbar import ActionBar
from kivy.uix.screenmanager import SlideTransition
from kivy.uix.popup import Popup
from kivy.lang import Builder
from kivy.clock import Clock
import logging
import time
import utils
import os
import json
import sys
import screens.HomeScreen as HomeScreen
import overrides
class TopActionBar(ActionBar):
def __init__(self, PlungeApp, **kwargs):
super(TopActionBar, self).__init__(**kwargs)
self.PlungeApp = PlungeApp
self.top_action_view = self.ids.top_action_view.__self__
self.top_action_previous = self.ids.top_action_previous.__self__
self.top_settings_button = self.ids.top_settings_button.__self__
self.top_size_button = self.ids.top_size_button.__self__
self.standard_height = self.height
self.top_action_previous.bind(on_release=self.PlungeApp.open_settings)
self.top_settings_button.bind(on_release=self.PlungeApp.open_settings)
return
def minimise(self, override=None):
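        # Swap the home screen between its compact and full layouts; `override`
        # forces a direction instead of reading the current button text.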
        mode = self.top_size_button.text if override is None else override
        if mode == self.PlungeApp.get_string("Minimise"):
self.top_size_button.text = self.PlungeApp.get_string("Maximise")
self.top_action_previous.bind(on_release=self.minimise)
self.PlungeApp.homeScreen.clear_widgets()
            self.PlungeApp.homeScreen.add_widget(self.PlungeApp.homeScreen.min_layout)
            self.PlungeApp.is_min = True
else:
self.top_size_button.text = self.PlungeApp.get_string("Minimise")
self.top_action_previous.color = (1, 1, 1, 1)
self.PlungeApp.homeScreen.clear_widgets()
self.PlungeApp.homeScreen.add_widget(self.PlungeApp.homeScreen.max_layout)
self.PlungeApp.is_min = False
return
class PlungeApp(App):
def __init__(self, **kwargs):
super(PlungeApp, self).__init__(**kwargs)
self.isPopup = False
self.use_kivy_settings = False
self.settings_cls = overrides.SettingsWithCloseButton
self.utils = utils.utils(self)
self.exchanges = ['ccedk', 'poloniex', 'bitcoincoid', 'bter', 'bittrex']
self.active_exchanges = []
self.currencies = ['btc', 'ltc', 'eur', 'usd', 'ppc']
self.active_currencies = []
self.client_running = False
self.is_min = False
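        # First-run scaffolding: make sure the log directory and both JSON data
        # files exist before anything tries to read them.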
if not os.path.isdir('logs'):
os.makedirs('logs')
if not os.path.isfile('api_keys.json'):
api_keys = []
with open('api_keys.json', 'a+') as api_file:
api_file.write(json.dumps(api_keys))
api_file.close()
if not os.path.isfile('user_data.json'):
user_data = {exchange: [] for exchange in self.exchanges}
with open('user_data.json', 'a+') as user_file:
user_file.write(json.dumps(user_data))
user_file.close()
self.first_run = True
self.logger = logging.getLogger('Plunge')
self.logger.setLevel(logging.DEBUG)
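        # Two handlers: full DEBUG output goes to a timestamped log file, while
        # INFO and above is echoed to the console.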
fh = logging.FileHandler('logs/%s_%d.log' % ('Plunge', time.time()))
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
formatter = logging.Formatter(fmt='%(asctime)s %(levelname)s: %(message)s', datefmt="%Y/%m/%d-%H:%M:%S")
fh.setFormatter(formatter)
ch.setFormatter(formatter)
self.logger.addHandler(fh)
self.logger.addHandler(ch)
return
def log_uncaught_exceptions(self, exctype, value, tb):
self.logger.exception('\n===================\nException Caught\n\n%s\n===================\n' % value)
return
def build(self):
self.logger.info("Fetching language from config")
self.language = self.config.get('standard', 'language')
try:
self.lang = json.load(open('res/json/languages/' + self.language.lower() + '.json', 'r'))
except (ValueError, IOError) as e:
self.logger.error('')
self.logger.error('##################################################################')
self.logger.error('')
self.logger.error('There was an Error loading the ' + self.language + ' language file.')
self.logger.error('')
self.logger.error(str(e))
self.logger.error('')
self.logger.error('##################################################################')
raise SystemExit
self.root = BoxLayout(orientation='vertical')
self.mainScreenManager = ScreenManager(transition=SlideTransition(direction='left'))
Builder.load_file('screens/HomeScreen.kv')
self.homeScreen = HomeScreen.HomeScreen(self)
self.mainScreenManager.add_widget(self.homeScreen)
self.topActionBar = TopActionBar(self)
self.root.add_widget(self.topActionBar)
self.root.add_widget(self.mainScreenManager)
self.homeScreen.clear_widgets()
if self.config.getint('standard', 'start_min') == 1:
self.topActionBar.minimise(self.get_string("Minimise"))
self.is_min = True
else:
self.topActionBar.minimise(self.get_string("Maximise"))
self.is_min = False
self.set_monitor()
Window.fullscreen = 1
if self.config.getint('standard', 'show_disclaimer') == 1:
Clock.schedule_once(self.show_disclaimer, 1)
return self.root
def show_disclaimer(self, dt):
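        # One-shot modal shown at startup: OK dismisses it, Cancel quits the app.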
content = BoxLayout(orientation='vertical')
content.add_widget(TextInput(text=self.get_string('Disclaimer_Text'), size_hint=(1, 0.8), font_size=26,
read_only=True, multiline=True, background_color=(0.13725, 0.12157, 0.12549, 1),
foreground_color=(1, 1, 1, 1)))
content.add_widget(BoxLayout(size_hint=(1, 0.1)))
button_layout = BoxLayout(size_hint=(1, 0.1), spacing='20dp')
ok_button = Button(text=self.get_string('OK'), size_hint=(None, None), size=(200, 50))
cancel_button = Button(text=self.get_string('Cancel'), size_hint=(None, None), size=(200, 50))
ok_button.bind(on_press=self.close_popup)
cancel_button.bind(on_press=self.exit)
button_layout.add_widget(ok_button)
button_layout.add_widget(cancel_button)
content.add_widget(button_layout)
self.popup = Popup(title=self.get_string('Disclaimer'), content=content, auto_dismiss=False,
size_hint=(0.9, 0.9))
self.popup.open()
padding = ((self.popup.width - (ok_button.width + cancel_button.width)) / 2)
button_layout.padding = (padding, 0, padding, 0)
return
    def exit(self, instance=None):
sys.exit()
def set_monitor(self):
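        # Attach the manual run controls only when monitoring is disabled
        # (monitor == 0) in the config.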
if self.is_min is False:
self.homeScreen.max_layout.remove_widget(self.homeScreen.run_layout)
if self.config.getint('standard', 'monitor') == 0:
self.homeScreen.max_layout.add_widget(self.homeScreen.run_layout)
def get_string(self, text):
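        # Look up a UI string in the loaded language dict; fall back to a
        # visible placeholder instead of raising on a missing key.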
try:
self.logger.debug("Getting string for %s" % text)
return_string = self.lang[text]
except (ValueError, KeyError):
self.logger.error("No string found for %s in %s language file" % (text, self.language))
return_string = 'Language Error'
return return_string
def build_config(self, config):
config.setdefaults('server', {'host': "", 'port': 80})
config.setdefaults('exchanges', {'ccedk': 0, 'poloniex': 0, 'bitcoincoid': 0, 'bter': 0, 'bittrex': 0})
config.setdefaults('standard', {'language': 'English', 'period': 30, 'monitor': 1, 'start_min': 0, 'data': 0,
'show_disclaimer': 0, 'smooth_line': 1})
config.setdefaults('api_keys', {'bitcoincoid': '', 'bittrex': '', 'bter': '', 'ccedk': '', 'poloniex': ''})
def build_settings(self, settings):
settings.register_type('string', overrides.SettingStringFocus)
settings.register_type('numeric', overrides.SettingNumericFocus)
settings.register_type('string_exchange', overrides.SettingStringExchange)
        with open('user_data.json', 'a+') as user_data:
            try:
                # 'a+' leaves the read position at end-of-file, so rewind
                # before parsing; otherwise json.load always sees an empty file
                user_data.seek(0)
                saved_data = json.load(user_data)
            except ValueError:
                saved_data = []
for exchange in self.exchanges:
if exchange not in saved_data:
self.config.set('exchanges', exchange, 0)
continue
self.config.set('exchanges', exchange, len(saved_data[exchange]))
settings.add_json_panel(self.get_string('Plunge_Configuration'), self.config, 'settings/plunge.json')
def on_config_change(self, config, section, key, value):
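        # Reschedule the stats callback when the poll period changes, then
        # refresh the active exchanges and stats for any change.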
if section == "standard":
if key == "period":
Clock.unschedule(self.homeScreen.get_stats)
self.logger.info("Setting refresh Period to %s" % self.config.get('standard', 'period'))
Clock.schedule_interval(self.homeScreen.get_stats, self.config.getint('standard', 'period'))
if key == "monitor":
self.set_monitor()
self.active_exchanges = self.utils.get_active_exchanges()
self.homeScreen.exchange_spinner.values = [self.get_string(exchange) for exchange in self.active_exchanges]
self.homeScreen.set_exchange_spinners()
self.homeScreen.get_stats(0)
def show_popup(self, title, text):
content = BoxLayout(orientation='vertical')
content.add_widget(Label(text=text, size_hint=(1, 0.8), font_size=26))
content.add_widget(BoxLayout(size_hint=(1, 0.1)))
button_layout = BoxLayout(size_hint=(1, 0.1))
button = Button(text=self.get_string('OK'), size_hint=(None, None), size=(250, 50))
button.bind(on_press=self.close_popup)
button_layout.add_widget(button)
content.add_widget(button_layout)
self.popup = Popup(title=title, content=content, auto_dismiss=False, size_hint=(0.9, 0.9))
self.popup.open()
padding = ((self.popup.width - button.width) / 2)
button_layout.padding = (padding, 0, padding, 0)
self.isPopup = True
return
def close_popup(self, instance, value=False):
self.popup.dismiss()
self.isPopup = False
return
if __name__ == '__main__':
Plunge = PlungeApp()
    Plunge.run()
# test_ff.py
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test the Fieldsfile file loading plugin and FFHeader.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import zip
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import collections
import mock
import numpy as np
import iris
import iris.fileformats.ff as ff
import iris.fileformats.pp as pp
_MockField = collections.namedtuple('_MockField',
'lbext lblrec lbnrec raw_lbpack '
'lbuser boundary_packing')
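# Lightweight stand-in for a PP field: just the header attributes the FF
# loading code inspects.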
# PP-field: LBPACK N1 values.
_UNPACKED = 0
_WGDOS = 1
_CRAY = 2
_GRIB = 3 # Not implemented.
_RLE = 4 # Not supported, deprecated FF format.
# PP-field: LBUSER(1) values.
_REAL = 1
_INTEGER = 2
_LOGICAL = 3 # Not implemented.
class TestFF_HEADER(tests.IrisTest):
def test_initialisation(self):
self.assertEqual(ff.FF_HEADER[0], ('data_set_format_version', (0,)))
self.assertEqual(ff.FF_HEADER[17], ('integer_constants', (99, 100)))
def test_size(self):
self.assertEqual(len(ff.FF_HEADER), 31)
@tests.skip_data
class TestFFHeader(tests.IrisTest):
def setUp(self):
self.filename = tests.get_data_path(('FF', 'n48_multi_field'))
self.ff_header = ff.FFHeader(self.filename)
self.valid_headers = (
'integer_constants', 'real_constants', 'level_dependent_constants',
'lookup_table', 'data'
)
self.invalid_headers = (
'row_dependent_constants', 'column_dependent_constants',
'fields_of_constants', 'extra_constants', 'temp_historyfile',
'compressed_field_index1', 'compressed_field_index2',
'compressed_field_index3'
)
def test_constructor(self):
# Test FieldsFile header attribute lookup.
self.assertEqual(self.ff_header.data_set_format_version, 20)
self.assertEqual(self.ff_header.sub_model, 1)
self.assertEqual(self.ff_header.vert_coord_type, 5)
self.assertEqual(self.ff_header.horiz_grid_type, 0)
self.assertEqual(self.ff_header.dataset_type, 3)
self.assertEqual(self.ff_header.run_identifier, 0)
self.assertEqual(self.ff_header.experiment_number, -32768)
        self.assertEqual(self.ff_header.calendar, 1)
        self.assertEqual(self.ff_header.grid_staggering, 3)
self.assertEqual(self.ff_header.time_type, -32768)
self.assertEqual(self.ff_header.projection_number, -32768)
self.assertEqual(self.ff_header.model_version, 802)
self.assertEqual(self.ff_header.obs_file_type, -32768)
self.assertEqual(self.ff_header.last_fieldop_type, -32768)
self.assertEqual(self.ff_header.first_validity_time,
(2011, 7, 10, 18, 0, 0, 191))
self.assertEqual(self.ff_header.last_validity_time,
(2011, 7, 10, 21, 0, 0, 191))
self.assertEqual(self.ff_header.misc_validity_time,
(2012, 4, 30, 18, 12, 13, -32768))
self.assertEqual(self.ff_header.integer_constants.shape, (46, ))
self.assertEqual(self.ff_header.real_constants.shape, (38, ))
self.assertEqual(self.ff_header.level_dependent_constants.shape,
(71, 8))
self.assertIsNone(self.ff_header.row_dependent_constants)
self.assertIsNone(self.ff_header.column_dependent_constants)
self.assertIsNone(self.ff_header.fields_of_constants)
self.assertIsNone(self.ff_header.extra_constants)
self.assertIsNone(self.ff_header.temp_historyfile)
self.assertIsNone(self.ff_header.compressed_field_index1)
self.assertIsNone(self.ff_header.compressed_field_index2)
self.assertIsNone(self.ff_header.compressed_field_index3)
self.assertEqual(self.ff_header.lookup_table, (909, 64, 5))
self.assertEqual(self.ff_header.total_prognostic_fields, 3119)
self.assertEqual(self.ff_header.data, (2049, 2961, -32768))
def test_str(self):
self.assertString(str(self.ff_header), ('FF', 'ffheader.txt'))
def test_repr(self):
target = "FFHeader('" + self.filename + "')"
self.assertEqual(repr(self.ff_header), target)
def test_shape(self):
self.assertEqual(self.ff_header.shape('data'), (2961, -32768))
@tests.skip_data
class TestFF2PP2Cube(tests.IrisTest):
def setUp(self):
self.filename = tests.get_data_path(('FF', 'n48_multi_field'))
def test_unit_pass_0(self):
# Test FieldsFile to PPFields cube load.
cube_by_name = collections.defaultdict(int)
cubes = iris.load(self.filename)
while cubes:
cube = cubes.pop(0)
standard_name = cube.standard_name
cube_by_name[standard_name] += 1
filename = '{}_{}.cml'.format(standard_name,
cube_by_name[standard_name])
self.assertCML(cube, ('FF', filename))
def test_raw_to_table_count(self):
filename = tests.get_data_path(('FF', 'n48_multi_field_table_count'))
cubes = iris.load_raw(filename)
ff_header = ff.FFHeader(filename)
table_count = ff_header.lookup_table[2]
self.assertEqual(len(cubes), table_count)
@tests.skip_data
class TestFFieee32(tests.IrisTest):
def test_iris_loading(self):
ff32_fname = tests.get_data_path(('FF', 'n48_multi_field.ieee32'))
ff64_fname = tests.get_data_path(('FF', 'n48_multi_field'))
ff32_cubes = iris.load(ff32_fname)
ff64_cubes = iris.load(ff64_fname)
for ff32, ff64 in zip(ff32_cubes, ff64_cubes):
# load the data
_, _ = ff32.data, ff64.data
self.assertEqual(ff32, ff64)
@tests.skip_data
class TestFFVariableResolutionGrid(tests.IrisTest):
def setUp(self):
self.filename = tests.get_data_path(('FF', 'n48_multi_field'))
self.ff2pp = ff.FF2PP(self.filename)
self.ff_header = self.ff2pp._ff_header
data_shape = (73, 96)
delta = np.sin(np.linspace(0, np.pi * 5, data_shape[1])) * 5
lons = np.linspace(0, 180, data_shape[1]) + delta
lons = np.vstack([lons[:-1], lons[:-1] + 0.5 * np.diff(lons)]).T
lons = np.reshape(lons, lons.shape, order='F')
delta = np.sin(np.linspace(0, np.pi * 5, data_shape[0])) * 5
lats = np.linspace(-90, 90, data_shape[0]) + delta
lats = np.vstack([lats[:-1], lats[:-1] + 0.5 * np.diff(lats)]).T
lats = np.reshape(lats, lats.shape, order='F')
self.ff_header.column_dependent_constants = lons
self.ff_header.row_dependent_constants = lats
self.U_grid_x = lons[:-1, 1]
self.V_grid_y = lats[:-1, 1]
self.P_grid_x = lons[:, 0]
self.P_grid_y = lats[:, 0]
self.orig_make_pp_field = pp.make_pp_field
def new_make_pp_field(header):
field = self.orig_make_pp_field(header)
field.stash = self.ff2pp._custom_stash
field.bdx = field.bdy = field.bmdi
return field
# Replace the pp module function with this new function;
# this gets called in PP2FF.
pp.make_pp_field = new_make_pp_field
def tearDown(self):
pp.make_pp_field = self.orig_make_pp_field
def _check_stash(self, stash, x_coord, y_coord):
self.ff2pp._custom_stash = stash
field = next(iter(self.ff2pp))
self.assertArrayEqual(x_coord, field.x, ('x_coord was incorrect for '
'stash {}'.format(stash)))
self.assertArrayEqual(y_coord, field.y, ('y_coord was incorrect for '
'stash {}'.format(stash)))
def test_p(self):
self._check_stash('m01s00i001', self.P_grid_x, self.P_grid_y)
def test_u(self):
self._check_stash('m01s00i002', self.U_grid_x, self.P_grid_y)
def test_v(self):
self._check_stash('m01s00i003', self.P_grid_x, self.V_grid_y)
class TestFFPayload(tests.IrisTest):
def _test_payload(self, mock_field, expected_depth, expected_type):
with mock.patch('iris.fileformats.ff.FFHeader') as mock_header:
mock_header.return_value = None
ff2pp = ff.FF2PP('Not real')
data_depth, data_type = ff2pp._payload(mock_field)
self.assertEqual(data_depth, expected_depth)
self.assertEqual(data_type, expected_type)
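    # Editorial note: the expected depths below are consistent with simple
    # word-count arithmetic (inferred from the test values themselves, not
    # quoted from the ff module):
    #   unpacked: (lblrec - lbext) * 8,  e.g. (100 - 0) * 8 == 800
    #   cray:     (lblrec - lbext) * 4,  e.g. (100 - 0) * 4 == 400
    #   wgdos:    lbnrec * 8 - 4,        e.g. 100 * 8 - 4  == 796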
def test_payload_unpacked_real(self):
mock_field = _MockField(lbext=0, lblrec=100, lbnrec=-1,
raw_lbpack=_UNPACKED,
lbuser=[_REAL], boundary_packing=None)
expected_type = ff._LBUSER_DTYPE_LOOKUP[_REAL].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 800, expected_type)
def test_payload_unpacked_real_ext(self):
mock_field = _MockField(lbext=50, lblrec=100, lbnrec=-1,
raw_lbpack=_UNPACKED,
lbuser=[_REAL], boundary_packing=None)
expected_type = ff._LBUSER_DTYPE_LOOKUP[_REAL].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 400, expected_type)
def test_payload_unpacked_integer(self):
mock_field = _MockField(lbext=0, lblrec=200, lbnrec=-1,
raw_lbpack=_UNPACKED,
lbuser=[_INTEGER], boundary_packing=None)
expected_type = ff._LBUSER_DTYPE_LOOKUP[_INTEGER].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 1600, expected_type)
def test_payload_unpacked_integer_ext(self):
mock_field = _MockField(lbext=100, lblrec=200, lbnrec=-1,
raw_lbpack=_UNPACKED,
lbuser=[_INTEGER], boundary_packing=None)
expected_type = ff._LBUSER_DTYPE_LOOKUP[_INTEGER].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 800, expected_type)
def test_payload_wgdos_real(self):
mock_field = _MockField(lbext=0, lblrec=-1, lbnrec=100,
raw_lbpack=_WGDOS,
lbuser=[_REAL], boundary_packing=None)
self._test_payload(mock_field, 796, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_wgdos_real_ext(self):
mock_field = _MockField(lbext=50, lblrec=-1, lbnrec=100,
raw_lbpack=_WGDOS,
lbuser=[_REAL], boundary_packing=None)
self._test_payload(mock_field, 796, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_wgdos_integer(self):
mock_field = _MockField(lbext=0, lblrec=-1, lbnrec=200,
raw_lbpack=_WGDOS,
lbuser=[_INTEGER], boundary_packing=None)
self._test_payload(mock_field, 1596, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
def test_payload_wgdos_integer_ext(self):
mock_field = _MockField(lbext=100, lblrec=-1, lbnrec=200,
raw_lbpack=_WGDOS,
lbuser=[_INTEGER], boundary_packing=None)
self._test_payload(mock_field, 1596, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
def test_payload_cray_real(self):
mock_field = _MockField(lbext=0, lblrec=100, lbnrec=-1,
raw_lbpack=_CRAY,
lbuser=[_REAL], boundary_packing=None)
self._test_payload(mock_field, 400, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_cray_real_ext(self):
mock_field = _MockField(lbext=50, lblrec=100, lbnrec=-1,
raw_lbpack=_CRAY,
lbuser=[_REAL], boundary_packing=None)
self._test_payload(mock_field, 200, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_cray_integer(self):
mock_field = _MockField(lbext=0, lblrec=200, lbnrec=-1,
raw_lbpack=_CRAY,
lbuser=[_INTEGER], boundary_packing=None)
self._test_payload(mock_field, 800, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
def test_payload_cray_integer_ext(self):
mock_field = _MockField(lbext=100, lblrec=200, lbnrec=-1,
raw_lbpack=_CRAY,
lbuser=[_INTEGER], boundary_packing=None)
self._test_payload(mock_field, 400, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
if __name__ == '__main__':
tests.main()<|fim▁end|> | |
<|file_name|>console_test.go<|end_file_name|><|fim▁begin|>package main
import (
"bytes"
"sync"
"testing"
"github.com/mauricioklein/text-search-engine/ranking"
"github.com/mauricioklein/text-search-engine/reader"
"github.com/mauricioklein/text-search-engine/report"
"github.com/stretchr/testify/assert"
)
func TestConsoleInputStreamSuccess(t *testing.T) {
c, r, _, _ := NewTestConsole("./test-utils/3-files/")
r.WriteString("Foobar\n")
actual, _ := c.Read()
expected := "Foobar"
assert.Equal(t, expected, actual)
}
func TestConsoleInputStreamError(t *testing.T) {
c, r, _, _ := NewTestConsole("./test-utils/3-files/")
r.WriteString("Foobar")
_, err := c.Read()
assert.Error(t, err)
}
func TestConsoleOutputStream(t *testing.T) {
c, _, w, _ := NewTestConsole("./test-utils/3-files/")
c.Write("Foobar")
actual, _ := w.ReadString('\x00')
expected := "Foobar"
assert.Equal(t, expected, actual)
}
func TestConsoleErrorStream(t *testing.T) {
c, _, _, e := NewTestConsole("./test-utils/3-files/")<|fim▁hole|>
c.Error("a generic error")
actual, _ := e.ReadString('\x00')
expected := "a generic error"
assert.Equal(t, expected, actual)
}
func TestConsoleRun3Files(t *testing.T) {
c, r, w, _ := NewTestConsole("./test-utils/3-files/")
// write "user input" data to the read stream
r.Write([]byte("Lorem\n")) // actual search sentence
r.Write([]byte(QuitSentence + "\n")) // quit command
// Wait for the run command to finish (due the quit command above)
var wg sync.WaitGroup
wg.Add(1)
go dispatchConsole(c, &wg)
wg.Wait()
// Read response from the write stream
actual, _ := w.ReadString('\x00')
expected := `search> file1.txt: 100.00% match
file3.txt: 100.00% match
file2.txt: 0.00% match
search> `
assert.Equal(t, expected, actual)
}
func TestConsoleRun11Files(t *testing.T) {
c, r, w, _ := NewTestConsole("./test-utils/11-files/")
// write "user input" data to the read stream
r.Write([]byte("Lorem ipsum dolor sit\n")) // actual search sentence
r.Write([]byte(QuitSentence + "\n")) // quit command
// Wait for the run command to finish (due the quit command above)
var wg sync.WaitGroup
wg.Add(1)
go dispatchConsole(c, &wg)
wg.Wait()
// Read response from the write stream
actual, _ := w.ReadString('\x00')
	// only 10 of the 11 files should be present (file5.txt is absent in the
	// expected output below), since the result should display only the top 10
expected := `search> file1.txt: 100.00% match
file10.txt: 100.00% match
file11.txt: 100.00% match
file3.txt: 100.00% match
file6.txt: 100.00% match
file7.txt: 100.00% match
file8.txt: 100.00% match
file9.txt: 100.00% match
file2.txt: 75.00% match
file4.txt: 75.00% match
search> `
// should filter the result to the top 10
assert.Equal(t, expected, actual)
}
func NewTestConsole(dirPath string) (Console, *bytes.Buffer, *bytes.Buffer, *bytes.Buffer) {
files, _ := reader.Disk{}.Read(dirPath)
nWorkers := 3
processor := ranking.NewProcessor(files, nWorkers, ranking.LevenshteinRanking{})
inBuf := bytes.NewBuffer([]byte{})
outBuf := bytes.NewBuffer([]byte{})
errBuf := bytes.NewBuffer([]byte{})
return NewConsole(
processor,
report.SimpleReporter{},
inBuf,
outBuf,
errBuf,
), inBuf, outBuf, errBuf
}
func dispatchConsole(c Console, wg *sync.WaitGroup) {
defer wg.Done()
c.Run()
}<|fim▁end|> | |
<|file_name|>backend.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <string.h> /* for memset */
#include <errno.h>
#include "tbbmalloc_internal.h"
namespace rml {
namespace internal {
/*********** Code to acquire memory from the OS or other executive ****************/
/*
   A syscall or malloc can set a non-zero errno in case of failure,
   but the allocator might later still find memory to fulfill the request,
   and we do not want a successful scalable_malloc call to change errno.
   To support this, restore the old errno in (get|free)RawMemory, and set errno
   in the frontend just before returning to user code.
Please note: every syscall/libc call used inside scalable_malloc that
sets errno must be protected this way, not just memory allocation per se.
*/
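/* Illustrative sketch (not the actual implementation) of the pattern above:
       int prevErrno = errno;        // save errno before the syscall
       void *mem = mmap(...);        // may set errno on failure
       if (mem == MAP_FAILED)
           errno = prevErrno;        // restore: a later retry may still succeed
*/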
#if USE_DEFAULT_MEMORY_MAPPING
#include "MapMemory.h"
#else
/* assume MapMemory and UnmapMemory are customized */
#endif
void* getRawMemory (size_t size, PageType pageType) {
return MapMemory(size, pageType);
}
int freeRawMemory (void *object, size_t size) {
return UnmapMemory(object, size);
}
#if CHECK_ALLOCATION_RANGE
void Backend::UsedAddressRange::registerAlloc(uintptr_t left, uintptr_t right)
{
MallocMutex::scoped_lock lock(mutex);
if (left < leftBound.load(std::memory_order_relaxed))
leftBound.store(left, std::memory_order_relaxed);
if (right > rightBound.load(std::memory_order_relaxed))
rightBound.store(right, std::memory_order_relaxed);
MALLOC_ASSERT(leftBound.load(std::memory_order_relaxed), ASSERT_TEXT);
MALLOC_ASSERT(leftBound.load(std::memory_order_relaxed) < rightBound.load(std::memory_order_relaxed), ASSERT_TEXT);
MALLOC_ASSERT(leftBound.load(std::memory_order_relaxed) <= left && right <= rightBound.load(std::memory_order_relaxed), ASSERT_TEXT);
}
void Backend::UsedAddressRange::registerFree(uintptr_t left, uintptr_t right)
{
MallocMutex::scoped_lock lock(mutex);
if (leftBound.load(std::memory_order_relaxed) == left) {
if (rightBound.load(std::memory_order_relaxed) == right) {
leftBound.store(ADDRESS_UPPER_BOUND, std::memory_order_relaxed);
rightBound.store(0, std::memory_order_relaxed);
} else
leftBound.store(right, std::memory_order_relaxed);
} else if (rightBound.load(std::memory_order_relaxed) == right)
rightBound.store(left, std::memory_order_relaxed);
MALLOC_ASSERT((!rightBound.load(std::memory_order_relaxed) && leftBound.load(std::memory_order_relaxed) == ADDRESS_UPPER_BOUND)
|| leftBound.load(std::memory_order_relaxed) < rightBound.load(std::memory_order_relaxed), ASSERT_TEXT);
}
#endif // CHECK_ALLOCATION_RANGE
// Initialized in frontend inside defaultMemPool
extern HugePagesStatus hugePages;
void *Backend::allocRawMem(size_t &size)
{
void *res = NULL;
size_t allocSize = 0;
if (extMemPool->userPool()) {
if (extMemPool->fixedPool && bootsrapMemDone == bootsrapMemStatus.load(std::memory_order_acquire))
return NULL;
MALLOC_ASSERT(bootsrapMemStatus != bootsrapMemNotDone,
"Backend::allocRawMem() called prematurely?");
// TODO: support for raw mem not aligned at sizeof(uintptr_t)
// memory from fixed pool is asked once and only once
allocSize = alignUpGeneric(size, extMemPool->granularity);
res = (*extMemPool->rawAlloc)(extMemPool->poolId, allocSize);
} else {
// Align allocation on page size
size_t pageSize = hugePages.isEnabled ? hugePages.getGranularity() : extMemPool->granularity;
MALLOC_ASSERT(pageSize, "Page size cannot be zero.");
allocSize = alignUpGeneric(size, pageSize);
// If user requested huge pages and they are available, try to use preallocated ones firstly.
// If there are none, lets check transparent huge pages support and use them instead.
if (hugePages.isEnabled) {
if (hugePages.isHPAvailable) {
res = getRawMemory(allocSize, PREALLOCATED_HUGE_PAGE);
}
if (!res && hugePages.isTHPAvailable) {
res = getRawMemory(allocSize, TRANSPARENT_HUGE_PAGE);
}
}
if (!res) {
res = getRawMemory(allocSize, REGULAR);
}
}
if (res) {
MALLOC_ASSERT(allocSize > 0, "Invalid size of an allocated region.");
size = allocSize;
if (!extMemPool->userPool())
usedAddrRange.registerAlloc((uintptr_t)res, (uintptr_t)res+size);
#if MALLOC_DEBUG
volatile size_t curTotalSize = totalMemSize; // to read global value once
MALLOC_ASSERT(curTotalSize+size > curTotalSize, "Overflow allocation size.");
#endif
totalMemSize.fetch_add(size);
}
return res;
}
bool Backend::freeRawMem(void *object, size_t size)
{
bool fail;
#if MALLOC_DEBUG
volatile size_t curTotalSize = totalMemSize; // to read global value once
MALLOC_ASSERT(curTotalSize-size < curTotalSize, "Negative allocation size.");
#endif
totalMemSize.fetch_sub(size);
if (extMemPool->userPool()) {
MALLOC_ASSERT(!extMemPool->fixedPool, "No free for fixed-size pools.");
fail = (*extMemPool->rawFree)(extMemPool->poolId, object, size);
} else {
usedAddrRange.registerFree((uintptr_t)object, (uintptr_t)object + size);
fail = freeRawMemory(object, size);
}
// TODO: use result in all freeRawMem() callers
return !fail;
}
/********* End memory acquisition code ********************************/
// Protected object size. After successful locking returns size of locked block,
// and releasing requires setting block size.
class GuardedSize : tbb::detail::no_copy {
std::atomic<uintptr_t> value;
public:
enum State {
LOCKED,
COAL_BLOCK, // block is coalescing now
MAX_LOCKED_VAL = COAL_BLOCK,
LAST_REGION_BLOCK, // used to mark last block in region
// values after this are "normal" block sizes
MAX_SPEC_VAL = LAST_REGION_BLOCK
};
void initLocked() { value.store(LOCKED, std::memory_order_release); } // TBB_REVAMP_TODO: was relaxed
void makeCoalscing() {
MALLOC_ASSERT(value.load(std::memory_order_relaxed) == LOCKED, ASSERT_TEXT);
value.store(COAL_BLOCK, std::memory_order_release); // TBB_REVAMP_TODO: was relaxed
}
size_t tryLock(State state) {
MALLOC_ASSERT(state <= MAX_LOCKED_VAL, ASSERT_TEXT);
size_t sz = value.load(std::memory_order_acquire);
for (;;) {
if (sz <= MAX_LOCKED_VAL) {
break;
}
if (value.compare_exchange_strong(sz, state)) {
break;
}
}
return sz;
}
void unlock(size_t size) {
MALLOC_ASSERT(value.load(std::memory_order_relaxed) <= MAX_LOCKED_VAL, "The lock is not locked");
MALLOC_ASSERT(size > MAX_LOCKED_VAL, ASSERT_TEXT);
value.store(size, std::memory_order_release);
}
bool isLastRegionBlock() const { return value.load(std::memory_order_relaxed) == LAST_REGION_BLOCK; }
friend void Backend::IndexedBins::verify();
};
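// Typical use of GuardedSize (a sketch based on the methods above, not a
// quote from call sites): the size word doubles as a lock, with values
// <= MAX_LOCKED_VAL acting as lock states and larger values as real sizes.
//     size_t sz = guard.tryLock(GuardedSize::LOCKED);
//     if (sz > GuardedSize::MAX_LOCKED_VAL) {
//         /* we own the block; sz is its real size */
//         guard.unlock(sz);
//     }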
struct MemRegion {
MemRegion *next, // keep all regions in any pool to release all them on
*prev; // pool destroying, 2-linked list to release individual
// regions.
size_t allocSz, // got from pool callback
blockSz; // initial and maximal inner block size
MemRegionType type;
};
// this data must be unmodified while block is in use, so separate it
class BlockMutexes {
protected:
GuardedSize myL, // lock for me
leftL; // lock for left neighbor
};
class FreeBlock : BlockMutexes {
public:
static const size_t minBlockSize;
friend void Backend::IndexedBins::verify();
FreeBlock *prev, // in 2-linked list related to bin
*next,
*nextToFree; // used to form a queue during coalescing
    // valid only while the block is in processing, i.e. it is neither free
    // nor used outside of the backend
    size_t sizeTmp;
int myBin; // bin that is owner of the block
bool slabAligned;
bool blockInBin; // this block in myBin already
FreeBlock *rightNeig(size_t sz) const {
MALLOC_ASSERT(sz, ASSERT_TEXT);
return (FreeBlock*)((uintptr_t)this+sz);
}
FreeBlock *leftNeig(size_t sz) const {
MALLOC_ASSERT(sz, ASSERT_TEXT);
return (FreeBlock*)((uintptr_t)this - sz);
}
void initHeader() { myL.initLocked(); leftL.initLocked(); }
void setMeFree(size_t size) { myL.unlock(size); }
size_t trySetMeUsed(GuardedSize::State s) { return myL.tryLock(s); }
bool isLastRegionBlock() const { return myL.isLastRegionBlock(); }
void setLeftFree(size_t sz) { leftL.unlock(sz); }
size_t trySetLeftUsed(GuardedSize::State s) { return leftL.tryLock(s); }
size_t tryLockBlock() {
size_t rSz, sz = trySetMeUsed(GuardedSize::LOCKED);
if (sz <= GuardedSize::MAX_LOCKED_VAL)
return false;
rSz = rightNeig(sz)->trySetLeftUsed(GuardedSize::LOCKED);
if (rSz <= GuardedSize::MAX_LOCKED_VAL) {
setMeFree(sz);
return false;
}<|fim▁hole|> myL.makeCoalscing();
rightNeig(blockSz)->leftL.makeCoalscing();
sizeTmp = blockSz;
nextToFree = NULL;
}
void markUsed() {
myL.initLocked();
rightNeig(sizeTmp)->leftL.initLocked();
nextToFree = NULL;
}
static void markBlocks(FreeBlock *fBlock, int num, size_t size) {
for (int i=1; i<num; i++) {
fBlock = (FreeBlock*)((uintptr_t)fBlock + size);
fBlock->initHeader();
}
}
};
// Last block in any region. Its "size" field is GuardedSize::LAST_REGION_BLOCK,
// This kind of blocks used to find region header
// and have a possibility to return region back to OS
struct LastFreeBlock : public FreeBlock {
MemRegion *memRegion;
};
const size_t FreeBlock::minBlockSize = sizeof(FreeBlock);
inline bool BackendSync::waitTillBlockReleased(intptr_t startModifiedCnt)
{
AtomicBackoff backoff;
#if __TBB_MALLOC_BACKEND_STAT
class ITT_Guard {
void *ptr;
public:
ITT_Guard(void *p) : ptr(p) {
MALLOC_ITT_SYNC_PREPARE(ptr);
}
~ITT_Guard() {
MALLOC_ITT_SYNC_ACQUIRED(ptr);
}
};
ITT_Guard ittGuard(&inFlyBlocks);
#endif
for (intptr_t myBinsInFlyBlocks = inFlyBlocks.load(std::memory_order_acquire),
myCoalescQInFlyBlocks = backend->blocksInCoalescing(); ; backoff.pause()) {
MALLOC_ASSERT(myBinsInFlyBlocks>=0 && myCoalescQInFlyBlocks>=0, NULL);
intptr_t currBinsInFlyBlocks = inFlyBlocks.load(std::memory_order_acquire),
currCoalescQInFlyBlocks = backend->blocksInCoalescing();
WhiteboxTestingYield();
// Stop waiting iff:
// 1) blocks were removed from processing, not added
if (myBinsInFlyBlocks > currBinsInFlyBlocks
        // 2) blocks were released from the delayed coalescing queue
|| myCoalescQInFlyBlocks > currCoalescQInFlyBlocks)
break;
// 3) if there are blocks in coalescing, and no progress in its processing,
// try to scan coalescing queue and stop waiting, if changes were made
// (if there are no changes and in-fly blocks exist, we continue
// waiting to not increase load on coalescQ)
if (currCoalescQInFlyBlocks > 0 && backend->scanCoalescQ(/*forceCoalescQDrop=*/false))
break;
// 4) when there are no blocks
if (!currBinsInFlyBlocks && !currCoalescQInFlyBlocks)
// re-scan make sense only if bins were modified since scanned
return startModifiedCnt != getNumOfMods();
myBinsInFlyBlocks = currBinsInFlyBlocks;
myCoalescQInFlyBlocks = currCoalescQInFlyBlocks;
}
return true;
}
void CoalRequestQ::putBlock(FreeBlock *fBlock)
{
MALLOC_ASSERT(fBlock->sizeTmp >= FreeBlock::minBlockSize, ASSERT_TEXT);
fBlock->markUsed();
// the block is in the queue, do not forget that it's here
inFlyBlocks++;
FreeBlock *myBlToFree = blocksToFree.load(std::memory_order_acquire);
for (;;) {
fBlock->nextToFree = myBlToFree;
if (blocksToFree.compare_exchange_strong(myBlToFree, fBlock)) {
return;
}
}
}
FreeBlock *CoalRequestQ::getAll()
{
for (;;) {
FreeBlock *myBlToFree = blocksToFree.load(std::memory_order_acquire);
if (!myBlToFree) {
return NULL;
} else {
if (blocksToFree.compare_exchange_strong(myBlToFree, 0)) {
return myBlToFree;
} else {
continue;
}
}
}
}
inline void CoalRequestQ::blockWasProcessed()
{
bkndSync->binsModified();
int prev = inFlyBlocks.fetch_sub(1);
MALLOC_ASSERT(prev > 0, ASSERT_TEXT);
}
// Try to get a block from a bin.
// If the remaining free space would stay in the same bin,
// split the block without removing it.
// If the free space should go to other bin(s), remove the block.
// alignedBin is true, if all blocks in the bin have slab-aligned right side.
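// Illustrative sizes (a sketch, not taken from real bin boundaries): taking
// 100KB from a 160KB free block leaves a 60KB remainder; per the check below,
// a remainder must be either empty or at least FreeBlock::minBlockSize so
// that it can hold its own control structures.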
FreeBlock *Backend::IndexedBins::getFromBin(int binIdx, BackendSync *sync, size_t size,
bool needAlignedRes, bool alignedBin, bool wait, int *binLocked)
{
Bin *b = &freeBins[binIdx];
try_next:
FreeBlock *fBlock = NULL;
if (!b->empty()) {
bool locked;
MallocMutex::scoped_lock scopedLock(b->tLock, wait, &locked);
if (!locked) {
if (binLocked) (*binLocked)++;
return NULL;
}
for (FreeBlock *curr = b->head.load(std::memory_order_relaxed); curr; curr = curr->next) {
size_t szBlock = curr->tryLockBlock();
if (!szBlock) {
                // the block is locked; re-do the bin lock, as there is no
                // place to spin while the block is coalescing
goto try_next;
}
// GENERAL CASE
if (alignedBin || !needAlignedRes) {
size_t splitSz = szBlock - size;
// If we got a block as split result, it must have a room for control structures.
if (szBlock >= size && (splitSz >= FreeBlock::minBlockSize || !splitSz))
fBlock = curr;
} else {
// SPECIAL CASE, to get aligned block from unaligned bin we have to cut the middle of a block
// and return remaining left and right part. Possible only in fixed pool scenario, assert for this
// is set inside splitBlock() function.
void *newB = alignUp(curr, slabSize);
uintptr_t rightNew = (uintptr_t)newB + size;
uintptr_t rightCurr = (uintptr_t)curr + szBlock;
// Check if the block size is sufficient,
// and also left and right split results are either big enough or non-existent
if (rightNew <= rightCurr
&& (newB == curr || ((uintptr_t)newB - (uintptr_t)curr) >= FreeBlock::minBlockSize)
&& (rightNew == rightCurr || (rightCurr - rightNew) >= FreeBlock::minBlockSize))
fBlock = curr;
}
if (fBlock) {
// consume must be called before result of removing from a bin is visible externally.
sync->blockConsumed();
// TODO: think about cases when block stays in the same bin
b->removeBlock(fBlock);
if (freeBins[binIdx].empty())
bitMask.set(binIdx, false);
fBlock->sizeTmp = szBlock;
break;
} else { // block size is not valid, search for next block in the bin
curr->setMeFree(szBlock);
curr->rightNeig(szBlock)->setLeftFree(szBlock);
}
}
}
return fBlock;
}
bool Backend::IndexedBins::tryReleaseRegions(int binIdx, Backend *backend)
{
Bin *b = &freeBins[binIdx];
FreeBlock *fBlockList = NULL;
// got all blocks from the bin and re-do coalesce on them
// to release single-block regions
try_next:
if (!b->empty()) {
MallocMutex::scoped_lock binLock(b->tLock);
for (FreeBlock *curr = b->head.load(std::memory_order_relaxed); curr; ) {
size_t szBlock = curr->tryLockBlock();
if (!szBlock)
goto try_next;
FreeBlock *next = curr->next;
b->removeBlock(curr);
curr->sizeTmp = szBlock;
curr->nextToFree = fBlockList;
fBlockList = curr;
curr = next;
}
}
return backend->coalescAndPutList(fBlockList, /*forceCoalescQDrop=*/true,
/*reportBlocksProcessed=*/false);
}
void Backend::Bin::removeBlock(FreeBlock *fBlock)
{
MALLOC_ASSERT(fBlock->next||fBlock->prev||fBlock== head.load(std::memory_order_relaxed),
"Detected that a block is not in the bin.");
if (head.load(std::memory_order_relaxed) == fBlock)
head.store(fBlock->next, std::memory_order_relaxed);
if (tail == fBlock)
tail = fBlock->prev;
if (fBlock->prev)
fBlock->prev->next = fBlock->next;
if (fBlock->next)
fBlock->next->prev = fBlock->prev;
}
void Backend::IndexedBins::addBlock(int binIdx, FreeBlock *fBlock, size_t /* blockSz */, bool addToTail)
{
Bin *b = &freeBins[binIdx];
fBlock->myBin = binIdx;
fBlock->next = fBlock->prev = NULL;
{
MallocMutex::scoped_lock scopedLock(b->tLock);
if (addToTail) {
fBlock->prev = b->tail;
b->tail = fBlock;
if (fBlock->prev)
fBlock->prev->next = fBlock;
if (!b->head.load(std::memory_order_relaxed))
b->head.store(fBlock, std::memory_order_relaxed);
} else {
fBlock->next = b->head.load(std::memory_order_relaxed);
b->head.store(fBlock, std::memory_order_relaxed);
if (fBlock->next)
fBlock->next->prev = fBlock;
if (!b->tail)
b->tail = fBlock;
}
}
bitMask.set(binIdx, true);
}
bool Backend::IndexedBins::tryAddBlock(int binIdx, FreeBlock *fBlock, bool addToTail)
{
bool locked;
Bin *b = &freeBins[binIdx];
fBlock->myBin = binIdx;
if (addToTail) {
fBlock->next = NULL;
{
MallocMutex::scoped_lock scopedLock(b->tLock, /*wait=*/false, &locked);
if (!locked)
return false;
fBlock->prev = b->tail;
b->tail = fBlock;
if (fBlock->prev)
fBlock->prev->next = fBlock;
if (!b->head.load(std::memory_order_relaxed))
b->head.store(fBlock, std::memory_order_relaxed);
}
} else {
fBlock->prev = NULL;
{
MallocMutex::scoped_lock scopedLock(b->tLock, /*wait=*/false, &locked);
if (!locked)
return false;
fBlock->next = b->head.load(std::memory_order_relaxed);
b->head.store(fBlock, std::memory_order_relaxed);
if (fBlock->next)
fBlock->next->prev = fBlock;
if (!b->tail)
b->tail = fBlock;
}
}
bitMask.set(binIdx, true);
return true;
}
void Backend::IndexedBins::reset()
{
for (unsigned i=0; i<Backend::freeBinsNum; i++)
freeBins[i].reset();
bitMask.reset();
}
void Backend::IndexedBins::lockRemoveBlock(int binIdx, FreeBlock *fBlock)
{
MallocMutex::scoped_lock scopedLock(freeBins[binIdx].tLock);
freeBins[binIdx].removeBlock(fBlock);
if (freeBins[binIdx].empty())
bitMask.set(binIdx, false);
}
bool ExtMemoryPool::regionsAreReleaseable() const
{
return !keepAllMemory && !delayRegsReleasing;
}
FreeBlock *Backend::splitBlock(FreeBlock *fBlock, int num, size_t size, bool blockIsAligned, bool needAlignedBlock)
{
const size_t totalSize = num * size;
// SPECIAL CASE, for unaligned block we have to cut the middle of a block
// and return remaining left and right part. Possible only in a fixed pool scenario.
if (needAlignedBlock && !blockIsAligned) {
MALLOC_ASSERT(extMemPool->fixedPool,
"Aligned block request from unaligned bin possible only in fixed pool scenario.");
// Space to use is in the middle
FreeBlock *newBlock = alignUp(fBlock, slabSize);
FreeBlock *rightPart = (FreeBlock*)((uintptr_t)newBlock + totalSize);
uintptr_t fBlockEnd = (uintptr_t)fBlock + fBlock->sizeTmp;
// Return free right part
if ((uintptr_t)rightPart != fBlockEnd) {
rightPart->initHeader(); // to prevent coalescing rightPart with fBlock
size_t rightSize = fBlockEnd - (uintptr_t)rightPart;
coalescAndPut(rightPart, rightSize, toAlignedBin(rightPart, rightSize));
}
// And free left part
if (newBlock != fBlock) {
newBlock->initHeader(); // to prevent coalescing fBlock with newB
size_t leftSize = (uintptr_t)newBlock - (uintptr_t)fBlock;
coalescAndPut(fBlock, leftSize, toAlignedBin(fBlock, leftSize));
}
fBlock = newBlock;
} else if (size_t splitSize = fBlock->sizeTmp - totalSize) { // need to split the block
// GENERAL CASE, cut the left or right part of the block
FreeBlock *splitBlock = NULL;
if (needAlignedBlock) {
// For slab aligned blocks cut the right side of the block
// and return it to a requester, original block returns to backend
splitBlock = fBlock;
fBlock = (FreeBlock*)((uintptr_t)splitBlock + splitSize);
fBlock->initHeader();
} else {
            // For large object blocks, cut the original block and put the free right part to the backend
splitBlock = (FreeBlock*)((uintptr_t)fBlock + totalSize);
splitBlock->initHeader();
}
        // Mark the free block the same as its parent only when the requested type
        // (needAlignedBlock) and the type of the block returned from Bins/OS
        // (blockIsAligned) are equal (XOR operation used)
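        // Case sketch of the expression below:
        //   blockIsAligned == needAlignedBlock -> keep blockIsAligned as-is;
        //   blockIsAligned != needAlignedBlock -> recompute the remainder's
        //   alignment via toAlignedBin().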
bool markAligned = (blockIsAligned ^ needAlignedBlock) ? toAlignedBin(splitBlock, splitSize) : blockIsAligned;
coalescAndPut(splitBlock, splitSize, markAligned);
}
MALLOC_ASSERT(!needAlignedBlock || isAligned(fBlock, slabSize), "Expect to get aligned block, if one was requested.");
FreeBlock::markBlocks(fBlock, num, size);
return fBlock;
}
size_t Backend::getMaxBinnedSize() const
{
return hugePages.isEnabled && !inUserPool() ?
maxBinned_HugePage : maxBinned_SmallPage;
}
inline bool Backend::MaxRequestComparator::operator()(size_t oldMaxReq, size_t requestSize) const
{
return requestSize > oldMaxReq && requestSize < backend->getMaxBinnedSize();
}
// last chance to get memory
FreeBlock *Backend::releaseMemInCaches(intptr_t startModifiedCnt,
int *lockedBinsThreshold, int numOfLockedBins)
{
// something released from caches
if (extMemPool->hardCachesCleanup()
// ..or can use blocks that are in processing now
|| bkndSync.waitTillBlockReleased(startModifiedCnt))
return (FreeBlock*)VALID_BLOCK_IN_BIN;
// OS can't give us more memory, but we have some in locked bins
if (*lockedBinsThreshold && numOfLockedBins) {
*lockedBinsThreshold = 0;
return (FreeBlock*)VALID_BLOCK_IN_BIN;
}
return NULL; // nothing found, give up
}
FreeBlock *Backend::askMemFromOS(size_t blockSize, intptr_t startModifiedCnt,
int *lockedBinsThreshold, int numOfLockedBins,
bool *splittableRet, bool needSlabRegion)
{
FreeBlock *block;
// The block sizes can be divided into 3 groups:
// 1. "quite small": popular object size, we are in bootstarp or something
// like; request several regions.
// 2. "quite large": we want to have several such blocks in the region
// but not want several pre-allocated regions.
// 3. "huge": exact fit, we allocate only one block and do not allow
    // any other allocations to be placed in the region.
// Dividing the block sizes in these groups we are trying to balance between
// too small regions (that leads to fragmentation) and too large ones (that
// leads to excessive address space consumption). If a region is "too
// large", allocate only one, to prevent fragmentation. It supposedly
// doesn't hurt performance, because the object requested by user is large.
// Bounds for the groups are:
const size_t maxBinned = getMaxBinnedSize();
const size_t quiteSmall = maxBinned / 8;
const size_t quiteLarge = maxBinned;
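    // For example, with a hypothetical maxBinned of 8MB: "quite small" covers
    // blocks below 1MB, "quite large" covers [1MB, 8MB), and "huge" is 8MB+.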
if (blockSize >= quiteLarge) {
// Do not interact with other threads via semaphores, as for exact fit
// we can't share regions with them, memory requesting is individual.
block = addNewRegion(blockSize, MEMREG_ONE_BLOCK, /*addToBin=*/false);
if (!block)
return releaseMemInCaches(startModifiedCnt, lockedBinsThreshold, numOfLockedBins);
*splittableRet = false;
} else {
const size_t regSz_sizeBased = alignUp(4*maxRequestedSize, 1024*1024);
// Another thread is modifying backend while we can't get the block.
// Wait while it leaves and re-do the scan
// before trying other ways to extend the backend.
if (bkndSync.waitTillBlockReleased(startModifiedCnt)
            // the semaphore protects adding more memory from the OS
|| memExtendingSema.wait())
return (FreeBlock*)VALID_BLOCK_IN_BIN;
if (startModifiedCnt != bkndSync.getNumOfMods()) {
memExtendingSema.signal();
return (FreeBlock*)VALID_BLOCK_IN_BIN;
}
if (blockSize < quiteSmall) {
// For this size of blocks, add NUM_OF_REG "advance" regions in bin,
// and return one as a result.
// TODO: add to bin first, because other threads can use them right away.
// This must be done carefully, because blocks in bins can be released
// in releaseCachesToLimit().
const unsigned NUM_OF_REG = 3;
MemRegionType regType = needSlabRegion ? MEMREG_SLAB_BLOCKS : MEMREG_LARGE_BLOCKS;
block = addNewRegion(regSz_sizeBased, regType, /*addToBin=*/false);
if (block)
for (unsigned idx=0; idx<NUM_OF_REG; idx++)
if (! addNewRegion(regSz_sizeBased, regType, /*addToBin=*/true))
break;
} else {
block = addNewRegion(regSz_sizeBased, MEMREG_LARGE_BLOCKS, /*addToBin=*/false);
}
memExtendingSema.signal();
// no regions found, try to clean cache
if (!block || block == (FreeBlock*)VALID_BLOCK_IN_BIN)
return releaseMemInCaches(startModifiedCnt, lockedBinsThreshold, numOfLockedBins);
// Since a region can hold more than one block it can be split.
*splittableRet = true;
}
// after asking memory from OS, release caches if we above the memory limits
releaseCachesToLimit();
return block;
}
void Backend::releaseCachesToLimit()
{
if (!memSoftLimit.load(std::memory_order_relaxed)
|| totalMemSize.load(std::memory_order_relaxed) <= memSoftLimit.load(std::memory_order_relaxed)) {
return;
}
size_t locTotalMemSize, locMemSoftLimit;
scanCoalescQ(/*forceCoalescQDrop=*/false);
if (extMemPool->softCachesCleanup() &&
(locTotalMemSize = totalMemSize.load(std::memory_order_acquire)) <=
(locMemSoftLimit = memSoftLimit.load(std::memory_order_acquire)))
return;
// clean global large-object cache, if this is not enough, clean local caches
// do this in several tries, because backend fragmentation can prevent
// region from releasing
for (int cleanLocal = 0; cleanLocal<2; cleanLocal++)
while (cleanLocal ?
extMemPool->allLocalCaches.cleanup(/*cleanOnlyUnused=*/true) :
extMemPool->loc.decreasingCleanup())
if ((locTotalMemSize = totalMemSize.load(std::memory_order_acquire)) <=
(locMemSoftLimit = memSoftLimit.load(std::memory_order_acquire)))
return;
// last chance to match memSoftLimit
extMemPool->hardCachesCleanup();
}
int Backend::IndexedBins::getMinNonemptyBin(unsigned startBin) const
{
int p = bitMask.getMinTrue(startBin);
return p == -1 ? Backend::freeBinsNum : p;
}
FreeBlock *Backend::IndexedBins::findBlock(int nativeBin, BackendSync *sync, size_t size,
bool needAlignedBlock, bool alignedBin, int *numOfLockedBins)
{
for (int i=getMinNonemptyBin(nativeBin); i<freeBinsNum; i=getMinNonemptyBin(i+1))
if (FreeBlock *block = getFromBin(i, sync, size, needAlignedBlock, alignedBin, /*wait=*/false, numOfLockedBins))
return block;
return NULL;
}
void Backend::requestBootstrapMem()
{
if (bootsrapMemDone == bootsrapMemStatus.load(std::memory_order_acquire))
return;
MallocMutex::scoped_lock lock( bootsrapMemStatusMutex );
if (bootsrapMemDone == bootsrapMemStatus)
return;
MALLOC_ASSERT(bootsrapMemNotDone == bootsrapMemStatus, ASSERT_TEXT);
bootsrapMemStatus = bootsrapMemInitializing;
// request some rather big region during bootstrap in advance
    // it is OK to get NULL here, as we re-do the request later with a more modest size
addNewRegion(2*1024*1024, MEMREG_SLAB_BLOCKS, /*addToBin=*/true);
bootsrapMemStatus = bootsrapMemDone;
}
// try to allocate size Byte block in available bins
// needAlignedRes is true if result must be slab-aligned
FreeBlock *Backend::genericGetBlock(int num, size_t size, bool needAlignedBlock)
{
FreeBlock *block = NULL;
const size_t totalReqSize = num*size;
// no splitting after requesting new region, asks exact size
const int nativeBin = sizeToBin(totalReqSize);
requestBootstrapMem();
    // If we found 2 or fewer locked bins, it's time to ask the OS for more memory.
    // But nothing can be asked from a fixed pool. And we prefer to wait, not ask
    // for more memory, if the block is quite large.
int lockedBinsThreshold = extMemPool->fixedPool || size>=maxBinned_SmallPage? 0 : 2;
// Find maximal requested size limited by getMaxBinnedSize()
AtomicUpdate(maxRequestedSize, totalReqSize, MaxRequestComparator(this));
scanCoalescQ(/*forceCoalescQDrop=*/false);
bool splittable = true;
for (;;) {
const intptr_t startModifiedCnt = bkndSync.getNumOfMods();
int numOfLockedBins;
do {
numOfLockedBins = 0;
if (needAlignedBlock) {
block = freeSlabAlignedBins.findBlock(nativeBin, &bkndSync, num*size, needAlignedBlock,
/*alignedBin=*/true, &numOfLockedBins);
if (!block && extMemPool->fixedPool)
block = freeLargeBlockBins.findBlock(nativeBin, &bkndSync, num*size, needAlignedBlock,
/*alignedBin=*/false, &numOfLockedBins);
} else {
block = freeLargeBlockBins.findBlock(nativeBin, &bkndSync, num*size, needAlignedBlock,
/*alignedBin=*/false, &numOfLockedBins);
if (!block && extMemPool->fixedPool)
block = freeSlabAlignedBins.findBlock(nativeBin, &bkndSync, num*size, needAlignedBlock,
/*alignedBin=*/true, &numOfLockedBins);
}
} while (!block && numOfLockedBins>lockedBinsThreshold);
if (block)
break;
if (!(scanCoalescQ(/*forceCoalescQDrop=*/true) | extMemPool->softCachesCleanup())) {
// bins are not updated,
// only remaining possibility is to ask for more memory
block = askMemFromOS(totalReqSize, startModifiedCnt, &lockedBinsThreshold,
numOfLockedBins, &splittable, needAlignedBlock);
if (!block)
return NULL;
if (block != (FreeBlock*)VALID_BLOCK_IN_BIN) {
// size can be increased in askMemFromOS, that's why >=
MALLOC_ASSERT(block->sizeTmp >= size, ASSERT_TEXT);
break;
}
// valid block somewhere in bins, let's find it
block = NULL;
}
}
MALLOC_ASSERT(block, ASSERT_TEXT);
if (splittable) {
// At this point we have to be sure that slabAligned attribute describes the right block state
block = splitBlock(block, num, size, block->slabAligned, needAlignedBlock);
}
// matched blockConsumed() from startUseBlock()
bkndSync.blockReleased();
return block;
}
LargeMemoryBlock *Backend::getLargeBlock(size_t size)
{
LargeMemoryBlock *lmb =
(LargeMemoryBlock*)genericGetBlock(1, size, /*needAlignedRes=*/false);
if (lmb) {
lmb->unalignedSize = size;
if (extMemPool->userPool())
extMemPool->lmbList.add(lmb);
}
return lmb;
}
BlockI *Backend::getSlabBlock(int num) {
BlockI *b = (BlockI*)genericGetBlock(num, slabSize, /*slabAligned=*/true);
MALLOC_ASSERT(isAligned(b, slabSize), ASSERT_TEXT);
return b;
}
void Backend::putSlabBlock(BlockI *block) {
genericPutBlock((FreeBlock *)block, slabSize, /*slabAligned=*/true);
}
void *Backend::getBackRefSpace(size_t size, bool *rawMemUsed)
{
    // This block is released only at shutdown, so it can prevent
    // an entire region from being released if it came from the backend;
    // thus prefer using getRawMemory.
if (void *ret = getRawMemory(size, REGULAR)) {
*rawMemUsed = true;
return ret;
}
void *ret = genericGetBlock(1, size, /*needAlignedRes=*/false);
if (ret) *rawMemUsed = false;
return ret;
}
void Backend::putBackRefSpace(void *b, size_t size, bool rawMemUsed)
{
if (rawMemUsed)
freeRawMemory(b, size);
// ignore not raw mem, as it released on region releasing
}
void Backend::removeBlockFromBin(FreeBlock *fBlock)
{
if (fBlock->myBin != Backend::NO_BIN) {
if (fBlock->slabAligned)
freeSlabAlignedBins.lockRemoveBlock(fBlock->myBin, fBlock);
else
freeLargeBlockBins.lockRemoveBlock(fBlock->myBin, fBlock);
}
}
void Backend::genericPutBlock(FreeBlock *fBlock, size_t blockSz, bool slabAligned)
{
bkndSync.blockConsumed();
coalescAndPut(fBlock, blockSz, slabAligned);
bkndSync.blockReleased();
}
void AllLargeBlocksList::add(LargeMemoryBlock *lmb)
{
MallocMutex::scoped_lock scoped_cs(largeObjLock);
lmb->gPrev = NULL;
lmb->gNext = loHead;
if (lmb->gNext)
lmb->gNext->gPrev = lmb;
loHead = lmb;
}
void AllLargeBlocksList::remove(LargeMemoryBlock *lmb)
{
MallocMutex::scoped_lock scoped_cs(largeObjLock);
if (loHead == lmb)
loHead = lmb->gNext;
if (lmb->gNext)
lmb->gNext->gPrev = lmb->gPrev;
if (lmb->gPrev)
lmb->gPrev->gNext = lmb->gNext;
}
void Backend::putLargeBlock(LargeMemoryBlock *lmb)
{
if (extMemPool->userPool())
extMemPool->lmbList.remove(lmb);
genericPutBlock((FreeBlock *)lmb, lmb->unalignedSize, false);
}
void Backend::returnLargeObject(LargeMemoryBlock *lmb)
{
removeBackRef(lmb->backRefIdx);
putLargeBlock(lmb);
STAT_increment(getThreadId(), ThreadCommonCounters, freeLargeObj);
}
#if BACKEND_HAS_MREMAP
void *Backend::remap(void *ptr, size_t oldSize, size_t newSize, size_t alignment)
{
    // no remap for user pools, nor for objects small enough to live in bins
if (inUserPool() || min(oldSize, newSize)<maxBinned_SmallPage
// during remap, can't guarantee alignment more strict than current or
// more strict than page alignment
|| !isAligned(ptr, alignment) || alignment>extMemPool->granularity)
return NULL;
const LargeMemoryBlock* lmbOld = ((LargeObjectHdr *)ptr - 1)->memoryBlock;
const size_t oldUnalignedSize = lmbOld->unalignedSize;
FreeBlock *oldFBlock = (FreeBlock *)lmbOld;
FreeBlock *right = oldFBlock->rightNeig(oldUnalignedSize);
    // in every region only one block can have LAST_REGION_BLOCK on its right,
    // so no synchronization is needed
if (!right->isLastRegionBlock())
return NULL;
MemRegion *oldRegion = static_cast<LastFreeBlock*>(right)->memRegion;
MALLOC_ASSERT( oldRegion < ptr, ASSERT_TEXT );
const size_t oldRegionSize = oldRegion->allocSz;
if (oldRegion->type != MEMREG_ONE_BLOCK)
return NULL; // we are not single in the region
const size_t userOffset = (uintptr_t)ptr - (uintptr_t)oldRegion;
const size_t alignedSize = LargeObjectCache::alignToBin(newSize + userOffset);
const size_t requestSize =
alignUp(sizeof(MemRegion) + alignedSize + sizeof(LastFreeBlock), extMemPool->granularity);
    if (requestSize < alignedSize) // has the size calculation wrapped around?
return NULL;
regionList.remove(oldRegion);
// The deallocation should be registered in address range before mremap to
// prevent a race condition with allocation on another thread.
// (OS can reuse the memory and registerAlloc will be missed on another thread)
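    // Ordering sketch: registerFree(old range) -> mremap() -> registerAlloc(new
    // range); on MAP_FAILED the old range is re-registered below (rollback).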
usedAddrRange.registerFree((uintptr_t)oldRegion, (uintptr_t)oldRegion + oldRegionSize);
void *ret = mremap(oldRegion, oldRegion->allocSz, requestSize, MREMAP_MAYMOVE);
if (MAP_FAILED == ret) { // can't remap, revert and leave
regionList.add(oldRegion);
usedAddrRange.registerAlloc((uintptr_t)oldRegion, (uintptr_t)oldRegion + oldRegionSize);
return NULL;
}
MemRegion *region = (MemRegion*)ret;
MALLOC_ASSERT(region->type == MEMREG_ONE_BLOCK, ASSERT_TEXT);
region->allocSz = requestSize;
region->blockSz = alignedSize;
FreeBlock *fBlock = (FreeBlock *)alignUp((uintptr_t)region + sizeof(MemRegion),
largeObjectAlignment);
regionList.add(region);
startUseBlock(region, fBlock, /*addToBin=*/false);
MALLOC_ASSERT(fBlock->sizeTmp == region->blockSz, ASSERT_TEXT);
// matched blockConsumed() in startUseBlock().
// TODO: get rid of useless pair blockConsumed()/blockReleased()
bkndSync.blockReleased();
// object must start at same offset from region's start
void *object = (void*)((uintptr_t)region + userOffset);
MALLOC_ASSERT(isAligned(object, alignment), ASSERT_TEXT);
LargeObjectHdr *header = (LargeObjectHdr*)object - 1;
setBackRef(header->backRefIdx, header);
LargeMemoryBlock *lmb = (LargeMemoryBlock*)fBlock;
lmb->unalignedSize = region->blockSz;
lmb->objectSize = newSize;
lmb->backRefIdx = header->backRefIdx;
header->memoryBlock = lmb;
MALLOC_ASSERT((uintptr_t)lmb + lmb->unalignedSize >=
(uintptr_t)object + lmb->objectSize, "An object must fit to the block.");
usedAddrRange.registerAlloc((uintptr_t)region, (uintptr_t)region + requestSize);
totalMemSize.fetch_add(region->allocSz - oldRegionSize);
return object;
}
#endif /* BACKEND_HAS_MREMAP */
void Backend::releaseRegion(MemRegion *memRegion)
{
regionList.remove(memRegion);
freeRawMem(memRegion, memRegion->allocSz);
}
// coalesce fBlock with its neighborhood
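// Layout sketch (assumed, based on the code below):
//   ... [left neighbor][fBlock][right neighbor] ... [LastFreeBlock]
// Free neighbors are absorbed into the result; if the result ends up covering
// the region's whole blockSz, the caller (coalescAndPutList) may release the
// region back to the OS.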
FreeBlock *Backend::doCoalesc(FreeBlock *fBlock, MemRegion **mRegion)
{
FreeBlock *resBlock = fBlock;
size_t resSize = fBlock->sizeTmp;
MemRegion *memRegion = NULL;
fBlock->markCoalescing(resSize);
resBlock->blockInBin = false;
// coalescing with left neighbor
size_t leftSz = fBlock->trySetLeftUsed(GuardedSize::COAL_BLOCK);
if (leftSz != GuardedSize::LOCKED) {
if (leftSz == GuardedSize::COAL_BLOCK) {
coalescQ.putBlock(fBlock);
return NULL;
} else {
FreeBlock *left = fBlock->leftNeig(leftSz);
size_t lSz = left->trySetMeUsed(GuardedSize::COAL_BLOCK);
if (lSz <= GuardedSize::MAX_LOCKED_VAL) {
fBlock->setLeftFree(leftSz); // rollback
coalescQ.putBlock(fBlock);
return NULL;
} else {
MALLOC_ASSERT(lSz == leftSz, "Invalid header");
left->blockInBin = true;
resBlock = left;
resSize += leftSz;
resBlock->sizeTmp = resSize;
}
}
}
// coalescing with right neighbor
FreeBlock *right = fBlock->rightNeig(fBlock->sizeTmp);
size_t rightSz = right->trySetMeUsed(GuardedSize::COAL_BLOCK);
if (rightSz != GuardedSize::LOCKED) {
// LastFreeBlock is on the right side
if (GuardedSize::LAST_REGION_BLOCK == rightSz) {
right->setMeFree(GuardedSize::LAST_REGION_BLOCK);
memRegion = static_cast<LastFreeBlock*>(right)->memRegion;
} else if (GuardedSize::COAL_BLOCK == rightSz) {
if (resBlock->blockInBin) {
resBlock->blockInBin = false;
removeBlockFromBin(resBlock);
}
coalescQ.putBlock(resBlock);
return NULL;
} else {
size_t rSz = right->rightNeig(rightSz)->
trySetLeftUsed(GuardedSize::COAL_BLOCK);
if (rSz <= GuardedSize::MAX_LOCKED_VAL) {
right->setMeFree(rightSz); // rollback
if (resBlock->blockInBin) {
resBlock->blockInBin = false;
removeBlockFromBin(resBlock);
}
coalescQ.putBlock(resBlock);
return NULL;
} else {
MALLOC_ASSERT(rSz == rightSz, "Invalid header");
removeBlockFromBin(right);
resSize += rightSz;
// Is LastFreeBlock on the right side of right?
FreeBlock *nextRight = right->rightNeig(rightSz);
size_t nextRightSz = nextRight->
trySetMeUsed(GuardedSize::COAL_BLOCK);
if (nextRightSz > GuardedSize::MAX_LOCKED_VAL) {
if (nextRightSz == GuardedSize::LAST_REGION_BLOCK)
memRegion = static_cast<LastFreeBlock*>(nextRight)->memRegion;
nextRight->setMeFree(nextRightSz);
}
}
}
}
if (memRegion) {
MALLOC_ASSERT((uintptr_t)memRegion + memRegion->allocSz >=
(uintptr_t)right + sizeof(LastFreeBlock), ASSERT_TEXT);
MALLOC_ASSERT((uintptr_t)memRegion < (uintptr_t)resBlock, ASSERT_TEXT);
*mRegion = memRegion;
} else
*mRegion = NULL;
resBlock->sizeTmp = resSize;
return resBlock;
}
bool Backend::coalescAndPutList(FreeBlock *list, bool forceCoalescQDrop, bool reportBlocksProcessed)
{
bool regionReleased = false;
for (FreeBlock *helper; list;
list = helper,
// matches block enqueue in CoalRequestQ::putBlock()
reportBlocksProcessed? coalescQ.blockWasProcessed() : (void)0) {
MemRegion *memRegion;
bool addToTail = false;
helper = list->nextToFree;
FreeBlock *toRet = doCoalesc(list, &memRegion);
if (!toRet)
continue;
if (memRegion && memRegion->blockSz == toRet->sizeTmp
&& !extMemPool->fixedPool) {
if (extMemPool->regionsAreReleaseable()) {
// release the region, because there is no used blocks in it
if (toRet->blockInBin)
removeBlockFromBin(toRet);
releaseRegion(memRegion);
regionReleased = true;
continue;
} else // add block from empty region to end of bin,
addToTail = true; // preserving for exact fit
}
size_t currSz = toRet->sizeTmp;
int bin = sizeToBin(currSz);
bool toAligned = extMemPool->fixedPool ? toAlignedBin(toRet, currSz) : toRet->slabAligned;
bool needAddToBin = true;
if (toRet->blockInBin) {
// Does it stay in same bin?
if (toRet->myBin == bin && toRet->slabAligned == toAligned)
needAddToBin = false;
else {
toRet->blockInBin = false;
removeBlockFromBin(toRet);
}
}
// Does not stay in same bin, or bin-less; add it
if (needAddToBin) {
toRet->prev = toRet->next = toRet->nextToFree = NULL;
toRet->myBin = NO_BIN;
toRet->slabAligned = toAligned;
// If the block is too small to fit in any bin, keep it bin-less.
// It's not a leak because the block later can be coalesced.
if (currSz >= minBinnedSize) {
toRet->sizeTmp = currSz;
IndexedBins *target = toRet->slabAligned ? &freeSlabAlignedBins : &freeLargeBlockBins;
if (forceCoalescQDrop) {
target->addBlock(bin, toRet, toRet->sizeTmp, addToTail);
} else if (!target->tryAddBlock(bin, toRet, addToTail)) {
coalescQ.putBlock(toRet);
continue;
}
}
toRet->sizeTmp = 0;
}
// Free (possibly coalesced) free block.
// Adding to bin must be done before this point,
        // because once a block is free it can be coalesced, and
        // using its pointer becomes unsafe.
// Remember that coalescing is not done under any global lock.
toRet->setMeFree(currSz);
toRet->rightNeig(currSz)->setLeftFree(currSz);
}
return regionReleased;
}
// Coalesce fBlock and add it back to a bin;
// processing delayed coalescing requests.
void Backend::coalescAndPut(FreeBlock *fBlock, size_t blockSz, bool slabAligned)
{
fBlock->sizeTmp = blockSz;
fBlock->nextToFree = NULL;
fBlock->slabAligned = slabAligned;
coalescAndPutList(fBlock, /*forceCoalescQDrop=*/false, /*reportBlocksProcessed=*/false);
}
bool Backend::scanCoalescQ(bool forceCoalescQDrop)
{
FreeBlock *currCoalescList = coalescQ.getAll();
if (currCoalescList)
// reportBlocksProcessed=true informs that the blocks leave coalescQ,
// matches blockConsumed() from CoalRequestQ::putBlock()
coalescAndPutList(currCoalescList, forceCoalescQDrop,
/*reportBlocksProcessed=*/true);
// returns status of coalescQ.getAll(), as an indication of possible changes in backend
    // TODO: coalescAndPutList() may report whether some new free blocks became available
return currCoalescList;
}
FreeBlock *Backend::findBlockInRegion(MemRegion *region, size_t exactBlockSize)
{
FreeBlock *fBlock;
size_t blockSz;
uintptr_t fBlockEnd,
lastFreeBlock = (uintptr_t)region + region->allocSz - sizeof(LastFreeBlock);
static_assert(sizeof(LastFreeBlock) % sizeof(uintptr_t) == 0,
"Atomic applied on LastFreeBlock, and we put it at the end of region, that"
" is uintptr_t-aligned, so no unaligned atomic operations are possible.");
// right bound is slab-aligned, keep LastFreeBlock after it
if (region->type == MEMREG_SLAB_BLOCKS) {
fBlock = (FreeBlock *)alignUp((uintptr_t)region + sizeof(MemRegion), sizeof(uintptr_t));
fBlockEnd = alignDown(lastFreeBlock, slabSize);
} else {
fBlock = (FreeBlock *)alignUp((uintptr_t)region + sizeof(MemRegion), largeObjectAlignment);
fBlockEnd = (uintptr_t)fBlock + exactBlockSize;
MALLOC_ASSERT(fBlockEnd <= lastFreeBlock, ASSERT_TEXT);
}
if (fBlockEnd <= (uintptr_t)fBlock)
return NULL; // allocSz is too small
blockSz = fBlockEnd - (uintptr_t)fBlock;
    // TODO: extend getSlabBlock to support degradation, i.e. getting fewer blocks
    // than requested, and then relax this check
    // (currently all-or-nothing is implemented; the check reflects this)
if (blockSz < numOfSlabAllocOnMiss*slabSize)
return NULL;
region->blockSz = blockSz;
return fBlock;
}
// startUseBlock may add the free block to a bin, the block can be used and
// even released after this, so the region must be added to regionList already
void Backend::startUseBlock(MemRegion *region, FreeBlock *fBlock, bool addToBin)
{
size_t blockSz = region->blockSz;
fBlock->initHeader();
fBlock->setMeFree(blockSz);
LastFreeBlock *lastBl = static_cast<LastFreeBlock*>(fBlock->rightNeig(blockSz));
// to not get unaligned atomics during LastFreeBlock access
MALLOC_ASSERT(isAligned(lastBl, sizeof(uintptr_t)), NULL);
lastBl->initHeader();
lastBl->setMeFree(GuardedSize::LAST_REGION_BLOCK);
lastBl->setLeftFree(blockSz);
lastBl->myBin = NO_BIN;
lastBl->memRegion = region;
if (addToBin) {
unsigned targetBin = sizeToBin(blockSz);
        // while adding advance regions, register the bin for the largest block in the region
advRegBins.registerBin(targetBin);
if (region->type == MEMREG_SLAB_BLOCKS) {
fBlock->slabAligned = true;
freeSlabAlignedBins.addBlock(targetBin, fBlock, blockSz, /*addToTail=*/false);
} else {
fBlock->slabAligned = false;
freeLargeBlockBins.addBlock(targetBin, fBlock, blockSz, /*addToTail=*/false);
}
} else {
// to match with blockReleased() in genericGetBlock
bkndSync.blockConsumed();
        // Determine our alignment so that splitBlock operates correctly
fBlock->slabAligned = region->type == MEMREG_SLAB_BLOCKS ? true : false;
fBlock->sizeTmp = fBlock->tryLockBlock();
MALLOC_ASSERT(fBlock->sizeTmp >= FreeBlock::minBlockSize, "Locking must be successful");
}
}
void MemRegionList::add(MemRegion *r)
{
r->prev = NULL;
MallocMutex::scoped_lock lock(regionListLock);
r->next = head;
head = r;
if (head->next)
head->next->prev = head;
}
void MemRegionList::remove(MemRegion *r)
{
MallocMutex::scoped_lock lock(regionListLock);
if (head == r)
head = head->next;
if (r->next)
r->next->prev = r->prev;
if (r->prev)
r->prev->next = r->next;
}
#if __TBB_MALLOC_BACKEND_STAT
int MemRegionList::reportStat(FILE *f)
{
int regNum = 0;
MallocMutex::scoped_lock lock(regionListLock);
for (MemRegion *curr = head; curr; curr = curr->next) {
fprintf(f, "%p: max block %lu B, ", curr, curr->blockSz);
regNum++;
}
return regNum;
}
#endif
FreeBlock *Backend::addNewRegion(size_t size, MemRegionType memRegType, bool addToBin)
{
static_assert(sizeof(BlockMutexes) <= sizeof(BlockI), "Header must be not overwritten in used blocks");
MALLOC_ASSERT(FreeBlock::minBlockSize > GuardedSize::MAX_SPEC_VAL,
"Block length must not conflict with special values of GuardedSize");
// If the region is not "for slabs" we should reserve some space for
// a region header, the worst case alignment and the last block mark.
const size_t requestSize = memRegType == MEMREG_SLAB_BLOCKS ? size :
size + sizeof(MemRegion) + largeObjectAlignment
+ FreeBlock::minBlockSize + sizeof(LastFreeBlock);
size_t rawSize = requestSize;
MemRegion *region = (MemRegion*)allocRawMem(rawSize);
if (!region) {
MALLOC_ASSERT(rawSize==requestSize, "getRawMem has not allocated memory but changed the allocated size.");
return NULL;
}
if (rawSize < sizeof(MemRegion)) {
if (!extMemPool->fixedPool)
freeRawMem(region, rawSize);
return NULL;
}
region->type = memRegType;
region->allocSz = rawSize;
FreeBlock *fBlock = findBlockInRegion(region, size);
if (!fBlock) {
if (!extMemPool->fixedPool)
freeRawMem(region, rawSize);
return NULL;
}
regionList.add(region);
startUseBlock(region, fBlock, addToBin);
bkndSync.binsModified();
return addToBin? (FreeBlock*)VALID_BLOCK_IN_BIN : fBlock;
}
void Backend::init(ExtMemoryPool *extMemoryPool)
{
extMemPool = extMemoryPool;
usedAddrRange.init();
coalescQ.init(&bkndSync);
bkndSync.init(this);
}
void Backend::reset()
{
MALLOC_ASSERT(extMemPool->userPool(), "Only user pool can be reset.");
// no active threads are allowed in backend while reset() called
verify();
freeLargeBlockBins.reset();
freeSlabAlignedBins.reset();
advRegBins.reset();
for (MemRegion *curr = regionList.head; curr; curr = curr->next) {
FreeBlock *fBlock = findBlockInRegion(curr, curr->blockSz);
MALLOC_ASSERT(fBlock, "A memory region unexpectedly got smaller");
startUseBlock(curr, fBlock, /*addToBin=*/true);
}
}
bool Backend::destroy()
{
bool noError = true;
// no active threads are allowed in backend while destroy() called
verify();
if (!inUserPool()) {
freeLargeBlockBins.reset();
freeSlabAlignedBins.reset();
}
while (regionList.head) {
MemRegion *helper = regionList.head->next;
noError &= freeRawMem(regionList.head, regionList.head->allocSz);
regionList.head = helper;
}
return noError;
}
bool Backend::clean()
{
scanCoalescQ(/*forceCoalescQDrop=*/false);
bool res = false;
// We can have several blocks occupying a whole region,
// because such regions are added in advance (see askMemFromOS() and reset()),
// and never used. Release them all.
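    // (For instance, the NUM_OF_REG advance regions pre-allocated in
    // askMemFromOS() that no allocation ever touched.)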
for (int i = advRegBins.getMinUsedBin(0); i != -1; i = advRegBins.getMinUsedBin(i+1)) {
if (i == freeSlabAlignedBins.getMinNonemptyBin(i))
res |= freeSlabAlignedBins.tryReleaseRegions(i, this);
if (i == freeLargeBlockBins.getMinNonemptyBin(i))
res |= freeLargeBlockBins.tryReleaseRegions(i, this);
}
return res;
}
void Backend::IndexedBins::verify()
{
#if MALLOC_DEBUG
for (int i=0; i<freeBinsNum; i++) {
for (FreeBlock *fb = freeBins[i].head.load(std::memory_order_relaxed); fb; fb=fb->next) {
uintptr_t mySz = fb->myL.value;
MALLOC_ASSERT(mySz>GuardedSize::MAX_SPEC_VAL, ASSERT_TEXT);
FreeBlock *right = (FreeBlock*)((uintptr_t)fb + mySz);
suppress_unused_warning(right);
MALLOC_ASSERT(right->myL.value<=GuardedSize::MAX_SPEC_VAL, ASSERT_TEXT);
MALLOC_ASSERT(right->leftL.value==mySz, ASSERT_TEXT);
MALLOC_ASSERT(fb->leftL.value<=GuardedSize::MAX_SPEC_VAL, ASSERT_TEXT);
}
}
#endif
}
// For correct operation, this must be called while no other thread
// is changing the backend.
void Backend::verify()
{
#if MALLOC_DEBUG
scanCoalescQ(/*forceCoalescQDrop=*/false);
#endif // MALLOC_DEBUG
freeLargeBlockBins.verify();
freeSlabAlignedBins.verify();
}
#if __TBB_MALLOC_BACKEND_STAT
size_t Backend::Bin::countFreeBlocks()
{
size_t cnt = 0;
{
MallocMutex::scoped_lock lock(tLock);
for (FreeBlock *fb = head; fb; fb = fb->next)
cnt++;
}
return cnt;
}
size_t Backend::Bin::reportFreeBlocks(FILE *f)
{
size_t totalSz = 0;
MallocMutex::scoped_lock lock(tLock);
for (FreeBlock *fb = head; fb; fb = fb->next) {
size_t sz = fb->tryLockBlock();
fb->setMeFree(sz);
fprintf(f, " [%p;%p]", fb, (void*)((uintptr_t)fb+sz));
totalSz += sz;
}
return totalSz;
}
void Backend::IndexedBins::reportStat(FILE *f)
{
size_t totalSize = 0;
for (int i=0; i<Backend::freeBinsNum; i++)
if (size_t cnt = freeBins[i].countFreeBlocks()) {
totalSize += freeBins[i].reportFreeBlocks(f);
fprintf(f, " %d:%lu, ", i, cnt);
}
fprintf(f, "\ttotal size %lu KB", totalSize/1024);
}
void Backend::reportStat(FILE *f)
{
scanCoalescQ(/*forceCoalescQDrop=*/false);
fprintf(f, "\n regions:\n");
int regNum = regionList.reportStat(f);
fprintf(f, "\n%d regions, %lu KB in all regions\n free bins:\nlarge bins: ",
regNum, totalMemSize/1024);
freeLargeBlockBins.reportStat(f);
fprintf(f, "\naligned bins: ");
freeSlabAlignedBins.reportStat(f);
fprintf(f, "\n");
}
#endif // __TBB_MALLOC_BACKEND_STAT
} } // namespaces<|fim▁end|> | MALLOC_ASSERT(rSz == sz, ASSERT_TEXT);
return sz;
}
void markCoalescing(size_t blockSz) { |
<|file_name|>session.rs<|end_file_name|><|fim▁begin|>//! Endpoints for user session management.
pub mod get_login_types;
pub mod login;<|fim▁hole|>pub mod logout_all;
pub mod sso_login;
pub mod sso_login_with_provider;<|fim▁end|> | pub mod login_fallback;
pub mod logout; |
<|file_name|>user_agent.rs<|end_file_name|><|fim▁begin|>header! {
#[doc="`User-Agent` header, defined in"]
#[doc="[RFC7231](http://tools.ietf.org/html/rfc7231#section-5.5.3)"]
#[doc=""]<|fim▁hole|> #[doc="agent originating the request, which is often used by servers to help"]
#[doc="identify the scope of reported interoperability problems, to work"]
#[doc="around or tailor responses to avoid particular user agent"]
#[doc="limitations, and for analytics regarding browser or operating system"]
#[doc="use. A user agent SHOULD send a User-Agent field in each request"]
#[doc="unless specifically configured not to do so."]
#[doc=""]
#[doc="# ABNF"]
#[doc="```plain"]
#[doc="User-Agent = product *( RWS ( product / comment ) )"]
#[doc="product = token [\"/\" product-version]"]
#[doc="product-version = token"]
#[doc="```"]
#[doc=""]
#[doc="# Example values"]
#[doc="* `CERN-LineMode/2.15 libwww/2.17b3`"]
#[doc="* `Bunnies`"]
#[doc=""]
#[doc="# Notes"]
#[doc="* The parser does not split the value"]
(UserAgent, "User-Agent") => [String]
test_user_agent {
// Testcase from RFC
test_header!(test1, vec![b"CERN-LineMode/2.15 libwww/2.17b3"]);
// Own testcase
test_header!(test2, vec![b"Bunnies"], Some(UserAgent("Bunnies".to_string())));
}
}<|fim▁end|> | #[doc="The `User-Agent` header field contains information about the user"] |
<|file_name|>SeedRule.ts<|end_file_name|><|fim▁begin|>import { TOptionsNormalizerRule } from '../../types/options/TOptionsNormalizerRule';
import { IOptions } from '../../interfaces/options/IOptions';
/**
* @param {IOptions} options
* @returns {IOptions}
*/
export const SeedRule: TOptionsNormalizerRule = (options: IOptions): IOptions => {
if (options.seed) {
return {
...options,
seed: options.seed<|fim▁hole|> const getRandomInteger: (min: number, max: number) => number = (min: number, max: number) => {
return Math.floor(Math.random() * (max - min + 1) + min);
};
return {
...options,
seed: getRandomInteger(0, 999_999_999)
};
};<|fim▁end|> | };
}
|
<|file_name|>SmsCommunicatorFragment.java<|end_file_name|><|fim▁begin|>package info.nightscout.androidaps.plugins.SmsCommunicator;
import android.app.Activity;<|fim▁hole|>import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.Html;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.squareup.otto.Subscribe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.Comparator;
import info.nightscout.androidaps.MainApp;
import info.nightscout.androidaps.R;
import info.nightscout.androidaps.plugins.Common.SubscriberFragment;
import info.nightscout.androidaps.plugins.SmsCommunicator.events.EventSmsCommunicatorUpdateGui;
import info.nightscout.utils.DateUtil;
/**
* A simple {@link Fragment} subclass.
*/
public class SmsCommunicatorFragment extends SubscriberFragment {
private static Logger log = LoggerFactory.getLogger(SmsCommunicatorFragment.class);
private static SmsCommunicatorPlugin smsCommunicatorPlugin;
public static SmsCommunicatorPlugin getPlugin() {
if(smsCommunicatorPlugin==null){
smsCommunicatorPlugin = new SmsCommunicatorPlugin();
}
return smsCommunicatorPlugin;
}
TextView logView;
public SmsCommunicatorFragment() {
super();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.smscommunicator_fragment, container, false);
logView = (TextView) view.findViewById(R.id.smscommunicator_log);
updateGUI();
return view;
}
@Subscribe
public void onStatusEvent(final EventSmsCommunicatorUpdateGui ev) {
updateGUI();
}
@Override
protected void updateGUI() {
Activity activity = getActivity();
if (activity != null)
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
class CustomComparator implements Comparator<SmsCommunicatorPlugin.Sms> {
public int compare(SmsCommunicatorPlugin.Sms object1, SmsCommunicatorPlugin.Sms object2) {
                        return object1.date.compareTo(object2.date); // compareTo avoids int overflow of a getTime() subtraction
}
}
Collections.sort(getPlugin().messages, new CustomComparator());
int messagesToShow = 40;
int start = Math.max(0, getPlugin().messages.size() - messagesToShow);
String logText = "";
for (int x = start; x < getPlugin().messages.size(); x++) {
SmsCommunicatorPlugin.Sms sms = getPlugin().messages.get(x);
if (sms.received) {
logText += DateUtil.timeString(sms.date) + " <<< " + (sms.processed ? "● " : "○ ") + sms.phoneNumber + " <b>" + sms.text + "</b><br>";
} else if (sms.sent) {
logText += DateUtil.timeString(sms.date) + " >>> " + (sms.processed ? "● " : "○ ") + sms.phoneNumber + " <b>" + sms.text + "</b><br>";
}
}
logView.setText(Html.fromHtml(logText));
}
});
}
}<|fim▁end|> | |
<|file_name|>test_checks.py<|end_file_name|><|fim▁begin|>from flake8_quotes import QuoteChecker
import os
import subprocess
from unittest import TestCase
class TestChecks(TestCase):
def test_get_noqa_lines(self):
checker = QuoteChecker(None, filename=get_absolute_path('data/no_qa.py'))
self.assertEqual(checker.get_noqa_lines(checker.get_file_contents()), [2])
class TestFlake8Stdin(TestCase):
def test_stdin(self):
"""Test using stdin."""
filepath = get_absolute_path('data/doubles.py')
with open(filepath, 'rb') as f:
p = subprocess.Popen(['flake8', '--select=Q', '-'], stdin=f,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
stdout_lines = stdout.splitlines()
self.assertEqual(stderr, b'')
self.assertEqual(len(stdout_lines), 3)
self.assertRegex(
stdout_lines[0],
b'stdin:1:(24|25): Q000 Double quotes found but single quotes preferred')
self.assertRegex(
stdout_lines[1],
b'stdin:2:(24|25): Q000 Double quotes found but single quotes preferred')
self.assertRegex(
stdout_lines[2],
b'stdin:3:(24|25): Q000 Double quotes found but single quotes preferred')
class DoublesTestChecks(TestCase):
def setUp(self):
class DoublesOptions():
inline_quotes = "'"
multiline_quotes = "'"
QuoteChecker.parse_options(DoublesOptions)
def test_multiline_string(self):
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles_multiline_string.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [
{'col': 4, 'line': 1, 'message': 'Q001 Double quote multiline found but single quotes preferred'},
])<|fim▁hole|> lines = f.readlines()
doubles_checker = QuoteChecker(None, lines=lines)
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [
{'col': 4, 'line': 1, 'message': 'Q001 Double quote multiline found but single quotes preferred'},
])
def test_wrapped(self):
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles_wrapped.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [])
def test_doubles(self):
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [
{'col': 24, 'line': 1, 'message': 'Q000 Double quotes found but single quotes preferred'},
{'col': 24, 'line': 2, 'message': 'Q000 Double quotes found but single quotes preferred'},
{'col': 24, 'line': 3, 'message': 'Q000 Double quotes found but single quotes preferred'},
])
def test_noqa_doubles(self):
checker = QuoteChecker(None, get_absolute_path('data/doubles_noqa.py'))
self.assertEqual(list(checker.run()), [])
def test_escapes(self):
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles_escaped.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [
{'col': 25, 'line': 1, 'message': 'Q003 Change outer quotes to avoid escaping inner quotes'},
])
def test_escapes_allowed(self):
class Options():
inline_quotes = "'"
avoid_escape = False
QuoteChecker.parse_options(Options)
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles_escaped.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [])
class DoublesAliasTestChecks(TestCase):
def setUp(self):
class DoublesAliasOptions():
inline_quotes = 'single'
multiline_quotes = 'single'
QuoteChecker.parse_options(DoublesAliasOptions)
def test_doubles(self):
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles_wrapped.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [])
doubles_checker = QuoteChecker(None, filename=get_absolute_path('data/doubles.py'))
self.assertEqual(list(doubles_checker.get_quotes_errors(doubles_checker.get_file_contents())), [
{'col': 24, 'line': 1, 'message': 'Q000 Double quotes found but single quotes preferred'},
{'col': 24, 'line': 2, 'message': 'Q000 Double quotes found but single quotes preferred'},
{'col': 24, 'line': 3, 'message': 'Q000 Double quotes found but single quotes preferred'},
])
class SinglesTestChecks(TestCase):
def setUp(self):
class SinglesOptions():
inline_quotes = '"'
multiline_quotes = '"'
QuoteChecker.parse_options(SinglesOptions)
def test_multiline_string(self):
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles_multiline_string.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [
{'col': 4, 'line': 1, 'message': 'Q001 Single quote multiline found but double quotes preferred'},
])
def test_wrapped(self):
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles_wrapped.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [])
def test_singles(self):
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [
{'col': 24, 'line': 1, 'message': 'Q000 Single quotes found but double quotes preferred'},
{'col': 24, 'line': 2, 'message': 'Q000 Single quotes found but double quotes preferred'},
{'col': 24, 'line': 3, 'message': 'Q000 Single quotes found but double quotes preferred'},
])
def test_noqa_singles(self):
checker = QuoteChecker(None, get_absolute_path('data/singles_noqa.py'))
self.assertEqual(list(checker.run()), [])
def test_escapes(self):
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles_escaped.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [
{'col': 25, 'line': 1, 'message': 'Q003 Change outer quotes to avoid escaping inner quotes'},
])
def test_escapes_allowed(self):
class Options():
inline_quotes = '"'
avoid_escape = False
QuoteChecker.parse_options(Options)
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles_escaped.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [])
class SinglesAliasTestChecks(TestCase):
def setUp(self):
class SinglesAliasOptions():
inline_quotes = 'double'
multiline_quotes = 'double'
QuoteChecker.parse_options(SinglesAliasOptions)
def test_singles(self):
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles_wrapped.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [])
singles_checker = QuoteChecker(None, filename=get_absolute_path('data/singles.py'))
self.assertEqual(list(singles_checker.get_quotes_errors(singles_checker.get_file_contents())), [
{'col': 24, 'line': 1, 'message': 'Q000 Single quotes found but double quotes preferred'},
{'col': 24, 'line': 2, 'message': 'Q000 Single quotes found but double quotes preferred'},
{'col': 24, 'line': 3, 'message': 'Q000 Single quotes found but double quotes preferred'},
])
class MultilineTestChecks(TestCase):
def test_singles(self):
class Options():
inline_quotes = "'"
multiline_quotes = '"'
QuoteChecker.parse_options(Options)
multiline_checker = QuoteChecker(None, filename=get_absolute_path('data/multiline_string.py'))
self.assertEqual(list(multiline_checker.get_quotes_errors(multiline_checker.get_file_contents())), [
{'col': 4, 'line': 10, 'message': 'Q001 Single quote multiline found but double quotes preferred'},
])
def test_singles_alias(self):
class Options():
inline_quotes = 'single'
multiline_quotes = 'double'
QuoteChecker.parse_options(Options)
multiline_checker = QuoteChecker(None, filename=get_absolute_path('data/multiline_string.py'))
self.assertEqual(list(multiline_checker.get_quotes_errors(multiline_checker.get_file_contents())), [
{'col': 4, 'line': 10, 'message': 'Q001 Single quote multiline found but double quotes preferred'},
])
def test_doubles(self):
class Options():
inline_quotes = '"'
multiline_quotes = "'"
QuoteChecker.parse_options(Options)
multiline_checker = QuoteChecker(None, filename=get_absolute_path('data/multiline_string.py'))
self.assertEqual(list(multiline_checker.get_quotes_errors(multiline_checker.get_file_contents())), [
{'col': 4, 'line': 1, 'message': 'Q001 Double quote multiline found but single quotes preferred'},
])
def test_doubles_alias(self):
class Options():
inline_quotes = 'double'
multiline_quotes = 'single'
QuoteChecker.parse_options(Options)
multiline_checker = QuoteChecker(None, filename=get_absolute_path('data/multiline_string.py'))
self.assertEqual(list(multiline_checker.get_quotes_errors(multiline_checker.get_file_contents())), [
{'col': 4, 'line': 1, 'message': 'Q001 Double quote multiline found but single quotes preferred'},
])
def get_absolute_path(filepath):
return os.path.join(os.path.dirname(__file__), filepath)<|fim▁end|> |
def test_multiline_string_using_lines(self):
with open(get_absolute_path('data/doubles_multiline_string.py')) as f: |
<|file_name|>test_parsers.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
import mock
import os
from shutil import rmtree
from tempfile import mkdtemp
from django.test import TestCase
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from django.template.base import TemplateDoesNotExist
from paperclip.models import Attachment
from geotrek.common.models import Organism, FileType
from geotrek.common.parsers import ExcelParser, AttachmentParserMixin
class OrganismParser(ExcelParser):
model = Organism
fields = {'organism': 'nOm'}
class OrganismEidParser(ExcelParser):
model = Organism
fields = {'organism': 'nOm'}
eid = 'organism'
class AttachmentParser(AttachmentParserMixin, OrganismEidParser):
non_fields = {'attachments': 'photo'}
class ParserTests(TestCase):
def test_bad_parser_class(self):
with self.assertRaises(CommandError) as cm:
call_command('import', 'geotrek.common.DoesNotExist', '', verbosity=0)
self.assertEqual(unicode(cm.exception), u"Failed to import parser class 'geotrek.common.DoesNotExist'")
def test_bad_filename(self):
with self.assertRaises(CommandError) as cm:
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', 'find_me/I_am_not_there.shp', verbosity=0)
self.assertEqual(unicode(cm.exception), u"File does not exists at: find_me/I_am_not_there.shp")
def test_create(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
self.assertEqual(Organism.objects.count(), 1)
organism = Organism.objects.get()
self.assertEqual(organism.organism, u"Comité Théodule")
def test_duplicate_without_eid(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
self.assertEqual(Organism.objects.count(), 2)
def test_unmodified_with_eid(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
self.assertEqual(Organism.objects.count(), 1)
def test_updated_with_eid(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
filename2 = os.path.join(os.path.dirname(__file__), 'data', 'organism2.xls')
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename2, verbosity=0)
self.assertEqual(Organism.objects.count(), 2)
organisms = Organism.objects.order_by('pk')
self.assertEqual(organisms[0].organism, u"Comité Théodule")
self.assertEqual(organisms[1].organism, u"Comité Hippolyte")
def test_report_format_text(self):
parser = OrganismParser()
self.assertRegexpMatches(parser.report(), '0/0 lines imported.')
self.assertNotRegexpMatches(parser.report(), '<div id=\"collapse-\$celery_id\" class=\"collapse\">')
def test_report_format_html(self):
parser = OrganismParser()
self.assertRegexpMatches(parser.report(output_format='html'), '<div id=\"collapse-\$celery_id\" class=\"collapse\">')<|fim▁hole|> def test_report_format_bad(self):
parser = OrganismParser()
with self.assertRaises(TemplateDoesNotExist):
parser.report(output_format='toto')
@override_settings(MEDIA_ROOT=mkdtemp('geotrek_test'))
class AttachmentParserTests(TestCase):
def setUp(self):
self.filetype = FileType.objects.create(type=u"Photographie")
def tearDown(self):
rmtree(settings.MEDIA_ROOT)
@mock.patch('requests.get')
def test_attachment(self, mocked):
mocked.return_value.status_code = 200
mocked.return_value.content = ''
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
organism = Organism.objects.get()
attachment = Attachment.objects.get()
self.assertEqual(attachment.content_object, organism)
self.assertEqual(attachment.attachment_file.name, 'paperclip/common_organism/{pk}/titi.png'.format(pk=organism.pk))
self.assertEqual(attachment.filetype, self.filetype)
@mock.patch('requests.get')
def test_attachment_not_updated(self, mocked):
mocked.return_value.status_code = 200
mocked.return_value.content = ''
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
self.assertEqual(mocked.call_count, 1)
self.assertEqual(Attachment.objects.count(), 1)<|fim▁end|> | |
<|file_name|>retry.py<|end_file_name|><|fim▁begin|># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Function call retry utility."""
import time
from typing import Any, Callable, Mapping, Optional, Sequence, Type, TypeVar
from gazoo_device import errors
def _default_is_successful(_: Any) -> bool:
return True
def not_func(val: Any) -> bool:
"""Returns True if bool(val) evaluates to False."""
return not bool(val)
def is_true(val: Any) -> bool:
"""Returns True if bool(val) evaluates to True."""
return bool(val)
_FuncReturnType = TypeVar("_FuncReturnType")
def retry(
func: Callable[..., _FuncReturnType],
func_args: Sequence[Any] = (),
func_kwargs: Optional[Mapping[str, Any]] = None,
is_successful: Callable[[_FuncReturnType], bool] = _default_is_successful,
timeout: float = 10,
interval: float = 1,
reraise: bool = True,<|fim▁hole|> Success of execution of func() is determined by is_successful() function,
which should return True on successful execution of func().
Args:
func: Function to execute.
func_args: Positional arguments to the function.
func_kwargs: Keyword arguments to the function.
is_successful: Function which takes in the result of func() and returns
whether function execution should be considered successful. To indicate
success, return True. Defaults to always returning True.
timeout: If no run of func() succeeds in this time period, raise an error.
interval: How long to wait between attempts of func().
reraise: Whether to re-raise exceptions in func() or not. If True, will
re-raise any exceptions from func(). If False, considers execution of
func() a failure if an Exception is raised. is_successful() will NOT be
called if an Exception occurs.
exc_type: Type of exception to raise when timeout is reached. Note that the
class constructor will be called with just 1 argument.
Returns:
Return value of first successful func() call.
Raises:
Exception: if timeout is reached, or if an Exception occurs in func() with
reraise=True.
"""
if func_kwargs is None:
func_kwargs = {}
tried_times = 0
start_time = time.time()
end_time = start_time + timeout
while time.time() < end_time:
exception_occurred = False
tried_times += 1
try:
func_result = func(*func_args, **func_kwargs)
except Exception: # pylint: disable=broad-except
if reraise:
raise
else:
exception_occurred = True
if not exception_occurred and is_successful(func_result):
return func_result
time.sleep(interval)
time_elapsed = time.time() - start_time
raise exc_type("Timeout in {}s. Tried calling {} {} times.".format(
time_elapsed, func.__name__, tried_times))<|fim▁end|> | exc_type: Type[Exception] = errors.CommunicationTimeoutError
) -> _FuncReturnType:
"""Retries func() until it succeeds or timeout is reached.
|
<|file_name|>InputDataClient.py<|end_file_name|><|fim▁begin|>from shutil import copy
from os import remove
import sys
from I_Data_Degradation_Block.Code.Degradation import main as blockI
from II_Cryptography_Block.Code.encrypt import main as blockII
from III_Shamirs_Block.Code.SecretSharing import main as blockIII
from IV_DHT_Block.Code.Parted_Keys_to_OWI_Input import main as blockIV
def removeE(path):
try:
remove(path)
except:
pass
def checkentryInt(var):<|fim▁hole|> print "Incorrect entry, please retype it."
var = raw_input()
var = checkentryInt(var)
return var
def checkentryFloat(var):
try:
return float(var)
except:
print "Incorrect entry, please retype it."
var = raw_input()
var = checkentryFloat(var)
return var
def checkentryDuration(var):
try:
int(var[0])
int(var[1])
int(var[2])
return var
except:
print "Incorrect entry, please retype it. Syntax Hours/Minutes/Seconds"
var = raw_input()
var = var.split("/")
var = checkentryDuration(var)
return var
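# Usage sketch (illustrative values): python InputDataClient.py 5 60 10
# The positional arguments n, churn and std are converted to ints in main()
# and forwarded to the Shamir secret-sharing block (blockIII).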
def main(n,churn,std):
n= int(n)
churn=int(churn)
std=int(std)
print "Hello"
print "Please enter your current latitude:"
latitude = raw_input()
latitude = checkentryFloat(latitude)
print "Please enter your current longitude:"
longitude = raw_input()
longitude = checkentryFloat(longitude)
print "How many degradation levels do you want ?"
degradationLevels = raw_input()
degradationLevels = checkentryInt(degradationLevels)
################################################################
# BLOCK 1
################################################################
blockIinput = open('./I_Data_Degradation_Block/Inputs/Data to Degrade/Data', 'w')
blockIinput.write(str(latitude)+" ; "+str(longitude))
blockIinput.close()
blockI('./I_Data_Degradation_Block/Inputs/Data to Degrade/Data','./I_Data_Degradation_Block/Outputs/Degraded Data/Degraded Data',degradationLevels)
copy('./I_Data_Degradation_Block/Outputs/Degraded Data/Degraded Data' , './II_Cryptography_Block/Inputs/Degraded Data')
################################################################
# BLOCK 2
################################################################
blockII('./II_Cryptography_Block/Inputs/Degraded Data/Degraded Data','./II_Cryptography_Block/Outputs/Encrypted Data/Encrypted Data','./II_Cryptography_Block/Outputs/Encryption Keys/Keys','./II_Cryptography_Block/Outputs/Encryption Keys/Record')
copy('./II_Cryptography_Block/Outputs/Encryption Keys/Keys' , './III_Shamirs_Block/Inputs/Encryption Keys')
################################################################
# BLOCK 3
################################################################
dataduration = []
for a in range(0,degradationLevels):
i = a+1
print "How long do you want the data of privacy level "+str(i)+" to be available ? (1 is the most private level, the syntax is hours/minutes/seconds)"
currentduration = raw_input()
currentduration = currentduration.split("/")
currentduration = checkentryDuration(currentduration)
dataduration.append(currentduration)
blockIII('./III_Shamirs_Block/Inputs/Encryption Keys/Keys', './III_Shamirs_Block/Outputs/Parted Keys/KeyParts',churn,dataduration,n,std,'./PointRecord')
copy('./III_Shamirs_Block/Outputs/Parted Keys/KeyParts' , './IV_DHT_Block/Inputs/Parted Keys')
copy('./II_Cryptography_Block/Outputs/Encryption Keys/Record' , '.')
################################################################
# BLOCK 4
################################################################
blockIV('./IV_DHT_Block/Inputs/Parted Keys/KeyParts',"./LocalizationsInstruction","127.0.0.1","10000")
FinalOutputFile = open("./FinalOutput","w")
with open("./Record") as RecordFile:
PointFile = open("./PointRecord")
for line in RecordFile:
if(not "*************" in line):
dataHash = line.split(";")[0]
i=0
for i in range(0,n):
pointline = PointFile.readline()
pointline = pointline.split(" ")
FinalOutputFile.write(dataHash +" ; "+ pointline[1])
PointFile.close()
FinalOutputFile.close()
################################################################
# Cleaning up
################################################################
removeE('./I_Data_Degradation_Block/Inputs/Data to Degrade/Data')
removeE('./I_Data_Degradation_Block/Outputs/Degraded Data/Degraded Data')
removeE('./II_Cryptography_Block/Outputs/Encryption Keys/Keys')
removeE('./II_Cryptography_Block/Outputs/Encryption Keys/Record')
removeE('./II_Cryptography_Block/Inputs/Degraded Data/Degraded Data')
removeE('./III_Shamirs_Block/Inputs/Encryption Keys/Keys')
removeE('./III_Shamirs_Block/Outputs/Parted Keys/KeyParts')
removeE('./IV_DHT_Block/Inputs/Parted Keys')
removeE('./Record')
removeE('./PointRecord')
removeE('./tempfile.owi')
if __name__ == "__main__":
sys.exit(main(sys.argv[1], sys.argv[2], sys.argv[3]))<|fim▁end|> | try:
return int(var)
except: |
<|file_name|>authapi.go<|end_file_name|><|fim▁begin|>package authapi
import(
"github.com/duosecurity/duo_api_golang"
"encoding/json"
"net/url"
"strconv"
)
type AuthApi struct {
api duoapi.DuoApi
}
// Build a new Duo Auth API object.
// api is a duoapi.DuoApi object used to make the Duo Rest API calls.
// Example: authapi.NewAuthApi(*duoapi.NewDuoApi(ikey,skey,host,userAgent,duoapi.SetTimeout(10*time.Second)))
func NewAuthApi(api duoapi.DuoApi) *AuthApi {
return &AuthApi{api: api}
}
// API calls will return a StatResult object. On success, Stat is 'OK'.
// On error, Stat is 'FAIL', and Code, Message, and Message_Detail
// contain error information.
type StatResult struct {
Stat string
Code *int32
Message *string
Message_Detail *string
}
// Return object for the 'Ping' API call.
type PingResult struct {
StatResult
Response struct {
Time int64
}
}
// Duo's Ping method. https://www.duosecurity.com/docs/authapi#/ping
// This is an unsigned Duo Rest API call which returns the Duo system's time.
// Use this method to determine whether your system time is in sync with Duo's.
func (api *AuthApi) Ping() (*PingResult, error) {
_, body, err := api.api.Call("GET", "/auth/v2/ping", nil, duoapi.UseTimeout)
if err != nil {
return nil, err
}
ret := &PingResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
// Return object for the 'Check' API call.
type CheckResult struct {
StatResult
Response struct {
Time int64
}
}
// Call Duo's Check method. https://www.duosecurity.com/docs/authapi#/check
// Check is a signed Duo API call, which returns the Duo system's time.
// Use this method to determine whether your ikey, skey and host are correct,
// and whether your system time is in sync with Duo's.
func (api *AuthApi) Check() (*CheckResult, error) {
_, body, err := api.api.SignedCall("GET", "/auth/v2/check", nil, duoapi.UseTimeout)
if err != nil {
return nil, err
}
ret := &CheckResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
// Return object for the 'Logo' API call.
type LogoResult struct {
StatResult
png *[]byte
}
// Duo's Logo method. https://www.duosecurity.com/docs/authapi#/logo
// If the API call is successful, the configured logo png is returned. Otherwise,
// error information is returned in the LogoResult return value.
func (api *AuthApi) Logo() (*LogoResult, error) {
resp, body, err := api.api.SignedCall("GET", "/auth/v2/logo", nil, duoapi.UseTimeout)
if err != nil {
return nil, err
}
if resp.StatusCode == 200 {
ret := &LogoResult{StatResult:StatResult{Stat: "OK"},
png: &body}
return ret, nil
}
ret := &LogoResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
// Optional parameter for the Enroll method.
func EnrollUsername(username string) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("username", username)
}
}
// Optional parameter for the Enroll method.
func EnrollValidSeconds(secs uint64) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("valid_secs", strconv.FormatUint(secs, 10))
}
}
// Enroll return type.
type EnrollResult struct {
StatResult
Response struct {
Activation_Barcode string
Activation_Code string
Expiration int64
User_Id string
Username string
}
}
// Duo's Enroll method. https://www.duosecurity.com/docs/authapi#/enroll
// Use EnrollUsername() to include the optional username parameter.
// Use EnrollValidSeconds() to change the default validation time limit that the
// user has to complete enrollment.
func (api *AuthApi) Enroll(options ...func(*url.Values)) (*EnrollResult, error) {
opts := url.Values{}
for _, o := range options {
o(&opts)
}
_, body, err := api.api.SignedCall("POST", "/auth/v2/enroll", opts, duoapi.UseTimeout)
if err != nil {
return nil, err
}
ret := &EnrollResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
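// Example (illustrative sketch, not part of this package; ikey, skey, host and
// userAgent are placeholder values):
//
//	auth := authapi.NewAuthApi(*duoapi.NewDuoApi(ikey, skey, host, userAgent))
//	res, err := auth.Enroll(authapi.EnrollUsername("alice"),
//		authapi.EnrollValidSeconds(3600))
//	if err == nil && res.Stat == "OK" {
//		// Show res.Response.Activation_Barcode to the user.
//	}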
// Response is "success", "invalid" or "waiting".
type EnrollStatusResult struct {
StatResult
Response string
}
// Duo's EnrollStatus method. https://www.duosecurity.com/docs/authapi#/enroll_status
// Return the status of an outstanding Enrollment.
func (api *AuthApi) EnrollStatus(userid string,
activationCode string) (*EnrollStatusResult, error) {
queryArgs := url.Values{}
queryArgs.Set("user_id", userid)
queryArgs.Set("activation_code", activationCode)
_, body, err := api.api.SignedCall("POST",
"/auth/v2/enroll_status",
queryArgs,
duoapi.UseTimeout)
if err != nil {
return nil, err
}
ret := &EnrollStatusResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
// Preauth return type.
type PreauthResult struct {
StatResult
Response struct {
Result string
Status_Msg string
Enroll_Portal_Url string
Devices []struct {
Device string
Type string
Name string
Number string
Capabilities []string
}
}
}
func PreauthUserId(userid string) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("user_id", userid)
}
}
func PreauthUsername(username string) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("username", username)
}
}
func PreauthIpAddr(ip string) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("ipaddr", ip)
}
}
func PreauthTrustedToken(trustedtoken string) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("trusted_device_token", trustedtoken)
}
}
// Duo's Preauth method. https://www.duosecurity.com/docs/authapi#/preauth
// options Optional values to include in the preauth call.
// Use PreauthUserId to specify the user_id parameter.
// Use PreauthUsername to specify the username parameter. You must
// specify PreauthUserId or PreauthUsername, but not both.
// Use PreauthIpAddr to include the ipaddr parameter, the ip address
// of the client attempting authorization.
// Use PreauthTrustedToken to specify the trusted_device_token parameter.
func (api *AuthApi) Preauth(options ...func(*url.Values)) (*PreauthResult, error) {
opts := url.Values{}
for _, o := range options {
o(&opts)
}
_, body, err := api.api.SignedCall("POST", "/auth/v2/preauth", opts, duoapi.UseTimeout)
if err != nil {
return nil, err
}
ret := &PreauthResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
func AuthUserId(userid string) func(*url.Values) {
return func(opts *url.Values) {
opts.Set("user_id", userid)
}
}
func AuthUsername(username string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("username", username)
}
}
func AuthIpAddr(ip string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("ipaddr", ip)
}
}
func AuthAsync() func (*url.Values) {
return func(opts *url.Values) {
opts.Set("async", "1")
}
}
func AuthDevice(device string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("device", device)
}
}
func AuthType(type_ string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("type", type_)
}
}
func AuthDisplayUsername(username string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("display_username", username)
}
}
<|fim▁hole|>}
func AuthPasscode(passcode string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("passcode", passcode)
}
}
// Auth return type.
type AuthResult struct {
StatResult
Response struct {
// Synchronous
Result string
Status string
Status_Msg string
// Asynchronous
Txid string
}
}
// Duo's Auth method. https://www.duosecurity.com/docs/authapi#/auth
// Factor must be one of 'auto', 'push', 'passcode', 'sms' or 'phone'.
// Use AuthUserId to specify the user_id.
// Use AuthUsername to specify the username. You must specify either AuthUserId
// or AuthUsername, but not both.
// Use AuthIpAddr to include the client's IP address.
// Use AuthAsync to toggle whether the call blocks for the user's response or not.
// If used asynchronously, get the auth status with the AuthStatus method.
// When using factor 'push', use AuthDevice to specify the device ID to push to.
// When using factor 'push', use AuthType to display some extra auth text to the user.
// When using factor 'push', use AuthDisplayUsername to display some extra text
// to the user.
// When using factor 'push', use AuthPushInfo to include some URL-encoded key/value
// pairs to display to the user.
// When using factor 'passcode', use AuthPasscode to specify the passcode entered
// by the user.
// When using factor 'sms' or 'phone', use AuthDevice to specify which device
// should receive the SMS or phone call.
func (api *AuthApi) Auth(factor string, options ...func(*url.Values)) (*AuthResult, error) {
params := url.Values{}
for _, o := range options {
o(¶ms)
}
params.Set("factor", factor)
var apiOps []duoapi.DuoApiOption
if _, ok := params["async"]; ok == true {
apiOps = append(apiOps, duoapi.UseTimeout)
}
_, body, err := api.api.SignedCall("POST", "/auth/v2/auth", params, apiOps...)
if err != nil {
return nil, err
}
ret := &AuthResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}
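// Example (illustrative sketch; "alice" is a placeholder username and the
// device value "auto" is assumed to let Duo choose a push-capable device):
//
//	res, err := auth.Auth("push",
//		authapi.AuthUsername("alice"),
//		authapi.AuthDevice("auto"))
//	if err == nil && res.Response.Result == "allow" {
//		// The user approved the push.
//	}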
// AuthStatus return type.
type AuthStatusResult struct {
StatResult
Response struct {
Result string
Status string
Status_Msg string
Trusted_Device_Token string
}
}
// Duo's auth_status method. https://www.duosecurity.com/docs/authapi#/auth_status
// When using the Auth call in async mode, use this method to retrieve the
// result of the authentication attempt.
// txid is returned by the Auth call.
func (api *AuthApi) AuthStatus(txid string) (*AuthStatusResult, error) {
opts := url.Values{}
opts.Set("txid", txid)
_, body, err := api.api.SignedCall("GET", "/auth/v2/auth_status", opts)
if err != nil {
return nil, err
}
ret := &AuthStatusResult{}
if err = json.Unmarshal(body, ret); err != nil {
return nil, err
}
return ret, nil
}<|fim▁end|> | func AuthPushinfo(pushinfo string) func (*url.Values) {
return func(opts *url.Values) {
opts.Set("pushinfo", pushinfo)
} |
<|file_name|>conn_manager_impl.cc<|end_file_name|><|fim▁begin|>#include "common/http/conn_manager_impl.h"
#include <cstdint>
#include <functional>
#include <list>
#include <memory>
#include <string>
#include <vector>
#include "envoy/buffer/buffer.h"
#include "envoy/common/time.h"
#include "envoy/event/dispatcher.h"
#include "envoy/network/drain_decision.h"
#include "envoy/router/router.h"
#include "envoy/ssl/connection.h"
#include "envoy/stats/scope.h"
#include "envoy/tracing/http_tracer.h"
#include "common/buffer/buffer_impl.h"
#include "common/common/assert.h"
#include "common/common/empty_string.h"
#include "common/common/enum_to_int.h"
#include "common/common/fmt.h"
#include "common/common/utility.h"
#include "common/http/codes.h"
#include "common/http/conn_manager_utility.h"
#include "common/http/exception.h"
#include "common/http/header_map_impl.h"
#include "common/http/headers.h"
#include "common/http/http1/codec_impl.h"
#include "common/http/http2/codec_impl.h"
#include "common/http/path_utility.h"
#include "common/http/utility.h"
#include "common/network/utility.h"
#include "absl/strings/escaping.h"
#include "absl/strings/match.h"
namespace Envoy {
namespace Http {
namespace {
template <class T> using FilterList = std::list<std::unique_ptr<T>>;
// Shared helper for recording the latest filter used.
template <class T>
void recordLatestDataFilter(const typename FilterList<T>::iterator current_filter,
T*& latest_filter, const FilterList<T>& filters) {
// If this is the first time we're calling onData, just record the current filter.
if (latest_filter == nullptr) {
latest_filter = current_filter->get();
return;
}
// We want to keep this pointing at the latest filter in the filter list that has received the
// onData callback. To do so, we compare the current latest with the *previous* filter. If they
// match, then we must be processing a new filter for the first time. We omit this check if we're
// the first filter, since the above check handles that case.
//
  // We compare against the previous filter to avoid multiple filter iterations from resetting the
// pointer: If we just set latest to current, then the first onData filter iteration would
// correctly iterate over the filters and set latest, but on subsequent onData iterations
// we'd start from the beginning again, potentially allowing filter N to modify the buffer even
// though filter M > N was the filter that inserted data into the buffer.
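  //
  // Illustrative walk-through with three filters [A, B, C]: on the first onData pass,
  // latest moves from A to B to C because each filter's predecessor matches latest. On a
  // later pass for the same stream, A is skipped by the begin() check and neither B nor C
  // has a predecessor equal to latest (still C), so latest stays pinned at C.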
if (current_filter != filters.begin() && latest_filter == std::prev(current_filter)->get()) {
latest_filter = current_filter->get();
}
}
} // namespace
ConnectionManagerStats ConnectionManagerImpl::generateStats(const std::string& prefix,
Stats::Scope& scope) {
return {
{ALL_HTTP_CONN_MAN_STATS(POOL_COUNTER_PREFIX(scope, prefix), POOL_GAUGE_PREFIX(scope, prefix),
POOL_HISTOGRAM_PREFIX(scope, prefix))},
prefix,
scope};
}
ConnectionManagerTracingStats ConnectionManagerImpl::generateTracingStats(const std::string& prefix,
Stats::Scope& scope) {
return {CONN_MAN_TRACING_STATS(POOL_COUNTER_PREFIX(scope, prefix + "tracing."))};
}
ConnectionManagerListenerStats
ConnectionManagerImpl::generateListenerStats(const std::string& prefix, Stats::Scope& scope) {
return {CONN_MAN_LISTENER_STATS(POOL_COUNTER_PREFIX(scope, prefix))};
}
ConnectionManagerImpl::ConnectionManagerImpl(ConnectionManagerConfig& config,
const Network::DrainDecision& drain_close,
Runtime::RandomGenerator& random_generator,
Http::Context& http_context, Runtime::Loader& runtime,
const LocalInfo::LocalInfo& local_info,
Upstream::ClusterManager& cluster_manager,
Server::OverloadManager* overload_manager,
TimeSource& time_source)
: config_(config), stats_(config_.stats()),
conn_length_(new Stats::Timespan(stats_.named_.downstream_cx_length_ms_, time_source)),
drain_close_(drain_close), random_generator_(random_generator), http_context_(http_context),
runtime_(runtime), local_info_(local_info), cluster_manager_(cluster_manager),
listener_stats_(config_.listenerStats()),
overload_stop_accepting_requests_ref_(
overload_manager ? overload_manager->getThreadLocalOverloadState().getState(
Server::OverloadActionNames::get().StopAcceptingRequests)
: Server::OverloadManager::getInactiveState()),
overload_disable_keepalive_ref_(
overload_manager ? overload_manager->getThreadLocalOverloadState().getState(
Server::OverloadActionNames::get().DisableHttpKeepAlive)
: Server::OverloadManager::getInactiveState()),
time_source_(time_source) {}
const HeaderMapImpl& ConnectionManagerImpl::continueHeader() {
CONSTRUCT_ON_FIRST_USE(HeaderMapImpl,
{Http::Headers::get().Status, std::to_string(enumToInt(Code::Continue))});
}
void ConnectionManagerImpl::initializeReadFilterCallbacks(Network::ReadFilterCallbacks& callbacks) {
read_callbacks_ = &callbacks;
stats_.named_.downstream_cx_total_.inc();
stats_.named_.downstream_cx_active_.inc();
if (read_callbacks_->connection().ssl()) {
stats_.named_.downstream_cx_ssl_total_.inc();
stats_.named_.downstream_cx_ssl_active_.inc();
}
read_callbacks_->connection().addConnectionCallbacks(*this);
if (config_.idleTimeout()) {
connection_idle_timer_ = read_callbacks_->connection().dispatcher().createTimer(
[this]() -> void { onIdleTimeout(); });
connection_idle_timer_->enableTimer(config_.idleTimeout().value());
}
read_callbacks_->connection().setDelayedCloseTimeout(config_.delayedCloseTimeout());
read_callbacks_->connection().setConnectionStats(
{stats_.named_.downstream_cx_rx_bytes_total_, stats_.named_.downstream_cx_rx_bytes_buffered_,
stats_.named_.downstream_cx_tx_bytes_total_, stats_.named_.downstream_cx_tx_bytes_buffered_,
nullptr, &stats_.named_.downstream_cx_delayed_close_timeout_});
}
ConnectionManagerImpl::~ConnectionManagerImpl() {
stats_.named_.downstream_cx_destroy_.inc();
stats_.named_.downstream_cx_active_.dec();
if (read_callbacks_->connection().ssl()) {
stats_.named_.downstream_cx_ssl_active_.dec();
}
if (codec_) {
if (codec_->protocol() == Protocol::Http2) {
stats_.named_.downstream_cx_http2_active_.dec();
} else {
stats_.named_.downstream_cx_http1_active_.dec();
}
}
conn_length_->complete();
user_agent_.completeConnectionLength(*conn_length_);
}
void ConnectionManagerImpl::checkForDeferredClose() {
if (drain_state_ == DrainState::Closing && streams_.empty() && !codec_->wantsToWrite()) {
read_callbacks_->connection().close(Network::ConnectionCloseType::FlushWriteAndDelay);
}
}
void ConnectionManagerImpl::doEndStream(ActiveStream& stream) {
// The order of what happens in this routine is important and a little complicated. We first see
// if the stream needs to be reset. If it needs to be, this will end up invoking reset callbacks
// and then moving the stream to the deferred destruction list. If the stream has not been reset,
// we move it to the deferred deletion list here. Then, we potentially close the connection. This
// must be done after deleting the stream since the stream refers to the connection and must be
// deleted first.
bool reset_stream = false;
// If the response encoder is still associated with the stream, reset the stream. The exception
// here is when Envoy "ends" the stream by calling recreateStream at which point recreateStream
// explicitly nulls out response_encoder to avoid the downstream being notified of the
// Envoy-internal stream instance being ended.
if (stream.response_encoder_ != nullptr &&
(!stream.state_.remote_complete_ || !stream.state_.local_complete_)) {
// Indicate local is complete at this point so that if we reset during a continuation, we don't
// raise further data or trailers.
stream.state_.local_complete_ = true;
stream.response_encoder_->getStream().resetStream(StreamResetReason::LocalReset);
reset_stream = true;
}
if (!reset_stream) {
doDeferredStreamDestroy(stream);
}
if (reset_stream && codec_->protocol() != Protocol::Http2) {
drain_state_ = DrainState::Closing;
}
checkForDeferredClose();
// Reading may have been disabled for the non-multiplexing case, so enable it again.
// Also be sure to unwind any read-disable done by the prior downstream
// connection.
if (drain_state_ != DrainState::Closing && codec_->protocol() != Protocol::Http2) {
while (!read_callbacks_->connection().readEnabled()) {
read_callbacks_->connection().readDisable(false);
}
}
if (connection_idle_timer_ && streams_.empty()) {
connection_idle_timer_->enableTimer(config_.idleTimeout().value());
}
}
void ConnectionManagerImpl::doDeferredStreamDestroy(ActiveStream& stream) {
if (stream.stream_idle_timer_ != nullptr) {
stream.stream_idle_timer_->disableTimer();
stream.stream_idle_timer_ = nullptr;
}
stream.disarmRequestTimeout();
stream.state_.destroyed_ = true;
for (auto& filter : stream.decoder_filters_) {
filter->handle_->onDestroy();
}
for (auto& filter : stream.encoder_filters_) {
// Do not call on destroy twice for dual registered filters.
if (!filter->dual_filter_) {
filter->handle_->onDestroy();
}
}
read_callbacks_->connection().dispatcher().deferredDelete(stream.removeFromList(streams_));
}
StreamDecoder& ConnectionManagerImpl::newStream(StreamEncoder& response_encoder,
bool is_internally_created) {
if (connection_idle_timer_) {
connection_idle_timer_->disableTimer();
}
ENVOY_CONN_LOG(debug, "new stream", read_callbacks_->connection());
ActiveStreamPtr new_stream(new ActiveStream(*this));
new_stream->state_.is_internally_created_ = is_internally_created;
new_stream->response_encoder_ = &response_encoder;
new_stream->response_encoder_->getStream().addCallbacks(*new_stream);
new_stream->buffer_limit_ = new_stream->response_encoder_->getStream().bufferLimit();
// If the network connection is backed up, the stream should be made aware of it on creation.
// Both HTTP/1.x and HTTP/2 codecs handle this in StreamCallbackHelper::addCallbacks_.
ASSERT(read_callbacks_->connection().aboveHighWatermark() == false ||
new_stream->high_watermark_count_ > 0);
new_stream->moveIntoList(std::move(new_stream), streams_);
return **streams_.begin();
}
Network::FilterStatus ConnectionManagerImpl::onData(Buffer::Instance& data, bool) {
if (!codec_) {
codec_ = config_.createCodec(read_callbacks_->connection(), data, *this);
if (codec_->protocol() == Protocol::Http2) {
stats_.named_.downstream_cx_http2_total_.inc();
stats_.named_.downstream_cx_http2_active_.inc();
} else {
stats_.named_.downstream_cx_http1_total_.inc();
stats_.named_.downstream_cx_http1_active_.inc();
}
}
bool redispatch;
do {
redispatch = false;
try {
codec_->dispatch(data);
} catch (const CodecProtocolException& e) {
// HTTP/1.1 codec has already sent a 400 response if possible. HTTP/2 codec has already sent
// GOAWAY.
ENVOY_CONN_LOG(debug, "dispatch error: {}", read_callbacks_->connection(), e.what());
stats_.named_.downstream_cx_protocol_error_.inc();
// In the protocol error case, we need to reset all streams now. Since we do a flush write and
// delayed close, the connection might stick around long enough for a pending stream to come
// back and try to encode.
resetAllStreams();
read_callbacks_->connection().close(Network::ConnectionCloseType::FlushWriteAndDelay);
return Network::FilterStatus::StopIteration;
}
// Processing incoming data may release outbound data so check for closure here as well.
checkForDeferredClose();
    // The HTTP/1 codec will pause dispatch after a single message is complete. We redispatch
    // if there are no streams and we have more data. If we have a single complete
    // non-WebSocket stream but have not responded yet, we pause socket reads to apply back
    // pressure.
if (codec_->protocol() != Protocol::Http2) {
if (read_callbacks_->connection().state() == Network::Connection::State::Open &&
data.length() > 0 && streams_.empty()) {
redispatch = true;
}
if (!streams_.empty() && streams_.front()->state_.remote_complete_) {
read_callbacks_->connection().readDisable(true);
}
}
} while (redispatch);
return Network::FilterStatus::StopIteration;
}
void ConnectionManagerImpl::resetAllStreams() {
while (!streams_.empty()) {
// Mimic a downstream reset in this case.
streams_.front()->onResetStream(StreamResetReason::ConnectionTermination, absl::string_view());
}
}
void ConnectionManagerImpl::onEvent(Network::ConnectionEvent event) {
if (event == Network::ConnectionEvent::LocalClose) {
stats_.named_.downstream_cx_destroy_local_.inc();
}
if (event == Network::ConnectionEvent::RemoteClose) {
stats_.named_.downstream_cx_destroy_remote_.inc();
}
if (event == Network::ConnectionEvent::RemoteClose ||
event == Network::ConnectionEvent::LocalClose) {
if (connection_idle_timer_) {
connection_idle_timer_->disableTimer();
connection_idle_timer_.reset();
}
if (drain_timer_) {
drain_timer_->disableTimer();
drain_timer_.reset();
}
}
if (!streams_.empty()) {
if (event == Network::ConnectionEvent::LocalClose) {
stats_.named_.downstream_cx_destroy_local_active_rq_.inc();
}
if (event == Network::ConnectionEvent::RemoteClose) {
stats_.named_.downstream_cx_destroy_remote_active_rq_.inc();
}
stats_.named_.downstream_cx_destroy_active_rq_.inc();
user_agent_.onConnectionDestroy(event, true);
resetAllStreams();
}
}
void ConnectionManagerImpl::onGoAway() {
// Currently we do nothing with remote go away frames. In the future we can decide to no longer
// push resources if applicable.
}
void ConnectionManagerImpl::onIdleTimeout() {
ENVOY_CONN_LOG(debug, "idle timeout", read_callbacks_->connection());
stats_.named_.downstream_cx_idle_timeout_.inc();
if (!codec_) {
// No need to delay close after flushing since an idle timeout has already fired. Attempt to
// write out buffered data one last time and issue a local close if successful.
read_callbacks_->connection().close(Network::ConnectionCloseType::FlushWrite);
} else if (drain_state_ == DrainState::NotDraining) {
startDrainSequence();
}
}
void ConnectionManagerImpl::onDrainTimeout() {
ASSERT(drain_state_ != DrainState::NotDraining);
codec_->goAway();
drain_state_ = DrainState::Closing;
checkForDeferredClose();
}
void ConnectionManagerImpl::chargeTracingStats(const Tracing::Reason& tracing_reason,
ConnectionManagerTracingStats& tracing_stats) {
switch (tracing_reason) {
case Tracing::Reason::ClientForced:
tracing_stats.client_enabled_.inc();
break;
case Tracing::Reason::NotTraceableRequestId:
tracing_stats.not_traceable_.inc();
break;
case Tracing::Reason::Sampling:
tracing_stats.random_sampling_.inc();
break;
case Tracing::Reason::ServiceForced:
tracing_stats.service_forced_.inc();
break;
default:
throw std::invalid_argument(
fmt::format("invalid tracing reason, value: {}", static_cast<int32_t>(tracing_reason)));
}
}
ConnectionManagerImpl::ActiveStream::ActiveStream(ConnectionManagerImpl& connection_manager)
: connection_manager_(connection_manager),
snapped_route_config_(connection_manager.config_.routeConfigProvider().config()),
stream_id_(connection_manager.random_generator_.random()),
request_response_timespan_(new Stats::Timespan(
connection_manager_.stats_.named_.downstream_rq_time_, connection_manager_.timeSource())),
stream_info_(connection_manager_.codec_->protocol(), connection_manager_.timeSource()) {
connection_manager_.stats_.named_.downstream_rq_total_.inc();
connection_manager_.stats_.named_.downstream_rq_active_.inc();
if (connection_manager_.codec_->protocol() == Protocol::Http2) {
connection_manager_.stats_.named_.downstream_rq_http2_total_.inc();
} else {
connection_manager_.stats_.named_.downstream_rq_http1_total_.inc();
}
stream_info_.setDownstreamLocalAddress(
connection_manager_.read_callbacks_->connection().localAddress());
stream_info_.setDownstreamDirectRemoteAddress(
connection_manager_.read_callbacks_->connection().remoteAddress());
// Initially, the downstream remote address is the source address of the
// downstream connection. That can change later in the request's lifecycle,
// based on XFF processing, but setting the downstream remote address here
// prevents surprises for logging code in edge cases.
stream_info_.setDownstreamRemoteAddress(
connection_manager_.read_callbacks_->connection().remoteAddress());
stream_info_.setDownstreamSslConnection(connection_manager_.read_callbacks_->connection().ssl());
if (connection_manager_.config_.streamIdleTimeout().count()) {
idle_timeout_ms_ = connection_manager_.config_.streamIdleTimeout();
stream_idle_timer_ = connection_manager_.read_callbacks_->connection().dispatcher().createTimer(
[this]() -> void { onIdleTimeout(); });
resetIdleTimer();
}
if (connection_manager_.config_.requestTimeout().count()) {
std::chrono::milliseconds request_timeout_ms_ = connection_manager_.config_.requestTimeout();
request_timer_ = connection_manager.read_callbacks_->connection().dispatcher().createTimer(
[this]() -> void { onRequestTimeout(); });
request_timer_->enableTimer(request_timeout_ms_);
}
stream_info_.setRequestedServerName(
connection_manager_.read_callbacks_->connection().requestedServerName());
}
ConnectionManagerImpl::ActiveStream::~ActiveStream() {
stream_info_.onRequestComplete();
// A downstream disconnect can be identified for HTTP requests when the upstream returns with a 0
// response code and when no other response flags are set.
if (!stream_info_.hasAnyResponseFlag() && !stream_info_.responseCode()) {
stream_info_.setResponseFlag(StreamInfo::ResponseFlag::DownstreamConnectionTermination);
}
connection_manager_.stats_.named_.downstream_rq_active_.dec();
for (const AccessLog::InstanceSharedPtr& access_log : connection_manager_.config_.accessLogs()) {
access_log->log(request_headers_.get(), response_headers_.get(), response_trailers_.get(),
stream_info_);
}
for (const auto& log_handler : access_log_handlers_) {
log_handler->log(request_headers_.get(), response_headers_.get(), response_trailers_.get(),
stream_info_);
}
if (stream_info_.healthCheck()) {
connection_manager_.config_.tracingStats().health_check_.inc();
}
if (active_span_) {
Tracing::HttpTracerUtility::finalizeSpan(*active_span_, request_headers_.get(), stream_info_,
*this);
}
if (state_.successful_upgrade_) {
connection_manager_.stats_.named_.downstream_cx_upgrades_active_.dec();
}
ASSERT(state_.filter_call_state_ == 0);
}
void ConnectionManagerImpl::ActiveStream::resetIdleTimer() {
if (stream_idle_timer_ != nullptr) {
// TODO(htuch): If this shows up in performance profiles, optimize by only
// updating a timestamp here and doing periodic checks for idle timeouts
// instead, or reducing the accuracy of timers.
stream_idle_timer_->enableTimer(idle_timeout_ms_);
}
}
void ConnectionManagerImpl::ActiveStream::onIdleTimeout() {
connection_manager_.stats_.named_.downstream_rq_idle_timeout_.inc();
if (response_headers_ != nullptr) {
// Response headers have already been sent, so a 408 can no longer be delivered; just end the
// stream.
// TODO(htuch): We could send trailers here with an x-envoy timeout header
// or gRPC status code, and/or set H2 RST_STREAM error.
connection_manager_.doEndStream(*this);
} else {
// Headers have not been sent to the user yet, so send a 408.
stream_info_.setResponseFlag(StreamInfo::ResponseFlag::StreamIdleTimeout);
sendLocalReply(
request_headers_ != nullptr && Grpc::Common::hasGrpcContentType(*request_headers_),
Http::Code::RequestTimeout, "stream timeout", nullptr, is_head_request_, absl::nullopt);
}
}
void ConnectionManagerImpl::ActiveStream::onRequestTimeout() {
connection_manager_.stats_.named_.downstream_rq_timeout_.inc();
sendLocalReply(request_headers_ != nullptr && Grpc::Common::hasGrpcContentType(*request_headers_),
Http::Code::RequestTimeout, "request timeout", nullptr, is_head_request_,
absl::nullopt);
}
void ConnectionManagerImpl::ActiveStream::addStreamDecoderFilterWorker(
StreamDecoderFilterSharedPtr filter, bool dual_filter) {
ActiveStreamDecoderFilterPtr wrapper(new ActiveStreamDecoderFilter(*this, filter, dual_filter));
filter->setDecoderFilterCallbacks(*wrapper);
wrapper->moveIntoListBack(std::move(wrapper), decoder_filters_);
}
void ConnectionManagerImpl::ActiveStream::addStreamEncoderFilterWorker(
StreamEncoderFilterSharedPtr filter, bool dual_filter) {
ActiveStreamEncoderFilterPtr wrapper(new ActiveStreamEncoderFilter(*this, filter, dual_filter));
filter->setEncoderFilterCallbacks(*wrapper);
wrapper->moveIntoList(std::move(wrapper), encoder_filters_);
}
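// Note the asymmetry above: decoder filter wrappers are appended to the list
// (moveIntoListBack) so requests traverse them in configuration order, while encoder filter
// wrappers are inserted at the front (moveIntoList) so responses traverse the chain in
// reverse configuration order.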
void ConnectionManagerImpl::ActiveStream::addAccessLogHandler(
AccessLog::InstanceSharedPtr handler) {
access_log_handlers_.push_back(handler);
}
void ConnectionManagerImpl::ActiveStream::chargeStats(const HeaderMap& headers) {
uint64_t response_code = Utility::getResponseStatus(headers);
stream_info_.response_code_ = response_code;
if (stream_info_.health_check_request_) {
return;
}
connection_manager_.stats_.named_.downstream_rq_completed_.inc();
connection_manager_.listener_stats_.downstream_rq_completed_.inc();
if (CodeUtility::is1xx(response_code)) {
connection_manager_.stats_.named_.downstream_rq_1xx_.inc();
connection_manager_.listener_stats_.downstream_rq_1xx_.inc();
} else if (CodeUtility::is2xx(response_code)) {
connection_manager_.stats_.named_.downstream_rq_2xx_.inc();
connection_manager_.listener_stats_.downstream_rq_2xx_.inc();
} else if (CodeUtility::is3xx(response_code)) {
connection_manager_.stats_.named_.downstream_rq_3xx_.inc();
connection_manager_.listener_stats_.downstream_rq_3xx_.inc();
} else if (CodeUtility::is4xx(response_code)) {
connection_manager_.stats_.named_.downstream_rq_4xx_.inc();
connection_manager_.listener_stats_.downstream_rq_4xx_.inc();
} else if (CodeUtility::is5xx(response_code)) {
connection_manager_.stats_.named_.downstream_rq_5xx_.inc();
connection_manager_.listener_stats_.downstream_rq_5xx_.inc();
}
}
const Network::Connection* ConnectionManagerImpl::ActiveStream::connection() {
return &connection_manager_.read_callbacks_->connection();
}
// Ordering in this function is complicated, but important.
//
// We want to do minimal work before selecting route and creating a filter
// chain to maximize the number of requests which get custom filter behavior,
// e.g. registering access logging.
//
// This must be balanced by doing sanity checking for invalid requests (one
// can't select a route properly without full headers), checking state required to
// serve error responses (connection close, HEAD requests, etc.), and
// modifications which may themselves affect route selection.
//
// TODO(alyssawilk) all the calls here should be audited for order priority,
// e.g. many early returns do not currently handle connection: close properly.
void ConnectionManagerImpl::ActiveStream::decodeHeaders(HeaderMapPtr&& headers, bool end_stream) {
request_headers_ = std::move(headers);
if (Http::Headers::get().MethodValues.Head ==
request_headers_->Method()->value().getStringView()) {
is_head_request_ = true;
}
ENVOY_STREAM_LOG(debug, "request headers complete (end_stream={}):\n{}", *this, end_stream,
*request_headers_);
// We end the decode here only if the request is header only. If we convert the request to a
// header only, the stream will be marked as done once a subsequent decodeData/decodeTrailers is
// called with end_stream=true.
maybeEndDecode(end_stream);
// Drop new requests when overloaded as soon as we have decoded the headers.
if (connection_manager_.overload_stop_accepting_requests_ref_ ==
Server::OverloadActionState::Active) {
// In this one special case, do not create the filter chain. If there is a risk of memory
// overload it is more important to avoid unnecessary allocation than to create the filters.
state_.created_filter_chain_ = true;
connection_manager_.stats_.named_.downstream_rq_overload_close_.inc();
sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_),
Http::Code::ServiceUnavailable, "envoy overloaded", nullptr, is_head_request_,
absl::nullopt);
return;
}
if (!connection_manager_.config_.proxy100Continue() && request_headers_->Expect() &&
request_headers_->Expect()->value() == Headers::get().ExpectValues._100Continue.c_str()) {
// Note that when Envoy is handling the 100-Continue complexity itself, it skips the filter
// chain and sends the 100-Continue directly to the encoder.
chargeStats(continueHeader());
response_encoder_->encode100ContinueHeaders(continueHeader());
// Remove the Expect header so it won't be handled again upstream.
request_headers_->removeExpect();
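// Note that only the Expect header is consumed here; the request itself still proceeds
// through the normal filter chain below.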
}
connection_manager_.user_agent_.initializeFromHeaders(
*request_headers_, connection_manager_.stats_.prefix_, connection_manager_.stats_.scope_);
// Make sure we are getting a codec version we support.
Protocol protocol = connection_manager_.codec_->protocol();
if (protocol == Protocol::Http10) {
// Assume this is HTTP/1.0. That is fine for HTTP/0.9, but this code will also affect any
// requests with non-standard version numbers (0.9, 1.3), i.e. anything which is not
// HTTP/1.1.
//
// The protocol may have shifted in the HTTP/1.0 case so reset it.
stream_info_.protocol(protocol);
if (!connection_manager_.config_.http1Settings().accept_http_10_) {
// Send "Upgrade Required" if HTTP/1.0 support is not explicitly configured on.
sendLocalReply(false, Code::UpgradeRequired, "", nullptr, is_head_request_, absl::nullopt);
return;
} else {
// HTTP/1.0 defaults to single-use connections. Make sure the connection
// will be closed unless Keep-Alive is present.
state_.saw_connection_close_ = true;
if (request_headers_->Connection() &&
absl::EqualsIgnoreCase(request_headers_->Connection()->value().getStringView(),
Http::Headers::get().ConnectionValues.KeepAlive)) {
state_.saw_connection_close_ = false;
}
}
}
if (!request_headers_->Host()) {
if ((protocol == Protocol::Http10) &&
!connection_manager_.config_.http1Settings().default_host_for_http_10_.empty()) {
// Add a default host if configured to do so.
request_headers_->insertHost().value(
connection_manager_.config_.http1Settings().default_host_for_http_10_);
} else {
// Require host header. For HTTP/1.1 Host has already been translated to :authority.
sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::BadRequest, "",
nullptr, is_head_request_, absl::nullopt);
return;
}
}
ASSERT(connection_manager_.config_.maxRequestHeadersKb() > 0);
if (request_headers_->byteSize() > (connection_manager_.config_.maxRequestHeadersKb() * 1024)) {
sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_),
Code::RequestHeaderFieldsTooLarge, "", nullptr, is_head_request_, absl::nullopt);
return;
}
// Currently we only support relative paths at the application layer. We expect the codec to have
// broken the path into pieces if applicable. NOTE: Currently the HTTP/1.1 codec only does this
// when the allow_absolute_url flag is enabled on the HCM
// (https://tools.ietf.org/html/rfc7230#section-5.3).
// We also need to check for the existence of :path because CONNECT does not have a path, and we
// don't support that currently.
if (!request_headers_->Path() || request_headers_->Path()->value().getStringView().empty() ||
request_headers_->Path()->value().getStringView()[0] != '/') {
connection_manager_.stats_.named_.downstream_rq_non_relative_path_.inc();
sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::NotFound, "", nullptr,
is_head_request_, absl::nullopt);
return;
}
// Path sanitization should happen before any path access other than the above sanity check.
if (!ConnectionManagerUtility::maybeNormalizePath(*request_headers_,
connection_manager_.config_)) {
sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::BadRequest, "",
nullptr, is_head_request_, absl::nullopt);
return;
}
if (protocol == Protocol::Http11 && request_headers_->Connection() &&
absl::EqualsIgnoreCase(request_headers_->Connection()->value().getStringView(),
Http::Headers::get().ConnectionValues.Close)) {
state_.saw_connection_close_ = true;
}
if (!state_.is_internally_created_) { // Only sanitize headers on first pass.
// Modify the downstream remote address depending on configuration and headers.
stream_info_.setDownstreamRemoteAddress(ConnectionManagerUtility::mutateRequestHeaders(
*request_headers_, connection_manager_.read_callbacks_->connection(),
connection_manager_.config_, *snapped_route_config_, connection_manager_.random_generator_,
connection_manager_.runtime_, connection_manager_.local_info_));
}
ASSERT(stream_info_.downstreamRemoteAddress() != nullptr);
ASSERT(!cached_route_);
refreshCachedRoute();
const bool upgrade_rejected = createFilterChain() == false;
// TODO: if there are no filters when starting a filter iteration, the connection manager
// should return a 404. The current implementation returns no response if there is no router
// filter.
if (protocol == Protocol::Http11 && cached_route_.value()) {
if (upgrade_rejected) {
// Do not allow upgrades if the route does not support it.
connection_manager_.stats_.named_.downstream_rq_ws_on_non_ws_route_.inc();
sendLocalReply(Grpc::Common::hasGrpcContentType(*request_headers_), Code::Forbidden, "",
nullptr, is_head_request_, absl::nullopt);
return;
}
// Allow non-websocket requests to go through websocket-enabled routes.
}
if (cached_route_.value()) {
const Router::RouteEntry* route_entry = cached_route_.value()->routeEntry();
if (route_entry != nullptr && route_entry->idleTimeout()) {
idle_timeout_ms_ = route_entry->idleTimeout().value();
if (idle_timeout_ms_.count()) {
// If we have a route-level idle timeout but no global stream idle timeout, create a timer.
if (stream_idle_timer_ == nullptr) {
stream_idle_timer_ =
connection_manager_.read_callbacks_->connection().dispatcher().createTimer(
[this]() -> void { onIdleTimeout(); });
}
} else if (stream_idle_timer_ != nullptr) {
// If we had a global stream idle timeout but the route-level idle timeout is set to zero
// (to override), we disable the idle timer.
stream_idle_timer_->disableTimer();
stream_idle_timer_ = nullptr;
}
}
}
// Check if tracing is enabled at all.
if (connection_manager_.config_.tracingConfig()) {
traceRequest();
}
decodeHeaders(nullptr, *request_headers_, end_stream);
// Reset it here for both global and overridden cases.
resetIdleTimer();
}
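// Computes the tracing decision, starts the active span, and applies any route decorator.
// Egress traces propagate the decorated operation name downstream via the
// x-envoy-decorator-operation request header; ingress traces honor an inbound override of
// that header instead.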
void ConnectionManagerImpl::ActiveStream::traceRequest() {
Tracing::Decision tracing_decision =
Tracing::HttpTracerUtility::isTracing(stream_info_, *request_headers_);
ConnectionManagerImpl::chargeTracingStats(tracing_decision.reason,
connection_manager_.config_.tracingStats());
active_span_ = connection_manager_.tracer().startSpan(*this, *request_headers_, stream_info_,
tracing_decision);
if (!active_span_) {
return;
}
// TODO: Need to investigate the following code based on the cached route, as it may
// be broken in the case where a filter changes the route.
// If a decorator has been defined, apply it to the active span.
if (cached_route_.value() && cached_route_.value()->decorator()) {
cached_route_.value()->decorator()->apply(*active_span_);
// Cache decorated operation.
if (!cached_route_.value()->decorator()->getOperation().empty()) {
decorated_operation_ = &cached_route_.value()->decorator()->getOperation();
}
}
if (connection_manager_.config_.tracingConfig()->operation_name_ ==
Tracing::OperationName::Egress) {
// For egress (outbound) requests, pass the decorator's operation name (if defined)
// as a request header to enable the receiving service to use it in its server span.
if (decorated_operation_) {
request_headers_->insertEnvoyDecoratorOperation().value(*decorated_operation_);
}
} else {
const HeaderEntry* req_operation_override = request_headers_->EnvoyDecoratorOperation();
// For ingress (inbound) requests, if a decorator operation name has been provided, it
// should be used to override the active span's operation.
if (req_operation_override) {
if (!req_operation_override->value().empty()) {
// TODO(dnoe): Migrate setOperation to take string_view (#6580)
active_span_->setOperation(std::string(req_operation_override->value().getStringView()));
// Clear the decorated operation so it won't be used in the response header, as
// it has been overridden by the inbound decorator operation request header.
decorated_operation_ = nullptr;
}
// Remove the header so it is not propagated to the upstream service.
request_headers_->removeEnvoyDecoratorOperation();
}
}
}
void ConnectionManagerImpl::ActiveStream::decodeHeaders(ActiveStreamDecoderFilter* filter,
HeaderMap& headers, bool end_stream) {
// Headers filter iteration should always start with the next filter if available.
std::list<ActiveStreamDecoderFilterPtr>::iterator entry =
commonDecodePrefix(filter, FilterIterationStartState::AlwaysStartFromNext);
std::list<ActiveStreamDecoderFilterPtr>::iterator continue_data_entry = decoder_filters_.end();
for (; entry != decoder_filters_.end(); entry++) {
ASSERT(!(state_.filter_call_state_ & FilterCallState::DecodeHeaders));
state_.filter_call_state_ |= FilterCallState::DecodeHeaders;
(*entry)->end_stream_ =
decoding_headers_only_ || (end_stream && continue_data_entry == decoder_filters_.end());
FilterHeadersStatus status = (*entry)->decodeHeaders(headers, (*entry)->end_stream_);
ASSERT(!(status == FilterHeadersStatus::ContinueAndEndStream && (*entry)->end_stream_));
state_.filter_call_state_ &= ~FilterCallState::DecodeHeaders;
ENVOY_STREAM_LOG(trace, "decode headers called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
if (!(*entry)->commonHandleAfterHeadersCallback(status, decoding_headers_only_) &&
std::next(entry) != decoder_filters_.end()) {
// Stop iteration IFF this is not the last filter. If it is the last filter, continue with
// processing since we need to handle the case where a terminal filter wants to buffer, but
// a previous filter has added body.
return;
}
// Here we handle the case where we have a header only request, but a filter adds a body
// to it. We need to not raise end_stream = true to further filters during inline iteration.
if (end_stream && buffered_request_data_ && continue_data_entry == decoder_filters_.end()) {
continue_data_entry = entry;
}
}
if (continue_data_entry != decoder_filters_.end()) {
// We use the continueDecoding() code since it will correctly handle not calling
// decodeHeaders() again. Fake setting StopSingleIteration since the continueDecoding() code
// expects it.
ASSERT(buffered_request_data_);
(*continue_data_entry)->iteration_state_ =
ActiveStreamFilterBase::IterationState::StopSingleIteration;
(*continue_data_entry)->continueDecoding();
}
if (end_stream) {
disarmRequestTimeout();
}
}
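// Illustrative sketch (hypothetical filter code, not part of this file): a decoder filter
// that converts a header-only request into one with a body can add data from its
// decodeHeaders() implementation, e.g.:
//
//   Buffer::OwnedImpl body("synthesized payload");
//   decoder_callbacks_->addDecodedData(body, /*streaming=*/false);
//   return FilterHeadersStatus::Continue;
//
// (decoder_callbacks_ is the filter's stored StreamDecoderFilterCallbacks.) The
// continue_data_entry bookkeeping above then replays the buffered body via continueDecoding()
// without raising end_stream to later filters during header iteration.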
void ConnectionManagerImpl::ActiveStream::decodeData(Buffer::Instance& data, bool end_stream) {
maybeEndDecode(end_stream);
stream_info_.addBytesReceived(data.length());
decodeData(nullptr, data, end_stream, FilterIterationStartState::CanStartFromCurrent);
}
void ConnectionManagerImpl::ActiveStream::decodeData(
ActiveStreamDecoderFilter* filter, Buffer::Instance& data, bool end_stream,
FilterIterationStartState filter_iteration_start_state) {
resetIdleTimer();
// If we previously decided to decode only the headers, do nothing here.
if (decoding_headers_only_) {
return;
}
// If a response is complete or a reset has been sent, filters do not care about further body
// data. Just drop it.
if (state_.local_complete_) {
return;
}
auto trailers_added_entry = decoder_filters_.end();
const bool trailers_exists_at_start = request_trailers_ != nullptr;
// Filter iteration may start at the current filter.
std::list<ActiveStreamDecoderFilterPtr>::iterator entry =
commonDecodePrefix(filter, filter_iteration_start_state);
for (; entry != decoder_filters_.end(); entry++) {
// If the filter pointed to by entry has stopped for all frame types, return now.
if (handleDataIfStopAll(**entry, data, state_.decoder_filters_streaming_)) {
return;
}
// If end_stream_ is marked for a filter, the data is not for this filter and filters after.
//
// In the following case, ActiveStreamFilterBase::commonContinue() could be called recursively
// and its doData() would be called with the wrong data.
//
// There are 3 decode filters and "wrapper" refers to an ActiveStreamFilter object.
//
// filter0->decodeHeaders(_, true)
//   return STOP
// filter0->continueDecoding()
// wrapper0->commonContinue()
//   wrapper0->decodeHeaders(_, _, true)
//     filter1->decodeHeaders(_, true)
//       filter1->addDecodeData()
//       return CONTINUE
//     filter2->decodeHeaders(_, false)
//       return CONTINUE
//     wrapper1->commonContinue() // Detects data is added.
//     wrapper1->doData()
//       wrapper1->decodeData()
//         filter2->decodeData(_, true)
//           return CONTINUE
//   wrapper0->doData() // This should not be called
//     wrapper0->decodeData()
//       filter1->decodeData(_, true) // It will cause assertions.
//
// One way to solve this problem is to mark end_stream_ for each filter.
// If a filter is already marked as end_stream_ when decodeData() is called, bail out of the
// whole function. If we merely skipped the filter, the code after the loop would run with the
// wrong data; for encodeData(), response_encoder_->encodeData() would be called.
if ((*entry)->end_stream_) {
return;
}
ASSERT(!(state_.filter_call_state_ & FilterCallState::DecodeData));
// We check the request_trailers_ pointer here in case addDecodedTrailers
// is called in decodeData during a previous filter invocation, at which point we communicate to
// the current and future filters that the stream has not yet ended.
if (end_stream) {
state_.filter_call_state_ |= FilterCallState::LastDataFrame;
}
recordLatestDataFilter(entry, state_.latest_data_decoding_filter_, decoder_filters_);
state_.filter_call_state_ |= FilterCallState::DecodeData;
(*entry)->end_stream_ = end_stream && !request_trailers_;
FilterDataStatus status = (*entry)->handle_->decodeData(data, (*entry)->end_stream_);
if ((*entry)->end_stream_) {
(*entry)->handle_->decodeComplete();
}
state_.filter_call_state_ &= ~FilterCallState::DecodeData;
if (end_stream) {
state_.filter_call_state_ &= ~FilterCallState::LastDataFrame;
}
ENVOY_STREAM_LOG(trace, "decode data called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
if (!trailers_exists_at_start && request_trailers_ &&
trailers_added_entry == decoder_filters_.end()) {
trailers_added_entry = entry;
}
if (!(*entry)->commonHandleAfterDataCallback(status, data, state_.decoder_filters_streaming_) &&
std::next(entry) != decoder_filters_.end()) {
// Stop iteration IFF this is not the last filter. If it is the last filter, continue with
// processing since we need to handle the case where a terminal filter wants to buffer, but
// a previous filter has added trailers.
return;
}
}
// If trailers were added during decodeData we need to trigger decodeTrailers in order
// to allow filters to process the trailers.
if (trailers_added_entry != decoder_filters_.end()) {
decodeTrailers(trailers_added_entry->get(), *request_trailers_);
}
if (end_stream) {
disarmRequestTimeout();
}
}
HeaderMap& ConnectionManagerImpl::ActiveStream::addDecodedTrailers() {
// Trailers can only be added during the last data frame (i.e. end_stream = true).
ASSERT(state_.filter_call_state_ & FilterCallState::LastDataFrame);
// Trailers can only be added once.
ASSERT(!request_trailers_);
request_trailers_ = std::make_unique<HeaderMapImpl>();
return *request_trailers_;
}
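// Illustrative sketch (hypothetical filter code): trailers may only be added while processing
// the last data frame, e.g. from a filter's decodeData():
//
//   FilterDataStatus MyFilter::decodeData(Buffer::Instance&, bool end_stream) {
//     if (end_stream) {
//       HeaderMap& trailers = decoder_callbacks_->addDecodedTrailers();
//       trailers.addCopy(LowerCaseString("x-my-trailer"), "value");
//     }
//     return FilterDataStatus::Continue;
//   }
//
// The trailers_added_entry logic in decodeData() above then dispatches decodeTrailers() to
// the remaining filters.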
void ConnectionManagerImpl::ActiveStream::addDecodedData(ActiveStreamDecoderFilter& filter,
Buffer::Instance& data, bool streaming) {
if (state_.filter_call_state_ == 0 ||
(state_.filter_call_state_ & FilterCallState::DecodeHeaders) ||
(state_.filter_call_state_ & FilterCallState::DecodeData)) {
// Make sure if this triggers watermarks, the correct action is taken.
state_.decoder_filters_streaming_ = streaming;
// If no call is happening or we are in the decode headers/data callback, buffer the data.
// Inline processing happens in the decodeHeaders() callback if necessary.
filter.commonHandleBufferData(data);
} else if (state_.filter_call_state_ & FilterCallState::DecodeTrailers) {
// In this case we need to inline dispatch the data to further filters. If those filters
// choose to buffer/stop iteration that's fine.
decodeData(&filter, data, false, FilterIterationStartState::AlwaysStartFromNext);
} else {
// TODO(mattklein123): Formalize error handling for filters and add tests. Should probably
// throw an exception here.
NOT_IMPLEMENTED_GCOVR_EXCL_LINE;
}
}
void ConnectionManagerImpl::ActiveStream::decodeTrailers(HeaderMapPtr&& trailers) {
resetIdleTimer();
maybeEndDecode(true);
request_trailers_ = std::move(trailers);
decodeTrailers(nullptr, *request_trailers_);
}
void ConnectionManagerImpl::ActiveStream::decodeTrailers(ActiveStreamDecoderFilter* filter,
HeaderMap& trailers) {
// If we previously decided to decode only the headers, do nothing here.
if (decoding_headers_only_) {
return;
}
// See decodeData() above for why we check local_complete_ here.
if (state_.local_complete_) {
return;
}
// Filter iteration may start at the current filter.
std::list<ActiveStreamDecoderFilterPtr>::iterator entry =
commonDecodePrefix(filter, FilterIterationStartState::CanStartFromCurrent);
for (; entry != decoder_filters_.end(); entry++) {
// If the filter pointed to by entry has stopped for all frame types, return now.
if ((*entry)->stoppedAll()) {
return;
}
ASSERT(!(state_.filter_call_state_ & FilterCallState::DecodeTrailers));
state_.filter_call_state_ |= FilterCallState::DecodeTrailers;
FilterTrailersStatus status = (*entry)->handle_->decodeTrailers(trailers);
(*entry)->handle_->decodeComplete();
(*entry)->end_stream_ = true;
state_.filter_call_state_ &= ~FilterCallState::DecodeTrailers;
ENVOY_STREAM_LOG(trace, "decode trailers called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
if (!(*entry)->commonHandleAfterTrailersCallback(status)) {
return;
}
}
disarmRequestTimeout();
}
void ConnectionManagerImpl::ActiveStream::maybeEndDecode(bool end_stream) {
ASSERT(!state_.remote_complete_);
state_.remote_complete_ = end_stream;
if (end_stream) {
stream_info_.onLastDownstreamRxByteReceived();
ENVOY_STREAM_LOG(debug, "request end stream", *this);
}
}
void ConnectionManagerImpl::ActiveStream::disarmRequestTimeout() {
if (request_timer_) {
request_timer_->disableTimer();
}
}
std::list<ConnectionManagerImpl::ActiveStreamEncoderFilterPtr>::iterator
ConnectionManagerImpl::ActiveStream::commonEncodePrefix(
ActiveStreamEncoderFilter* filter, bool end_stream,
FilterIterationStartState filter_iteration_start_state) {
// Only do base state setting on the initial call. Subsequent calls for filtering do not touch
// the base state.
if (filter == nullptr) {
ASSERT(!state_.local_complete_);
state_.local_complete_ = end_stream;
return encoder_filters_.begin();
}
if (filter_iteration_start_state == FilterIterationStartState::CanStartFromCurrent &&
(*(filter->entry()))->iterate_from_current_filter_) {
// The filter iteration has been stopped for all frame types, and now the iteration continues.
// The current filter's encoding callback has not been called. Call it now.
return filter->entry();
}
return std::next(filter->entry());
}
std::list<ConnectionManagerImpl::ActiveStreamDecoderFilterPtr>::iterator
ConnectionManagerImpl::ActiveStream::commonDecodePrefix(
ActiveStreamDecoderFilter* filter, FilterIterationStartState filter_iteration_start_state) {
if (!filter) {
return decoder_filters_.begin();
}
if (filter_iteration_start_state == FilterIterationStartState::CanStartFromCurrent &&
(*(filter->entry()))->iterate_from_current_filter_) {
// The filter iteration has been stopped for all frame types, and now the iteration continues.
// The current filter's callback function has not been called. Call it now.
return filter->entry();
}
return std::next(filter->entry());
}
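// AlwaysStartFromNext is used when the current filter has just returned from its own callback
// (e.g. header iteration), while CanStartFromCurrent allows a filter that previously stopped
// all iteration to re-enter the chain at itself, e.g. when data is injected via
// injectDecodedDataToFilterChain() / injectEncodedDataToFilterChain().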
void ConnectionManagerImpl::startDrainSequence() {
ASSERT(drain_state_ == DrainState::NotDraining);
drain_state_ = DrainState::Draining;
codec_->shutdownNotice();
drain_timer_ = read_callbacks_->connection().dispatcher().createTimer(
[this]() -> void { onDrainTimeout(); });
drain_timer_->enableTimer(config_.drainTimeout());
}
void ConnectionManagerImpl::ActiveStream::refreshCachedRoute() {
Router::RouteConstSharedPtr route;
if (request_headers_ != nullptr) {
route = snapped_route_config_->route(*request_headers_, stream_id_);
}
stream_info_.route_entry_ = route ? route->routeEntry() : nullptr;
cached_route_ = std::move(route);
if (nullptr == stream_info_.route_entry_) {
cached_cluster_info_ = nullptr;
} else {
Upstream::ThreadLocalCluster* local_cluster =
connection_manager_.cluster_manager_.get(stream_info_.route_entry_->clusterName());
cached_cluster_info_ = (nullptr == local_cluster) ? nullptr : local_cluster->info();
}
}
void ConnectionManagerImpl::ActiveStream::sendLocalReply(
bool is_grpc_request, Code code, absl::string_view body,
const std::function<void(HeaderMap& headers)>& modify_headers, bool is_head_request,
const absl::optional<Grpc::Status::GrpcStatus> grpc_status) {
ASSERT(response_headers_ == nullptr);
// For early error handling, do a best-effort attempt to create a filter chain
// to ensure access logging.
if (!state_.created_filter_chain_) {
createFilterChain();
}
Utility::sendLocalReply(
is_grpc_request,
[this, modify_headers](HeaderMapPtr&& headers, bool end_stream) -> void {
if (modify_headers != nullptr) {
modify_headers(*headers);
}
response_headers_ = std::move(headers);
// TODO: Start encoding from the last decoder filter that saw the
// request instead.
encodeHeaders(nullptr, *response_headers_, end_stream);
},
[this](Buffer::Instance& data, bool end_stream) -> void {
// TODO: Start encoding from the last decoder filter that saw the
// request instead.
encodeData(nullptr, data, end_stream, FilterIterationStartState::CanStartFromCurrent);
},
state_.destroyed_, code, body, grpc_status, is_head_request);
}
void ConnectionManagerImpl::ActiveStream::encode100ContinueHeaders(
ActiveStreamEncoderFilter* filter, HeaderMap& headers) {
resetIdleTimer();
ASSERT(connection_manager_.config_.proxy100Continue());
// Make sure commonContinue continues encode100ContinueHeaders.
has_continue_headers_ = true;
// Similar to the block in encodeHeaders, run encode100ContinueHeaders on each
// filter. This is simpler than that case because 100-continue implies no
// end-stream, and because normal headers are still coming, there's no need for
// complex continuation logic.
// 100-continue filter iteration should always start with the next filter if available.
std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
commonEncodePrefix(filter, false, FilterIterationStartState::AlwaysStartFromNext);
for (; entry != encoder_filters_.end(); entry++) {
ASSERT(!(state_.filter_call_state_ & FilterCallState::Encode100ContinueHeaders));
state_.filter_call_state_ |= FilterCallState::Encode100ContinueHeaders;
FilterHeadersStatus status = (*entry)->handle_->encode100ContinueHeaders(headers);
state_.filter_call_state_ &= ~FilterCallState::Encode100ContinueHeaders;
ENVOY_STREAM_LOG(trace, "encode 100 continue headers called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
if (!(*entry)->commonHandleAfter100ContinueHeadersCallback(status)) {
return;
}
}
// Strip the T-E headers etc. Defer other header additions as well as drain-close logic to the
// continuation headers.
ConnectionManagerUtility::mutateResponseHeaders(headers, request_headers_.get(), EMPTY_STRING);
// Count both the 1xx and follow-up response code in stats.
chargeStats(headers);
ENVOY_STREAM_LOG(debug, "encoding 100 continue headers via codec:\n{}", *this, headers);
// Now actually encode via the codec.
response_encoder_->encode100ContinueHeaders(headers);
}
void ConnectionManagerImpl::ActiveStream::encodeHeaders(ActiveStreamEncoderFilter* filter,
HeaderMap& headers, bool end_stream) {
resetIdleTimer();
disarmRequestTimeout();
// Headers filter iteration should always start with the next filter if available.
std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
commonEncodePrefix(filter, end_stream, FilterIterationStartState::AlwaysStartFromNext);
std::list<ActiveStreamEncoderFilterPtr>::iterator continue_data_entry = encoder_filters_.end();
for (; entry != encoder_filters_.end(); entry++) {
ASSERT(!(state_.filter_call_state_ & FilterCallState::EncodeHeaders));
state_.filter_call_state_ |= FilterCallState::EncodeHeaders;
(*entry)->end_stream_ =
encoding_headers_only_ || (end_stream && continue_data_entry == encoder_filters_.end());
FilterHeadersStatus status = (*entry)->handle_->encodeHeaders(headers, (*entry)->end_stream_);
if ((*entry)->end_stream_) {
(*entry)->handle_->encodeComplete();
}
state_.filter_call_state_ &= ~FilterCallState::EncodeHeaders;
ENVOY_STREAM_LOG(trace, "encode headers called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
const auto continue_iteration =
(*entry)->commonHandleAfterHeadersCallback(status, encoding_headers_only_);
// If we're encoding a headers only response, then mark the local as complete. This ensures
// that we don't attempt to reset the downstream request in doEndStream.
if (encoding_headers_only_) {
state_.local_complete_ = true;
}
if (!continue_iteration) {
return;
}
// Here we handle the case where we have a header only response, but a filter adds a body
// to it. We need to not raise end_stream = true to further filters during inline iteration.
if (end_stream && buffered_response_data_ && continue_data_entry == encoder_filters_.end()) {
continue_data_entry = entry;
}
}
// Base headers.
connection_manager_.config_.dateProvider().setDateHeader(headers);
// The following setReference() is safe because serverName() is constant for the life of the
// listener.
headers.insertServer().value().setReference(connection_manager_.config_.serverName());
ConnectionManagerUtility::mutateResponseHeaders(headers, request_headers_.get(),
connection_manager_.config_.via());
// See if we want to drain/close the connection. Send the go away frame prior to encoding the
// header block.
if (connection_manager_.drain_state_ == DrainState::NotDraining &&
connection_manager_.drain_close_.drainClose()) {
// This doesn't really do anything for HTTP/1.1 other than give the connection another boost
// of time to race with incoming requests. It mainly just keeps the logic the same between
// HTTP/1.1 and HTTP/2.
connection_manager_.startDrainSequence();
connection_manager_.stats_.named_.downstream_cx_drain_close_.inc();
ENVOY_STREAM_LOG(debug, "drain closing connection", *this);
}
if (connection_manager_.drain_state_ == DrainState::NotDraining && state_.saw_connection_close_) {
ENVOY_STREAM_LOG(debug, "closing connection due to connection close header", *this);
connection_manager_.drain_state_ = DrainState::Closing;
}
if (connection_manager_.drain_state_ == DrainState::NotDraining &&
connection_manager_.overload_disable_keepalive_ref_ == Server::OverloadActionState::Active) {
ENVOY_STREAM_LOG(debug, "disabling keepalive due to envoy overload", *this);
connection_manager_.drain_state_ = DrainState::Closing;
connection_manager_.stats_.named_.downstream_cx_overload_disable_keepalive_.inc();
}
// If we are destroying a stream before remote is complete and the connection does not support
// multiplexing, we should disconnect since we don't want to wait around for the request to
// finish.
if (!state_.remote_complete_) {
if (connection_manager_.codec_->protocol() != Protocol::Http2) {
connection_manager_.drain_state_ = DrainState::Closing;
}
connection_manager_.stats_.named_.downstream_rq_response_before_rq_complete_.inc();
}
if (connection_manager_.drain_state_ == DrainState::Closing &&
connection_manager_.codec_->protocol() != Protocol::Http2) {
// If the connection manager is draining send "Connection: Close" on HTTP/1.1 connections.
// Do not do this for H2 (which drains via GOAWAY) or Upgrade (as the upgrade
// payload is no longer HTTP/1.1).
if (!Utility::isUpgrade(headers)) {
headers.insertConnection().value().setReference(Headers::get().ConnectionValues.Close);
}
}
if (connection_manager_.config_.tracingConfig()) {
if (connection_manager_.config_.tracingConfig()->operation_name_ ==
Tracing::OperationName::Ingress) {
// For ingress (inbound) responses, if the request headers do not include a
// decorator operation (override), then pass the decorator's operation name (if defined)
// as a response header to enable the client service to use it in its client span.
if (decorated_operation_) {
headers.insertEnvoyDecoratorOperation().value(*decorated_operation_);
}
} else if (connection_manager_.config_.tracingConfig()->operation_name_ ==
Tracing::OperationName::Egress) {
const HeaderEntry* resp_operation_override = headers.EnvoyDecoratorOperation();
// For egress (outbound) responses, if a decorator operation name has been provided, it
// should be used to override the active span's operation.
if (resp_operation_override) {
if (!resp_operation_override->value().empty() && active_span_) {
active_span_->setOperation(std::string(resp_operation_override->value().getStringView()));
}
// Remove the header so it is not propagated to the upstream service.
headers.removeEnvoyDecoratorOperation();
}
}
}
chargeStats(headers);
ENVOY_STREAM_LOG(debug, "encoding headers via codec (end_stream={}):\n{}", *this,
encoding_headers_only_ ||
(end_stream && continue_data_entry == encoder_filters_.end()),
headers);
// Now actually encode via the codec.
stream_info_.onFirstDownstreamTxByteSent();
response_encoder_->encodeHeaders(
headers,
encoding_headers_only_ || (end_stream && continue_data_entry == encoder_filters_.end()));
if (continue_data_entry != encoder_filters_.end()) {
// We use the continueEncoding() code since it will correctly handle not calling
// encodeHeaders() again. Fake setting StopSingleIteration since the continueEncoding() code
// expects it.
ASSERT(buffered_response_data_);
(*continue_data_entry)->iteration_state_ =
ActiveStreamFilterBase::IterationState::StopSingleIteration;
(*continue_data_entry)->continueEncoding();
} else {
// End encoding if this is a header only response, either due to a filter converting it to one
// or due to the upstream returning headers only.
maybeEndEncode(encoding_headers_only_ || end_stream);
}
}
void ConnectionManagerImpl::ActiveStream::encodeMetadata(ActiveStreamEncoderFilter* filter,
MetadataMapPtr&& metadata_map_ptr) {
resetIdleTimer();
// Metadata currently goes through all filters.
ASSERT(filter == nullptr);
std::list<ActiveStreamEncoderFilterPtr>::iterator entry = encoder_filters_.begin();
for (; entry != encoder_filters_.end(); entry++) {
FilterMetadataStatus status = (*entry)->handle_->encodeMetadata(*metadata_map_ptr);
ENVOY_STREAM_LOG(trace, "encode metadata called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
}
// TODO(soya3129): update stats with metadata.
// Now encode metadata via the codec.
if (!metadata_map_ptr->empty()) {
ENVOY_STREAM_LOG(debug, "encoding metadata via codec:\n{}", *this, *metadata_map_ptr);
MetadataMapVector metadata_map_vector;
metadata_map_vector.emplace_back(std::move(metadata_map_ptr));
response_encoder_->encodeMetadata(metadata_map_vector);
}
}
HeaderMap& ConnectionManagerImpl::ActiveStream::addEncodedTrailers() {
// Trailers can only be added during the last data frame (i.e. end_stream = true).
ASSERT(state_.filter_call_state_ & FilterCallState::LastDataFrame);
// Trailers can only be added once.
ASSERT(!response_trailers_);
response_trailers_ = std::make_unique<HeaderMapImpl>();
return *response_trailers_;
}
void ConnectionManagerImpl::ActiveStream::addEncodedData(ActiveStreamEncoderFilter& filter,
Buffer::Instance& data, bool streaming) {
if (state_.filter_call_state_ == 0 ||
(state_.filter_call_state_ & FilterCallState::EncodeHeaders) ||
(state_.filter_call_state_ & FilterCallState::EncodeData)) {
// Make sure if this triggers watermarks, the correct action is taken.
state_.encoder_filters_streaming_ = streaming;
// If no call is happening or we are in the encode headers/data callback, buffer the data.
// Inline processing happens in the encodeHeaders() callback if necessary.
filter.commonHandleBufferData(data);
} else if (state_.filter_call_state_ & FilterCallState::EncodeTrailers) {
// In this case we need to inline dispatch the data to further filters. If those filters
// choose to buffer/stop iteration that's fine.
encodeData(&filter, data, false, FilterIterationStartState::AlwaysStartFromNext);
} else {
// TODO(mattklein123): Formalize error handling for filters and add tests. Should probably
// throw an exception here.
NOT_IMPLEMENTED_GCOVR_EXCL_LINE;
}
}
void ConnectionManagerImpl::ActiveStream::encodeData(
ActiveStreamEncoderFilter* filter, Buffer::Instance& data, bool end_stream,
FilterIterationStartState filter_iteration_start_state) {
resetIdleTimer();
// If we previously decided to encode only the headers, do nothing here.
if (encoding_headers_only_) {
return;
}
// Filter iteration may start at the current filter.
std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
commonEncodePrefix(filter, end_stream, filter_iteration_start_state);
auto trailers_added_entry = encoder_filters_.end();
const bool trailers_exists_at_start = response_trailers_ != nullptr;
for (; entry != encoder_filters_.end(); entry++) {
// If the filter pointed to by entry has stopped for all frame types, return now.
if (handleDataIfStopAll(**entry, data, state_.encoder_filters_streaming_)) {
return;
}
// If end_stream_ is marked for a filter, the data is not for this filter and filters after.
// For details, please see the comment in the ActiveStream::decodeData() function.
if ((*entry)->end_stream_) {
return;
}
ASSERT(!(state_.filter_call_state_ & FilterCallState::EncodeData));
// We check the response_trailers_ pointer here in case addEncodedTrailers
// is called in encodeData during a previous filter invocation, at which point we communicate to
// the current and future filters that the stream has not yet ended.
state_.filter_call_state_ |= FilterCallState::EncodeData;
if (end_stream) {
state_.filter_call_state_ |= FilterCallState::LastDataFrame;
}
recordLatestDataFilter(entry, state_.latest_data_encoding_filter_, encoder_filters_);
(*entry)->end_stream_ = end_stream && !response_trailers_;
FilterDataStatus status = (*entry)->handle_->encodeData(data, (*entry)->end_stream_);
if ((*entry)->end_stream_) {
(*entry)->handle_->encodeComplete();
}
state_.filter_call_state_ &= ~FilterCallState::EncodeData;
if (end_stream) {
state_.filter_call_state_ &= ~FilterCallState::LastDataFrame;
}
ENVOY_STREAM_LOG(trace, "encode data called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
if (!trailers_exists_at_start && response_trailers_ &&
trailers_added_entry == encoder_filters_.end()) {
trailers_added_entry = entry;
}
if (!(*entry)->commonHandleAfterDataCallback(status, data, state_.encoder_filters_streaming_)) {
return;
}
}
ENVOY_STREAM_LOG(trace, "encoding data via codec (size={} end_stream={})", *this, data.length(),
end_stream);
stream_info_.addBytesSent(data.length());
// If trailers were added during encodeData we need to trigger encodeTrailers in order
// to allow filters to process the trailers.
if (trailers_added_entry != encoder_filters_.end()) {
response_encoder_->encodeData(data, false);
encodeTrailers(trailers_added_entry->get(), *response_trailers_);
} else {
response_encoder_->encodeData(data, end_stream);
maybeEndEncode(end_stream);
}
}
void ConnectionManagerImpl::ActiveStream::encodeTrailers(ActiveStreamEncoderFilter* filter,
HeaderMap& trailers) {
resetIdleTimer();
// If we previously decided to encode only the headers, do nothing here.
if (encoding_headers_only_) {
return;
}
// Filter iteration may start at the current filter.
std::list<ActiveStreamEncoderFilterPtr>::iterator entry =
commonEncodePrefix(filter, true, FilterIterationStartState::CanStartFromCurrent);
for (; entry != encoder_filters_.end(); entry++) {
// If the filter pointed to by entry has stopped for all frame types, return now.
if ((*entry)->stoppedAll()) {
return;
}
ASSERT(!(state_.filter_call_state_ & FilterCallState::EncodeTrailers));
state_.filter_call_state_ |= FilterCallState::EncodeTrailers;
FilterTrailersStatus status = (*entry)->handle_->encodeTrailers(trailers);
(*entry)->handle_->encodeComplete();
(*entry)->end_stream_ = true;
state_.filter_call_state_ &= ~FilterCallState::EncodeTrailers;
ENVOY_STREAM_LOG(trace, "encode trailers called: filter={} status={}", *this,
static_cast<const void*>((*entry).get()), static_cast<uint64_t>(status));
if (!(*entry)->commonHandleAfterTrailersCallback(status)) {
return;
}
}
ENVOY_STREAM_LOG(debug, "encoding trailers via codec:\n{}", *this, trailers);
response_encoder_->encodeTrailers(trailers);
maybeEndEncode(true);
}
void ConnectionManagerImpl::ActiveStream::maybeEndEncode(bool end_stream) {
if (end_stream) {
stream_info_.onLastDownstreamTxByteSent();
request_response_timespan_->complete();
connection_manager_.doEndStream(*this);
}
}
bool ConnectionManagerImpl::ActiveStream::handleDataIfStopAll(ActiveStreamFilterBase& filter,
Buffer::Instance& data,
bool& filter_streaming) {
if (filter.stoppedAll()) {
ASSERT(!filter.canIterate());
filter_streaming =
filter.iteration_state_ == ActiveStreamFilterBase::IterationState::StopAllWatermark;
filter.commonHandleBufferData(data);
return true;
}
return false;
}
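// Example: a filter returning FilterHeadersStatus::StopAllIterationAndBuffer from its headers
// callback causes each subsequent data frame to be absorbed here into that filter's buffer
// (filter_streaming = false). StopAllIterationAndWatermark behaves the same but marks the
// buffer as streaming, so watermark-based flow control applies instead of an overflow local
// reply (413 on the request path, 500 on the response path). Iteration later resumes via
// commonContinue().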
void ConnectionManagerImpl::ActiveStream::onResetStream(StreamResetReason, absl::string_view) {
// NOTE: This function gets called in all of the following cases:
// 1) We TX an app level reset
// 2) The codec TX a codec level reset
// 3) The codec RX a reset
// If we need to differentiate, we need to do it inside the codec; this is a reasonable place
// to start.
connection_manager_.stats_.named_.downstream_rq_rx_reset_.inc();
connection_manager_.doDeferredStreamDestroy(*this);
}
void ConnectionManagerImpl::ActiveStream::onAboveWriteBufferHighWatermark() {
ENVOY_STREAM_LOG(debug, "Disabling upstream stream due to downstream stream watermark.", *this);
callHighWatermarkCallbacks();
}
void ConnectionManagerImpl::ActiveStream::onBelowWriteBufferLowWatermark() {
ENVOY_STREAM_LOG(debug, "Enabling upstream stream due to downstream stream watermark.", *this);
callLowWatermarkCallbacks();
}
Tracing::OperationName ConnectionManagerImpl::ActiveStream::operationName() const {
return connection_manager_.config_.tracingConfig()->operation_name_;
}
const std::vector<Http::LowerCaseString>&
ConnectionManagerImpl::ActiveStream::requestHeadersForTags() const {
return connection_manager_.config_.tracingConfig()->request_headers_for_tags_;
}
bool ConnectionManagerImpl::ActiveStream::verbose() const {
return connection_manager_.config_.tracingConfig()->verbose_;
}
void ConnectionManagerImpl::ActiveStream::callHighWatermarkCallbacks() {
++high_watermark_count_;
for (auto watermark_callbacks : watermark_callbacks_) {
watermark_callbacks->onAboveWriteBufferHighWatermark();
}
}
void ConnectionManagerImpl::ActiveStream::callLowWatermarkCallbacks() {
ASSERT(high_watermark_count_ > 0);
--high_watermark_count_;
for (auto watermark_callbacks : watermark_callbacks_) {
watermark_callbacks->onBelowWriteBufferLowWatermark();
}
}
void ConnectionManagerImpl::ActiveStream::setBufferLimit(uint32_t new_limit) {
ENVOY_STREAM_LOG(debug, "setting buffer limit to {}", *this, new_limit);
buffer_limit_ = new_limit;
if (buffered_request_data_) {
buffered_request_data_->setWatermarks(buffer_limit_);
}
if (buffered_response_data_) {
buffered_response_data_->setWatermarks(buffer_limit_);
}
}
bool ConnectionManagerImpl::ActiveStream::createFilterChain() {
if (state_.created_filter_chain_) {
return false;
}
bool upgrade_rejected = false;
auto upgrade = request_headers_ ? request_headers_->Upgrade() : nullptr;
state_.created_filter_chain_ = true;
if (upgrade != nullptr) {
const Router::RouteEntry::UpgradeMap* upgrade_map = nullptr;
// We must check if the 'cached_route_' optional is populated since this function can be called
// early via sendLocalReply(), before the cached route is populated.
if (cached_route_.has_value() && cached_route_.value() && cached_route_.value()->routeEntry()) {
upgrade_map = &cached_route_.value()->routeEntry()->upgradeMap();
}
if (connection_manager_.config_.filterFactory().createUpgradeFilterChain(
upgrade->value().getStringView(), upgrade_map, *this)) {
state_.successful_upgrade_ = true;
connection_manager_.stats_.named_.downstream_cx_upgrades_total_.inc();
connection_manager_.stats_.named_.downstream_cx_upgrades_active_.inc();
return true;
} else {
upgrade_rejected = true;
// Fall through to the default filter chain. The function calling this
// will send a local reply indicating that the upgrade failed.
}
}
connection_manager_.config_.filterFactory().createFilterChain(*this);
return !upgrade_rejected;
}
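// Example: for an "Upgrade: websocket" request, createUpgradeFilterChain() above consults the
// route's UpgradeMap (when a cached route is available) to decide whether the upgrade is
// enabled and which filters to create. When the upgrade is rejected, the default chain is
// created instead and decodeHeaders() responds with a 403 (see
// downstream_rq_ws_on_non_ws_route_).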
void ConnectionManagerImpl::ActiveStreamFilterBase::commonContinue() {
// TODO(mattklein123): Raise an error if this is called during a callback.
if (!canContinue()) {
ENVOY_STREAM_LOG(trace, "cannot continue filter chain: filter={}", parent_,
static_cast<const void*>(this));
return;
}
ENVOY_STREAM_LOG(trace, "continuing filter chain: filter={}", parent_,
static_cast<const void*>(this));
ASSERT(!canIterate());
// If iteration has stopped for all frame types, set iterate_from_current_filter_ to true so the
// filter iteration starts with the current filter instead of the next one.
if (stoppedAll()) {
iterate_from_current_filter_ = true;
}
allowIteration();
// Only resume with do100ContinueHeaders() if we've actually seen a 100-Continue.
if (parent_.has_continue_headers_ && !continue_headers_continued_) {
continue_headers_continued_ = true;
do100ContinueHeaders();
// If the response headers have not yet come in, don't continue on with
// headers and body. doHeaders() expects the response headers to exist.
if (!parent_.response_headers_.get()) {
return;
}
}
// Make sure that we handle the zero byte data frame case. We make no effort to optimize this
// case in terms of merging it into a header only request/response. This could be done in the
// future.
if (!headers_continued_) {
headers_continued_ = true;
doHeaders(complete() && !bufferedData() && !trailers());
}
// TODO(mattklein123): If a filter returns StopIterationNoBuffer and then does a continue, we
// won't be able to end the stream if there is no buffered data. Need to handle this.
if (bufferedData()) {
doData(complete() && !trailers());
}
if (trailers()) {
doTrailers();
}
iterate_from_current_filter_ = false;
}
bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfter100ContinueHeadersCallback(
FilterHeadersStatus status) {
ASSERT(parent_.has_continue_headers_);
ASSERT(!continue_headers_continued_);
ASSERT(canIterate());
if (status == FilterHeadersStatus::StopIteration) {
iteration_state_ = IterationState::StopSingleIteration;
return false;
} else {
ASSERT(status == FilterHeadersStatus::Continue);
continue_headers_continued_ = true;
return true;
}
}
bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfterHeadersCallback(
FilterHeadersStatus status, bool& headers_only) {
ASSERT(!headers_continued_);
ASSERT(canIterate());
if (status == FilterHeadersStatus::StopIteration) {
iteration_state_ = IterationState::StopSingleIteration;
return false;
} else if (status == FilterHeadersStatus::StopAllIterationAndBuffer) {
iteration_state_ = IterationState::StopAllBuffer;
return false;
} else if (status == FilterHeadersStatus::StopAllIterationAndWatermark) {
iteration_state_ = IterationState::StopAllWatermark;
return false;
} else if (status == FilterHeadersStatus::ContinueAndEndStream) {
// Set headers_only to true so we know to end early if necessary,
// but continue filter iteration so we actually write the headers/run the cleanup code.
headers_only = true;
ENVOY_STREAM_LOG(debug, "converting to headers only", parent_);
return true;
} else {
ASSERT(status == FilterHeadersStatus::Continue);
headers_continued_ = true;
return true;
}
}
void ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleBufferData(
Buffer::Instance& provided_data) {
// The way we do buffering is a little complicated which is why we have this common function
// which is used for both encoding and decoding. When data first comes into our filter pipeline,
// we send it through. Any filter can choose to stop iteration and buffer or not. If we then
// continue iteration in the future, we use the buffered data. A future filter can stop and
// buffer again. In this case, since we are already operating on buffered data, we don't
// rebuffer, because we assume the filter has modified the buffer as it wishes in place.
if (bufferedData().get() != &provided_data) {
if (!bufferedData()) {
bufferedData() = createBuffer();
}
bufferedData()->move(provided_data);
}
}
bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfterDataCallback(
FilterDataStatus status, Buffer::Instance& provided_data, bool& buffer_was_streaming) {
if (status == FilterDataStatus::Continue) {
if (iteration_state_ == IterationState::StopSingleIteration) {
commonHandleBufferData(provided_data);
commonContinue();
return false;
} else {
ASSERT(headers_continued_);
}
} else {
iteration_state_ = IterationState::StopSingleIteration;
if (status == FilterDataStatus::StopIterationAndBuffer ||
status == FilterDataStatus::StopIterationAndWatermark) {
buffer_was_streaming = status == FilterDataStatus::StopIterationAndWatermark;
commonHandleBufferData(provided_data);
}
return false;
}
return true;
}
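// Note: FilterDataStatus::StopIterationNoBuffer also lands in the else branch above:
// iteration stops but the data is intentionally not buffered (see the TODO in
// commonContinue() about ending the stream when no buffered data exists).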
bool ConnectionManagerImpl::ActiveStreamFilterBase::commonHandleAfterTrailersCallback(
FilterTrailersStatus status) {
if (status == FilterTrailersStatus::Continue) {
if (iteration_state_ == IterationState::StopSingleIteration) {
commonContinue();
return false;
} else {
ASSERT(headers_continued_);
}
} else {
return false;
}
return true;
}
const Network::Connection* ConnectionManagerImpl::ActiveStreamFilterBase::connection() {
return parent_.connection();
}
Event::Dispatcher& ConnectionManagerImpl::ActiveStreamFilterBase::dispatcher() {
return parent_.connection_manager_.read_callbacks_->connection().dispatcher();
}
StreamInfo::StreamInfo& ConnectionManagerImpl::ActiveStreamFilterBase::streamInfo() {
return parent_.stream_info_;
}
Tracing::Span& ConnectionManagerImpl::ActiveStreamFilterBase::activeSpan() {
if (parent_.active_span_) {
return *parent_.active_span_;
} else {
return Tracing::NullSpan::instance();
}
}
Tracing::Config& ConnectionManagerImpl::ActiveStreamFilterBase::tracingConfig() { return parent_; }
Upstream::ClusterInfoConstSharedPtr ConnectionManagerImpl::ActiveStreamFilterBase::clusterInfo() {
// NOTE: Refreshing route caches clusterInfo as well.
if (!parent_.cached_route_.has_value()) {
parent_.refreshCachedRoute();
}
return parent_.cached_cluster_info_.value();
}
Router::RouteConstSharedPtr ConnectionManagerImpl::ActiveStreamFilterBase::route() {
if (!parent_.cached_route_.has_value()) {
parent_.refreshCachedRoute();
}
return parent_.cached_route_.value();
}
void ConnectionManagerImpl::ActiveStreamFilterBase::clearRouteCache() {
parent_.cached_route_ = absl::optional<Router::RouteConstSharedPtr>();
parent_.cached_cluster_info_ = absl::optional<Upstream::ClusterInfoConstSharedPtr>();
}
Buffer::WatermarkBufferPtr ConnectionManagerImpl::ActiveStreamDecoderFilter::createBuffer() {
auto buffer =
std::make_unique<Buffer::WatermarkBuffer>([this]() -> void { this->requestDataDrained(); },
[this]() -> void { this->requestDataTooLarge(); });
buffer->setWatermarks(parent_.buffer_limit_);
return buffer;
}
HeaderMap& ConnectionManagerImpl::ActiveStreamDecoderFilter::addDecodedTrailers() {
return parent_.addDecodedTrailers();
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::addDecodedData(Buffer::Instance& data,
bool streaming) {
parent_.addDecodedData(*this, data, streaming);
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::injectDecodedDataToFilterChain(
Buffer::Instance& data, bool end_stream) {
parent_.decodeData(this, data, end_stream,
ActiveStream::FilterIterationStartState::CanStartFromCurrent);
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::continueDecoding() { commonContinue(); }
void ConnectionManagerImpl::ActiveStreamDecoderFilter::encode100ContinueHeaders(
HeaderMapPtr&& headers) {
// If Envoy is not configured to proxy 100-Continue responses, swallow the 100 Continue
// here. This avoids the potential situation where Envoy strips Expect: 100-Continue and sends a
// 100-Continue, then proxies a duplicate 100 Continue from upstream.
if (parent_.connection_manager_.config_.proxy100Continue()) {
parent_.continue_headers_ = std::move(headers);
parent_.encode100ContinueHeaders(nullptr, *parent_.continue_headers_);
}
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeHeaders(HeaderMapPtr&& headers,
bool end_stream) {
parent_.response_headers_ = std::move(headers);
parent_.encodeHeaders(nullptr, *parent_.response_headers_, end_stream);
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeData(Buffer::Instance& data,
bool end_stream) {
parent_.encodeData(nullptr, data, end_stream,
ActiveStream::FilterIterationStartState::CanStartFromCurrent);
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeTrailers(HeaderMapPtr&& trailers) {
parent_.response_trailers_ = std::move(trailers);
parent_.encodeTrailers(nullptr, *parent_.response_trailers_);
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::encodeMetadata(
MetadataMapPtr&& metadata_map_ptr) {
parent_.encodeMetadata(nullptr, std::move(metadata_map_ptr));
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::
onDecoderFilterAboveWriteBufferHighWatermark() {
ENVOY_STREAM_LOG(debug, "Read-disabling downstream stream due to filter callbacks.", parent_);
parent_.response_encoder_->getStream().readDisable(true);
parent_.connection_manager_.stats_.named_.downstream_flow_control_paused_reading_total_.inc();
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::requestDataTooLarge() {
ENVOY_STREAM_LOG(debug, "request data too large watermark exceeded", parent_);
if (parent_.state_.decoder_filters_streaming_) {
onDecoderFilterAboveWriteBufferHighWatermark();
} else {
parent_.connection_manager_.stats_.named_.downstream_rq_too_large_.inc();
sendLocalReply(Code::PayloadTooLarge, CodeUtility::toString(Code::PayloadTooLarge), nullptr,
absl::nullopt);
}
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::requestDataDrained() {
// If this is called it means the call to requestDataTooLarge() was a
// streaming call, or a 413 would have been sent.
onDecoderFilterBelowWriteBufferLowWatermark();
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::
onDecoderFilterBelowWriteBufferLowWatermark() {
ENVOY_STREAM_LOG(debug, "Read-enabling downstream stream due to filter callbacks.", parent_);
parent_.response_encoder_->getStream().readDisable(false);
parent_.connection_manager_.stats_.named_.downstream_flow_control_resumed_reading_total_.inc();
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::addDownstreamWatermarkCallbacks(
DownstreamWatermarkCallbacks& watermark_callbacks) {
// This is called exactly once per upstream-stream, by the router filter. Therefore, we
// expect the same callbacks to not be registered twice.
ASSERT(std::find(parent_.watermark_callbacks_.begin(), parent_.watermark_callbacks_.end(),
&watermark_callbacks) == parent_.watermark_callbacks_.end());
parent_.watermark_callbacks_.emplace(parent_.watermark_callbacks_.end(), &watermark_callbacks);
for (uint32_t i = 0; i < parent_.high_watermark_count_; ++i) {
watermark_callbacks.onAboveWriteBufferHighWatermark();
}
}
void ConnectionManagerImpl::ActiveStreamDecoderFilter::removeDownstreamWatermarkCallbacks(
DownstreamWatermarkCallbacks& watermark_callbacks) {
ASSERT(std::find(parent_.watermark_callbacks_.begin(), parent_.watermark_callbacks_.end(),
&watermark_callbacks) != parent_.watermark_callbacks_.end());
parent_.watermark_callbacks_.remove(&watermark_callbacks);
}
bool ConnectionManagerImpl::ActiveStreamDecoderFilter::recreateStream() {
// Because the filter's and the HCM's views of whether the stream has a body and whether
// the stream is complete may differ, re-check bytesReceived() to make sure there was no
// body from the HCM's point of view.
if (!complete() || parent_.stream_info_.bytesReceived() != 0) {
return false;
}
// n.b. we do not currently change the codecs to point at the new stream decoder because
// the decoder callbacks are complete. It would be good to null out that pointer, but it
// should not be necessary.
HeaderMapPtr request_headers(std::move(parent_.request_headers_));
StreamEncoder* response_encoder = parent_.response_encoder_;
parent_.response_encoder_ = nullptr;
// This functionally deletes the stream (via deferred delete) so do not
// reference anything beyond this point.
parent_.connection_manager_.doEndStream(this->parent_);
StreamDecoder& new_stream = parent_.connection_manager_.newStream(*response_encoder, true);
new_stream.decodeHeaders(std::move(request_headers), true);
return true;
}
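
// Usage note: recreateStream() is the mechanism behind internal redirects -- the router
// filter (the only expected caller here) ends the current stream via doEndStream() and
// replays the saved request headers as a brand-new stream on the same response encoder.
// The headers-only precondition above exists because body bytes that were already
// consumed cannot be replayed on the new stream.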
Buffer::WatermarkBufferPtr ConnectionManagerImpl::ActiveStreamEncoderFilter::createBuffer() {
auto buffer = new Buffer::WatermarkBuffer([this]() -> void { this->responseDataDrained(); },
[this]() -> void { this->responseDataTooLarge(); });
buffer->setWatermarks(parent_.buffer_limit_);
return Buffer::WatermarkBufferPtr{buffer};
}
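
// A minimal sketch of the watermark mechanics wired up above (constructor argument order
// assumed from this call site: below-low-watermark callback first, above-high-watermark
// callback second; exact Buffer::WatermarkBuffer signatures may differ across versions):
//
//   Buffer::WatermarkBuffer buf([] { /* drained below low watermark */ },
//                               [] { /* crossed high watermark */ });
//   buf.setWatermarks(1024);            // high watermark = 1024 bytes
//   buf.add(std::string(2048, 'a'));    // crossing 1024 fires the "too large" callback
//   buf.drain(2048);                    // dropping below low fires the "drained" callback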
void ConnectionManagerImpl::ActiveStreamEncoderFilter::addEncodedData(Buffer::Instance& data,
bool streaming) {
return parent_.addEncodedData(*this, data, streaming);
}
void ConnectionManagerImpl::ActiveStreamEncoderFilter::injectEncodedDataToFilterChain(
Buffer::Instance& data, bool end_stream) {
parent_.encodeData(this, data, end_stream,
ActiveStream::FilterIterationStartState::CanStartFromCurrent);
}
HeaderMap& ConnectionManagerImpl::ActiveStreamEncoderFilter::addEncodedTrailers() {
return parent_.addEncodedTrailers();
}
void ConnectionManagerImpl::ActiveStreamEncoderFilter::
onEncoderFilterAboveWriteBufferHighWatermark() {
ENVOY_STREAM_LOG(debug, "Disabling upstream stream due to filter callbacks.", parent_);
parent_.callHighWatermarkCallbacks();
}
void ConnectionManagerImpl::ActiveStreamEncoderFilter::
onEncoderFilterBelowWriteBufferLowWatermark() {
ENVOY_STREAM_LOG(debug, "Enabling upstream stream due to filter callbacks.", parent_);
parent_.callLowWatermarkCallbacks();
}
void ConnectionManagerImpl::ActiveStreamEncoderFilter::continueEncoding() { commonContinue(); }
void ConnectionManagerImpl::ActiveStreamEncoderFilter::responseDataTooLarge() {
if (parent_.state_.encoder_filters_streaming_) {
onEncoderFilterAboveWriteBufferHighWatermark();
} else {
parent_.connection_manager_.stats_.named_.rs_too_large_.inc();
// If headers have not been sent to the user, send a 500.
if (!headers_continued_) {
// Make sure we won't end up with nested watermark calls from the body buffer.
parent_.state_.encoder_filters_streaming_ = true;
allowIteration();
Http::Utility::sendLocalReply(
Grpc::Common::hasGrpcContentType(*parent_.request_headers_),
[&](HeaderMapPtr&& response_headers, bool end_stream) -> void {
parent_.response_headers_ = std::move(response_headers);
parent_.response_encoder_->encodeHeaders(*parent_.response_headers_, end_stream);
parent_.state_.local_complete_ = end_stream;
},
[&](Buffer::Instance& data, bool end_stream) -> void {
parent_.response_encoder_->encodeData(data, end_stream);
parent_.state_.local_complete_ = end_stream;
},
parent_.state_.destroyed_, Http::Code::InternalServerError,
CodeUtility::toString(Http::Code::InternalServerError), absl::nullopt,
parent_.is_head_request_);
parent_.maybeEndEncode(parent_.state_.local_complete_);
} else {
resetStream();
}
}
}
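
// Note on the error path above: the local 500 is written straight to the response
// encoder (see the lambdas handed to Http::Utility::sendLocalReply), bypassing the
// encoder filter chain entirely -- the chain is what produced the oversized data in the
// first place. Setting encoder_filters_streaming_ beforehand prevents nested watermark
// callbacks from the body buffer while the reply is being written.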
void ConnectionManagerImpl::ActiveStreamEncoderFilter::responseDataDrained() {
onEncoderFilterBelowWriteBufferLowWatermark();
}
void ConnectionManagerImpl::ActiveStreamFilterBase::resetStream() {
parent_.connection_manager_.stats_.named_.downstream_rq_tx_reset_.inc();
parent_.connection_manager_.doEndStream(this->parent_);
}
uint64_t ConnectionManagerImpl::ActiveStreamFilterBase::streamId() { return parent_.stream_id_; }
} // namespace Http
} // namespace Envoy<|fim▁end|> |
// Make sure that we handle the zero byte data frame case. We make no effort to optimize this |