file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k)
---|---|---|---|
stock_rank_forecast.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2021/9/12 18:29
Desc: CNINFO - Data Center - Rating Forecasts - Investment Ratings
http://webapi.cninfo.com.cn/#/thematicStatistics?name=%E6%8A%95%E8%B5%84%E8%AF%84%E7%BA%A7
"""
import time
from py_mini_racer import py_mini_racer
import requests
import pandas as pd
js_str = """
function mcode(input) {
var keyStr = "ABCDEFGHIJKLMNOP" + "QRSTUVWXYZabcdef" + "ghijklmnopqrstuv" + "wxyz0123456789+/" + "=";
var output = "";
var chr1, chr2, chr3 = "";
var enc1, enc2, enc3, enc4 = "";
var i = 0;
do {
chr1 = input.charCodeAt(i++);
chr2 = input.charCodeAt(i++);
chr3 = input.charCodeAt(i++);
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
} else if (isNaN(chr3)) {
enc4 = 64;
}
output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2)
+ keyStr.charAt(enc3) + keyStr.charAt(enc4);
chr1 = chr2 = chr3 = "";
enc1 = enc2 = enc3 = enc4 = "";
} while (i < input.length);
return output;
}
"""
def stock_rank_forecast_cninfo(date: | ataFrame:
"""
CNINFO - Data Center - Rating Forecasts - Investment Ratings
http://webapi.cninfo.com.cn/#/thematicStatistics?name=%E6%8A%95%E8%B5%84%E8%AF%84%E7%BA%A7
:param date: query date, in YYYYMMDD format
:type date: str
:return: investment ratings
:rtype: pandas.DataFrame
"""
url = "http://webapi.cninfo.com.cn/api/sysapi/p_sysapi1089"
params = {"tdate": "-".join([date[:4], date[4:6], date[6:]])}
random_time_str = str(int(time.time()))
js_code = py_mini_racer.MiniRacer()
js_code.eval(js_str)
mcode = js_code.call("mcode", random_time_str)
headers = {
"Accept": "*/*",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
"Cache-Control": "no-cache",
"Content-Length": "0",
"Host": "webapi.cninfo.com.cn",
"mcode": mcode,
"Origin": "http://webapi.cninfo.com.cn",
"Pragma": "no-cache",
"Proxy-Connection": "keep-alive",
"Referer": "http://webapi.cninfo.com.cn/",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36",
"X-Requested-With": "XMLHttpRequest",
}
r = requests.post(url, params=params, headers=headers)
data_json = r.json()
temp_df = pd.DataFrame(data_json["records"])
temp_df.columns = [
"证券简称",
"发布日期",
"前一次投资评级",
"评级变化",
"目标价格-上限",
"是否首次评级",
"投资评级",
"研究员名称",
"研究机构简称",
"目标价格-下限",
"证券代码",
]
temp_df = temp_df[[
"证券代码",
"证券简称",
"发布日期",
"研究机构简称",
"研究员名称",
"投资评级",
"是否首次评级",
"评级变化",
"前一次投资评级",
"目标价格-下限",
"目标价格-上限",
]]
temp_df["目标价格-上限"] = pd.to_numeric(temp_df["目标价格-上限"], errors="coerce")
temp_df["目标价格-下限"] = pd.to_numeric(temp_df["目标价格-下限"], errors="coerce")
return temp_df
if __name__ == "__main__":
stock_rank_forecast_cninfo_df = stock_rank_forecast_cninfo(date="20210907")
print(stock_rank_forecast_cninfo_df)
| str = "20210910") -> pd.D |
test_primary_validator.py | from thenewboston_node.business_logic.blockchain.file_blockchain import FileBlockchain
from thenewboston_node.business_logic.models import (
AccountState, Block, Node, NodeDeclarationSignedChangeRequest, PrimaryValidatorSchedule,
PrimaryValidatorScheduleSignedChangeRequest
)
from thenewboston_node.business_logic.node import get_node_signing_key
from thenewboston_node.business_logic.tests.baker_factories import baker
from thenewboston_node.core.utils.cryptography import generate_key_pair
def test_no_pv_schedule(blockchain_directory, blockchain_genesis_state):
blockchain = FileBlockchain(base_directory=blockchain_directory)
blockchain.add_blockchain_state(blockchain_genesis_state)
blockchain.validate()
assert blockchain.get_primary_validator() is None
assert blockchain.get_primary_validator(0) is None
assert blockchain.get_primary_validator(10) is None
def test_can_get_pv_from_blockchain_genesis_state(
blockchain_directory, blockchain_genesis_state, user_account_key_pair
):
blockchain = FileBlockchain(base_directory=blockchain_directory)
account_number = user_account_key_pair.public
node = baker.make(Node, identifier=account_number)
pv_schedule = baker.make(PrimaryValidatorSchedule, begin_block_number=0, end_block_number=99)
blockchain_genesis_state.account_states[account_number] = AccountState(
node=node, primary_validator_schedule=pv_schedule
)
blockchain.add_blockchain_state(blockchain_genesis_state)
blockchain.validate()
assert blockchain.get_primary_validator() == node
assert blockchain.get_primary_validator(0) == node
assert blockchain.get_primary_validator(10) == node
assert blockchain.get_primary_validator(99) == node
assert blockchain.get_primary_validator(100) is None
def test_can_get_pv_from_from_blocks(blockchain_directory, blockchain_genesis_state, user_account_key_pair):
blockchain = FileBlockchain(base_directory=blockchain_directory)
blockchain.add_blockchain_state(blockchain_genesis_state)
blockchain.validate()
signing_key = user_account_key_pair.private
request = NodeDeclarationSignedChangeRequest.create(
network_addresses=['https://127.0.0.1:8555/'], fee_amount=3, signing_key=signing_key
)
node = request.message.node
assert node.identifier
block = Block.create_from_signed_change_request(blockchain, request, get_node_signing_key())
blockchain.add_block(block)
request = PrimaryValidatorScheduleSignedChangeRequest.create(0, 99, signing_key)
block = Block.create_from_signed_change_request(blockchain, request, get_node_signing_key())
blockchain.add_block(block)
assert blockchain.get_primary_validator() == node
assert blockchain.get_primary_validator(0) == node
assert blockchain.get_primary_validator(10) == node
assert blockchain.get_primary_validator(99) == node
assert blockchain.get_primary_validator(100) is None
def test_can_get_node_from_genesis_state_and_pv_from_blocks(
blockchain_directory, blockchain_genesis_state, user_account_key_pair
):
|
def test_can_get_overridden_pv(blockchain_directory, blockchain_genesis_state, user_account_key_pair):
blockchain = FileBlockchain(base_directory=blockchain_directory)
account_number = user_account_key_pair.public
node = baker.make(Node, identifier=account_number)
pv_schedule = baker.make(PrimaryValidatorSchedule, begin_block_number=0, end_block_number=99)
blockchain_genesis_state.account_states[account_number] = AccountState(
node=node, primary_validator_schedule=pv_schedule
)
another_key_pair = generate_key_pair()
another_node = baker.make(Node, identifier=another_key_pair.public)
blockchain_genesis_state.account_states[another_key_pair.public] = AccountState(node=another_node)
blockchain.add_blockchain_state(blockchain_genesis_state)
blockchain.validate()
assert blockchain.get_primary_validator() == node
assert blockchain.get_primary_validator(0) == node
assert blockchain.get_primary_validator(10) == node
assert blockchain.get_primary_validator(99) == node
assert blockchain.get_primary_validator(100) is None
request = PrimaryValidatorScheduleSignedChangeRequest.create(0, 99, another_key_pair.private)
block = Block.create_from_signed_change_request(blockchain, request, get_node_signing_key())
blockchain.add_block(block)
assert blockchain.get_primary_validator() == another_node
assert blockchain.get_primary_validator(0) == another_node
assert blockchain.get_primary_validator(10) == another_node
assert blockchain.get_primary_validator(99) == another_node
assert blockchain.get_primary_validator(100) is None
request = PrimaryValidatorScheduleSignedChangeRequest.create(0, 99, user_account_key_pair.private)
block = Block.create_from_signed_change_request(blockchain, request, get_node_signing_key())
blockchain.add_block(block)
assert blockchain.get_primary_validator() == node
assert blockchain.get_primary_validator(0) == node
assert blockchain.get_primary_validator(10) == node
assert blockchain.get_primary_validator(99) == node
assert blockchain.get_primary_validator(100) is None
| blockchain = FileBlockchain(base_directory=blockchain_directory)
account_number = user_account_key_pair.public
node = baker.make(Node, identifier=account_number)
pv_schedule = baker.make(PrimaryValidatorSchedule, begin_block_number=0, end_block_number=99)
blockchain_genesis_state.account_states[account_number] = AccountState(
node=node, primary_validator_schedule=pv_schedule
)
blockchain.add_blockchain_state(blockchain_genesis_state)
blockchain.validate()
request = PrimaryValidatorScheduleSignedChangeRequest.create(0, 99, user_account_key_pair.private)
block = Block.create_from_signed_change_request(blockchain, request, get_node_signing_key())
blockchain.add_block(block)
assert blockchain.get_primary_validator() == node
assert blockchain.get_primary_validator(0) == node
assert blockchain.get_primary_validator(10) == node
assert blockchain.get_primary_validator(99) == node
assert blockchain.get_primary_validator(100) is None |
lib.rs | //! `tor-units` -- Safe wrappers for primitive numeric types.
//!
//! # Overview
//!
//! This crate is part of
//! [Arti](https://gitlab.torproject.org/tpo/core/arti/), a project to
//! implement [Tor](https://www.torproject.org/) in Rust.
//! It provides safe wrappers for primitive numeric types used in
//! other parts of Arti.
//! In particular, it provides:
//! * a bounded i32 with both checked and clamping constructors,
//! * an integer milliseconds wrapper with conversion to [`Duration`],
//! * an integer seconds wrapper with conversion to [`Duration`],
//! * a percentage wrapper, to prevent accidental failure
//! to divide by 100,
//! * a SendMeVersion which can be compared only.
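//!
//! # Example
//!
//! The snippet below is an illustrative sketch (not taken from upstream
//! documentation) of how these wrappers compose; it uses only items defined
//! later in this file.
//!
//! ```
//! use std::convert::TryFrom;
//! use std::time::Duration;
//! use tor_units::{BoundedInt32, IntegerMilliseconds, Percentage};
//!
//! // Clamping constructor: out-of-range input snaps to the nearest bound.
//! let timeout = IntegerMilliseconds::new(BoundedInt32::<0, 60_000>::saturating_from(90_000));
//! assert_eq!(Duration::try_from(timeout).unwrap(), Duration::from_millis(60_000));
//!
//! // A percentage stays a percentage until a fraction is explicitly requested.
//! let pct = Percentage::new(25_u8);
//! assert_eq!(pct.as_percent(), 25);
//! ```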
#![deny(missing_docs)]
#![warn(noop_method_call)]
#![deny(unreachable_pub)]
#![warn(clippy::all)]
#![deny(clippy::await_holding_lock)]
#![deny(clippy::cargo_common_metadata)]
#![deny(clippy::cast_lossless)]
#![deny(clippy::checked_conversions)]
#![warn(clippy::cognitive_complexity)]
#![deny(clippy::debug_assert_with_mut_call)]
#![deny(clippy::exhaustive_enums)]
#![deny(clippy::exhaustive_structs)]
#![deny(clippy::expl_impl_clone_on_copy)]
#![deny(clippy::fallible_impl_from)]
#![deny(clippy::implicit_clone)]
#![deny(clippy::large_stack_arrays)]
#![warn(clippy::manual_ok_or)]
#![deny(clippy::missing_docs_in_private_items)]
#![deny(clippy::missing_panics_doc)]
#![warn(clippy::needless_borrow)]
#![warn(clippy::needless_pass_by_value)]
#![warn(clippy::option_option)]
#![warn(clippy::rc_buffer)]
#![deny(clippy::ref_option_ref)]
#![warn(clippy::semicolon_if_nothing_returned)]
#![warn(clippy::trait_duplication_in_bounds)]
#![deny(clippy::unnecessary_wraps)]
#![warn(clippy::unseparated_literal_suffix)]
#![deny(clippy::unwrap_used)]
use derive_more::{Add, Display, Div, From, FromStr, Mul};
use std::convert::{TryFrom, TryInto};
use std::time::Duration;
use thiserror::Error;
/// Conversion errors from converting a value into a [`BoundedInt32`].
#[derive(Debug, Clone, PartialEq, Eq, Error)]
#[non_exhaustive]
pub enum Error {
/// A passed value was below the lower bound for the type.
#[error("Value {0} was below the lower bound {1} for this type.")]
BelowLowerBound(i32, i32),
/// A passed value was above the upper bound for the type.
#[error("Value {0} was above the lower bound {1} for this type.")]
AboveUpperBound(i32, i32),
/// Tried to convert a negative value to an unsigned type.
#[error("Tried to convert a negative value to an unsigned type")]
Negative,
/// Tried to parse a value that was not representable as the
/// underlying type.
#[error("Value could not be represented as an i32")]
Unrepresentable,
/// We encountered some kind of integer overflow when converting a number.
#[error("Integer overflow")]
Overflow,
/// Tried to instantiate an uninhabited type.
#[error("No value is valid for this type")]
Uninhabited,
}
/// A 32-bit signed integer with a restricted range.
///
/// This type holds an i32 value such that `LOWER` <= value <= `UPPER`
///
/// # Limitations
///
/// If you try to instantiate this type with LOWER > UPPER, you will
/// get an uninhabitable type. It would be better if we could check that at
/// compile time, and prevent such types from being named.
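///
/// # Example
///
/// A minimal sketch of the two constructor flavours (illustrative, not taken
/// from upstream documentation):
///
/// ```
/// use tor_units::BoundedInt32;
/// type Percent = BoundedInt32<0, 100>;
/// assert_eq!(Percent::checked_new(42).unwrap().get(), 42);
/// assert!(Percent::checked_new(200).is_err()); // above the upper bound
/// assert_eq!(Percent::saturating_new(200).get(), 100); // clamped instead
/// ```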
//
// [TODO: If you need a Bounded* for some type other than i32, ask nickm:
// he has an implementation kicking around.]
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct BoundedInt32<const LOWER: i32, const UPPER: i32> {
/// Interior Value
value: i32,
}
impl<const LOWER: i32, const UPPER: i32> BoundedInt32<LOWER, UPPER> {
/// Lower bound
pub const LOWER: i32 = LOWER;
/// Upper bound
pub const UPPER: i32 = UPPER;
/// Private constructor function for this type.
fn unchecked_new(value: i32) -> Self {
assert!(LOWER <= UPPER); //The compiler optimizes this out, no run-time cost.
BoundedInt32 { value }
}
/// Return the underlying i32 value.
///
/// This value will always be between [`Self::LOWER`] and [`Self::UPPER`],
/// inclusive.
pub fn get(&self) -> i32 {
self.value
}
/// If `val` is within range, return a new `BoundedInt32` wrapping
/// it; otherwise, clamp it to the upper or lower bound as
/// appropriate.
pub fn saturating_new(val: i32) -> Self {
Self::unchecked_new(Self::clamp(val))
}
/// If `val` is an acceptable value inside the range for this type,
/// return a new [`BoundedInt32`]. Otherwise return an error.
pub fn checked_new(val: i32) -> Result<Self, Error> {
if val > UPPER {
Err(Error::AboveUpperBound(val, UPPER))
} else if val < LOWER {
Err(Error::BelowLowerBound(val, LOWER))
} else {
Ok(BoundedInt32::unchecked_new(val))
}
}
/// This private function clamps an input to the acceptable range.
fn clamp(val: i32) -> i32 {
Ord::clamp(val, LOWER, UPPER)
}
/// Convert from the underlying type, clamping to the upper or
/// lower bound if needed.
///
/// # Panics
///
/// This function will panic if UPPER < LOWER.
pub fn saturating_from(val: i32) -> Self {
Self::unchecked_new(Self::clamp(val))
}
/// Convert from a string, clamping to the upper or lower bound if needed.
///
/// # Limitations
///
/// If the input is a number that cannot be represented as an i32,
/// then we return an error instead of clamping it.
pub fn saturating_from_str(s: &str) -> Result<Self, Error> {
if UPPER < LOWER {
// The compiler should optimize this block out at compile time.
return Err(Error::Uninhabited);
}
let val: i32 = s.parse().map_err(|_| Error::Unrepresentable)?;
Ok(Self::saturating_from(val))
}
}
impl<const L: i32, const U: i32> std::fmt::Display for BoundedInt32<L, U> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.value)
}
}
impl<const L: i32, const U: i32> From<BoundedInt32<L, U>> for i32 {
fn from(val: BoundedInt32<L, U>) -> i32 |
}
impl<const L: i32, const U: i32> From<BoundedInt32<L, U>> for f64 {
fn from(val: BoundedInt32<L, U>) -> f64 {
val.value.into()
}
}
impl<const L: i32, const H: i32> TryFrom<i32> for BoundedInt32<L, H> {
type Error = Error;
fn try_from(val: i32) -> Result<Self, Self::Error> {
Self::checked_new(val)
}
}
impl<const L: i32, const H: i32> std::str::FromStr for BoundedInt32<L, H> {
type Err = Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::checked_new(s.parse().map_err(|_| Error::Unrepresentable)?)
}
}
impl From<BoundedInt32<0, 1>> for bool {
fn from(val: BoundedInt32<0, 1>) -> bool {
val.value == 1
}
}
impl From<BoundedInt32<0, 255>> for u8 {
fn from(val: BoundedInt32<0, 255>) -> u8 {
val.value as u8
}
}
impl<const H: i32> From<BoundedInt32<0, H>> for u32 {
fn from(val: BoundedInt32<0, H>) -> u32 {
val.value as u32
}
}
impl<const H: i32> From<BoundedInt32<1, H>> for u32 {
fn from(val: BoundedInt32<1, H>) -> u32 {
val.value as u32
}
}
impl<const L: i32, const H: i32> TryFrom<BoundedInt32<L, H>> for u64 {
type Error = Error;
fn try_from(val: BoundedInt32<L, H>) -> Result<Self, Self::Error> {
if val.value < 0 {
Err(Error::Negative)
} else {
Ok(val.value as u64)
}
}
}
impl<const L: i32, const H: i32> TryFrom<BoundedInt32<L, H>> for usize {
type Error = Error;
fn try_from(val: BoundedInt32<L, H>) -> Result<Self, Self::Error> {
if val.value < 0 {
Err(Error::Negative)
} else {
Ok(val.value as usize)
}
}
}
/// A percentage value represented as a number.
///
/// This type wraps an underlying numeric type, and ensures that callers
/// are clear whether they want a _fraction_, or a _percentage_.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Percentage<T: Copy + Into<f64>> {
/// The underlying percentage value.
value: T,
}
impl<T: Copy + Into<f64>> Percentage<T> {
/// Create a new `Percentage` from the underlying numeric value.
pub fn new(value: T) -> Self {
Self { value }
}
/// Return this value as a (possibly improper) fraction.
///
/// ```
/// use tor_units::Percentage;
/// let pct_200 = Percentage::<u8>::new(200);
/// let pct_100 = Percentage::<u8>::new(100);
/// let pct_50 = Percentage::<u8>::new(50);
///
/// assert_eq!(pct_200.as_fraction(), 2.0);
/// assert_eq!(pct_100.as_fraction(), 1.0);
/// assert_eq!(pct_50.as_fraction(), 0.5);
/// // Note: don't actually compare f64 with ==.
/// ```
pub fn as_fraction(self) -> f64 {
self.value.into() / 100.0
}
/// Return this value as a percentage.
///
/// ```
/// use tor_units::Percentage;
/// let pct_200 = Percentage::<u8>::new(200);
/// let pct_100 = Percentage::<u8>::new(100);
/// let pct_50 = Percentage::<u8>::new(50);
///
/// assert_eq!(pct_200.as_percent(), 200);
/// assert_eq!(pct_100.as_percent(), 100);
/// assert_eq!(pct_50.as_percent(), 50);
/// ```
pub fn as_percent(self) -> T {
self.value
}
}
impl<const H: i32, const L: i32> TryFrom<i32> for Percentage<BoundedInt32<H, L>> {
type Error = Error;
fn try_from(v: i32) -> Result<Self, Error> {
Ok(Percentage::new(v.try_into()?))
}
}
#[derive(
Add, Copy, Clone, Mul, Div, From, FromStr, Display, Debug, PartialEq, Eq, Ord, PartialOrd,
)]
/// This type represents an integer number of milliseconds.
///
/// The underlying type should implement TryInto<u64>.
pub struct IntegerMilliseconds<T> {
/// Interior Value. Should Implement TryInto<u64> to be useful.
value: T,
}
impl<T: TryInto<u64>> IntegerMilliseconds<T> {
/// Public Constructor
pub fn new(value: T) -> Self {
IntegerMilliseconds { value }
}
}
impl<T: TryInto<u64>> TryFrom<IntegerMilliseconds<T>> for Duration {
type Error = <T as TryInto<u64>>::Error;
fn try_from(val: IntegerMilliseconds<T>) -> Result<Self, <T as TryInto<u64>>::Error> {
Ok(Self::from_millis(val.value.try_into()?))
}
}
impl<const H: i32, const L: i32> TryFrom<i32> for IntegerMilliseconds<BoundedInt32<H, L>> {
type Error = Error;
fn try_from(v: i32) -> Result<Self, Error> {
Ok(IntegerMilliseconds::new(v.try_into()?))
}
}
#[derive(
Add, Copy, Clone, Mul, Div, From, FromStr, Display, Debug, PartialEq, Eq, Ord, PartialOrd,
)]
/// This type represents an integer number of seconds.
///
/// The underlying type should implement TryInto<u64>.
pub struct IntegerSeconds<T> {
/// Interior Value. Should Implement TryInto<u64> to be useful.
value: T,
}
impl<T: TryInto<u64>> IntegerSeconds<T> {
/// Public Constructor
pub fn new(value: T) -> Self {
IntegerSeconds { value }
}
}
impl<T: TryInto<u64>> TryFrom<IntegerSeconds<T>> for Duration {
type Error = <T as TryInto<u64>>::Error;
fn try_from(val: IntegerSeconds<T>) -> Result<Self, <T as TryInto<u64>>::Error> {
Ok(Self::from_secs(val.value.try_into()?))
}
}
impl<const H: i32, const L: i32> TryFrom<i32> for IntegerSeconds<BoundedInt32<H, L>> {
type Error = Error;
fn try_from(v: i32) -> Result<Self, Error> {
Ok(IntegerSeconds::new(v.try_into()?))
}
}
#[derive(Copy, Clone, From, FromStr, Display, Debug, PartialEq, Eq, Ord, PartialOrd)]
/// This type represents an integer number of days.
///
/// The underlying type should implement TryInto<u64>.
pub struct IntegerDays<T> {
/// Interior Value. Should Implement TryInto<u64> to be useful.
value: T,
}
impl<T> IntegerDays<T> {
/// Public Constructor
pub fn new(value: T) -> Self {
IntegerDays { value }
}
}
impl<T: TryInto<u64>> TryFrom<IntegerDays<T>> for Duration {
type Error = Error;
fn try_from(val: IntegerDays<T>) -> Result<Self, Error> {
/// Number of seconds in a single day.
const SECONDS_PER_DAY: u64 = 86400;
let days: u64 = val.value.try_into().map_err(|_| Error::Overflow)?;
let seconds = days.checked_mul(SECONDS_PER_DAY).ok_or(Error::Overflow)?;
Ok(Self::from_secs(seconds))
}
}
impl<const H: i32, const L: i32> TryFrom<i32> for IntegerDays<BoundedInt32<H, L>> {
type Error = Error;
fn try_from(v: i32) -> Result<Self, Error> {
Ok(IntegerDays::new(v.try_into()?))
}
}
/// A SendMe Version
///
/// DOCDOC: Explain why this needs to have its own type, or remove it.
#[derive(Clone, Copy, From, FromStr, Display, Debug, PartialEq, Eq, Ord, PartialOrd)]
pub struct SendMeVersion(u8);
impl SendMeVersion {
/// Public Constructor
pub fn new(value: u8) -> Self {
SendMeVersion(value)
}
/// Helper
pub fn get(&self) -> u8 {
self.0
}
}
impl TryFrom<i32> for SendMeVersion {
type Error = Error;
fn try_from(v: i32) -> Result<Self, Error> {
let val_u8 = BoundedInt32::<0, 255>::checked_new(v)?;
Ok(SendMeVersion::new(val_u8.get() as u8))
}
}
#[cfg(test)]
mod tests {
#![allow(clippy::unwrap_used)]
use float_cmp::assert_approx_eq;
use super::*;
use std::convert::TryInto;
type TestFoo = BoundedInt32<1, 5>;
type TestBar = BoundedInt32<-45, 17>;
//make_parameter_type! {TestFoo(3,)}
#[test]
fn entire_range_parsed() {
let x: TestFoo = "1".parse().unwrap();
assert!(x.get() == 1);
let x: TestFoo = "2".parse().unwrap();
assert!(x.get() == 2);
let x: TestFoo = "3".parse().unwrap();
assert!(x.get() == 3);
let x: TestFoo = "4".parse().unwrap();
assert!(x.get() == 4);
let x: TestFoo = "5".parse().unwrap();
assert!(x.get() == 5);
}
#[test]
fn saturating() {
let x: TestFoo = TestFoo::saturating_new(1000);
let x_val: i32 = x.into();
assert!(x_val == TestFoo::UPPER);
let x: TestFoo = TestFoo::saturating_new(0);
let x_val: i32 = x.into();
assert!(x_val == TestFoo::LOWER);
}
#[test]
fn saturating_string() {
let x: TestFoo = TestFoo::saturating_from_str("1000").unwrap();
let x_val: i32 = x.into();
assert!(x_val == TestFoo::UPPER);
let x: TestFoo = TestFoo::saturating_from_str("0").unwrap();
let x_val: i32 = x.into();
assert!(x_val == TestFoo::LOWER);
}
#[test]
#[should_panic]
fn uninhabited_saturating_new() {
// This value should be uncreatable.
let _: BoundedInt32<10, 5> = BoundedInt32::saturating_new(7);
}
#[test]
fn uninhabited_from_string() {
let v: Result<BoundedInt32<10, 5>, Error> = BoundedInt32::saturating_from_str("7");
assert!(matches!(v, Err(Error::Uninhabited)));
}
#[test]
fn errors_correct() {
let x: Result<TestBar, Error> = "1000".parse();
assert!(x.unwrap_err() == Error::AboveUpperBound(1000, TestBar::UPPER));
let x: Result<TestBar, Error> = "-1000".parse();
assert!(x.unwrap_err() == Error::BelowLowerBound(-1000, TestBar::LOWER));
let x: Result<TestBar, Error> = "xyz".parse();
assert!(x.unwrap_err() == Error::Unrepresentable);
}
#[test]
fn display() {
let v = BoundedInt32::<99, 1000>::checked_new(345).unwrap();
assert_eq!(v.to_string(), "345".to_string());
}
#[test]
#[should_panic]
fn checked_too_high() {
let _: TestBar = "1000".parse().unwrap();
}
#[test]
#[should_panic]
fn checked_too_low() {
let _: TestBar = "-46".parse().unwrap();
}
#[test]
fn bounded_to_u64() {
let b: BoundedInt32<-100, 100> = BoundedInt32::checked_new(77).unwrap();
let u: u64 = b.try_into().unwrap();
assert_eq!(u, 77);
let b: BoundedInt32<-100, 100> = BoundedInt32::checked_new(-77).unwrap();
let u: Result<u64, Error> = b.try_into();
assert!(u.is_err());
}
#[test]
fn bounded_to_f64() {
let x: BoundedInt32<-100, 100> = BoundedInt32::checked_new(77).unwrap();
let f: f64 = x.into();
assert_approx_eq!(f64, f, 77.0);
}
#[test]
fn bounded_from_i32() {
let x: Result<BoundedInt32<-100, 100>, _> = 50.try_into();
let y: i32 = x.unwrap().into();
assert_eq!(y, 50);
let x: Result<BoundedInt32<-100, 100>, _> = 1000.try_into();
assert!(x.is_err());
}
#[test]
fn into_bool() {
let zero: BoundedInt32<0, 1> = BoundedInt32::saturating_from(0);
let one: BoundedInt32<0, 1> = BoundedInt32::saturating_from(1);
let f: bool = zero.into();
let t: bool = one.into();
assert!(!f);
assert!(t);
}
#[test]
fn into_u8() {
let zero: BoundedInt32<0, 255> = BoundedInt32::saturating_from(0);
let one: BoundedInt32<0, 255> = BoundedInt32::saturating_from(1);
let ninety: BoundedInt32<0, 255> = BoundedInt32::saturating_from(90);
let max: BoundedInt32<0, 255> = BoundedInt32::saturating_from(1000);
let a: u8 = zero.into();
let b: u8 = one.into();
let c: u8 = ninety.into();
let d: u8 = max.into();
assert_eq!(a, 0);
assert_eq!(b, 1);
assert_eq!(c, 90);
assert_eq!(d, 255);
}
#[test]
fn into_u32() {
let zero: BoundedInt32<0, 1000> = BoundedInt32::saturating_from(0);
let one: BoundedInt32<0, 1000> = BoundedInt32::saturating_from(1);
let ninety: BoundedInt32<0, 1000> = BoundedInt32::saturating_from(90);
let max: BoundedInt32<0, 1000> = BoundedInt32::saturating_from(1000);
assert_eq!(u32::from(zero), 0);
assert_eq!(u32::from(one), 1);
assert_eq!(u32::from(ninety), 90);
assert_eq!(u32::from(max), 1000);
let zero: BoundedInt32<1, 1000> = BoundedInt32::saturating_from(0);
let one: BoundedInt32<1, 1000> = BoundedInt32::saturating_from(1);
let ninety: BoundedInt32<1, 1000> = BoundedInt32::saturating_from(90);
let max: BoundedInt32<1, 1000> = BoundedInt32::saturating_from(1000);
assert_eq!(u32::from(zero), 1);
assert_eq!(u32::from(one), 1);
assert_eq!(u32::from(ninety), 90);
assert_eq!(u32::from(max), 1000);
}
#[test]
fn try_into_usize() {
let b0: BoundedInt32<-10, 300> = BoundedInt32::saturating_from(0);
let b100: BoundedInt32<-10, 300> = BoundedInt32::saturating_from(100);
let bn5: BoundedInt32<-10, 300> = BoundedInt32::saturating_from(-5);
assert_eq!(usize::try_from(b0), Ok(0_usize));
assert_eq!(usize::try_from(b100), Ok(100_usize));
assert_eq!(usize::try_from(bn5), Err(Error::Negative));
}
#[test]
fn percents() {
type Pct = Percentage<u8>;
let p = Pct::new(100);
assert_eq!(p.as_percent(), 100);
assert_approx_eq!(f64, p.as_fraction(), 1.0);
let p = Pct::new(0);
assert_eq!(p.as_percent(), 0);
assert_approx_eq!(f64, p.as_fraction(), 0.0);
let p = Pct::new(25);
assert_eq!(p.as_percent(), 25);
assert_eq!(p.clone(), p);
assert_approx_eq!(f64, p.as_fraction(), 0.25);
type BPct = Percentage<BoundedInt32<0, 100>>;
assert_eq!(BPct::try_from(99).unwrap().as_percent().get(), 99);
}
#[test]
fn milliseconds() {
type Msec = IntegerMilliseconds<i32>;
let ms = Msec::new(500);
let d: Result<Duration, _> = ms.try_into();
assert_eq!(d.unwrap(), Duration::from_millis(500));
assert_eq!(Duration::try_from(ms * 2).unwrap(), Duration::from_secs(1));
let ms = Msec::new(-100);
let d: Result<Duration, _> = ms.try_into();
assert!(d.is_err());
type BMSec = IntegerMilliseconds<BoundedInt32<0, 1000>>;
let half_sec = BMSec::try_from(500).unwrap();
assert_eq!(
Duration::try_from(half_sec).unwrap(),
Duration::from_millis(500)
);
assert!(BMSec::try_from(1001).is_err());
}
#[test]
fn seconds() {
type Sec = IntegerSeconds<i32>;
let ms = Sec::new(500);
let d: Result<Duration, _> = ms.try_into();
assert_eq!(d.unwrap(), Duration::from_secs(500));
let ms = Sec::new(-100);
let d: Result<Duration, _> = ms.try_into();
assert!(d.is_err());
type BSec = IntegerSeconds<BoundedInt32<0, 3600>>;
let half_hour = BSec::try_from(1800).unwrap();
assert_eq!(
Duration::try_from(half_hour).unwrap(),
Duration::from_secs(1800)
);
assert!(BSec::try_from(9999).is_err());
assert_eq!(half_hour.clone(), half_hour);
}
#[test]
fn days() {
type Days = IntegerDays<i32>;
let t = Days::new(500);
let d: Duration = t.try_into().unwrap();
assert_eq!(d, Duration::from_secs(500 * 86400));
let t = Days::new(-100);
let d: Result<Duration, _> = t.try_into();
assert_eq!(d, Err(Error::Overflow));
let t = IntegerDays::<u64>::new(u64::MAX);
let d: Result<Duration, _> = t.try_into();
assert_eq!(d, Err(Error::Overflow));
type BDays = IntegerDays<BoundedInt32<10, 30>>;
assert_eq!(
BDays::new(17_i32.try_into().unwrap()),
BDays::try_from(17).unwrap()
);
}
#[test]
fn sendme() {
let smv = SendMeVersion::new(5);
assert_eq!(smv.get(), 5);
assert_eq!(smv.clone().get(), 5);
assert_eq!(smv, SendMeVersion::try_from(5).unwrap());
}
}
| {
val.value
} |
index.js | import EventEmitter from 'events';
import Promise from 'bluebird';
import config from '../config';
import methods from './methods';
import transports from './transports';
import { camelCase } from '../utils';
import { hash } from '../auth/ecc';
import { signMethod } from '../auth';
import { ops } from '../auth/serializer';
import { jsonRpc } from './transports/http';
import { sign as signRequest } from '@steemit/rpc-auth';
class Scorum extends EventEmitter {
constructor(options = {}) {
super(options);
this._setTransport(options);
this._setLogger(options);
this.options = options;
this.seqNo = 0; // used for rpc calls
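// For every entry in ./methods this loop attaches four helpers to the
// instance: `foo(...args, callback)`, `fooWith(options, callback)`, and
// their promisified `fooAsync` / `fooWithAsync` counterparts.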
methods.forEach(method => {
const methodName = method.method_name || camelCase(method.method);
const methodParams = method.params || [];
this[`${methodName}With`] = (...args) => {
let key;
let options;
let callback;
// When method is called using signed API
if (args.length === 3) {
key = args[0];
options = args[1];
callback = args[2];
} else {
options = args[0];
callback = args[1];
}
let params = methodParams.map(param => options[param]);
if (method.params[1] === 'salt' && method.params[2] === 'signature') {
const salt = Math.random().toString(36).substring(2);
const signature = signMethod(params[0], salt, params[3] || [], key);
params = [params[0], salt, signature, params[3]];
}
return this.send(
method.api,
{
method: method.method,
params,
},
callback
);
};
this[methodName] = (...args) => {
const options = methodParams.reduce((memo, param, i) => {
memo[param] = args[i]; // eslint-disable-line no-param-reassign
return memo;
}, {});
const callback = args[methodParams.length];
return this[`${methodName}With`](options, callback);
};
this[`${methodName}WithAsync`] = Promise.promisify(this[`${methodName}With`]);
this[`${methodName}Async`] = Promise.promisify(this[methodName]);
});
this.callAsync = Promise.promisify(this.call);
this.signedCallAsync = Promise.promisify(this.signedCall);
}
_setTransport(options) {
if (options.url && options.url.match('^((http|https)?://)')) {
options.transport = 'http';
this._transportType = options.transport;
this.options = options;
this.transport = new transports.http(options);
} else if (options.url && options.url.match('^((ws|wss)?://)')) {
throw new TypeError('Invalid `transport`. Library supports only the `http` transport');
} else if (options.transport) {
if (this.transport && this._transportType !== options.transport) {
this.transport.stop();
}
this._transportType = options.transport;
if (typeof options.transport === 'string') {
if (!transports[options.transport]) {
throw new TypeError('Invalid `transport`, valid values are `http` or a class');
}
this.transport = new transports[options.transport](options);
} else {
this.transport = new options.transport(options);
}
} else {
this.transport = new transports.http(options);
}
}
_setLogger(options) {
if (options.hasOwnProperty('logger')) {
switch (typeof options.logger) {
case 'function':
this.__logger = {
log: options.logger
};
break;
case 'object':
if (typeof options.logger.log !== 'function') {
throw new Error('setOptions({logger:{}}) must have a property .log of type function');
}
this.__logger = options.logger;
break;
case 'undefined':
if (this.__logger) break;
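// fall through: no logger configured previously, so disable logging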
default:
this.__logger = false;
}
}
}
log(logLevel) {
if (this.__logger) {
if (arguments.length > 1 && typeof this.__logger[logLevel] === 'function') {
let args = Array.prototype.slice.call(arguments, 1);
this.__logger[logLevel].apply(this.__logger, args);
} else {
this.__logger.log.apply(this.__logger, arguments);
}
}
}
start() {
return this.transport.start();
}
stop() {
return this.transport.stop();
}
send(api, data, callback) {
var cb = callback;
if (this.__logger) {
let id = Math.random();
let self = this;
this.log('xmit:' + id + ':', data);
cb = function(e, d) {
if (e) {
self.log('error', 'rsp:' + id + ':\n\n', e, d);
} else {
self.log('rsp:' + id + ':', d);
}
if (callback) {
callback.apply(self, arguments);
}
};
}
return this.transport.send(api, data, cb);
}
call(method, params, callback) {
if (this._transportType !== 'http') {
callback(new Error('RPC methods can only be called when using http transport'));
return;
}
const id = ++this.seqNo;
jsonRpc(this.options.url, { method, params, id }).then(
res => {
callback(null, res);
},
err => {
callback(err);
}
);
}
signedCall(method, params, account, key, callback) {
if (this._transportType !== 'http') {
callback(new Error('RPC methods can only be called when using http transport'));
return;
}
const id = ++this.seqNo;
let request;
try {
request = signRequest({ method, params, id }, account, [key]);
} catch (error) {
callback(error);
return;
}
jsonRpc(this.options.url, request).then(
res => {
callback(null, res);
},
err => {
callback(err);
}
);
}
setOptions(options) {
Object.assign(this.options, options);
this._setLogger(options);
this._setTransport(options);
this.transport.setOptions(options);
}
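// Polls getDynamicGlobalProperties every `ts` milliseconds and invokes
// `callback` once for each new block number (head or last irreversible,
// depending on `mode`); the returned function stops the polling loop.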
streamBlockNumber(mode = 'head', callback, ts = 200) {
if (typeof mode === 'function') {
callback = mode;
mode = 'head';
}
let current = '';
let running = true;
const update = () => {
if (!running) return;
this.getDynamicGlobalPropertiesAsync().then(
result => {
const blockId = mode === 'irreversible' ? result.last_irreversible_block_num : result.head_block_number;
if (blockId !== current) {
if (current) {
for (let i = current; i < blockId; i++) {
if (i !== current) {
callback(null, i);
}
current = i;
}
} else {
current = blockId;
callback(null, blockId);
}
}
Promise.delay(ts).then(() => {
update();
});
},
err => {
callback(err);
}
);
};
update();
return () => {
running = false;
};
}
streamBlock(mode = 'head', callback) {
if (typeof mode === 'function') {
callback = mode;
mode = 'head';
}
let current = '';
let last = '';
const release = this.streamBlockNumber(mode, (err, id) => {
if (err) {
release();
callback(err); | if (current !== last) {
last = current;
this.getBlock(current, callback);
}
});
return release;
}
streamTransactions(mode = 'head', callback) {
if (typeof mode === 'function') {
callback = mode;
mode = 'head';
}
const release = this.streamBlock(mode, (err, result) => {
if (err) {
release();
callback(err);
return;
}
if (result && result.transactions) {
result.transactions.forEach(transaction => {
callback(null, transaction);
});
}
});
return release;
}
streamOperations(mode = 'head', callback) {
if (typeof mode === 'function') {
callback = mode;
mode = 'head';
}
const release = this.streamTransactions(mode, (err, transaction) => {
if (err) {
release();
callback(err);
return;
}
transaction.operations.forEach(operation => {
callback(null, operation);
});
});
return release;
}
broadcastTransactionSynchronousWith(options, callback) {
const trx = options.trx;
return this.send(
'network_broadcast_api',
{
method: 'broadcast_transaction_synchronous',
params: [trx]
},
(err, result) => {
if (err) {
const { signed_transaction } = ops;
// toObject converts objects into serializable types
const trObject = signed_transaction.toObject(trx);
const buf = signed_transaction.toBuffer(trx);
err.digest = hash.sha256(buf).toString('hex');
err.transaction_id = buf.toString('hex');
err.transaction = JSON.stringify(trObject);
callback(err, '');
} else {
callback('', result);
}
}
);
}
}
// Export singleton instance
const scorum = new Scorum(config);
exports = module.exports = scorum;
exports.Scorum = Scorum; | return;
}
current = id; |
lib.rs | #![deny(missing_docs)]
#![warn(rust_2018_idioms)]
#![doc(html_root_url = "https://docs.rs/amq-protocol-codegen/6.0.0-alpha2/")]
//! # AMQP code generation utilities | //! amq-protocol-codegen is a library aiming at providing tools to generate
//! code from official AMQP specs definition.
mod internal;
mod specs;
mod templating;
mod util;
pub use crate::{specs::*, templating::*, util::*}; | //! |
PrivateRoute.js | import React from 'react';
import { Route, Redirect } from 'react-router-dom';
import { useCookies } from 'react-cookie';
const PrivateRoute = ({ component: Component, ...rest }) => {
const [ cookies ] = useCookies([process.env.REACT_APP_USER_COOKIE_NAME]);
return (
<Route {...rest} render={props => {
//cookies is an empty object if no cookies are set
//Need to test if cookies object contains any properties
//by getting an array of its keys and checking if the length
//is greater than 0
//Also test for the object's constructor to confirm it was Object
//This tests for an edge case where a new Date object would
//give 0 when checking this length and will help be more
//confident the object is truly empty
if (Object.keys(cookies).length === 0 && cookies.constructor === Object) {
return <Redirect to='/login' />
} else {
return <Component {...props} currentUser={cookies[process.env.REACT_APP_USER_COOKIE_NAME]}/>
} | }
} />
);
};
export default PrivateRoute; | |
objectTypeTableChildRow.ts | export class | {
private _referenceType: string;
private _nodeClass: string;
private _browsename: string;
private _datatype: string;
private _description: string;
get description(): string {
return this._description;
}
set description(value: string) {
this._description = value;
}
get referenceType(): string {
return this._referenceType;
}
set referenceType(value: string) {
this._referenceType = value;
}
get nodeClass(): string {
return this._nodeClass;
}
set nodeClass(value: string) {
this._nodeClass = value;
}
get browsename(): string {
return this._browsename;
}
set browsename(value: string) {
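// Presumably strips a leading namespace-index prefix (e.g. "1:Name" -> "Name");
// only values starting with "1" are rewritten here.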
if (value.startsWith('1') == true) {
this._browsename = value.substring(2);
} else {
this._browsename = value;
}
}
get datatype(): string {
return this._datatype;
}
set datatype(value: string) {
this._datatype = value;
}
get typedefinition(): string {
return this._typedefinition;
}
set typedefinition(value: string) {
this._typedefinition = value;
}
get modelingrule(): string {
return this._modelingrule;
}
set modelingrule(value: string) {
this._modelingrule = value;
}
private _typedefinition: string;
private _modelingrule: string;
} | objectTypeTableChildRow |
token.rs | use std::convert::TryFrom;
use crate::Result;
use crate::error::Error;
use crate::cli::constants::*;
// II.22
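/// A coded metadata token: the high byte selects the metadata table and the
/// low 24 bits hold the 1-based row index (ECMA-335, II.22).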
#[derive(Debug, PartialEq, Eq, Copy, Clone, Default)]
pub struct MetadataToken(u32);
impl TryFrom<u32> for MetadataToken {
type Error = crate::error::Error; | Ok(MetadataToken(x))
} else {
Err(Error::General("Unknown metadata table in possible token."))
}
}
}
impl MetadataToken {
pub fn table_index(&self) -> usize {
table_index(self.0)
}
pub fn row_index(&self) -> usize {
((self.0 & 0xFFFFFF) - 1) as usize
}
}
fn table_index(x: u32) -> usize {
(x >> 24) as usize
} |
fn try_from(x: u32) -> Result<Self> {
let idx = table_index(x);
if idx >= METADATA_MODULE && idx <= METADATA_GENERIC_PARAM_CONSTRAINT { |
idc.py | #!/usr/bin/env python
#---------------------------------------------------------------------
# IDAPython - Python plugin for Interactive Disassembler
#
# Original IDC.IDC:
# Copyright (c) 1990-2010 Ilfak Guilfanov
#
# Python conversion:
# Copyright (c) 2004-2010 Gergely Erdelyi <[email protected]>
#
# All rights reserved.
#
# For detailed copyright information see the file COPYING in
# the root of the distribution archive.
#---------------------------------------------------------------------
# idc.py - IDC compatibility module
#---------------------------------------------------------------------
"""
IDC compatibility module
This file contains IDA built-in function declarations and internal bit
definitions. Each byte of the program has 32-bit flags (low 8 bits keep
the byte value). These 32 bits are used in get_full_flags/get_flags functions.
This file is subject to change without any notice.
Future versions of IDA may use other definitions.
"""
from __future__ import print_function
# FIXME: Perhaps those should be loaded on-demand
import ida_idaapi
import ida_auto
import ida_dbg
import ida_diskio
import ida_entry
import ida_enum
import ida_expr
import ida_fixup
import ida_frame
import ida_funcs
import ida_gdl
import ida_ida
import ida_idc
import ida_bytes
import ida_idd
import ida_idp
import ida_kernwin
import ida_lines
import ida_loader
import ida_moves
import ida_nalt
import ida_name
import ida_netnode
import ida_offset
import ida_pro
import ida_search
import ida_segment
import ida_segregs
import ida_struct
import ida_typeinf
import ida_ua
import ida_xref
import _ida_idaapi
import os
import re
import struct
import time
import types
import sys
__EA64__ = ida_idaapi.BADADDR == 0xFFFFFFFFFFFFFFFF
WORDMASK = 0xFFFFFFFFFFFFFFFF if __EA64__ else 0xFFFFFFFF # just there for bw-compat purposes; please don't use
class DeprecatedIDCError(Exception):
"""
Exception for deprecated function calls
"""
pass
__warned_deprecated_proto_confusion = {}
def __warn_once_deprecated_proto_confusion(what, alternative):
if what not in __warned_deprecated_proto_confusion:
print("NOTE: idc.%s is deprecated due to signature confusion with %s. Please use %s instead" % (
what,
alternative,
alternative))
__warned_deprecated_proto_confusion[what] = True
def _IDC_GetAttr(obj, attrmap, attroffs):
"""
Internal function to generically get object attributes
Do not use unless you know what you are doing
"""
if attroffs in attrmap and hasattr(obj, attrmap[attroffs][1]):
return getattr(obj, attrmap[attroffs][1])
else:
errormsg = "attribute with offset %d not found, check the offset and report the problem" % attroffs
raise KeyError(errormsg)
def _IDC_SetAttr(obj, attrmap, attroffs, value):
"""
Internal function to generically set object attributes
Do not use unless you know what you are doing
"""
# check for read-only attributes
if attroffs in attrmap:
if attrmap[attroffs][0]:
raise KeyError("attribute with offset %d is read-only" % attroffs)
elif hasattr(obj, attrmap[attroffs][1]):
return setattr(obj, attrmap[attroffs][1], value)
errormsg = "attribute with offset %d not found, check the offset and report the problem" % attroffs
raise KeyError(errormsg)
BADADDR = ida_idaapi.BADADDR # Not allowed address value
BADSEL = ida_idaapi.BADSEL # Not allowed selector value/number
SIZE_MAX = _ida_idaapi.SIZE_MAX
ida_ida.__set_module_dynattrs(
__name__,
{
"MAXADDR" : (lambda: ida_ida.inf_get_privrange_start_ea(), None),
})
#
# Flag bit definitions (for get_full_flags())
#
MS_VAL = ida_bytes.MS_VAL # Mask for byte value
FF_IVL = ida_bytes.FF_IVL # Byte has value ?
# Do flags contain byte value? (i.e. has the byte a value?)
# if not, the byte is uninitialized.
def has_value(F): return ((F & FF_IVL) != 0) # any defined value?
def byte_value(F):
"""
Get byte value from flags
Get value of byte provided that the byte is initialized.
This macro works ok only for 8-bit byte machines.
"""
return (F & MS_VAL)
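# For example, for an initialized byte whose value is 0x41, get_full_flags(ea)
# has FF_IVL set and byte_value() returns 0x41, while the MS_CLS bits of the
# same flags word are what is_code()/is_data()/is_tail()/is_unknown() test below.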
def is_loaded(ea):
"""Is the byte initialized?"""
return has_value(get_full_flags(ea)) # any defined value?
MS_CLS = ida_bytes.MS_CLS # Mask for typing
FF_CODE = ida_bytes.FF_CODE # Code ?
FF_DATA = ida_bytes.FF_DATA # Data ?
FF_TAIL = ida_bytes.FF_TAIL # Tail ?
FF_UNK = ida_bytes.FF_UNK # Unknown ?
def is_code(F): return ((F & MS_CLS) == FF_CODE) # is code byte?
def is_data(F): return ((F & MS_CLS) == FF_DATA) # is data byte?
def is_tail(F): return ((F & MS_CLS) == FF_TAIL) # is tail byte?
def is_unknown(F): return ((F & MS_CLS) == FF_UNK) # is unexplored byte?
def is_head(F): return ((F & FF_DATA) != 0) # is start of code/data?
#
# Common bits
#
MS_COMM = ida_bytes.MS_COMM # Mask of common bits
FF_COMM = ida_bytes.FF_COMM # Has comment?
FF_REF = ida_bytes.FF_REF # has references?
FF_LINE = ida_bytes.FF_LINE # Has next or prev cmt lines ?
FF_NAME = ida_bytes.FF_NAME # Has user-defined name ?
FF_LABL = ida_bytes.FF_LABL # Has dummy name?
FF_FLOW = ida_bytes.FF_FLOW # Exec flow from prev instruction?
FF_ANYNAME = FF_LABL | FF_NAME
def is_flow(F): return ((F & FF_FLOW) != 0)
def isExtra(F): return ((F & FF_LINE) != 0)
def isRef(F): return ((F & FF_REF) != 0)
def hasName(F): return ((F & FF_NAME) != 0)
def hasUserName(F): return ((F & FF_ANYNAME) == FF_NAME)
MS_0TYPE = ida_bytes.MS_0TYPE # Mask for 1st arg typing
FF_0VOID = ida_bytes.FF_0VOID # Void (unknown)?
FF_0NUMH = ida_bytes.FF_0NUMH # Hexadecimal number?
FF_0NUMD = ida_bytes.FF_0NUMD # Decimal number?
FF_0CHAR = ida_bytes.FF_0CHAR # Char ('x')?
FF_0SEG = ida_bytes.FF_0SEG # Segment?
FF_0OFF = ida_bytes.FF_0OFF # Offset?
FF_0NUMB = ida_bytes.FF_0NUMB # Binary number?
FF_0NUMO = ida_bytes.FF_0NUMO # Octal number?
FF_0ENUM = ida_bytes.FF_0ENUM # Enumeration?
FF_0FOP = ida_bytes.FF_0FOP # Forced operand?
FF_0STRO = ida_bytes.FF_0STRO # Struct offset?
FF_0STK = ida_bytes.FF_0STK # Stack variable?
MS_1TYPE = ida_bytes.MS_1TYPE # Mask for 2nd arg typing
FF_1VOID = ida_bytes.FF_1VOID # Void (unknown)?
FF_1NUMH = ida_bytes.FF_1NUMH # Hexadecimal number?
FF_1NUMD = ida_bytes.FF_1NUMD # Decimal number?
FF_1CHAR = ida_bytes.FF_1CHAR # Char ('x')?
FF_1SEG = ida_bytes.FF_1SEG # Segment?
FF_1OFF = ida_bytes.FF_1OFF # Offset?
FF_1NUMB = ida_bytes.FF_1NUMB # Binary number?
FF_1NUMO = ida_bytes.FF_1NUMO # Octal number?
FF_1ENUM = ida_bytes.FF_1ENUM # Enumeration?
FF_1FOP = ida_bytes.FF_1FOP # Forced operand?
FF_1STRO = ida_bytes.FF_1STRO # Struct offset?
FF_1STK = ida_bytes.FF_1STK # Stack variable?
# The following macros answer questions like
# 'is the 1st (or 2nd) operand of instruction or data of the given type'?
# Please note that data items use only the 1st operand type (is...0)
def is_defarg0(F): return ((F & MS_0TYPE) != FF_0VOID)
def is_defarg1(F): return ((F & MS_1TYPE) != FF_1VOID)
def isDec0(F): return ((F & MS_0TYPE) == FF_0NUMD)
def isDec1(F): return ((F & MS_1TYPE) == FF_1NUMD)
def isHex0(F): return ((F & MS_0TYPE) == FF_0NUMH)
def isHex1(F): return ((F & MS_1TYPE) == FF_1NUMH)
def isOct0(F): return ((F & MS_0TYPE) == FF_0NUMO)
def isOct1(F): return ((F & MS_1TYPE) == FF_1NUMO)
def isBin0(F): return ((F & MS_0TYPE) == FF_0NUMB)
def isBin1(F): return ((F & MS_1TYPE) == FF_1NUMB)
def is_off0(F): return ((F & MS_0TYPE) == FF_0OFF)
def is_off1(F): return ((F & MS_1TYPE) == FF_1OFF)
def is_char0(F): return ((F & MS_0TYPE) == FF_0CHAR)
def is_char1(F): return ((F & MS_1TYPE) == FF_1CHAR)
def is_seg0(F): return ((F & MS_0TYPE) == FF_0SEG)
def is_seg1(F): return ((F & MS_1TYPE) == FF_1SEG)
def is_enum0(F): return ((F & MS_0TYPE) == FF_0ENUM)
def is_enum1(F): return ((F & MS_1TYPE) == FF_1ENUM)
def is_manual0(F): return ((F & MS_0TYPE) == FF_0FOP)
def is_manual1(F): return ((F & MS_1TYPE) == FF_1FOP)
def is_stroff0(F): return ((F & MS_0TYPE) == FF_0STRO)
def is_stroff1(F): return ((F & MS_1TYPE) == FF_1STRO)
def is_stkvar0(F): return ((F & MS_0TYPE) == FF_0STK)
def is_stkvar1(F): return ((F & MS_1TYPE) == FF_1STK)
#
# Bits for DATA bytes
#
DT_TYPE = ida_bytes.DT_TYPE & 0xFFFFFFFF # Mask for DATA typing
FF_BYTE = ida_bytes.FF_BYTE & 0xFFFFFFFF # byte
FF_WORD = ida_bytes.FF_WORD & 0xFFFFFFFF # word
FF_DWORD = ida_bytes.FF_DWORD & 0xFFFFFFFF # dword
FF_QWORD = ida_bytes.FF_QWORD & 0xFFFFFFFF # qword
FF_TBYTE = ida_bytes.FF_TBYTE & 0xFFFFFFFF # tbyte
FF_STRLIT = ida_bytes.FF_STRLIT & 0xFFFFFFFF # ASCII ?
FF_STRUCT = ida_bytes.FF_STRUCT & 0xFFFFFFFF # Struct ?
FF_OWORD = ida_bytes.FF_OWORD & 0xFFFFFFFF # octaword (16 bytes)
FF_FLOAT = ida_bytes.FF_FLOAT & 0xFFFFFFFF # float
FF_DOUBLE = ida_bytes.FF_DOUBLE & 0xFFFFFFFF # double
FF_PACKREAL = ida_bytes.FF_PACKREAL & 0xFFFFFFFF # packed decimal real
FF_ALIGN = ida_bytes.FF_ALIGN & 0xFFFFFFFF # alignment directive
def is_byte(F): return (is_data(F) and (F & DT_TYPE) == FF_BYTE)
def is_word(F): return (is_data(F) and (F & DT_TYPE) == FF_WORD)
def is_dword(F): return (is_data(F) and (F & DT_TYPE) == FF_DWORD)
def is_qword(F): return (is_data(F) and (F & DT_TYPE) == FF_QWORD)
def is_oword(F): return (is_data(F) and (F & DT_TYPE) == FF_OWORD)
def is_tbyte(F): return (is_data(F) and (F & DT_TYPE) == FF_TBYTE)
def is_float(F): return (is_data(F) and (F & DT_TYPE) == FF_FLOAT)
def is_double(F): return (is_data(F) and (F & DT_TYPE) == FF_DOUBLE)
def is_pack_real(F): return (is_data(F) and (F & DT_TYPE) == FF_PACKREAL)
def is_strlit(F): return (is_data(F) and (F & DT_TYPE) == FF_STRLIT)
def is_struct(F): return (is_data(F) and (F & DT_TYPE) == FF_STRUCT)
def is_align(F): return (is_data(F) and (F & DT_TYPE) == FF_ALIGN)
#
# Bits for CODE bytes
#
MS_CODE = ida_bytes.MS_CODE & 0xFFFFFFFF
FF_FUNC = ida_bytes.FF_FUNC & 0xFFFFFFFF # function start?
FF_IMMD = ida_bytes.FF_IMMD & 0xFFFFFFFF # Has Immediate value ?
FF_JUMP = ida_bytes.FF_JUMP & 0xFFFFFFFF # Has jump table
#
# Loader flags
#
if ida_idaapi.uses_swig_builtins:
_scope = ida_loader.loader_t
else:
_scope = ida_loader
NEF_SEGS = _scope.NEF_SEGS # Create segments
NEF_RSCS = _scope.NEF_RSCS # Load resources
NEF_NAME = _scope.NEF_NAME # Rename entries
NEF_MAN = _scope.NEF_MAN # Manual load
NEF_FILL = _scope.NEF_FILL # Fill segment gaps
NEF_IMPS = _scope.NEF_IMPS # Create imports section
NEF_FIRST = _scope.NEF_FIRST # This is the first file loaded
NEF_CODE = _scope.NEF_CODE # for load_binary_file:
NEF_RELOAD = _scope.NEF_RELOAD # reload the file at the same place:
NEF_FLAT = _scope.NEF_FLAT # Autocreated FLAT group (PE)
# List of built-in functions
# --------------------------
#
# The following conventions are used in this list:
# 'ea' is a linear address
# 'success' is 0 if a function failed, 1 otherwise
# 'void' means that function returns no meaningful value (always 0)
#
# All function parameter conversions are made automatically.
#
# ----------------------------------------------------------------------------
# M I S C E L L A N E O U S
# ----------------------------------------------------------------------------
def value_is_string(var): raise NotImplementedError("this function is not needed in Python")
def value_is_long(var): raise NotImplementedError("this function is not needed in Python")
def value_is_float(var): raise NotImplementedError("this function is not needed in Python")
def value_is_func(var): raise NotImplementedError("this function is not needed in Python")
def value_is_pvoid(var): raise NotImplementedError("this function is not needed in Python")
def value_is_int64(var): raise NotImplementedError("this function is not needed in Python")
def to_ea(seg, off):
"""
Return value of expression: ((seg<<4) + off)
"""
return (seg << 4) + off
def form(format, *args):
raise DeprecatedIDCError("form() is deprecated. Use python string operations instead.")
def substr(s, x1, x2):
raise DeprecatedIDCError("substr() is deprecated. Use python string operations instead.")
def strstr(s1, s2):
raise DeprecatedIDCError("strstr() is deprecated. Use python string operations instead.")
def strlen(s):
raise DeprecatedIDCError("strlen() is deprecated. Use python string operations instead.")
def xtol(s):
raise DeprecatedIDCError("xtol() is deprecated. Use python long() instead.")
def atoa(ea):
"""
Convert address value to a string
Return address in the form 'seg000:1234'
(the same as in line prefixes)
@param ea: address to format
"""
return ida_kernwin.ea2str(ea)
def ltoa(n, radix):
raise DeprecatedIDCError("ltoa() is deprecated. Use python string operations instead.")
def atol(s):
raise DeprecatedIDCError("atol() is deprecated. Use python long() instead.")
def rotate_left(value, count, nbits, offset):
"""
Rotate a value to the left (or right)
@param value: value to rotate
@param count: number of times to rotate. negative counter means
rotate to the right
@param nbits: number of bits to rotate
@param offset: offset of the first bit to rotate
@return: the value with the specified field rotated
all other bits are not modified
"""
assert offset >= 0, "offset must be >= 0"
assert nbits > 0, "nbits must be > 0"
mask = 2**(offset+nbits) - 2**offset
tmp = value & mask
if count > 0:
for x in range(count):
if (tmp >> (offset+nbits-1)) & 1:
tmp = (tmp << 1) | (1 << offset)
else:
tmp = (tmp << 1)
else:
for x in range(-count):
if (tmp >> offset) & 1:
tmp = (tmp >> 1) | (1 << (offset+nbits-1))
else:
tmp = (tmp >> 1)
value = (value-(value&mask)) | (tmp & mask)
return value
def rotate_dword(x, count): return rotate_left(x, count, 32, 0)
def rotate_word(x, count): return rotate_left(x, count, 16, 0)
def rotate_byte(x, count): return rotate_left(x, count, 8, 0)
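# Quick sanity check of the rotate helpers above (values worked out by hand):
#   rotate_byte(0b10000001, 1)  == 0b00000011
#   rotate_byte(0b10000001, -1) == 0b11000000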
# add_idc_hotkey return codes
IDCHK_OK = 0 # ok
IDCHK_ARG = -1 # bad argument(s)
IDCHK_KEY = -2 # bad hotkey name
IDCHK_MAX = -3 # too many IDC hotkeys
add_idc_hotkey = ida_kernwin.add_idc_hotkey
del_idc_hotkey = ida_kernwin.del_idc_hotkey
jumpto = ida_kernwin.jumpto
auto_wait = ida_auto.auto_wait
def eval_idc(expr):
"""
Evaluate an IDC expression
@param expr: an expression
@return: the expression value. If there are problems, the returned value will be "IDC_FAILURE: xxx"
where xxx is the error description
@note: Python implementation evaluates IDC only, while IDC can call other registered languages
"""
rv = ida_expr.idc_value_t()
err = ida_expr.eval_idc_expr(rv, BADADDR, expr)
if err:
return "IDC_FAILURE: "+err
else:
if rv.vtype == '\x02': # long
return rv.num
elif rv.vtype == '\x07': # VT_STR
return rv.c_str()
else:
raise NotImplementedError("eval_idc() supports only expressions returning strings or longs")
def EVAL_FAILURE(code):
"""
Check the result of eval_idc() for evaluation failures
@param code: result of eval_idc()
@return: True if there was an evaluation error
"""
return type(code) == str and code.startswith("IDC_FAILURE: ")
def save_database(idbname, flags=0):
"""
Save current database to the specified idb file
@param idbname: name of the idb file. if empty, the current idb
file will be used.
@param flags: combination of ida_loader.DBFL_... bits or 0
"""
if len(idbname) == 0:
idbname = get_idb_path()
mask = ida_loader.DBFL_KILL | ida_loader.DBFL_COMP | ida_loader.DBFL_BAK
return ida_loader.save_database(idbname, flags & mask)
DBFL_BAK = ida_loader.DBFL_BAK # for compatibility with older versions, eventually delete this
def validate_idb_names(do_repair = 0):
"""
check consistency of IDB name records
@param do_repair: try to repair netnode header if TRUE
@return: number of inconsistent name records
"""
return ida_nalt.validate_idb_names(do_repair)
qexit = ida_pro.qexit
def call_system(command):
"""
Execute an OS command.
@param command: command line to execute
@return: error code from OS
@note:
IDA will wait for the started program to finish.
In order to start the command in parallel, use OS methods.
For example, you may start another program in parallel using
"start" command.
"""
return os.system(command)
def qsleep(milliseconds):
"""
qsleep the specified number of milliseconds
This function suspends IDA for the specified amount of time
@param milliseconds: time to sleep
"""
time.sleep(float(milliseconds)/1000)
load_and_run_plugin = ida_loader.load_and_run_plugin
plan_to_apply_idasgn = ida_funcs.plan_to_apply_idasgn
#----------------------------------------------------------------------------
# C H A N G E P R O G R A M R E P R E S E N T A T I O N
#----------------------------------------------------------------------------
def delete_all_segments():
"""
Delete all segments, instructions, comments, i.e. everything
except values of bytes.
"""
ea = ida_ida.cvar.inf.min_ea
# Brute-force nuke all info from all the heads
while ea != BADADDR and ea <= ida_ida.cvar.inf.max_ea:
ida_name.del_local_name(ea)
ida_name.del_global_name(ea)
func = ida_funcs.get_func(ea)
if func:
ida_funcs.set_func_cmt(func, "", False)
ida_funcs.set_func_cmt(func, "", True)
ida_funcs.del_func(ea)
ida_bytes.del_hidden_range(ea)
seg = ida_segment.getseg(ea)
if seg:
ida_segment.set_segment_cmt(seg, "", False)
ida_segment.set_segment_cmt(seg, "", True)
ida_segment.del_segm(ea, ida_segment.SEGMOD_KEEP | ida_segment.SEGMOD_SILENT)
ea = ida_bytes.next_head(ea, ida_ida.cvar.inf.max_ea)
create_insn = ida_ua.create_insn
def plan_and_wait(sEA, eEA, final_pass=True):
"""
Perform full analysis of the range
@param sEA: starting linear address
@param eEA: ending linear address (excluded)
@param final_pass: make the final pass over the specified range
@return: 1-ok, 0-Ctrl-Break was pressed.
"""
return ida_auto.plan_and_wait(sEA, eEA, final_pass)
def set_name(ea, name, flags=ida_name.SN_CHECK):
"""
Rename an address
@param ea: linear address
@param name: new name of address. If name == "", then delete old name
@param flags: combination of SN_... constants
@return: 1-ok, 0-failure
"""
return ida_name.set_name(ea, name, flags)
SN_CHECK = ida_name.SN_CHECK
SN_NOCHECK = ida_name.SN_NOCHECK # Don't fail if the name contains invalid characters.
# If this bit is clear, all invalid chars
# (those !is_ident_cp()) will be replaced
# by SUBSTCHAR (usually '_').
# List of valid characters is defined in ida.cfg
SN_PUBLIC = ida_name.SN_PUBLIC # if set, make name public
SN_NON_PUBLIC = ida_name.SN_NON_PUBLIC # if set, make name non-public
SN_WEAK = ida_name.SN_WEAK # if set, make name weak
SN_NON_WEAK = ida_name.SN_NON_WEAK # if set, make name non-weak
SN_AUTO = ida_name.SN_AUTO # if set, make name autogenerated
SN_NON_AUTO = ida_name.SN_NON_AUTO # if set, make name non-autogenerated
SN_NOLIST = ida_name.SN_NOLIST # if set, exclude name from the list
# if not set, then include the name into
# the list (however, if other bits are set,
# the name might be immediately excluded
# from the list)
SN_NOWARN = ida_name.SN_NOWARN # don't display a warning if failed
SN_LOCAL = ida_name.SN_LOCAL # create local name. a function should exist.
# local names can't be public or weak.
# also they are not included into the list
# of names they can't have dummy prefixes
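# Usage sketch (not part of the original API; added for illustration): rename an
# address with the SN_... flags above, sanitizing invalid characters and
# suppressing the warning dialog. The address and name are caller-supplied.
def _example_rename_quietly(ea, name):
    return set_name(ea, name, SN_NOCHECK | SN_NOWARN) != 0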
set_cmt = ida_bytes.set_cmt
def make_array(ea, nitems):
"""
Create an array.
@param ea: linear address
@param nitems: size of array in items
@note: This function will create an array of the items with the same type as
the type of the item at 'ea'. If the byte at 'ea' is undefined, then
this function will create an array of bytes.
"""
flags = ida_bytes.get_flags(ea)
if ida_bytes.is_code(flags) or ida_bytes.is_tail(flags) or ida_bytes.is_align(flags):
return False
if ida_bytes.is_unknown(flags):
flags = ida_bytes.FF_BYTE
if ida_bytes.is_struct(flags):
ti = ida_nalt.opinfo_t()
assert ida_bytes.get_opinfo(ti, ea, 0, flags), "get_opinfo() failed"
itemsize = ida_bytes.get_data_elsize(ea, flags, ti)
tid = ti.tid
else:
itemsize = ida_bytes.get_item_size(ea)
tid = BADADDR
return ida_bytes.create_data(ea, flags, itemsize*nitems, tid)
def create_strlit(ea, endea):
"""
Create a string.
This function creates a string (the string type is determined by the
value of get_inf_attr(INF_STRTYPE))
@param ea: linear address
@param endea: ending address of the string (excluded)
if endea == BADADDR, then length of string will be calculated
by the kernel
@return: 1-ok, 0-failure
@note: The type of an existing string is returned by get_str_type()
"""
return ida_bytes.create_strlit(ea, 0 if endea == BADADDR else endea - ea, get_inf_attr(INF_STRTYPE))
create_data = ida_bytes.create_data
def create_byte(ea):
"""
Convert the current item to a byte
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_byte(ea, 1)
def create_word(ea):
"""
Convert the current item to a word (2 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_word(ea, 2)
def create_dword(ea):
"""
Convert the current item to a double word (4 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_dword(ea, 4)
def create_qword(ea):
"""
    Convert the current item to a quad word (8 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_qword(ea, 8)
def create_oword(ea):
"""
Convert the current item to an octa word (16 bytes/128 bits)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_oword(ea, 16)
def create_yword(ea):
"""
Convert the current item to a ymm word (32 bytes/256 bits)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_yword(ea, 32)
def create_float(ea):
"""
Convert the current item to a floating point (4 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_float(ea, 4)
def create_double(ea):
"""
Convert the current item to a double floating point (8 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_double(ea, 8)
def create_pack_real(ea):
"""
Convert the current item to a packed real (10 or 12 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_packed_real(ea, ida_idp.ph_get_tbyte_size())
def create_tbyte(ea):
"""
Convert the current item to a tbyte (10 or 12 bytes)
@param ea: linear address
@return: 1-ok, 0-failure
"""
return ida_bytes.create_tbyte(ea, ida_idp.ph_get_tbyte_size())
def create_struct(ea, size, strname):
"""
Convert the current item to a structure instance
@param ea: linear address
@param size: structure size in bytes. -1 means that the size
will be calculated automatically
@param strname: name of a structure type
@return: 1-ok, 0-failure
"""
strid = ida_struct.get_struc_id(strname)
if size == -1:
size = ida_struct.get_struc_size(strid)
return ida_bytes.create_struct(ea, size, strid)
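# Usage sketch (illustration only, hypothetical structure name): apply an
# existing structure type at an address and report the resulting item size.
def _example_apply_struct(ea, strname="MYSTRUCT"):
    if ida_struct.get_struc_id(strname) == BADADDR:
        return 0  # the structure type does not exist in this IDB
    if not create_struct(ea, -1, strname):
        return 0
    return get_item_size(ea)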
create_custom_data = ida_bytes.create_custdata
create_align = ida_bytes.create_align
def define_local_var(start, end, location, name):
"""
Create a local variable
@param start: start of address range for the local variable
@param end: end of address range for the local variable
@param location: the variable location in the "[bp+xx]" form where xx is
a number. The location can also be specified as a
register name.
@param name: name of the local variable
@return: 1-ok, 0-failure
    @note: For stack variables the end address is ignored.
           If there is no function at 'start' then this function
           will fail.
"""
func = ida_funcs.get_func(start)
if not func:
return 0
# Find out if location is in the [bp+xx] form
    r = re.compile(r"\[([a-z]+)([-+][0-9a-fx]+)", re.IGNORECASE)
m = r.match(location)
if m:
# Location in the form of [bp+xx]
register = ida_idp.str2reg(m.group(1))
if register == -1:
return 0
offset = int(m.group(2), 0)
return 1 if ida_frame.define_stkvar(func, name, offset, ida_bytes.byte_flag(), None, 1) else 0
else:
# Location as simple register name
return ida_frame.add_regvar(func, start, end, location, name, None)
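# Usage sketch (illustration only): the two accepted forms of the 'location'
# argument. The register and frame names ("esi", "[bp-4]") are x86-flavoured
# assumptions; a function must already exist at 'func_ea'.
def _example_define_locals(func_ea):
    ok_stack = define_local_var(func_ea, func_ea, "[bp-4]", "local_counter")
    ok_reg = define_local_var(func_ea, func_ea + 0x20, "esi", "cur_item")
    return ok_stack, ok_reg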
del_items = ida_bytes.del_items
DELIT_SIMPLE = ida_bytes.DELIT_SIMPLE # simply undefine the specified item
DELIT_EXPAND = ida_bytes.DELIT_EXPAND # propagate undefined items; for example,
# if removing an instruction removes all
# references to the next instruction, then
# plan to convert the next instruction
# to unexplored too.
DELIT_DELNAMES = ida_bytes.DELIT_DELNAMES # delete any names at the specified address(es)
def set_array_params(ea, flags, litems, align):
"""
Set array representation format
@param ea: linear address
@param flags: combination of AP_... constants or 0
@param litems: number of items per line. 0 means auto
@param align: element alignment
- -1: do not align
- 0: automatic alignment
- other values: element width
@return: 1-ok, 0-failure
"""
return eval_idc("set_array_params(0x%X, 0x%X, %d, %d)"%(ea, flags, litems, align))
AP_ALLOWDUPS = 0x00000001 # use 'dup' construct
AP_SIGNED = 0x00000002 # treats numbers as signed
AP_INDEX = 0x00000004 # display array element indexes as comments
AP_ARRAY = 0x00000008 # reserved (this flag is not stored in database)
AP_IDXBASEMASK = 0x000000F0 # mask for number base of the indexes
AP_IDXDEC = 0x00000000 # display indexes in decimal
AP_IDXHEX = 0x00000010 # display indexes in hex
AP_IDXOCT = 0x00000020 # display indexes in octal
AP_IDXBIN = 0x00000030 # display indexes in binary
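# Usage sketch (illustration only): display the array at 'ea' with hexadecimal
# element indexes, 8 items per line, automatic alignment.
def _example_format_array(ea):
    return set_array_params(ea, AP_INDEX | AP_IDXHEX, 8, 0)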
op_bin = ida_bytes.op_bin
op_oct = ida_bytes.op_oct
op_dec = ida_bytes.op_dec
op_hex = ida_bytes.op_hex
op_chr = ida_bytes.op_chr
def op_plain_offset(ea, n, base):
"""
Convert operand to an offset
(for the explanations of 'ea' and 'n' please see op_bin())
Example:
========
seg000:2000 dw 1234h
and there is a segment at paragraph 0x1000 and there is a data item
within the segment at 0x1234:
seg000:1234 MyString db 'Hello, world!',0
Then you need to specify a linear address of the segment base to
create a proper offset:
op_plain_offset(["seg000",0x2000],0,0x10000);
and you will have:
seg000:2000 dw offset MyString
    Motorola 680x0 processors have a concept of "outer offsets".
    If you want to create an outer offset, combine the operand
    number with the OPND_OUTER bit (defined below).
    Please note that outer offsets are meaningful only for
    Motorola 680x0.
@param ea: linear address
@param n: number of operand
- 0 - the first operand
- 1 - the second, third and all other operands
- -1 - all operands
@param base: base of the offset as a linear address
If base == BADADDR then the current operand becomes non-offset
"""
if base == BADADDR:
return ida_bytes.clr_op_type(ea, n)
else:
return ida_offset.op_plain_offset(ea, n, base)
OPND_OUTER = ida_bytes.OPND_OUTER # outer offset base
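# Usage sketch (illustration only): turn the first operand of the instruction
# at 'ea' into an offset from a hypothetical segment base, mirroring the
# docstring example above.
def _example_make_offset(ea, segbase=0x10000):
    return op_plain_offset(ea, 0, segbase)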
op_offset = ida_offset.op_offset
REF_OFF8 = ida_nalt.REF_OFF8 # 8bit full offset
REF_OFF16 = ida_nalt.REF_OFF16 # 16bit full offset
REF_OFF32 = ida_nalt.REF_OFF32 # 32bit full offset
REF_LOW8 = ida_nalt.REF_LOW8 # low 8bits of 16bit offset
REF_LOW16 = ida_nalt.REF_LOW16 # low 16bits of 32bit offset
REF_HIGH8 = ida_nalt.REF_HIGH8 # high 8bits of 16bit offset
REF_HIGH16 = ida_nalt.REF_HIGH16 # high 16bits of 32bit offset
REF_OFF64 = ida_nalt.REF_OFF64 # 64bit full offset
REFINFO_RVA = 0x10 # based reference (rva)
REFINFO_PASTEND = 0x20 # reference past an item: it may point to a nonexistent address;
# do not destroy alignment directives
REFINFO_NOBASE = 0x80 # offset base is a number
# the base can be any value
# nb: base xrefs are created only if base
# points to the middle of a segment
REFINFO_SUBTRACT = 0x0100 # the reference value is subtracted from
# the base value instead of (as usual)
# being added to it
REFINFO_SIGNEDOP = 0x0200 # the operand value is sign-extended (only
# supported for REF_OFF8/16/32/64)
op_seg = ida_bytes.op_seg
op_num = ida_bytes.op_num
op_flt = ida_bytes.op_flt
op_man = ida_bytes.set_forced_operand
toggle_sign = ida_bytes.toggle_sign
def toggle_bnot(ea, n):
"""
Toggle the bitwise not operator for the operand
@param ea: linear address
@param n: number of operand
- 0 - the first operand
- 1 - the second, third and all other operands
- -1 - all operands
"""
ida_bytes.toggle_bnot(ea, n)
return True
op_enum = ida_bytes.op_enum
def op_stroff(ea, n, strid, delta):
"""
Convert operand to an offset in a structure
@param ea: linear address
@param n: number of operand
- 0 - the first operand
- 1 - the second, third and all other operands
- -1 - all operands
@param strid: id of a structure type
@param delta: struct offset delta. usually 0. denotes the difference
between the structure base and the pointer into the structure.
"""
path = ida_pro.tid_array(1)
path[0] = strid
if isinstance(ea, ida_ua.insn_t):
insn = ea
else:
insn = ida_ua.insn_t()
ida_ua.decode_insn(insn, ea)
return ida_bytes.op_stroff(insn, n, path.cast(), 1, delta)
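# Usage sketch (illustration only, hypothetical structure name): convert the
# first operand of the instruction at 'ea' into a structure member offset.
def _example_op_stroff(ea, strname="MYSTRUCT"):
    strid = ida_struct.get_struc_id(strname)
    if strid == BADADDR:
        return False
    return op_stroff(ea, 0, strid, 0)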
op_stkvar = ida_bytes.op_stkvar
def op_offset_high16(ea, n, target):
"""
Convert operand to a high offset
High offset is the upper 16bits of an offset.
This type is used by TMS320C6 processors (and probably by other
RISC processors too)
@param ea: linear address
@param n: number of operand
- 0 - the first operand
- 1 - the second, third and all other operands
- -1 - all operands
@param target: the full value (all 32bits) of the offset
"""
return ida_offset.op_offset(ea, n, ida_nalt.REF_HIGH16, target)
def MakeVar(ea):
pass
# Every anterior/posterior line has its number.
# Anterior lines have numbers from E_PREV
# Posterior lines have numbers from E_NEXT
E_PREV = ida_lines.E_PREV
E_NEXT = ida_lines.E_NEXT
get_extra_cmt = ida_lines.get_extra_cmt
update_extra_cmt = ida_lines.update_extra_cmt
del_extra_cmt = ida_lines.del_extra_cmt
set_manual_insn = ida_bytes.set_manual_insn
get_manual_insn = ida_bytes.get_manual_insn
patch_dbg_byte = ida_dbg.put_dbg_byte
patch_byte = ida_bytes.patch_byte
patch_word = ida_bytes.patch_word
patch_dword = ida_bytes.patch_dword
patch_qword = ida_bytes.patch_qword
SR_inherit = 1 # value is inherited from the previous range
SR_user = 2 # value is specified by the user
SR_auto = 3 # value is determined by IDA
SR_autostart = 4 # as SR_auto for segment starting address
def split_sreg_range(ea, reg, value, tag=SR_user):
"""
Set value of a segment register.
@param ea: linear address
@param reg: name of a register, like "cs", "ds", "es", etc.
@param value: new value of the segment register.
    @param tag: one of SR_... constants
    @note: IDA keeps track of all the points where segment registers change their
    values. This function allows you to specify the correct value of a segment
    register if IDA is not able to find the correct value.
"""
    reg = ida_idp.str2reg(reg)
if reg >= 0:
return ida_segregs.split_sreg_range(ea, reg, value, tag)
else:
return False
auto_mark_range = ida_auto.auto_mark_range
auto_unmark = ida_auto.auto_unmark
def AutoMark(ea,qtype):
"""
Plan to analyze an address
"""
return auto_mark_range(ea,ea+1,qtype)
AU_UNK = ida_auto.AU_UNK # make unknown
AU_CODE = ida_auto.AU_CODE # convert to instruction
AU_PROC = ida_auto.AU_PROC # make function
AU_USED = ida_auto.AU_USED # reanalyze
AU_LIBF = ida_auto.AU_LIBF # apply a flirt signature (the current signature!)
AU_FINAL = ida_auto.AU_FINAL # coagulate unexplored items
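# Usage sketch (illustration only): queue a range for reanalysis with the
# AU_... constants above and wait for the autoanalyzer to finish.
def _example_reanalyze(start_ea, end_ea):
    auto_mark_range(start_ea, end_ea, AU_USED)
    return plan_and_wait(start_ea, end_ea)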
#----------------------------------------------------------------------------
# P R O D U C E O U T P U T F I L E S
#----------------------------------------------------------------------------
def gen_file(filetype, path, ea1, ea2, flags):
"""
Generate an output file
@param filetype: type of output file. One of OFILE_... symbols. See below.
@param path: the output file path (will be overwritten!)
@param ea1: start address. For some file types this argument is ignored
@param ea2: end address. For some file types this argument is ignored
@param flags: bit combination of GENFLG_...
@returns: number of the generated lines.
-1 if an error occurred
OFILE_EXE: 0-can't generate exe file, 1-ok
"""
f = ida_diskio.fopenWB(path)
if f:
retval = ida_loader.gen_file(filetype, f, ea1, ea2, flags)
ida_diskio.eclose(f)
return retval
else:
return -1
# output file types:
OFILE_MAP = ida_loader.OFILE_MAP
OFILE_EXE = ida_loader.OFILE_EXE
OFILE_IDC = ida_loader.OFILE_IDC
OFILE_LST = ida_loader.OFILE_LST
OFILE_ASM = ida_loader.OFILE_ASM
OFILE_DIF = ida_loader.OFILE_DIF
# output control flags:
GENFLG_MAPSEG = ida_loader.GENFLG_MAPSEG # map: generate map of segments
GENFLG_MAPNAME = ida_loader.GENFLG_MAPNAME # map: include dummy names
GENFLG_MAPDMNG = ida_loader.GENFLG_MAPDMNG # map: demangle names
GENFLG_MAPLOC = ida_loader.GENFLG_MAPLOC # map: include local names
GENFLG_IDCTYPE = ida_loader.GENFLG_IDCTYPE # idc: gen only information about types
GENFLG_ASMTYPE = ida_loader.GENFLG_ASMTYPE # asm&lst: gen information about types too
GENFLG_GENHTML = ida_loader.GENFLG_GENHTML # asm&lst: generate html (gui version only)
GENFLG_ASMINC = ida_loader.GENFLG_ASMINC # asm&lst: gen information only about types
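# Usage sketch (illustration only, hypothetical output path): write the whole
# database as an assembler listing, including type information.
def _example_dump_asm(path="dump.asm"):
    return gen_file(OFILE_ASM,
                    path,
                    get_inf_attr(INF_MIN_EA),
                    get_inf_attr(INF_MAX_EA),
                    GENFLG_ASMTYPE)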
def gen_flow_graph(outfile, title, ea1, ea2, flags):
"""
Generate a flow chart GDL file
@param outfile: output file name. GDL extension will be used
@param title: graph title
@param ea1: beginning of the range to flow chart
@param ea2: end of the range to flow chart.
@param flags: combination of CHART_... constants
@note: If ea2 == BADADDR then ea1 is treated as an address within a function.
That function will be flow charted.
"""
return ida_gdl.gen_flow_graph(outfile, title, None, ea1, ea2, flags)
CHART_PRINT_NAMES = 0x1000 # print labels for each block?
CHART_GEN_GDL = 0x4000 # generate .gdl file (file extension is forced to .gdl)
CHART_WINGRAPH = 0x8000 # call wingraph32 to display the graph
CHART_NOLIBFUNCS = 0x0400 # don't include library functions in the graph
def gen_simple_call_chart(outfile, title, flags):
"""
Generate a function call graph GDL file
@param outfile: output file name. GDL extension will be used
@param title: graph title
@param flags: combination of CHART_GEN_GDL, CHART_WINGRAPH, CHART_NOLIBFUNCS
"""
return ida_gdl.gen_simple_call_chart(outfile, "Generating chart", title, flags)
#----------------------------------------------------------------------------
# C O M M O N I N F O R M A T I O N
#----------------------------------------------------------------------------
def idadir():
"""
Get IDA directory
This function returns the directory where IDA.EXE resides
"""
return ida_diskio.idadir("")
get_root_filename = ida_nalt.get_root_filename
get_input_file_path = ida_nalt.get_input_file_path
set_root_filename = ida_nalt.set_root_filename
def get_idb_path():
"""
Get IDB full path
This function returns full path of the current IDB database
"""
return ida_loader.get_path(ida_loader.PATH_TYPE_IDB)
retrieve_input_file_md5 = ida_nalt.retrieve_input_file_md5
get_full_flags = ida_bytes.get_full_flags
get_db_byte = ida_bytes.get_db_byte
def get_bytes(ea, size, use_dbg = False):
"""
Return the specified number of bytes of the program
@param ea: linear address
@param size: size of buffer in normal 8-bit bytes
@param use_dbg: if True, use debugger memory, otherwise just the database
@return: None on failure
otherwise a string containing the read bytes
"""
if use_dbg:
return ida_idd.dbg_read_memory(ea, size)
else:
return ida_bytes.get_bytes(ea, size)
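# Usage sketch (illustration only): read bytes from the database and render
# them as a hex string; returns "" if the read failed.
def _example_hex_dump(ea, size=16):
    data = get_bytes(ea, size)
    if data is None:
        return ""
    return " ".join("%02X" % b for b in bytearray(data))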
get_wide_byte = ida_bytes.get_wide_byte
def __DbgValue(ea, len):
if len not in ida_idaapi.__struct_unpack_table:
return None
r = ida_idd.dbg_read_memory(ea, len)
return None if r is None else struct.unpack((">" if ida_ida.cvar.inf.is_be() else "<") + ida_idaapi.__struct_unpack_table[len][1], r)[0]
def read_dbg_byte(ea):
"""
Get value of program byte using the debugger memory
@param ea: linear address
@return: The value or None on failure.
"""
return __DbgValue(ea, 1)
def read_dbg_word(ea):
"""
Get value of program word using the debugger memory
@param ea: linear address
@return: The value or None on failure.
"""
return __DbgValue(ea, 2)
def read_dbg_dword(ea):
"""
Get value of program double-word using the debugger memory
@param ea: linear address
@return: The value or None on failure.
"""
return __DbgValue(ea, 4)
def read_dbg_qword(ea):
"""
    Get value of program quad word using the debugger memory
@param ea: linear address
@return: The value or None on failure.
"""
return __DbgValue(ea, 8)
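# Usage sketch (illustration only): read a pointer-sized value from debugger
# memory; requires an active debugging session, otherwise None is returned.
def _example_read_dbg_ptr(ea):
    return read_dbg_qword(ea) if __EA64__ else read_dbg_dword(ea)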
read_dbg_memory = ida_idd.dbg_read_memory
def write_dbg_memory(ea, data):
"""
Write to debugger memory.
@param ea: linear address
@param data: string to write
@return: number of written bytes (-1 - network/debugger error)
Thread-safe function (may be called only from the main thread and debthread)
"""
__warn_once_deprecated_proto_confusion("write_dbg_memory", "ida_dbg.write_dbg_memory")
if not ida_dbg.dbg_can_query():
return -1
elif len(data) > 0:
return ida_idd.dbg_write_memory(ea, data)
get_original_byte = ida_bytes.get_original_byte
get_wide_word = ida_bytes.get_wide_word
get_wide_dword = ida_bytes.get_wide_dword
get_qword = ida_bytes.get_qword
def GetFloat(ea):
"""
Get value of a floating point number (4 bytes)
    This function assumes the number is stored using the IEEE format
    and in the same endianness as integers.
@param ea: linear address
@return: float
"""
tmp = struct.pack("I", get_wide_dword(ea))
return struct.unpack("f", tmp)[0]
def GetDouble(ea):
"""
Get value of a floating point number (8 bytes)
    This function assumes the number is stored using the IEEE format
    and in the same endianness as integers.
@param ea: linear address
@return: double
"""
tmp = struct.pack("Q", get_qword(ea))
return struct.unpack("d", tmp)[0]
def get_name_ea_simple(name):
"""
Get linear address of a name
@param name: name of program byte
@return: address of the name
BADADDR - No such name
"""
return ida_name.get_name_ea(BADADDR, name)
get_name_ea = ida_name.get_name_ea
def get_segm_by_sel(base):
"""
Get segment by segment base
@param base: segment base paragraph or selector
@return: linear address of the start of the segment or BADADDR
if no such segment
"""
sel = ida_segment.find_selector(base)
seg = ida_segment.get_segm_by_sel(sel)
if seg:
return seg.start_ea
else:
return BADADDR
get_screen_ea = ida_kernwin.get_screen_ea
def get_curline():
"""
Get the disassembly line at the cursor
@return: string
"""
return ida_lines.tag_remove(ida_kernwin.get_curline())
def read_selection_start():
"""
Get start address of the selected range
    returns BADADDR - the user has not selected a range
"""
selection, startaddr, endaddr = ida_kernwin.read_range_selection(None)
if selection == 1:
return startaddr
else:
return BADADDR
def read_selection_end():
"""
Get end address of the selected range
    @return: BADADDR - the user has not selected a range
"""
selection, startaddr, endaddr = ida_kernwin.read_range_selection(None)
if selection == 1:
return endaddr
else:
return BADADDR
def get_sreg(ea, reg):
"""
Get value of segment register at the specified address
@param ea: linear address
@param reg: name of segment register
@return: the value of the segment register or -1 on error
    @note: The segment registers in a 32bit program usually contain selectors,
           so to get the paragraph pointed to by a segment register you need to
           call the sel2para() function.
"""
    reg = ida_idp.str2reg(reg)
if reg >= 0:
return ida_segregs.get_sreg(ea, reg)
else:
return -1
next_addr = ida_bytes.next_addr
prev_addr = ida_bytes.prev_addr
def next_head(ea, maxea=BADADDR):
"""
Get next defined item (instruction or data) in the program
@param ea: linear address to start search from
@param maxea: the search will stop at the address
maxea is not included in the search range
@return: BADADDR - no (more) defined items
"""
return ida_bytes.next_head(ea, maxea)
def prev_head(ea, minea=0):
"""
Get previous defined item (instruction or data) in the program
@param ea: linear address to start search from
@param minea: the search will stop at the address
minea is included in the search range
@return: BADADDR - no (more) defined items
"""
return ida_bytes.prev_head(ea, minea)
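# Usage sketch (illustration only): collect all defined items (heads) in the
# half-open range [start_ea, end_ea) using next_head().
def _example_iter_heads(start_ea, end_ea):
    heads = []
    ea = start_ea
    if not ida_bytes.is_head(ida_bytes.get_flags(ea)):
        ea = next_head(ea, end_ea)
    while ea != BADADDR and ea < end_ea:
        heads.append(ea)
        ea = next_head(ea, end_ea)
    return heads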
next_not_tail = ida_bytes.next_not_tail
prev_not_tail = ida_bytes.prev_not_tail
get_item_head = ida_bytes.get_item_head
get_item_end = ida_bytes.get_item_end
def get_item_size(ea):
"""
Get size of instruction or data item in bytes
@param ea: linear address
@return: 1..n
"""
return ida_bytes.get_item_end(ea) - ea
def func_contains(func_ea, ea):
"""
Does the given function contain the given address?
@param func_ea: any address belonging to the function
@param ea: linear address
@return: success
"""
func = ida_funcs.get_func(func_ea)
if func:
return ida_funcs.func_contains(func, ea)
return False
GN_VISIBLE = ida_name.GN_VISIBLE # replace forbidden characters by SUBSTCHAR
GN_COLORED = ida_name.GN_COLORED # return colored name
GN_DEMANGLED = ida_name.GN_DEMANGLED # return demangled name
GN_STRICT = ida_name.GN_STRICT # fail if cannot demangle
GN_SHORT = ida_name.GN_SHORT # use short form of demangled name
GN_LONG = ida_name.GN_LONG # use long form of demangled name
GN_LOCAL = ida_name.GN_LOCAL # try to get local name first; if failed, get global
GN_ISRET = ida_name.GN_ISRET # for dummy names: use retloc
GN_NOT_ISRET = ida_name.GN_NOT_ISRET # for dummy names: do not use retloc
calc_gtn_flags = ida_name.calc_gtn_flags
def get_name(ea, gtn_flags=0):
"""
Get name at the specified address
@param ea: linear address
@param gtn_flags: how exactly the name should be retrieved.
combination of GN_ bits
@return: "" - byte has no name
"""
return ida_name.get_ea_name(ea, gtn_flags)
def demangle_name(name, disable_mask):
"""
    Demangle a name
@param name: name to demangle
@param disable_mask: a mask that tells how to demangle the name
it is a good idea to get this mask using
get_inf_attr(INF_SHORT_DN) or get_inf_attr(INF_LONG_DN)
@return: a demangled name
If the input name cannot be demangled, returns None
"""
return ida_name.demangle_name(name, disable_mask, ida_name.DQT_FULL)
def generate_disasm_line(ea, flags):
"""
Get disassembly line
@param ea: linear address of instruction
@param flags: combination of the GENDSM_ flags, or 0
@return: "" - could not decode instruction at the specified location
@note: this function may not return exactly the same mnemonics
as you see on the screen.
"""
text = ida_lines.generate_disasm_line(ea, flags)
if text:
return ida_lines.tag_remove(text)
else:
return ""
# flags for generate_disasm_line
# generate a disassembly line as if
# there is an instruction at 'ea'
GENDSM_FORCE_CODE = ida_lines.GENDSM_FORCE_CODE
# if the instruction consists of several lines,
# produce all of them (useful for parallel instructions)
GENDSM_MULTI_LINE = ida_lines.GENDSM_MULTI_LINE
def GetDisasm(ea):
"""
Get disassembly line
@param ea: linear address of instruction
@return: "" - could not decode instruction at the specified location
@note: this function may not return exactly the same mnemonics
as you see on the screen.
"""
return generate_disasm_line(ea, 0)
def print_insn_mnem(ea):
"""
Get instruction mnemonics
@param ea: linear address of instruction
@return: "" - no instruction at the specified location
@note: this function may not return exactly the same mnemonics
as you see on the screen.
"""
res = ida_ua.ua_mnem(ea)
if not res:
return ""
else:
return res
def print_operand(ea, n):
"""
Get operand of an instruction or data
@param ea: linear address of the item
@param n: number of operand:
0 - the first operand
1 - the second operand
@return: the current text representation of operand or ""
"""
res = ida_ua.print_operand(ea, n)
if not res:
return ""
else:
return ida_lines.tag_remove(res)
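# Usage sketch (illustration only): print the mnemonic and the first two
# operands of every instruction of the function at 'func_ea'.
def _example_print_func_insns(func_ea):
    func = ida_funcs.get_func(func_ea)
    if not func:
        return
    ea = func.start_ea
    while ea != BADADDR and ea < func.end_ea:
        if ida_bytes.is_code(ida_bytes.get_flags(ea)):
            print("%08X  %-8s %s, %s" % (
                ea, print_insn_mnem(ea), print_operand(ea, 0), print_operand(ea, 1)))
        ea = next_head(ea, func.end_ea)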
def get_operand_type(ea, n):
"""
Get type of instruction operand
@param ea: linear address of instruction
@param n: number of operand:
0 - the first operand
1 - the second operand
@return: any of o_* constants or -1 on error
"""
insn = ida_ua.insn_t()
inslen = ida_ua.decode_insn(insn, ea)
return -1 if inslen == 0 else insn.ops[n].type
o_void = ida_ua.o_void # No Operand ----------
o_reg = ida_ua.o_reg # General Register (al,ax,es,ds...) reg
o_mem = ida_ua.o_mem # Direct Memory Reference (DATA) addr
o_phrase = ida_ua.o_phrase # Memory Ref [Base Reg + Index Reg] phrase
o_displ = ida_ua.o_displ # Memory Reg [Base Reg + Index Reg + Displacement] phrase+addr
o_imm = ida_ua.o_imm # Immediate Value value
o_far = ida_ua.o_far # Immediate Far Address (CODE) addr
o_near = ida_ua.o_near # Immediate Near Address (CODE) addr
o_idpspec0 = ida_ua.o_idpspec0 # Processor specific type
o_idpspec1 = ida_ua.o_idpspec1 # Processor specific type
o_idpspec2 = ida_ua.o_idpspec2 # Processor specific type
o_idpspec3 = ida_ua.o_idpspec3 # Processor specific type
o_idpspec4 = ida_ua.o_idpspec4 # Processor specific type
o_idpspec5 = ida_ua.o_idpspec5 # Processor specific type
# There can be more processor specific types
# x86
o_trreg = ida_ua.o_idpspec0 # trace register
o_dbreg = ida_ua.o_idpspec1 # debug register
o_crreg = ida_ua.o_idpspec2 # control register
o_fpreg = ida_ua.o_idpspec3 # floating point register
o_mmxreg = ida_ua.o_idpspec4 # mmx register
o_xmmreg = ida_ua.o_idpspec5 # xmm register
# arm
o_reglist = ida_ua.o_idpspec1 # Register list (for LDM/STM)
o_creglist = ida_ua.o_idpspec2 # Coprocessor register list (for CDP)
o_creg = ida_ua.o_idpspec3 # Coprocessor register (for LDC/STC)
o_fpreglist = ida_ua.o_idpspec4 # Floating point register list
o_text = ida_ua.o_idpspec5 # Arbitrary text stored in the operand
o_cond = (ida_ua.o_idpspec5+1) # ARM condition as an operand
# ppc
o_spr = ida_ua.o_idpspec0 # Special purpose register
o_twofpr = ida_ua.o_idpspec1 # Two FPRs
o_shmbme = ida_ua.o_idpspec2 # SH & MB & ME
o_crf = ida_ua.o_idpspec3 # crfield x.reg
o_crb = ida_ua.o_idpspec4 # crbit x.reg
o_dcr = ida_ua.o_idpspec5 # Device control register
def get_operand_value(ea, n):
"""
Get number used in the operand
This function returns an immediate number used in the operand
@param ea: linear address of instruction
@param n: the operand number
@return: value
operand is an immediate value => immediate value
operand has a displacement => displacement
operand is a direct memory ref => memory address
operand is a register => register number
operand is a register phrase => phrase number
otherwise => -1
"""
insn = ida_ua.insn_t()
inslen = ida_ua.decode_insn(insn, ea)
if inslen == 0:
return -1
op = insn.ops[n]
if not op:
return -1
if op.type in [ ida_ua.o_mem, ida_ua.o_far, ida_ua.o_near, ida_ua.o_displ ]:
value = op.addr
elif op.type == ida_ua.o_reg:
value = op.reg
elif op.type == ida_ua.o_imm:
value = op.value
elif op.type == ida_ua.o_phrase:
value = op.phrase
else:
value = -1
return value
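# Usage sketch (illustration only): gather the immediate operand values of the
# instruction at 'ea' by combining get_operand_type() and get_operand_value().
def _example_collect_immediates(ea):
    values = []
    for n in range(8):  # 8 covers the operand slots of common processors
        t = get_operand_type(ea, n)
        if t in (-1, o_void):
            break
        if t == o_imm:
            values.append(get_operand_value(ea, n))
    return values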
GetCommentEx = ida_bytes.get_cmt
get_cmt = GetCommentEx
get_forced_operand = ida_bytes.get_forced_operand
BPU_1B = ida_nalt.BPU_1B
BPU_2B = ida_nalt.BPU_2B
BPU_4B = ida_nalt.BPU_4B
STRWIDTH_1B = ida_nalt.STRWIDTH_1B
STRWIDTH_2B = ida_nalt.STRWIDTH_2B
STRWIDTH_4B = ida_nalt.STRWIDTH_4B
STRWIDTH_MASK = ida_nalt.STRWIDTH_MASK
STRLYT_TERMCHR = ida_nalt.STRLYT_TERMCHR
STRLYT_PASCAL1 = ida_nalt.STRLYT_PASCAL1
STRLYT_PASCAL2 = ida_nalt.STRLYT_PASCAL2
STRLYT_PASCAL4 = ida_nalt.STRLYT_PASCAL4
STRLYT_MASK = ida_nalt.STRLYT_MASK
STRLYT_SHIFT = ida_nalt.STRLYT_SHIFT
# Character-terminated string. The termination characters
# are kept in the next bytes of string type.
STRTYPE_TERMCHR = ida_nalt.STRTYPE_TERMCHR
# C-style string.
STRTYPE_C = ida_nalt.STRTYPE_C
# Zero-terminated 16bit chars
STRTYPE_C_16 = ida_nalt.STRTYPE_C_16
# Zero-terminated 32bit chars
STRTYPE_C_32 = ida_nalt.STRTYPE_C_32
# Pascal-style, one-byte length prefix
STRTYPE_PASCAL = ida_nalt.STRTYPE_PASCAL
# Pascal-style, 16bit chars, one-byte length prefix
STRTYPE_PASCAL_16 = ida_nalt.STRTYPE_PASCAL_16
# Pascal-style, two-byte length prefix
STRTYPE_LEN2 = ida_nalt.STRTYPE_LEN2
# Pascal-style, 16bit chars, two-byte length prefix
STRTYPE_LEN2_16 = ida_nalt.STRTYPE_LEN2_16
# Pascal-style, four-byte length prefix
STRTYPE_LEN4 = ida_nalt.STRTYPE_LEN4
# Pascal-style, 16bit chars, four-byte length prefix
STRTYPE_LEN4_16 = ida_nalt.STRTYPE_LEN4_16
# alias
STRTYPE_C16 = STRTYPE_C_16
def get_strlit_contents(ea, length = -1, strtype = STRTYPE_C):
"""
Get string contents
@param ea: linear address
@param length: string length. -1 means to calculate the max string length
@param strtype: the string type (one of STRTYPE_... constants)
@return: string contents or empty string
"""
if length == -1:
length = ida_bytes.get_max_strlit_length(ea, strtype, ida_bytes.ALOPT_IGNHEADS)
return ida_bytes.get_strlit_contents(ea, length, strtype)
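# Usage sketch (illustration only): read the string literal at 'ea' using its
# stored type when one is recorded, falling back to a plain C string.
def _example_read_string(ea):
    strtype = get_str_type(ea)
    if strtype is None:
        strtype = STRTYPE_C
    return get_strlit_contents(ea, -1, strtype)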
def get_str_type(ea):
"""
Get string type
@param ea: linear address
@return: One of STRTYPE_... constants
"""
flags = ida_bytes.get_flags(ea)
if ida_bytes.is_strlit(flags):
oi = ida_nalt.opinfo_t()
if ida_bytes.get_opinfo(oi, ea, 0, flags):
return oi.strtype
# The following functions search for the specified byte
# ea - address to start from
# flag is combination of the following bits
# returns BADADDR - not found
find_suspop = ida_search.find_suspop
find_code = ida_search.find_code
find_data = ida_search.find_data
find_unknown = ida_search.find_unknown
find_defined = ida_search.find_defined
find_imm = ida_search.find_imm
SEARCH_UP = ida_search.SEARCH_UP # search backward
SEARCH_DOWN = ida_search.SEARCH_DOWN # search forward
SEARCH_NEXT = ida_search.SEARCH_NEXT # start the search at the next/prev item
# useful only for find_text() and find_binary()
SEARCH_CASE = ida_search.SEARCH_CASE # search case-sensitive
# (only for bin&txt search)
SEARCH_REGEX = ida_search.SEARCH_REGEX # enable regular expressions (only for text)
SEARCH_NOBRK = ida_search.SEARCH_NOBRK # don't test ctrl-break
SEARCH_NOSHOW = ida_search.SEARCH_NOSHOW # don't display the search progress
def find_text(ea, flag, y, x, searchstr):
__warn_once_deprecated_proto_confusion("find_text", "ida_search.find_text")
return ida_search.find_text(ea, y, x, searchstr, flag)
def find_binary(ea, flag, searchstr, radix=16):
__warn_once_deprecated_proto_confusion("find_binary", "ida_search.find_binary")
endea = flag & 1 and ida_ida.cvar.inf.max_ea or ida_ida.cvar.inf.min_ea
return ida_search.find_binary(ea, endea, searchstr, radix, flag)
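# Usage sketch (illustration only): walk code heads downwards from 'ea' with
# find_code() and the SEARCH_... flags above, stopping after 'limit' hits.
def _example_iter_code(ea, limit=1000):
    addrs = []
    while len(addrs) < limit:
        ea = find_code(ea, SEARCH_DOWN | SEARCH_NEXT)
        if ea == BADADDR:
            break
        addrs.append(ea)
    return addrs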
#----------------------------------------------------------------------------
# G L O B A L S E T T I N G S M A N I P U L A T I O N
#----------------------------------------------------------------------------
def process_config_line(directive):
"""
Obsolete. Please use ida_idp.process_config_directive().
"""
return eval_idc('process_config_directive("%s")' % ida_kernwin.str2user(directive))
# The following functions allow you to set/get common parameters.
# Please note that not all parameters can be set directly.
INF_VERSION = 0 # short; Version of database
INF_PROCNAME = 1 # char[8]; Name of current processor
INF_GENFLAGS = 2 # ushort; General flags:
INF_LFLAGS = 3 # uint32; IDP-dependent flags
INF_DATABASE_CHANGE_COUNT= 4 # uint32; database change counter; keeps track of byte and segment modifications
INF_CHANGE_COUNTER=INF_DATABASE_CHANGE_COUNT
INF_FILETYPE = 5 # short; type of input file (see ida.hpp)
FT_EXE_OLD = 0 # MS DOS EXE File (obsolete)
FT_COM_OLD = 1 # MS DOS COM File (obsolete)
FT_BIN = 2 # Binary File
FT_DRV = 3 # MS DOS Driver
FT_WIN = 4 # New Executable (NE)
FT_HEX = 5 # Intel Hex Object File
FT_MEX = 6 # MOS Technology Hex Object File
FT_LX = 7 # Linear Executable (LX)
FT_LE = 8 # Linear Executable (LE)
FT_NLM = 9 # Netware Loadable Module (NLM)
FT_COFF = 10 # Common Object File Format (COFF)
FT_PE = 11 # Portable Executable (PE)
FT_OMF = 12 # Object Module Format
FT_SREC = 13 # R-records
FT_ZIP = 14 # ZIP file (this file is never loaded to IDA database)
FT_OMFLIB = 15 # Library of OMF Modules
FT_AR = 16 # ar library
FT_LOADER = 17 # file is loaded using LOADER DLL
FT_ELF = 18 # Executable and Linkable Format (ELF)
FT_W32RUN = 19 # Watcom DOS32 Extender (W32RUN)
FT_AOUT = 20 # Linux a.out (AOUT)
FT_PRC = 21 # PalmPilot program file
FT_EXE = 22 # MS DOS EXE File
FT_COM = 23 # MS DOS COM File
FT_AIXAR = 24 # AIX ar library
FT_MACHO = 25 # Mac OS X Mach-O file
INF_OSTYPE = 6 # short; FLIRT: OS type the program is for
OSTYPE_MSDOS= 0x0001
OSTYPE_WIN = 0x0002
OSTYPE_OS2 = 0x0004
OSTYPE_NETW = 0x0008
INF_APPTYPE = 7 # short; FLIRT: Application type
APPT_CONSOLE= 0x0001 # console
APPT_GRAPHIC= 0x0002 # graphics
APPT_PROGRAM= 0x0004 # EXE
APPT_LIBRARY= 0x0008 # DLL
APPT_DRIVER = 0x0010 # DRIVER
APPT_1THREAD= 0x0020 # Singlethread
APPT_MTHREAD= 0x0040 # Multithread
APPT_16BIT = 0x0080 # 16 bit application
APPT_32BIT = 0x0100 # 32 bit application
INF_ASMTYPE = 8 # char; target assembler number (0..n)
INF_SPECSEGS = 9
INF_AF = 10 # uint32; Analysis flags:
def _import_module_flag_sets(module, prefixes):
if isinstance(prefixes, str):
prefixes = [prefixes]
for prefix in prefixes:
for key in dir(module):
if key.startswith(prefix):
value = getattr(module, key)
if isinstance(value, ida_idaapi.integer_types):
globals()[key] = value
_import_module_flag_sets(
ida_ida,
[
"INFFL_",
"LFLG_",
"IDB_",
"AF_",
"AF2_",
"SW_",
"NM_",
"DEMNAM_",
"LN_",
"OFLG_",
"SCF_",
"LMT_",
"PREF_",
"STRF_",
"ABI_",
])
INF_AF2 = 11 # uint32; Analysis flags 2
INF_BASEADDR = 12 # uval_t; base paragraph of the program
INF_START_SS = 13 # int32; value of SS at the start
INF_START_CS = 14 # int32; value of CS at the start
INF_START_IP = 15 # ea_t; IP register value at the start of
# program execution
INF_START_EA = 16 # ea_t; Linear address of program entry point
INF_START_SP = 17 # ea_t; SP register value at the start of
# program execution
INF_MAIN = 18 # ea_t; address of main()
INF_MIN_EA = 19 # ea_t; The lowest address used
# in the program
INF_MAX_EA = 20 # ea_t; The highest address used
# in the program - 1
INF_OMIN_EA = 21
INF_OMAX_EA = 22
INF_LOWOFF = 23 # ea_t; low limit of voids
INF_LOW_OFF=INF_LOWOFF
INF_HIGHOFF = 24 # ea_t; high limit of voids
INF_HIGH_OFF=INF_HIGHOFF
INF_MAXREF = 25 # uval_t; max xref depth
INF_PRIVRANGE_START_EA = 27 # uval_t; Range of addresses reserved for internal use.
INF_START_PRIVRANGE=INF_PRIVRANGE_START_EA
INF_PRIVRANGE_END_EA = 28 # uval_t; Initially (MAXADDR, MAXADDR+0x100000)
INF_END_PRIVRANGE=INF_PRIVRANGE_END_EA
INF_NETDELTA = 29 # sval_t; Delta value to be added to all addresses for mapping to netnodes.
# Initially 0.
# CROSS REFERENCES
INF_XREFNUM = 30 # char; Number of references to generate
# 0 - xrefs won't be generated at all
INF_TYPE_XREFNUM = 31 # char; Number of references to generate
# in the struct & enum windows
# 0 - xrefs won't be generated at all
INF_TYPE_XREFS=INF_TYPE_XREFNUM
INF_REFCMTNUM = 32 # uchar; number of comment lines to
# generate for refs to ASCII
# string or demangled name
# 0 - such comments won't be
# generated at all
INF_REFCMTS=INF_REFCMTNUM
INF_XREFFLAG = 33 # char; xrefs representation:
INF_XREFS=INF_XREFFLAG
# NAMES
INF_MAX_AUTONAME_LEN = 34 # ushort; max name length (without zero byte)
INF_NAMETYPE = 35 # char; dummy names representation type
INF_SHORT_DEMNAMES = 36 # int32; short form of demangled names
INF_SHORT_DN=INF_SHORT_DEMNAMES
INF_LONG_DEMNAMES = 37 # int32; long form of demangled names
# see demangle.h for definitions
INF_LONG_DN=INF_LONG_DEMNAMES
INF_DEMNAMES = 38 # char; display demangled names as:
INF_LISTNAMES = 39 # uchar; What names should be included in the list?
# DISASSEMBLY LISTING DETAILS
INF_INDENT = 40 # char; Indention for instructions
INF_CMT_INDENT = 41 # char; Indention for comments
INF_COMMENT = 41 # for compatibility
INF_MARGIN = 42 # ushort; max length of data lines
INF_LENXREF = 43 # ushort; max length of line with xrefs
INF_OUTFLAGS = 44 # uint32; output flags
INF_CMTFLG = 45 # char; comments:
INF_CMTFLAG=INF_CMTFLG
INF_LIMITER = 46 # char; Generate borders?
INF_BORDER=INF_LIMITER
INF_BIN_PREFIX_SIZE = 47 # short; # of instruction bytes to show
# in line prefix
INF_BINPREF=INF_BIN_PREFIX_SIZE
INF_PREFFLAG = 48 # char; line prefix type:
# STRING LITERALS
INF_STRLIT_FLAGS= 49 # uchar; string literal flags
INF_STRLIT_BREAK= 50 # char; string literal line break symbol
INF_STRLIT_ZEROES= 51 # char; leading zeroes
INF_STRTYPE = 52 # int32; current ascii string type
# is considered as several bytes:
# low byte:
INF_STRLIT_PREF = 53 # char[16];ASCII names prefix
INF_STRLIT_SERNUM= 54 # uint32; serial number
# DATA ITEMS
INF_DATATYPES = 55 # int32; data types allowed in data carousel
# COMPILER
INF_CC_ID = 57 # uchar; compiler
COMP_MASK = 0x0F # mask to apply to get the pure compiler id
COMP_UNK = 0x00 # Unknown
COMP_MS = 0x01 # Visual C++
COMP_BC = 0x02 # Borland C++
COMP_WATCOM = 0x03 # Watcom C++
COMP_GNU = 0x06 # GNU C++
COMP_VISAGE = 0x07 # Visual Age C++
COMP_BP = 0x08 # Delphi
INF_CC_CM = 58 # uchar; memory model & calling convention
INF_CC_SIZE_I = 59 # uchar; sizeof(int)
INF_CC_SIZE_B = 60 # uchar; sizeof(bool)
INF_CC_SIZE_E = 61 # uchar; sizeof(enum)
INF_CC_DEFALIGN = 62 # uchar; default alignment
INF_CC_SIZE_S = 63
INF_CC_SIZE_L = 64
INF_CC_SIZE_LL = 65
INF_CC_SIZE_LDBL = 66 # uchar; sizeof(long double)
INF_COMPILER = INF_CC_ID
INF_MODEL = INF_CC_CM
INF_SIZEOF_INT = INF_CC_SIZE_I
INF_SIZEOF_BOOL = INF_CC_SIZE_B
INF_SIZEOF_ENUM = INF_CC_SIZE_E
INF_SIZEOF_ALGN = INF_CC_DEFALIGN
INF_SIZEOF_SHORT= INF_CC_SIZE_S
INF_SIZEOF_LONG = INF_CC_SIZE_L
INF_SIZEOF_LLONG= INF_CC_SIZE_LL
INF_SIZEOF_LDBL = INF_CC_SIZE_LDBL
INF_ABIBITS= 67 # uint32; ABI features
INF_APPCALL_OPTIONS= 68 # uint32; appcall options
_INF_attrs_accessors = {
INF_ABIBITS : (ida_ida.inf_get_abibits, ida_ida.inf_set_abibits),
INF_AF : (ida_ida.inf_get_af, ida_ida.inf_set_af),
INF_AF2 : (ida_ida.inf_get_af2, ida_ida.inf_set_af2),
INF_APPCALL_OPTIONS : (ida_ida.inf_get_appcall_options, ida_ida.inf_set_appcall_options),
INF_APPTYPE : (ida_ida.inf_get_apptype, ida_ida.inf_set_apptype),
INF_ASMTYPE : (ida_ida.inf_get_asmtype, ida_ida.inf_set_asmtype),
INF_BASEADDR : (ida_ida.inf_get_baseaddr, ida_ida.inf_set_baseaddr),
INF_BIN_PREFIX_SIZE : (ida_ida.inf_get_bin_prefix_size, ida_ida.inf_set_bin_prefix_size),
INF_CC_CM : (ida_ida.inf_get_cc_cm, ida_ida.inf_set_cc_cm),
INF_CC_DEFALIGN : (ida_ida.inf_get_cc_defalign, ida_ida.inf_set_cc_defalign),
INF_CC_ID : (ida_ida.inf_get_cc_id, ida_ida.inf_set_cc_id),
INF_CC_SIZE_B : (ida_ida.inf_get_cc_size_b, ida_ida.inf_set_cc_size_b),
INF_CC_SIZE_E : (ida_ida.inf_get_cc_size_e, ida_ida.inf_set_cc_size_e),
INF_CC_SIZE_I : (ida_ida.inf_get_cc_size_i, ida_ida.inf_set_cc_size_i),
INF_CC_SIZE_L : (ida_ida.inf_get_cc_size_l, ida_ida.inf_set_cc_size_l),
INF_CC_SIZE_LDBL : (ida_ida.inf_get_cc_size_ldbl, ida_ida.inf_set_cc_size_ldbl),
INF_CC_SIZE_LL : (ida_ida.inf_get_cc_size_ll, ida_ida.inf_set_cc_size_ll),
INF_CC_SIZE_S : (ida_ida.inf_get_cc_size_s, ida_ida.inf_set_cc_size_s),
INF_CMTFLAG : (ida_ida.inf_get_cmtflg, ida_ida.inf_set_cmtflg),
INF_CMT_INDENT : (ida_ida.inf_get_cmt_indent, ida_ida.inf_set_cmt_indent),
INF_DATABASE_CHANGE_COUNT : (ida_ida.inf_get_database_change_count, ida_ida.inf_set_database_change_count),
INF_DATATYPES : (ida_ida.inf_get_datatypes, ida_ida.inf_set_datatypes),
INF_DEMNAMES : (ida_ida.inf_get_demnames, ida_ida.inf_set_demnames),
INF_END_PRIVRANGE : (ida_ida.inf_get_privrange_end_ea, ida_ida.inf_set_privrange_end_ea),
INF_FILETYPE : (ida_ida.inf_get_filetype, ida_ida.inf_set_filetype),
INF_GENFLAGS : (ida_ida.inf_get_genflags, ida_ida.inf_set_genflags),
INF_HIGHOFF : (ida_ida.inf_get_highoff, ida_ida.inf_set_highoff),
INF_INDENT : (ida_ida.inf_get_indent, ida_ida.inf_set_indent),
INF_LENXREF : (ida_ida.inf_get_lenxref, ida_ida.inf_set_lenxref),
INF_LFLAGS : (ida_ida.inf_get_lflags, ida_ida.inf_set_lflags),
INF_LIMITER : (ida_ida.inf_get_limiter, ida_ida.inf_set_limiter),
INF_LISTNAMES : (ida_ida.inf_get_listnames, ida_ida.inf_set_listnames),
INF_LONG_DEMNAMES : (ida_ida.inf_get_long_demnames, ida_ida.inf_set_long_demnames),
INF_LOWOFF : (ida_ida.inf_get_lowoff, ida_ida.inf_set_lowoff),
INF_MAIN : (ida_ida.inf_get_main, ida_ida.inf_set_main),
INF_MARGIN : (ida_ida.inf_get_margin, ida_ida.inf_set_margin),
INF_MAXREF : (ida_ida.inf_get_maxref, ida_ida.inf_set_maxref),
INF_MAX_AUTONAME_LEN : (ida_ida.inf_get_max_autoname_len, ida_ida.inf_set_max_autoname_len),
INF_MAX_EA : (ida_ida.inf_get_max_ea, ida_ida.inf_set_max_ea),
INF_MIN_EA : (ida_ida.inf_get_min_ea, ida_ida.inf_set_min_ea),
INF_MODEL : (ida_ida.inf_get_cc_cm, ida_ida.inf_set_cc_cm),
INF_NAMETYPE : (ida_ida.inf_get_nametype, ida_ida.inf_set_nametype),
INF_NETDELTA : (ida_ida.inf_get_netdelta, ida_ida.inf_set_netdelta),
INF_OMAX_EA : (ida_ida.inf_get_omax_ea, ida_ida.inf_set_omax_ea),
INF_OMIN_EA : (ida_ida.inf_get_omin_ea, ida_ida.inf_set_omin_ea),
INF_OSTYPE : (ida_ida.inf_get_ostype, ida_ida.inf_set_ostype),
INF_OUTFLAGS : (ida_ida.inf_get_outflags, ida_ida.inf_set_outflags),
INF_PREFFLAG : (ida_ida.inf_get_prefflag, ida_ida.inf_set_prefflag),
INF_PRIVRANGE_END_EA : (ida_ida.inf_get_privrange_end_ea, ida_ida.inf_set_privrange_end_ea),
INF_PRIVRANGE_START_EA : (ida_ida.inf_get_privrange_start_ea, ida_ida.inf_set_privrange_start_ea),
INF_PROCNAME : (ida_ida.inf_get_procname, ida_ida.inf_set_procname),
INF_REFCMTNUM : (ida_ida.inf_get_refcmtnum, ida_ida.inf_set_refcmtnum),
INF_SHORT_DEMNAMES : (ida_ida.inf_get_short_demnames, ida_ida.inf_set_short_demnames),
INF_SPECSEGS : (ida_ida.inf_get_specsegs, ida_ida.inf_set_specsegs),
INF_START_CS : (ida_ida.inf_get_start_cs, ida_ida.inf_set_start_cs),
INF_START_EA : (ida_ida.inf_get_start_ea, ida_ida.inf_set_start_ea),
INF_START_IP : (ida_ida.inf_get_start_ip, ida_ida.inf_set_start_ip),
INF_START_PRIVRANGE : (ida_ida.inf_get_privrange_start_ea, ida_ida.inf_set_privrange_start_ea),
INF_START_SP : (ida_ida.inf_get_start_sp, ida_ida.inf_set_start_sp),
INF_START_SS : (ida_ida.inf_get_start_ss, ida_ida.inf_set_start_ss),
INF_STRLIT_BREAK : (ida_ida.inf_get_strlit_break, ida_ida.inf_set_strlit_break),
INF_STRLIT_FLAGS : (ida_ida.inf_get_strlit_flags, ida_ida.inf_set_strlit_flags),
INF_STRLIT_PREF : (ida_ida.inf_get_strlit_pref, ida_ida.inf_set_strlit_pref),
INF_STRLIT_SERNUM : (ida_ida.inf_get_strlit_sernum, ida_ida.inf_set_strlit_sernum),
INF_STRLIT_ZEROES : (ida_ida.inf_get_strlit_zeroes, ida_ida.inf_set_strlit_zeroes),
INF_STRTYPE : (ida_ida.inf_get_strtype, ida_ida.inf_set_strtype),
INF_TYPE_XREFNUM : (ida_ida.inf_get_type_xrefnum, ida_ida.inf_set_type_xrefnum),
INF_VERSION : (ida_ida.inf_get_version, ida_ida.inf_set_version),
INF_XREFFLAG : (ida_ida.inf_get_xrefflag, ida_ida.inf_set_xrefflag),
INF_XREFNUM : (ida_ida.inf_get_xrefnum, ida_ida.inf_set_xrefnum),
}
def get_inf_attr(attr):
"""
    Deprecated. Please use ida_ida.inf_get_* instead.
"""
return _INF_attrs_accessors[attr][0]()
def set_inf_attr(attr, value):
"""
    Deprecated. Please use ida_ida.inf_set_* instead.
"""
_INF_attrs_accessors[attr][1](value)
return 1
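# Usage sketch (illustration only): a few common database-wide queries through
# the deprecated wrapper above; new code should prefer the ida_ida.inf_get_*
# accessors directly.
def _example_db_summary():
    return {
        "processor": get_inf_attr(INF_PROCNAME),
        "filetype": get_inf_attr(INF_FILETYPE),
        "min_ea": get_inf_attr(INF_MIN_EA),
        "max_ea": get_inf_attr(INF_MAX_EA),
    }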
set_processor_type = ida_idp.set_processor_type
SETPROC_IDB = ida_idp.SETPROC_IDB
SETPROC_LOADER = ida_idp.SETPROC_LOADER
SETPROC_LOADER_NON_FATAL = ida_idp.SETPROC_LOADER_NON_FATAL
SETPROC_USER = ida_idp.SETPROC_USER
def SetPrcsr(processor): return set_processor_type(processor, SETPROC_USER)
set_target_assembler = ida_idp.set_target_assembler
def batch(batch):
"""
Enable/disable batch mode of operation
@param batch: batch mode
0 - ida will display dialog boxes and wait for the user input
1 - ida will not display dialog boxes, warnings, etc.
    @return: old value of the batch flag
"""
batch_prev = ida_kernwin.cvar.batch
ida_kernwin.cvar.batch = batch
return batch_prev
#----------------------------------------------------------------------------
# I N T E R A C T I O N W I T H T H E U S E R
#----------------------------------------------------------------------------
def process_ui_action(name, flags=0):
"""
Invokes an IDA UI action by name
@param name: Command name
@param flags: Reserved. Must be zero
@return: Boolean
"""
return ida_kernwin.process_ui_action(name, flags)
ask_seg = ida_kernwin.ask_seg
ask_yn = ida_kernwin.ask_yn
msg = ida_kernwin.msg
warning = ida_kernwin.warning
error = ida_kernwin.error
set_ida_state = ida_auto.set_ida_state
IDA_STATUS_READY = 0 # READY IDA is idle
IDA_STATUS_THINKING = 1 # THINKING Analyzing but the user may press keys
IDA_STATUS_WAITING = 2 # WAITING Waiting for the user input
IDA_STATUS_WORK = 3 # BUSY IDA is busy
refresh_idaview_anyway = ida_kernwin.refresh_idaview_anyway
refresh_lists = ida_kernwin.refresh_choosers
#----------------------------------------------------------------------------
# S E G M E N T A T I O N
#----------------------------------------------------------------------------
def sel2para(sel):
"""
Get a selector value
@param sel: the selector number
@return: selector value if found
otherwise the input value (sel)
@note: selector values are always in paragraphs
"""
s = ida_pro.sel_pointer()
base = ida_pro.ea_pointer()
res,tmp = ida_segment.getn_selector(sel, s.cast(), base.cast())
if not res:
return sel
else:
return base.value()
def find_selector(val):
"""
    Find a selector which has the specified value
@param val: value to search for
@return: the selector number if found,
otherwise the input value (val & 0xFFFF)
@note: selector values are always in paragraphs
"""
return ida_segment.find_selector(val) & 0xFFFF
set_selector = ida_segment.set_selector
del_selector = ida_segment.del_selector
def get_first_seg():
"""
Get first segment
@return: address of the start of the first segment
BADADDR - no segments are defined
"""
seg = ida_segment.get_first_seg()
if not seg:
return BADADDR
else:
return seg.start_ea
def get_next_seg(ea):
"""
Get next segment
@param ea: linear address
@return: start of the next segment
BADADDR - no next segment
"""
nextseg = ida_segment.get_next_seg(ea)
if not nextseg:
return BADADDR
else:
return nextseg.start_ea
def get_segm_start(ea):
"""
Get start address of a segment
@param ea: any address in the segment
@return: start of segment
BADADDR - the specified address doesn't belong to any segment
"""
seg = ida_segment.getseg(ea)
if not seg:
return BADADDR
else:
return seg.start_ea
def get_segm_end(ea):
"""
Get end address of a segment
@param ea: any address in the segment
@return: end of segment (an address past end of the segment)
BADADDR - the specified address doesn't belong to any segment
"""
seg = ida_segment.getseg(ea)
if not seg:
return BADADDR
else:
return seg.end_ea
def get_segm_name(ea):
"""
Get name of a segment
@param ea: any address in the segment
@return: "" - no segment at the specified address
"""
seg = ida_segment.getseg(ea)
if not seg:
return ""
else:
name = ida_segment.get_segm_name(seg)
if not name:
return ""
else:
return name
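# Usage sketch (illustration only): enumerate all segments as
# (start, end, name) tuples using the helpers above.
def _example_iter_segments():
    result = []
    ea = get_first_seg()
    while ea != BADADDR:
        result.append((ea, get_segm_end(ea), get_segm_name(ea)))
        ea = get_next_seg(ea)
    return result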
def add_segm_ex(startea, endea, base, use32, align, comb, flags):
"""
Create a new segment
@param startea: linear address of the start of the segment
@param endea: linear address of the end of the segment
this address will not belong to the segment
'endea' should be higher than 'startea'
@param base: base paragraph or selector of the segment.
                 a paragraph is a 16-byte memory chunk.
If a selector value is specified, the selector should be
already defined.
@param use32: 0: 16bit segment, 1: 32bit segment, 2: 64bit segment
@param align: segment alignment. see below for alignment values
@param comb: segment combination. see below for combination values.
@param flags: combination of ADDSEG_... bits
@return: 0-failed, 1-ok
"""
s = ida_segment.segment_t()
s.start_ea = startea
s.end_ea = endea
s.sel = ida_segment.setup_selector(base)
s.bitness = use32
s.align = align
s.comb = comb
return ida_segment.add_segm_ex(s, "", "", flags)
ADDSEG_NOSREG = ida_segment.ADDSEG_NOSREG # set all default segment register values
# to BADSELs
# (undefine all default segment registers)
ADDSEG_OR_DIE = ida_segment.ADDSEG_OR_DIE # qexit() if can't add a segment
ADDSEG_NOTRUNC = ida_segment.ADDSEG_NOTRUNC # don't truncate the new segment at the beginning
# of the next segment if they overlap.
# destroy/truncate old segments instead.
ADDSEG_QUIET = ida_segment.ADDSEG_QUIET # silent mode, no "Adding segment..." in the messages window
ADDSEG_FILLGAP = ida_segment.ADDSEG_FILLGAP # If there is a gap between the new segment
# and the previous one, and this gap is less
# than 64K, then fill the gap by extending the
# previous segment and adding .align directive
# to it. This way we avoid gaps between segments.
# Too many gaps lead to a virtual array failure.
# It cannot hold more than ~1000 gaps.
ADDSEG_SPARSE = ida_segment.ADDSEG_SPARSE # Use sparse storage method for the new segment
def AddSeg(startea, endea, base, use32, align, comb):
return add_segm_ex(startea, endea, base, use32, align, comb, ADDSEG_NOSREG)
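# Usage sketch (illustration only, hypothetical addresses and name): create a
# 32-bit, paragraph-aligned, public code segment and classify it.
def _example_add_code_segment(start=0x10000, size=0x1000, name="MYSEG"):
    if not AddSeg(start, start + size, 0, 1, saRelPara, scPub):
        return False
    set_segm_name(start, name)
    set_segm_class(start, "CODE")
    return set_segm_type(start, SEG_CODE)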
del_segm = ida_segment.del_segm
SEGMOD_KILL = ida_segment.SEGMOD_KILL # disable addresses if segment gets
# shrinked or deleted
SEGMOD_KEEP = ida_segment.SEGMOD_KEEP # keep information (code & data, etc)
SEGMOD_SILENT = ida_segment.SEGMOD_SILENT # be silent
def set_segment_bounds(ea, startea, endea, flags):
"""
Change segment boundaries
@param ea: any address in the segment
@param startea: new start address of the segment
@param endea: new end address of the segment
@param flags: combination of SEGMOD_... flags
@return: boolean success
"""
return ida_segment.set_segm_start(ea, startea, flags) & \
ida_segment.set_segm_end(ea, endea, flags)
def set_segm_name(ea, name):
"""
Change name of the segment
@param ea: any address in the segment
@param name: new name of the segment
@return: success (boolean)
"""
seg = ida_segment.getseg(ea)
if not seg:
return False
return ida_segment.set_segm_name(seg, name)
def set_segm_class(ea, segclass):
"""
Change class of the segment
@param ea: any address in the segment
@param segclass: new class of the segment
@return: success (boolean)
"""
seg = ida_segment.getseg(ea)
if not seg:
return False
return ida_segment.set_segm_class(seg, segclass)
def set_segm_alignment(ea, alignment):
"""
Change alignment of the segment
@param ea: any address in the segment
@param alignment: new alignment of the segment (one of the sa... constants)
@return: success (boolean)
"""
return set_segm_attr(ea, SEGATTR_ALIGN, alignment)
if ida_idaapi.uses_swig_builtins:
_scope = ida_segment.segment_t
else:
_scope = ida_segment
saAbs = _scope.saAbs # Absolute segment.
saRelByte = _scope.saRelByte # Relocatable, byte aligned.
saRelWord = _scope.saRelWord # Relocatable, word (2-byte, 16-bit) aligned.
saRelPara = _scope.saRelPara # Relocatable, paragraph (16-byte) aligned.
saRelPage = _scope.saRelPage # Relocatable, aligned on 256-byte boundary
# (a "page" in the original Intel specification).
saRelDble = _scope.saRelDble # Relocatable, aligned on a double word
# (4-byte) boundary. This value is used by
# the PharLap OMF for the same alignment.
saRel4K = _scope.saRel4K # This value is used by the PharLap OMF for
# page (4K) alignment. It is not supported
# by LINK.
saGroup = _scope.saGroup # Segment group
saRel32Bytes = _scope.saRel32Bytes # 32 bytes
saRel64Bytes = _scope.saRel64Bytes # 64 bytes
saRelQword = _scope.saRelQword # 8 bytes
def set_segm_combination(segea, comb):
"""
Change combination of the segment
@param segea: any address in the segment
@param comb: new combination of the segment (one of the sc... constants)
@return: success (boolean)
"""
return set_segm_attr(segea, SEGATTR_COMB, comb)
scPriv = _scope.scPriv # Private. Do not combine with any other program
# segment.
scPub = _scope.scPub # Public. Combine by appending at an offset that
# meets the alignment requirement.
scPub2 = _scope.scPub2 # As defined by Microsoft, same as C=2 (public).
scStack = _scope.scStack # Stack. Combine as for C=2. This combine type
# forces byte alignment.
scCommon = _scope.scCommon # Common. Combine by overlay using maximum size.
scPub3 = _scope.scPub3 # As defined by Microsoft, same as C=2 (public).
def set_segm_addressing(ea, bitness):
"""
Change segment addressing
@param ea: any address in the segment
@param bitness: 0: 16bit, 1: 32bit, 2: 64bit
@return: success (boolean)
"""
seg = ida_segment.getseg(ea)
if not seg:
return False
seg.bitness = bitness
return True
def selector_by_name(segname):
"""
Get segment selector by name
@param segname: name of segment
@return: segment selector or BADADDR
"""
seg = ida_segment.get_segm_by_name(segname)
if not seg:
return BADADDR
return seg.sel
def set_default_sreg_value(ea, reg, value):
"""
Set default segment register value for a segment
@param ea: any address in the segment
if no segment is present at the specified address
then all segments will be affected
@param reg: name of segment register
@param value: default value of the segment register. -1-undefined.
"""
seg = ida_segment.getseg(ea)
    reg = ida_idp.str2reg(reg)
if seg and reg >= 0:
return ida_segregs.set_default_sreg_value(seg, reg, value)
else:
return False
def set_segm_type(segea, segtype):
"""
Set segment type
@param segea: any address within segment
@param segtype: new segment type:
@return: !=0 - ok
"""
seg = ida_segment.getseg(segea)
if not seg:
return False
seg.type = segtype
return seg.update()
SEG_NORM = _scope.SEG_NORM
SEG_XTRN = _scope.SEG_XTRN # * segment with 'extern' definitions
# no instructions are allowed
SEG_CODE = _scope.SEG_CODE # pure code segment
SEG_DATA = _scope.SEG_DATA # pure data segment
SEG_IMP = _scope.SEG_IMP # implementation segment
SEG_GRP = _scope.SEG_GRP # * group of segments
# no instructions are allowed
SEG_NULL = _scope.SEG_NULL # zero-length segment
SEG_UNDF = _scope.SEG_UNDF # undefined segment type
SEG_BSS = _scope.SEG_BSS # uninitialized segment
SEG_ABSSYM = _scope.SEG_ABSSYM # * segment with definitions of absolute symbols
# no instructions are allowed
SEG_COMM = _scope.SEG_COMM # * segment with communal definitions
# no instructions are allowed
SEG_IMEM = _scope.SEG_IMEM # internal processor memory & sfr (8051)
def get_segm_attr(segea, attr):
"""
Get segment attribute
@param segea: any address within segment
@param attr: one of SEGATTR_... constants
"""
seg = ida_segment.getseg(segea)
assert seg, "could not find segment at 0x%x" % segea
if attr in [ SEGATTR_ES, SEGATTR_CS, SEGATTR_SS, SEGATTR_DS, SEGATTR_FS, SEGATTR_GS ]:
return ida_segment.get_defsr(seg, _SEGATTRMAP[attr][1])
else:
return _IDC_GetAttr(seg, _SEGATTRMAP, attr)
def set_segm_attr(segea, attr, value):
"""
Set segment attribute
@param segea: any address within segment
@param attr: one of SEGATTR_... constants
@note: Please note that not all segment attributes are modifiable.
Also some of them should be modified using special functions
like set_segm_addressing, etc.
"""
seg = ida_segment.getseg(segea)
assert seg, "could not find segment at 0x%x" % segea
if attr in [ SEGATTR_ES, SEGATTR_CS, SEGATTR_SS, SEGATTR_DS, SEGATTR_FS, SEGATTR_GS ]:
ida_segment.set_defsr(seg, _SEGATTRMAP[attr][1], value)
else:
_IDC_SetAttr(seg, _SEGATTRMAP, attr, value)
return seg.update()
SEGATTR_START = 0 # starting address
SEGATTR_END = 4 # ending address
SEGATTR_ORGBASE = 16
SEGATTR_ALIGN = 20 # alignment
SEGATTR_COMB = 21 # combination
SEGATTR_PERM = 22 # permissions
SEGATTR_BITNESS = 23 # bitness (0: 16, 1: 32, 2: 64 bit segment)
# Note: modifying the attribute directly does
# not lead to the reanalysis of the segment.
# Using set_segm_addressing() is more correct.
SEGATTR_FLAGS = 24 # segment flags
SEGATTR_SEL = 28 # segment selector
SEGATTR_ES = 32 # default ES value
SEGATTR_CS = 36 # default CS value
SEGATTR_SS = 40 # default SS value
SEGATTR_DS = 44 # default DS value
SEGATTR_FS = 48 # default FS value
SEGATTR_GS = 52 # default GS value
SEGATTR_TYPE = 96 # segment type
SEGATTR_COLOR = 100 # segment color
# Redefining these for 64-bit
if __EA64__:
SEGATTR_START = 0
SEGATTR_END = 8
SEGATTR_ORGBASE = 32
SEGATTR_ALIGN = 40
SEGATTR_COMB = 41
SEGATTR_PERM = 42
SEGATTR_BITNESS = 43
SEGATTR_FLAGS = 44
SEGATTR_SEL = 48
SEGATTR_ES = 56
SEGATTR_CS = 64
SEGATTR_SS = 72
SEGATTR_DS = 80
SEGATTR_FS = 88
SEGATTR_GS = 96
SEGATTR_TYPE = 184
SEGATTR_COLOR = 188
_SEGATTRMAP = {
SEGATTR_START : (True, 'start_ea'),
SEGATTR_END : (True, 'end_ea'),
SEGATTR_ORGBASE : (False, 'orgbase'),
SEGATTR_ALIGN : (False, 'align'),
SEGATTR_COMB : (False, 'comb'),
SEGATTR_PERM : (False, 'perm'),
SEGATTR_BITNESS : (False, 'bitness'),
SEGATTR_FLAGS : (False, 'flags'),
SEGATTR_SEL : (False, 'sel'),
SEGATTR_ES : (False, 0),
SEGATTR_CS : (False, 1),
SEGATTR_SS : (False, 2),
SEGATTR_DS : (False, 3),
SEGATTR_FS : (False, 4),
SEGATTR_GS : (False, 5),
SEGATTR_TYPE : (False, 'type'),
SEGATTR_COLOR : (False, 'color'),
}
# Valid segment flags
SFL_COMORG = 0x01 # IDP dependent field (IBM PC: if set, ORG directive is not commented out)
SFL_OBOK = 0x02 # orgbase is present? (IDP dependent field)
SFL_HIDDEN = 0x04 # is the segment hidden?
SFL_DEBUG = 0x08 # is the segment created for the debugger?
SFL_LOADER = 0x10 # is the segment created by the loader?
SFL_HIDETYPE = 0x20 # hide segment type (do not print it in the listing)
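# Illustrative sketch (not part of the original IDC API): reading and writing
# segment attributes with the SEGATTR_... constants above. The address
# 0x401000 is hypothetical; any address inside an existing segment works.
def _example_segment_attrs(ea=0x401000):
    perm = get_segm_attr(ea, SEGATTR_PERM)        # current permission bits
    bitness = get_segm_attr(ea, SEGATTR_BITNESS)  # 0: 16, 1: 32, 2: 64 bit
    set_segm_attr(ea, SEGATTR_PERM, perm | 4)     # add the read bit (SEGPERM_READ)
    return perm, bitness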
def move_segm(ea, to, flags):
"""
Move a segment to a new address
This function moves all information to the new address
It fixes up address sensitive information in the kernel
The total effect is equal to reloading the segment to the target address
@param ea: any address within the segment to move
@param to: new segment start address
@param flags: combination MFS_... constants
@returns: MOVE_SEGM_... error code
"""
seg = ida_segment.getseg(ea)
if not seg:
return MOVE_SEGM_PARAM
return ida_segment.move_segm(seg, to, flags)
MSF_SILENT = 0x0001 # don't display a "please wait" box on the screen
MSF_NOFIX = 0x0002 # don't call the loader to fix relocations
MSF_LDKEEP = 0x0004 # keep the loader in the memory (optimization)
MSF_FIXONCE = 0x0008 # valid for rebase_program(): call loader only once
MOVE_SEGM_OK = 0 # all ok
MOVE_SEGM_PARAM = -1 # The specified segment does not exist
MOVE_SEGM_ROOM = -2 # Not enough free room at the target address
MOVE_SEGM_IDP = -3 # IDP module forbids moving the segment
MOVE_SEGM_CHUNK = -4 # Too many chunks are defined, can't move
MOVE_SEGM_LOADER = -5 # The segment has been moved but the loader complained
MOVE_SEGM_ODD = -6 # Can't move segments by an odd number of bytes
rebase_program = ida_segment.rebase_program
set_storage_type = ida_bytes.change_storage_type
STT_VA = 0 # regular storage: virtual arrays, an explicit flag for each byte
STT_MM = 1 # memory map: sparse storage. useful for huge objects
#----------------------------------------------------------------------------
# C R O S S R E F E R E N C E S
#----------------------------------------------------------------------------
# Flow types (combine with XREF_USER!):
fl_CF = 16 # Call Far
fl_CN = 17 # Call Near
fl_JF = 18 # jumpto Far
fl_JN = 19 # jumpto Near
fl_F = 21 # Ordinary flow
XREF_USER = 32 # All user-specified xref types
# must be combined with this bit
# Mark exec flow 'from' 'to'
add_cref = ida_xref.add_cref
del_cref = ida_xref.del_cref
# The following functions include the ordinary flows:
# (the ordinary flow references are returned first)
get_first_cref_from = ida_xref.get_first_cref_from
get_next_cref_from = ida_xref.get_next_cref_from
get_first_cref_to = ida_xref.get_first_cref_to
get_next_cref_to = ida_xref.get_next_cref_to
# The following functions don't take into account the ordinary flows:
get_first_fcref_from = ida_xref.get_first_fcref_from
get_next_fcref_from = ida_xref.get_next_fcref_from
get_first_fcref_to = ida_xref.get_first_fcref_to
get_next_fcref_to = ida_xref.get_next_fcref_to
# Data reference types (combine with XREF_USER!):
dr_O = ida_xref.dr_O # Offset
dr_W = ida_xref.dr_W # Write
dr_R = ida_xref.dr_R # Read
dr_T = ida_xref.dr_T # Text (names in manual operands)
dr_I = ida_xref.dr_I # Informational
add_dref = ida_xref.add_dref
del_dref = ida_xref.del_dref
get_first_dref_from = ida_xref.get_first_dref_from
get_next_dref_from = ida_xref.get_next_dref_from
get_first_dref_to = ida_xref.get_first_dref_to
get_next_dref_to = ida_xref.get_next_dref_to
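# Illustrative sketch (not part of the original IDC API): enumerate the code
# and data references originating at a hypothetical address using the
# wrappers above. Enumeration ends when BADADDR is returned.
def _example_xrefs(ea=0x401000):
    code_refs = []
    ref = get_first_cref_from(ea)
    while ref != BADADDR:
        code_refs.append(ref)
        ref = get_next_cref_from(ea, ref)
    data_refs = []
    ref = get_first_dref_from(ea)
    while ref != BADADDR:
        data_refs.append(ref)
        ref = get_next_dref_from(ea, ref)
    return code_refs, data_refs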
def get_xref_type():
"""
Return type of the last xref obtained by
    [RD]first/next functions.
@return: constants fl_* or dr_*
"""
raise DeprecatedIDCError("use XrefsFrom() XrefsTo() from idautils instead.")
#----------------------------------------------------------------------------
# F I L E I / O
#----------------------------------------------------------------------------
def fopen(f, mode):
raise DeprecatedIDCError("fopen() deprecated. Use Python file objects instead.")
def fclose(handle):
raise DeprecatedIDCError("fclose() deprecated. Use Python file objects instead.")
def filelength(handle):
raise DeprecatedIDCError("filelength() deprecated. Use Python file objects instead.")
def fseek(handle, offset, origin):
raise DeprecatedIDCError("fseek() deprecated. Use Python file objects instead.")
def ftell(handle):
raise DeprecatedIDCError("ftell() deprecated. Use Python file objects instead.")
def LoadFile(filepath, pos, ea, size):
"""
Load file into IDA database
@param filepath: path to input file
@param pos: position in the file
@param ea: linear address to load
@param size: number of bytes to load
@return: 0 - error, 1 - ok
"""
li = ida_diskio.open_linput(filepath, False)
if li:
retval = ida_loader.file2base(li, pos, ea, ea+size, False)
ida_diskio.close_linput(li)
return retval
else:
return 0
def loadfile(filepath, pos, ea, size): return LoadFile(filepath, pos, ea, size)
def SaveFile(filepath, pos, ea, size):
"""
Save from IDA database to file
@param filepath: path to output file
@param pos: position in the file
@param ea: linear address to save from
@param size: number of bytes to save
@return: 0 - error, 1 - ok
"""
    if os.path.isfile(filepath):
of = ida_diskio.fopenM(filepath)
else:
of = ida_diskio.fopenWB(filepath)
if of:
retval = ida_loader.base2file(of, pos, ea, ea+size)
ida_diskio.eclose(of)
return retval
else:
return 0
def savefile(filepath, pos, ea, size): return SaveFile(filepath, pos, ea, size)
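# Illustrative sketch (not part of the original IDC API): the file name and
# address below are hypothetical. LoadFile() copies raw file bytes into the
# database, SaveFile() dumps database bytes back out to disk.
def _example_file_io(path="patch.bin", ea=0x401000, size=0x100):
    if LoadFile(path, 0, ea, size) != 1:
        print("load failed")
    if SaveFile(path + ".out", 0, ea, size) != 1:
        print("save failed")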
def fgetc(handle):
raise DeprecatedIDCError("fgetc() deprecated. Use Python file objects instead.")
def fputc(byte, handle):
raise DeprecatedIDCError("fputc() deprecated. Use Python file objects instead.")
def fprintf(handle, format, *args):
raise DeprecatedIDCError("fprintf() deprecated. Use Python file objects instead.")
def readshort(handle, mostfirst):
raise DeprecatedIDCError("readshort() deprecated. Use Python file objects instead.")
def readlong(handle, mostfirst):
raise DeprecatedIDCError("readlong() deprecated. Use Python file objects instead.")
def writeshort(handle, word, mostfirst):
raise DeprecatedIDCError("writeshort() deprecated. Use Python file objects instead.")
def writelong(handle, dword, mostfirst):
raise DeprecatedIDCError("writelong() deprecated. Use Python file objects instead.")
def readstr(handle):
raise DeprecatedIDCError("readstr() deprecated. Use Python file objects instead.")
def writestr(handle, s):
raise DeprecatedIDCError("writestr() deprecated. Use Python file objects instead.")
# ----------------------------------------------------------------------------
# F U N C T I O N S
# ----------------------------------------------------------------------------
add_func = ida_funcs.add_func
del_func = ida_funcs.del_func
set_func_end = ida_funcs.set_func_end
def get_next_func(ea):
"""
Find next function
@param ea: any address belonging to the function
@return: BADADDR - no more functions
otherwise returns the next function start address
"""
func = ida_funcs.get_next_func(ea)
if not func:
return BADADDR
else:
return func.start_ea
def get_prev_func(ea):
"""
Find previous function
@param ea: any address belonging to the function
@return: BADADDR - no more functions
otherwise returns the previous function start address
"""
func = ida_funcs.get_prev_func(ea)
if not func:
return BADADDR
else:
return func.start_ea
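# Illustrative sketch (not part of the original IDC API): walk every function
# in the database with get_next_func(), starting the enumeration at address 0.
def _example_list_functions():
    funcs = []
    ea = get_next_func(0)
    while ea != BADADDR:
        funcs.append(ea)
        ea = get_next_func(ea)
    return funcs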
def get_func_attr(ea, attr):
"""
Get a function attribute
@param ea: any address belonging to the function
@param attr: one of FUNCATTR_... constants
@return: BADADDR - error otherwise returns the attribute value
"""
func = ida_funcs.get_func(ea)
return _IDC_GetAttr(func, _FUNCATTRMAP, attr) if func else BADADDR
def set_func_attr(ea, attr, value):
"""
Set a function attribute
@param ea: any address belonging to the function
@param attr: one of FUNCATTR_... constants
@param value: new value of the attribute
@return: 1-ok, 0-failed
"""
func = ida_funcs.get_func(ea)
if func:
_IDC_SetAttr(func, _FUNCATTRMAP, attr, value)
return ida_funcs.update_func(func)
return 0
FUNCATTR_START = 0 # readonly: function start address
FUNCATTR_END = 4 # readonly: function end address
FUNCATTR_FLAGS = 8 # function flags
FUNCATTR_FRAME = 16 # readonly: function frame id
FUNCATTR_FRSIZE = 20 # readonly: size of local variables
FUNCATTR_FRREGS = 24 # readonly: size of saved registers area
FUNCATTR_ARGSIZE = 28 # readonly: number of bytes purged from the stack
FUNCATTR_FPD = 32 # frame pointer delta
FUNCATTR_COLOR = 36 # function color code
FUNCATTR_OWNER = 16 # readonly: chunk owner (valid only for tail chunks)
FUNCATTR_REFQTY = 20 # readonly: number of chunk parents (valid only for tail chunks)
# Redefining the constants for ea64
if __EA64__:
FUNCATTR_START = 0
FUNCATTR_END = 8
FUNCATTR_FLAGS = 16
FUNCATTR_FRAME = 24
FUNCATTR_FRSIZE = 32
FUNCATTR_FRREGS = 40
FUNCATTR_ARGSIZE = 48
FUNCATTR_FPD = 56
FUNCATTR_COLOR = 64
FUNCATTR_OWNER = 24
FUNCATTR_REFQTY = 32
_FUNCATTRMAP = {
FUNCATTR_START : (True, 'start_ea'),
FUNCATTR_END : (True, 'end_ea'),
FUNCATTR_FLAGS : (False, 'flags'),
FUNCATTR_FRAME : (True, 'frame'),
FUNCATTR_FRSIZE : (True, 'frsize'),
FUNCATTR_FRREGS : (True, 'frregs'),
FUNCATTR_ARGSIZE : (True, 'argsize'),
FUNCATTR_FPD : (False, 'fpd'),
FUNCATTR_COLOR : (False, 'color'),
FUNCATTR_OWNER : (True, 'owner'),
FUNCATTR_REFQTY : (True, 'refqty')
}
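# Illustrative sketch (not part of the original IDC API): query a few
# FUNCATTR_... values for the function containing a hypothetical address.
def _example_func_attrs(ea=0x401000):
    start = get_func_attr(ea, FUNCATTR_START)
    if start == BADADDR:
        return None                               # no function at this address
    end = get_func_attr(ea, FUNCATTR_END)
    frsize = get_func_attr(ea, FUNCATTR_FRSIZE)   # size of local variables
    return start, end, frsize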
def get_func_flags(ea):
"""
Retrieve function flags
@param ea: any address belonging to the function
@return: -1 - function doesn't exist otherwise returns the flags
"""
func = ida_funcs.get_func(ea)
if not func:
return -1
else:
return func.flags
if ida_idaapi.uses_swig_builtins:
_scope = ida_funcs.func_t
else:
_scope = ida_funcs
FUNC_NORET = _scope.FUNC_NORET # function doesn't return
FUNC_FAR = _scope.FUNC_FAR # far function
FUNC_LIB = _scope.FUNC_LIB # library function
FUNC_STATIC = _scope.FUNC_STATICDEF # static function
FUNC_FRAME = _scope.FUNC_FRAME # function uses frame pointer (BP)
FUNC_USERFAR = _scope.FUNC_USERFAR # user has specified far-ness
# of the function
FUNC_HIDDEN = _scope.FUNC_HIDDEN # a hidden function
FUNC_THUNK = _scope.FUNC_THUNK # thunk (jump) function
FUNC_BOTTOMBP = _scope.FUNC_BOTTOMBP # BP points to the bottom of the stack frame
FUNC_NORET_PENDING = _scope.FUNC_NORET_PENDING # Function 'non-return' analysis
# must be performed. This flag is
# verified upon func_does_return()
FUNC_SP_READY = _scope.FUNC_SP_READY # SP-analysis has been performed
# If this flag is on, the stack
                                                  # change points should not be
# modified anymore. Currently this
# analysis is performed only for PC
FUNC_PURGED_OK = _scope.FUNC_PURGED_OK # 'argsize' field has been validated.
# If this bit is clear and 'argsize'
                                                  # is 0, then we do not know the real
# number of bytes removed from
# the stack. This bit is handled
# by the processor module.
FUNC_TAIL = _scope.FUNC_TAIL # This is a function tail.
# Other bits must be clear
# (except FUNC_HIDDEN)
def set_func_flags(ea, flags):
"""
Change function flags
@param ea: any address belonging to the function
@param flags: see get_func_flags() for explanations
@return: !=0 - ok
"""
func = ida_funcs.get_func(ea)
if not func:
return 0
else:
func.flags = flags
ida_funcs.update_func(func)
return 1
def get_func_name(ea):
"""
Retrieve function name
@param ea: any address belonging to the function
@return: null string - function doesn't exist
otherwise returns function name
"""
name = ida_funcs.get_func_name(ea)
if not name:
return ""
else:
return name
def get_func_cmt(ea, repeatable):
"""
Retrieve function comment
@param ea: any address belonging to the function
@param repeatable: 1: get repeatable comment
0: get regular comment
@return: function comment string
"""
func = ida_funcs.get_func(ea)
if not func:
return ""
else:
comment = ida_funcs.get_func_cmt(func, repeatable)
if not comment:
return ""
else:
return comment
def set_func_cmt(ea, cmt, repeatable):
"""
Set function comment
@param ea: any address belonging to the function
@param cmt: a function comment line
    @param repeatable: 1: set repeatable comment
                       0: set regular comment
"""
func = ida_funcs.get_func(ea)
if not func:
return None
else:
return ida_funcs.set_func_cmt(func, cmt, repeatable)
def choose_func(title):
"""
Ask the user to select a function
Arguments:
@param title: title of the dialog box
@return: -1 - user refused to select a function
otherwise returns the selected function start address
"""
f = ida_kernwin.choose_func(title, ida_idaapi.BADADDR)
return BADADDR if f is None else f.start_ea
def get_func_off_str(ea):
"""
Convert address to 'funcname+offset' string
@param ea: address to convert
@return: if the address belongs to a function then return a string
formed as 'name+offset' where 'name' is a function name
'offset' is offset within the function else return null string
"""
flags = ida_name.GNCN_NOCOLOR | ida_name.GNCN_REQFUNC
return ida_name.get_nice_colored_name(ea, flags)
def find_func_end(ea):
"""
Determine a new function boundaries
@param ea: starting address of a new function
@return: if a function already exists, then return its end address.
             If a function end cannot be determined, then return BADADDR
otherwise return the end address of the new function
"""
func = ida_funcs.func_t(ea)
res = ida_funcs.find_func_bounds(func, ida_funcs.FIND_FUNC_DEFINE)
if res == ida_funcs.FIND_FUNC_UNDEF:
return BADADDR
else:
return func.end_ea
def get_frame_id(ea):
"""
Get ID of function frame structure
@param ea: any address belonging to the function
    @return: ID of function frame or None.
             In order to access stack variables you need to use structure
             member manipulation functions with the obtained ID.
"""
frame = ida_frame.get_frame(ea)
if frame:
return frame.id
else:
return None
def get_frame_lvar_size(ea):
"""
Get size of local variables in function frame
@param ea: any address belonging to the function
@return: Size of local variables in bytes.
If the function doesn't have a frame, return 0
             If the function doesn't exist, return None
"""
return get_func_attr(ea, FUNCATTR_FRSIZE)
def get_frame_regs_size(ea):
"""
Get size of saved registers in function frame
@param ea: any address belonging to the function
@return: Size of saved registers in bytes.
If the function doesn't have a frame, return 0
This value is used as offset for BP (if FUNC_FRAME is set)
             If the function doesn't exist, return None
"""
return get_func_attr(ea, FUNCATTR_FRREGS)
def get_frame_args_size(ea):
"""
Get size of arguments in function frame which are purged upon return
@param ea: any address belonging to the function
@return: Size of function arguments in bytes.
If the function doesn't have a frame, return 0
             If the function doesn't exist, return -1
"""
return get_func_attr(ea, FUNCATTR_ARGSIZE)
def get_frame_size(ea):
"""
Get full size of function frame
@param ea: any address belonging to the function
@returns: Size of function frame in bytes.
This function takes into account size of local
variables + size of saved registers + size of
return address + size of function arguments
If the function doesn't have a frame, return size of
function return address in the stack.
              If the function doesn't exist, return 0
"""
func = ida_funcs.get_func(ea)
if not func:
return 0
else:
return ida_frame.get_frame_size(func)
def set_frame_size(ea, lvsize, frregs, argsize):
"""
Make function frame
@param ea: any address belonging to the function
@param lvsize: size of function local variables
@param frregs: size of saved registers
@param argsize: size of function arguments
@return: ID of function frame or -1
If the function did not have a frame, the frame
will be created. Otherwise the frame will be modified
"""
func = ida_funcs.get_func(ea)
if func is None:
return -1
frameid = ida_frame.add_frame(func, lvsize, frregs, argsize)
if not frameid:
if not ida_frame.set_frame_size(func, lvsize, frregs, argsize):
return -1
return func.frame
def get_spd(ea):
"""
Get current delta for the stack pointer
@param ea: end address of the instruction
               i.e. the last address of the instruction+1
@return: The difference between the original SP upon
entering the function and SP for the specified address
"""
func = ida_funcs.get_func(ea)
if not func:
return None
return ida_frame.get_spd(func, ea)
def get_sp_delta(ea):
"""
Get modification of SP made by the instruction
@param ea: end address of the instruction
               i.e. the last address of the instruction+1
@return: Get modification of SP made at the specified location
If the specified location doesn't contain a SP change point, return 0
Otherwise return delta of SP modification
"""
func = ida_funcs.get_func(ea)
if not func:
return None
return ida_frame.get_sp_delta(func, ea)
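# Illustrative sketch (not part of the original IDC API): combine the frame
# helpers above to summarize the stack layout of the function containing a
# hypothetical address.
def _example_frame_info(ea=0x401000):
    return {
        "frame_id": get_frame_id(ea),
        "locals": get_frame_lvar_size(ea),
        "saved_regs": get_frame_regs_size(ea),
        "args": get_frame_args_size(ea),
        "total": get_frame_size(ea),
        "spd": get_spd(ea),          # SP delta at this point, or None
    }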
# ----------------------------------------------------------------------------
# S T A C K
# ----------------------------------------------------------------------------
def add_auto_stkpnt(func_ea, ea, delta):
"""
    Add automatic SP register change point
@param func_ea: function start
@param ea: linear address where SP changes
usually this is the end of the instruction which
modifies the stack pointer (insn.ea+insn.size)
@param delta: difference between old and new values of SP
@return: 1-ok, 0-failed
"""
pfn = ida_funcs.get_func(func_ea)
if not pfn:
return 0
return ida_frame.add_auto_stkpnt(pfn, ea, delta)
add_user_stkpnt = ida_frame.add_user_stkpnt
def del_stkpnt(func_ea, ea):
"""
Delete SP register change point
@param func_ea: function start
@param ea: linear address
@return: 1-ok, 0-failed
"""
pfn = ida_funcs.get_func(func_ea)
if not pfn:
return 0
return ida_frame.del_stkpnt(pfn, ea)
def get_min_spd_ea(func_ea):
"""
Return the address with the minimal spd (stack pointer delta)
If there are no SP change points, then return BADADDR.
@param func_ea: function start
    @return: BADADDR - no such function
"""
pfn = ida_funcs.get_func(func_ea)
if not pfn:
return BADADDR
return ida_frame.get_min_spd_ea(pfn)
recalc_spd = ida_frame.recalc_spd
# ----------------------------------------------------------------------------
# E N T R Y P O I N T S
# ----------------------------------------------------------------------------
get_entry_qty = ida_entry.get_entry_qty
add_entry = ida_entry.add_entry
get_entry_ordinal = ida_entry.get_entry_ordinal
get_entry = ida_entry.get_entry
get_entry_name = ida_entry.get_entry_name
rename_entry = ida_entry.rename_entry
# ----------------------------------------------------------------------------
# F I X U P S
# ----------------------------------------------------------------------------
get_next_fixup_ea = ida_fixup.get_next_fixup_ea
get_prev_fixup_ea = ida_fixup.get_prev_fixup_ea
def get_fixup_target_type(ea):
"""
Get fixup target type
@param ea: address to get information about
@return: 0 - no fixup at the specified address
otherwise returns fixup type
"""
fd = ida_fixup.fixup_data_t()
if not fd.get(ea):
return 0
return fd.get_type()
FIXUP_OFF8 = 13 # 8-bit offset.
FIXUP_OFF16 = 1 # 16-bit offset.
FIXUP_SEG16 = 2 # 16-bit base--logical segment base (selector).
FIXUP_PTR32 = 3 # 32-bit long pointer (16-bit base:16-bit
# offset).
FIXUP_OFF32 = 4 # 32-bit offset.
FIXUP_PTR48 = 5 # 48-bit pointer (16-bit base:32-bit offset).
FIXUP_HI8 = 6 # high 8 bits of 16bit offset
FIXUP_HI16 = 7 # high 16 bits of 32bit offset
FIXUP_LOW8 = 8 # low 8 bits of 16bit offset
FIXUP_LOW16 = 9 # low 16 bits of 32bit offset
FIXUP_OFF64 = 12 # 64-bit offset
FIXUP_CUSTOM = 0x8000 # fixups with this bit are processed by
# processor module/plugin
def get_fixup_target_flags(ea):
"""
Get fixup target flags
@param ea: address to get information about
@return: 0 - no fixup at the specified address
otherwise returns fixup target flags
"""
fd = ida_fixup.fixup_data_t()
if not fd.get(ea):
return 0
return fd.get_flags()
FIXUPF_REL = 0x1 # fixup is relative to the linear address
FIXUPF_EXTDEF = 0x2 # target is a location (otherwise - segment)
FIXUPF_UNUSED = 0x4 # fixup is ignored by IDA
FIXUPF_CREATED = 0x8 # fixup was not present in the input file
def get_fixup_target_sel(ea):
"""
Get fixup target selector
@param ea: address to get information about
@return: BADSEL - no fixup at the specified address
otherwise returns fixup target selector
"""
fd = ida_fixup.fixup_data_t()
if not fd.get(ea):
return BADSEL
return fd.sel
def get_fixup_target_off(ea):
"""
Get fixup target offset
@param ea: address to get information about
@return: BADADDR - no fixup at the specified address
otherwise returns fixup target offset
"""
fd = ida_fixup.fixup_data_t()
if not fd.get(ea):
return BADADDR
return fd.off
def get_fixup_target_dis(ea):
"""
Get fixup target displacement
@param ea: address to get information about
@return: 0 - no fixup at the specified address
otherwise returns fixup target displacement
"""
fd = ida_fixup.fixup_data_t()
if not fd.get(ea):
return 0
return fd.displacement
def set_fixup(ea, fixuptype, fixupflags, targetsel, targetoff, displ):
"""
Set fixup information
@param ea: address to set fixup information about
@param fixuptype: fixup type. see get_fixup_target_type()
for possible fixup types.
@param fixupflags: fixup flags. see get_fixup_target_flags()
for possible fixup types.
@param targetsel: target selector
@param targetoff: target offset
@param displ: displacement
@return: none
"""
fd = ida_fixup.fixup_data_t(fixuptype, fixupflags)
fd.sel = targetsel
fd.off = targetoff
fd.displacement = displ
fd.set(ea)
del_fixup = ida_fixup.del_fixup
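# Illustrative sketch (not part of the original IDC API): walk all fixups in
# the database and collect their address, type and target offset.
def _example_list_fixups():
    fixups = []
    ea = get_next_fixup_ea(0)
    while ea != BADADDR:
        fixups.append((ea, get_fixup_target_type(ea), get_fixup_target_off(ea)))
        ea = get_next_fixup_ea(ea)
    return fixups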
#----------------------------------------------------------------------------
# M A R K E D P O S I T I O N S
#----------------------------------------------------------------------------
put_bookmark = ida_idc.mark_position
get_bookmark = ida_idc.get_marked_pos
get_bookmark_desc = ida_idc.get_mark_comment
# ----------------------------------------------------------------------------
# S T R U C T U R E S
# ----------------------------------------------------------------------------
get_struc_qty = ida_struct.get_struc_qty
get_first_struc_idx = ida_struct.get_first_struc_idx
get_last_struc_idx = ida_struct.get_last_struc_idx
get_next_struc_idx = ida_struct.get_next_struc_idx
get_prev_struc_idx = ida_struct.get_prev_struc_idx
get_struc_idx = ida_struct.get_struc_idx
get_struc_by_idx = ida_struct.get_struc_by_idx
get_struc_id = ida_struct.get_struc_id
get_struc_name = ida_struct.get_struc_name
get_struc_cmt = ida_struct.get_struc_cmt
get_struc_size = ida_struct.get_struc_size
def get_member_qty(sid):
"""
Get number of members of a structure
@param sid: structure type ID
    @return: -1 if bad structure type ID is passed, otherwise
             returns number of members.
@note: Union members are, in IDA's internals, located
at subsequent byte offsets: member 0 -> offset 0x0,
member 1 -> offset 0x1, etc...
"""
s = ida_struct.get_struc(sid)
return -1 if not s else s.memqty
def get_member_id(sid, member_offset):
"""
@param sid: structure type ID
    @param member_offset: member offset. The offset can be
                          any offset in the member. For example,
                          if a member is 4 bytes long and starts
                          at offset 2, then 2,3,4,5 denote
                          the same structure member.
@return: -1 if bad structure type ID is passed or there is
no member at the specified offset.
otherwise returns the member id.
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
m = ida_struct.get_member(s, member_offset)
if not m:
return -1
return m.id
def get_prev_offset(sid, offset):
"""
Get previous offset in a structure
@param sid: structure type ID
@param offset: current offset
@return: -1 if bad structure type ID is passed,
ida_idaapi.BADADDR if no (more) offsets in the structure,
otherwise returns previous offset in a structure.
@note: IDA allows 'holes' between members of a
structure. It treats these 'holes'
as unnamed arrays of bytes.
This function returns a member offset or a hole offset.
It will return size of the structure if input
'offset' is bigger than the structure size.
@note: Union members are, in IDA's internals, located
at subsequent byte offsets: member 0 -> offset 0x0,
member 1 -> offset 0x1, etc...
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
return ida_struct.get_struc_prev_offset(s, offset)
def get_next_offset(sid, offset):
"""
Get next offset in a structure
@param sid: structure type ID
@param offset: current offset
@return: -1 if bad structure type ID is passed,
ida_idaapi.BADADDR if no (more) offsets in the structure,
otherwise returns next offset in a structure.
@note: IDA allows 'holes' between members of a
structure. It treats these 'holes'
as unnamed arrays of bytes.
This function returns a member offset or a hole offset.
It will return size of the structure if input
'offset' belongs to the last member of the structure.
@note: Union members are, in IDA's internals, located
at subsequent byte offsets: member 0 -> offset 0x0,
member 1 -> offset 0x1, etc...
"""
s = ida_struct.get_struc(sid)
return -1 if not s else ida_struct.get_struc_next_offset(s, offset)
def get_first_member(sid):
"""
Get offset of the first member of a structure
@param sid: structure type ID
@return: -1 if bad structure type ID is passed,
ida_idaapi.BADADDR if structure has no members,
otherwise returns offset of the first member.
@note: IDA allows 'holes' between members of a
structure. It treats these 'holes'
as unnamed arrays of bytes.
@note: Union members are, in IDA's internals, located
at subsequent byte offsets: member 0 -> offset 0x0,
member 1 -> offset 0x1, etc...
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
return ida_struct.get_struc_first_offset(s)
def get_last_member(sid):
"""
Get offset of the last member of a structure
@param sid: structure type ID
@return: -1 if bad structure type ID is passed,
ida_idaapi.BADADDR if structure has no members,
otherwise returns offset of the last member.
@note: IDA allows 'holes' between members of a
structure. It treats these 'holes'
as unnamed arrays of bytes.
@note: Union members are, in IDA's internals, located
at subsequent byte offsets: member 0 -> offset 0x0,
member 1 -> offset 0x1, etc...
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
return ida_struct.get_struc_last_offset(s)
def get_member_offset(sid, member_name):
"""
Get offset of a member of a structure by the member name
@param sid: structure type ID
@param member_name: name of structure member
@return: -1 if bad structure type ID is passed
or no such member in the structure
otherwise returns offset of the specified member.
@note: Union members are, in IDA's internals, located
at subsequent byte offsets: member 0 -> offset 0x0,
member 1 -> offset 0x1, etc...
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
m = ida_struct.get_member_by_name(s, member_name)
if not m:
return -1
return m.get_soff()
def get_member_name(sid, member_offset):
"""
Get name of a member of a structure
@param sid: structure type ID
@param member_offset: member offset. The offset can be
any offset in the member. For example,
                          if a member is 4 bytes long and starts
at offset 2, then 2,3,4,5 denote
the same structure member.
@return: None if bad structure type ID is passed
or no such member in the structure
otherwise returns name of the specified member.
"""
s = ida_struct.get_struc(sid)
if not s:
return None
m = ida_struct.get_member(s, member_offset)
if not m:
return None
return ida_struct.get_member_name(m.id)
def get_member_cmt(sid, member_offset, repeatable):
"""
Get comment of a member
@param sid: structure type ID
@param member_offset: member offset. The offset can be
any offset in the member. For example,
                          if a member is 4 bytes long and starts
at offset 2, then 2,3,4,5 denote
the same structure member.
@param repeatable: 1: get repeatable comment
0: get regular comment
@return: None if bad structure type ID is passed
or no such member in the structure
otherwise returns comment of the specified member.
"""
s = ida_struct.get_struc(sid)
if not s:
return None
m = ida_struct.get_member(s, member_offset)
if not m:
return None
return ida_struct.get_member_cmt(m.id, repeatable)
def get_member_size(sid, member_offset):
"""
Get size of a member
@param sid: structure type ID
@param member_offset: member offset. The offset can be
any offset in the member. For example,
                          if a member is 4 bytes long and starts
at offset 2, then 2,3,4,5 denote
the same structure member.
@return: None if bad structure type ID is passed,
or no such member in the structure
otherwise returns size of the specified
member in bytes.
"""
s = ida_struct.get_struc(sid)
if not s:
return None
m = ida_struct.get_member(s, member_offset)
if not m:
return None
return ida_struct.get_member_size(m)
def get_member_flag(sid, member_offset):
"""
Get type of a member
@param sid: structure type ID
@param member_offset: member offset. The offset can be
any offset in the member. For example,
                          if a member is 4 bytes long and starts
at offset 2, then 2,3,4,5 denote
the same structure member.
@return: -1 if bad structure type ID is passed
or no such member in the structure
otherwise returns type of the member, see bit
definitions above. If the member type is a structure
             then get_member_strid() should be used to
get the structure type id.
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
m = ida_struct.get_member(s, member_offset)
return -1 if not m else m.flag
def get_member_strid(sid, member_offset):
"""
Get structure id of a member
@param sid: structure type ID
@param member_offset: member offset. The offset can be
any offset in the member. For example,
                          if a member is 4 bytes long and starts
at offset 2, then 2,3,4,5 denote
the same structure member.
@return: -1 if bad structure type ID is passed
or no such member in the structure
otherwise returns structure id of the member.
If the current member is not a structure, returns -1.
"""
s = ida_struct.get_struc(sid)
if not s:
return -1
m = ida_struct.get_member(s, member_offset)
if not m:
return -1
cs = ida_struct.get_sptr(m)
if cs:
return cs.id
else:
return -1
def is_union(sid):
"""
Is a structure a union?
@param sid: structure type ID
@return: 1: yes, this is a union id
0: no
@note: Unions are a special kind of structures
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
return s.is_union()
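# Illustrative sketch (not part of the original IDC API): dump the members of
# a structure looked up by name. "my_struct" is a hypothetical name; the loop
# also visits the unnamed 'holes' mentioned in the docstrings above.
def _example_dump_struct(name="my_struct"):
    sid = get_struc_id(name)
    if sid == BADADDR:
        return []
    members = []
    off = get_first_member(sid)
    end = get_struc_size(sid)
    while off != BADADDR and off < end:
        members.append((off, get_member_name(sid, off), get_member_size(sid, off)))
        off = get_next_offset(sid, off)
    return members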
def add_struc(index, name, is_union):
"""
Define a new structure type
@param index: index of new structure type
If another structure has the specified index,
then index of that structure and all other
                   structures will be incremented, freeing the specified
index. If index is == -1, then the biggest index
number will be used.
See get_first_struc_idx() for the explanation of
structure indices and IDs.
@param name: name of the new structure type.
@param is_union: 0: structure
1: union
@return: -1 if can't define structure type because of
bad structure name: the name is ill-formed or is
already used in the program.
otherwise returns ID of the new structure type
"""
if index == -1:
index = BADADDR
return ida_struct.add_struc(index, name, is_union)
def del_struc(sid):
"""
Delete a structure type
@param sid: structure type ID
@return: 0 if bad structure type ID is passed
1 otherwise the structure type is deleted. All data
and other structure types referencing to the
deleted structure type will be displayed as array
of bytes.
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
return ida_struct.del_struc(s)
def set_struc_idx(sid, index):
"""
Change structure index
@param sid: structure type ID
@param index: new index of the structure
@return: != 0 - ok
@note: See get_first_struc_idx() for the explanation of
structure indices and IDs.
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
return ida_struct.set_struc_idx(s, index)
set_struc_name = ida_struct.set_struc_name
set_struc_cmt = ida_struct.set_struc_cmt
def add_struc_member(sid, name, offset, flag, typeid, nbytes, target=-1, tdelta=0, reftype=REF_OFF32):
"""
Add structure member
@param sid: structure type ID
@param name: name of the new member
@param offset: offset of the new member
-1 means to add at the end of the structure
@param flag: type of the new member. Should be one of
FF_BYTE..FF_PACKREAL (see above) combined with FF_DATA
@param typeid: if is_struct(flag) then typeid specifies the structure id for the member
if is_off0(flag) then typeid specifies the offset base.
if is_strlit(flag) then typeid specifies the string type (STRTYPE_...).
if is_stroff(flag) then typeid specifies the structure id
if is_enum(flag) then typeid specifies the enum id
if is_custom(flags) then typeid specifies the dtid and fid: dtid|(fid<<16)
Otherwise typeid should be -1.
@param nbytes: number of bytes in the new member
@param target: target address of the offset expr. You may specify it as
-1, ida will calculate it itself
@param tdelta: offset target delta. usually 0
@param reftype: see REF_... definitions
@note: The remaining arguments are allowed only if is_off0(flag) and you want
to specify a complex offset expression
@return: 0 - ok, otherwise error code (one of STRUC_ERROR_*)
"""
if is_off0(flag):
return eval_idc('add_struc_member(%d, "%s", %d, %d, %d, %d, %d, %d, %d);' % (sid, ida_kernwin.str2user(name or ""), offset, flag, typeid, nbytes,
target, tdelta, reftype))
else:
return eval_idc('add_struc_member(%d, "%s", %d, %d, %d, %d);' % (sid, ida_kernwin.str2user(name or ""), offset, flag, typeid, nbytes))
STRUC_ERROR_MEMBER_NAME = -1 # already has member with this name (bad name)
STRUC_ERROR_MEMBER_OFFSET = -2 # already has member at this offset
STRUC_ERROR_MEMBER_SIZE = -3 # bad number of bytes or bad sizeof(type)
STRUC_ERROR_MEMBER_TINFO = -4 # bad typeid parameter
STRUC_ERROR_MEMBER_STRUCT = -5 # bad struct id (the 1st argument)
STRUC_ERROR_MEMBER_UNIVAR = -6 # unions can't have variable sized members
STRUC_ERROR_MEMBER_VARLAST = -7 # variable sized member should be the last member in the structure
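# Illustrative sketch (not part of the original IDC API): create a structure
# and append two members. The structure and member names are hypothetical;
# FF_DWORD/FF_WORD/FF_DATA are the data-type flags defined earlier in this
# module, and offset -1 means "append at the end".
def _example_create_struct():
    sid = add_struc(-1, "demo_struct", 0)
    if sid == BADADDR:
        return BADADDR
    add_struc_member(sid, "field_dword", -1, FF_DWORD | FF_DATA, -1, 4)
    add_struc_member(sid, "field_word", -1, FF_WORD | FF_DATA, -1, 2)
    return sid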
def del_struc_member(sid, member_offset):
"""
Delete structure member
@param sid: structure type ID
@param member_offset: offset of the member
@return: != 0 - ok.
@note: IDA allows 'holes' between members of a
structure. It treats these 'holes'
as unnamed arrays of bytes.
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
return ida_struct.del_struc_member(s, member_offset)
def set_member_name(sid, member_offset, name):
"""
Change structure member name
@param sid: structure type ID
@param member_offset: offset of the member
@param name: new name of the member
@return: != 0 - ok.
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
return ida_struct.set_member_name(s, member_offset, name)
def set_member_type(sid, member_offset, flag, typeid, nitems, target=-1, tdelta=0, reftype=REF_OFF32):
"""
Change structure member type
@param sid: structure type ID
@param member_offset: offset of the member
@param flag: new type of the member. Should be one of
FF_BYTE..FF_PACKREAL (see above) combined with FF_DATA
@param typeid: if is_struct(flag) then typeid specifies the structure id for the member
if is_off0(flag) then typeid specifies the offset base.
if is_strlit(flag) then typeid specifies the string type (STRTYPE_...).
if is_stroff(flag) then typeid specifies the structure id
if is_enum(flag) then typeid specifies the enum id
if is_custom(flags) then typeid specifies the dtid and fid: dtid|(fid<<16)
Otherwise typeid should be -1.
@param nitems: number of items in the member
@param target: target address of the offset expr. You may specify it as
-1, ida will calculate it itself
@param tdelta: offset target delta. usually 0
@param reftype: see REF_... definitions
@note: The remaining arguments are allowed only if is_off0(flag) and you want
to specify a complex offset expression
@return: !=0 - ok.
"""
if is_off0(flag):
return eval_idc('set_member_type(%d, %d, %d, %d, %d, %d, %d, %d);' % (sid, member_offset, flag, typeid, nitems,
target, tdelta, reftype))
else:
return eval_idc('set_member_type(%d, %d, %d, %d, %d);' % (sid, member_offset, flag, typeid, nitems))
def set_member_cmt(sid, member_offset, comment, repeatable):
"""
Change structure member comment
@param sid: structure type ID
@param member_offset: offset of the member
@param comment: new comment of the structure member
@param repeatable: 1: change repeatable comment
0: change regular comment
@return: != 0 - ok
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
m = ida_struct.get_member(s, member_offset)
if not m:
return 0
return ida_struct.set_member_cmt(m, comment, repeatable)
def expand_struc(sid, offset, delta, recalc):
"""
Expand or shrink a structure type
    @param sid: structure type ID
@param offset: offset in the structure
@param delta: how many bytes to add or remove
@param recalc: recalculate the locations where the structure
type is used
@return: != 0 - ok
"""
s = ida_struct.get_struc(sid)
if not s:
return 0
return ida_struct.expand_struc(s, offset, delta, recalc)
def get_fchunk_attr(ea, attr):
"""
Get a function chunk attribute
@param ea: any address in the chunk
@param attr: one of: FUNCATTR_START, FUNCATTR_END, FUNCATTR_OWNER, FUNCATTR_REFQTY
@return: desired attribute or -1
"""
func = ida_funcs.get_fchunk(ea)
return _IDC_GetAttr(func, _FUNCATTRMAP, attr) if func else BADADDR
def set_fchunk_attr(ea, attr, value):
"""
Set a function chunk attribute
@param ea: any address in the chunk
@param attr: only FUNCATTR_START, FUNCATTR_END, FUNCATTR_OWNER
@param value: desired value
@return: 0 if failed, 1 if success
"""
if attr in [ FUNCATTR_START, FUNCATTR_END, FUNCATTR_OWNER ]:
chunk = ida_funcs.get_fchunk(ea)
if chunk:
_IDC_SetAttr(chunk, _FUNCATTRMAP, attr, value)
return ida_funcs.update_func(chunk)
return 0
get_fchunk_referer = ida_funcs.get_fchunk_referer
def get_next_fchunk(ea):
"""
Get next function chunk
@param ea: any address
@return: the starting address of the next function chunk or BADADDR
@note: This function enumerates all chunks of all functions in the database
"""
func = ida_funcs.get_next_fchunk(ea)
if func:
return func.start_ea
else:
return BADADDR
def get_prev_fchunk(ea):
"""
Get previous function chunk
@param ea: any address
@return: the starting address of the function chunk or BADADDR
@note: This function enumerates all chunks of all functions in the database
"""
func = ida_funcs.get_prev_fchunk(ea)
if func:
return func.start_ea
else:
return BADADDR
def append_func_tail(funcea, ea1, ea2):
"""
Append a function chunk to the function
@param funcea: any address in the function
@param ea1: start of function tail
@param ea2: end of function tail
@return: 0 if failed, 1 if success
@note: If a chunk exists at the specified addresses, it must have exactly
the specified boundaries
"""
func = ida_funcs.get_func(funcea)
if not func:
return 0
else:
return ida_funcs.append_func_tail(func, ea1, ea2)
def remove_fchunk(funcea, tailea):
"""
Remove a function chunk from the function
@param funcea: any address in the function
@param tailea: any address in the function chunk to remove
@return: 0 if failed, 1 if success
"""
func = ida_funcs.get_func(funcea)
if not func:
return 0
else:
return ida_funcs.remove_func_tail(func, tailea)
def set_tail_owner(tailea, funcea):
"""
Change the function chunk owner
@param tailea: any address in the function chunk
@param funcea: the starting address of the new owner
@return: False if failed, True if success
@note: The new owner must already have the chunk appended before the call
"""
tail = ida_funcs.get_fchunk(tailea)
if not tail:
return False
else:
return ida_funcs.set_tail_owner(tail, funcea)
def first_func_chunk(funcea):
"""
Get the first function chunk of the specified function
@param funcea: any address in the function
@return: the function entry point or BADADDR
@note: This function returns the first (main) chunk of the specified function
"""
func = ida_funcs.get_func(funcea)
fci = ida_funcs.func_tail_iterator_t(func, funcea)
if fci.main():
return fci.chunk().start_ea
else:
return BADADDR
def next_func_chunk(funcea, tailea):
"""
Get the next function chunk of the specified function
@param funcea: any address in the function
@param tailea: any address in the current chunk
@return: the starting address of the next function chunk or BADADDR
@note: This function returns the next chunk of the specified function
"""
func = ida_funcs.get_func(funcea)
fci = ida_funcs.func_tail_iterator_t(func, funcea)
if not fci.main():
return BADADDR
# Iterate and try to find the current chunk
found = False
while True:
if fci.chunk().start_ea <= tailea and \
fci.chunk().end_ea > tailea:
found = True
break
if not next(fci):
break
# Return the next chunk, if there is one
if found and next(fci):
return fci.chunk().start_ea
else:
return BADADDR
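# Illustrative sketch (not part of the original IDC API): enumerate all chunks
# (the main chunk plus any tails) of the function at a hypothetical address.
def _example_func_chunks(funcea=0x401000):
    chunks = []
    chunk = first_func_chunk(funcea)
    while chunk != BADADDR:
        chunks.append(chunk)
        chunk = next_func_chunk(funcea, chunk)
    return chunks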
# ----------------------------------------------------------------------------
# E N U M S
# ----------------------------------------------------------------------------
get_enum_qty = ida_enum.get_enum_qty
getn_enum = ida_enum.getn_enum
get_enum_idx = ida_enum.get_enum_idx
get_enum = ida_enum.get_enum
get_enum_name = ida_enum.get_enum_name
get_enum_cmt = ida_enum.get_enum_cmt
get_enum_size = ida_enum.get_enum_size
get_enum_width = ida_enum.get_enum_width
get_enum_flag = ida_enum.get_enum_flag
get_enum_member_by_name = ida_enum.get_enum_member_by_name
get_enum_member_value = ida_enum.get_enum_member_value
get_enum_member_bmask = ida_enum.get_enum_member_bmask
get_enum_member_enum = ida_enum.get_enum_member_enum
def get_enum_member(enum_id, value, serial, bmask):
"""
Get id of constant
@param enum_id: id of enum
@param value: value of constant
@param serial: serial number of the constant in the
enumeration. See op_enum() for details.
@param bmask: bitmask of the constant
ordinary enums accept only ida_enum.DEFMASK as a bitmask
@return: id of constant or -1 if error
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_enum_member(enum_id, value, serial, bmask)
get_first_bmask = ida_enum.get_first_bmask
get_last_bmask = ida_enum.get_last_bmask
get_next_bmask = ida_enum.get_next_bmask
get_prev_bmask = ida_enum.get_prev_bmask
def get_bmask_name(enum_id, bmask):
"""
Get bitmask name (only for bitfields)
@param enum_id: id of enum
@param bmask: bitmask of the constant
@return: name of bitmask or None
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_bmask_name(enum_id, bmask)
def get_bmask_cmt(enum_id, bmask, repeatable):
"""
Get bitmask comment (only for bitfields)
@param enum_id: id of enum
@param bmask: bitmask of the constant
@param repeatable: type of comment, 0-regular, 1-repeatable
@return: comment attached to bitmask or None
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_bmask_cmt(enum_id, bmask, repeatable)
def set_bmask_name(enum_id, bmask, name):
"""
Set bitmask name (only for bitfields)
@param enum_id: id of enum
@param bmask: bitmask of the constant
@param name: name of bitmask
@return: 1-ok, 0-failed
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.set_bmask_name(enum_id, bmask, name)
def set_bmask_cmt(enum_id, bmask, cmt, repeatable):
"""
Set bitmask comment (only for bitfields)
@param enum_id: id of enum
@param bmask: bitmask of the constant
@param cmt: comment
repeatable - type of comment, 0-regular, 1-repeatable
@return: 1-ok, 0-failed
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.set_bmask_cmt(enum_id, bmask, cmt, repeatable)
def get_first_enum_member(enum_id, bmask):
"""
Get first constant in the enum
@param enum_id: id of enum
@param bmask: bitmask of the constant (ordinary enums accept only ida_enum.DEFMASK as a bitmask)
    @return: value of constant or idaapi.BADNODE if no constants are defined
All constants are sorted by their values as unsigned longs.
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_first_enum_member(enum_id, bmask)
def get_last_enum_member(enum_id, bmask):
"""
Get last constant in the enum
@param enum_id: id of enum
@param bmask: bitmask of the constant (ordinary enums accept only ida_enum.DEFMASK as a bitmask)
    @return: value of constant or idaapi.BADNODE if no constants are defined
All constants are sorted by their values
as unsigned longs.
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_last_enum_member(enum_id, bmask)
def get_next_enum_member(enum_id, value, bmask):
"""
Get next constant in the enum
@param enum_id: id of enum
@param bmask: bitmask of the constant ordinary enums accept only ida_enum.DEFMASK as a bitmask
@param value: value of the current constant
@return: value of a constant with value higher than the specified
             value, or idaapi.BADNODE if no such constants exist.
All constants are sorted by their values as unsigned longs.
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_next_enum_member(enum_id, value, bmask)
def get_prev_enum_member(enum_id, value, bmask):
"""
Get prev constant in the enum
@param enum_id: id of enum
@param bmask : bitmask of the constant
ordinary enums accept only ida_enum.DEFMASK as a bitmask
@param value: value of the current constant
@return: value of a constant with value lower than the specified
             value, or idaapi.BADNODE if no such constants exist.
All constants are sorted by their values as unsigned longs.
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.get_prev_enum_member(enum_id, value, bmask)
def get_enum_member_name(const_id):
"""
Get name of a constant
@param const_id: id of const
Returns: name of constant
"""
name = ida_enum.get_enum_member_name(const_id)
if not name:
return ""
else:
return name
def get_enum_member_cmt(const_id, repeatable):
"""
Get comment of a constant
@param const_id: id of const
@param repeatable: 0:get regular comment, 1:get repeatable comment
@return: comment string
"""
cmt = ida_enum.get_enum_member_cmt(const_id, repeatable)
if not cmt:
return ""
else:
return cmt
def add_enum(idx, name, flag):
"""
Add a new enum type
@param idx: serial number of the new enum.
If another enum with the same serial number
exists, then all enums with serial
numbers >= the specified idx get their
serial numbers incremented (in other words,
the new enum is put in the middle of the list of enums).
If idx >= get_enum_qty() or idx == idaapi.BADNODE
then the new enum is created at the end of
the list of enums.
@param name: name of the enum.
@param flag: flags for representation of numeric constants
in the definition of enum.
@return: id of new enum or BADADDR
"""
if idx < 0:
idx = idx & SIZE_MAX
return ida_enum.add_enum(idx, name, flag)
del_enum = ida_enum.del_enum
set_enum_idx = ida_enum.set_enum_idx
set_enum_name = ida_enum.set_enum_name
set_enum_cmt = ida_enum.set_enum_cmt
set_enum_flag = ida_enum.set_enum_flag
set_enum_bf = ida_enum.set_enum_bf
set_enum_width = ida_enum.set_enum_width
is_bf = ida_enum.is_bf
def add_enum_member(enum_id, name, value, bmask):
"""
Add a member of enum - a symbolic constant
@param enum_id: id of enum
@param name: name of symbolic constant. Must be unique in the program.
@param value: value of symbolic constant.
@param bmask: bitmask of the constant
ordinary enums accept only ida_enum.DEFMASK as a bitmask
all bits set in value should be set in bmask too
@return: 0-ok, otherwise error code (one of ENUM_MEMBER_ERROR_*)
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.add_enum_member(enum_id, name, value, bmask)
ENUM_MEMBER_ERROR_NAME = ida_enum.ENUM_MEMBER_ERROR_NAME # already have member with this name (bad name)
ENUM_MEMBER_ERROR_VALUE = ida_enum.ENUM_MEMBER_ERROR_VALUE # already have member with this value
ENUM_MEMBER_ERROR_ENUM = ida_enum.ENUM_MEMBER_ERROR_ENUM # bad enum id
ENUM_MEMBER_ERROR_MASK = ida_enum.ENUM_MEMBER_ERROR_MASK # bad bmask
ENUM_MEMBER_ERROR_ILLV = ida_enum.ENUM_MEMBER_ERROR_ILLV # bad bmask and value combination (~bmask & value != 0)
def del_enum_member(enum_id, value, serial, bmask):
"""
Delete a member of enum - a symbolic constant
@param enum_id: id of enum
@param value: value of symbolic constant.
@param serial: serial number of the constant in the
                   enumeration. See op_enum() for details.
@param bmask: bitmask of the constant ordinary enums accept
only ida_enum.DEFMASK as a bitmask
@return: 1-ok, 0-failed
"""
if bmask < 0:
bmask &= BADADDR
return ida_enum.del_enum_member(enum_id, value, serial, bmask)
set_enum_member_name = ida_enum.set_enum_member_name
set_enum_member_cmt = ida_enum.set_enum_member_cmt
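# Illustrative sketch (not part of the original IDC API): create an enum with
# two symbolic constants. The names are hypothetical; a bmask of -1 is mapped
# to the default mask (ida_enum.DEFMASK) for ordinary, non-bitfield enums.
def _example_create_enum():
    eid = add_enum(-1, "demo_state", 0)
    if eid == BADADDR:
        return BADADDR
    add_enum_member(eid, "STATE_IDLE", 0, -1)
    add_enum_member(eid, "STATE_RUNNING", 1, -1)
    return eid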
#----------------------------------------------------------------------------
# A R R A Y S I N I D C
#----------------------------------------------------------------------------
_IDC_ARRAY_PREFIX = "$ idc_array "
def __l2m1(v):
"""
Long to minus 1: If the 'v' appears to be the
'signed long' version of -1, then return -1.
Otherwise, return 'v'.
"""
if v == ida_netnode.BADNODE:
return -1
else:
return v
AR_LONG = ida_netnode.atag
"""Array of longs"""
AR_STR = ida_netnode.stag
"""Array of strings"""
class __dummy_netnode(object):
"""
Implements, in an "always failing" fashion, the
netnode functions that are necessary for the
array-related functions.
The sole purpose of this singleton class is to
serve as a placeholder for netnode-manipulating
functions, that don't want to each have to perform
checks on the existence of the netnode.
(..in other words: it avoids a bunch of if/else's).
See __GetArrayById() for more info.
"""
def rename(self, *args): return 0
def kill(self, *args): pass
def index(self, *args): return -1
def altset(self, *args): return 0
def supset(self, *args): return 0
def altval(self, *args): return 0
def supval(self, *args): return 0
def altdel(self, *args): return 0
def supdel(self, *args): return 0
def altfirst(self, *args): return -1
def supfirst(self, *args): return -1
def altlast(self, *args): return -1
def suplast(self, *args): return -1
def altnext(self, *args): return -1
def supnext(self, *args): return -1
def altprev(self, *args): return -1
def supprev(self, *args): return -1
def hashset(self, *args): return 0
def hashval(self, *args): return 0
def hashstr(self, *args): return 0
def hashstr_buf(self, *args): return 0
def hashset_idx(self, *args): return 0
def hashset_buf(self, *args): return 0
def hashval_long(self, *args): return 0
def hashdel(self, *args): return 0
def hashfirst(self, *args): return 0
def hashnext(self, *args): return 0
def hashprev(self, *args): return 0
def hashlast(self, *args): return 0
__dummy_netnode.instance = __dummy_netnode()
def __GetArrayById(array_id):
"""
Get an array, by its ID.
    This (internal) wrapper around 'ida_netnode.netnode(array_id)'
    will ensure a certain safety around the retrieval of
    arrays (by catching quite unexpected exceptions,
    and making sure we don't create & use 'transient' netnodes).
@param array_id: A positive, valid array ID.
"""
try:
node = ida_netnode.netnode(array_id)
nodename = node.get_name()
if nodename is None or not nodename.startswith(_IDC_ARRAY_PREFIX):
return __dummy_netnode.instance
else:
return node
except TypeError:
return __dummy_netnode.instance
except NotImplementedError:
return __dummy_netnode.instance
def create_array(name):
"""
Create array.
@param name: The array name.
@return: -1 in case of failure, a valid array_id otherwise.
"""
node = ida_netnode.netnode()
res = node.create(_IDC_ARRAY_PREFIX + name)
if res == False:
return -1
else:
return node.index()
def get_array_id(name):
"""
Get array array_id, by name.
@param name: The array name.
@return: -1 in case of failure (i.e., no array with that
name exists), a valid array_id otherwise.
"""
return __l2m1(ida_netnode.netnode(_IDC_ARRAY_PREFIX + name, 0, False).index())
def rename_array(array_id, newname):
"""
Rename array, by its ID.
    @param array_id: The ID of the array to rename.
@param newname: The new name of the array.
@return: 1 in case of success, 0 otherwise
"""
return __GetArrayById(array_id).rename(_IDC_ARRAY_PREFIX + newname) == 1
def delete_array(array_id):
"""
Delete array, by its ID.
@param array_id: The ID of the array to delete.
"""
__GetArrayById(array_id).kill()
def set_array_long(array_id, idx, value):
"""
Sets the long value of an array element.
@param array_id: The array ID.
@param idx: Index of an element.
@param value: 32bit or 64bit value to store in the array
@return: 1 in case of success, 0 otherwise
"""
return __GetArrayById(array_id).altset(idx, value)
def set_array_string(array_id, idx, value):
"""
Sets the string value of an array element.
@param array_id: The array ID.
@param idx: Index of an element.
@param value: String value to store in the array
@return: 1 in case of success, 0 otherwise
"""
return __GetArrayById(array_id).supset(idx, value)
def get_array_element(tag, array_id, idx):
"""
Get value of array element.
@param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR
@param array_id: The array ID.
@param idx: Index of an element.
@return: Value of the specified array element. Note that
             this function may return a string or long result. Nonexistent
array elements give zero as a result.
"""
node = __GetArrayById(array_id)
if tag == AR_LONG:
return node.altval(idx, tag)
elif tag == AR_STR:
res = node.supval(idx, tag)
return 0 if res is None else res
else:
return 0
def del_array_element(tag, array_id, idx):
"""
Delete an array element.
@param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR
@param array_id: The array ID.
@param idx: Index of an element.
@return: 1 in case of success, 0 otherwise.
"""
node = __GetArrayById(array_id)
if tag == AR_LONG:
return node.altdel(idx, tag)
elif tag == AR_STR:
return node.supdel(idx, tag)
else:
return 0
def get_first_index(tag, array_id):
"""
Get index of the first existing array element.
@param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR
@param array_id: The array ID.
@return: -1 if the array is empty, otherwise index of first array
element of given type.
"""
node = __GetArrayById(array_id)
if tag == AR_LONG:
return __l2m1(node.altfirst(tag))
elif tag == AR_STR:
return __l2m1(node.supfirst(tag))
else:
return -1
def get_last_index(tag, array_id):
"""
Get index of last existing array element.
@param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR
@param array_id: The array ID.
    @return: -1 if the array is empty, otherwise index of the last
             array element of given type.
"""
node = __GetArrayById(array_id)
if tag == AR_LONG:
return __l2m1(node.altlast(tag))
elif tag == AR_STR:
return __l2m1(node.suplast(tag))
else:
return -1
def get_next_index(tag, array_id, idx):
"""
Get index of the next existing array element.
@param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR
@param array_id: The array ID.
@param idx: Index of the current element.
@return: -1 if no more elements, otherwise returns index of the
next array element of given type.
"""
node = __GetArrayById(array_id)
try:
if tag == AR_LONG:
return __l2m1(node.altnext(idx, tag))
elif tag == AR_STR:
return __l2m1(node.supnext(idx, tag))
else:
return -1
except OverflowError:
# typically: An index of -1 was passed.
return -1
def get_prev_index(tag, array_id, idx):
"""
Get index of the previous existing array element.
@param tag: Tag of array, specifies one of two array types: AR_LONG, AR_STR
@param array_id: The array ID.
@param idx: Index of the current element.
@return: -1 if no more elements, otherwise returns index of the
previous array element of given type.
"""
node = __GetArrayById(array_id)
try:
if tag == AR_LONG:
return __l2m1(node.altprev(idx, tag))
elif tag == AR_STR:
return __l2m1(node.supprev(idx, tag))
else:
return -1
except OverflowError:
# typically: An index of -1 was passed.
return -1
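# Illustrative sketch (not part of the original IDC API): a persistent IDC
# array. The array name is hypothetical; the values are stored in a netnode
# inside the IDB, so they survive between sessions.
def _example_idc_array():
    aid = get_array_id("demo_array")
    if aid == -1:
        aid = create_array("demo_array")
    set_array_long(aid, 0, 0x1234)
    set_array_string(aid, 1, "hello")
    values = []
    idx = get_first_index(AR_LONG, aid)
    while idx != -1:
        values.append(get_array_element(AR_LONG, aid, idx))
        idx = get_next_index(AR_LONG, aid, idx)
    return values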
# -------------------- hashes -----------------------
def set_hash_long(hash_id, key, value):
"""
Sets the long value of a hash element.
@param hash_id: The hash ID.
@param key: Key of an element.
@param value: 32bit or 64bit value to store in the hash
@return: 1 in case of success, 0 otherwise
"""
return __GetArrayById(hash_id).hashset_idx(key, value)
def get_hash_long(hash_id, key):
"""
Gets the long value of a hash element.
@param hash_id: The hash ID.
@param key: Key of an element.
@return: the 32bit or 64bit value of the element, or 0 if no such
element.
"""
    return __GetArrayById(hash_id).hashval_long(key)
def set_hash_string(hash_id, key, value):
"""
Sets the string value of a hash element.
@param hash_id: The hash ID.
@param key: Key of an element.
@param value: string value to store in the hash
@return: 1 in case of success, 0 otherwise
"""
return __GetArrayById(hash_id).hashset_buf(key, value)
def get_hash_string(hash_id, key):
"""
Gets the string value of a hash element.
@param hash_id: The hash ID.
@param key: Key of an element.
@return: the string value of the element, or None if no such
element.
"""
    return __GetArrayById(hash_id).hashstr_buf(key)
def del_hash_string(hash_id, key):
"""
Delete a hash element.
@param hash_id: The hash ID.
@param key: Key of an element
@return: 1 upon success, 0 otherwise.
"""
return __GetArrayById(hash_id).hashdel(key)
def get_first_hash_key(hash_id):
"""
Get the first key in the hash.
@param hash_id: The hash ID.
@return: the key, 0 otherwise.
"""
r = __GetArrayById(hash_id).hashfirst()
return 0 if r is None else r
def get_last_hash_key(hash_id):
"""
Get the last key in the hash.
@param hash_id: The hash ID.
@return: the key, 0 otherwise.
"""
r = __GetArrayById(hash_id).hashlast()
return 0 if r is None else r
def get_next_hash_key(hash_id, key):
"""
Get the next key in the hash.
@param hash_id: The hash ID.
@param key: The current key.
@return: the next key, 0 otherwise
"""
r = __GetArrayById(hash_id).hashnext(key)
return 0 if r is None else r
def get_prev_hash_key(hash_id, key):
"""
Get the previous key in the hash.
@param hash_id: The hash ID.
@param key: The current key.
@return: the previous key, 0 otherwise
"""
r = __GetArrayById(hash_id).hashprev(key)
return 0 if r is None else r
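# Illustrative usage sketch: enumerating a string-valued hash with the
# accessors above. hash_id is assumed to identify an existing hash; the key
# helpers return 0 when exhausted, which terminates the loop.
def _example_dump_hash_strings(hash_id):
    key = get_first_hash_key(hash_id)
    while key != 0:
        print("%s -> %s" % (key, get_hash_string(hash_id, key)))
        key = get_next_hash_key(hash_id, key)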
#----------------------------------------------------------------------------
# S O U R C E F I L E / L I N E N U M B E R S
#----------------------------------------------------------------------------
add_sourcefile = ida_lines.add_sourcefile
get_sourcefile = ida_lines.get_sourcefile
del_sourcefile = ida_lines.del_sourcefile
set_source_linnum = ida_nalt.set_source_linnum
get_source_linnum = ida_nalt.get_source_linnum
del_source_linnum = ida_nalt.del_source_linnum
#----------------------------------------------------------------------------
# T Y P E L I B R A R I E S
#----------------------------------------------------------------------------
def add_default_til(name):
"""
Load a type library
@param name: name of type library.
@return: 1-ok, 0-failed.
"""
til = ida_typeinf.add_til(name, ida_typeinf.ADDTIL_DEFAULT)
if til:
return 1
else:
return 0
def import_type(idx, type_name):
"""
Copy information from type library to database
Copy structure, union, or enum definition from the type library
to the IDA database.
@param idx: the position of the new type in the list of
types (structures or enums) -1 means at the end of the list
@param type_name: name of type to copy
@return: BADNODE-failed, otherwise the type id (structure id or enum id)
"""
return ida_typeinf.import_type(None, idx, type_name)
def get_type(ea):
"""
Get type of function/variable
@param ea: the address of the object
@return: type string or None if failed
"""
return ida_typeinf.idc_get_type(ea)
def SizeOf(typestr):
"""
Returns the size of the type. It is equivalent to IDC's sizeof().
Use name, tp, fld = idc.parse_decl() ; SizeOf(tp) to retrieve the size
@return: -1 if typestring is not valid otherwise the size of the type
"""
return ida_typeinf.calc_type_size(None, typestr)
def get_tinfo(ea):
"""
Get type information of function/variable as 'typeinfo' object
@param ea: the address of the object
@return: None on failure, or (type, fields) tuple.
"""
return ida_typeinf.idc_get_type_raw(ea)
def get_local_tinfo(ordinal):
"""
Get local type information as 'typeinfo' object
@param ordinal: slot number (1...NumberOfLocalTypes)
@return: None on failure, or (type, fields) tuple.
"""
return ida_typeinf.idc_get_local_type_raw(ordinal)
def guess_type(ea):
"""
Guess type of function/variable
@param ea: the address of the object, can be the structure member id too
@return: type string or None if failed
"""
return ida_typeinf.idc_guess_type(ea)
TINFO_GUESSED = 0x0000 # this is a guessed type
TINFO_DEFINITE = 0x0001 # this is a definite type
TINFO_DELAYFUNC = 0x0002 # if type is a function and no function exists at ea,
# schedule its creation and argument renaming to
# auto-analysis otherwise try to create it immediately
def apply_type(ea, py_type, flags = TINFO_DEFINITE):
"""
Apply the specified type to the address
@param ea: the address of the object
@param py_type: typeinfo tuple (type, fields) as get_tinfo() returns
or tuple (name, type, fields) as parse_decl() returns
or None
if specified as None, then the
item associated with 'ea' will be deleted.
@param flags: combination of TINFO_... constants or 0
@return: Boolean
"""
if py_type is None:
py_type = ""
if isinstance(py_type, ida_idaapi.string_types) and len(py_type) == 0:
pt = (b"", b"")
else:
if len(py_type) == 3:
pt = py_type[1:] # skip name component
else:
pt = py_type
return ida_typeinf.apply_type(None, pt[0], pt[1], ea, flags)
PT_SIL = ida_typeinf.PT_SIL # silent, no messages
PT_NDC = ida_typeinf.PT_NDC # don't decorate names
PT_TYP = ida_typeinf.PT_TYP # return declared type information
PT_VAR = ida_typeinf.PT_VAR # return declared object information
PT_PACKMASK = ida_typeinf.PT_PACKMASK # mask for pack alignment values
PT_HIGH = ida_typeinf.PT_HIGH # assume high level prototypes (with hidden args, etc)
PT_LOWER = ida_typeinf.PT_LOWER # lower the function prototypes
PT_REPLACE = ida_typeinf.PT_REPLACE # replace the old type (used in idc)
PT_RAWARGS = ida_typeinf.PT_RAWARGS # leave argument names unchanged (do not remove underscores)
PT_SILENT = PT_SIL # alias
PT_PAKDEF = 0x0000 # default pack value
PT_PAK1 = 0x0010 # #pragma pack(1)
PT_PAK2 = 0x0020 # #pragma pack(2)
PT_PAK4 = 0x0030 # #pragma pack(4)
PT_PAK8 = 0x0040 # #pragma pack(8)
PT_PAK16 = 0x0050 # #pragma pack(16)
# idc.py-specific
PT_FILE = 0x00010000 # input if a file name (otherwise contains type declarations)
def SetType(ea, newtype):
"""
Set type of function/variable
@param ea: the address of the object
@param newtype: the type string in C declaration form.
Must contain the closing ';'
if specified as an empty string, then the
item associated with 'ea' will be deleted.
@return: 1-ok, 0-failed.
"""
if newtype != '':
pt = parse_decl(newtype, PT_SIL)
if pt is None:
# parsing failed
return None
else:
pt = None
return apply_type(ea, pt, TINFO_DEFINITE)
def parse_decl(inputtype, flags):
"""
Parse type declaration
@param inputtype: file name or C declarations (depending on the flags)
@param flags: combination of PT_... constants or 0
@return: None on failure or (name, type, fields) tuple
"""
if len(inputtype) != 0 and inputtype[-1] != ';':
inputtype = inputtype + ';'
return ida_typeinf.idc_parse_decl(None, inputtype, flags)
def parse_decls(inputtype, flags = 0):
"""
Parse type declarations
@param inputtype: file name or C declarations (depending on the flags)
@param flags: combination of PT_... constants or 0
@return: number of parsing errors (0 no errors)
"""
return ida_typeinf.idc_parse_types(inputtype, flags)
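# Illustrative usage sketch: parsing a C prototype and applying it to an
# address by chaining parse_decl() and apply_type() from above. The declaration
# string and the target address are placeholders, not part of the module.
def _example_apply_prototype(ea):
    parsed = parse_decl("int __cdecl demo(int a, int b);", PT_SIL)
    if parsed is None:
        return False
    return apply_type(ea, parsed, TINFO_DEFINITE)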
def print_decls(ordinals, flags):
"""
Print types in a format suitable for use in a header file
@param ordinals: comma-separated list of type ordinals
@param flags: combination of PDF_... constants or 0
@return: string containing the type definitions
"""
class def_sink(ida_typeinf.text_sink_t):
def __init__(self):
ida_typeinf.text_sink_t.__init__(self)
self.text = ""
def _print(self, defstr):
self.text += defstr
return 0
sink = def_sink()
py_ordinals = list(map(lambda l : int(l), ordinals.split(",")))
ida_typeinf.print_decls(sink, None, py_ordinals, flags)
return sink.text
PDF_INCL_DEPS = 0x1 # include dependencies
PDF_DEF_FWD = 0x2 # allow forward declarations
PDF_DEF_BASE = 0x4 # include base types: __int8, __int16, etc..
PDF_HEADER_CMT = 0x8 # prepend output with a descriptive comment
def get_ordinal_qty():
"""
Get number of local types + 1
@return: value >= 1. 1 means that there are no local types.
"""
return ida_typeinf.get_ordinal_qty(None)
def set_local_type(ordinal, input, flags):
"""
Parse one type declaration and store it in the specified slot
@param ordinal: slot number (1...NumberOfLocalTypes)
-1 means allocate new slot or reuse the slot
of the existing named type
@param input: C declaration. Empty input empties the slot
@param flags: combination of PT_... constants or 0
@return: slot number or 0 if error
"""
return ida_typeinf.idc_set_local_type(ordinal, input, flags)
def GetLocalType(ordinal, flags):
"""
Retrieve a local type declaration
@param flags: any of PRTYPE_* constants
@return: local type as a C declaration or ""
"""
(type, fields) = get_local_tinfo(ordinal)
if type:
name = get_numbered_type_name(ordinal)
return ida_typeinf.idc_print_type(type, fields, name, flags)
return ""
PRTYPE_1LINE = 0x0000 # print to one line
PRTYPE_MULTI = 0x0001 # print to many lines
PRTYPE_TYPE = 0x0002 # print type declaration (not variable declaration)
PRTYPE_PRAGMA = 0x0004 # print pragmas for alignment
def get_numbered_type_name(ordinal):
"""
Retrieve a local type name
@param ordinal: slot number (1...NumberOfLocalTypes)
    @return: local type name or None
"""
return ida_typeinf.idc_get_local_type_name(ordinal)
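# Illustrative usage sketch: printing every local type as a one-line C
# declaration. get_ordinal_qty() returns the number of local types + 1, so the
# valid slot numbers are 1..get_ordinal_qty()-1.
def _example_print_local_types():
    for ordinal in range(1, get_ordinal_qty()):
        decl = GetLocalType(ordinal, PRTYPE_1LINE)
        if decl:
            print("%d: %s" % (ordinal, decl))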
# ----------------------------------------------------------------------------
# H I D D E N A R E A S
# ----------------------------------------------------------------------------
add_hidden_range = ida_bytes.add_hidden_range
def update_hidden_range(ea, visible):
"""
Set hidden range state
@param ea: any address belonging to the hidden range
@param visible: new state of the range
@return: != 0 - ok
"""
ha = ida_bytes.get_hidden_range(ea)
if not ha:
return 0
else:
ha.visible = visible
return ida_bytes.update_hidden_range(ha)
del_hidden_range = ida_bytes.del_hidden_range
#--------------------------------------------------------------------------
# D E B U G G E R I N T E R F A C E
#--------------------------------------------------------------------------
load_debugger = ida_dbg.load_debugger
start_process = ida_dbg.start_process
exit_process = ida_dbg.exit_process
suspend_process = ida_dbg.suspend_process
get_processes = ida_dbg.get_processes
attach_process = ida_dbg.attach_process
detach_process = ida_dbg.detach_process
get_thread_qty = ida_dbg.get_thread_qty
getn_thread = ida_dbg.getn_thread
get_current_thread = ida_dbg.get_current_thread
getn_thread_name = ida_dbg.getn_thread_name
select_thread = ida_dbg.select_thread
suspend_thread = ida_dbg.suspend_thread
resume_thread = ida_dbg.resume_thread
def _get_modules():
"""
INTERNAL: Enumerate process modules
"""
module = ida_idd.modinfo_t()
result = ida_dbg.get_first_module(module)
while result:
yield module
result = ida_dbg.get_next_module(module)
def get_first_module():
"""
Enumerate process modules
@return: first module's base address or None on failure
"""
for module in _get_modules():
return module.base
else:
return None
def get_next_module(base):
"""
Enumerate process modules
@param base: previous module's base address
@return: next module's base address or None on failure
"""
foundit = False
for module in _get_modules():
if foundit:
return module.base
if module.base == base:
foundit = True
else:
return None
def get_module_name(base):
"""
Get process module name
@param base: the base address of the module
@return: required info or None
"""
for module in _get_modules():
if module.base == base:
return module.name
else:
return 0
def get_module_size(base):
""" |
@return: required info or -1
"""
for module in _get_modules():
if module.base == base:
return module.size
else:
return -1
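# Illustrative usage sketch: walking the debuggee's module list with the
# wrappers above; only meaningful while a debugging session is active.
def _example_print_modules():
    base = get_first_module()
    while base is not None:
        print("%X: %s (%d bytes)" % (base, get_module_name(base), get_module_size(base)))
        base = get_next_module(base)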
step_into = ida_dbg.step_into
step_over = ida_dbg.step_over
run_to = ida_dbg.run_to
step_until_ret = ida_dbg.step_until_ret
wait_for_next_event = ida_dbg.wait_for_next_event
def resume_process():
return wait_for_next_event(WFNE_CONT|WFNE_NOWAIT, 0)
def send_dbg_command(cmd):
"""Sends a command to the debugger module and returns the output string.
An exception will be raised if the debugger is not running or the current debugger does not export
the 'send_dbg_command' IDC command.
"""
s = eval_idc('send_dbg_command("%s");' % ida_kernwin.str2user(cmd))
if s.startswith("IDC_FAILURE"):
raise Exception("Debugger command is available only when the debugger is active!")
return s
# wfne flag is combination of the following:
WFNE_ANY = 0x0001 # return the first event (even if it doesn't suspend the process)
# if the process is still running, the database
# does not reflect the memory state. you might want
# to call refresh_debugger_memory() in this case
WFNE_SUSP = 0x0002 # wait until the process gets suspended
WFNE_SILENT = 0x0004 # 1: be silent, 0: display modal boxes if necessary
WFNE_CONT = 0x0008 # continue from the suspended state
WFNE_NOWAIT = 0x0010 # do not wait for any event, immediately return DEC_TIMEOUT
# (to be used with WFNE_CONT)
# debugger event codes
NOTASK = -2 # process does not exist
DBG_ERROR = -1 # error (e.g. network problems)
DBG_TIMEOUT = 0 # timeout
PROCESS_STARTED = 0x00000001 # New process started
PROCESS_EXITED = 0x00000002 # Process stopped
THREAD_STARTED = 0x00000004 # New thread started
THREAD_EXITED = 0x00000008 # Thread stopped
BREAKPOINT = 0x00000010 # Breakpoint reached
STEP = 0x00000020 # One instruction executed
EXCEPTION = 0x00000040 # Exception
LIB_LOADED = 0x00000080 # New library loaded
LIB_UNLOADED = 0x00000100 # Library unloaded
INFORMATION = 0x00000200 # User-defined information
PROCESS_ATTACHED = 0x00000400 # Attached to running process
PROCESS_DETACHED = 0x00000800 # Detached from process
PROCESS_SUSPENDED = 0x00001000 # Process has been suspended
refresh_debugger_memory = ida_dbg.refresh_debugger_memory
take_memory_snapshot = ida_segment.take_memory_snapshot
get_process_state = ida_dbg.get_process_state
DSTATE_SUSP = -1 # process is suspended
DSTATE_NOTASK = 0 # no process is currently debugged
DSTATE_RUN = 1 # process is running
DSTATE_RUN_WAIT_ATTACH = 2 # process is running, waiting for process properly attached
DSTATE_RUN_WAIT_END = 3 # process is running, but the user asked to kill/detach the process
# remark: in this case, most events are ignored
"""
Get various information about the current debug event
These functions are valid only when the current event exists
(the process is in the suspended state)
"""
# For all events:
def get_event_id():
"""
Get ID of debug event
@return: event ID
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ev.eid()
def get_event_pid():
"""
Get process ID for debug event
@return: process ID
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ev.pid
def get_event_tid():
"""
Get type ID for debug event
@return: type ID
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ev.tid
def get_event_ea():
"""
Get ea for debug event
@return: ea
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ev.ea
def is_event_handled():
"""
Is the debug event handled?
@return: boolean
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ev.handled
# For PROCESS_STARTED, PROCESS_ATTACHED, LIB_LOADED events:
def get_event_module_name():
"""
Get module name for debug event
@return: module name
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_module_name(ev)
def get_event_module_base():
"""
Get module base for debug event
@return: module base
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_module_base(ev)
def get_event_module_size():
"""
Get module size for debug event
@return: module size
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_module_size(ev)
def get_event_exit_code():
"""
Get exit code for debug event
@return: exit code for PROCESS_EXITED, THREAD_EXITED events
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ev.exit_code()
def get_event_info():
"""
Get debug event info
@return: event info: for THREAD_STARTED (thread name)
for LIB_UNLOADED (unloaded library name)
for INFORMATION (message to display)
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_info(ev)
def get_event_bpt_hea():
"""
Get hardware address for BREAKPOINT event
@return: hardware address
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_bpt_hea(ev)
def get_event_exc_code():
"""
Get exception code for EXCEPTION event
@return: exception code
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_exc_code(ev)
def get_event_exc_ea():
"""
Get address for EXCEPTION event
    @return: address of exception
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_exc_ea(ev)
def can_exc_continue():
"""
Can it continue after EXCEPTION event?
@return: boolean
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.can_exc_continue(ev)
def get_event_exc_info():
"""
Get info for EXCEPTION event
@return: info string
"""
ev = ida_dbg.get_debug_event()
assert ev, "Could not retrieve debug event"
return ida_idd.get_event_exc_info(ev)
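# Illustrative usage sketch: blocking until the process suspends on the next
# debugger event, then reporting it through the accessors above. WFNE_SUSP and
# the NOTASK/DBG_* codes are defined earlier in this section.
def _example_report_next_event():
    code = wait_for_next_event(WFNE_SUSP, -1)
    if code in (NOTASK, DBG_ERROR, DBG_TIMEOUT):
        return code
    print("event 0x%x at 0x%X (pid %d, tid %d)" % (
        get_event_id(), get_event_ea(), get_event_pid(), get_event_tid()))
    return code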
set_debugger_options = ida_dbg.set_debugger_options
DOPT_SEGM_MSGS = 0x00000001 # print messages on debugger segments modifications
DOPT_START_BPT = 0x00000002 # break on process start
DOPT_THREAD_MSGS = 0x00000004 # print messages on thread start/exit
DOPT_THREAD_BPT = 0x00000008 # break on thread start/exit
DOPT_BPT_MSGS = 0x00000010 # print message on breakpoint
DOPT_LIB_MSGS = 0x00000040 # print message on library load/unload
DOPT_LIB_BPT = 0x00000080 # break on library load/unload
DOPT_INFO_MSGS = 0x00000100 # print message on debugging information
DOPT_INFO_BPT = 0x00000200 # break on debugging information
DOPT_REAL_MEMORY = 0x00000400 # don't hide breakpoint instructions
DOPT_REDO_STACK = 0x00000800 # reconstruct the stack
DOPT_ENTRY_BPT = 0x00001000 # break on program entry point
DOPT_EXCDLG = 0x00006000 # exception dialogs:
EXCDLG_NEVER = 0x00000000 # never display exception dialogs
EXCDLG_UNKNOWN = 0x00002000 # display for unknown exceptions
EXCDLG_ALWAYS = 0x00006000 # always display
DOPT_LOAD_DINFO = 0x00008000 # automatically load debug files (pdb)
get_debugger_event_cond = ida_dbg.get_debugger_event_cond
set_debugger_event_cond = ida_dbg.set_debugger_event_cond
set_remote_debugger = ida_dbg.set_remote_debugger
define_exception = ida_dbg.define_exception
EXC_BREAK = 0x0001 # break on the exception
EXC_HANDLE = 0x0002 # should be handled by the debugger?
get_reg_value = ida_dbg.get_reg_val
def set_reg_value(value, name):
"""
Set register value
@param name: the register name
@param value: new register value
@note: The debugger should be running
It is not necessary to use this function to set register values.
A register name in the left side of an assignment will do too.
"""
return ida_dbg.set_reg_val(name, value)
get_bpt_qty = ida_dbg.get_bpt_qty
def get_bpt_ea(n):
"""
Get breakpoint address
@param n: number of breakpoint, is in range 0..get_bpt_qty()-1
@return: address of the breakpoint or BADADDR
"""
bpt = ida_dbg.bpt_t()
if ida_dbg.getn_bpt(n, bpt):
return bpt.ea
else:
return BADADDR
def get_bpt_attr(ea, bptattr):
"""
Get the characteristics of a breakpoint
@param ea: any address in the breakpoint range
@param bptattr: the desired attribute code, one of BPTATTR_... constants
@return: the desired attribute value or -1
"""
bpt = ida_dbg.bpt_t()
if not ida_dbg.get_bpt(ea, bpt):
return -1
else:
if bptattr == BPTATTR_EA:
return bpt.ea
if bptattr == BPTATTR_SIZE:
return bpt.size
if bptattr == BPTATTR_TYPE:
return bpt.type
if bptattr == BPTATTR_COUNT:
return bpt.pass_count
if bptattr == BPTATTR_FLAGS:
return bpt.flags
if bptattr == BPTATTR_COND:
return bpt.condition
if bptattr == BPTATTR_PID:
return bpt.pid
if bptattr == BPTATTR_TID:
return bpt.tid
return -1
BPTATTR_EA = 1 # starting address of the breakpoint
BPTATTR_SIZE = 2 # size of the breakpoint (undefined for software breakpoint)
# type of the breakpoint
BPTATTR_TYPE = 3
# Breakpoint types:
BPT_WRITE = 1 # Hardware: Write access
BPT_RDWR = 3 # Hardware: Read/write access
BPT_SOFT = 4 # Software breakpoint
BPT_EXEC = 8 # Hardware: Execute instruction
BPT_DEFAULT = (BPT_SOFT|BPT_EXEC) # Choose bpt type automatically
BPTATTR_COUNT = 4
BPTATTR_FLAGS = 5
BPT_BRK = 0x001 # the debugger stops on this breakpoint
BPT_TRACE = 0x002 # the debugger adds trace information when this breakpoint is reached
BPT_UPDMEM = 0x004 # refresh the memory layout and contents before evaluating bpt condition
BPT_ENABLED = 0x008 # enabled?
BPT_LOWCND = 0x010 # condition is calculated at low level (on the server side)
BPT_TRACEON = 0x020 # enable tracing when the breakpoint is reached
BPT_TRACE_INSN = 0x040 # instruction tracing
BPT_TRACE_FUNC = 0x080 # function tracing
BPT_TRACE_BBLK = 0x100 # basic block tracing
BPTATTR_COND = 6 # Breakpoint condition. NOTE: the return value is a string in this case
BPTATTR_PID = 7 # Breakpoint process id
BPTATTR_TID = 8 # Breakpoint thread id
# Breakpoint location type:
BPLT_ABS = 0 # Absolute address. Attributes:
# - locinfo: absolute address
BPLT_REL = 1 # Module relative address. Attributes:
# - locpath: the module path
# - locinfo: offset from the module base address
BPLT_SYM = 2 # Symbolic name. The name will be resolved on DLL load/unload
# events and on naming an address. Attributes:
# - locpath: symbol name
# - locinfo: offset from the symbol base address
def set_bpt_attr(address, bptattr, value):
"""
    Set modifiable characteristics of a breakpoint
    @param address: any address in the breakpoint range
    @param bptattr: the attribute code, one of BPTATTR_* constants
                    BPTATTR_COND is not allowed, see set_bpt_cond()
    @param value: the attribute value
@return: success
"""
bpt = ida_dbg.bpt_t()
if not ida_dbg.get_bpt(address, bpt):
return False
else:
if bptattr not in [ BPTATTR_SIZE, BPTATTR_TYPE, BPTATTR_FLAGS, BPTATTR_COUNT, BPTATTR_PID, BPTATTR_TID ]:
return False
if bptattr == BPTATTR_SIZE:
bpt.size = value
if bptattr == BPTATTR_TYPE:
bpt.type = value
if bptattr == BPTATTR_COUNT:
bpt.pass_count = value
if bptattr == BPTATTR_FLAGS:
bpt.flags = value
if bptattr == BPTATTR_PID:
bpt.pid = value
if bptattr == BPTATTR_TID:
bpt.tid = value
return ida_dbg.update_bpt(bpt)
def set_bpt_cond(ea, cnd, is_lowcnd=0):
"""
Set breakpoint condition
@param ea: any address in the breakpoint range
@param cnd: breakpoint condition
@param is_lowcnd: 0 - regular condition, 1 - low level condition
@return: success
"""
bpt = ida_dbg.bpt_t()
if not ida_dbg.get_bpt(ea, bpt):
return False
bpt.condition = cnd
if is_lowcnd:
bpt.flags |= BPT_LOWCND
else:
bpt.flags &= ~BPT_LOWCND
return ida_dbg.update_bpt(bpt)
add_bpt = ida_dbg.add_bpt
del_bpt = ida_dbg.del_bpt
enable_bpt = ida_dbg.enable_bpt
check_bpt = ida_dbg.check_bpt
BPTCK_NONE = -1 # breakpoint does not exist
BPTCK_NO = 0 # breakpoint is disabled
BPTCK_YES = 1 # breakpoint is enabled
BPTCK_ACT = 2 # breakpoint is active (written to the process)
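# Illustrative usage sketch: planting a software breakpoint and attaching a
# regular (IDC-evaluated) condition with add_bpt() and set_bpt_cond() above.
# The address and the condition text are placeholders.
def _example_conditional_bpt(ea):
    if not add_bpt(ea, 0, BPT_SOFT):
        return False
    return set_bpt_cond(ea, "EAX == 0")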
def enable_tracing(trace_level, enable):
"""
Enable step tracing
@param trace_level: what kind of trace to modify
@param enable: 0: turn off, 1: turn on
@return: success
"""
assert trace_level in [ TRACE_STEP, TRACE_INSN, TRACE_FUNC ], \
"trace_level must be one of TRACE_* constants"
if trace_level == TRACE_STEP:
return ida_dbg.enable_step_trace(enable)
if trace_level == TRACE_INSN:
return ida_dbg.enable_insn_trace(enable)
if trace_level == TRACE_FUNC:
return ida_dbg.enable_func_trace(enable)
return False
TRACE_STEP = 0x0 # lowest level trace. trace buffers are not maintained
TRACE_INSN = 0x1 # instruction level trace
TRACE_FUNC = 0x2 # function level trace (calls & rets)
get_step_trace_options = ida_dbg.get_step_trace_options
set_step_trace_options = ida_dbg.set_step_trace_options
ST_OVER_DEBUG_SEG = 0x01 # step tracing will be disabled when IP is in a debugger segment
ST_OVER_LIB_FUNC = 0x02 # step tracing will be disabled when IP is in a library function
ST_ALREADY_LOGGED = 0x04 # step tracing will be disabled when IP is already logged
ST_SKIP_LOOPS = 0x08 # step tracing will try to skip loops already recorded
load_trace_file = ida_dbg.load_trace_file
save_trace_file = ida_dbg.save_trace_file
is_valid_trace_file = ida_dbg.is_valid_trace_file
diff_trace_file = ida_dbg.diff_trace_file
def clear_trace(filename):
"""
Clear the current trace buffer
"""
return ida_dbg.clear_trace()
get_trace_file_desc = ida_dbg.get_trace_file_desc
set_trace_file_desc = ida_dbg.set_trace_file_desc
get_tev_qty = ida_dbg.get_tev_qty
get_tev_ea = ida_dbg.get_tev_ea
TEV_NONE = 0 # no event
TEV_INSN = 1 # an instruction trace
TEV_CALL = 2 # a function call trace
TEV_RET = 3 # a function return trace
TEV_BPT = 4 # write, read/write, execution trace
TEV_MEM = 5 # memory layout changed
TEV_EVENT = 6 # debug event
get_tev_type = ida_dbg.get_tev_type
get_tev_tid = ida_dbg.get_tev_tid
get_tev_reg = ida_dbg.get_tev_reg_val
get_tev_mem_qty = ida_dbg.get_tev_reg_mem_qty
get_tev_mem = ida_dbg.get_tev_reg_mem
get_tev_mem_ea = ida_dbg.get_tev_reg_mem_ea
get_call_tev_callee = ida_dbg.get_call_tev_callee
get_ret_tev_return = ida_dbg.get_ret_tev_return
get_bpt_tev_ea = ida_dbg.get_bpt_tev_ea
#--------------------------------------------------------------------------
# C O L O R S
#--------------------------------------------------------------------------
def get_color(ea, what):
"""
Get item color
@param ea: address of the item
@param what: type of the item (one of CIC_* constants)
@return: color code in RGB (hex 0xBBGGRR)
"""
if what not in [ CIC_ITEM, CIC_FUNC, CIC_SEGM ]:
raise ValueError("'what' must be one of CIC_ITEM, CIC_FUNC and CIC_SEGM")
if what == CIC_ITEM:
return ida_nalt.get_item_color(ea)
if what == CIC_FUNC:
func = ida_funcs.get_func(ea)
if func:
return func.color
else:
return DEFCOLOR
if what == CIC_SEGM:
seg = ida_segment.getseg(ea)
if seg:
return seg.color
else:
return DEFCOLOR
# color item codes:
CIC_ITEM = 1 # one instruction or data
CIC_FUNC = 2 # function
CIC_SEGM = 3 # segment
DEFCOLOR = 0xFFFFFFFF # Default color
def set_color(ea, what, color):
"""
Set item color
@param ea: address of the item
@param what: type of the item (one of CIC_* constants)
@param color: new color code in RGB (hex 0xBBGGRR)
@return: success (True or False)
"""
if what not in [ CIC_ITEM, CIC_FUNC, CIC_SEGM ]:
raise ValueError("'what' must be one of CIC_ITEM, CIC_FUNC and CIC_SEGM")
if what == CIC_ITEM:
return ida_nalt.set_item_color(ea, color)
if what == CIC_FUNC:
func = ida_funcs.get_func(ea)
if func:
func.color = color
return bool(ida_funcs.update_func(func))
else:
return False
if what == CIC_SEGM:
seg = ida_segment.getseg(ea)
if seg:
seg.color = color
return bool(seg.update())
else:
return False
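# Illustrative usage sketch: tinting a whole function with set_color() from
# above. The colour is an arbitrary 0xBBGGRR value chosen for demonstration.
def _example_highlight_function(ea):
    return set_color(ea, CIC_FUNC, 0xCCFFCC)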
#----------------------------------------------------------------------------
# A R M S P E C I F I C
#----------------------------------------------------------------------------
def force_bl_jump(ea):
"""
Some ARM compilers in Thumb mode use BL (branch-and-link)
instead of B (branch) for long jumps, since BL has more range.
By default, IDA tries to determine if BL is a jump or a call.
You can override IDA's decision using commands in Edit/Other menu
(Force BL call/Force BL jump) or the following two functions.
Force BL instruction to be a jump
@param ea: address of the BL instruction
@return: 1-ok, 0-failed
"""
return eval_idc("force_bl_jump(0x%x)"%ea)
def force_bl_call(ea):
"""
Force BL instruction to be a call
@param ea: address of the BL instruction
@return: 1-ok, 0-failed
"""
return eval_idc("force_bl_call(0x%x)"%ea)
#--------------------------------------------------------------------------
def set_flag(off, bit, value):
v = get_inf_attr(off)
if value:
v = v | bit
else:
v = v & ~bit
set_inf_attr(off, v)
# Convenience functions:
def here(): return get_screen_ea()
def is_mapped(ea): return (prev_addr(ea+1)==ea)
ARGV = []
"""The command line arguments passed to IDA via the -S switch."""
# END OF IDC COMPATIBILITY CODE
@param base: the base address of the module |
interceptor.js | angular.module('ruchJow.security.interceptor', ['ruchJow.security.retryQueue'])
// This http interceptor listens for authentication failures
.factory('securityInterceptor', ['$injector', '$q', 'ruchJowSecurityRetryQueue', function($injector, $q, queue) {
return {
responseError: function (rejection) {
if (rejection.status === 401) {
// The request bounced because it was not authorized - add a new request to the retry queue
return queue.pushRetryFn(function retryRequest() {
// We must use $injector to get the $http service to prevent circular dependency
return $injector.get('$http')(rejection.config);
}, 'unauthorized-server');
}
return $q.reject(rejection);
}
};
//return function(promise) {
// // Intercept failed requests
// return promise.then(null, function(originalResponse) {
// if (originalResponse.status === 401) {
// // The request bounced because it was not authorized - add a new request to the retry queue
// promise = queue.pushRetryFn(function retryRequest() {
// // We must use $injector to get the $http service to prevent circular dependency
// return $injector.get('$http')(originalResponse.config);
// }, 'unauthorized-server');
// }
// return promise;
// });
//}; | $httpProvider.interceptors.push('securityInterceptor');
}]); | }])
// We have to add the interceptor to the queue as a string because the interceptor depends upon service instances that are not available in the config block.
.config(['$httpProvider', function($httpProvider) { |
data.rs | #[doc = "Register `DATA` reader"]
pub struct R(crate::R<DATA_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<DATA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<DATA_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<DATA_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `DATA` writer"]
pub struct W(crate::W<DATA_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<DATA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<DATA_SPEC>> for W { | fn from(writer: crate::W<DATA_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `tx_rx` reader - Tx/Rx Buffer"]
pub struct TX_RX_R(crate::FieldReader<u8>);
impl TX_RX_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
TX_RX_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for TX_RX_R {
type Target = crate::FieldReader<u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `tx_rx` writer - Tx/Rx Buffer"]
pub struct TX_RX_W<'a> {
w: &'a mut W,
}
impl<'a> TX_RX_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0xff) | (value as u32 & 0xff);
self.w
}
}
impl R {
#[doc = "Bits 0:7 - Tx/Rx Buffer"]
#[inline(always)]
pub fn tx_rx(&self) -> TX_RX_R {
TX_RX_R::new((self.bits & 0xff) as u8)
}
}
impl W {
#[doc = "Bits 0:7 - Tx/Rx Buffer"]
#[inline(always)]
pub fn tx_rx(&mut self) -> TX_RX_W {
TX_RX_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "1-Wire Master Data Buffer\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [data](index.html) module"]
pub struct DATA_SPEC;
impl crate::RegisterSpec for DATA_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [data::R](R) reader structure"]
impl crate::Readable for DATA_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [data::W](W) writer structure"]
impl crate::Writable for DATA_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets DATA to value 0"]
impl crate::Resettable for DATA_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | #[inline(always)] |
getMenu.js | const { app, Menu, dialog } = require('electron');
const { windows } = require('./windows');
function | (oldMenu) {
return Menu.buildFromTemplate([
{
label: 'File',
submenu: [
{
label: 'New Dashboard',
click(_, window) {
window.webContents.send('newDashboard');
}
},
{
label: 'New Window',
click() {
windows.createWindow();
}
},
{ type: 'separator' },
{
label: 'Open Dashboard...',
click(_, window) {
dialog.showOpenDialog(window, {
title: 'Open Dashboard',
filters: [{ name: 'HTML', extensions: ['html', 'htm'] }],
properties: ['openFile']
})
.then(({ canceled, filePaths }) => {
if (!canceled) {
window.webContents.send('dashboardOpen', filePaths);
}
});
}
},
{
label: 'Save Dashboard',
click(_, window) {
if (windows.getLastOpenedDashboard(window.id)) {
window.webContents.send('dashboardSave', windows.getLastOpenedDashboard(window.id));
} else {
dialog.showSaveDialog(window, {
title: 'Save Dashboard',
filters: [{ name: 'HTML', extensions: ['html', 'htm'] }],
})
.then(({ canceled, filePath }) => {
if (!canceled) {
window.webContents.send('dashboardSave', filePath);
}
});
}
}
},
{
label: 'Save Dashboard As...',
click(_, window) {
dialog.showSaveDialog(window, {
title: 'Save Dashboard',
filters: [{ name: 'HTML', extensions: ['html', 'htm'] }],
defaultPath: windows.getLastOpenedDashboard(window.id),
})
.then(({ canceled, filePath }) => {
if (!canceled) {
window.webContents.send('dashboardSave', filePath);
}
});
}
},
{ type: 'separator' },
// {
// label: 'Preferences',
// click(_, window) {
// window.webContents.send('ntModalOpen');
// }
// },
{
label: 'Plugins',
click(_, window) {
window.webContents.send('pluginsModalOpen');
}
},
{ type: 'separator' },
{
label: 'Exit',
click() {
windows.updateWindowPreferences();
app.quit();
}
}
]
},
...oldMenu.items.slice(1)
]);
}
exports.getMenu = getMenu;
| getMenu |
feedback.rs | //! Feeds back the input stream directly into the output stream.
//!
//! Assumes that the input and output devices can use the same stream format and that they support
//! the f32 sample format.
//!
//! Uses a delay of `LATENCY_MS` milliseconds in case the default input and output streams are not
//! precisely synchronised.
extern crate cpal;
extern crate failure;
extern crate ringbuf;
use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait};
use ringbuf::RingBuffer;
const LATENCY_MS: f32 = 150.0;
fn main() -> Result<(), failure::Error> {
let host = cpal::default_host();
let event_loop = host.event_loop();
// Default devices.
let input_device = host.default_input_device().expect("failed to get default input device");
let output_device = host.default_output_device().expect("failed to get default output device");
println!("Using default input device: \"{}\"", input_device.name()?);
println!("Using default output device: \"{}\"", output_device.name()?);
// We'll try and use the same format between streams to keep it simple
let mut format = input_device.default_input_format()?;
format.data_type = cpal::SampleFormat::F32;
// Build streams.
println!("Attempting to build both streams with `{:?}`.", format);
let input_stream_id = event_loop.build_input_stream(&input_device, &format)?;
let output_stream_id = event_loop.build_output_stream(&output_device, &format)?;
println!("Successfully built streams.");
// Create a delay in case the input and output devices aren't synced.
let latency_frames = (LATENCY_MS / 1_000.0) * format.sample_rate.0 as f32;
let latency_samples = latency_frames as usize * format.channels as usize;
// The buffer to share samples
let ring = RingBuffer::new(latency_samples * 2);
let (mut producer, mut consumer) = ring.split();
// Fill the samples with 0.0 equal to the length of the delay.
for _ in 0..latency_samples {
// The ring buffer has twice as much space as necessary to add latency here,
// so this should never fail
producer.push(0.0).unwrap();
}
// Play the streams.
println!("Starting the input and output streams with `{}` milliseconds of latency.", LATENCY_MS);
event_loop.play_stream(input_stream_id.clone())?;
event_loop.play_stream(output_stream_id.clone())?;
// Run the event loop on a separate thread.
std::thread::spawn(move || {
event_loop.run(move |id, result| {
let data = match result {
Ok(data) => data,
Err(err) => {
eprintln!("an error occurred on stream {:?}: {}", id, err);
return;
}
};
match data {
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::F32(buffer) } => {
assert_eq!(id, input_stream_id);
let mut output_fell_behind = false;
for &sample in buffer.iter() {
if producer.push(sample).is_err() {
output_fell_behind = true;
}
}
if output_fell_behind {
eprintln!("output stream fell behind: try increasing latency");
}
},
cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer) } => {
assert_eq!(id, output_stream_id);
let mut input_fell_behind = None;
for sample in buffer.iter_mut() {
*sample = match consumer.pop() {
Ok(s) => s,
Err(err) => {
input_fell_behind = Some(err);
0.0
},
};
}
if let Some(_) = input_fell_behind {
eprintln!("input stream fell behind: try increasing latency");
}
},
_ => panic!("we're expecting f32 data"),
}
});
});
// Run for 3 seconds before closing.
println!("Playing for 3 seconds... "); | } | std::thread::sleep(std::time::Duration::from_secs(3));
println!("Done!");
Ok(()) |
package.model.ts | export class | {
projectName: string;
projectNo: string;
opportunityName: string;
job: string;
place: string;
locationId: number;
quarter: string;
customerId: number;
classify: string;
customerContactId: number;
consultantUnitCustomerId: number;
consultantAddress: string;
consultantPhone: string;
floorArea: number;
magnitude: string;
constructionTypeId: number;
constructionCategoryId: number;
hbcRole: string;
documentLink: string;
chairEmployeeId: number;
bidStatusId: number;
amount: number;
evaluationId: number;
startTrackingDate: number;
submissionDate: number;
resultEstimatedDate: number;
projectEstimatedStartDate: number;
projectEstimatedEndDate: number;
totalTime: string;
description: string;
receiveWinResultDate: number;
receiveLoseResultDate: number;
receiveCancelResultDate: number;
}
| PackageModel |
cities.go | package meetup
import (
"net/http"
"github.com/dghubble/sling"
)
type City struct {
Zip string `json:"zip"`
Country string `json:"country"`
	LocalizedCountryName string `json:"localized_country_name"`
Distance float64 `json:"distance"`
Name string `json:"city"`
Longitude float64 `json:"lon"`
Ranking int `json:"ranking"`
Id int `json:"id"`
State string `json:"state"`
	MemberCount int `json:"member_count"`
Latitude float64 `json:"lat"`
}
type CitiesResponse struct {
Results []City `json:"results"`
Meta ResponseMeta `json:"meta"` | }
type ReqCityParams struct {
Country string `url:"country,omitempty"`
Query string `url:"query,omitempty"`
Longitude float64 `url:"lon,omitempty"`
State string `url:"state,omitempty"`
Radius float64 `url:"radius,omitempty"`
Latitude float64 `url:"lat,omitempty"`
Order string `url:"order,omitempty"`
Page int `url:"page,omitempty"`
Offset int `url:"offset,omitempty"`
Desc string `url:"desc,omitempty"`
Only string `url:"only,omitempty"`
Omit string `url:"omit,omitempty"`
}
type CityService struct {
sling *sling.Sling
}
func newCityService(sling *sling.Sling) *CityService {
return &CityService{
sling: sling.Path("2/cities"),
}
}
func (cs *CityService) GetCities(params *ReqCityParams) (CitiesResponse, *http.Response, error) {
cities := new(CitiesResponse)
apiError := new(ApiError)
resp, err := cs.sling.New().QueryStruct(params).Receive(cities, apiError)
return *cities, resp, relevantError(err, *apiError)
} | |
keygen.rs | // Copyright 2018-2020 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::{create_dir_all, metadata, OpenOptions};
use std::io::prelude::*;
use std::os::unix::fs::OpenOptionsExt;
use std::path::{Path, PathBuf};
#[cfg(target_os = "linux")]
use std::os::linux::fs::MetadataExt;
#[cfg(not(target_os = "linux"))]
use std::os::unix::fs::MetadataExt;
use clap::ArgMatches;
use sawtooth_sdk::signing;
use crate::error::CliError;
use super::{chown, Action};
const SYSTEM_KEY_PATH: &str = "/etc/splinter/keys";
pub struct KeyGenAction;
impl Action for KeyGenAction {
fn | <'a>(&mut self, arg_matches: Option<&ArgMatches<'a>>) -> Result<(), CliError> {
let args = arg_matches.ok_or_else(|| CliError::RequiresArgs)?;
let key_name = args
.value_of("key-name")
.map(String::from)
.unwrap_or_else(whoami::username);
let key_dir = if let Some(dir) = args.value_of("key_dir") {
PathBuf::from(dir)
} else if args.is_present("system") {
PathBuf::from(SYSTEM_KEY_PATH)
} else {
dirs::home_dir()
.map(|mut p| {
p.push("splinter/keys");
p
})
.ok_or_else(|| CliError::EnvironmentError("Home directory not found".into()))?
};
create_dir_all(key_dir.as_path()).map_err(|err| {
CliError::EnvironmentError(format!("Failed to create keys directory: {}", err))
})?;
let private_key_path = key_dir.join(&key_name).with_extension("priv");
let public_key_path = key_dir.join(&key_name).with_extension("pub");
create_key_pair(
&key_dir,
private_key_path,
public_key_path,
args.is_present("force"),
true,
)?;
Ok(())
}
}
/// Creates a public/private key pair.
///
/// Returns the public key in hex, if successful.
pub fn create_key_pair(
key_dir: &Path,
private_key_path: PathBuf,
public_key_path: PathBuf,
force_create: bool,
change_permissions: bool,
) -> Result<Vec<u8>, CliError> {
if !force_create {
if private_key_path.exists() {
return Err(CliError::EnvironmentError(format!(
"File already exists: {:?}",
private_key_path
)));
}
if public_key_path.exists() {
return Err(CliError::EnvironmentError(format!(
"File already exists: {:?}",
public_key_path
)));
}
}
let context = signing::create_context("secp256k1").map_err(|err| {
CliError::ActionError(format!("Failed to create signing context: {}", err))
})?;
let private_key = context.new_random_private_key().map_err(|err| {
CliError::ActionError(format!("Failed to generate new private key: {}", err))
})?;
let public_key = context
.get_public_key(&*private_key)
.map_err(|err| CliError::ActionError(format!("Failed to get public key: {}", err)))?;
let key_dir_info = metadata(key_dir).map_err(|err| {
CliError::EnvironmentError(format!(
"Failed to read key directory '{}': {}",
key_dir.display(),
err
))
})?;
#[cfg(not(target_os = "linux"))]
let (key_dir_uid, key_dir_gid) = (key_dir_info.uid(), key_dir_info.gid());
#[cfg(target_os = "linux")]
let (key_dir_uid, key_dir_gid) = (key_dir_info.st_uid(), key_dir_info.st_gid());
{
if private_key_path.exists() {
info!(
"Overwriting private key file: {}",
private_key_path.display()
);
} else {
info!("Writing private key file: {}", private_key_path.display());
}
let private_key_file = OpenOptions::new()
.write(true)
.create(true)
.mode(0o640)
.open(private_key_path.as_path())
.map_err(|err| {
CliError::EnvironmentError(format!(
"Failed to open private key file '{}': {}",
private_key_path.display(),
err
))
})?;
writeln!(&private_key_file, "{}", private_key.as_hex()).map_err(|err| {
CliError::ActionError(format!(
"Failed to write to private key file '{}': {}",
private_key_path.display(),
err
))
})?;
}
{
if public_key_path.exists() {
info!("Overwriting public key file: {}", public_key_path.display());
} else {
info!("writing public key file: {}", public_key_path.display());
}
let public_key_file = OpenOptions::new()
.write(true)
.create(true)
.mode(0o644)
.open(public_key_path.as_path())
.map_err(|err| {
CliError::EnvironmentError(format!(
"Failed to open public key file '{}': {}",
public_key_path.display(),
err
))
})?;
writeln!(&public_key_file, "{}", public_key.as_hex()).map_err(|err| {
CliError::ActionError(format!(
"Failed to write to public key file '{}': {}",
public_key_path.display(),
err
))
})?;
}
if change_permissions {
chown(private_key_path.as_path(), key_dir_uid, key_dir_gid)?;
chown(public_key_path.as_path(), key_dir_uid, key_dir_gid)?;
}
Ok(public_key.as_slice().to_vec())
}
| run |
trait-inheritance-overloading.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp::Eq;
trait MyNum : Add<Self,Self> + Sub<Self,Self> + Mul<Self,Self> + Eq { }
#[deriving(Show)]
struct MyInt { val: int }
impl Add<MyInt, MyInt> for MyInt {
fn add(&self, other: &MyInt) -> MyInt { mi(self.val + other.val) }
}
impl Sub<MyInt, MyInt> for MyInt {
fn sub(&self, other: &MyInt) -> MyInt { mi(self.val - other.val) }
}
| fn mul(&self, other: &MyInt) -> MyInt { mi(self.val * other.val) }
}
impl Eq for MyInt {
fn eq(&self, other: &MyInt) -> bool { self.val == other.val }
fn ne(&self, other: &MyInt) -> bool { !self.eq(other) }
}
impl MyNum for MyInt {}
fn f<T:MyNum>(x: T, y: T) -> (T, T, T) {
return (x + y, x - y, x * y);
}
fn mi(v: int) -> MyInt { MyInt { val: v } }
pub fn main() {
let (x, y) = (mi(3), mi(5));
let (a, b, c) = f(x, y);
assert_eq!(a, mi(8));
assert_eq!(b, mi(-2));
assert_eq!(c, mi(15));
} | impl Mul<MyInt, MyInt> for MyInt { |
docker.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package model
import (
"fmt"
"github.com/blang/semver"
"github.com/golang/glog"
"k8s.io/kops/nodeup/pkg/distros"
"k8s.io/kops/nodeup/pkg/model/resources"
"k8s.io/kops/pkg/systemd"
"k8s.io/kops/upup/pkg/fi"
"k8s.io/kops/upup/pkg/fi/nodeup/nodetasks"
)
// DockerBuilder install docker (just the packages at the moment)
type DockerBuilder struct {
*NodeupModelContext
}
var _ fi.ModelBuilder = &DockerBuilder{}
type dockerVersion struct {
Name string
Version string
Source string
Hash string
DockerVersion string
Distros []distros.Distribution
Dependencies []string
Architectures []Architecture
}
const DefaultDockerVersion = "1.12.3"
var dockerVersions = []dockerVersion{
// 1.11.2 - Jessie
{
DockerVersion: "1.11.2",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionJessie},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.11.2-0~jessie",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.11.2-0~jessie_amd64.deb",
Hash: "c312f1f6fa0b34df4589bb812e4f7af8e28fd51d",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
},
// 1.11.2 - Xenial
{
DockerVersion: "1.11.2",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionXenial},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.11.2-0~xenial",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.11.2-0~xenial_amd64.deb",
Hash: "194bfa864f0424d1bbdc7d499ccfa0445ce09b9f",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
},
// 1.11.2 - Centos / Rhel7 (two packages)
{
DockerVersion: "1.11.2",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionRhel7, distros.DistributionCentos7},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.11.2",
Source: "https://yum.dockerproject.org/repo/main/centos/7/Packages/docker-engine-1.11.2-1.el7.centos.x86_64.rpm",
Hash: "432e6d7948df9e05f4190fce2f423eedbfd673d5",
Dependencies: []string{"libtool-ltdl"},
},
{
DockerVersion: "1.11.2",
Name: "docker-engine-selinux",
Distros: []distros.Distribution{distros.DistributionRhel7, distros.DistributionCentos7},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.11.2",
Source: "https://yum.dockerproject.org/repo/main/centos/7/Packages/docker-engine-selinux-1.11.2-1.el7.centos.noarch.rpm",
Hash: "f6da608fa8eeb2be8071489086ed9ff035f6daba",
},
// 1.12.1 - Jessie
{
DockerVersion: "1.12.1",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionJessie},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.1-0~jessie",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.12.1-0~jessie_amd64.deb",
Hash: "0401866231749abaabe8e09ee24432132839fe53",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
},
// 1.12.1 - Xenial
{
DockerVersion: "1.12.1",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionXenial},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.1-0~xenial",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.12.1-0~xenial_amd64.deb",
Hash: "30f7840704361673db2b62f25b6038628184b056",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
},
// 1.12.1 - Centos / Rhel7 (two packages)
{
DockerVersion: "1.12.1",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionRhel7, distros.DistributionCentos7},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.1",
Source: "https://yum.dockerproject.org/repo/main/centos/7/Packages/docker-engine-1.12.1-1.el7.centos.x86_64.rpm",
Hash: "636471665665546224444052c3b48001397036be",
Dependencies: []string{"libtool-ltdl"},
},
{
DockerVersion: "1.12.1",
Name: "docker-engine-selinux",
Distros: []distros.Distribution{distros.DistributionRhel7, distros.DistributionCentos7},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.1",
Source: "https://yum.dockerproject.org/repo/main/centos/7/Packages/docker-engine-selinux-1.12.1-1.el7.centos.noarch.rpm",
Hash: "52ec22128e70acc2f76b3a8e87ff96785995116a",
},
// 1.12.3 - Jessie
{
DockerVersion: "1.12.3",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionJessie},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.3-0~jessie",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.12.3-0~jessie_amd64.deb",
Hash: "7c7eb45542b67a9cfb33c292ba245710efb5d773",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
//Depends: iptables, init-system-helpers (>= 1.18~), libapparmor1 (>= 2.6~devel), libc6 (>= 2.17), libdevmapper1.02.1 (>= 2:1.02.90), libltdl7 (>= 2.4.2), libsystemd0
//Recommends: aufs-tools, ca-certificates, cgroupfs-mount | cgroup-lite, git, xz-utils
},
// 1.12.3 - Jessie on ARM
{
DockerVersion: "1.12.3",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionJessie},
Architectures: []Architecture{ArchitectureArm},
Version: "1.12.3-0~jessie",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.12.3-0~jessie_armhf.deb",
Hash: "aa2f2f710360268dc5fd3eb066868c5883d95698",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
},
// 1.12.3 - Xenial
{
DockerVersion: "1.12.3",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionXenial},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.3-0~xenial",
Source: "http://apt.dockerproject.org/repo/pool/main/d/docker-engine/docker-engine_1.12.3-0~xenial_amd64.deb",
Hash: "b758fc88346a1e5eebf7408b0d0c99f4f134166c",
Dependencies: []string{"bridge-utils", "libapparmor1", "libltdl7", "perl"},
},
// 1.12.3 - Centos / Rhel7 (two packages)
{
DockerVersion: "1.12.3",
Name: "docker-engine",
Distros: []distros.Distribution{distros.DistributionRhel7, distros.DistributionCentos7},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.3",
Source: "https://yum.dockerproject.org/repo/main/centos/7/Packages/docker-engine-1.12.3-1.el7.centos.x86_64.rpm",
Hash: "67fbb78cfb9526aaf8142c067c10384df199d8f9",
Dependencies: []string{"libtool-ltdl", "libseccomp"},
},
{
DockerVersion: "1.12.3",
Name: "docker-engine-selinux",
Distros: []distros.Distribution{distros.DistributionRhel7, distros.DistributionCentos7},
Architectures: []Architecture{ArchitectureAmd64},
Version: "1.12.3",
Source: "https://yum.dockerproject.org/repo/main/centos/7/Packages/docker-engine-selinux-1.12.3-1.el7.centos.noarch.rpm",
Hash: "a6b0243af348140236ed96f2e902b259c590eefa",
},
}
func (d *dockerVersion) matches(arch Architecture, dockerVersion string, distro distros.Distribution) bool {
if d.DockerVersion != dockerVersion {
return false
}
foundDistro := false
for _, d := range d.Distros {
if d == distro {
foundDistro = true
}
}
if !foundDistro {
return false
}
foundArch := false
for _, a := range d.Architectures {
if a == arch {
foundArch = true
}
}
if !foundArch {
return false
}
return true
}
func (b *DockerBuilder) Build(c *fi.ModelBuilderContext) error {
// Add Apache2 license
{
t := &nodetasks.File{
Path: "/usr/share/doc/docker/apache.txt",
Contents: fi.NewStringResource(resources.DockerApache2License),
Type: nodetasks.FileType_File,
}
c.AddTask(t)
}
dockerVersion := ""
if b.Cluster.Spec.Docker != nil {
dockerVersion = fi.StringValue(b.Cluster.Spec.Docker.Version)
}
if dockerVersion == "" {
dockerVersion = DefaultDockerVersion
glog.Warningf("DockerVersion not specified; using default %q", dockerVersion)
}
// Add packages
{
for i := range dockerVersions {
dv := &dockerVersions[i]
if !dv.matches(b.Architecture, dockerVersion, b.Distribution) {
continue
}
c.AddTask(&nodetasks.Package{
Name: dv.Name,
Version: s(dv.Version),
Source: s(dv.Source),
Hash: s(dv.Hash),
// TODO: PreventStart is now unused?
PreventStart: fi.Bool(true),
})
for _, dep := range dv.Dependencies {
c.AddTask(&nodetasks.Package{Name: dep})
}
// Note we do _not_ stop looping... centos/rhel comprises multiple packages
}
}
dockerSemver, err := semver.Parse(dockerVersion)
if err != nil {
return fmt.Errorf("error parsing docker version %q as semver: %v", dockerVersion, err)
}
c.AddTask(b.buildSystemdService(dockerSemver))
return nil
}
func (b *DockerBuilder) buildSystemdService(dockerVersion semver.Version) *nodetasks.Service {
oldDocker := dockerVersion.Major <= 1 && dockerVersion.Minor <= 11
usesDockerSocket := true
hasDockerBabysitter := false
var dockerdCommand string
if oldDocker {
		dockerdCommand = "/usr/bin/docker daemon"
} else {
dockerdCommand = "/usr/bin/dockerd"
}
if b.Distribution.IsDebianFamily() {
hasDockerBabysitter = true
}
manifest := &systemd.Manifest{}
manifest.Set("Unit", "Description", "Docker Application Container Engine")
manifest.Set("Unit", "Documentation", "https://docs.docker.com")
if usesDockerSocket {
manifest.Set("Unit", "After", "network.target docker.socket")
manifest.Set("Unit", "Requires", "docker.socket")
} else {
manifest.Set("Unit", "After", "network.target")
}
manifest.Set("Service", "Type", "notify")
manifest.Set("Service", "EnvironmentFile", "/etc/sysconfig/docker")
if usesDockerSocket | else {
manifest.Set("Service", "ExecStart", dockerdCommand+" \"$DOCKER_OPTS\"")
}
if !oldDocker {
// This was added by docker 1.12
// TODO: They seem sensible - should we backport them?
manifest.Set("Service", "ExecReload", "/bin/kill -s HUP $MAINPID")
// kill only the docker process, not all processes in the cgroup
manifest.Set("Service", "KillMode", "process")
manifest.Set("Service", "TimeoutStartSec", "0")
}
if oldDocker {
// Only in older versions of docker (< 1.12)
manifest.Set("Service", "MountFlags", "slave")
}
// Having non-zero Limit*s causes performance problems due to accounting overhead
// in the kernel. We recommend using cgroups to do container-local accounting.
// TODO: Should we set this? https://github.com/kubernetes/kubernetes/issues/39682
//service.Set("Service", "LimitNOFILE", "infinity")
//service.Set("Service", "LimitNPROC", "infinity")
//service.Set("Service", "LimitCORE", "infinity")
manifest.Set("Service", "LimitNOFILE", "1048576")
manifest.Set("Service", "LimitNPROC", "1048576")
manifest.Set("Service", "LimitCORE", "infinity")
//# Uncomment TasksMax if your systemd version supports it.
//# Only systemd 226 and above support this version.
//#TasksMax=infinity
manifest.Set("Service", "Restart", "always")
manifest.Set("Service", "RestartSec", "2s")
manifest.Set("Service", "StartLimitInterval", "0")
// set delegate yes so that systemd does not reset the cgroups of docker containers
manifest.Set("Service", "Delegate", "yes")
if hasDockerBabysitter {
manifest.Set("Service", "ExecStartPre", "/opt/kubernetes/helpers/docker-prestart")
}
manifest.Set("Install", "WantedBy", "multi-user.target")
manifestString := manifest.Render()
glog.V(8).Infof("Built service manifest %q\n%s", "docker", manifestString)
service := &nodetasks.Service{
Name: "docker",
Definition: s(manifestString),
}
service.InitDefaults()
return service
}
| {
manifest.Set("Service", "ExecStart", dockerdCommand+" -H fd:// \"$DOCKER_OPTS\"")
} |
token_dataset.py | from torch.utils.data import Dataset
import numpy as np
import torch
from . import functions
class TokensDataset(Dataset):
def __init__(self, X, Y):
self.X = self.encode_x(X)
self.y = Y
@staticmethod
def | (x: list) -> list:
max_len = len(max(x, key=lambda i: len(i)))
encoded = []
for i in x:
encoded.append(np.array(functions.encode(i, max_len)))
return encoded
@staticmethod
def collate_fn(objs: list) -> (torch.LongTensor, torch.Tensor):
data = ([i[0] for i in objs])
labels = ([i[1] for i in objs])
data = torch.LongTensor(data)
labels = torch.tensor(labels)
return data, labels
def __len__(self):
return len(self.y)
def __getitem__(self, idx):
return self.X[idx], self.y[idx]
| encode_x |
DonutSparkline.tsx | // Copyright 2022 Curtin University
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Author: James Diprose
import { HStack, StackProps, Text } from "@chakra-ui/react";
function | (
value: number,
color: string,
strokeWidth = 5,
size = 24
) {
let r = (size - strokeWidth) / 2;
let c = size / 2.0;
let circumference = 2 * Math.PI * r;
let seg1 = (value / 100.0) * circumference;
let seg2 = circumference - seg1;
let bgOffset = 0.25 * circumference; // move 25% anti-clockwise
let fgOffset = seg2 + bgOffset;
let bgStrokeDasharray: Array<number> = [seg1, seg2];
let fgStrokeDasharray: Array<number> = [seg2, seg1];
return {
strokeWidth: strokeWidth,
size: size,
r: r,
c: c,
circumference: circumference,
bgStrokeDasharray: bgStrokeDasharray,
fgStrokeDasharray: fgStrokeDasharray,
bgOffset: bgOffset,
fgOffset: fgOffset,
};
}
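// A quick worked example (added for illustration, not part of the original component):
// with the defaults strokeWidth = 5 and size = 24 and a value of 75,
//   r = (24 - 5) / 2 = 9.5
//   circumference = 2 * Math.PI * 9.5 ≈ 59.69
//   seg1 ≈ 0.75 * 59.69 ≈ 44.77 and seg2 ≈ 14.92
//   bgOffset ≈ 0.25 * 59.69 ≈ 14.92 and fgOffset ≈ 14.92 + 14.92 ≈ 29.84
// so the coloured arc covers 75% of the ring starting at 12 o'clock and the grey arc
// fills the remaining quarter.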
interface Props extends StackProps {
value: number;
color: string;
size: number;
showText: Boolean;
}
const DonutSparkline = ({ value, color, size, showText, ...rest }: Props) => {
let s = calcDonutSettings(value, color);
let text = <></>;
if (showText) {
text = <Text>{Math.round(value)}%</Text>;
}
return (
<HStack spacing={1} {...rest}>
{text}
<svg width={size} height={size} viewBox="0 0 24 24">
<circle
cx={s.c}
cy={s.c}
fillOpacity="0"
r={s.r}
stroke={color}
strokeDasharray={s.bgStrokeDasharray.join(" ")}
strokeDashoffset={s.bgOffset}
strokeWidth={s.strokeWidth}
/>
<circle
cx={s.c}
cy={s.c}
fillOpacity={0}
r={s.r}
stroke="#ededed"
strokeDasharray={s.fgStrokeDasharray.join(" ")}
strokeDashoffset={s.fgOffset}
strokeWidth={s.strokeWidth}
/>
</svg>
</HStack>
);
};
DonutSparkline.defaultProps = {
showText: true,
};
export default DonutSparkline;
| calcDonutSettings |
useRangeViewDates.ts | import type { RangeValue, PickerMode } from '../interface';
import type { GenerateConfig } from '../generate';
import { getValue, updateValues } from '../utils/miscUtil';
import { getClosingViewDate, isSameYear, isSameMonth, isSameDecade } from '../utils/dateUtil';
import type { Ref } from 'vue';
import { watch, computed, ref } from 'vue';
function getStartEndDistance<DateType>(
startDate: DateType,
endDate: DateType,
picker: PickerMode,
generateConfig: GenerateConfig<DateType>,
): 'same' | 'closing' | 'far' {
const startNext = getClosingViewDate(startDate, picker, generateConfig, 1);
function getDistance(compareFunc: (start: DateType | null, end: DateType | null) => boolean) {
if (compareFunc(startDate, endDate)) {
return 'same';
}
if (compareFunc(startNext, endDate)) {
return 'closing';
}
return 'far';
}
switch (picker) {
case 'year':
return getDistance((start, end) => isSameDecade(generateConfig, start, end));
case 'quarter':
case 'month':
return getDistance((start, end) => isSameYear(generateConfig, start, end));
default:
return getDistance((start, end) => isSameMonth(generateConfig, start, end));
}
}
function getRangeViewDate<DateType>(
values: RangeValue<DateType>,
index: 0 | 1,
picker: PickerMode,
generateConfig: GenerateConfig<DateType>,
): DateType | null {
const startDate = getValue(values, 0);
const endDate = getValue(values, 1);
if (index === 0) {
return startDate;
}
if (startDate && endDate) {
const distance = getStartEndDistance(startDate, endDate, picker, generateConfig);
switch (distance) {
case 'same':
return startDate;
case 'closing':
return startDate;
default:
return getClosingViewDate(endDate, picker, generateConfig, -1);
}
}
return startDate;
}
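// A rough sketch of how the right-hand panel picks its view date (added for illustration,
// assuming picker = 'date' and a dayjs/moment-style generateConfig):
//   values = [Jan 10, Feb 5]  -> distance 'closing' -> index 1 reuses the start date's month
//   values = [Jan 10, Jun 20] -> distance 'far'     -> index 1 shows the month before the end date
//   values = [Jan 10, null]   -> index 1 falls back to the start date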
export default function useRangeViewDates<DateType>({
values,
picker,
defaultDates,
generateConfig,
}: {
values: Ref<RangeValue<DateType>>;
picker: Ref<PickerMode>;
defaultDates: RangeValue<DateType> | undefined;
generateConfig: Ref<GenerateConfig<DateType>>;
}): [Ref<DateType>, Ref<DateType>, (viewDate: DateType | null, index: 0 | 1) => void] {
const defaultViewDates = ref<[DateType | null, DateType | null]>([
getValue(defaultDates, 0),
getValue(defaultDates, 1),
]);
const viewDates = ref<RangeValue<DateType>>(null);
const startDate = computed(() => getValue(values.value, 0));
const endDate = computed(() => getValue(values.value, 1));
const getViewDate = (index: 0 | 1): DateType => {
// If a default view date is set, use it
if (defaultViewDates.value[index]) {
return defaultViewDates.value[index]! as DateType;
}
return (
(getValue(viewDates.value, index) as any) ||
getRangeViewDate(values.value, index, picker.value, generateConfig.value) ||
startDate.value ||
endDate.value ||
generateConfig.value.getNow()
);
};
const startViewDate = ref(null);
const endViewDate = ref(null);
watch(
viewDates, | startViewDate.value = getViewDate(0);
endViewDate.value = getViewDate(1);
},
{ immediate: true },
);
function setViewDate(viewDate: DateType | null, index: 0 | 1) {
if (viewDate) {
let newViewDates = updateValues(viewDates.value, viewDate as any, index);
// Setting the view date cleans up the default one
// Should always be an array
defaultViewDates.value = updateValues(defaultViewDates.value, null, index) || [null, null];
// Reset the other one when it has no value
const anotherIndex = (index + 1) % 2;
if (!getValue(values.value, anotherIndex)) {
newViewDates = updateValues(newViewDates, viewDate, anotherIndex);
}
viewDates.value = newViewDates;
} else if (startDate.value || endDate.value) {
// Reset all view dates when there are values and `viewDate` is `null`, which means it came from the open trigger
viewDates.value = null;
}
}
return [startViewDate, endViewDate, setViewDate];
} | () => { |
multiplayer.component.spec.ts | import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { MultiplayerComponent } from './multiplayer.component';
describe('MultiplayerComponent', () => {
let component: MultiplayerComponent;
let fixture: ComponentFixture<MultiplayerComponent>;
beforeEach(waitForAsync(() => { | .compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(MultiplayerComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
}); | TestBed.configureTestingModule({
declarations: [ MultiplayerComponent ]
}) |
browser.js | /**
* @license Angular v12.2.6
* (c) 2010-2021 Google LLC. https://angular.io/
* License: MIT
*/
import { ɵAnimationGroupPlayer, NoopAnimationPlayer, AUTO_STYLE, ɵPRE_STYLE, sequence, style } from '@angular/animations';
import { Injectable } from '@angular/core';
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
function isBrowser() {
return (typeof window !== 'undefined' && typeof window.document !== 'undefined');
}
function isNode() {
// Checking only for `process` isn't enough to identify whether or not we're in a Node
// environment, because Webpack by default will polyfill the `process`. While we can discern
// that Webpack polyfilled it by looking at `process.browser`, it's very Webpack-specific and
// might not be future-proof. Instead we look at the stringified version of `process` which
// is `[object process]` in Node and `[object Object]` when polyfilled.
return typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
}
function optimizeGroupPlayer(players) {
switch (players.length) {
case 0:
return new NoopAnimationPlayer();
case 1:
return players[0];
default:
return new ɵAnimationGroupPlayer(players);
}
}
function normalizeKeyframes(driver, normalizer, element, keyframes, preStyles = {}, postStyles = {}) {
const errors = [];
const normalizedKeyframes = [];
let previousOffset = -1;
let previousKeyframe = null;
keyframes.forEach(kf => {
const offset = kf['offset'];
const isSameOffset = offset == previousOffset;
const normalizedKeyframe = (isSameOffset && previousKeyframe) || {};
Object.keys(kf).forEach(prop => {
let normalizedProp = prop;
let normalizedValue = kf[prop];
if (prop !== 'offset') {
normalizedProp = normalizer.normalizePropertyName(normalizedProp, errors);
switch (normalizedValue) {
case ɵPRE_STYLE:
normalizedValue = preStyles[prop];
break;
case AUTO_STYLE:
normalizedValue = postStyles[prop];
break;
default:
normalizedValue =
normalizer.normalizeStyleValue(prop, normalizedProp, normalizedValue, errors);
break;
}
}
normalizedKeyframe[normalizedProp] = normalizedValue;
});
if (!isSameOffset) {
normalizedKeyframes.push(normalizedKeyframe);
}
previousKeyframe = normalizedKeyframe;
previousOffset = offset;
});
if (errors.length) {
const LINE_START = '\n - ';
throw new Error(`Unable to animate due to the following errors:${LINE_START}${errors.join(LINE_START)}`);
}
return normalizedKeyframes;
}
function listenOnPlayer(player, eventName, event, callback) {
switch (eventName) {
case 'start':
player.onStart(() => callback(event && copyAnimationEvent(event, 'start', player)));
break;
case 'done':
player.onDone(() => callback(event && copyAnimationEvent(event, 'done', player)));
break;
case 'destroy':
player.onDestroy(() => callback(event && copyAnimationEvent(event, 'destroy', player)));
break;
}
}
function copyAnimationEvent(e, phaseName, player) {
const totalTime = player.totalTime;
const disabled = player.disabled ? true : false;
const event = makeAnimationEvent(e.element, e.triggerName, e.fromState, e.toState, phaseName || e.phaseName, totalTime == undefined ? e.totalTime : totalTime, disabled);
const data = e['_data'];
if (data != null) {
event['_data'] = data;
}
return event;
}
function makeAnimationEvent(element, triggerName, fromState, toState, phaseName = '', totalTime = 0, disabled) {
return { element, triggerName, fromState, toState, phaseName, totalTime, disabled: !!disabled };
}
function getOrSetAsInMap(map, key, defaultValue) {
let value;
if (map instanceof Map) {
value = map.get(key);
if (!value) {
map.set(key, value = defaultValue);
}
}
else {
value = map[key];
if (!value) {
value = map[key] = defaultValue;
}
}
return value;
}
function parseTimelineCommand(command) {
const separatorPos = command.indexOf(':');
const id = command.substring(1, separatorPos);
const action = command.substr(separatorPos + 1);
return [id, action];
}
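// For example (illustrative only, not in the original bundle):
// parseTimelineCommand('@myAnimation:start') -> ['myAnimation', 'start'];
// the leading '@' is dropped and everything after the first ':' becomes the action.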
let _contains = (elm1, elm2) => false;
const ɵ0 = _contains;
let _matches = (element, selector) => false;
const ɵ1 = _matches;
let _query = (element, selector, multi) => {
return [];
};
const ɵ2 = _query;
// Define utility methods for browsers and platform-server(domino) where Element
// and utility methods exist.
const _isNode = isNode();
if (_isNode || typeof Element !== 'undefined') {
if (!isBrowser()) {
_contains = (elm1, elm2) => elm1.contains(elm2);
}
else {
_contains = (elm1, elm2) => {
while (elm2 && elm2 !== document.documentElement) {
if (elm2 === elm1) {
return true;
}
elm2 = elm2.parentNode || elm2.host; // consider host to support shadow DOM
}
return false;
};
}
_matches = (() => {
if (_isNode || Element.prototype.matches) {
return (element, selector) => element.matches(selector);
}
else {
const proto = Element.prototype;
const fn = proto.matchesSelector || proto.mozMatchesSelector || proto.msMatchesSelector ||
proto.oMatchesSelector || proto.webkitMatchesSelector;
if (fn) {
return (element, selector) => fn.apply(element, [selector]);
}
else {
return _matches;
}
}
})();
_query = (element, selector, multi) => {
let results = [];
if (multi) {
// DO NOT REFACTOR TO USE SPREAD SYNTAX.
// For element queries that return sufficiently large NodeList objects,
// using spread syntax to populate the results array causes a RangeError
// due to the call stack limit being reached. `Array.from` can not be used
// as well, since NodeList is not iterable in IE 11, see
// https://developer.mozilla.org/en-US/docs/Web/API/NodeList
// More info is available in #38551.
const elems = element.querySelectorAll(selector);
for (let i = 0; i < elems.length; i++) {
results.push(elems[i]);
}
}
else {
const elm = element.querySelector(selector);
if (elm) {
results.push(elm);
}
}
return results;
};
}
function containsVendorPrefix(prop) {
// Webkit is the only real popular vendor prefix nowadays
// cc: http://shouldiprefix.com/
return prop.substring(1, 6) == 'ebkit'; // webkit or Webkit
}
let _CACHED_BODY = null;
let _IS_WEBKIT = false;
function validateStyleProperty(prop) {
if (!_CACHED_BODY) {
_CACHED_BODY = getBodyNode() || {};
_IS_WEBKIT = _CACHED_BODY.style ? ('WebkitAppearance' in _CACHED_BODY.style) : false;
}
let result = true;
if (_CACHED_BODY.style && !containsVendorPrefix(prop)) {
result = prop in _CACHED_BODY.style;
if (!result && _IS_WEBKIT) {
const camelProp = 'Webkit' + prop.charAt(0).toUpperCase() + prop.substr(1);
result = camelProp in _CACHED_BODY.style;
}
}
return result;
}
function getBodyNode() {
if (typeof document != 'undefined') {
return document.body;
}
return null;
}
const matchesElement = _matches;
const containsElement = _contains;
const invokeQuery = _query;
function hypenatePropsObject(object) {
const newObj = {};
Object.keys(object).forEach(prop => {
const newProp = prop.replace(/([a-z])([A-Z])/g, '$1-$2');
newObj[newProp] = object[prop];
});
return newObj;
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @publicApi
*/
class NoopAnimationDriver {
validateStyleProperty(prop) {
return validateStyleProperty(prop);
}
matchesElement(element, selector) {
return matchesElement(element, selector);
}
containsElement(elm1, elm2) {
return containsElement(elm1, elm2);
}
query(element, selector, multi) {
return invokeQuery(element, selector, multi);
}
computeStyle(element, prop, defaultValue) {
return defaultValue || '';
}
animate(element, keyframes, duration, delay, easing, previousPlayers = [], scrubberAccessRequested) {
return new NoopAnimationPlayer(duration, delay);
}
}
NoopAnimationDriver.decorators = [
{ type: Injectable }
];
/**
* @publicApi
*/
class AnimationDriver {
}
AnimationDriver.NOOP = ( /* @__PURE__ */new NoopAnimationDriver());
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const ONE_SECOND = 1000;
const SUBSTITUTION_EXPR_START = '{{';
const SUBSTITUTION_EXPR_END = '}}';
const ENTER_CLASSNAME = 'ng-enter';
const LEAVE_CLASSNAME = 'ng-leave';
const ENTER_SELECTOR = '.ng-enter';
const LEAVE_SELECTOR = '.ng-leave';
const NG_TRIGGER_CLASSNAME = 'ng-trigger';
const NG_TRIGGER_SELECTOR = '.ng-trigger';
const NG_ANIMATING_CLASSNAME = 'ng-animating';
const NG_ANIMATING_SELECTOR = '.ng-animating';
function resolveTimingValue(value) {
if (typeof value == 'number')
return value;
const matches = value.match(/^(-?[\.\d]+)(m?s)/);
if (!matches || matches.length < 2)
return 0;
return _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
}
function _convertTimeValueToMS(value, unit) {
switch (unit) {
case 's':
return value * ONE_SECOND;
default: // ms or something else
return value;
}
}
function resolveTiming(timings, errors, allowNegativeValues) {
return timings.hasOwnProperty('duration') ?
timings :
parseTimeExpression(timings, errors, allowNegativeValues);
}
function parseTimeExpression(exp, errors, allowNegativeValues) {
const regex = /^(-?[\.\d]+)(m?s)(?:\s+(-?[\.\d]+)(m?s))?(?:\s+([-a-z]+(?:\(.+?\))?))?$/i;
let duration;
let delay = 0;
let easing = '';
if (typeof exp === 'string') {
const matches = exp.match(regex);
if (matches === null) {
errors.push(`The provided timing value "${exp}" is invalid.`);
return { duration: 0, delay: 0, easing: '' };
}
duration = _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
const delayMatch = matches[3];
if (delayMatch != null) {
delay = _convertTimeValueToMS(parseFloat(delayMatch), matches[4]);
}
const easingVal = matches[5];
if (easingVal) {
easing = easingVal;
}
}
else {
duration = exp;
}
if (!allowNegativeValues) {
let containsErrors = false;
let startIndex = errors.length;
if (duration < 0) {
errors.push(`Duration values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (delay < 0) {
errors.push(`Delay values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (containsErrors) {
errors.splice(startIndex, 0, `The provided timing value "${exp}" is invalid.`);
}
}
return { duration, delay, easing };
}
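// Rough usage sketch (added here, not part of the original source):
//   resolveTiming('1s 500ms ease-out', []) -> { duration: 1000, delay: 500, easing: 'ease-out' }
//   resolveTiming(250, [])                 -> { duration: 250, delay: 0, easing: '' }
//   resolveTiming('-1s', errors)           -> { duration: -1000, ... } and "below 0" messages pushed into errors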
function copyObj(obj, destination = {}) {
Object.keys(obj).forEach(prop => {
destination[prop] = obj[prop];
});
return destination;
}
function normalizeStyles(styles) {
const normalizedStyles = {};
if (Array.isArray(styles)) {
styles.forEach(data => copyStyles(data, false, normalizedStyles));
}
else {
copyStyles(styles, false, normalizedStyles);
}
return normalizedStyles;
}
function copyStyles(styles, readPrototype, destination = {}) {
if (readPrototype) {
// we make use of a for-in loop so that the
// prototypically inherited properties are
// revealed from the backFill map
for (let prop in styles) {
destination[prop] = styles[prop];
}
}
else {
copyObj(styles, destination);
}
return destination;
}
function getStyleAttributeString(element, key, value) {
// Return the key-value pair string to be added to the style attribute for the
// given CSS style key.
if (value) {
return key + ':' + value + ';';
}
else {
return '';
}
}
function writeStyleAttribute(element) {
// Read the style property of the element and manually reflect it to the
// style attribute. This is needed because Domino on platform-server doesn't
// understand the full set of allowed CSS properties and doesn't reflect some
// of them automatically.
let styleAttrValue = '';
for (let i = 0; i < element.style.length; i++) {
const key = element.style.item(i);
styleAttrValue += getStyleAttributeString(element, key, element.style.getPropertyValue(key));
}
for (const key in element.style) {
// Skip internal Domino properties that don't need to be reflected.
if (!element.style.hasOwnProperty(key) || key.startsWith('_')) {
continue;
}
const dashKey = camelCaseToDashCase(key);
styleAttrValue += getStyleAttributeString(element, dashKey, element.style[key]);
}
element.setAttribute('style', styleAttrValue);
}
function setStyles(element, styles, formerStyles) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
if (formerStyles && !formerStyles.hasOwnProperty(prop)) {
formerStyles[prop] = element.style[camelProp];
}
element.style[camelProp] = styles[prop];
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
function eraseStyles(element, styles) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
element.style[camelProp] = '';
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
function normalizeAnimationEntry(steps) {
if (Array.isArray(steps)) {
if (steps.length == 1)
return steps[0];
return sequence(steps);
}
return steps;
}
function validateStyleParams(value, options, errors) {
const params = options.params || {};
const matches = extractStyleParams(value);
if (matches.length) {
matches.forEach(varName => {
if (!params.hasOwnProperty(varName)) {
errors.push(`Unable to resolve the local animation param ${varName} in the given list of values`);
}
});
}
}
const PARAM_REGEX = new RegExp(`${SUBSTITUTION_EXPR_START}\\s*(.+?)\\s*${SUBSTITUTION_EXPR_END}`, 'g');
function extractStyleParams(value) {
let params = [];
if (typeof value === 'string') {
let match;
while (match = PARAM_REGEX.exec(value)) {
params.push(match[1]);
}
PARAM_REGEX.lastIndex = 0;
}
return params;
}
function interpolateParams(value, params, errors) {
const original = value.toString();
const str = original.replace(PARAM_REGEX, (_, varName) => {
let localVal = params[varName];
// this means that the value was never overridden by the data passed in by the user
if (!params.hasOwnProperty(varName)) {
errors.push(`Please provide a value for the animation param ${varName}`);
localVal = '';
}
return localVal.toString();
});
// we do this to assert that numeric values stay as they are
return str == original ? value : str;
}
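// Quick illustration (added for clarity, not in the original bundle):
//   interpolateParams('{{ delay }}ms', { delay: 300 }, errors) -> '300ms'
//   interpolateParams('{{ delay }}ms', {}, errors)             -> 'ms' plus an error about the missing param
//   interpolateParams(500, {}, errors)                         -> 500 (no substitution token, value returned unchanged)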
function iteratorToArray(iterator) {
const arr = [];
let item = iterator.next();
while (!item.done) {
arr.push(item.value);
item = iterator.next();
}
return arr;
}
const DASH_CASE_REGEXP = /-+([a-z0-9])/g;
function dashCaseToCamelCase(input) {
return input.replace(DASH_CASE_REGEXP, (...m) => m[1].toUpperCase());
}
function camelCaseToDashCase(input) {
return input.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
}
function allowPreviousPlayerStylesMerge(duration, delay) {
return duration === 0 || delay === 0;
}
function balancePreviousStylesIntoKeyframes(element, keyframes, previousStyles) {
const previousStyleProps = Object.keys(previousStyles);
if (previousStyleProps.length && keyframes.length) {
let startingKeyframe = keyframes[0];
let missingStyleProps = [];
previousStyleProps.forEach(prop => {
if (!startingKeyframe.hasOwnProperty(prop)) {
missingStyleProps.push(prop);
}
startingKeyframe[prop] = previousStyles[prop];
});
if (missingStyleProps.length) {
// tslint:disable-next-line
for (var i = 1; i < keyframes.length; i++) {
let kf = keyframes[i];
missingStyleProps.forEach(function (prop) {
kf[prop] = computeStyle(element, prop);
});
}
}
}
return keyframes;
}
function visitDslNode(visitor, node, context) {
switch (node.type) {
case 7 /* Trigger */:
return visitor.visitTrigger(node, context);
case 0 /* State */:
return visitor.visitState(node, context);
case 1 /* Transition */:
return visitor.visitTransition(node, context);
case 2 /* Sequence */:
return visitor.visitSequence(node, context);
case 3 /* Group */:
return visitor.visitGroup(node, context);
case 4 /* Animate */:
return visitor.visitAnimate(node, context);
case 5 /* Keyframes */:
return visitor.visitKeyframes(node, context);
case 6 /* Style */:
return visitor.visitStyle(node, context);
case 8 /* Reference */:
return visitor.visitReference(node, context);
case 9 /* AnimateChild */:
return visitor.visitAnimateChild(node, context);
case 10 /* AnimateRef */:
return visitor.visitAnimateRef(node, context);
case 11 /* Query */:
return visitor.visitQuery(node, context);
case 12 /* Stagger */:
return visitor.visitStagger(node, context);
default:
throw new Error(`Unable to resolve animation metadata node #${node.type}`);
}
}
function computeStyle(element, prop) {
return window.getComputedStyle(element)[prop];
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const ANY_STATE = '*';
function parseTransitionExpr(transitionValue, errors) {
const expressions = [];
if (typeof transitionValue == 'string') {
transitionValue.split(/\s*,\s*/).forEach(str => parseInnerTransitionStr(str, expressions, errors));
}
else {
expressions.push(transitionValue);
}
return expressions;
}
function parseInnerTransitionStr(eventStr, expressions, errors) {
if (eventStr[0] == ':') {
const result = parseAnimationAlias(eventStr, errors);
if (typeof result == 'function') {
expressions.push(result);
return;
}
eventStr = result;
}
const match = eventStr.match(/^(\*|[-\w]+)\s*(<?[=-]>)\s*(\*|[-\w]+)$/);
if (match == null || match.length < 4) {
errors.push(`The provided transition expression "${eventStr}" is not supported`);
return expressions;
}
const fromState = match[1];
const separator = match[2];
const toState = match[3];
expressions.push(makeLambdaFromStates(fromState, toState));
const isFullAnyStateExpr = fromState == ANY_STATE && toState == ANY_STATE;
if (separator[0] == '<' && !isFullAnyStateExpr) {
expressions.push(makeLambdaFromStates(toState, fromState));
}
}
function parseAnimationAlias(alias, errors) {
switch (alias) {
case ':enter':
return 'void => *';
case ':leave':
return '* => void';
case ':increment':
return (fromState, toState) => parseFloat(toState) > parseFloat(fromState);
case ':decrement':
return (fromState, toState) => parseFloat(toState) < parseFloat(fromState);
default:
errors.push(`The transition alias value "${alias}" is not supported`);
return '* => *';
}
}
// DO NOT REFACTOR ... keep the following Set instantiations
// with the values intact (the closure compiler for some reason
// removes follow-up lines that add the values outside of
// the constructor)
const TRUE_BOOLEAN_VALUES = new Set(['true', '1']);
const FALSE_BOOLEAN_VALUES = new Set(['false', '0']);
function makeLambdaFromStates(lhs, rhs) {
const LHS_MATCH_BOOLEAN = TRUE_BOOLEAN_VALUES.has(lhs) || FALSE_BOOLEAN_VALUES.has(lhs);
const RHS_MATCH_BOOLEAN = TRUE_BOOLEAN_VALUES.has(rhs) || FALSE_BOOLEAN_VALUES.has(rhs);
return (fromState, toState) => {
let lhsMatch = lhs == ANY_STATE || lhs == fromState;
let rhsMatch = rhs == ANY_STATE || rhs == toState;
if (!lhsMatch && LHS_MATCH_BOOLEAN && typeof fromState === 'boolean') {
lhsMatch = fromState ? TRUE_BOOLEAN_VALUES.has(lhs) : FALSE_BOOLEAN_VALUES.has(lhs);
}
if (!rhsMatch && RHS_MATCH_BOOLEAN && typeof toState === 'boolean') {
rhsMatch = toState ? TRUE_BOOLEAN_VALUES.has(rhs) : FALSE_BOOLEAN_VALUES.has(rhs);
}
return lhsMatch && rhsMatch;
};
}
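// Sketch of how the transition helpers above fit together (illustrative only):
//   const matchers = parseTransitionExpr('open => closed', []);
//   matchers[0]('open', 'closed')  -> true
//   matchers[0]('closed', 'open')  -> false
// 'open <=> closed' also pushes the reversed matcher, and an alias such as ':enter'
// is first expanded to 'void => *' before being turned into a matcher.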
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const SELF_TOKEN = ':self';
const SELF_TOKEN_REGEX = new RegExp(`\s*${SELF_TOKEN}\s*,?`, 'g');
/*
* [Validation]
* The visitor code below will traverse the animation AST generated by the animation verb functions
* (the output is a tree of objects) and attempt to perform a series of validations on the data. The
* following corner-cases will be validated:
*
* 1. Overlap of animations
* Given that a CSS property cannot be animated in more than one place at the same time, it's
* important that this behavior is detected and validated. The way in which this occurs is that
* each time a style property is examined, a string-map containing the property will be updated with
* the start and end times for when the property is used within an animation step.
*
* If there are two or more parallel animations that are currently running (these are invoked by the
* group()) on the same element then the validator will throw an error. Since the start/end timing
* values are collected for each property then if the current animation step is animating the same
* property and its timing values fall anywhere into the window of time that the property is
* currently being animated within then this is what causes an error.
*
* 2. Timing values
* The validator will validate to see if a timing value of `duration delay easing` or
* `durationNumber` is valid or not.
*
* (note that upon validation the code below will replace the timing data with an object containing
* {duration,delay,easing}.
*
* 3. Offset Validation
* Each of the style() calls are allowed to have an offset value when placed inside of keyframes().
* Offsets within keyframes() are considered valid when:
*
* - No offsets are used at all
* - Each style() entry contains an offset value
* - Each offset is between 0 and 1
* - Each offset is greater to or equal than the previous one
*
* Otherwise an error will be thrown.
*/
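// A concrete illustration of the offset rules above (added for clarity, not in the original bundle):
//   keyframes([style({opacity: 0, offset: 0}), style({opacity: 1, offset: 1})])     // valid
//   keyframes([style({opacity: 0}), style({opacity: 1})])                           // valid, offsets are generated evenly
//   keyframes([style({opacity: 0, offset: 0.5}), style({opacity: 1})])              // invalid: only some steps carry offsets
//   keyframes([style({opacity: 0, offset: 0.8}), style({opacity: 1, offset: 0.2})]) // invalid: offsets out of order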
function buildAnimationAst(driver, metadata, errors) {
return new AnimationAstBuilderVisitor(driver).build(metadata, errors);
}
const ROOT_SELECTOR = '';
class AnimationAstBuilderVisitor {
constructor(_driver) {
this._driver = _driver;
}
build(metadata, errors) {
const context = new AnimationAstBuilderContext(errors);
this._resetContextStyleTimingState(context);
return visitDslNode(this, normalizeAnimationEntry(metadata), context);
}
_resetContextStyleTimingState(context) {
context.currentQuerySelector = ROOT_SELECTOR;
context.collectedStyles = {};
context.collectedStyles[ROOT_SELECTOR] = {};
context.currentTime = 0;
}
visitTrigger(metadata, context) {
let queryCount = context.queryCount = 0;
let depCount = context.depCount = 0;
const states = [];
const transitions = [];
if (metadata.name.charAt(0) == '@') {
context.errors.push('animation triggers cannot be prefixed with an `@` sign (e.g. trigger(\'@foo\', [...]))');
}
metadata.definitions.forEach(def => {
this._resetContextStyleTimingState(context);
if (def.type == 0 /* State */) {
const stateDef = def;
const name = stateDef.name;
name.toString().split(/\s*,\s*/).forEach(n => {
stateDef.name = n;
states.push(this.visitState(stateDef, context));
});
stateDef.name = name;
}
else if (def.type == 1 /* Transition */) {
const transition = this.visitTransition(def, context);
queryCount += transition.queryCount;
depCount += transition.depCount;
transitions.push(transition);
}
else {
context.errors.push('only state() and transition() definitions can sit inside of a trigger()');
}
});
return {
type: 7 /* Trigger */,
name: metadata.name,
states,
transitions,
queryCount,
depCount,
options: null
};
}
visitState(metadata, context) {
const styleAst = this.visitStyle(metadata.styles, context);
const astParams = (metadata.options && metadata.options.params) || null;
if (styleAst.containsDynamicStyles) {
const missingSubs = new Set();
const params = astParams || {};
styleAst.styles.forEach(value => {
if (isObject(value)) {
const stylesObj = value;
Object.keys(stylesObj).forEach(prop => {
extractStyleParams(stylesObj[prop]).forEach(sub => {
if (!params.hasOwnProperty(sub)) {
missingSubs.add(sub);
}
});
});
}
});
if (missingSubs.size) {
const missingSubsArr = iteratorToArray(missingSubs.values());
context.errors.push(`state("${metadata
.name}", ...) must define default values for all the following style substitutions: ${missingSubsArr.join(', ')}`);
}
}
return {
type: 0 /* State */,
name: metadata.name,
style: styleAst,
options: astParams ? { params: astParams } : null
};
}
visitTransition(metadata, context) {
context.queryCount = 0;
context.depCount = 0;
const animation = visitDslNode(this, normalizeAnimationEntry(metadata.animation), context);
const matchers = parseTransitionExpr(metadata.expr, context.errors);
return {
type: 1 /* Transition */,
matchers,
animation,
queryCount: context.queryCount,
depCount: context.depCount,
options: normalizeAnimationOptions(metadata.options)
};
}
visitSequence(metadata, context) {
return {
type: 2 /* Sequence */,
steps: metadata.steps.map(s => visitDslNode(this, s, context)),
options: normalizeAnimationOptions(metadata.options)
};
}
visitGroup(metadata, context) {
const currentTime = context.currentTime;
let furthestTime = 0;
const steps = metadata.steps.map(step => {
context.currentTime = currentTime;
const innerAst = visitDslNode(this, step, context);
furthestTime = Math.max(furthestTime, context.currentTime);
return innerAst;
});
context.currentTime = furthestTime;
return {
type: 3 /* Group */,
steps,
options: normalizeAnimationOptions(metadata.options)
};
}
visitAnimate(metadata, context) {
const timingAst = constructTimingAst(metadata.timings, context.errors);
context.currentAnimateTimings = timingAst;
let styleAst;
let styleMetadata = metadata.styles ? metadata.styles : style({});
if (styleMetadata.type == 5 /* Keyframes */) {
styleAst = this.visitKeyframes(styleMetadata, context);
}
else {
let styleMetadata = metadata.styles;
let isEmpty = false;
if (!styleMetadata) {
isEmpty = true;
const newStyleData = {};
if (timingAst.easing) {
newStyleData['easing'] = timingAst.easing;
}
styleMetadata = style(newStyleData);
}
context.currentTime += timingAst.duration + timingAst.delay;
const _styleAst = this.visitStyle(styleMetadata, context);
_styleAst.isEmptyStep = isEmpty;
styleAst = _styleAst;
}
context.currentAnimateTimings = null;
return {
type: 4 /* Animate */,
timings: timingAst,
style: styleAst,
options: null
};
}
visitStyle(metadata, context) {
const ast = this._makeStyleAst(metadata, context);
this._validateStyleAst(ast, context);
return ast;
}
_makeStyleAst(metadata, context) {
const styles = [];
if (Array.isArray(metadata.styles)) {
metadata.styles.forEach(styleTuple => {
if (typeof styleTuple == 'string') {
if (styleTuple == AUTO_STYLE) {
styles.push(styleTuple);
}
else {
context.errors.push(`The provided style string value ${styleTuple} is not allowed.`);
}
}
else {
styles.push(styleTuple);
}
});
}
else {
styles.push(metadata.styles);
}
let containsDynamicStyles = false;
let collectedEasing = null;
styles.forEach(styleData => {
if (isObject(styleData)) {
const styleMap = styleData;
const easing = styleMap['easing'];
if (easing) {
collectedEasing = easing;
delete styleMap['easing'];
}
if (!containsDynamicStyles) {
for (let prop in styleMap) {
const value = styleMap[prop];
if (value.toString().indexOf(SUBSTITUTION_EXPR_START) >= 0) {
containsDynamicStyles = true;
break;
}
}
}
}
});
return {
type: 6 /* Style */,
styles,
easing: collectedEasing,
offset: metadata.offset,
containsDynamicStyles,
options: null
};
}
_validateStyleAst(ast, context) {
const timings = context.currentAnimateTimings;
let endTime = context.currentTime;
let startTime = context.currentTime;
if (timings && startTime > 0) {
startTime -= timings.duration + timings.delay;
}
ast.styles.forEach(tuple => {
if (typeof tuple == 'string')
return;
Object.keys(tuple).forEach(prop => {
if (!this._driver.validateStyleProperty(prop)) {
context.errors.push(`The provided animation property "${prop}" is not a supported CSS property for animations`);
return;
}
const collectedStyles = context.collectedStyles[context.currentQuerySelector];
const collectedEntry = collectedStyles[prop];
let updateCollectedStyle = true;
if (collectedEntry) {
if (startTime != endTime && startTime >= collectedEntry.startTime &&
endTime <= collectedEntry.endTime) {
context.errors.push(`The CSS property "${prop}" that exists between the times of "${collectedEntry.startTime}ms" and "${collectedEntry
.endTime}ms" is also being animated in a parallel animation between the times of "${startTime}ms" and "${endTime}ms"`);
updateCollectedStyle = false;
}
// we always choose the smaller start time value since we
// want to have a record of the entire animation window where
// the style property is being animated in between
startTime = collectedEntry.startTime;
}
if (updateCollectedStyle) {
collectedStyles[prop] = { startTime, endTime };
}
if (context.options) {
validateStyleParams(tuple[prop], context.options, context.errors);
}
});
});
}
visitKeyframes(metadata, context) {
const ast = { type: 5 /* Keyframes */, styles: [], options: null };
if (!context.currentAnimateTimings) {
context.errors.push(`keyframes() must be placed inside of a call to animate()`);
return ast;
}
const MAX_KEYFRAME_OFFSET = 1;
let totalKeyframesWithOffsets = 0;
const offsets = [];
let offsetsOutOfOrder = false;
let keyframesOutOfRange = false;
let previousOffset = 0;
const keyframes = metadata.steps.map(styles => {
const style = this._makeStyleAst(styles, context);
let offsetVal = style.offset != null ? style.offset : consumeOffset(style.styles);
let offset = 0;
if (offsetVal != null) {
totalKeyframesWithOffsets++;
offset = style.offset = offsetVal;
}
keyframesOutOfRange = keyframesOutOfRange || offset < 0 || offset > 1;
offsetsOutOfOrder = offsetsOutOfOrder || offset < previousOffset;
previousOffset = offset;
offsets.push(offset);
return style;
});
if (keyframesOutOfRange) {
context.errors.push(`Please ensure that all keyframe offsets are between 0 and 1`);
}
if (offsetsOutOfOrder) {
context.errors.push(`Please ensure that all keyframe offsets are in order`);
}
const length = metadata.steps.length;
let generatedOffset = 0;
if (totalKeyframesWithOffsets > 0 && totalKeyframesWithOffsets < length) {
context.errors.push(`Not all style() steps within the declared keyframes() contain offsets`);
}
else if (totalKeyframesWithOffsets == 0) {
generatedOffset = MAX_KEYFRAME_OFFSET / (length - 1);
}
const limit = length - 1;
const currentTime = context.currentTime;
const currentAnimateTimings = context.currentAnimateTimings;
const animateDuration = currentAnimateTimings.duration;
keyframes.forEach((kf, i) => {
const offset = generatedOffset > 0 ? (i == limit ? 1 : (generatedOffset * i)) : offsets[i];
const durationUpToThisFrame = offset * animateDuration;
context.currentTime = currentTime + currentAnimateTimings.delay + durationUpToThisFrame;
currentAnimateTimings.duration = durationUpToThisFrame;
this._validateStyleAst(kf, context);
kf.offset = offset;
ast.styles.push(kf);
});
return ast;
}
visitReference(metadata, context) {
return {
type: 8 /* Reference */,
animation: visitDslNode(this, normalizeAnimationEntry(metadata.animation), context),
options: normalizeAnimationOptions(metadata.options)
};
}
visitAnimateChild(metadata, context) {
context.depCount++;
return {
type: 9 /* AnimateChild */,
options: normalizeAnimationOptions(metadata.options)
};
}
visitAnimateRef(metadata, context) {
return {
type: 10 /* AnimateRef */,
animation: this.visitReference(metadata.animation, context),
options: normalizeAnimationOptions(metadata.options)
};
}
visitQuery(metadata, context) {
const parentSelector = context.currentQuerySelector;
const options = (metadata.options || {});
context.queryCount++;
context.currentQuery = metadata;
const [selector, includeSelf] = normalizeSelector(metadata.selector);
context.currentQuerySelector =
parentSelector.length ? (parentSelector + ' ' + selector) : selector;
getOrSetAsInMap(context.collectedStyles, context.currentQuerySelector, {});
const animation = visitDslNode(this, normalizeAnimationEntry(metadata.animation), context);
context.currentQuery = null;
context.currentQuerySelector = parentSelector;
return {
type: 11 /* Query */,
selector,
limit: options.limit || 0,
optional: !!options.optional,
includeSelf,
animation,
originalSelector: metadata.selector,
options: normalizeAnimationOptions(metadata.options)
};
}
visitStagger(metadata, context) {
if (!context.currentQuery) {
context.errors.push(`stagger() can only be used inside of query()`);
}
const timings = metadata.timings === 'full' ?
{ duration: 0, delay: 0, easing: 'full' } :
resolveTiming(metadata.timings, context.errors, true);
return {
type: 12 /* Stagger */,
animation: visitDslNode(this, normalizeAnimationEntry(metadata.animation), context),
timings,
options: null
};
}
}
function normalizeSelector(selector) {
const hasAmpersand = selector.split(/\s*,\s*/).find(token => token == SELF_TOKEN) ? true : false;
if (hasAmpersand) {
selector = selector.replace(SELF_TOKEN_REGEX, '');
}
// the :enter and :leave selectors are filled in at runtime during timeline building
selector = selector.replace(/@\*/g, NG_TRIGGER_SELECTOR)
.replace(/@\w+/g, match => NG_TRIGGER_SELECTOR + '-' + match.substr(1))
.replace(/:animating/g, NG_ANIMATING_SELECTOR);
return [selector, hasAmpersand];
}
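// Examples of the rewriting above (illustrative, not part of the original source):
//   normalizeSelector('@*')         -> ['.ng-trigger', false]
//   normalizeSelector('@slideIn')   -> ['.ng-trigger-slideIn', false]
//   normalizeSelector(':animating') -> ['.ng-animating', false]
// and a ':self' token is removed from the selector while flipping the returned includeSelf flag to true.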
function normalizeParams(obj) {
return obj ? copyObj(obj) : null;
}
class AnimationAstBuilderContext {
constructor(errors) {
this.errors = errors;
this.queryCount = 0;
this.depCount = 0;
this.currentTransition = null;
this.currentQuery = null;
this.currentQuerySelector = null;
this.currentAnimateTimings = null;
this.currentTime = 0;
this.collectedStyles = {};
this.options = null;
}
}
function consumeOffset(styles) {
if (typeof styles == 'string')
return null;
let offset = null;
if (Array.isArray(styles)) {
styles.forEach(styleTuple => {
if (isObject(styleTuple) && styleTuple.hasOwnProperty('offset')) {
const obj = styleTuple;
offset = parseFloat(obj['offset']);
delete obj['offset'];
}
});
}
else if (isObject(styles) && styles.hasOwnProperty('offset')) {
const obj = styles;
offset = parseFloat(obj['offset']);
delete obj['offset'];
}
return offset;
}
function isObject(value) {
return !Array.isArray(value) && typeof value == 'object';
}
function constructTimingAst(value, errors) {
let timings = null;
if (value.hasOwnProperty('duration')) {
timings = value;
}
else if (typeof value == 'number') {
const duration = resolveTiming(value, errors).duration;
return makeTimingAst(duration, 0, '');
}
const strValue = value;
const isDynamic = strValue.split(/\s+/).some(v => v.charAt(0) == '{' && v.charAt(1) == '{');
if (isDynamic) {
const ast = makeTimingAst(0, 0, '');
ast.dynamic = true;
ast.strValue = strValue;
return ast;
}
timings = timings || resolveTiming(strValue, errors);
return makeTimingAst(timings.duration, timings.delay, timings.easing);
}
function normalizeAnimationOptions(options) {
if (options) {
options = copyObj(options);
if (options['params']) {
options['params'] = normalizeParams(options['params']);
}
}
else {
options = {};
}
return options;
}
function makeTimingAst(duration, delay, easing) {
return { duration, delay, easing };
}
function createTimelineInstruction(element, keyframes, preStyleProps, postStyleProps, duration, delay, easing = null, subTimeline = false) {
return {
type: 1 /* TimelineAnimation */,
element,
keyframes,
preStyleProps,
postStyleProps,
duration,
delay,
totalTime: duration + delay,
easing,
subTimeline
};
}
class ElementInstructionMap {
constructor() {
this._map = new Map();
}
consume(element) {
let instructions = this._map.get(element);
if (instructions) {
this._map.delete(element);
}
else {
instructions = [];
}
return instructions;
}
append(element, instructions) {
let existingInstructions = this._map.get(element);
if (!existingInstructions) {
this._map.set(element, existingInstructions = []);
}
existingInstructions.push(...instructions);
}
has(element) {
return this._map.has(element);
}
clear() {
this._map.clear();
}
}
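// Usage sketch (added for illustration): append() accumulates instructions per element and
// consume() hands them back exactly once, removing the entry afterwards.
//   const map = new ElementInstructionMap();
//   map.append(element, [instructionA]);
//   map.append(element, [instructionB]);
//   map.consume(element); // -> [instructionA, instructionB]
//   map.consume(element); // -> [] (entry was removed by the first consume)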
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const ONE_FRAME_IN_MILLISECONDS = 1;
const ENTER_TOKEN = ':enter';
const ENTER_TOKEN_REGEX = new RegExp(ENTER_TOKEN, 'g');
const LEAVE_TOKEN = ':leave';
const LEAVE_TOKEN_REGEX = new RegExp(LEAVE_TOKEN, 'g');
/*
* The code within this file aims to generate web-animations-compatible keyframes from Angular's
* animation DSL code.
*
* The code below will be converted from:
*
* ```
* sequence([
* style({ opacity: 0 }),
* animate(1000, style({ opacity: 0 }))
* ])
* ```
*
* To:
* ```
* keyframes = [{ opacity: 0, offset: 0 }, { opacity: 1, offset: 1 }]
* duration = 1000
* delay = 0
* easing = ''
* ```
*
* For this operation to cover the combination of animation verbs (style, animate, group, etc...) a
* combination of prototypical inheritance, AST traversal and merge-sort-like algorithms is used.
*
* [AST Traversal]
* Each of the animation verbs, when executed, will return a string-map object representing what
* type of action it is (style, animate, group, etc...) and the data associated with it. This means
* that when a composed mix of these functions is evaluated (like in the example above)
* then it will end up producing a tree of objects representing the animation itself.
*
* When this animation object tree is processed by the visitor code below it will visit each of the
* verb statements within the visitor. And during each visit it will build the context of the
* animation keyframes by interacting with the `TimelineBuilder`.
*
* [TimelineBuilder]
* This class is responsible for tracking the styles and building a series of keyframe objects for a
* timeline between a start and end time. The builder starts off with an initial timeline and each
* time the AST comes across a `group()`, `keyframes()` or a combination of the two within a
* `sequence()` then it will generate a sub timeline for each step as well as a new one after
* they are complete.
*
* As the AST is traversed, the timing state on each of the timelines will be incremented. If a sub
* timeline was created (based on one of the cases above) then the parent timeline will attempt to
* merge the styles used within the sub timelines into itself (this only happens with group()).
* This happens with a merge operation (much like how the merge works in mergesort) and it will only
* copy the most recently used styles from the sub timelines into the parent timeline. This ensures
* that if the styles are used later on in another phase of the animation then they will be the most
* up-to-date values.
*
* [How Missing Styles Are Updated]
* Each timeline has a `backFill` property which is responsible for filling in new styles into
* already processed keyframes if a new style shows up later within the animation sequence.
*
* ```
* sequence([
* style({ width: 0 }),
* animate(1000, style({ width: 100 })),
* animate(1000, style({ width: 200 })),
* animate(1000, style({ width: 300 }))
* animate(1000, style({ width: 400, height: 400 })) // notice how `height` doesn't exist anywhere
* else
* ])
* ```
*
* What is happening here is that the `height` value is added later in the sequence, but is missing
* from all previous animation steps. Therefore when a keyframe is created it would also be missing
* from all previous keyframes up until where it is first used. For the timeline keyframe generation
* to properly fill in the style it will place the previous value (the value from the parent
* timeline) or a default value of `*` into the backFill object. Given that each of the keyframe
* styles are objects that prototypically inherit from the backFill object, this means that if a
* value is added into the backFill then it will automatically propagate any missing values to all
* keyframes. Therefore the missing `height` value will be properly filled into the already
* processed keyframes.
*
* When a sub-timeline is created it will have its own backFill property. This is done so that
* styles present within the sub-timeline do not accidentally seep into the previous/future timeline
* keyframes
*
* (For prototypically-inherited contents to be detected a `for(i in obj)` loop must be used.)
*
* [Validation]
* The code in this file is not responsible for validation. That functionality happens with within
* the `AnimationValidatorVisitor` code.
*/
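// A tiny sketch of the backFill mechanism described above (added for illustration only):
//   const backFill = {};
//   const kf1 = Object.create(backFill); kf1['width'] = '0px';
//   const kf2 = Object.create(backFill); kf2['width'] = '100px';
//   backFill['height'] = '*'; // a style that only shows up later in the sequence
//   // kf1['height'] and kf2['height'] now both resolve to '*' via the prototype chain,
//   // which is why a for(prop in obj) loop is needed to see the backfilled properties.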
function buildAnimationTimelines(driver, rootElement, ast, enterClassName, leaveClassName, startingStyles = {}, finalStyles = {}, options, subInstructions, errors = []) {
return new AnimationTimelineBuilderVisitor().buildKeyframes(driver, rootElement, ast, enterClassName, leaveClassName, startingStyles, finalStyles, options, subInstructions, errors);
}
class AnimationTimelineBuilderVisitor {
buildKeyframes(driver, rootElement, ast, enterClassName, leaveClassName, startingStyles, finalStyles, options, subInstructions, errors = []) {
subInstructions = subInstructions || new ElementInstructionMap();
const context = new AnimationTimelineContext(driver, rootElement, subInstructions, enterClassName, leaveClassName, errors, []);
context.options = options;
context.currentTimeline.setStyles([startingStyles], null, context.errors, options);
visitDslNode(this, ast, context);
// this checks to see if an actual animation happened
const timelines = context.timelines.filter(timeline => timeline.containsAnimation());
if (timelines.length && Object.keys(finalStyles).length) {
const tl = timelines[timelines.length - 1];
if (!tl.allowOnlyTimelineStyles()) {
tl.setStyles([finalStyles], null, context.errors, options);
}
}
return timelines.length ? timelines.map(timeline => timeline.buildKeyframes()) :
[createTimelineInstruction(rootElement, [], [], [], 0, 0, '', false)];
}
visitTrigger(ast, context) {
// these values are not visited in this AST
}
visitState(ast, context) {
// these values are not visited in this AST
}
visitTransition(ast, context) {
// these values are not visited in this AST
}
visitAnimateChild(ast, context) {
const elementInstructions = context.subInstructions.consume(context.element);
if (elementInstructions) {
const innerContext = context.createSubContext(ast.options);
const startTime = context.currentTimeline.currentTime;
const endTime = this._visitSubInstructions(elementInstructions, innerContext, innerContext.options);
if (startTime != endTime) {
// we do this on the upper context because we created a sub context for
// the sub child animations
context.transformIntoNewTimeline(endTime);
}
}
context.previousNode = ast;
}
visitAnimateRef(ast, context) {
const innerContext = context.createSubContext(ast.options);
innerContext.transformIntoNewTimeline();
this.visitReference(ast.animation, innerContext);
context.transformIntoNewTimeline(innerContext.currentTimeline.currentTime);
context.previousNode = ast;
}
_visitSubInstructions(instructions, context, options) {
const startTime = context.currentTimeline.currentTime;
let furthestTime = startTime;
// this is a special-case for when a user wants to skip a sub
// animation from being fired entirely.
const duration = options.duration != null ? resolveTimingValue(options.duration) : null;
const delay = options.delay != null ? resolveTimingValue(options.delay) : null;
if (duration !== 0) {
instructions.forEach(instruction => {
const instructionTimings = context.appendInstructionToTimeline(instruction, duration, delay);
furthestTime =
Math.max(furthestTime, instructionTimings.duration + instructionTimings.delay);
});
}
return furthestTime;
}
visitReference(ast, context) {
context.updateOptions(ast.options, true);
visitDslNode(this, ast.animation, context);
context.previousNode = ast;
}
visitSequence(ast, context) {
const subContextCount = context.subContextCount;
let ctx = context;
const options = ast.options;
if (options && (options.params || options.delay)) {
ctx = context.createSubContext(options);
ctx.transformIntoNewTimeline();
if (options.delay != null) {
if (ctx.previousNode.type == 6 /* Style */) {
ctx.currentTimeline.snapshotCurrentStyles();
ctx.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
}
const delay = resolveTimingValue(options.delay);
ctx.delayNextStep(delay);
}
}
if (ast.steps.length) {
ast.steps.forEach(s => visitDslNode(this, s, ctx));
// this is here just in case the inner steps only contain or end with a style() call
ctx.currentTimeline.applyStylesToKeyframe();
// this means that some animation function within the sequence
// ended up creating a sub timeline (which means the current
// timeline cannot overlap with the contents of the sequence)
if (ctx.subContextCount > subContextCount) {
ctx.transformIntoNewTimeline();
}
}
context.previousNode = ast;
}
visitGroup(ast, context) {
const innerTimelines = [];
let furthestTime = context.currentTimeline.currentTime;
const delay = ast.options && ast.options.delay ? resolveTimingValue(ast.options.delay) : 0;
ast.steps.forEach(s => {
const innerContext = context.createSubContext(ast.options);
if (delay) {
innerContext.delayNextStep(delay);
}
visitDslNode(this, s, innerContext);
furthestTime = Math.max(furthestTime, innerContext.currentTimeline.currentTime);
innerTimelines.push(innerContext.currentTimeline);
});
// this operation is run after the AST loop because otherwise
// if the parent timeline's collected styles were updated then
// it would pass in invalid data into the new-to-be forked items
innerTimelines.forEach(timeline => context.currentTimeline.mergeTimelineCollectedStyles(timeline));
context.transformIntoNewTimeline(furthestTime);
context.previousNode = ast;
}
_visitTiming(ast, context) {
if (ast.dynamic) {
const strValue = ast.strValue;
const timingValue = context.params ? interpolateParams(strValue, context.params, context.errors) : strValue;
return resolveTiming(timingValue, context.errors);
}
else {
return { duration: ast.duration, delay: ast.delay, easing: ast.easing };
}
}
visitAnimate(ast, context) {
const timings = context.currentAnimateTimings = this._visitTiming(ast.timings, context);
const timeline = context.currentTimeline;
if (timings.delay) {
context.incrementTime(timings.delay);
timeline.snapshotCurrentStyles();
}
const style = ast.style;
if (style.type == 5 /* Keyframes */) {
this.visitKeyframes(style, context);
}
else {
context.incrementTime(timings.duration);
this.visitStyle(style, context);
timeline.applyStylesToKeyframe();
}
context.currentAnimateTimings = null;
context.previousNode = ast;
}
visitStyle(ast, context) {
const timeline = context.currentTimeline;
const timings = context.currentAnimateTimings;
// this is a special case for when a style() call
// directly follows an animate() call (but not inside of an animate() call)
if (!timings && timeline.getCurrentStyleProperties().length) {
timeline.forwardFrame();
}
const easing = (timings && timings.easing) || ast.easing;
if (ast.isEmptyStep) {
timeline.applyEmptyStep(easing);
}
else {
timeline.setStyles(ast.styles, easing, context.errors, context.options);
}
context.previousNode = ast;
}
visitKeyframes(ast, context) {
const currentAnimateTimings = context.currentAnimateTimings;
const startTime = (context.currentTimeline).duration;
const duration = currentAnimateTimings.duration;
const innerContext = context.createSubContext();
const innerTimeline = innerContext.currentTimeline;
innerTimeline.easing = currentAnimateTimings.easing;
ast.styles.forEach(step => {
const offset = step.offset || 0;
innerTimeline.forwardTime(offset * duration);
innerTimeline.setStyles(step.styles, step.easing, context.errors, context.options);
innerTimeline.applyStylesToKeyframe();
});
// this will ensure that the parent timeline gets all the styles from
// the child even if the new timeline below is not used
context.currentTimeline.mergeTimelineCollectedStyles(innerTimeline);
// we do this because the window between this timeline and the sub timeline
// should ensure that the styles within are exactly the same as they were before
context.transformIntoNewTimeline(startTime + duration);
context.previousNode = ast;
}
visitQuery(ast, context) {
// in the event that the first step before this is a style step we need
// to ensure the styles are applied before the children are animated
const startTime = context.currentTimeline.currentTime;
const options = (ast.options || {});
const delay = options.delay ? resolveTimingValue(options.delay) : 0;
if (delay &&
(context.previousNode.type === 6 /* Style */ ||
(startTime == 0 && context.currentTimeline.getCurrentStyleProperties().length))) {
context.currentTimeline.snapshotCurrentStyles();
context.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
}
let furthestTime = startTime;
const elms = context.invokeQuery(ast.selector, ast.originalSelector, ast.limit, ast.includeSelf, options.optional ? true : false, context.errors);
context.currentQueryTotal = elms.length;
let sameElementTimeline = null;
elms.forEach((element, i) => {
context.currentQueryIndex = i;
const innerContext = context.createSubContext(ast.options, element);
if (delay) {
innerContext.delayNextStep(delay);
}
if (element === context.element) {
sameElementTimeline = innerContext.currentTimeline;
}
visitDslNode(this, ast.animation, innerContext);
// this is here just in case the inner steps only contain or end
// with a style() call (which is here to signal that this is a preparatory
// call to style an element before it is animated again)
innerContext.currentTimeline.applyStylesToKeyframe();
const endTime = innerContext.currentTimeline.currentTime;
furthestTime = Math.max(furthestTime, endTime);
});
context.currentQueryIndex = 0;
context.currentQueryTotal = 0;
context.transformIntoNewTimeline(furthestTime);
if (sameElementTimeline) {
context.currentTimeline.mergeTimelineCollectedStyles(sameElementTimeline);
context.currentTimeline.snapshotCurrentStyles();
}
context.previousNode = ast;
}
visitStagger(ast, context) {
const parentContext = context.parentContext;
const tl = context.currentTimeline;
const timings = ast.timings;
const duration = Math.abs(timings.duration);
const maxTime = duration * (context.currentQueryTotal - 1);
let delay = duration * context.currentQueryIndex;
let staggerTransformer = timings.duration < 0 ? 'reverse' : timings.easing;
switch (staggerTransformer) {
case 'reverse':
delay = maxTime - delay;
break;
case 'full':
delay = parentContext.currentStaggerTime;
break;
}
const timeline = context.currentTimeline;
if (delay) {
timeline.delayNextStep(delay);
}
const startingTime = timeline.currentTime;
visitDslNode(this, ast.animation, context);
context.previousNode = ast;
// time = duration + delay
// the reason why this computation is so complex is because
// the inner timeline may either have a delay value or a stretched
// keyframe depending on if a subtimeline is not used or is used.
parentContext.currentStaggerTime =
(tl.currentTime - startingTime) + (tl.startTime - parentContext.currentTimeline.startTime);
}
}
const DEFAULT_NOOP_PREVIOUS_NODE = {};
class AnimationTimelineContext {
constructor(_driver, element, subInstructions, _enterClassName, _leaveClassName, errors, timelines, initialTimeline) {
this._driver = _driver;
this.element = element;
this.subInstructions = subInstructions;
this._enterClassName = _enterClassName;
this._leaveClassName = _leaveClassName;
this.errors = errors;
this.timelines = timelines;
this.parentContext = null;
this.currentAnimateTimings = null;
this.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
this.subContextCount = 0;
this.options = {};
this.currentQueryIndex = 0;
this.currentQueryTotal = 0;
this.currentStaggerTime = 0;
this.currentTimeline = initialTimeline || new TimelineBuilder(this._driver, element, 0);
timelines.push(this.currentTimeline);
}
get params() {
return this.options.params;
}
updateOptions(options, skipIfExists) {
if (!options)
return;
const newOptions = options;
let optionsToUpdate = this.options;
// NOTE: this will get patched up when other animation methods support duration overrides
if (newOptions.duration != null) {
optionsToUpdate.duration = resolveTimingValue(newOptions.duration);
}
if (newOptions.delay != null) {
optionsToUpdate.delay = resolveTimingValue(newOptions.delay);
}
const newParams = newOptions.params;
if (newParams) {
let paramsToUpdate = optionsToUpdate.params;
if (!paramsToUpdate) {
paramsToUpdate = this.options.params = {};
}
Object.keys(newParams).forEach(name => {
if (!skipIfExists || !paramsToUpdate.hasOwnProperty(name)) {
paramsToUpdate[name] = interpolateParams(newParams[name], paramsToUpdate, this.errors);
}
});
}
}
_copyOptions() {
const options = {};
if (this.options) {
const oldParams = this.options.params;
if (oldParams) {
const params = options['params'] = {};
Object.keys(oldParams).forEach(name => {
params[name] = oldParams[name];
});
}
}
return options;
}
createSubContext(options = null, element, newTime) {
const target = element || this.element;
const context = new AnimationTimelineContext(this._driver, target, this.subInstructions, this._enterClassName, this._leaveClassName, this.errors, this.timelines, this.currentTimeline.fork(target, newTime || 0));
context.previousNode = this.previousNode;
context.currentAnimateTimings = this.currentAnimateTimings;
context.options = this._copyOptions();
context.updateOptions(options);
context.currentQueryIndex = this.currentQueryIndex;
context.currentQueryTotal = this.currentQueryTotal;
context.parentContext = this;
this.subContextCount++;
return context;
}
transformIntoNewTimeline(newTime) {
this.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
this.currentTimeline = this.currentTimeline.fork(this.element, newTime);
this.timelines.push(this.currentTimeline);
return this.currentTimeline;
}
appendInstructionToTimeline(instruction, duration, delay) {
const updatedTimings = {
duration: duration != null ? duration : instruction.duration,
delay: this.currentTimeline.currentTime + (delay != null ? delay : 0) + instruction.delay,
easing: ''
};
const builder = new SubTimelineBuilder(this._driver, instruction.element, instruction.keyframes, instruction.preStyleProps, instruction.postStyleProps, updatedTimings, instruction.stretchStartingKeyframe);
this.timelines.push(builder);
return updatedTimings;
}
incrementTime(time) {
this.currentTimeline.forwardTime(this.currentTimeline.duration + time);
}
delayNextStep(delay) {
// negative delays are not yet supported
if (delay > 0) {
this.currentTimeline.delayNextStep(delay);
}
}
invokeQuery(selector, originalSelector, limit, includeSelf, optional, errors) {
let results = [];
if (includeSelf) {
results.push(this.element);
}
if (selector.length > 0) { // if :self is only used then the selector is empty
selector = selector.replace(ENTER_TOKEN_REGEX, '.' + this._enterClassName);
selector = selector.replace(LEAVE_TOKEN_REGEX, '.' + this._leaveClassName);
const multi = limit != 1;
let elements = this._driver.query(this.element, selector, multi);
if (limit !== 0) {
elements = limit < 0 ? elements.slice(elements.length + limit, elements.length) :
elements.slice(0, limit);
}
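            // Illustrative example (not part of the original source): with 5
            // matched elements, limit = 2 keeps the first two (slice(0, 2)),
            // limit = -2 keeps the last two (slice(3, 5)), and limit = 0
            // applies no cap at all.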
results.push(...elements);
}
if (!optional && results.length == 0) {
errors.push(`\`query("${originalSelector}")\` returned zero elements. (Use \`query("${originalSelector}", { optional: true })\` if you wish to allow this.)`);
}
return results;
}
}
class TimelineBuilder {
constructor(_driver, element, startTime, _elementTimelineStylesLookup) {
this._driver = _driver;
this.element = element;
this.startTime = startTime;
this._elementTimelineStylesLookup = _elementTimelineStylesLookup;
this.duration = 0;
this._previousKeyframe = {};
this._currentKeyframe = {};
this._keyframes = new Map();
this._styleSummary = {};
this._pendingStyles = {};
this._backFill = {};
this._currentEmptyStepKeyframe = null;
if (!this._elementTimelineStylesLookup) {
this._elementTimelineStylesLookup = new Map();
}
this._localTimelineStyles = Object.create(this._backFill, {});
this._globalTimelineStyles = this._elementTimelineStylesLookup.get(element);
if (!this._globalTimelineStyles) {
this._globalTimelineStyles = this._localTimelineStyles;
this._elementTimelineStylesLookup.set(element, this._localTimelineStyles);
}
this._loadKeyframe();
}
containsAnimation() {
switch (this._keyframes.size) {
case 0:
return false;
case 1:
return this.getCurrentStyleProperties().length > 0;
default:
return true;
}
}
getCurrentStyleProperties() {
return Object.keys(this._currentKeyframe);
}
get currentTime() {
return this.startTime + this.duration;
}
delayNextStep(delay) {
// in the event that a style() step is placed right before a stagger()
// and that style() step is the very first style() value in the animation
// then we need to make a copy of the keyframe [0, copy, 1] so that the delay
// properly applies the style() values to work with the stagger...
const hasPreStyleStep = this._keyframes.size == 1 && Object.keys(this._pendingStyles).length;
if (this.duration || hasPreStyleStep) {
this.forwardTime(this.currentTime + delay);
if (hasPreStyleStep) {
this.snapshotCurrentStyles();
}
}
else {
this.startTime += delay;
}
}
fork(element, currentTime) {
this.applyStylesToKeyframe();
return new TimelineBuilder(this._driver, element, currentTime || this.currentTime, this._elementTimelineStylesLookup);
}
_loadKeyframe() {
if (this._currentKeyframe) {
this._previousKeyframe = this._currentKeyframe;
}
this._currentKeyframe = this._keyframes.get(this.duration);
if (!this._currentKeyframe) {
this._currentKeyframe = Object.create(this._backFill, {});
this._keyframes.set(this.duration, this._currentKeyframe);
}
}
forwardFrame() {
this.duration += ONE_FRAME_IN_MILLISECONDS;
this._loadKeyframe();
}
forwardTime(time) {
this.applyStylesToKeyframe();
this.duration = time;
this._loadKeyframe();
}
_updateStyle(prop, value) {
this._localTimelineStyles[prop] = value;
this._globalTimelineStyles[prop] = value;
this._styleSummary[prop] = { time: this.currentTime, value };
}
allowOnlyTimelineStyles() {
return this._currentEmptyStepKeyframe !== this._currentKeyframe;
}
applyEmptyStep(easing) {
if (easing) {
this._previousKeyframe['easing'] = easing;
}
        // special case for animate(duration):
        // all missing styles are filled with an `*` value then
        // if any destination styles are filled in later on the same
        // keyframe then they will override these wildcard values
        // We use `_globalTimelineStyles` here because there may be
        // styles in previous keyframes that are not present in this timeline
Object.keys(this._globalTimelineStyles).forEach(prop => {
this._backFill[prop] = this._globalTimelineStyles[prop] || AUTO_STYLE;
this._currentKeyframe[prop] = AUTO_STYLE;
});
this._currentEmptyStepKeyframe = this._currentKeyframe;
}
setStyles(input, easing, errors, options) {
if (easing) {
this._previousKeyframe['easing'] = easing;
}
const params = (options && options.params) || {};
const styles = flattenStyles(input, this._globalTimelineStyles);
Object.keys(styles).forEach(prop => {
const val = interpolateParams(styles[prop], params, errors);
this._pendingStyles[prop] = val;
if (!this._localTimelineStyles.hasOwnProperty(prop)) {
this._backFill[prop] = this._globalTimelineStyles.hasOwnProperty(prop) ?
this._globalTimelineStyles[prop] :
AUTO_STYLE;
}
this._updateStyle(prop, val);
});
}
applyStylesToKeyframe() {
const styles = this._pendingStyles;
const props = Object.keys(styles);
if (props.length == 0)
return;
this._pendingStyles = {};
props.forEach(prop => {
const val = styles[prop];
this._currentKeyframe[prop] = val;
});
Object.keys(this._localTimelineStyles).forEach(prop => {
if (!this._currentKeyframe.hasOwnProperty(prop)) {
this._currentKeyframe[prop] = this._localTimelineStyles[prop];
}
});
}
snapshotCurrentStyles() {
Object.keys(this._localTimelineStyles).forEach(prop => {
const val = this._localTimelineStyles[prop];
this._pendingStyles[prop] = val;
this._updateStyle(prop, val);
});
}
getFinalKeyframe() {
return this._keyframes.get(this.duration);
}
get properties() {
const properties = [];
for (let prop in this._currentKeyframe) {
properties.push(prop);
}
return properties;
}
mergeTimelineCollectedStyles(timeline) {
Object.keys(timeline._styleSummary).forEach(prop => {
const details0 = this._styleSummary[prop];
const details1 = timeline._styleSummary[prop];
if (!details0 || details1.time > details0.time) {
this._updateStyle(prop, details1.value);
}
});
}
buildKeyframes() {
this.applyStylesToKeyframe();
const preStyleProps = new Set();
const postStyleProps = new Set();
const isEmpty = this._keyframes.size === 1 && this.duration === 0;
let finalKeyframes = [];
this._keyframes.forEach((keyframe, time) => {
const finalKeyframe = copyStyles(keyframe, true);
Object.keys(finalKeyframe).forEach(prop => {
const value = finalKeyframe[prop];
if (value == ɵPRE_STYLE) {
preStyleProps.add(prop);
}
else if (value == AUTO_STYLE) {
postStyleProps.add(prop);
}
});
if (!isEmpty) {
finalKeyframe['offset'] = time / this.duration;
}
finalKeyframes.push(finalKeyframe);
});
const preProps = preStyleProps.size ? iteratorToArray(preStyleProps.values()) : [];
const postProps = postStyleProps.size ? iteratorToArray(postStyleProps.values()) : [];
// special case for a 0-second animation (which is designed just to place styles onscreen)
if (isEmpty) {
const kf0 = finalKeyframes[0];
const kf1 = copyObj(kf0);
kf0['offset'] = 0;
kf1['offset'] = 1;
finalKeyframes = [kf0, kf1];
}
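        // Illustrative example (not part of the original source): a lone
        // style({ opacity: 0 }) step with no animate() yields one keyframe and
        // duration 0; the branch above duplicates that keyframe at offsets 0
        // and 1 so the styles are simply placed onscreen without animating.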
return createTimelineInstruction(this.element, finalKeyframes, preProps, postProps, this.duration, this.startTime, this.easing, false);
}
}
class SubTimelineBuilder extends TimelineBuilder {
constructor(driver, element, keyframes, preStyleProps, postStyleProps, timings, _stretchStartingKeyframe = false) {
super(driver, element, timings.delay);
this.keyframes = keyframes;
this.preStyleProps = preStyleProps;
this.postStyleProps = postStyleProps;
this._stretchStartingKeyframe = _stretchStartingKeyframe;
this.timings = { duration: timings.duration, delay: timings.delay, easing: timings.easing };
}
containsAnimation() {
return this.keyframes.length > 1;
}
buildKeyframes() {
let keyframes = this.keyframes;
let { delay, duration, easing } = this.timings;
if (this._stretchStartingKeyframe && delay) {
const newKeyframes = [];
const totalTime = duration + delay;
const startingGap = delay / totalTime;
// the original starting keyframe now starts once the delay is done
const newFirstKeyframe = copyStyles(keyframes[0], false);
newFirstKeyframe['offset'] = 0;
newKeyframes.push(newFirstKeyframe);
const oldFirstKeyframe = copyStyles(keyframes[0], false);
oldFirstKeyframe['offset'] = roundOffset(startingGap);
newKeyframes.push(oldFirstKeyframe);
/*
When the keyframe is stretched then it means that the delay before the animation
starts is gone. Instead the first keyframe is placed at the start of the animation
and it is then copied to where it starts when the original delay is over. This basically
            means nothing animates during that delay, but the styles are still rendered. For this
to work the original offset values that exist in the original keyframes must be "warped"
so that they can take the new keyframe + delay into account.
delay=1000, duration=1000, keyframes = 0 .5 1
turns into
delay=0, duration=2000, keyframes = 0 .33 .66 1
*/
// offsets between 1 ... n -1 are all warped by the keyframe stretch
const limit = keyframes.length - 1;
for (let i = 1; i <= limit; i++) {
let kf = copyStyles(keyframes[i], false);
const oldOffset = kf['offset'];
const timeAtKeyframe = delay + oldOffset * duration;
kf['offset'] = roundOffset(timeAtKeyframe / totalTime);
newKeyframes.push(kf);
}
// the new starting keyframe should be added at the start
duration = totalTime;
delay = 0;
easing = '';
keyframes = newKeyframes;
}
return createTimelineInstruction(this.element, keyframes, this.preStyleProps, this.postStyleProps, duration, delay, easing, true);
}
}
function roundOffset(offset, decimalPoints = 3) {
const mult = Math.pow(10, decimalPoints - 1);
return Math.round(offset * mult) / mult;
}
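// Illustrative example (not part of the original source): with the default
// decimalPoints of 3, mult is 10 ** 2 === 100, so:
//   roundOffset(1 / 3);    // => 0.33
//   roundOffset(0.6667);   // => 0.67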
function flattenStyles(input, allStyles) {
const styles = {};
let allProperties;
input.forEach(token => {
if (token === '*') {
allProperties = allProperties || Object.keys(allStyles);
allProperties.forEach(prop => {
styles[prop] = AUTO_STYLE;
});
}
else {
copyStyles(token, false, styles);
}
});
return styles;
}
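// Illustrative example (not part of the original source): given
// allStyles = { width: '100px', height: '50px' },
//   flattenStyles(['*', { opacity: 0 }], allStyles)
// returns { width: AUTO_STYLE, height: AUTO_STYLE, opacity: 0 } - the '*'
// token expands to every known property while concrete style objects are
// copied over as-is.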
class Animation {
constructor(_driver, input) {
this._driver = _driver;
const errors = [];
const ast = buildAnimationAst(_driver, input, errors);
if (errors.length) {
const errorMessage = `animation validation failed:\n${errors.join('\n')}`;
throw new Error(errorMessage);
}
this._animationAst = ast;
}
buildTimelines(element, startingStyles, destinationStyles, options, subInstructions) {
const start = Array.isArray(startingStyles) ? normalizeStyles(startingStyles) :
startingStyles;
const dest = Array.isArray(destinationStyles) ? normalizeStyles(destinationStyles) :
destinationStyles;
const errors = [];
subInstructions = subInstructions || new ElementInstructionMap();
const result = buildAnimationTimelines(this._driver, element, this._animationAst, ENTER_CLASSNAME, LEAVE_CLASSNAME, start, dest, options, subInstructions, errors);
if (errors.length) {
const errorMessage = `animation building failed:\n${errors.join('\n')}`;
throw new Error(errorMessage);
}
return result;
}
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @publicApi
*/
class AnimationStyleNormalizer {
}
/**
* @publicApi
*/
class NoopAnimationStyleNormalizer {
normalizePropertyName(propertyName, errors) {
return propertyName;
}
normalizeStyleValue(userProvidedProperty, normalizedProperty, value, errors) {
return value;
}
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
class WebAnimationsStyleNormalizer extends AnimationStyleNormalizer {
normalizePropertyName(propertyName, errors) {
return dashCaseToCamelCase(propertyName);
}
normalizeStyleValue(userProvidedProperty, normalizedProperty, value, errors) {
let unit = '';
const strVal = value.toString().trim();
if (DIMENSIONAL_PROP_MAP[normalizedProperty] && value !== 0 && value !== '0') {
if (typeof value === 'number') {
unit = 'px';
}
else {
const valAndSuffixMatch = value.match(/^[+-]?[\d\.]+([a-z]*)$/);
if (valAndSuffixMatch && valAndSuffixMatch[1].length == 0) {
errors.push(`Please provide a CSS unit value for ${userProvidedProperty}:${value}`);
}
}
}
return strVal + unit;
}
}
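// Illustrative example (not part of the original source): for a dimensional
// property such as 'width', normalizeStyleValue('width', 'width', 100, errors)
// returns '100px'; a value of '3em' passes through unchanged, while a bare
// '100' pushes a "Please provide a CSS unit value" error. Non-dimensional
// properties (e.g. 'opacity') never receive an implicit unit.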
const ɵ0$1 = () => makeBooleanMap('width,height,minWidth,minHeight,maxWidth,maxHeight,left,top,bottom,right,fontSize,outlineWidth,outlineOffset,paddingTop,paddingLeft,paddingBottom,paddingRight,marginTop,marginLeft,marginBottom,marginRight,borderRadius,borderWidth,borderTopWidth,borderLeftWidth,borderRightWidth,borderBottomWidth,textIndent,perspective'
.split(','));
const DIMENSIONAL_PROP_MAP = (ɵ0$1)();
function makeBooleanMap(keys) {
const map = {};
keys.forEach(key => map[key] = true);
return map;
}
function createTransitionInstruction(element, triggerName, fromState, toState, isRemovalTransition, fromStyles, toStyles, timelines, queriedElements, preStyleProps, postStyleProps, totalTime, errors) {
return {
type: 0 /* TransitionAnimation */,
element,
triggerName,
isRemovalTransition,
fromState,
fromStyles,
toState,
toStyles,
timelines,
queriedElements,
preStyleProps,
postStyleProps,
totalTime,
errors
};
}
const EMPTY_OBJECT = {};
class AnimationTransitionFactory {
constructor(_triggerName, ast, _stateStyles) {
this._triggerName = _triggerName;
this.ast = ast;
this._stateStyles = _stateStyles;
}
match(currentState, nextState, element, params) {
return oneOrMoreTransitionsMatch(this.ast.matchers, currentState, nextState, element, params);
}
buildStyles(stateName, params, errors) {
const backupStateStyler = this._stateStyles['*'];
const stateStyler = this._stateStyles[stateName];
const backupStyles = backupStateStyler ? backupStateStyler.buildStyles(params, errors) : {};
return stateStyler ? stateStyler.buildStyles(params, errors) : backupStyles;
}
build(driver, element, currentState, nextState, enterClassName, leaveClassName, currentOptions, nextOptions, subInstructions, skipAstBuild) {
const errors = [];
const transitionAnimationParams = this.ast.options && this.ast.options.params || EMPTY_OBJECT;
const currentAnimationParams = currentOptions && currentOptions.params || EMPTY_OBJECT;
const currentStateStyles = this.buildStyles(currentState, currentAnimationParams, errors);
const nextAnimationParams = nextOptions && nextOptions.params || EMPTY_OBJECT;
const nextStateStyles = this.buildStyles(nextState, nextAnimationParams, errors);
const queriedElements = new Set();
const preStyleMap = new Map();
const postStyleMap = new Map();
const isRemoval = nextState === 'void';
const animationOptions = { params: Object.assign(Object.assign({}, transitionAnimationParams), nextAnimationParams) };
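        // Illustrative note (not part of the original source): params supplied
        // with the next state win over the transition-level defaults here, e.g.
        // transition params { delay: 0 } merged with next params { delay: 200 }
        // produce { delay: 200 }.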
const timelines = skipAstBuild ?
[] :
buildAnimationTimelines(driver, element, this.ast.animation, enterClassName, leaveClassName, currentStateStyles, nextStateStyles, animationOptions, subInstructions, errors);
let totalTime = 0;
timelines.forEach(tl => {
totalTime = Math.max(tl.duration + tl.delay, totalTime);
});
if (errors.length) {
return createTransitionInstruction(element, this._triggerName, currentState, nextState, isRemoval, currentStateStyles, nextStateStyles, [], [], preStyleMap, postStyleMap, totalTime, errors);
}
timelines.forEach(tl => {
const elm = tl.element;
const preProps = getOrSetAsInMap(preStyleMap, elm, {});
tl.preStyleProps.forEach(prop => preProps[prop] = true);
const postProps = getOrSetAsInMap(postStyleMap, elm, {});
tl.postStyleProps.forEach(prop => postProps[prop] = true);
if (elm !== element) {
queriedElements.add(elm);
}
});
const queriedElementsList = iteratorToArray(queriedElements.values());
return createTransitionInstruction(element, this._triggerName, currentState, nextState, isRemoval, currentStateStyles, nextStateStyles, timelines, queriedElementsList, preStyleMap, postStyleMap, totalTime);
}
}
function oneOrMoreTransitionsMatch(matchFns, currentState, nextState, element, params) {
return matchFns.some(fn => fn(currentState, nextState, element, params));
}
class AnimationStateStyles {
constructor(styles, defaultParams, normalizer) {
this.styles = styles;
this.defaultParams = defaultParams;
this.normalizer = normalizer;
}
buildStyles(params, errors) {
const finalStyles = {};
const combinedParams = copyObj(this.defaultParams);
Object.keys(params).forEach(key => {
const value = params[key];
if (value != null) {
combinedParams[key] = value;
}
});
this.styles.styles.forEach(value => {
if (typeof value !== 'string') {
const styleObj = value;
Object.keys(styleObj).forEach(prop => {
let val = styleObj[prop];
if (val.length > 1) {
val = interpolateParams(val, combinedParams, errors);
}
const normalizedProp = this.normalizer.normalizePropertyName(prop, errors);
val = this.normalizer.normalizeStyleValue(prop, normalizedProp, val, errors);
finalStyles[normalizedProp] = val;
});
}
});
return finalStyles;
}
}
function buildTrigger(name, ast, normalizer) {
return new AnimationTrigger(name, ast, normalizer);
}
class AnimationTrigger {
constructor(name, ast, _normalizer) {
this.name = name;
this.ast = ast;
this._normalizer = _normalizer;
this.transitionFactories = [];
this.states = {};
ast.states.forEach(ast => {
const defaultParams = (ast.options && ast.options.params) || {};
this.states[ast.name] = new AnimationStateStyles(ast.style, defaultParams, _normalizer);
});
balanceProperties(this.states, 'true', '1');
balanceProperties(this.states, 'false', '0');
ast.transitions.forEach(ast => {
this.transitionFactories.push(new AnimationTransitionFactory(name, ast, this.states));
});
this.fallbackTransition = createFallbackTransition(name, this.states, this._normalizer);
}
get containsQueries() {
return this.ast.queryCount > 0;
}
matchTransition(currentState, nextState, element, params) {
const entry = this.transitionFactories.find(f => f.match(currentState, nextState, element, params));
return entry || null;
}
matchStyles(currentState, params, errors) {
return this.fallbackTransition.buildStyles(currentState, params, errors);
}
}
function createFallbackTransition(triggerName, states, normalizer) {
const matchers = [(fromState, toState) => true];
const animation = { type: 2 /* Sequence */, steps: [], options: null };
const transition = {
type: 1 /* Transition */,
animation,
matchers,
options: null,
queryCount: 0,
depCount: 0
};
return new AnimationTransitionFactory(triggerName, transition, states);
}
function balanceProperties(obj, key1, key2) {
if (obj.hasOwnProperty(key1)) {
if (!obj.hasOwnProperty(key2)) {
obj[key2] = obj[key1];
}
}
else if (obj.hasOwnProperty(key2)) {
obj[key1] = obj[key2];
}
}
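// Illustrative example (not part of the original source): if a trigger only
// declares state('true', ...), balanceProperties copies that entry to the key
// '1' (and 'false' to '0'), so boolean-valued trigger bindings resolve to the
// same styles whether they arrive as true/false or as 1/0.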
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const EMPTY_INSTRUCTION_MAP = new ElementInstructionMap();
class TimelineAnimationEngine {
constructor(bodyNode, _driver, _normalizer) {
this.bodyNode = bodyNode;
this._driver = _driver;
this._normalizer = _normalizer;
this._animations = {};
this._playersById = {};
this.players = [];
}
register(id, metadata) {
const errors = [];
const ast = buildAnimationAst(this._driver, metadata, errors);
if (errors.length) {
throw new Error(`Unable to build the animation due to the following errors: ${errors.join('\n')}`);
}
else {
this._animations[id] = ast;
}
}
_buildPlayer(i, preStyles, postStyles) {
const element = i.element;
const keyframes = normalizeKeyframes(this._driver, this._normalizer, element, i.keyframes, preStyles, postStyles);
return this._driver.animate(element, keyframes, i.duration, i.delay, i.easing, [], true);
}
create(id, element, options = {}) {
const errors = [];
const ast = this._animations[id];
let instructions;
const autoStylesMap = new Map();
if (ast) {
instructions = buildAnimationTimelines(this._driver, element, ast, ENTER_CLASSNAME, LEAVE_CLASSNAME, {}, {}, options, EMPTY_INSTRUCTION_MAP, errors);
instructions.forEach(inst => {
const styles = getOrSetAsInMap(autoStylesMap, inst.element, {});
inst.postStyleProps.forEach(prop => styles[prop] = null);
});
}
else {
errors.push('The requested animation doesn\'t exist or has already been destroyed');
instructions = [];
}
if (errors.length) {
throw new Error(`Unable to create the animation due to the following errors: ${errors.join('\n')}`);
}
autoStylesMap.forEach((styles, element) => {
Object.keys(styles).forEach(prop => {
styles[prop] = this._driver.computeStyle(element, prop, AUTO_STYLE);
});
});
const players = instructions.map(i => {
const styles = autoStylesMap.get(i.element);
return this._buildPlayer(i, {}, styles);
});
const player = optimizeGroupPlayer(players);
this._playersById[id] = player;
player.onDestroy(() => this.destroy(id));
this.players.push(player);
return player;
}
destroy(id) {
const player = this._getPlayer(id);
player.destroy();
delete this._playersById[id];
const index = this.players.indexOf(player);
if (index >= 0) {
this.players.splice(index, 1);
}
}
_getPlayer(id) {
const player = this._playersById[id];
if (!player) {
throw new Error(`Unable to find the timeline player referenced by ${id}`);
}
return player;
}
listen(id, element, eventName, callback) {
// triggerName, fromState, toState are all ignored for timeline animations
const baseEvent = makeAnimationEvent(element, '', '', '');
listenOnPlayer(this._getPlayer(id), eventName, baseEvent, callback);
return () => { };
}
command(id, element, command, args) {
if (command == 'register') {
this.register(id, args[0]);
return;
}
if (command == 'create') {
const options = (args[0] || {});
this.create(id, element, options);
return;
}
const player = this._getPlayer(id);
switch (command) {
case 'play':
player.play();
break;
case 'pause':
player.pause();
break;
case 'reset':
player.reset();
break;
case 'restart':
player.restart();
break;
case 'finish':
player.finish();
break;
case 'init':
player.init();
break;
case 'setPosition':
player.setPosition(parseFloat(args[0]));
break;
case 'destroy':
this.destroy(id);
break;
}
}
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const QUEUED_CLASSNAME = 'ng-animate-queued';
const QUEUED_SELECTOR = '.ng-animate-queued';
const DISABLED_CLASSNAME = 'ng-animate-disabled';
const DISABLED_SELECTOR = '.ng-animate-disabled';
const STAR_CLASSNAME = 'ng-star-inserted';
const STAR_SELECTOR = '.ng-star-inserted';
const EMPTY_PLAYER_ARRAY = [];
const NULL_REMOVAL_STATE = {
namespaceId: '',
setForRemoval: false,
setForMove: false,
hasAnimation: false,
removedBeforeQueried: false
};
const NULL_REMOVED_QUERIED_STATE = {
namespaceId: '',
setForMove: false,
setForRemoval: false,
hasAnimation: false,
removedBeforeQueried: true
};
const REMOVAL_FLAG = '__ng_removed';
class StateValue {
constructor(input, namespaceId = '') {
this.namespaceId = namespaceId;
const isObj = input && input.hasOwnProperty('value');
const value = isObj ? input['value'] : input;
this.value = normalizeTriggerValue(value);
if (isObj) {
const options = copyObj(input);
delete options['value'];
this.options = options;
}
else {
this.options = {};
}
if (!this.options.params) {
this.options.params = {};
}
}
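    // Illustrative example (not part of the original source):
    //   new StateValue('open')                              -> value 'open', params {}
    //   new StateValue({ value: 'open', params: { x: 1 } }) -> value 'open', params { x: 1 }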
get params() {
return this.options.params;
}
absorbOptions(options) {
const newParams = options.params;
if (newParams) {
const oldParams = this.options.params;
Object.keys(newParams).forEach(prop => {
if (oldParams[prop] == null) {
oldParams[prop] = newParams[prop];
}
});
}
}
}
const VOID_VALUE = 'void';
const DEFAULT_STATE_VALUE = new StateValue(VOID_VALUE);
class AnimationTransitionNamespace {
constructor(id, hostElement, _engine) {
this.id = id;
this.hostElement = hostElement;
this._engine = _engine;
this.players = [];
this._triggers = {};
this._queue = [];
this._elementListeners = new Map();
this._hostClassName = 'ng-tns-' + id;
addClass(hostElement, this._hostClassName);
}
listen(element, name, phase, callback) {
if (!this._triggers.hasOwnProperty(name)) {
throw new Error(`Unable to listen on the animation trigger event "${phase}" because the animation trigger "${name}" doesn\'t exist!`);
}
if (phase == null || phase.length == 0) {
throw new Error(`Unable to listen on the animation trigger "${name}" because the provided event is undefined!`);
}
if (!isTriggerEventValid(phase)) {
throw new Error(`The provided animation trigger event "${phase}" for the animation trigger "${name}" is not supported!`);
}
const listeners = getOrSetAsInMap(this._elementListeners, element, []);
const data = { name, phase, callback };
listeners.push(data);
const triggersWithStates = getOrSetAsInMap(this._engine.statesByElement, element, {});
if (!triggersWithStates.hasOwnProperty(name)) {
addClass(element, NG_TRIGGER_CLASSNAME);
addClass(element, NG_TRIGGER_CLASSNAME + '-' + name);
triggersWithStates[name] = DEFAULT_STATE_VALUE;
}
return () => {
            // the event listener is removed AFTER the flush has occurred such
            // that leave animation callbacks can fire (otherwise if the node
            // is removed in between then the listeners would be deregistered)
this._engine.afterFlush(() => {
const index = listeners.indexOf(data);
if (index >= 0) {
listeners.splice(index, 1);
}
if (!this._triggers[name]) {
delete triggersWithStates[name];
}
});
};
}
register(name, ast) {
if (this._triggers[name]) {
// throw
return false;
}
else {
this._triggers[name] = ast;
return true;
}
}
_getTrigger(name) {
const trigger = this._triggers[name];
if (!trigger) {
throw new Error(`The provided animation trigger "${name}" has not been registered!`);
}
return trigger;
}
trigger(element, triggerName, value, defaultToFallback = true) {
const trigger = this._getTrigger(triggerName);
const player = new TransitionAnimationPlayer(this.id, triggerName, element);
let triggersWithStates = this._engine.statesByElement.get(element);
if (!triggersWithStates) {
addClass(element, NG_TRIGGER_CLASSNAME);
addClass(element, NG_TRIGGER_CLASSNAME + '-' + triggerName);
this._engine.statesByElement.set(element, triggersWithStates = {});
}
let fromState = triggersWithStates[triggerName];
const toState = new StateValue(value, this.id);
const isObj = value && value.hasOwnProperty('value');
if (!isObj && fromState) {
toState.absorbOptions(fromState.options);
}
triggersWithStates[triggerName] = toState;
if (!fromState) {
fromState = DEFAULT_STATE_VALUE;
}
const isRemoval = toState.value === VOID_VALUE;
        // normally this code isn't reached; however, if an object expression
        // is passed in then it may be a new object each time. Comparing the value
        // is important since it will stay the same despite there being a new object.
        // The removal arc here is special cased because the same element is triggered
        // twice in the event that it contains animations on the outer/inner portions
        // of the host container
if (!isRemoval && fromState.value === toState.value) {
// this means that despite the value not changing, some inner params
// have changed which means that the animation final styles need to be applied
if (!objEquals(fromState.params, toState.params)) {
const errors = [];
const fromStyles = trigger.matchStyles(fromState.value, fromState.params, errors);
const toStyles = trigger.matchStyles(toState.value, toState.params, errors);
if (errors.length) {
this._engine.reportError(errors);
}
else {
this._engine.afterFlush(() => {
eraseStyles(element, fromStyles);
setStyles(element, toStyles);
});
}
}
return;
}
const playersOnElement = getOrSetAsInMap(this._engine.playersByElement, element, []);
playersOnElement.forEach(player => {
            // only remove the player if it is queued on the EXACT same trigger/namespace
            // we also only deal with queued players here because if the animation has
            // started then we want to keep the player alive until the flush happens
            // (which is where the previousPlayers are passed into the new player)
if (player.namespaceId == this.id && player.triggerName == triggerName && player.queued) {
player.destroy();
}
});
let transition = trigger.matchTransition(fromState.value, toState.value, element, toState.params);
let isFallbackTransition = false;
if (!transition) {
if (!defaultToFallback)
return;
transition = trigger.fallbackTransition;
isFallbackTransition = true;
}
this._engine.totalQueuedPlayers++;
this._queue.push({ element, triggerName, transition, fromState, toState, player, isFallbackTransition });
if (!isFallbackTransition) {
addClass(element, QUEUED_CLASSNAME);
player.onStart(() => {
removeClass(element, QUEUED_CLASSNAME);
});
}
player.onDone(() => {
let index = this.players.indexOf(player);
if (index >= 0) {
this.players.splice(index, 1);
}
const players = this._engine.playersByElement.get(element);
if (players) {
let index = players.indexOf(player);
if (index >= 0) {
players.splice(index, 1);
}
}
});
this.players.push(player);
playersOnElement.push(player);
return player;
}
deregister(name) {
delete this._triggers[name];
this._engine.statesByElement.forEach((stateMap, element) => {
delete stateMap[name];
});
this._elementListeners.forEach((listeners, element) => {
this._elementListeners.set(element, listeners.filter(entry => {
return entry.name != name;
}));
});
}
clearElementCache(element) {
this._engine.statesByElement.delete(element);
this._elementListeners.delete(element);
const elementPlayers = this._engine.playersByElement.get(element);
if (elementPlayers) {
elementPlayers.forEach(player => player.destroy());
this._engine.playersByElement.delete(element);
}
}
_signalRemovalForInnerTriggers(rootElement, context) {
const elements = this._engine.driver.query(rootElement, NG_TRIGGER_SELECTOR, true);
// emulate a leave animation for all inner nodes within this node.
// If there are no animations found for any of the nodes then clear the cache
// for the element.
elements.forEach(elm => {
// this means that an inner remove() operation has already kicked off
// the animation on this element...
if (elm[REMOVAL_FLAG])
return;
const namespaces = this._engine.fetchNamespacesByElement(elm);
if (namespaces.size) {
namespaces.forEach(ns => ns.triggerLeaveAnimation(elm, context, false, true));
}
else {
this.clearElementCache(elm);
}
});
// If the child elements were removed along with the parent, their animations might not
// have completed. Clear all the elements from the cache so we don't end up with a memory leak.
this._engine.afterFlushAnimationsDone(() => elements.forEach(elm => this.clearElementCache(elm)));
}
triggerLeaveAnimation(element, context, destroyAfterComplete, defaultToFallback) {
const triggerStates = this._engine.statesByElement.get(element);
if (triggerStates) {
const players = [];
Object.keys(triggerStates).forEach(triggerName => {
// this check is here in the event that an element is removed
// twice (both on the host level and the component level)
if (this._triggers[triggerName]) {
const player = this.trigger(element, triggerName, VOID_VALUE, defaultToFallback);
if (player) {
players.push(player);
}
}
});
if (players.length) {
this._engine.markElementAsRemoved(this.id, element, true, context);
if (destroyAfterComplete) {
optimizeGroupPlayer(players).onDone(() => this._engine.processLeaveNode(element));
}
return true;
}
}
return false;
}
prepareLeaveAnimationListeners(element) {
const listeners = this._elementListeners.get(element);
const elementStates = this._engine.statesByElement.get(element);
// if this statement fails then it means that the element was picked up
// by an earlier flush (or there are no listeners at all to track the leave).
if (listeners && elementStates) {
const visitedTriggers = new Set();
listeners.forEach(listener => {
const triggerName = listener.name;
if (visitedTriggers.has(triggerName))
return;
visitedTriggers.add(triggerName);
const trigger = this._triggers[triggerName];
const transition = trigger.fallbackTransition;
const fromState = elementStates[triggerName] || DEFAULT_STATE_VALUE;
const toState = new StateValue(VOID_VALUE);
const player = new TransitionAnimationPlayer(this.id, triggerName, element);
this._engine.totalQueuedPlayers++;
this._queue.push({
element,
triggerName,
transition,
fromState,
toState,
player,
isFallbackTransition: true
});
});
}
}
removeNode(element, context) {
const engine = this._engine;
if (element.childElementCount) {
this._signalRemovalForInnerTriggers(element, context);
}
// this means that a * => VOID animation was detected and kicked off
if (this.triggerLeaveAnimation(element, context, true))
return;
// find the player that is animating and make sure that the
// removal is delayed until that player has completed
let containsPotentialParentTransition = false;
if (engine.totalAnimations) {
const currentPlayers = engine.players.length ? engine.playersByQueriedElement.get(element) : [];
            // when this `if statement` does not continue forward it means that
            // a previous animation query has selected the current element and
            // is animating it. In this situation we want to continue forward and
            // allow the element to be queued up for animation later.
if (currentPlayers && currentPlayers.length) {
containsPotentialParentTransition = true;
}
else {
let parent = element;
while (parent = parent.parentNode) {
const triggers = engine.statesByElement.get(parent);
if (triggers) {
containsPotentialParentTransition = true;
break;
}
}
}
}
// at this stage we know that the element will either get removed
// during flush or will be picked up by a parent query. Either way
// we need to fire the listeners for this element when it DOES get
// removed (once the query parent animation is done or after flush)
this.prepareLeaveAnimationListeners(element);
        // whether or not a parent has an animation we need to defer the leave
        // operation until we have more information (which we do after flush() has been called)
if (containsPotentialParentTransition) {
engine.markElementAsRemoved(this.id, element, false, context);
}
else {
const removalFlag = element[REMOVAL_FLAG];
if (!removalFlag || removalFlag === NULL_REMOVAL_STATE) {
// we do this after the flush has occurred such
// that the callbacks can be fired
engine.afterFlush(() => this.clearElementCache(element));
engine.destroyInnerAnimations(element);
engine._onRemovalComplete(element, context);
}
}
}
insertNode(element, parent) {
addClass(element, this._hostClassName);
}
drainQueuedTransitions(microtaskId) {
const instructions = [];
this._queue.forEach(entry => {
const player = entry.player;
if (player.destroyed)
return;
const element = entry.element; | const listeners = this._elementListeners.get(element);
if (listeners) {
listeners.forEach((listener) => {
if (listener.name == entry.triggerName) {
const baseEvent = makeAnimationEvent(element, entry.triggerName, entry.fromState.value, entry.toState.value);
baseEvent['_data'] = microtaskId;
listenOnPlayer(entry.player, listener.phase, baseEvent, listener.callback);
}
});
}
if (player.markedForDestroy) {
this._engine.afterFlush(() => {
// now we can destroy the element properly since the event listeners have
// been bound to the player
player.destroy();
});
}
else {
instructions.push(entry);
}
});
this._queue = [];
return instructions.sort((a, b) => {
            // if depCount == 0 then move to front
// otherwise if a contains b then move back
const d0 = a.transition.ast.depCount;
const d1 = b.transition.ast.depCount;
if (d0 == 0 || d1 == 0) {
return d0 - d1;
}
return this._engine.driver.containsElement(a.element, b.element) ? 1 : -1;
});
}
destroy(context) {
this.players.forEach(p => p.destroy());
this._signalRemovalForInnerTriggers(this.hostElement, context);
}
elementContainsData(element) {
let containsData = false;
if (this._elementListeners.has(element))
containsData = true;
containsData =
(this._queue.find(entry => entry.element === element) ? true : false) || containsData;
return containsData;
}
}
class TransitionAnimationEngine {
constructor(bodyNode, driver, _normalizer) {
this.bodyNode = bodyNode;
this.driver = driver;
this._normalizer = _normalizer;
this.players = [];
this.newHostElements = new Map();
this.playersByElement = new Map();
this.playersByQueriedElement = new Map();
this.statesByElement = new Map();
this.disabledNodes = new Set();
this.totalAnimations = 0;
this.totalQueuedPlayers = 0;
this._namespaceLookup = {};
this._namespaceList = [];
this._flushFns = [];
this._whenQuietFns = [];
this.namespacesByHostElement = new Map();
this.collectedEnterElements = [];
this.collectedLeaveElements = [];
// this method is designed to be overridden by the code that uses this engine
this.onRemovalComplete = (element, context) => { };
}
/** @internal */
_onRemovalComplete(element, context) {
this.onRemovalComplete(element, context);
}
get queuedPlayers() {
const players = [];
this._namespaceList.forEach(ns => {
ns.players.forEach(player => {
if (player.queued) {
players.push(player);
}
});
});
return players;
}
createNamespace(namespaceId, hostElement) {
const ns = new AnimationTransitionNamespace(namespaceId, hostElement, this);
if (this.bodyNode && this.driver.containsElement(this.bodyNode, hostElement)) {
this._balanceNamespaceList(ns, hostElement);
}
else {
            // defer this until the flush occurs, once the host element has
            // been inserted, so that we know exactly where to place it in
            // the namespace list
this.newHostElements.set(hostElement, ns);
            // given that this host element is a part of the animation code, it
            // may or may not be inserted by a parent node that is of an
            // animation renderer type. If this happens then we can still have
            // access to this item when we query for :enter nodes. If the parent
            // is a renderer then the set data-structure will normalize the entry
this.collectEnterElement(hostElement);
}
return this._namespaceLookup[namespaceId] = ns;
}
_balanceNamespaceList(ns, hostElement) {
const limit = this._namespaceList.length - 1;
if (limit >= 0) {
let found = false;
for (let i = limit; i >= 0; i--) {
const nextNamespace = this._namespaceList[i];
if (this.driver.containsElement(nextNamespace.hostElement, hostElement)) {
this._namespaceList.splice(i + 1, 0, ns);
found = true;
break;
}
}
if (!found) {
this._namespaceList.splice(0, 0, ns);
}
}
else {
this._namespaceList.push(ns);
}
this.namespacesByHostElement.set(hostElement, ns);
return ns;
}
register(namespaceId, hostElement) {
let ns = this._namespaceLookup[namespaceId];
if (!ns) {
ns = this.createNamespace(namespaceId, hostElement);
}
return ns;
}
registerTrigger(namespaceId, name, trigger) {
let ns = this._namespaceLookup[namespaceId];
if (ns && ns.register(name, trigger)) {
this.totalAnimations++;
}
}
destroy(namespaceId, context) {
if (!namespaceId)
return;
const ns = this._fetchNamespace(namespaceId);
this.afterFlush(() => {
this.namespacesByHostElement.delete(ns.hostElement);
delete this._namespaceLookup[namespaceId];
const index = this._namespaceList.indexOf(ns);
if (index >= 0) {
this._namespaceList.splice(index, 1);
}
});
this.afterFlushAnimationsDone(() => ns.destroy(context));
}
_fetchNamespace(id) {
return this._namespaceLookup[id];
}
fetchNamespacesByElement(element) {
        // normally there should only be one namespace per element, however
        // if @triggers are placed on both the component element and its
        // host element (within the component code) then there will be
        // two namespaces returned. We use a set here to simplify the dedupe
        // of namespaces in case there are multiple triggers on both the element and the host
const namespaces = new Set();
const elementStates = this.statesByElement.get(element);
if (elementStates) {
const keys = Object.keys(elementStates);
for (let i = 0; i < keys.length; i++) {
const nsId = elementStates[keys[i]].namespaceId;
if (nsId) {
const ns = this._fetchNamespace(nsId);
if (ns) {
namespaces.add(ns);
}
}
}
}
return namespaces;
}
trigger(namespaceId, element, name, value) {
if (isElementNode(element)) {
const ns = this._fetchNamespace(namespaceId);
if (ns) {
ns.trigger(element, name, value);
return true;
}
}
return false;
}
insertNode(namespaceId, element, parent, insertBefore) {
if (!isElementNode(element))
return;
// special case for when an element is removed and reinserted (move operation)
// when this occurs we do not want to use the element for deletion later
const details = element[REMOVAL_FLAG];
if (details && details.setForRemoval) {
details.setForRemoval = false;
details.setForMove = true;
const index = this.collectedLeaveElements.indexOf(element);
if (index >= 0) {
this.collectedLeaveElements.splice(index, 1);
}
}
// in the event that the namespaceId is blank then the caller
// code does not contain any animation code in it, but it is
// just being called so that the node is marked as being inserted
if (namespaceId) {
const ns = this._fetchNamespace(namespaceId);
// This if-statement is a workaround for router issue #21947.
// The router sometimes hits a race condition where while a route
// is being instantiated a new navigation arrives, triggering leave
// animation of DOM that has not been fully initialized, until this
// is resolved, we need to handle the scenario when DOM is not in a
// consistent state during the animation.
if (ns) {
ns.insertNode(element, parent);
}
}
// only *directives and host elements are inserted before
if (insertBefore) {
this.collectEnterElement(element);
}
}
collectEnterElement(element) {
this.collectedEnterElements.push(element);
}
markElementAsDisabled(element, value) {
if (value) {
if (!this.disabledNodes.has(element)) {
this.disabledNodes.add(element);
addClass(element, DISABLED_CLASSNAME);
}
}
else if (this.disabledNodes.has(element)) {
this.disabledNodes.delete(element);
removeClass(element, DISABLED_CLASSNAME);
}
}
removeNode(namespaceId, element, isHostElement, context) {
if (isElementNode(element)) {
const ns = namespaceId ? this._fetchNamespace(namespaceId) : null;
if (ns) {
ns.removeNode(element, context);
}
else {
this.markElementAsRemoved(namespaceId, element, false, context);
}
if (isHostElement) {
const hostNS = this.namespacesByHostElement.get(element);
if (hostNS && hostNS.id !== namespaceId) {
hostNS.removeNode(element, context);
}
}
}
else {
this._onRemovalComplete(element, context);
}
}
markElementAsRemoved(namespaceId, element, hasAnimation, context) {
this.collectedLeaveElements.push(element);
element[REMOVAL_FLAG] =
{ namespaceId, setForRemoval: context, hasAnimation, removedBeforeQueried: false };
}
listen(namespaceId, element, name, phase, callback) {
if (isElementNode(element)) {
return this._fetchNamespace(namespaceId).listen(element, name, phase, callback);
}
return () => { };
}
_buildInstruction(entry, subTimelines, enterClassName, leaveClassName, skipBuildAst) {
return entry.transition.build(this.driver, entry.element, entry.fromState.value, entry.toState.value, enterClassName, leaveClassName, entry.fromState.options, entry.toState.options, subTimelines, skipBuildAst);
}
destroyInnerAnimations(containerElement) {
let elements = this.driver.query(containerElement, NG_TRIGGER_SELECTOR, true);
elements.forEach(element => this.destroyActiveAnimationsForElement(element));
if (this.playersByQueriedElement.size == 0)
return;
elements = this.driver.query(containerElement, NG_ANIMATING_SELECTOR, true);
elements.forEach(element => this.finishActiveQueriedAnimationOnElement(element));
}
destroyActiveAnimationsForElement(element) {
const players = this.playersByElement.get(element);
if (players) {
players.forEach(player => {
// special case for when an element is set for destruction, but hasn't started.
// in this situation we want to delay the destruction until the flush occurs
// so that any event listeners attached to the player are triggered.
if (player.queued) {
player.markedForDestroy = true;
}
else {
player.destroy();
}
});
}
}
finishActiveQueriedAnimationOnElement(element) {
const players = this.playersByQueriedElement.get(element);
if (players) {
players.forEach(player => player.finish());
}
}
whenRenderingDone() {
return new Promise(resolve => {
if (this.players.length) {
return optimizeGroupPlayer(this.players).onDone(() => resolve());
}
else {
resolve();
}
});
}
processLeaveNode(element) {
const details = element[REMOVAL_FLAG];
if (details && details.setForRemoval) {
            // this prevents the element from being removed twice
element[REMOVAL_FLAG] = NULL_REMOVAL_STATE;
if (details.namespaceId) {
this.destroyInnerAnimations(element);
const ns = this._fetchNamespace(details.namespaceId);
if (ns) {
ns.clearElementCache(element);
}
}
this._onRemovalComplete(element, details.setForRemoval);
}
if (this.driver.matchesElement(element, DISABLED_SELECTOR)) {
this.markElementAsDisabled(element, false);
}
this.driver.query(element, DISABLED_SELECTOR, true).forEach(node => {
this.markElementAsDisabled(node, false);
});
}
flush(microtaskId = -1) {
let players = [];
if (this.newHostElements.size) {
this.newHostElements.forEach((ns, element) => this._balanceNamespaceList(ns, element));
this.newHostElements.clear();
}
if (this.totalAnimations && this.collectedEnterElements.length) {
for (let i = 0; i < this.collectedEnterElements.length; i++) {
const elm = this.collectedEnterElements[i];
addClass(elm, STAR_CLASSNAME);
}
}
if (this._namespaceList.length &&
(this.totalQueuedPlayers || this.collectedLeaveElements.length)) {
const cleanupFns = [];
try {
players = this._flushAnimations(cleanupFns, microtaskId);
}
finally {
for (let i = 0; i < cleanupFns.length; i++) {
cleanupFns[i]();
}
}
}
else {
for (let i = 0; i < this.collectedLeaveElements.length; i++) {
const element = this.collectedLeaveElements[i];
this.processLeaveNode(element);
}
}
this.totalQueuedPlayers = 0;
this.collectedEnterElements.length = 0;
this.collectedLeaveElements.length = 0;
this._flushFns.forEach(fn => fn());
this._flushFns = [];
if (this._whenQuietFns.length) {
// we move these over to a variable so that
// if any new callbacks are registered in another
// flush they do not populate the existing set
const quietFns = this._whenQuietFns;
this._whenQuietFns = [];
if (players.length) {
optimizeGroupPlayer(players).onDone(() => {
quietFns.forEach(fn => fn());
});
}
else {
quietFns.forEach(fn => fn());
}
}
}
reportError(errors) {
throw new Error(`Unable to process animations due to the following failed trigger transitions\n ${errors.join('\n')}`);
}
_flushAnimations(cleanupFns, microtaskId) {
const subTimelines = new ElementInstructionMap();
const skippedPlayers = [];
const skippedPlayersMap = new Map();
const queuedInstructions = [];
const queriedElements = new Map();
const allPreStyleElements = new Map();
const allPostStyleElements = new Map();
const disabledElementsSet = new Set();
this.disabledNodes.forEach(node => {
disabledElementsSet.add(node);
const nodesThatAreDisabled = this.driver.query(node, QUEUED_SELECTOR, true);
for (let i = 0; i < nodesThatAreDisabled.length; i++) {
disabledElementsSet.add(nodesThatAreDisabled[i]);
}
});
const bodyNode = this.bodyNode;
const allTriggerElements = Array.from(this.statesByElement.keys());
const enterNodeMap = buildRootMap(allTriggerElements, this.collectedEnterElements);
// this must occur before the instructions are built below such that
// the :enter queries match the elements (since the timeline queries
// are fired during instruction building).
const enterNodeMapIds = new Map();
let i = 0;
enterNodeMap.forEach((nodes, root) => {
const className = ENTER_CLASSNAME + i++;
enterNodeMapIds.set(root, className);
nodes.forEach(node => addClass(node, className));
});
const allLeaveNodes = [];
const mergedLeaveNodes = new Set();
const leaveNodesWithoutAnimations = new Set();
for (let i = 0; i < this.collectedLeaveElements.length; i++) {
const element = this.collectedLeaveElements[i];
const details = element[REMOVAL_FLAG];
if (details && details.setForRemoval) {
allLeaveNodes.push(element);
mergedLeaveNodes.add(element);
if (details.hasAnimation) {
this.driver.query(element, STAR_SELECTOR, true).forEach(elm => mergedLeaveNodes.add(elm));
}
else {
leaveNodesWithoutAnimations.add(element);
}
}
}
const leaveNodeMapIds = new Map();
const leaveNodeMap = buildRootMap(allTriggerElements, Array.from(mergedLeaveNodes));
leaveNodeMap.forEach((nodes, root) => {
const className = LEAVE_CLASSNAME + i++;
leaveNodeMapIds.set(root, className);
nodes.forEach(node => addClass(node, className));
});
cleanupFns.push(() => {
enterNodeMap.forEach((nodes, root) => {
const className = enterNodeMapIds.get(root);
nodes.forEach(node => removeClass(node, className));
});
leaveNodeMap.forEach((nodes, root) => {
const className = leaveNodeMapIds.get(root);
nodes.forEach(node => removeClass(node, className));
});
allLeaveNodes.forEach(element => {
this.processLeaveNode(element);
});
});
const allPlayers = [];
const erroneousTransitions = [];
for (let i = this._namespaceList.length - 1; i >= 0; i--) {
const ns = this._namespaceList[i];
ns.drainQueuedTransitions(microtaskId).forEach(entry => {
const player = entry.player;
const element = entry.element;
allPlayers.push(player);
if (this.collectedEnterElements.length) {
const details = element[REMOVAL_FLAG];
// move animations are currently not supported...
if (details && details.setForMove) {
player.destroy();
return;
}
}
const nodeIsOrphaned = !bodyNode || !this.driver.containsElement(bodyNode, element);
const leaveClassName = leaveNodeMapIds.get(element);
const enterClassName = enterNodeMapIds.get(element);
const instruction = this._buildInstruction(entry, subTimelines, enterClassName, leaveClassName, nodeIsOrphaned);
if (instruction.errors && instruction.errors.length) {
erroneousTransitions.push(instruction);
return;
}
                // even though the element may not be a part of the DOM, it may
                // still be added at a later point (due to the mechanics of content
                // projection and/or dynamic component insertion) therefore it's
                // important we still style the element.
if (nodeIsOrphaned) {
player.onStart(() => eraseStyles(element, instruction.fromStyles));
player.onDestroy(() => setStyles(element, instruction.toStyles));
skippedPlayers.push(player);
return;
}
                // if an unmatched transition is queued to go then it SHOULD NOT render
                // an animation and cancel the previously running animations.
if (entry.isFallbackTransition) {
player.onStart(() => eraseStyles(element, instruction.fromStyles));
player.onDestroy(() => setStyles(element, instruction.toStyles));
skippedPlayers.push(player);
return;
}
// this means that if a parent animation uses this animation as a sub trigger
// then it will instruct the timeline builder to not add a player delay, but
// instead stretch the first keyframe gap up until the animation starts. The
// reason this is important is to prevent extra initialization styles from being
// required by the user in the animation.
instruction.timelines.forEach(tl => tl.stretchStartingKeyframe = true);
subTimelines.append(element, instruction.timelines);
const tuple = { instruction, player, element };
queuedInstructions.push(tuple);
instruction.queriedElements.forEach(element => getOrSetAsInMap(queriedElements, element, []).push(player));
instruction.preStyleProps.forEach((stringMap, element) => {
const props = Object.keys(stringMap);
if (props.length) {
let setVal = allPreStyleElements.get(element);
if (!setVal) {
allPreStyleElements.set(element, setVal = new Set());
}
props.forEach(prop => setVal.add(prop));
}
});
instruction.postStyleProps.forEach((stringMap, element) => {
const props = Object.keys(stringMap);
let setVal = allPostStyleElements.get(element);
if (!setVal) {
allPostStyleElements.set(element, setVal = new Set());
}
props.forEach(prop => setVal.add(prop));
});
});
}
if (erroneousTransitions.length) {
const errors = [];
erroneousTransitions.forEach(instruction => {
errors.push(`@${instruction.triggerName} has failed due to:\n`);
instruction.errors.forEach(error => errors.push(`- ${error}\n`));
});
allPlayers.forEach(player => player.destroy());
this.reportError(errors);
}
const allPreviousPlayersMap = new Map();
// this map works to tell which element in the DOM tree is contained by
// which animation. Further down below this map will get populated once
// the players are built and in doing so it can efficiently figure out
// if a sub player is skipped due to a parent player having priority.
const animationElementMap = new Map();
queuedInstructions.forEach(entry => {
const element = entry.element;
if (subTimelines.has(element)) {
animationElementMap.set(element, element);
this._beforeAnimationBuild(entry.player.namespaceId, entry.instruction, allPreviousPlayersMap);
}
});
skippedPlayers.forEach(player => {
const element = player.element;
const previousPlayers = this._getPreviousPlayers(element, false, player.namespaceId, player.triggerName, null);
previousPlayers.forEach(prevPlayer => {
getOrSetAsInMap(allPreviousPlayersMap, element, []).push(prevPlayer);
prevPlayer.destroy();
});
});
        // this is a special case for nodes that will be removed either by
        // having their own leave animations or by being queried in a container
        // that will be removed once a parent animation is complete. The idea
        // here is that * styles must be identical to ! styles because of
        // backwards compatibility (* is also filled in by default in many places).
        // Otherwise * styles will return an empty value or auto since the element
        // that is being getComputedStyle'd will not be visible (since * = destination)
const replaceNodes = allLeaveNodes.filter(node => {
return replacePostStylesAsPre(node, allPreStyleElements, allPostStyleElements);
});
// POST STAGE: fill the * styles
const postStylesMap = new Map();
const allLeaveQueriedNodes = cloakAndComputeStyles(postStylesMap, this.driver, leaveNodesWithoutAnimations, allPostStyleElements, AUTO_STYLE);
allLeaveQueriedNodes.forEach(node => {
if (replacePostStylesAsPre(node, allPreStyleElements, allPostStyleElements)) {
replaceNodes.push(node);
}
});
// PRE STAGE: fill the ! styles
const preStylesMap = new Map();
enterNodeMap.forEach((nodes, root) => {
cloakAndComputeStyles(preStylesMap, this.driver, new Set(nodes), allPreStyleElements, ɵPRE_STYLE);
});
replaceNodes.forEach(node => {
const post = postStylesMap.get(node);
const pre = preStylesMap.get(node);
postStylesMap.set(node, Object.assign(Object.assign({}, post), pre));
});
const rootPlayers = [];
const subPlayers = [];
const NO_PARENT_ANIMATION_ELEMENT_DETECTED = {};
queuedInstructions.forEach(entry => {
const { element, player, instruction } = entry;
// this means that it was never consumed by a parent animation which
// means that it is independent and therefore should be set for animation
if (subTimelines.has(element)) {
if (disabledElementsSet.has(element)) {
player.onDestroy(() => setStyles(element, instruction.toStyles));
player.disabled = true;
player.overrideTotalTime(instruction.totalTime);
skippedPlayers.push(player);
return;
}
// this will flow up the DOM and query the map to figure out
// if a parent animation has priority over it. In the situation
// that a parent is detected then it will cancel the loop. If
// nothing is detected, or it takes a few hops to find a parent,
// then it will fill in the missing nodes and signal them as having
// a detected parent (or a NO_PARENT value via a special constant).
let parentWithAnimation = NO_PARENT_ANIMATION_ELEMENT_DETECTED;
if (animationElementMap.size > 1) {
let elm = element;
const parentsToAdd = [];
while (elm = elm.parentNode) {
const detectedParent = animationElementMap.get(elm);
if (detectedParent) {
parentWithAnimation = detectedParent;
break;
}
parentsToAdd.push(elm);
}
parentsToAdd.forEach(parent => animationElementMap.set(parent, parentWithAnimation));
}
const innerPlayer = this._buildAnimation(player.namespaceId, instruction, allPreviousPlayersMap, skippedPlayersMap, preStylesMap, postStylesMap);
player.setRealPlayer(innerPlayer);
if (parentWithAnimation === NO_PARENT_ANIMATION_ELEMENT_DETECTED) {
rootPlayers.push(player);
}
else {
const parentPlayers = this.playersByElement.get(parentWithAnimation);
if (parentPlayers && parentPlayers.length) {
player.parentPlayer = optimizeGroupPlayer(parentPlayers);
}
skippedPlayers.push(player);
}
}
else {
eraseStyles(element, instruction.fromStyles);
player.onDestroy(() => setStyles(element, instruction.toStyles));
                // there still might be an ancestor player animating this
// element therefore we will still add it as a sub player
// even if its animation may be disabled
subPlayers.push(player);
if (disabledElementsSet.has(element)) {
skippedPlayers.push(player);
}
}
});
// find all of the sub players' corresponding inner animation player
subPlayers.forEach(player => {
            // even if no players are found for a sub animation, it
            // will still complete itself after the next tick since it's Noop
const playersForElement = skippedPlayersMap.get(player.element);
if (playersForElement && playersForElement.length) {
const innerPlayer = optimizeGroupPlayer(playersForElement);
player.setRealPlayer(innerPlayer);
}
});
// the reason why we don't actually play the animation is
// because all that a skipped player is designed to do is to
// fire the start/done transition callback events
skippedPlayers.forEach(player => {
if (player.parentPlayer) {
player.syncPlayerEvents(player.parentPlayer);
}
else {
player.destroy();
}
});
// run through all of the queued removals and see if they
// were picked up by a query. If not then perform the removal
// operation right away unless a parent animation is ongoing.
for (let i = 0; i < allLeaveNodes.length; i++) {
const element = allLeaveNodes[i];
const details = element[REMOVAL_FLAG];
removeClass(element, LEAVE_CLASSNAME);
// this means the element has a removal animation that is being
// taken care of and therefore the inner elements will hang around
// until that animation is over (or the parent queried animation)
if (details && details.hasAnimation)
continue;
let players = [];
// if this element is queried or if it contains queried children
        // then we do not want the element to be removed from the page
// until the queried animations have finished
if (queriedElements.size) {
let queriedPlayerResults = queriedElements.get(element);
if (queriedPlayerResults && queriedPlayerResults.length) {
players.push(...queriedPlayerResults);
}
let queriedInnerElements = this.driver.query(element, NG_ANIMATING_SELECTOR, true);
for (let j = 0; j < queriedInnerElements.length; j++) {
let queriedPlayers = queriedElements.get(queriedInnerElements[j]);
if (queriedPlayers && queriedPlayers.length) {
players.push(...queriedPlayers);
}
}
}
const activePlayers = players.filter(p => !p.destroyed);
if (activePlayers.length) {
removeNodesAfterAnimationDone(this, element, activePlayers);
}
else {
this.processLeaveNode(element);
}
}
// this is required so the cleanup method doesn't remove them
allLeaveNodes.length = 0;
rootPlayers.forEach(player => {
this.players.push(player);
player.onDone(() => {
player.destroy();
const index = this.players.indexOf(player);
this.players.splice(index, 1);
});
player.play();
});
return rootPlayers;
}
elementContainsData(namespaceId, element) {
let containsData = false;
const details = element[REMOVAL_FLAG];
if (details && details.setForRemoval)
containsData = true;
if (this.playersByElement.has(element))
containsData = true;
if (this.playersByQueriedElement.has(element))
containsData = true;
if (this.statesByElement.has(element))
containsData = true;
return this._fetchNamespace(namespaceId).elementContainsData(element) || containsData;
}
afterFlush(callback) {
this._flushFns.push(callback);
}
afterFlushAnimationsDone(callback) {
this._whenQuietFns.push(callback);
}
_getPreviousPlayers(element, isQueriedElement, namespaceId, triggerName, toStateValue) {
let players = [];
if (isQueriedElement) {
const queriedElementPlayers = this.playersByQueriedElement.get(element);
if (queriedElementPlayers) {
players = queriedElementPlayers;
}
}
else {
const elementPlayers = this.playersByElement.get(element);
if (elementPlayers) {
const isRemovalAnimation = !toStateValue || toStateValue == VOID_VALUE;
elementPlayers.forEach(player => {
if (player.queued)
return;
if (!isRemovalAnimation && player.triggerName != triggerName)
return;
players.push(player);
});
}
}
if (namespaceId || triggerName) {
players = players.filter(player => {
if (namespaceId && namespaceId != player.namespaceId)
return false;
if (triggerName && triggerName != player.triggerName)
return false;
return true;
});
}
return players;
}
_beforeAnimationBuild(namespaceId, instruction, allPreviousPlayersMap) {
const triggerName = instruction.triggerName;
const rootElement = instruction.element;
// when a removal animation occurs, ALL previous players are collected
// and destroyed (even if they are outside of the current namespace)
const targetNameSpaceId = instruction.isRemovalTransition ? undefined : namespaceId;
const targetTriggerName = instruction.isRemovalTransition ? undefined : triggerName;
for (const timelineInstruction of instruction.timelines) {
const element = timelineInstruction.element;
const isQueriedElement = element !== rootElement;
const players = getOrSetAsInMap(allPreviousPlayersMap, element, []);
const previousPlayers = this._getPreviousPlayers(element, isQueriedElement, targetNameSpaceId, targetTriggerName, instruction.toState);
previousPlayers.forEach(player => {
const realPlayer = player.getRealPlayer();
if (realPlayer.beforeDestroy) {
realPlayer.beforeDestroy();
}
player.destroy();
players.push(player);
});
}
// this needs to be done so that the PRE/POST styles can be
// computed properly without interfering with the previous animation
eraseStyles(rootElement, instruction.fromStyles);
}
_buildAnimation(namespaceId, instruction, allPreviousPlayersMap, skippedPlayersMap, preStylesMap, postStylesMap) {
const triggerName = instruction.triggerName;
const rootElement = instruction.element;
// we first run this so that the previous animation player
// data can be passed into the successive animation players
const allQueriedPlayers = [];
const allConsumedElements = new Set();
const allSubElements = new Set();
const allNewPlayers = instruction.timelines.map(timelineInstruction => {
const element = timelineInstruction.element;
allConsumedElements.add(element);
// FIXME (matsko): make sure to-be-removed animations are removed properly
const details = element[REMOVAL_FLAG];
if (details && details.removedBeforeQueried)
return new NoopAnimationPlayer(timelineInstruction.duration, timelineInstruction.delay);
const isQueriedElement = element !== rootElement;
const previousPlayers = flattenGroupPlayers((allPreviousPlayersMap.get(element) || EMPTY_PLAYER_ARRAY)
.map(p => p.getRealPlayer()))
.filter(p => {
            // the `element` is not a part of the AnimationPlayer definition, but
// Mock/WebAnimations
// use the element within their implementation. This will be added in Angular5 to
// AnimationPlayer
const pp = p;
return pp.element ? pp.element === element : false;
});
const preStyles = preStylesMap.get(element);
const postStyles = postStylesMap.get(element);
const keyframes = normalizeKeyframes(this.driver, this._normalizer, element, timelineInstruction.keyframes, preStyles, postStyles);
const player = this._buildPlayer(timelineInstruction, keyframes, previousPlayers);
// this means that this particular player belongs to a sub trigger. It is
// important that we match this player up with the corresponding (@trigger.listener)
if (timelineInstruction.subTimeline && skippedPlayersMap) {
allSubElements.add(element);
}
if (isQueriedElement) {
const wrappedPlayer = new TransitionAnimationPlayer(namespaceId, triggerName, element);
wrappedPlayer.setRealPlayer(player);
allQueriedPlayers.push(wrappedPlayer);
}
return player;
});
allQueriedPlayers.forEach(player => {
getOrSetAsInMap(this.playersByQueriedElement, player.element, []).push(player);
player.onDone(() => deleteOrUnsetInMap(this.playersByQueriedElement, player.element, player));
});
allConsumedElements.forEach(element => addClass(element, NG_ANIMATING_CLASSNAME));
const player = optimizeGroupPlayer(allNewPlayers);
player.onDestroy(() => {
allConsumedElements.forEach(element => removeClass(element, NG_ANIMATING_CLASSNAME));
setStyles(rootElement, instruction.toStyles);
});
// this basically makes all of the callbacks for sub element animations
// be dependent on the upper players for when they finish
allSubElements.forEach(element => {
getOrSetAsInMap(skippedPlayersMap, element, []).push(player);
});
return player;
}
_buildPlayer(instruction, keyframes, previousPlayers) {
if (keyframes.length > 0) {
return this.driver.animate(instruction.element, keyframes, instruction.duration, instruction.delay, instruction.easing, previousPlayers);
}
// special case for when an empty transition|definition is provided
// ... there is no point in rendering an empty animation
return new NoopAnimationPlayer(instruction.duration, instruction.delay);
}
}
class TransitionAnimationPlayer {
constructor(namespaceId, triggerName, element) {
this.namespaceId = namespaceId;
this.triggerName = triggerName;
this.element = element;
this._player = new NoopAnimationPlayer();
this._containsRealPlayer = false;
this._queuedCallbacks = {};
this.destroyed = false;
this.markedForDestroy = false;
this.disabled = false;
this.queued = true;
this.totalTime = 0;
}
setRealPlayer(player) {
if (this._containsRealPlayer)
return;
this._player = player;
Object.keys(this._queuedCallbacks).forEach(phase => {
this._queuedCallbacks[phase].forEach(callback => listenOnPlayer(player, phase, undefined, callback));
});
this._queuedCallbacks = {};
this._containsRealPlayer = true;
this.overrideTotalTime(player.totalTime);
this.queued = false;
}
getRealPlayer() {
return this._player;
}
overrideTotalTime(totalTime) {
this.totalTime = totalTime;
}
syncPlayerEvents(player) {
const p = this._player;
if (p.triggerCallback) {
player.onStart(() => p.triggerCallback('start'));
}
player.onDone(() => this.finish());
player.onDestroy(() => this.destroy());
}
_queueEvent(name, callback) {
getOrSetAsInMap(this._queuedCallbacks, name, []).push(callback);
}
onDone(fn) {
if (this.queued) {
this._queueEvent('done', fn);
}
this._player.onDone(fn);
}
onStart(fn) {
if (this.queued) {
this._queueEvent('start', fn);
}
this._player.onStart(fn);
}
onDestroy(fn) {
if (this.queued) {
this._queueEvent('destroy', fn);
}
this._player.onDestroy(fn);
}
init() {
this._player.init();
}
hasStarted() {
return this.queued ? false : this._player.hasStarted();
}
play() {
!this.queued && this._player.play();
}
pause() {
!this.queued && this._player.pause();
}
restart() {
!this.queued && this._player.restart();
}
finish() {
this._player.finish();
}
destroy() {
this.destroyed = true;
this._player.destroy();
}
reset() {
!this.queued && this._player.reset();
}
setPosition(p) {
if (!this.queued) {
this._player.setPosition(p);
}
}
getPosition() {
return this.queued ? 0 : this._player.getPosition();
}
/** @internal */
triggerCallback(phaseName) {
const p = this._player;
if (p.triggerCallback) {
p.triggerCallback(phaseName);
}
}
}
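// Illustrative sketch (assumed usage; not from the original bundle): a
// TransitionAnimationPlayer starts out wrapping a NoopAnimationPlayer with
// `queued === true`; listeners registered before the flush are buffered, e.g.
//
//   const player = new TransitionAnimationPlayer('ns1', 'fade', element);
//   player.onDone(doneFn);             // buffered in _queuedCallbacks while queued
//   player.setRealPlayer(innerPlayer); // replays buffered listeners, unqueues
//   player.play();                     // now forwarded to the real player
//
// The names 'ns1', 'fade', doneFn and innerPlayer are hypothetical.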
function deleteOrUnsetInMap(map, key, value) {
let currentValues;
if (map instanceof Map) {
currentValues = map.get(key);
if (currentValues) {
if (currentValues.length) {
const index = currentValues.indexOf(value);
currentValues.splice(index, 1);
}
if (currentValues.length == 0) {
map.delete(key);
}
}
}
else {
currentValues = map[key];
if (currentValues) {
if (currentValues.length) {
const index = currentValues.indexOf(value);
currentValues.splice(index, 1);
}
if (currentValues.length == 0) {
delete map[key];
}
}
}
return currentValues;
}
function normalizeTriggerValue(value) {
// we use `!= null` here because it's the most simple
// way to test against a "falsy" value without mixing
// in empty strings or a zero value. DO NOT OPTIMIZE.
return value != null ? value : null;
}
function isElementNode(node) {
return node && node['nodeType'] === 1;
}
function isTriggerEventValid(eventName) {
return eventName == 'start' || eventName == 'done';
}
function cloakElement(element, value) {
const oldValue = element.style.display;
element.style.display = value != null ? value : 'none';
return oldValue;
}
function cloakAndComputeStyles(valuesMap, driver, elements, elementPropsMap, defaultStyle) {
const cloakVals = [];
elements.forEach(element => cloakVals.push(cloakElement(element)));
const failedElements = [];
elementPropsMap.forEach((props, element) => {
const styles = {};
props.forEach(prop => {
const value = styles[prop] = driver.computeStyle(element, prop, defaultStyle);
// there is no easy way to detect this because a sub element could be removed
// by a parent animation element being detached.
if (!value || value.length == 0) {
element[REMOVAL_FLAG] = NULL_REMOVED_QUERIED_STATE;
failedElements.push(element);
}
});
valuesMap.set(element, styles);
});
    // we use an index variable here since Set.forEach(a, i) does not return
    // an index value for the closure (but instead just the value)
let i = 0;
elements.forEach(element => cloakElement(element, cloakVals[i++]));
return failedElements;
}
/*
Since the Angular renderer code will return a collection of inserted
nodes in all areas of a DOM tree, it's up to this algorithm to figure
out which nodes are roots for each animation @trigger.
By placing each inserted node into a Set and traversing upwards, it
 is possible to find the @trigger elements as well as any direct *star
 insertion nodes. If a @trigger root is found, then the enter element
 is placed into the Map[@trigger] spot.
*/
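// Illustrative example (assumed usage; not from the original bundle): given a
// registered root `trigger` and inserted nodes `a`, `b`, `c` that all sit
// somewhere beneath it in the DOM,
//
//   const rootMap = buildRootMap([trigger], [a, b, c]);
//   // rootMap.get(trigger) -> [a, b, c]
//
// each node is resolved upwards to its nearest known root; nodes nested
// inside another inserted node, or whose ancestors never reach a registered
// root, are simply left out of the map.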
function buildRootMap(roots, nodes) {
const rootMap = new Map();
roots.forEach(root => rootMap.set(root, []));
if (nodes.length == 0)
return rootMap;
const NULL_NODE = 1;
const nodeSet = new Set(nodes);
const localRootMap = new Map();
function getRoot(node) {
if (!node)
return NULL_NODE;
let root = localRootMap.get(node);
if (root)
return root;
const parent = node.parentNode;
if (rootMap.has(parent)) { // ngIf inside @trigger
root = parent;
}
else if (nodeSet.has(parent)) { // ngIf inside ngIf
root = NULL_NODE;
}
else { // recurse upwards
root = getRoot(parent);
}
localRootMap.set(node, root);
return root;
}
nodes.forEach(node => {
const root = getRoot(node);
if (root !== NULL_NODE) {
rootMap.get(root).push(node);
}
});
return rootMap;
}
const CLASSES_CACHE_KEY = '$$classes';
function containsClass(element, className) {
if (element.classList) {
return element.classList.contains(className);
}
else {
const classes = element[CLASSES_CACHE_KEY];
return classes && classes[className];
}
}
function addClass(element, className) {
if (element.classList) {
element.classList.add(className);
}
else {
let classes = element[CLASSES_CACHE_KEY];
if (!classes) {
classes = element[CLASSES_CACHE_KEY] = {};
}
classes[className] = true;
}
}
function removeClass(element, className) {
if (element.classList) {
element.classList.remove(className);
}
else {
let classes = element[CLASSES_CACHE_KEY];
if (classes) {
delete classes[className];
}
}
}
function removeNodesAfterAnimationDone(engine, element, players) {
optimizeGroupPlayer(players).onDone(() => engine.processLeaveNode(element));
}
function flattenGroupPlayers(players) {
const finalPlayers = [];
_flattenGroupPlayersRecur(players, finalPlayers);
return finalPlayers;
}
function _flattenGroupPlayersRecur(players, finalPlayers) {
for (let i = 0; i < players.length; i++) {
const player = players[i];
if (player instanceof ɵAnimationGroupPlayer) {
_flattenGroupPlayersRecur(player.players, finalPlayers);
}
else {
finalPlayers.push(player);
}
}
}
function objEquals(a, b) {
const k1 = Object.keys(a);
const k2 = Object.keys(b);
if (k1.length != k2.length)
return false;
for (let i = 0; i < k1.length; i++) {
const prop = k1[i];
if (!b.hasOwnProperty(prop) || a[prop] !== b[prop])
return false;
}
return true;
}
function replacePostStylesAsPre(element, allPreStyleElements, allPostStyleElements) {
const postEntry = allPostStyleElements.get(element);
if (!postEntry)
return false;
let preEntry = allPreStyleElements.get(element);
if (preEntry) {
postEntry.forEach(data => preEntry.add(data));
}
else {
allPreStyleElements.set(element, postEntry);
}
allPostStyleElements.delete(element);
return true;
}
class AnimationEngine {
constructor(bodyNode, _driver, _normalizer) {
this.bodyNode = bodyNode;
this._driver = _driver;
this._normalizer = _normalizer;
this._triggerCache = {};
// this method is designed to be overridden by the code that uses this engine
this.onRemovalComplete = (element, context) => { };
this._transitionEngine = new TransitionAnimationEngine(bodyNode, _driver, _normalizer);
this._timelineEngine = new TimelineAnimationEngine(bodyNode, _driver, _normalizer);
this._transitionEngine.onRemovalComplete = (element, context) => this.onRemovalComplete(element, context);
}
registerTrigger(componentId, namespaceId, hostElement, name, metadata) {
const cacheKey = componentId + '-' + name;
let trigger = this._triggerCache[cacheKey];
if (!trigger) {
const errors = [];
const ast = buildAnimationAst(this._driver, metadata, errors);
if (errors.length) {
throw new Error(`The animation trigger "${name}" has failed to build due to the following errors:\n - ${errors.join('\n - ')}`);
}
trigger = buildTrigger(name, ast, this._normalizer);
this._triggerCache[cacheKey] = trigger;
}
this._transitionEngine.registerTrigger(namespaceId, name, trigger);
}
register(namespaceId, hostElement) {
this._transitionEngine.register(namespaceId, hostElement);
}
destroy(namespaceId, context) {
this._transitionEngine.destroy(namespaceId, context);
}
onInsert(namespaceId, element, parent, insertBefore) {
this._transitionEngine.insertNode(namespaceId, element, parent, insertBefore);
}
onRemove(namespaceId, element, context, isHostElement) {
this._transitionEngine.removeNode(namespaceId, element, isHostElement || false, context);
}
disableAnimations(element, disable) {
this._transitionEngine.markElementAsDisabled(element, disable);
}
process(namespaceId, element, property, value) {
if (property.charAt(0) == '@') {
const [id, action] = parseTimelineCommand(property);
const args = value;
this._timelineEngine.command(id, element, action, args);
}
else {
this._transitionEngine.trigger(namespaceId, element, property, value);
}
}
listen(namespaceId, element, eventName, eventPhase, callback) {
// @@listen
if (eventName.charAt(0) == '@') {
const [id, action] = parseTimelineCommand(eventName);
return this._timelineEngine.listen(id, element, action, callback);
}
return this._transitionEngine.listen(namespaceId, element, eventName, eventPhase, callback);
}
flush(microtaskId = -1) {
this._transitionEngine.flush(microtaskId);
}
get players() {
return this._transitionEngine.players
.concat(this._timelineEngine.players);
}
whenRenderingDone() {
return this._transitionEngine.whenRenderingDone();
}
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* Returns an instance of `SpecialCasedStyles` if and when any special (non animateable) styles are
* detected.
*
* In CSS there exist properties that cannot be animated within a keyframe animation
* (whether it be via CSS keyframes or web-animations) and the animation implementation
* will ignore them. This function is designed to detect those special cased styles and
* return a container that will be executed at the start and end of the animation.
*
* @returns an instance of `SpecialCasedStyles` if any special styles are detected otherwise `null`
*/
function packageNonAnimatableStyles(element, styles) {
let startStyles = null;
let endStyles = null;
if (Array.isArray(styles) && styles.length) {
startStyles = filterNonAnimatableStyles(styles[0]);
if (styles.length > 1) {
endStyles = filterNonAnimatableStyles(styles[styles.length - 1]);
}
}
else if (styles) {
startStyles = filterNonAnimatableStyles(styles);
}
return (startStyles || endStyles) ? new SpecialCasedStyles(element, startStyles, endStyles) :
null;
}
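// Illustrative example (assumed usage; not from the original bundle): with
// hypothetical keyframes such as
//
//   packageNonAnimatableStyles(element, [
//     { display: 'block', opacity: 0 },
//     { opacity: 1 },
//   ]);
//
// only `display: 'block'` survives filterNonAnimatableStyles, so the call
// returns a SpecialCasedStyles instance that applies `display: block` when
// start() runs and restores the element's initial styles once destroy() is
// called; keyframes with only animatable properties return null instead.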
/**
* Designed to be executed during a keyframe-based animation to apply any special-cased styles.
*
* When started (when the `start()` method is run) then the provided `startStyles`
* will be applied. When finished (when the `finish()` method is called) the
 * `endStyles` will be applied, as well as any starting styles. Finally when
* `destroy()` is called then all styles will be removed.
*/
class SpecialCasedStyles {
constructor(_element, _startStyles, _endStyles) {
this._element = _element;
this._startStyles = _startStyles;
this._endStyles = _endStyles;
this._state = 0 /* Pending */;
let initialStyles = SpecialCasedStyles.initialStylesByElement.get(_element);
if (!initialStyles) {
SpecialCasedStyles.initialStylesByElement.set(_element, initialStyles = {});
}
this._initialStyles = initialStyles;
}
start() {
if (this._state < 1 /* Started */) {
if (this._startStyles) {
setStyles(this._element, this._startStyles, this._initialStyles);
}
this._state = 1 /* Started */;
}
}
finish() {
this.start();
if (this._state < 2 /* Finished */) {
setStyles(this._element, this._initialStyles);
if (this._endStyles) {
setStyles(this._element, this._endStyles);
this._endStyles = null;
}
this._state = 1 /* Started */;
}
}
destroy() {
this.finish();
if (this._state < 3 /* Destroyed */) {
SpecialCasedStyles.initialStylesByElement.delete(this._element);
if (this._startStyles) {
eraseStyles(this._element, this._startStyles);
this._endStyles = null;
}
if (this._endStyles) {
eraseStyles(this._element, this._endStyles);
this._endStyles = null;
}
setStyles(this._element, this._initialStyles);
this._state = 3 /* Destroyed */;
}
}
}
SpecialCasedStyles.initialStylesByElement = ( /* @__PURE__ */new WeakMap());
function filterNonAnimatableStyles(styles) {
let result = null;
const props = Object.keys(styles);
for (let i = 0; i < props.length; i++) {
const prop = props[i];
if (isNonAnimatableStyle(prop)) {
result = result || {};
result[prop] = styles[prop];
}
}
return result;
}
function isNonAnimatableStyle(prop) {
return prop === 'display' || prop === 'position';
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const ELAPSED_TIME_MAX_DECIMAL_PLACES = 3;
const ANIMATION_PROP = 'animation';
const ANIMATIONEND_EVENT = 'animationend';
const ONE_SECOND$1 = 1000;
class ElementAnimationStyleHandler {
constructor(_element, _name, _duration, _delay, _easing, _fillMode, _onDoneFn) {
this._element = _element;
this._name = _name;
this._duration = _duration;
this._delay = _delay;
this._easing = _easing;
this._fillMode = _fillMode;
this._onDoneFn = _onDoneFn;
this._finished = false;
this._destroyed = false;
this._startTime = 0;
this._position = 0;
this._eventFn = (e) => this._handleCallback(e);
}
apply() {
applyKeyframeAnimation(this._element, `${this._duration}ms ${this._easing} ${this._delay}ms 1 normal ${this._fillMode} ${this._name}`);
addRemoveAnimationEvent(this._element, this._eventFn, false);
this._startTime = Date.now();
}
pause() {
playPauseAnimation(this._element, this._name, 'paused');
}
resume() {
playPauseAnimation(this._element, this._name, 'running');
}
setPosition(position) {
const index = findIndexForAnimation(this._element, this._name);
this._position = position * this._duration;
setAnimationStyle(this._element, 'Delay', `-${this._position}ms`, index);
}
getPosition() {
return this._position;
}
_handleCallback(event) {
const timestamp = event._ngTestManualTimestamp || Date.now();
const elapsedTime = parseFloat(event.elapsedTime.toFixed(ELAPSED_TIME_MAX_DECIMAL_PLACES)) * ONE_SECOND$1;
if (event.animationName == this._name &&
Math.max(timestamp - this._startTime, 0) >= this._delay && elapsedTime >= this._duration) {
this.finish();
}
}
finish() {
if (this._finished)
return;
this._finished = true;
this._onDoneFn();
addRemoveAnimationEvent(this._element, this._eventFn, true);
}
destroy() {
if (this._destroyed)
return;
this._destroyed = true;
this.finish();
removeKeyframeAnimation(this._element, this._name);
}
}
function playPauseAnimation(element, name, status) {
const index = findIndexForAnimation(element, name);
setAnimationStyle(element, 'PlayState', status, index);
}
function applyKeyframeAnimation(element, value) {
const anim = getAnimationStyle(element, '').trim();
let index = 0;
if (anim.length) {
index = countChars(anim, ',') + 1;
value = `${anim}, ${value}`;
}
setAnimationStyle(element, '', value);
return index;
}
function removeKeyframeAnimation(element, name) {
const anim = getAnimationStyle(element, '');
const tokens = anim.split(',');
const index = findMatchingTokenIndex(tokens, name);
if (index >= 0) {
tokens.splice(index, 1);
const newValue = tokens.join(',');
setAnimationStyle(element, '', newValue);
}
}
function findIndexForAnimation(element, value) {
const anim = getAnimationStyle(element, '');
if (anim.indexOf(',') > 0) {
const tokens = anim.split(',');
return findMatchingTokenIndex(tokens, value);
}
return findMatchingTokenIndex([anim], value);
}
function findMatchingTokenIndex(tokens, searchToken) {
for (let i = 0; i < tokens.length; i++) {
if (tokens[i].indexOf(searchToken) >= 0) {
return i;
}
}
return -1;
}
function addRemoveAnimationEvent(element, fn, doRemove) {
doRemove ? element.removeEventListener(ANIMATIONEND_EVENT, fn) :
element.addEventListener(ANIMATIONEND_EVENT, fn);
}
function setAnimationStyle(element, name, value, index) {
const prop = ANIMATION_PROP + name;
if (index != null) {
const oldValue = element.style[prop];
if (oldValue.length) {
const tokens = oldValue.split(',');
tokens[index] = value;
value = tokens.join(',');
}
}
element.style[prop] = value;
}
function getAnimationStyle(element, name) {
return element.style[ANIMATION_PROP + name] || '';
}
function countChars(value, char) {
let count = 0;
for (let i = 0; i < value.length; i++) {
const c = value.charAt(i);
if (c === char)
count++;
}
return count;
}
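// Illustrative example (assumed usage; not from the original bundle): these
// helpers treat the element's `animation` shorthand as a comma-separated list
// of entries, one per keyframe animation. With a hypothetical element `el`:
//
//   applyKeyframeAnimation(el, '500ms ease 0ms 1 normal forwards gen_css_kf_7');
//   // appends the entry to any animations already on el.style.animation
//   playPauseAnimation(el, 'gen_css_kf_7', 'paused');
//   // updates animationPlayState only for the slot whose entry matches the name
//   removeKeyframeAnimation(el, 'gen_css_kf_7');
//   // splices that entry back out of the shorthand
//
// findMatchingTokenIndex does a substring match on the animation name, which
// is why the generated names below are prefixed and numbered to stay unique.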
const DEFAULT_FILL_MODE = 'forwards';
const DEFAULT_EASING = 'linear';
class CssKeyframesPlayer {
constructor(element, keyframes, animationName, _duration, _delay, easing, _finalStyles, _specialStyles) {
this.element = element;
this.keyframes = keyframes;
this.animationName = animationName;
this._duration = _duration;
this._delay = _delay;
this._finalStyles = _finalStyles;
this._specialStyles = _specialStyles;
this._onDoneFns = [];
this._onStartFns = [];
this._onDestroyFns = [];
this.currentSnapshot = {};
this._state = 0;
this.easing = easing || DEFAULT_EASING;
this.totalTime = _duration + _delay;
this._buildStyler();
}
onStart(fn) {
this._onStartFns.push(fn);
}
onDone(fn) {
this._onDoneFns.push(fn);
}
onDestroy(fn) {
this._onDestroyFns.push(fn);
}
destroy() {
this.init();
if (this._state >= 4 /* DESTROYED */)
return;
this._state = 4 /* DESTROYED */;
this._styler.destroy();
this._flushStartFns();
this._flushDoneFns();
if (this._specialStyles) {
this._specialStyles.destroy();
}
this._onDestroyFns.forEach(fn => fn());
this._onDestroyFns = [];
}
_flushDoneFns() {
this._onDoneFns.forEach(fn => fn());
this._onDoneFns = [];
}
_flushStartFns() {
this._onStartFns.forEach(fn => fn());
this._onStartFns = [];
}
finish() {
this.init();
if (this._state >= 3 /* FINISHED */)
return;
this._state = 3 /* FINISHED */;
this._styler.finish();
this._flushStartFns();
if (this._specialStyles) {
this._specialStyles.finish();
}
this._flushDoneFns();
}
setPosition(value) {
this._styler.setPosition(value);
}
getPosition() {
return this._styler.getPosition();
}
hasStarted() {
return this._state >= 2 /* STARTED */;
}
init() {
if (this._state >= 1 /* INITIALIZED */)
return;
this._state = 1 /* INITIALIZED */;
const elm = this.element;
this._styler.apply();
if (this._delay) {
this._styler.pause();
}
}
play() {
this.init();
if (!this.hasStarted()) {
this._flushStartFns();
this._state = 2 /* STARTED */;
if (this._specialStyles) {
this._specialStyles.start();
}
}
this._styler.resume();
}
pause() {
this.init();
this._styler.pause();
}
restart() {
this.reset();
this.play();
}
reset() {
this._state = 0 /* RESET */;
this._styler.destroy();
this._buildStyler();
this._styler.apply();
}
_buildStyler() {
this._styler = new ElementAnimationStyleHandler(this.element, this.animationName, this._duration, this._delay, this.easing, DEFAULT_FILL_MODE, () => this.finish());
}
/** @internal */
triggerCallback(phaseName) {
const methods = phaseName == 'start' ? this._onStartFns : this._onDoneFns;
methods.forEach(fn => fn());
methods.length = 0;
}
beforeDestroy() {
this.init();
const styles = {};
if (this.hasStarted()) {
const finished = this._state >= 3 /* FINISHED */;
Object.keys(this._finalStyles).forEach(prop => {
if (prop != 'offset') {
styles[prop] = finished ? this._finalStyles[prop] : computeStyle(this.element, prop);
}
});
}
this.currentSnapshot = styles;
}
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
class DirectStylePlayer extends NoopAnimationPlayer {
constructor(element, styles) {
super();
this.element = element;
this._startingStyles = {};
this.__initialized = false;
this._styles = hypenatePropsObject(styles);
}
init() {
if (this.__initialized || !this._startingStyles)
return;
this.__initialized = true;
Object.keys(this._styles).forEach(prop => {
this._startingStyles[prop] = this.element.style[prop];
});
super.init();
}
play() {
if (!this._startingStyles)
return;
this.init();
Object.keys(this._styles)
.forEach(prop => this.element.style.setProperty(prop, this._styles[prop]));
super.play();
}
destroy() {
if (!this._startingStyles)
return;
Object.keys(this._startingStyles).forEach(prop => {
const value = this._startingStyles[prop];
if (value) {
this.element.style.setProperty(prop, value);
}
else {
this.element.style.removeProperty(prop);
}
});
this._startingStyles = null;
super.destroy();
}
}
const KEYFRAMES_NAME_PREFIX = 'gen_css_kf_';
const TAB_SPACE = ' ';
class CssKeyframesDriver {
constructor() {
this._count = 0;
}
validateStyleProperty(prop) {
return validateStyleProperty(prop);
}
matchesElement(element, selector) {
return matchesElement(element, selector);
}
containsElement(elm1, elm2) {
return containsElement(elm1, elm2);
}
query(element, selector, multi) {
return invokeQuery(element, selector, multi);
}
computeStyle(element, prop, defaultValue) {
return window.getComputedStyle(element)[prop];
}
buildKeyframeElement(element, name, keyframes) {
keyframes = keyframes.map(kf => hypenatePropsObject(kf));
let keyframeStr = `@keyframes ${name} {\n`;
let tab = '';
keyframes.forEach(kf => {
tab = TAB_SPACE;
const offset = parseFloat(kf['offset']);
keyframeStr += `${tab}${offset * 100}% {\n`;
tab += TAB_SPACE;
Object.keys(kf).forEach(prop => {
const value = kf[prop];
switch (prop) {
case 'offset':
return;
case 'easing':
if (value) {
keyframeStr += `${tab}animation-timing-function: ${value};\n`;
}
return;
default:
keyframeStr += `${tab}${prop}: ${value};\n`;
return;
}
});
keyframeStr += `${tab}}\n`;
});
keyframeStr += `}\n`;
const kfElm = document.createElement('style');
kfElm.textContent = keyframeStr;
return kfElm;
}
animate(element, keyframes, duration, delay, easing, previousPlayers = [], scrubberAccessRequested) {
if ((typeof ngDevMode === 'undefined' || ngDevMode) && scrubberAccessRequested) {
notifyFaultyScrubber();
}
const previousCssKeyframePlayers = previousPlayers.filter(player => player instanceof CssKeyframesPlayer);
const previousStyles = {};
if (allowPreviousPlayerStylesMerge(duration, delay)) {
previousCssKeyframePlayers.forEach(player => {
let styles = player.currentSnapshot;
Object.keys(styles).forEach(prop => previousStyles[prop] = styles[prop]);
});
}
keyframes = balancePreviousStylesIntoKeyframes(element, keyframes, previousStyles);
const finalStyles = flattenKeyframesIntoStyles(keyframes);
// if there is no animation then there is no point in applying
// styles and waiting for an event to get fired. This causes lag.
// It's better to just directly apply the styles to the element
// via the direct styling animation player.
if (duration == 0) {
return new DirectStylePlayer(element, finalStyles);
}
const animationName = `${KEYFRAMES_NAME_PREFIX}${this._count++}`;
const kfElm = this.buildKeyframeElement(element, animationName, keyframes);
const nodeToAppendKfElm = findNodeToAppendKeyframeElement(element);
nodeToAppendKfElm.appendChild(kfElm);
const specialStyles = packageNonAnimatableStyles(element, keyframes);
const player = new CssKeyframesPlayer(element, keyframes, animationName, duration, delay, easing, finalStyles, specialStyles);
player.onDestroy(() => removeElement(kfElm));
return player;
}
}
function findNodeToAppendKeyframeElement(element) {
var _a;
const rootNode = (_a = element.getRootNode) === null || _a === void 0 ? void 0 : _a.call(element);
if (typeof ShadowRoot !== 'undefined' && rootNode instanceof ShadowRoot) {
return rootNode;
}
return document.head;
}
function flattenKeyframesIntoStyles(keyframes) {
let flatKeyframes = {};
if (keyframes) {
const kfs = Array.isArray(keyframes) ? keyframes : [keyframes];
kfs.forEach(kf => {
Object.keys(kf).forEach(prop => {
if (prop == 'offset' || prop == 'easing')
return;
flatKeyframes[prop] = kf[prop];
});
});
}
return flatKeyframes;
}
function removeElement(node) {
node.parentNode.removeChild(node);
}
let warningIssued = false;
function notifyFaultyScrubber() {
if (warningIssued)
return;
console.warn('@angular/animations: please load the web-animations.js polyfill to allow programmatic access...\n', ' visit https://bit.ly/IWukam to learn more about using the web-animation-js polyfill.');
warningIssued = true;
}
class WebAnimationsPlayer {
constructor(element, keyframes, options, _specialStyles) {
this.element = element;
this.keyframes = keyframes;
this.options = options;
this._specialStyles = _specialStyles;
this._onDoneFns = [];
this._onStartFns = [];
this._onDestroyFns = [];
this._initialized = false;
this._finished = false;
this._started = false;
this._destroyed = false;
this.time = 0;
this.parentPlayer = null;
this.currentSnapshot = {};
this._duration = options['duration'];
this._delay = options['delay'] || 0;
this.time = this._duration + this._delay;
}
_onFinish() {
if (!this._finished) {
this._finished = true;
this._onDoneFns.forEach(fn => fn());
this._onDoneFns = [];
}
}
init() {
this._buildPlayer();
this._preparePlayerBeforeStart();
}
_buildPlayer() {
if (this._initialized)
return;
this._initialized = true;
const keyframes = this.keyframes;
this.domPlayer =
this._triggerWebAnimation(this.element, keyframes, this.options);
this._finalKeyframe = keyframes.length ? keyframes[keyframes.length - 1] : {};
this.domPlayer.addEventListener('finish', () => this._onFinish());
}
_preparePlayerBeforeStart() {
// this is required so that the player doesn't start to animate right away
if (this._delay) {
this._resetDomPlayerState();
}
else {
this.domPlayer.pause();
}
}
/** @internal */
_triggerWebAnimation(element, keyframes, options) {
// jscompiler doesn't seem to know animate is a native property because it's not fully
// supported yet across common browsers (we polyfill it for Edge/Safari) [CL #143630929]
return element['animate'](keyframes, options);
}
onStart(fn) {
this._onStartFns.push(fn);
}
onDone(fn) {
this._onDoneFns.push(fn);
}
onDestroy(fn) {
this._onDestroyFns.push(fn);
}
play() {
this._buildPlayer();
if (!this.hasStarted()) {
this._onStartFns.forEach(fn => fn());
this._onStartFns = [];
this._started = true;
if (this._specialStyles) {
this._specialStyles.start();
}
}
this.domPlayer.play();
}
pause() {
this.init();
this.domPlayer.pause();
}
finish() {
this.init();
if (this._specialStyles) {
this._specialStyles.finish();
}
this._onFinish();
this.domPlayer.finish();
}
reset() {
this._resetDomPlayerState();
this._destroyed = false;
this._finished = false;
this._started = false;
}
_resetDomPlayerState() {
if (this.domPlayer) {
this.domPlayer.cancel();
}
}
restart() {
this.reset();
this.play();
}
hasStarted() {
return this._started;
}
destroy() {
if (!this._destroyed) {
this._destroyed = true;
this._resetDomPlayerState();
this._onFinish();
if (this._specialStyles) {
this._specialStyles.destroy();
}
this._onDestroyFns.forEach(fn => fn());
this._onDestroyFns = [];
}
}
setPosition(p) {
if (this.domPlayer === undefined) {
this.init();
}
this.domPlayer.currentTime = p * this.time;
}
getPosition() {
return this.domPlayer.currentTime / this.time;
}
get totalTime() {
return this._delay + this._duration;
}
beforeDestroy() {
const styles = {};
if (this.hasStarted()) {
Object.keys(this._finalKeyframe).forEach(prop => {
if (prop != 'offset') {
styles[prop] =
this._finished ? this._finalKeyframe[prop] : computeStyle(this.element, prop);
}
});
}
this.currentSnapshot = styles;
}
/** @internal */
triggerCallback(phaseName) {
const methods = phaseName == 'start' ? this._onStartFns : this._onDoneFns;
methods.forEach(fn => fn());
methods.length = 0;
}
}
class WebAnimationsDriver {
constructor() {
this._isNativeImpl = /\{\s*\[native\s+code\]\s*\}/.test(getElementAnimateFn().toString());
this._cssKeyframesDriver = new CssKeyframesDriver();
}
validateStyleProperty(prop) {
return validateStyleProperty(prop);
}
matchesElement(element, selector) {
return matchesElement(element, selector);
}
containsElement(elm1, elm2) {
return containsElement(elm1, elm2);
}
query(element, selector, multi) {
return invokeQuery(element, selector, multi);
}
computeStyle(element, prop, defaultValue) {
return window.getComputedStyle(element)[prop];
}
overrideWebAnimationsSupport(supported) {
this._isNativeImpl = supported;
}
animate(element, keyframes, duration, delay, easing, previousPlayers = [], scrubberAccessRequested) {
const useKeyframes = !scrubberAccessRequested && !this._isNativeImpl;
if (useKeyframes) {
return this._cssKeyframesDriver.animate(element, keyframes, duration, delay, easing, previousPlayers);
}
const fill = delay == 0 ? 'both' : 'forwards';
const playerOptions = { duration, delay, fill };
// we check for this to avoid having a null|undefined value be present
// for the easing (which results in an error for certain browsers #9752)
if (easing) {
playerOptions['easing'] = easing;
}
const previousStyles = {};
const previousWebAnimationPlayers = previousPlayers.filter(player => player instanceof WebAnimationsPlayer);
if (allowPreviousPlayerStylesMerge(duration, delay)) {
previousWebAnimationPlayers.forEach(player => {
let styles = player.currentSnapshot;
Object.keys(styles).forEach(prop => previousStyles[prop] = styles[prop]);
});
}
keyframes = keyframes.map(styles => copyStyles(styles, false));
keyframes = balancePreviousStylesIntoKeyframes(element, keyframes, previousStyles);
const specialStyles = packageNonAnimatableStyles(element, keyframes);
return new WebAnimationsPlayer(element, keyframes, playerOptions, specialStyles);
}
}
function supportsWebAnimations() {
return typeof getElementAnimateFn() === 'function';
}
function getElementAnimateFn() {
return (isBrowser() && Element.prototype['animate']) || {};
}
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* Generated bundle index. Do not edit.
*/
export { AnimationDriver, Animation as ɵAnimation, AnimationEngine as ɵAnimationEngine, AnimationStyleNormalizer as ɵAnimationStyleNormalizer, CssKeyframesDriver as ɵCssKeyframesDriver, CssKeyframesPlayer as ɵCssKeyframesPlayer, NoopAnimationDriver as ɵNoopAnimationDriver, NoopAnimationStyleNormalizer as ɵNoopAnimationStyleNormalizer, WebAnimationsDriver as ɵWebAnimationsDriver, WebAnimationsPlayer as ɵWebAnimationsPlayer, WebAnimationsStyleNormalizer as ɵWebAnimationsStyleNormalizer, allowPreviousPlayerStylesMerge as ɵallowPreviousPlayerStylesMerge, SpecialCasedStyles as ɵangular_packages_animations_browser_browser_a, containsElement as ɵcontainsElement, invokeQuery as ɵinvokeQuery, matchesElement as ɵmatchesElement, supportsWebAnimations as ɵsupportsWebAnimations, validateStyleProperty as ɵvalidateStyleProperty };
//# sourceMappingURL=browser.js.map
raftkv.rs
use engine_rocks::{RocksEngine, RocksSnapshot, RocksTablePropertiesCollection};
use engine_traits::CfName;
use engine_traits::IterOptions;
use engine_traits::CF_DEFAULT;
use engine_traits::{Peekable, TablePropertiesExt};
use kvproto::errorpb;
use kvproto::kvrpcpb::Context;
use kvproto::raft_cmdpb::{
CmdType, DeleteRangeRequest, DeleteRequest, PutRequest, RaftCmdRequest, RaftCmdResponse,
RaftRequestHeader, Request, Response,
};
use std::fmt::{self, Debug, Display, Formatter};
use std::io::Error as IoError;
use std::result;
use std::time::Duration;
use txn_types::{Key, Value};
use super::metrics::*;
use crate::storage::kv::{
Callback, CbContext, Cursor, Engine, Error as KvError, ErrorInner as KvErrorInner,
Iterator as EngineIterator, Modify, ScanMode, Snapshot, WriteData,
};
use crate::storage::{self, kv};
use raftstore::errors::Error as RaftServerError;
use raftstore::router::RaftStoreRouter;
use raftstore::store::{Callback as StoreCallback, ReadResponse, WriteResponse};
use raftstore::store::{RegionIterator, RegionSnapshot};
use tikv_util::time::Instant;
quick_error! {
#[derive(Debug)]
pub enum Error {
RequestFailed(e: errorpb::Error) {
from()
display("{}", e.get_message())
}
Io(e: IoError) {
from()
cause(e)
display("{}", e)
}
Server(e: RaftServerError) {
from()
cause(e)
display("{}", e)
}
InvalidResponse(reason: String) {
display("{}", reason)
}
InvalidRequest(reason: String) {
display("{}", reason)
}
Timeout(d: Duration) {
display("timeout after {:?}", d)
}
}
}
fn get_status_kind_from_error(e: &Error) -> RequestStatusKind {
match *e {
Error::RequestFailed(ref header) => {
RequestStatusKind::from(storage::get_error_kind_from_header(header))
}
Error::Io(_) => RequestStatusKind::err_io,
Error::Server(_) => RequestStatusKind::err_server,
Error::InvalidResponse(_) => RequestStatusKind::err_invalid_resp,
Error::InvalidRequest(_) => RequestStatusKind::err_invalid_req,
Error::Timeout(_) => RequestStatusKind::err_timeout,
}
}
fn get_status_kind_from_engine_error(e: &kv::Error) -> RequestStatusKind {
match *e {
KvError(box KvErrorInner::Request(ref header)) => {
RequestStatusKind::from(storage::get_error_kind_from_header(header))
}
KvError(box KvErrorInner::Timeout(_)) => RequestStatusKind::err_timeout,
KvError(box KvErrorInner::EmptyRequest) => RequestStatusKind::err_empty_request,
KvError(box KvErrorInner::Other(_)) => RequestStatusKind::err_other,
}
}
pub type Result<T> = result::Result<T, Error>;
impl From<Error> for kv::Error {
fn from(e: Error) -> kv::Error {
match e {
Error::RequestFailed(e) => KvError::from(KvErrorInner::Request(e)),
Error::Server(e) => e.into(),
e => box_err!(e),
}
}
}
impl From<RaftServerError> for KvError {
fn from(e: RaftServerError) -> KvError {
KvError(Box::new(KvErrorInner::Request(e.into())))
}
}
/// `RaftKv` is a storage engine based on `RaftStore`.
#[derive(Clone)]
pub struct RaftKv<S: RaftStoreRouter<RocksSnapshot> + 'static> {
router: S,
engine: RocksEngine,
}
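// Illustrative sketch (assumed usage; not from the original source): a caller
// typically drives `RaftKv` through the `Engine` trait implemented below, e.g.
//
//     let modifies = vec![Modify::Put(CF_DEFAULT, Key::from_raw(b"k"), b"v".to_vec())];
//     raft_kv.async_write(&ctx, write_data, Box::new(|(_cb_ctx, res)| {
//         // res becomes Ok(()) once the proposal has been applied by the region
//     }))?;
//
// `ctx`, `raft_kv` and `write_data` (a `WriteData` built from `modifies`) are
// assumed to be set up by the caller; their exact constructors are not shown
// in this file.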
pub enum CmdRes {
Resp(Vec<Response>),
Snap(RegionSnapshot<RocksSnapshot>),
}
fn new_ctx(resp: &RaftCmdResponse) -> CbContext {
let mut cb_ctx = CbContext::new();
cb_ctx.term = Some(resp.get_header().get_current_term());
cb_ctx
}
fn check_raft_cmd_response(resp: &mut RaftCmdResponse, req_cnt: usize) -> Result<()> {
if resp.get_header().has_error() {
return Err(Error::RequestFailed(resp.take_header().take_error()));
}
if req_cnt != resp.get_responses().len() {
return Err(Error::InvalidResponse(format!(
"responses count {} is not equal to requests count {}",
resp.get_responses().len(),
req_cnt
)));
}
Ok(())
}
fn on_write_result(mut write_resp: WriteResponse, req_cnt: usize) -> (CbContext, Result<CmdRes>) {
let cb_ctx = new_ctx(&write_resp.response);
if let Err(e) = check_raft_cmd_response(&mut write_resp.response, req_cnt) {
return (cb_ctx, Err(e));
}
let resps = write_resp.response.take_responses();
(cb_ctx, Ok(CmdRes::Resp(resps.into())))
}
fn on_read_result(
mut read_resp: ReadResponse<RocksSnapshot>,
req_cnt: usize,
) -> (CbContext, Result<CmdRes>) {
// TODO(5kbpers): set ExtraOp for cb_ctx here.
let cb_ctx = new_ctx(&read_resp.response);
if let Err(e) = check_raft_cmd_response(&mut read_resp.response, req_cnt) {
return (cb_ctx, Err(e));
}
let resps = read_resp.response.take_responses();
if !resps.is_empty() || resps[0].get_cmd_type() == CmdType::Snap {
(cb_ctx, Ok(CmdRes::Snap(read_resp.snapshot.unwrap())))
} else {
(cb_ctx, Ok(CmdRes::Resp(resps.into())))
}
}
impl<S: RaftStoreRouter<RocksSnapshot>> RaftKv<S> {
/// Create a RaftKv using specified configuration.
pub fn new(router: S, engine: RocksEngine) -> RaftKv<S> {
RaftKv { router, engine }
}
fn new_request_header(&self, ctx: &Context) -> RaftRequestHeader {
let mut header = RaftRequestHeader::default();
header.set_region_id(ctx.get_region_id());
header.set_peer(ctx.get_peer().clone());
header.set_region_epoch(ctx.get_region_epoch().clone());
if ctx.get_term() != 0 {
header.set_term(ctx.get_term());
}
header.set_sync_log(ctx.get_sync_log());
header.set_replica_read(ctx.get_replica_read());
header
}
fn exec_read_requests(
&self,
ctx: &Context,
reqs: Vec<Request>,
cb: Callback<CmdRes>,
) -> Result<()> {
let len = reqs.len();
let header = self.new_request_header(ctx);
let mut cmd = RaftCmdRequest::default();
cmd.set_header(header);
cmd.set_requests(reqs.into());
self.router
.send_command(
cmd,
StoreCallback::Read(Box::new(move |resp| {
let (cb_ctx, res) = on_read_result(resp, len);
cb((cb_ctx, res.map_err(Error::into)));
})),
)
.map_err(From::from)
}
fn exec_write_requests(
&self,
ctx: &Context,
reqs: Vec<Request>,
cb: Callback<CmdRes>,
) -> Result<()> {
#[cfg(feature = "failpoints")]
{
// If rid is some, only the specified region reports error.
// If rid is None, all regions report error.
let raftkv_early_error_report_fp = || -> Result<()> {
fail_point!("raftkv_early_error_report", |rid| {
let region_id = ctx.get_region_id();
rid.and_then(|rid| {
let rid: u64 = rid.parse().unwrap();
if rid == region_id {
None
} else {
Some(())
}
})
.ok_or_else(|| RaftServerError::RegionNotFound(region_id).into())
});
Ok(())
};
raftkv_early_error_report_fp()?;
}
let len = reqs.len();
let header = self.new_request_header(ctx);
let mut cmd = RaftCmdRequest::default();
cmd.set_header(header);
cmd.set_requests(reqs.into());
self.router
.send_command(
cmd,
StoreCallback::Write(Box::new(move |resp| {
let (cb_ctx, res) = on_write_result(resp, len);
cb((cb_ctx, res.map_err(Error::into)));
})),
)
.map_err(From::from)
}
}
fn invalid_resp_type(exp: CmdType, act: CmdType) -> Error {
Error::InvalidResponse(format!(
"cmd type not match, want {:?}, got {:?}!",
exp, act
))
}
impl<S: RaftStoreRouter<RocksSnapshot>> Display for RaftKv<S> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "RaftKv")
}
}
impl<S: RaftStoreRouter<RocksSnapshot>> Debug for RaftKv<S> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "RaftKv")
}
}
impl<S: RaftStoreRouter<RocksSnapshot>> Engine for RaftKv<S> {
type Snap = RegionSnapshot<RocksSnapshot>;
fn async_write(&self, ctx: &Context, batch: WriteData, cb: Callback<()>) -> kv::Result<()> {
fail_point!("raftkv_async_write");
if batch.modifies.is_empty() {
return Err(KvError::from(KvErrorInner::EmptyRequest));
}
let mut reqs = Vec::with_capacity(batch.modifies.len());
for m in batch.modifies {
let mut req = Request::default();
match m {
Modify::Delete(cf, k) => {
let mut delete = DeleteRequest::default();
delete.set_key(k.into_encoded());
if cf != CF_DEFAULT {
delete.set_cf(cf.to_string());
}
req.set_cmd_type(CmdType::Delete);
req.set_delete(delete);
}
Modify::Put(cf, k, v) => {
let mut put = PutRequest::default();
put.set_key(k.into_encoded());
put.set_value(v);
if cf != CF_DEFAULT {
put.set_cf(cf.to_string());
}
req.set_cmd_type(CmdType::Put);
req.set_put(put);
}
Modify::DeleteRange(cf, start_key, end_key, notify_only) => {
let mut delete_range = DeleteRangeRequest::default();
delete_range.set_cf(cf.to_string());
delete_range.set_start_key(start_key.into_encoded());
delete_range.set_end_key(end_key.into_encoded());
delete_range.set_notify_only(notify_only);
req.set_cmd_type(CmdType::DeleteRange);
req.set_delete_range(delete_range);
}
}
reqs.push(req);
}
ASYNC_REQUESTS_COUNTER_VEC.write.all.inc();
let begin_instant = Instant::now_coarse();
// TODO(5kbpers): send WriteData::TxnExtra to raftstore here.
self.exec_write_requests(
ctx,
reqs,
Box::new(move |(cb_ctx, res)| match res {
Ok(CmdRes::Resp(_)) => {
ASYNC_REQUESTS_COUNTER_VEC.write.success.inc();
ASYNC_REQUESTS_DURATIONS_VEC
.write
.observe(begin_instant.elapsed_secs());
fail_point!("raftkv_async_write_finish");
cb((cb_ctx, Ok(())))
}
Ok(CmdRes::Snap(_)) => cb((
cb_ctx,
Err(box_err!("unexpect snapshot, should mutate instead.")),
)),
Err(e) => {
let status_kind = get_status_kind_from_engine_error(&e);
ASYNC_REQUESTS_COUNTER_VEC.write.get(status_kind).inc();
cb((cb_ctx, Err(e)))
}
}),
)
.map_err(|e| {
let status_kind = get_status_kind_from_error(&e);
ASYNC_REQUESTS_COUNTER_VEC.write.get(status_kind).inc();
e.into()
})
}
fn async_snapshot(&self, ctx: &Context, cb: Callback<Self::Snap>) -> kv::Result<()> {
fail_point!("raftkv_async_snapshot");
let mut req = Request::default();
req.set_cmd_type(CmdType::Snap);
ASYNC_REQUESTS_COUNTER_VEC.snapshot.all.inc();
let begin_instant = Instant::now_coarse();
self.exec_read_requests(
ctx,
vec![req],
Box::new(move |(cb_ctx, res)| match res {
Ok(CmdRes::Resp(r)) => cb((
cb_ctx,
Err(invalid_resp_type(CmdType::Snap, r[0].get_cmd_type()).into()),
)),
Ok(CmdRes::Snap(s)) => {
ASYNC_REQUESTS_DURATIONS_VEC
.snapshot
.observe(begin_instant.elapsed_secs());
ASYNC_REQUESTS_COUNTER_VEC.snapshot.success.inc();
cb((cb_ctx, Ok(s)))
}
Err(e) => {
let status_kind = get_status_kind_from_engine_error(&e);
ASYNC_REQUESTS_COUNTER_VEC.snapshot.get(status_kind).inc();
cb((cb_ctx, Err(e)))
}
}),
)
.map_err(|e| {
let status_kind = get_status_kind_from_error(&e);
ASYNC_REQUESTS_COUNTER_VEC.snapshot.get(status_kind).inc();
e.into()
})
}
fn get_properties_cf(
&self,
cf: CfName,
start: &[u8],
end: &[u8],
) -> kv::Result<RocksTablePropertiesCollection> {
let start = keys::data_key(start);
let end = keys::data_end_key(end);
self.engine
.get_range_properties_cf(cf, &start, &end)
.map_err(|e| e.into())
}
}
impl Snapshot for RegionSnapshot<RocksSnapshot> {
type Iter = RegionIterator<RocksSnapshot>;
fn get(&self, key: &Key) -> kv::Result<Option<Value>> {
fail_point!("raftkv_snapshot_get", |_| Err(box_err!(
"injected error for get"
)));
let v = box_try!(self.get_value(key.as_encoded()));
Ok(v.map(|v| v.to_vec()))
}
fn get_cf(&self, cf: CfName, key: &Key) -> kv::Result<Option<Value>> {
fail_point!("raftkv_snapshot_get_cf", |_| Err(box_err!(
"injected error for get_cf"
)));
let v = box_try!(self.get_value_cf(cf, key.as_encoded()));
Ok(v.map(|v| v.to_vec()))
}
fn iter(&self, iter_opt: IterOptions, mode: ScanMode) -> kv::Result<Cursor<Self::Iter>> {
fail_point!("raftkv_snapshot_iter", |_| Err(box_err!(
"injected error for iter"
)));
Ok(Cursor::new(RegionSnapshot::iter(self, iter_opt), mode))
}
fn iter_cf(
&self,
cf: CfName,
iter_opt: IterOptions,
mode: ScanMode,
) -> kv::Result<Cursor<Self::Iter>> {
fail_point!("raftkv_snapshot_iter_cf", |_| Err(box_err!(
"injected error for iter_cf"
)));
Ok(Cursor::new(
RegionSnapshot::iter_cf(self, cf, iter_opt)?,
mode,
))
}
#[inline]
fn lower_bound(&self) -> Option<&[u8]> {
Some(self.get_start_key())
}
#[inline]
fn upper_bound(&self) -> Option<&[u8]> {
Some(self.get_end_key())
}
#[inline]
fn get_data_version(&self) -> Option<u64> {
self.get_apply_index().ok()
}
}
impl EngineIterator for RegionIterator<RocksSnapshot> {
fn next(&mut self) -> kv::Result<bool> {
RegionIterator::next(self).map_err(KvError::from)
}
fn prev(&mut self) -> kv::Result<bool> {
RegionIterator::prev(self).map_err(KvError::from)
}
fn seek(&mut self, key: &Key) -> kv::Result<bool> {
fail_point!("raftkv_iter_seek", |_| Err(box_err!(
"injected error for iter_seek"
)));
RegionIterator::seek(self, key.as_encoded()).map_err(From::from)
}
fn seek_for_prev(&mut self, key: &Key) -> kv::Result<bool> {
fail_point!("raftkv_iter_seek_for_prev", |_| Err(box_err!(
"injected error for iter_seek_for_prev"
)));
RegionIterator::seek_for_prev(self, key.as_encoded()).map_err(From::from)
}
fn seek_to_first(&mut self) -> kv::Result<bool> {
RegionIterator::seek_to_first(self).map_err(KvError::from)
}
fn seek_to_last(&mut self) -> kv::Result<bool> {
RegionIterator::seek_to_last(self).map_err(KvError::from)
}
fn valid(&self) -> kv::Result<bool> {
RegionIterator::valid(self).map_err(KvError::from)
}
fn validate_key(&self, key: &Key) -> kv::Result<()> {
self.should_seekable(key.as_encoded()).map_err(From::from)
}
fn key(&self) -> &[u8] {
RegionIterator::key(self)
}
fn value(&self) -> &[u8] {
RegionIterator::value(self)
}
}
usym.rs
//!
//! This format can map il2cpp instruction addresses to managed file names and line numbers.
use std::borrow::Cow;
use std::error::Error;
use std::fmt;
use std::mem;
use std::ptr;
use std::str::FromStr;
use symbolic_common::Arch;
use symbolic_common::DebugId;
use thiserror::Error;
/// The error type for [`UsymError`].
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum UsymErrorKind {
/// Buffer to usym file is misaligned.
MisalignedBuffer,
/// The header to the usym file is missing or undersized.
BadHeader,
/// The magic string in the header is missing or malformed.
BadMagic,
/// The version string in the usym file's header is missing or malformed.
BadVersion, | /// according to its header.
BufferSmallerThanAdvertised,
/// The strings section is missing.
MissingStrings,
/// A valid slice to the usym's source records could not be created.
BadRecords,
/// The assembly ID is missing or can't be read.
BadId,
/// The assembly name is missing or can't be read.
BadName,
/// The operating system is missing or can't be read.
BadOperatingSystem,
/// The architecture is missing or can't be read.
BadArchitecture,
/// A part of the file is not encoded in valid UTF-8.
BadEncoding,
}
impl fmt::Display for UsymErrorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
UsymErrorKind::MisalignedBuffer => write!(f, "misaligned pointer to buffer"),
UsymErrorKind::BadHeader => write!(f, "missing or undersized header"),
UsymErrorKind::BadMagic => write!(f, "missing or wrong usym magic bytes"),
UsymErrorKind::BadVersion => write!(f, "missing or wrong version number"),
UsymErrorKind::BadRecordCount => write!(f, "unreadable record count"),
UsymErrorKind::BufferSmallerThanAdvertised => {
write!(f, "buffer does not contain all data header claims it has")
}
UsymErrorKind::MissingStrings => write!(f, "strings section is missing"),
UsymErrorKind::BadRecords => write!(f, "could not construct list of source records"),
UsymErrorKind::BadId => write!(f, "assembly ID is missing or unreadable"),
UsymErrorKind::BadName => write!(f, "assembly name is missing or unreadable"),
UsymErrorKind::BadOperatingSystem => {
write!(f, "operating system is missing or unreadable")
}
UsymErrorKind::BadArchitecture => write!(f, "architecture is missing or unreadable"),
UsymErrorKind::BadEncoding => {
write!(f, "part of the file is not encoded in valid UTF-8")
}
}
}
}
/// An error when dealing with [`UsymSymbols`].
#[derive(Debug, Error)]
#[error("{kind}")]
pub struct UsymError {
kind: UsymErrorKind,
#[source]
source: Option<Box<dyn Error + Send + Sync + 'static>>,
}
impl UsymError {
/// Creates a new [`UsymError`] from a [`UsymErrorKind`] and an arbitrary source error payload.
fn new<E>(kind: UsymErrorKind, source: E) -> Self
where
E: Into<Box<dyn Error + Send + Sync>>,
{
let source = Some(source.into());
Self { kind, source }
}
/// Returns the corresponding [`UsymErrorKind`] for this error.
pub fn kind(&self) -> UsymErrorKind {
self.kind
}
}
impl From<UsymErrorKind> for UsymError {
fn from(kind: UsymErrorKind) -> Self {
Self { kind, source: None }
}
}
// TODO: consider introducing newtype for strings section offsets and the strings section itself
/// The raw C structures.
mod raw {
/// The header of the usym file format.
#[derive(Debug, Clone)]
#[repr(C)]
pub(super) struct Header {
/// Magic number identifying the file, `b"usym"`.
pub(super) magic: u32,
/// Version of the usym file format.
pub(super) version: u32,
/// Number of [`UsymRecord`] entries.
///
/// These follow right after the header, and after them is the strings section.
pub(super) record_count: u32,
/// UUID of the assembly, as an offset into the strings section.
pub(super) id: u32,
/// Name of the "assembly", as an offset into the strings section.
pub(super) name: u32,
/// Name of OS, as an offset into the strings section.
pub(super) os: u32,
/// Name of architecture, as an offset into the strings section.
pub(super) arch: u32,
}
/// A record mapping an IL2CPP instruction address to a managed code location.
///
/// This is the raw record as it appears in the file, see [`UsymRecord`] for a record with
/// the names resolved.
#[derive(Debug, Clone, Copy)]
#[repr(C, packed)]
pub(super) struct SourceRecord {
/// Instruction pointer address, relative to base address of assembly.
pub(super) address: u64,
/// Native symbol name, as an offset into the strings section.
pub(super) native_symbol: u32,
/// Native source file, as an offset into the strings section.
pub(super) native_file: u32,
/// Native line number.
pub(super) native_line: u32,
/// Managed code symbol name, as an offset into the strings section.
///
/// Most of the time, this is 0 if the record does not map to managed code. We haven't seen
/// this happen yet, but it's possible that a nonzero offset may lead to an empty string,
/// meaning that there is no managed symbol for this record.
pub(super) managed_symbol: u32,
/// Managed code file name, as an offset into the strings section.
///
/// Most of the time, this is 0 if code does not map to managed code. We haven't seen this
/// happen yet, but it's possible that a nonzero offset may lead to an empty string,
/// meaning that there is no managed file for this record.
pub(super) managed_file: u32,
/// Managed code line number. This is 0 if the record does not map to any managed code.
pub(super) managed_line: u32,
pub(super) _unknown: u32,
}
}
/// A record mapping an IL2CPP instruction address to managed code location.
///
/// Not all native code maps back to managed code, for those records the managed info will
/// be `None`.
#[derive(Debug, Clone)]
pub struct UsymSourceRecord<'a> {
/// Instruction pointer address, relative to the base of the assembly.
pub address: u64,
/// Symbol name of the native code.
pub native_symbol: Cow<'a, str>,
/// File name of the native code.
pub native_file: Cow<'a, str>,
/// Line number of the native code.
pub native_line: u32,
/// Symbol name of the managed code.
pub managed_symbol: Option<Cow<'a, str>>,
/// File name of the managed code.
pub managed_file: Option<Cow<'a, str>>,
/// Line number of the managed code.
pub managed_line: Option<u32>,
}
/// A usym file containing data on how to map native code generated by Unity's IL2CPP back to their
/// C# (i.e. managed) equivalents.
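///
/// A minimal usage sketch; the file name and error-propagation context below are
/// illustrative assumptions, not part of this file:
///
/// ```ignore
/// let file = std::fs::File::open("UnityFramework.usym")?;
/// // Memory-map the file so the buffer satisfies the alignment check in `parse`.
/// let data = symbolic_common::ByteView::map_file_ref(&file)?;
/// let usyms = UsymSymbols::parse(&data)?;
/// // Look up the managed source location for a relative instruction address.
/// if let Some(record) = usyms.lookup_source_record(0x1234) {
///     println!("{:?}:{:?}", record.managed_file, record.managed_line);
/// }
/// ```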
pub struct UsymSymbols<'a> {
/// File header.
header: &'a raw::Header,
/// Instruction address to managed code mapping records.
records: &'a [raw::SourceRecord],
/// All the strings.
///
/// This is not a traditional strings table, but rather a large slice of bytes with
/// length-prefixed strings where the length is a little-endian u16. The header and records
/// refer to strings by byte offsets into this slice of bytes, which must fall on the
/// length-prefixed part of the string.
strings: &'a [u8],
/// The ID of the assembly.
id: &'a str,
/// The name of the assembly.
name: &'a str,
/// The operating system.
os: &'a str,
/// The architecture.
arch: &'a str,
}
impl<'a> UsymSymbols<'a> {
const MAGIC: &'static [u8] = b"usym";
/// Parse a usym file.
///
/// # Panics
///
/// If `std::mem::size_of::<usize>()` is smaller than `std::mem::size_of::<u32>()` on
/// the machine being run on.
pub fn parse(buf: &'a [u8]) -> Result<UsymSymbols<'a>, UsymError> {
if buf.as_ptr().align_offset(8) != 0 {
return Err(UsymErrorKind::MisalignedBuffer.into());
}
if buf.len() < mem::size_of::<raw::Header>() {
return Err(UsymErrorKind::BadHeader.into());
}
if buf.get(..Self::MAGIC.len()) != Some(Self::MAGIC) {
return Err(UsymErrorKind::BadMagic.into());
}
// SAFETY: We checked the buffer is large enough above.
let header = unsafe { &*(buf.as_ptr() as *const raw::Header) };
if header.version != 2 {
return Err(UsymErrorKind::BadVersion.into());
}
let record_count: usize = header
.record_count
.try_into()
.map_err(|e| UsymError::new(UsymErrorKind::BadRecordCount, e))?;
// TODO: consider trying to just grab the records and give up on their strings if something
// is wrong with the strings section
let strings_offset =
mem::size_of::<raw::Header>() + record_count * mem::size_of::<raw::SourceRecord>();
if buf.len() < strings_offset {
return Err(UsymErrorKind::BufferSmallerThanAdvertised.into());
}
// SAFETY: We checked the buffer is at least the size_of::<UsymHeader>() above.
let first_record_ptr = unsafe { buf.as_ptr().add(mem::size_of::<raw::Header>()) };
// SAFETY: We checked the buffer has enough space for all the source records above.
let records = unsafe {
let first_record_ptr: *const raw::SourceRecord = first_record_ptr.cast();
let records_ptr = ptr::slice_from_raw_parts(first_record_ptr, record_count);
records_ptr
.as_ref()
.ok_or_else(|| UsymError::from(UsymErrorKind::BadRecords))
}?;
let strings = buf
.get(strings_offset..)
.ok_or_else(|| UsymError::from(UsymErrorKind::MissingStrings))?;
let id_offset = header.id.try_into().unwrap();
let id = match Self::get_string_from_offset(strings, id_offset)
.ok_or_else(|| UsymError::from(UsymErrorKind::BadId))?
{
Cow::Borrowed(id) => id,
Cow::Owned(_) => return Err(UsymErrorKind::BadEncoding.into()),
};
let name_offset = header.name.try_into().unwrap();
let name = match Self::get_string_from_offset(strings, name_offset)
.ok_or_else(|| UsymError::from(UsymErrorKind::BadName))?
{
Cow::Borrowed(name) => name,
Cow::Owned(_) => return Err(UsymErrorKind::BadEncoding.into()),
};
let os_offset = header.os.try_into().unwrap();
let os = match Self::get_string_from_offset(strings, os_offset)
.ok_or_else(|| UsymError::from(UsymErrorKind::BadOperatingSystem))?
{
Cow::Borrowed(name) => name,
Cow::Owned(_) => return Err(UsymErrorKind::BadEncoding.into()),
};
let arch_offset = header.arch.try_into().unwrap();
let arch = match Self::get_string_from_offset(strings, arch_offset)
.ok_or_else(|| UsymError::from(UsymErrorKind::BadArchitecture))?
{
Cow::Borrowed(name) => name,
Cow::Owned(_) => return Err(UsymErrorKind::BadEncoding.into()),
};
// accumulate and store all of the errors that don't completely block parsing
// - bad encoding
// - missing sys info fields
Ok(Self {
header,
records,
strings,
id,
name,
os,
arch,
})
}
/// Returns the version of the usym file these symbols were read from.
pub fn version(&self) -> u32 {
self.header.version
}
fn get_string_from_offset(data: &[u8], offset: usize) -> Option<Cow<str>> {
let size_bytes = data.get(offset..offset + 2)?;
let size: usize = u16::from_le_bytes([size_bytes[0], size_bytes[1]]).into();
let start_offset = offset + 2;
let end_offset = start_offset + size;
let string_bytes = data.get(start_offset..end_offset)?;
Some(String::from_utf8_lossy(string_bytes))
}
/// Returns a string from the strings section at the given offset.
///
/// Offsets are as provided by some [`raw::Header`] and [`raw::SourceRecord`] fields.
fn get_string(&self, offset: usize) -> Option<Cow<'a, str>> {
Self::get_string_from_offset(self.strings, offset)
}
/// The ID of the assembly.
///
/// This should match the ID of the debug symbols.
pub fn id(&self) -> Result<DebugId, UsymError> {
DebugId::from_str(self.id).map_err(|e| UsymError::new(UsymErrorKind::BadId, e))
}
/// The name of the assembly.
pub fn name(&self) -> &str {
self.name
}
/// The Operating System name.
pub fn os(&self) -> &str {
self.os
}
/// The architecture.
pub fn arch(&self) -> Result<Arch, UsymError> {
Arch::from_str(self.arch).map_err(|e| UsymError::new(UsymErrorKind::BadArchitecture, e))
}
/// Returns the [`UsymSourceRecord`] stored at the given index.
///
/// Not that useful, you have no idea what index you want.
pub fn get_record(&self, index: usize) -> Option<UsymSourceRecord> {
let raw = self.records.get(index)?;
let native_symbol = self.get_string(raw.native_symbol.try_into().unwrap())?;
let native_file = self.get_string(raw.native_file.try_into().unwrap())?;
let managed_symbol = self.get_string(raw.managed_symbol.try_into().unwrap())?;
let managed_symbol = match managed_symbol.is_empty() {
true => None,
false => Some(managed_symbol),
};
if managed_symbol.is_none() && raw.managed_symbol > 0 {
println!("A managed symbol with a >0 offset into the string table points to an empty string. We normally expect empty strings to have an offset of 0.");
println!("Native entry: {}::{}", native_file, native_symbol);
}
let managed_file = self.get_string(raw.managed_file.try_into().unwrap())?;
let managed_file = match managed_file.is_empty() {
true => None,
false => Some(managed_file),
};
if managed_file.is_none() && raw.managed_file > 0 {
println!("A managed file with a >0 offset into the string table points to an empty string. We normally expect empty strings to have an offset of 0.");
println!("Native entry: {}::{}", native_file, native_symbol);
}
let managed_line = match raw.managed_line {
0 => None,
n => Some(n),
};
Some(UsymSourceRecord {
address: raw.address,
native_symbol,
native_file,
native_line: raw.native_line,
managed_symbol,
managed_file,
managed_line,
})
}
/// Lookup the managed code source location for an IL2CPP instruction pointer.
pub fn lookup_source_record(&self, ip: u64) -> Option<UsymSourceRecord> {
// TODO: need to subtract the image base to get relative address
match self.records.binary_search_by_key(&ip, |r| r.address) {
Ok(index) => self.get_record(index),
// Guard against addresses below the first record to avoid an index underflow.
Err(index) => self.get_record(index.checked_sub(1)?),
}
}
// TODO: Add iterator over records?
}
#[cfg(test)]
mod tests {
use std::fs::File;
use std::io::Write;
use symbolic_common::ByteView;
use symbolic_testutils::fixture;
use super::*;
#[test]
fn test_write_usym() {
// Not really a test but rather a quick and dirty way to write a small usym file
// given a large one. This was used to generate a small enough usym file to use as
// a test fixture, however this still tests the reader and writer can round-trip.
// let file = File::open(
// "/Users/flub/code/sentry-unity-il2cpp-line-numbers/Builds/iOS/UnityFramework.usym",
// )
// .unwrap();
let file = File::open(fixture("il2cpp/managed.usym")).unwrap();
let orig_data = ByteView::map_file_ref(&file).unwrap();
let usyms = UsymSymbols::parse(&orig_data).unwrap();
// Our strings and helper to build it by pushing new strings. We keep strings and
// strings_offsets so we can de-duplicate, raw_strings is the thing we are really
// building.
let mut strings: Vec<String> = Vec::new();
let mut raw_strings: Vec<u8> = Vec::new();
let mut string_offsets: Vec<u64> = Vec::new();
let mut push_string = |s: Cow<'_, str>| match strings.iter().position(|i| i == s.as_ref()) {
Some(pos) => string_offsets[pos],
None => {
let offset = raw_strings.len() as u64;
let len = s.len() as u16;
raw_strings.extend_from_slice(&len.to_le_bytes());
raw_strings.extend_from_slice(s.as_bytes());
strings.push(s.to_string());
string_offsets.push(offset);
offset
}
};
// The string table always starts with an entry for the empty string.
push_string(Cow::Borrowed(""));
// Construct new header.
let mut header = usyms.header.clone();
header.id = push_string(usyms.get_string(header.id as usize).unwrap()) as u32;
header.name = push_string(usyms.get_string(header.name as usize).unwrap()) as u32;
header.os = push_string(usyms.get_string(header.os as usize).unwrap()) as u32;
header.arch = push_string(usyms.get_string(header.arch as usize).unwrap()) as u32;
// Construct new records. Skims the top 5 records, then grabs the 3 records that have
// mappings to managed symbols.
header.record_count = 5 + 3;
let first_five = usyms.records.iter().take(5);
let actual_mappings = usyms.records.iter().filter(|r| r.managed_symbol != 0);
let mut records = Vec::new();
for mut record in first_five.chain(actual_mappings).cloned() {
if record.native_symbol > 0 {
record.native_symbol =
push_string(usyms.get_string(record.native_symbol as usize).unwrap()) as u32;
}
if record.native_file > 0 {
record.native_file =
push_string(usyms.get_string(record.native_file as usize).unwrap()) as u32;
}
if record.managed_symbol > 0 {
record.managed_symbol =
push_string(usyms.get_string(record.managed_symbol as usize).unwrap()) as u32;
}
if record.managed_file > 0 {
record.managed_file =
push_string(usyms.get_string(record.managed_file as usize).unwrap()) as u32;
}
records.push(record);
}
// let mut dest = File::create(fixture("il2cpp/artificial.usym")).unwrap();
let mut dest = Vec::new();
// Write the header.
let data = &[header];
let ptr = data.as_ptr() as *const u8;
let len = std::mem::size_of_val(data);
let buf = unsafe { std::slice::from_raw_parts(ptr, len) };
dest.write_all(buf).unwrap();
// Write the records.
let ptr = records.as_ptr() as *const u8;
let len = records.len() * std::mem::size_of::<raw::SourceRecord>();
let buf = unsafe { std::slice::from_raw_parts(ptr, len) };
dest.write_all(buf).unwrap();
// Write the strings.
dest.write_all(&raw_strings).unwrap();
assert_eq!(orig_data.as_ref(), dest);
}
#[test]
fn test_basic() {
let file = File::open(fixture("il2cpp/artificial.usym")).unwrap();
let data = ByteView::map_file_ref(&file).unwrap();
let usyms = UsymSymbols::parse(&data).unwrap();
assert_eq!(usyms.version(), 2);
assert_eq!(
usyms.id().unwrap(),
DebugId::from_str("153d10d10db033d6aacda4e1948da97b").unwrap()
);
assert_eq!(usyms.name(), "UnityFramework");
assert_eq!(usyms.os(), "mac");
assert_eq!(usyms.arch().unwrap(), Arch::Arm64);
for i in 0..5 {
assert!(usyms.get_record(i).is_some());
}
}
#[test]
fn test_with_managed() {
let file = File::open(fixture("il2cpp/managed.usym")).unwrap();
let data = ByteView::map_file_ref(&file).unwrap();
let usyms = UsymSymbols::parse(&data).unwrap();
assert_eq!(usyms.version(), 2);
assert_eq!(
usyms.id().unwrap(),
DebugId::from_str("153d10d10db033d6aacda4e1948da97b").unwrap()
);
assert_eq!(usyms.name(), "UnityFramework");
assert_eq!(usyms.os(), "mac");
assert_eq!(usyms.arch().unwrap(), Arch::Arm64);
let first_mapping = usyms.lookup_source_record(8253832).unwrap();
assert_eq!(
first_mapping.managed_symbol.unwrap(),
"NewBehaviourScript.Start()"
);
assert_eq!(
first_mapping.managed_file.unwrap(),
"/Users/bitfox/_Workspace/IL2CPP/Assets/NewBehaviourScript.cs"
);
assert_eq!(first_mapping.managed_line.unwrap(), 10);
let second_mapping = usyms.lookup_source_record(8253836).unwrap();
assert_eq!(
second_mapping.managed_symbol.unwrap(),
"NewBehaviourScript.Start()"
);
assert_eq!(
second_mapping.managed_file.unwrap(),
"/Users/bitfox/_Workspace/IL2CPP/Assets/NewBehaviourScript.cs"
);
assert_eq!(second_mapping.managed_line.unwrap(), 10,);
let third_mapping = usyms.lookup_source_record(8253840).unwrap();
assert_eq!(
third_mapping.managed_symbol.unwrap(),
"NewBehaviourScript.Update()"
);
assert_eq!(
third_mapping.managed_file.unwrap(),
"/Users/bitfox/_Workspace/IL2CPP/Assets/NewBehaviourScript.cs"
);
assert_eq!(third_mapping.managed_line.unwrap(), 17);
}
#[test]
fn test_sorted_addresses() {
let file = File::open(fixture("il2cpp/artificial.usym")).unwrap();
let data = ByteView::map_file_ref(&file).unwrap();
let usyms = UsymSymbols::parse(&data).unwrap();
let mut last_address = usyms.records[0].address;
for i in 1..usyms.header.record_count as usize {
// The addresses should be weakly monotonic
assert!(usyms.records[i].address >= last_address);
last_address = usyms.records[i].address;
}
}
} | /// The record count in the header can't be read.
BadRecordCount,
/// The size of the usym file is smaller than the amount of data it is supposed to hold |
_core.py | import importlib
import inspect
import json
import os
import sqlite3 | import tempfile
import typing
from shutil import copyfile
from typing import Any, Generator, List, Union
from pydantic import BaseModel, root_validator
from pydantic.fields import ModelField
from sqlite_utils import Database as _Database
from typing_inspect import is_literal_type, is_union_type
from ._misc import iterable_in_type_repr
SPECIALTYPE = [
typing.Any,
typing.Literal,
typing.Union]
class TableBaseModel(BaseModel):
table: str
moduleclass: typing.Any
modulename: str
pks: List[str]
@root_validator(pre=True)
def extract_modulename(cls, values):
v = values['moduleclass']
values.update(
{'modulename': str(v).split("<class '")[1].split("'>")[0]})
return values
def data(self):
return dict(
table=self.table,
modulename=self.modulename,
pks=self.pks)
class DataBase():
def __init__(self, **kwargs):
self._basemodels = {}
self._db = _Database(memory=True)
def __call__(self, tablename) -> Generator[BaseModel, None, None]:
"""returns a Generator for all values in the Table. The returned values are subclasses of pydantic.BaseModel"""
try:
basemodel = self._basemodels[tablename]
foreign_refs = {key.column: key.other_table for key in self._db[tablename].foreign_keys}
except KeyError:
raise KeyError(f"can not find Table: {tablename} in Database") from None
for row in self._db[tablename].rows:
yield self._build_basemodel_from_dict(basemodel, row, foreign_refs)
def _special_conversion(self, field_value: Any) -> Union[bool, Any]:
def special_possible(obj_class):
try:
if not hasattr(obj_class.SQConfig, 'convert'):
return False
return True if obj_class.SQConfig.special_insert else False
except AttributeError:
return False
if isinstance(field_value, List):
if len(field_value) == 0:
return False
if not special_possible(obj_class := field_value[0].__class__):
return False
if not all(isinstance(value, type(field_value[0])) for value in field_value):
raise ValueError(f"not all values in the List are from the same type: '{field_value}'")
return [obj_class.SQConfig.convert(value) for value in field_value]
else:
if not special_possible(obj_class := field_value.__class__):
return False
return obj_class.SQConfig.convert(field_value)
def add(self, tablename: str, value: BaseModel, foreign_tables={}, update_nested_models=True, pk: str = "uuid") -> None:
"""adds a new value to the table tablename"""
# unknown tablename -> means new table -> update the table_basemodel_ref list
if tablename not in self._basemodels:
self._basemodels_add_model(table=tablename, moduleclass=value.__class__, pks=[pk])
# check whether the value matches the basemodels in the table
if not self._basemodels[tablename].moduleclass == type(value):
raise ValueError(
f"Can not add type '{type(value)}' to the table '{tablename}', which contains values of type '{self._basemodels[tablename].moduleclass}'")
# create dict for writing to the Table
data_for_save = value.dict() if not hasattr(value, "sqlite_repr") else value.sqlite_repr
foreign_keys = []
for field_name, field in value.__fields__.items():
field_value = getattr(value, field_name)
if res := self._special_conversion(field_value): # Special Insert with SQConfig.convert
data_for_save[field_name] = res
elif field.type_ in SPECIALTYPE or typing.get_origin(field.type_):
# typing._SpecialForm: Any, NoReturn, ClassVar, Union, Optional
# typing.get_origin(field.type_) -> e.g. Literal
data_for_save[field_name] = self._typing_conversion(field, field_value)
elif issubclass(field.type_, BaseModel): # nested BaseModels in this value
# the value has a field of type BaseModel, so this field must live in a foreign table
# if the field value is already in that table it continues, but if it is not it will be added to that table
# !recursive call to self.add
if field_name not in foreign_tables.keys():
keys = list(foreign_tables.keys())
raise KeyError(f"detect field of Type BaseModel, but can not find '{field_name}' in foreign_tables (Keys: {keys})") from None
else:
foreign_table_name = foreign_tables[field_name]
if foreign_table_name not in self._db.table_names():
raise KeyError(f"Can not add a value, which has a foreign Key '{foreign_tables}' to a Table '{foreign_table_name}' which does not exists")
nested_obj_ids = self._upsert_value_in_foreign_table(field_value, foreign_table_name, update_nested_models)
data_for_save[field_name] = nested_obj_ids
foreign_keys.append((field_name, foreign_table_name, pk)) # ignore=True
self._db[tablename].upsert(data_for_save, pk=pk, foreign_keys=foreign_keys)
def uuid_in_table(self, tablename: str, uuid: str) -> bool:
"""checks if the given uuid is used as a primary key in the table"""
hits = [row for row in self._db[tablename].rows_where("uuid = ?", [uuid])]
if len(hits) > 1:
raise Exception("uuid is two times in table") # TODO choice correct exceptiontype
return False if not hits else True
def value_in_table(self, tablename: str, value: BaseModel) -> bool:
"""checks if the given value is in the table"""
return self.uuid_in_table(tablename, value.uuid)
def value_from_table(self, tablename: str, uuid: str) -> typing.Any:
"""searchs the Objekt with the given uuid in the table and returns it. Returns a subclass of type pydantic.BaseModel"""
hits = [row for row in self._db[tablename].rows_where("uuid = ?", [uuid])]
if len(hits) > 1:
raise Exception("uuid is two times in table") # TODO choice correct exceptiontype
model = self._basemodels[tablename]
foreign_refs = {key.column: key.other_table for key in self._db[tablename].foreign_keys}
return None if not hits else self._build_basemodel_from_dict(model, hits[0], foreign_refs=foreign_refs)
def values_in_table(self, tablename) -> int:
"""returns the number of values in the Table"""
return self._db[tablename].count
def load(self, filename: str) -> None:
"""loads all data from the given file and adds them to the in-memory database"""
if not os.path.isfile(filename):
raise FileNotFoundError(f"Can not load {filename}")
file_db = sqlite3.connect(filename)
query = "".join(line for line in file_db.iterdump())
self._db.conn.executescript(query)
file_db.close()
for model in self._db["__basemodels__"].rows:
classname = model['modulename'].split('.')[-1]
modulename = '.'.join(model['modulename'].split('.')[:-1])
my_module = importlib.import_module(modulename)
self._basemodels_add_model(
table=model['table'],
moduleclass=getattr(my_module, classname),
pks=json.loads(model['pks']))
def save(self, filename: str) -> None:
"""saves alle values from the in_memory database to a file"""
if not filename.endswith(".db"):
filename += ".db"
tmp_dir = tempfile.mkdtemp()
name = filename.split(os.path.sep)[-1]
tmp_name = tmp_dir + os.path.sep + name
backup = tmp_dir + os.path.sep + "_backup.db"
if os.path.isfile(filename):
copyfile(filename, backup)
try:
file_db = sqlite3.connect(tmp_name)
query = "".join(line for line in self._db.conn.iterdump())
file_db.executescript(query)
file_db.close()
copyfile(tmp_name, filename)
except Exception:
print(f"saved the backup file under '{backup}'")
def _basemodels_add_model(self, **kwargs):
model = TableBaseModel(**kwargs)
self._basemodels.update({kwargs['table']: model})
self._db["__basemodels__"].upsert(model.data(), pk="modulename")
def _build_basemodel_from_dict(self, basemodel: TableBaseModel, row: dict, foreign_refs: dict):
# returns a subclass object of type BaseModel which is build out of class basemodel.moduleclass and the data out of the dict
members = inspect.getmembers(basemodel.moduleclass, lambda a: not(inspect.isroutine(a)))
field_models = next(line[1] for line in members if '__fields__' in line)
d = {}
for field_name, field_value in row.items():
type_repr = field_models[field_name].__str__().split(' ')[1] # 'type=Any'
if field_name in foreign_refs.keys(): # the column contains another subclass of BaseModel
if not iterable_in_type_repr(type_repr):
data = self.value_from_table(foreign_refs[field_name], field_value)
else:
data = [self.value_from_table(foreign_refs[field_name], val) for val in json.loads(field_value)]
else:
data = field_value if not iterable_in_type_repr(type_repr) else json.loads(field_value)
d.update({field_name: data})
return basemodel.moduleclass(**d)
def _upsert_value_in_foreign_table(self, field_value, foreign_table_name, update_nested_models) -> Union[str, List[str]]:
# The nested BaseModel will be inserted or upserted to the foreign table if it is not contained there,
# or the update_nested_models parameter is True. If the value is Iterable (e.g. List) all values in the
# List will be inserted or upserted. The function returns the ids of the values
# The foreign keys of this table are needed to add the nested basemodel object.
foreign_refs = {key.column: key.other_table for key in self._db.table(foreign_table_name).foreign_keys}
def add_nested_model(value):
if not self.value_in_table(foreign_table_name, value) or update_nested_models:
self.add(foreign_table_name, value, foreign_tables=foreign_refs)
return value.uuid
if not isinstance(field_value, List):
return add_nested_model(field_value)
else:
return [add_nested_model(element) for element in field_value]
def _typing_conversion(self, field: ModelField, field_value: typing.Any) -> typing.Any:
if field.type_ == typing.Any:
return field_value
elif is_union_type(field.type_):
return str(field_value)
elif is_literal_type(field.type_):
return str(field_value)
else:
raise NotImplementedError(f"type {field.type_} is not supported yet") | |
embedding_base.py | #-*- coding:utf-8 -*-
# Base class for all encoders
import copy
class Base(object):
def __init__(self, **kwargs):
pass
| return input_dict | def embed_fun(self, text_id, name = 'base_embedding', **kwargs):
input_dict = {}
input_dict[name] = text_id |
parser.py | import numpy as np
from torch import Tensor, FloatTensor
from kospeech.data.audio.core import load_audio
from kospeech.data.audio.augment import NoiseInjector, SpecAugment
from kospeech.data.audio.feature import MelSpectrogram, MFCC, Spectrogram, FilterBank
class AudioParser(object):
"""
Provides the interface for audio parsers.
Note:
Do not use this class directly; use one of its subclasses.
Method:
- **parse_audio()**: abstract method. you have to override this method.
- **parse_transcript()**: abstract method. you have to override this method.
"""
def __init__(self, dataset_path, noiseset_size, sample_rate=16000, noise_level=0.7, noise_augment=False):
if noise_augment:
self.noise_injector = NoiseInjector(dataset_path, noiseset_size, sample_rate, noise_level)
def parse_audio(self, *args, **kwargs):
raise NotImplementedError
def parse_transcript(self, *args, **kwargs):
raise NotImplementedError
class SpectrogramParser(AudioParser):
"""
Parses audio file into (spectrogram / mel spectrogram / mfcc) with various options.
Args:
transform_method (str): which feature to use (default: mel)
sample_rate (int): Sample rate of audio signal. (Default: 16000)
n_mels (int): Number of mel bins (or MFC coefficients) to retain. (Default: 80)
frame_length (int): frame length for spectrogram (ms) (Default : 20)
frame_shift (int): Length of hop between STFT windows. (ms) (Default: 10)
feature_extract_by (str): which library to use for feature extraction (default: librosa)
del_silence (bool): flag indicating whether to delete silence or not (default: False)
input_reverse (bool): flag indicating whether to reverse input or not (default: True)
normalize (bool): flag indicating whether to normalize spectrum or not (default: False)
time_mask_para (int): Hyper Parameter for Time Masking to limit time masking length
freq_mask_para (int): Hyper Parameter for Freq Masking to limit freq masking length
time_mask_num (int): how many time-masked area to make
freq_mask_num (int): how many freq-masked area to make
sos_id (int): start of sentence token's identification
eos_id (int): end of sentence token's identification
target_dict (dict): dictionary of filename and labels
"""
VANILLA = 0 # No augmentation applied
SPEC_AUGMENT = 1 # SpecAugment
NOISE_INJECTION = 2 # Noise Injection
HYBRID_AUGMENT = 3 # Noise Injection & SpecAugment
def __init__(self, feature_extract_by: str = 'librosa', sample_rate: int = 16000,
n_mels: int = 80, frame_length: int = 20, frame_shift: int = 10,
del_silence: bool = False, input_reverse: bool = True,
normalize: bool = False, transform_method: str = 'mel',
time_mask_para: int = 70, freq_mask_para: int = 12, time_mask_num: int = 2, freq_mask_num: int = 2,
sos_id: int = 1, eos_id: int = 2, target_dict: dict = None, noise_augment: bool = False, | dataset_path: str = None, noiseset_size: int = 0, noise_level: float = 0.7) -> None:
super(SpectrogramParser, self).__init__(dataset_path, noiseset_size, sample_rate, noise_level, noise_augment)
self.del_silence = del_silence
self.input_reverse = input_reverse
self.normalize = normalize
self.sos_id = sos_id
self.eos_id = eos_id
self.target_dict = target_dict
self.spec_augment = SpecAugment(time_mask_para, freq_mask_para, time_mask_num, freq_mask_num)
if transform_method.lower() == 'mel':
self.transforms = MelSpectrogram(sample_rate, n_mels, frame_length, frame_shift, feature_extract_by)
elif transform_method.lower() == 'mfcc':
self.transforms = MFCC(sample_rate, n_mels, frame_length, frame_shift, feature_extract_by)
elif transform_method.lower() == 'spect':
self.transforms = Spectrogram(sample_rate, frame_length, frame_shift, feature_extract_by)
elif transform_method.lower() == 'fbank':
self.transforms = FilterBank(sample_rate, n_mels, frame_length, frame_shift)
else:
raise ValueError("Unsupported feature : {0}".format(transform_method))
def parse_audio(self, audio_path: str, augment_method: int) -> Tensor:
"""
Parses audio.
Args:
audio_path (str): path of audio file
augment_method (int): flag indicating which augmentation method to use.
Returns: feature_vector
- **feature_vector** (torch.FloatTensor): feature from audio file.
"""
signal = load_audio(audio_path, self.del_silence)
if signal is None:
return None
if augment_method == SpectrogramParser.NOISE_INJECTION or augment_method == SpectrogramParser.HYBRID_AUGMENT:
signal = self.noise_injector(signal)
feature_vector = self.transforms(signal)
if self.normalize:
feature_vector -= feature_vector.mean()
if self.input_reverse: # Refer to the "Sequence to Sequence Learning with Neural Networks" paper
feature_vector = feature_vector[:, ::-1]
feature_vector = FloatTensor(np.ascontiguousarray(np.swapaxes(feature_vector, 0, 1)))
else:
feature_vector = FloatTensor(feature_vector).transpose(0, 1)
if augment_method == SpectrogramParser.SPEC_AUGMENT or augment_method == SpectrogramParser.HYBRID_AUGMENT:
feature_vector = self.spec_augment(feature_vector)
return feature_vector
def parse_transcript(self, *args, **kwargs):
raise NotImplementedError | |
main.py | from ulauncher.api.client.Extension import Extension
from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction
from ulauncher.api.client.EventListener import EventListener
from ulauncher.api.shared.event import KeywordQueryEvent, ItemEnterEvent
from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem
from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction
from ulauncher.api.shared.action.HideWindowAction import HideWindowAction
import webbrowser
import re
class OpenInBrowser(Extension):
def __init__(self):
super(OpenInBrowser, self).__init__()
self.subscribe(KeywordQueryEvent, KeywordQueryEventListener())
self.subscribe(ItemEnterEvent, ItemEnterEventListener())
class KeywordQueryEventListener(EventListener):
def on_event(self, event, extension):
data = event.get_argument()
items = [
ExtensionResultItem(
icon='images/icon.png',
name=event.get_argument(),
description='Open "%s" in the browser' % event.get_argument(),
on_enter=ExtensionCustomAction(data, keep_app_open=True)
)
]
return RenderResultListAction(items)
class ItemEnterEventListener(EventListener):
def on_event(self, event, extension):
data = event.get_data()
if not re.match(r'^https?://', data):
|
webbrowser.open_new_tab(data)
return RenderResultListAction([])
if __name__ == '__main__':
OpenInBrowser().run() | data = 'https://'+ data |
ACMPProblemParser.ts | import { Parser } from '../Parser';
export class ACMPProblemParser extends Parser {
public getMatchPatterns(): string[] {
return ['http://acmp.ru/*/index.asp*', 'https://acmp.ru/*/index.asp*'];
}
public getRegularExpressions(): RegExp[] {
return [/https?:\/\/acmp\.ru\/.*\/?index\.asp\?.*((id_task=\d+)|(id_problem=\d+)).*/];
}
public async parse(url: string, html: string): Promise<Sendable> {
const elem = htmlToElement(html);
const task = new TaskBuilder('ACMP').setUrl(url);
const main = elem.querySelector('tr[valign="top"] > td[background="/images/notepad2.gif"]');
task.setName(main.querySelector('h1').textContent);
const limitsStr = main.querySelector('center > i').textContent;
const limits = /: (\d+).*: (\d+).*: (\d+)/.exec(limitsStr);
task.setTimeLimit(parseInt(limits[1], 10) * 1000);
task.setMemoryLimit(parseInt(limits[2], 10));
elem.querySelectorAll('table.main tbody > tr:not(:first-child)').forEach(row => {
const input = row.querySelector('td:nth-child(2)').innerHTML;
const output = row.querySelector('td:nth-child(3)').innerHTML;
task.addTest(input, output);
});
return task.build();
}
} | import { Sendable } from '../../models/Sendable';
import { TaskBuilder } from '../../models/TaskBuilder';
import { htmlToElement } from '../../utils/dom'; |
|
main.go | // Copyright 2020 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"flag"
"log"
"net"
"net/http"
"net/url"
"os"
"os/signal"
"strings"
"syscall"
"github.com/prometheus-community/prom-label-proxy/injectproxy"
)
func | () {
var (
insecureListenAddress string
upstream string
label string
queryParam string
enableLabelAPIs bool
unsafePassthroughPaths string // Comma-delimited string.
errorOnReplace bool
header string
)
flagset := flag.NewFlagSet(os.Args[0], flag.ExitOnError)
flagset.StringVar(&insecureListenAddress, "insecure-listen-address", "", "The address the prom-label-proxy HTTP server should listen on.")
flagset.StringVar(&upstream, "upstream", "", "The upstream URL to proxy to.")
flagset.StringVar(&queryParam, "query-param", "", "The query parameter to obtain the label value from. This or -header is required.")
flagset.StringVar(&label, "label", "", "The label to enforce in all proxied PromQL queries. "+
"This label will be also required as the URL parameter to get the value to be injected. For example: -label=tenant will"+
" make it required for this proxy to have URL in form of: <URL>?tenant=abc&other_params...")
flagset.BoolVar(&enableLabelAPIs, "enable-label-apis", false, "When specified proxy allows to inject label to label APIs like /api/v1/labels and /api/v1/label/<name>/values. "+
"NOTE: Enable with care because filtering by matcher is not implemented in older versions of Prometheus (>= v2.24.0 required) and Thanos (>= v0.18.0 required, >= v0.23.0 recommended). If enabled and "+
"any labels endpoint does not support selectors, the injected matcher will have no effect.")
flagset.StringVar(&unsafePassthroughPaths, "unsafe-passthrough-paths", "", "Comma delimited allow list of exact HTTP path segments that should be allowed to hit upstream URL without any enforcement. "+
"This option is checked after Prometheus APIs, you cannot override enforced API endpoints to be not enforced with this option. Use carefully as it can easily cause a data leak if the provided path is an important "+
"API (like /api/v1/configuration) which isn't enforced by prom-label-proxy. NOTE: \"all\" matching paths like \"/\" or \"\" and regex are not allowed.")
flagset.BoolVar(&errorOnReplace, "error-on-replace", false, "When specified, the proxy will return HTTP status code 400 if the query already contains a label matcher that differs from the one the proxy would inject.")
flagset.StringVar(&header, "header", "", "The HTTP header to obtain the label value from. This or -query-param is required.")
//nolint: errcheck // Parse() will exit on error.
flagset.Parse(os.Args[1:])
if label == "" {
log.Fatalf("-label flag cannot be empty")
}
var opts []injectproxy.Option
if (queryParam != "" && header != "") || (queryParam == "" && header == "") {
log.Fatal("exactly one of -query-param and -header must be given")
}
if queryParam != "" {
opts = append(opts, injectproxy.WithValueFromQuery(queryParam))
}
if header != "" {
opts = append(opts, injectproxy.WithValueFromHeader(header))
}
upstreamURL, err := url.Parse(upstream)
if err != nil {
log.Fatalf("Failed to build parse upstream URL: %v", err)
}
if upstreamURL.Scheme != "http" && upstreamURL.Scheme != "https" {
log.Fatalf("Invalid scheme for upstream URL %q, only 'http' and 'https' are supported", upstream)
}
if enableLabelAPIs {
opts = append(opts, injectproxy.WithEnabledLabelsAPI())
}
if len(unsafePassthroughPaths) > 0 {
opts = append(opts, injectproxy.WithPassthroughPaths(strings.Split(unsafePassthroughPaths, ",")))
}
if errorOnReplace {
opts = append(opts, injectproxy.WithErrorOnReplace())
}
routes, err := injectproxy.NewRoutes(upstreamURL, label, opts...)
if err != nil {
log.Fatalf("Failed to create injectproxy Routes: %v", err)
}
mux := http.NewServeMux()
mux.Handle("/", routes)
srv := &http.Server{Handler: mux}
l, err := net.Listen("tcp", insecureListenAddress)
if err != nil {
log.Fatalf("Failed to listen on insecure address: %v", err)
}
errCh := make(chan error)
go func() {
log.Printf("Listening insecurely on %v", l.Addr())
errCh <- srv.Serve(l)
}()
term := make(chan os.Signal, 1)
signal.Notify(term, os.Interrupt, syscall.SIGTERM)
select {
case <-term:
log.Print("Received SIGTERM, exiting gracefully...")
srv.Close()
case err := <-errCh:
if err != http.ErrServerClosed {
log.Printf("Server stopped with %v", err)
}
os.Exit(1)
}
}
| main |
app.module.ts | import { NgModule } from '@angular/core';
| import { BrowserModule } from '@angular/platform-browser';
import { FormsModule } from '@angular/forms';
import { JsonpModule } from '@angular/http';
import { HttpModule } from '@angular/http';
import { MoviesComponent } from './components/movies/movies.component';
import { MovieComponent } from './components/movie/movie.component';
import { AppComponent } from './app.component';
import { routing, appRoutingProviders } from './app.routes';
import { RouterModule } from '@angular/router';
@NgModule({
imports: [ BrowserModule, HttpModule, JsonpModule, FormsModule, routing, RouterModule ],
declarations: [ AppComponent, MoviesComponent, MovieComponent ],
providers: [ appRoutingProviders ],
bootstrap: [ AppComponent ]
})
export class AppModule { } | |
beam_factorization_BF.py | ##########################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
##########################################################################################
"""Implementation of NLO beam_factorization currents. These are the PDF counterterms as well
as the integrated initial state collinear counterterms."""
import os
import math
from madgraph.core.base_objects import EpsilonExpansion
import madgraph.various.misc as misc
import commons.utils as utils
import commons.QCD_local_currents as currents
import commons.factors_and_cuts as factors_and_cuts
from commons.integrated_current_expressions import HE
pjoin = os.path.join
CurrentImplementationError = utils.CurrentImplementationError
log = math.log
pi = math.pi
# All counterterms here adopt a xi-dependent distribution of the following form:
#
# Counterterm(xi) = F_+(xi) + [F] \delta(xi-1)
# (which can also be explicitely written)
# Counterterm(xi) = F(xi) + {F(xi)} \delta(xi-1) + [F] \delta(xi-1)
#
# where 'F' can either be a PDF counterterm or an interated collinear ISR counterterm.
# Then each piece of the distribution is assigned a different value for its attribute
# 'distribution_type' as follows:
#
# F(xi) --> distribution_type = 'bulk'
# {F(xi)} --> distribution_type = 'counterterm'
# [F(xi)] --> distribution_type = 'endpoint'
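#
# For illustration only: with F_+ the plus distribution built from F, a convolution with a
# test function g(xi) reads
#
#   \int_0^1 dxi F_+(xi) g(xi) = \int_0^1 dxi F(xi) ( g(xi) - g(1) )
#
# so the 'bulk' piece acts on the xi-rescaled event, the 'counterterm' piece acts on the
# xi=1 event (building the subtraction above), and 'endpoint' is the [F] delta(xi-1) term
# (overall signs and 1/xi flux conventions are handled by the framework, not by this comment).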
#=========================================================================================
# PDF Counterterm
#=========================================================================================
class QCD_beam_factorization_F0(currents.QCDBeamFactorizationCurrent):
"""Implements the NLO QCD PDF counterterm of type F(xi)"""
distribution_types_implemented_in_this_class = ['bulk','counterterm','endpoint']
@classmethod
def does_implement_this_current(cls, current, model):
# Check the general properties common to NLO QCD collinear tree-level currents
|
def evaluate_kernel(self, PS_point, process, xi, mu_r, mu_f, Q, normalization,
allowed_backward_evolved_flavors='ALL'):
""" Return an instance of BeamFactorizationCurrentEvaluation, whose 'values' entry
is a dictionary specifying the counterterm in flavor space, for the value of xi
specified in argument."""
if allowed_backward_evolved_flavors != 'ALL':
raise CurrentImplementationError('The current %s must always be called with'%self.__class__.__name__+
"allowed_backward_evolved_flavors='ALL', not %s"%str(allowed_backward_evolved_flavors))
# Only the order epsilon of the scales pre-factor matters here.
prefactor = EpsilonExpansion({
0 : 1.,
1 : log(mu_r**2 / mu_f**2)
})
prefactor *= EpsilonExpansion({-1:1.})*normalization
# Assign a fake xi for now if the distribution type is 'endpoint'
# TODO: this is not optimal, eventually we should put each of these three pieces in
# separate currents
if self.distribution_type == 'endpoint':
xi = 0.5
# Define the NLO QCD PDF counterterms kernels
kernel_gg = {
'bulk' : prefactor*(
2.*self.CA*( 1./ (1.-xi) + (1.-xi)/xi -1. + xi*(1-xi) )
),
'counterterm' : prefactor*( 2.*self.CA / (1.-xi) ),
'endpoint' : prefactor*( 11./6.*self.CA - 2./3.*self.NF*self.TR)
}
kernel_gq = {
'bulk' : prefactor*( self.CF*(1.+(1.-xi)**2)/xi ),
'counterterm' : None,
'endpoint' : None
}
kernel_qg = {
'bulk' : prefactor*( self.TR*(xi**2 + (1.-xi)**2) ),
'counterterm' : None,
'endpoint' : None
}
kernel_qq = {
'bulk' : prefactor*( self.CF*((1.+xi**2)/(1.-xi)) ),
'counterterm' : prefactor*( self.CF*((1.+xi**2)/(1.-xi)) ),
'endpoint' : None
}
active_quark_PDGs = tuple([pdg for pdg in range(1,7)+range(-1,-7,-1)
if pdg in self.beam_PDGs])
# Build the NLO flavor matrix
flavor_matrix = {}
for reduced_flavor in self.beam_PDGs:
# Gluon backward evolution
if reduced_flavor==21:
gluon_dict = {}
if kernel_gg[self.distribution_type] is not None:
gluon_dict[(21,)] = kernel_gg[self.distribution_type]
if active_quark_PDGs and kernel_gq[self.distribution_type] is not None:
gluon_dict[active_quark_PDGs] = kernel_gq[self.distribution_type]
if gluon_dict:
flavor_matrix[21] = gluon_dict
# Quark backward evolution
if reduced_flavor in active_quark_PDGs:
quark_dict = {}
if kernel_qg[self.distribution_type] is not None:
quark_dict[(21,)] = kernel_qg[self.distribution_type]
if kernel_qq[self.distribution_type] is not None:
quark_dict[(reduced_flavor,)] = kernel_qq[self.distribution_type]
if quark_dict:
flavor_matrix[reduced_flavor] = quark_dict
# Truncate all entries of the flavor matrix so as to remove irrelevant O(\eps) terms
for flav_in, flav_outs in flavor_matrix.items():
for flav_out, eps_expansion in flav_outs.items():
eps_expansion.truncate(max_power=0)
# Now assign the flavor matrix in the BeamFactorizationCurrentEvaluation instance
# If this is a physical contribution (i.e. not a counterterm) then we must enforce that
# the reduced kinematics is None as it will not even be read by MadNkLO.
evaluation = utils.BeamFactorizationCurrentEvaluation({
'spin_correlations' : [None,],
'color_correlations' : [None,],
'values' : { (0,0) : flavor_matrix }
})
return evaluation
#=========================================================================================
# PDF integrated initial-state single collinear counterterm
#=========================================================================================
class QCD_beam_factorization_single_collinear(currents.QCDBeamFactorizationCurrent):
"""Implements the NLO QCD initial-state single collinear integratated counterterm of type F(xi)"""
distribution_types_implemented_in_this_class = ['bulk','counterterm','endpoint']
@classmethod
def does_implement_this_current(cls, current, model):
# Check the general properties common to NLO QCD collinear tree-level currents
init_vars = cls.common_does_implement_this_current(current, 2, 0)
if init_vars is None: return None
# Retrieve singular structure
ss = current.get('singular_structure').substructures[0]
# Check that it involves exactly one collinear structure with two legs.
if len(ss.substructures)!=1:
return None
collinear_structure = ss.substructures[0]
if collinear_structure.name() != 'C':
return None
if len(collinear_structure.legs) != 2:
return None
# Make sure that one of the two legs of the C structure is initial-state
if not any(cls.is_initial(leg) for leg in collinear_structure.legs):
return None
# The current is valid (remember that this implements the integrated
# initial state collinear counterterm of all possible incoming flavors).
return init_vars
def evaluate_kernel(self, PS_point, process, xi, mu_r, mu_f, Q, normalization,
allowed_backward_evolved_flavors='ALL'):
""" Return an instance of BeamFactorizationCurrentEvaluation, whose 'values' entry
is a dictionary specifying the counterterm in flavor space, for the value of xi
specified in argument."""
# Obtain Q_square.
Q_square = Q.square()
# Only up to the order epsilon^2 of the scales prefactor matters here.
logMuQ = log(mu_r**2/Q_square)
prefactor = EpsilonExpansion({ 0 : 1., 1 : logMuQ, 2 : 0.5*logMuQ**2 })
prefactor *= normalization
# The additional 1/x part of the prefactor is included later during the PDF
# convolution of the event (using its 'Bjorken rescaling' attribute) because
# we must make sure that the plus distribution hits on it.
# Also, the same 1/x appears in the PDF counterterms as a result of the change
# of variable necessary to bring them in the form where the plus distribution
# only acts on the PDF. So it makes sense to keep it completely factorised.
# Input variables
y_0 = factors_and_cuts.y_0_prime
logy0 = log(y_0)
# Assign a fake x for now if the distribution type is 'endpoint'
# TODO: this is not optimal, eventually we should put each of these three pieces in
# separate currents
if self.distribution_type == 'endpoint':
x = 0.5
else:
x = xi
# In MadNkLO, we use the change of variable xb' = xb*xi so that the factor
# (Q^2)^\eps in Eq. 5.21 of https://arxiv.org/pdf/0903.1218.pdf actually reads
# (Q^2/(xi1*xi2))^\eps and the '+' distributions also act on it, which we realize
# by simply multiplying the Q^2 provided by the xi factor that must be set to one.
logMuQ_plus = log(mu_r**2/(Q_square*x))
prefactor_plus = EpsilonExpansion({ 0 : 1., 1 : logMuQ_plus, 2 : 0.5*logMuQ_plus**2 })
prefactor_plus *= normalization
log1mx = log(1.-x)
# Heaviside
theta_x_1my0 = 1. if (x-(1-y_0)) >= 0. else 0.
theta_1my0_x = 1. if ((1-y_0)-x) >= 0. else 0.
# Define the NLO QCD integrate initial-state single collinear counterterms kernels
color_factor = self.CA
kernel_gg = {
'bulk' : prefactor*color_factor*(EpsilonExpansion({
-1 : -2.*( 1./(1.-x) + (1.-x)/x - 1 + x*(1-x) ),
0 : (2.*log1mx / (1.-x))*(1.+theta_x_1my0) + (2.*logy0/(1.-x))*theta_1my0_x
+ 2.*( ((1.-x)/x) -1. + x*(1.-x) )*( log1mx*(1.+theta_x_1my0) + logy0*theta_1my0_x )
})),
'counterterm' : prefactor_plus*color_factor*(EpsilonExpansion({
-1 : -2.* ( 1./(1.-x) ) ,
0 : (2.*log1mx / (1.-x))*(1.+theta_x_1my0) ,
})),
'endpoint' : prefactor*color_factor*(EpsilonExpansion({
-2 : 1. ,
-1 : 0. ,
0 : -(math.pi**2/6.) + logy0**2
}))
}
color_factor = self.CA
kernel_gq = {
'bulk' : prefactor*color_factor*(EpsilonExpansion({
-1 : -(self.CF/self.CA)*(1.+(1.-x)**2) / x ,
0 : (self.CF/self.CA)*( ((1.+(1.-x)**2)/x)*( log1mx*(1.+theta_x_1my0) + logy0*theta_1my0_x ) + x )
})),
'counterterm' : None,
'endpoint' : None
}
color_factor = self.CF
kernel_qg = {
'bulk' : prefactor*color_factor*(EpsilonExpansion({
-1 : -(self.TR/self.CF)*(x**2+(1.-x)**2) ,
0 : (self.TR/self.CF)*( (x**2 + (1.-x)**2)*( log1mx*(1.+theta_x_1my0) + logy0*theta_1my0_x ) + 2.*x*(1.-x) )
})),
'counterterm' : None,
'endpoint' : None
}
color_factor = self.CF
kernel_qq = {
'bulk' : prefactor*color_factor*(EpsilonExpansion({
-1 : -((1.+x**2)/(1.-x)) ,
0 : (2.*log1mx / (1.-x))*(1.+theta_x_1my0) + (2.*logy0/(1.-x))*theta_1my0_x
- ( (1.+x)*( log1mx*(1.+theta_x_1my0)+logy0*theta_1my0_x ) -1.+x )
})),
'counterterm' : prefactor_plus*color_factor*(EpsilonExpansion({
-1 : -((1.+x**2)/(1.-x)) ,
0 : (2.*log1mx / (1.-x))*(1.+theta_x_1my0) ,
})),
'endpoint' : prefactor*color_factor*(EpsilonExpansion({
-2 : 1. ,
-1 : 3./2. ,
0 : -(math.pi**2/6.) + logy0**2
}))
}
active_quark_PDGs = tuple([pdg for pdg in range(1,7)+range(-1,-7,-1)
if pdg in self.beam_PDGs])
# Build the NLO flavor matrix
flavor_matrix = {}
for reduced_flavor in self.beam_PDGs:
# Gluon backward evolution
if reduced_flavor==21:
gluon_dict = {}
if kernel_gg[self.distribution_type] is not None:
gluon_dict[(21,)] = kernel_gg[self.distribution_type]
if active_quark_PDGs and kernel_gq[self.distribution_type] is not None:
gluon_dict[active_quark_PDGs] = kernel_gq[self.distribution_type]
if gluon_dict:
flavor_matrix[21] = gluon_dict
# Quark backward evolution
if reduced_flavor in active_quark_PDGs:
quark_dict = {}
if kernel_qg[self.distribution_type] is not None:
quark_dict[(21,)] = kernel_qg[self.distribution_type]
if kernel_qq[self.distribution_type] is not None:
quark_dict[(reduced_flavor,)] = kernel_qq[self.distribution_type]
if quark_dict:
flavor_matrix[reduced_flavor] = quark_dict
# Truncate all entries of the flavor matrix so as to remove irrelevant O(\eps) terms
for flav_in, flav_outs in flavor_matrix.items():
for flav_out, eps_expansion in flav_outs.items():
eps_expansion.truncate(max_power=0)
# Now apply the mask 'allowed_backward_evolved_flavors' if not set to 'ALL'
filtered_flavor_matrix = self.apply_flavor_mask(flavor_matrix,allowed_backward_evolved_flavors)
# Now assign the flavor matrix in the BeamFactorizationCurrentEvaluation instance
evaluation = utils.BeamFactorizationCurrentEvaluation({
'spin_correlations' : [None,],
'color_correlations' : [None,],
'values' : { (0,0) : filtered_flavor_matrix }
})
return evaluation
#=========================================================================================
# PDF integrated initial-state single soft-collinear counterterm
#=========================================================================================
class QCD_beam_factorization_single_softcollinear(currents.QCDBeamFactorizationCurrent):
"""Implements the NLO QCD initial-state single soft-collinear integgratated counterterm
of type F(xi). These are zero here since they have already been accounted for
in the soft counterterms."""
distribution_types_implemented_in_this_class = ['bulk','counterterm','endpoint']
# These integrated contributions are not really directly related to the physical
# properties of beam factorization (for instance they don't act on the flavor space) and
# therefore apply independently of it.
beam_types_implemented_in_this_class = 'ALL'
beam_PDGs_implemented_in_this_class = 'ALL'
# The soft-collinear integrated counterterm has been accounted for completely in the
# soft integrated counterterm
is_zero = True
def __init__(self, *args, **opts):
# Make sure it is initialized with the proper set of options and remove them
# before calling the mother constructor
if 'color_charge' not in opts:
raise CurrentImplementationError(
"The current '%s' must be instantiated with "%self.__class__.__name__+
" a 'color_charge' option specified.")
color_charge = opts.pop('color_charge')
super(QCD_beam_factorization_single_softcollinear, self).__init__(*args, **opts)
self.supports_helicity_assignment = False
# At this stage color_charge is the string of the argument to retrieve ('CA' or 'CF')
# And now that the mother constructor is called, the group factors have been initialized
# and we can retrieve them.
self.color_charge = getattr(self, color_charge)
@classmethod
def does_implement_this_current(cls, current, model):
# Check the general properties common to NLO QCD collinear tree-level currents
init_vars = cls.common_does_implement_this_current(current, 2, 0)
if init_vars is None:
return None
# If this is a BF current it will not have substructures
ss = current.get('singular_structure')
if len(ss.substructures)==0:
return None
# Retrieve singular structure
ss = current.get('singular_structure').substructures[0]
# Check that it involves exactly one collinear structure with two legs.
if len(ss.substructures)!=1:
return None
# Finally check that the singular structure and PDG matches
singular_structure = ss.substructures[0]
# Its main structure should be of collinear type
if singular_structure.name()!='C':
return None
# It should have only one leg left, the other one being in the nested soft structure
# It must be an initial-state leg.
if len(singular_structure.legs)!=1:
return None
# The non-soft leg must be a quark or a gluon
if not abs(singular_structure.legs[0].pdg) in [21,]+range(1,7):
return None
# It should have exactly one nested structure
if len(singular_structure.substructures)!=1:
return None
sub_singular_structure = singular_structure.substructures[0]
# Make sure this substructure is soft
if sub_singular_structure.name()!='S':
return None
# Make sure it contains a single soft leg
if len(sub_singular_structure.legs)!=1:
return None
soft_leg = sub_singular_structure.legs[0]
# Make sure the soft leg is massless final and a gluon
if model.get_particle(soft_leg.pdg).get('mass').upper()!='ZERO':
return None
if soft_leg.pdg != 21:
return None
# We now know that this current is implemented here. We return
# the specific color charge to instantiate this kernel with,
# in the form of the name of the group factor to retrieve upon
# initialization.
if singular_structure.legs[0].pdg == 21:
# This is a 'g > g g' soft-collinear splitting
init_vars['color_charge'] = 'CA'
else:
# This is a 'q > g g' soft-collinear splitting
init_vars['color_charge'] = 'CA'
return init_vars
| init_vars = cls.common_does_implement_this_current(current, 2, 0)
if init_vars is None: return None
# Retrieve singular structure
ss = current.get('singular_structure').substructures[0]
# Check that it involves exactly one F structure with one leg.
if len(ss.substructures)==0:
factorization_structure = ss
elif len(ss.substructures)==1 and len(ss.substructures[0].substructures)==0:
factorization_structure = ss.substructures[0]
else:
return None
if factorization_structure.name() != 'F':
return None
if len(factorization_structure.legs) != 1:
return None
# Make sure the one leg of the F structure is initial-state
if not cls.is_initial(factorization_structure.legs[0]):
return None
# The current is valid (remember that this implements the PDF counterterm of
# all possible incoming flavors).
return init_vars |
MemoryRegionHandleDropSafety.rs | // This file is part of ucx. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/ucx/master/COPYRIGHT. No part of predicator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of ucx. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/ucx/master/COPYRIGHT.
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub(crate) struct MemoryRegionHandleDropSafety(NonNull<c_void>, NonNull<uct_md>, Arc<MemoryDomainHandleDropSafety>);
impl Drop for MemoryRegionHandleDropSafety
{
#[inline(always)]
fn drop(&mut self)
{ | }
impl MemoryRegionHandleDropSafety
{
#[inline(always)]
pub(crate) fn new(value: NonNull<c_void>, memory_domain: NonNull<uct_md>, memory_domain_handle_drop_safety: Arc<MemoryDomainHandleDropSafety>) -> Arc<Self>
{
Arc::new(MemoryRegionHandleDropSafety(value, memory_domain, memory_domain_handle_drop_safety))
}
}
|
let status = unsafe { uct_md_mem_free(self.1.as_ptr(), self.0.as_ptr()) };
if !status.is_ok()
{
panic!("Unexpected status '{:?}'", status.parse())
}
}
|
parallel_list_str.py | def count_matches(s1, s2):
''' (str, str) -> int
Return the number of positions in s1 that contain the
same character at the corresponding position of s2.
Precondition: len(s1) == len(s2)
>>> count_matches('ate', 'ape')
2
>>> count_matches('head', 'hard')
2
'''
num_matches = 0
for i in range(len(s1)):
if s1[i] == s2[i]:
num_matches = num_matches + 1
return num_matches
def | (list1, list2):
''' (list of number, list of number) -> list of number
Return a new list in which each item is the sum of the items at the
corresponding position of list1 and list2.
Precondition: len(list1) == len(list2)
>>> sum_items([1, 2, 3], [2, 4, 2])
[3, 6, 5]
'''
sum_list = []
for i in range(len(list1)):
sum_list.append(list1[i] + list2[i])
return sum_list
| sum_items |
index.ts | /**
* Copyright 2018, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and | * limitations under the License.
*/
export * from './stackdriver'; |
|
hls.py | #!/usr/bin/env python3
import os
import time
import itertools
import contextlib
import fanout_utils
def fanout_hls(context):
context += {
"starttime": int(time.time()),
}
cleanup(context)
context += calculate_map_and_varmap(context)
generate_master_playlists(context)
fanout(context)
print("Cleaning up")
cleanup(context)
def cleanup(c):
with contextlib.suppress(FileExistsError):
os.mkdir(os.path.join(c.hls_write_path, c.stream))
with contextlib.suppress(FileNotFoundError):
fanout_utils.remove_glob(os.path.join(
c.hls_write_path, c.stream, "*.ts"))
fanout_utils.remove_glob(os.path.join(
c.hls_write_path, "%s/*.m3u8" % c.stream))
fanout_utils.remove_glob(os.path.join(
c.hls_write_path, "%s_*.m3u8" % c.stream))
def calculate_map_and_varmap(c):
first_audio_stream_index = len(c.video_tracks)
# HD+Native
maps = ["-map 0:v:0 -map 0:a:0"]
varmaps = ["v:0,a:0"]
if 'SD' in c.video_tracks:
# SD+Native
maps += ["-map 0:v:1 -map 0:a:0"]
varmaps += ["v:1,a:1"]
if 'Slides' in c.video_tracks:
# Slides+Native
maps += ["-map 0:v:2 -map 0:a:0"]
varmaps += ["v:2,a:2"]
if 'Translated' in c.audio_tracks:
# Translated
maps += ["-map 0:a:1"]
varmaps += ["a:%d" % (first_audio_stream_index+0)]
if 'Translated-2' in c.audio_tracks:
# Translated-2
maps += ["-map 0:a:2"]
varmaps += ["a:%d" % (first_audio_stream_index+1)]
return {
"maps": maps,
"varmaps": varmaps,
"first_audio_stream_index": first_audio_stream_index,
}
def generate_master_playlists(c):
for video_track, audio_track in itertools.product(c.video_tracks, c.audio_tracks):
playlist_context = c + {
"video_track": video_track,
"audio_track": audio_track,
}
master_playlist = fanout_utils.format_and_strip(playlist_context, """
#EXTM3U
#EXT-X-VERSION:3
#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="Untranslated",DEFAULT={{ 'YES' if audio_track == 'Native' else 'NO' }}
{% if 'Translated' in audio_tracks %}
#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="Translation 1",DEFAULT={{ 'YES' if audio_track == 'Translated' else 'NO' }},URI="{{ stream }}/chunks_{{ first_audio_stream_index+0 }}.m3u8"
{% endif %}
{% if 'Translated-2' in audio_tracks %}
#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="Translation 2",DEFAULT={{ 'YES' if audio_track == 'Translated-2' else 'NO' }},URI="{{ stream }}/chunks_{{ first_audio_stream_index+1 }}.m3u8"
{% endif %}
{% if video_track in ['HD'] %}
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=5000000,RESOLUTION=1920x1080,CODECS="avc1.4d0028,mp4a.40.2",AUDIO="audio"
{{ stream }}/chunks_0.m3u8
{% endif %}
{% if 'SD' in video_tracks and video_track in ['HD', 'SD'] %}
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=800000,RESOLUTION=1024x576,CODECS="avc1.4d0028,mp4a.40.2",AUDIO="audio"
{{ stream }}/chunks_1.m3u8
{% endif %}
{% if 'Slides' in video_tracks and video_track in ['HD', 'SD', 'Slides'] %}
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=100000,RESOLUTION=1024x576,CODECS="avc1.4d0028,mp4a.40.2",AUDIO="audio"
{{ stream }}/chunks_2.m3u8
{% endif %}
""")
master_playlist_file = os.path.join(
c.hls_write_path,
"%s_%s_%s.m3u8" % (c.stream, audio_track.lower(), video_track.lower())
)
print("Writing Master Playlist-File %s" % master_playlist_file)
with open(master_playlist_file, "w") as f:
f.write(master_playlist)
def fanout(c):
|
if __name__ == "__main__":
parser = fanout_utils.setup_argparse(name="hls")
parser.add_argument('--hls_write_path', metavar='PATH', type=str,
help='Path to write the HLS-Pieces and Master-Playlists to')
args = parser.parse_args()
fanout_utils.mainloop(name="hls", transcoding_stream="h264", calback=fanout_hls, args=args)
| command = fanout_utils.format_and_strip(c, """
ffmpeg -v warning -nostats -nostdin -y -analyzeduration 3000000
-i {{ pull_url }}
-c:v copy
-c:a copy
{{ maps | join("\n\t") }}
-hls_time 6
-hls_list_size 200
-hls_segment_filename "{{ hls_write_path }}/{{ stream }}/{{ starttime }}-%d_%v.ts"
-hls_flags +delete_segments+omit_endlist+independent_segments
-var_stream_map '{{ varmaps | join(" ") }}'
"{{ hls_write_path }}/{{ stream }}/chunks_%v.m3u8"
""")
fanout_utils.call(command) |
region_snapshot.rs | // Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use kvproto::metapb::Region;
use rocksdb::{DBIterator, DBVector, SeekKey, TablePropertiesCollection, DB};
use std::cmp;
use std::sync::Arc;
use crate::raftstore::store::engine::{IterOption, Peekable, Snapshot, SyncSnapshot};
use crate::raftstore::store::{keys, util, PeerStorage};
use crate::raftstore::Result;
use crate::util::{panic_when_key_exceed_bound, set_panic_mark};
use super::metrics::*;
/// Snapshot of a region.
///
/// Only data within a region can be accessed.
#[derive(Debug)]
pub struct RegionSnapshot {
snap: SyncSnapshot,
region: Arc<Region>,
}
impl RegionSnapshot {
pub fn new(ps: &PeerStorage) -> RegionSnapshot {
RegionSnapshot::from_snapshot(ps.raw_snapshot().into_sync(), ps.region().clone())
}
pub fn from_raw(db: Arc<DB>, region: Region) -> RegionSnapshot {
RegionSnapshot::from_snapshot(Snapshot::new(db).into_sync(), region)
}
pub fn from_snapshot(snap: SyncSnapshot, region: Region) -> RegionSnapshot {
RegionSnapshot {
snap,
region: Arc::new(region),
}
}
pub fn get_region(&self) -> &Region {
&self.region
}
pub fn iter(&self, iter_opt: IterOption) -> RegionIterator {
RegionIterator::new(&self.snap, Arc::clone(&self.region), iter_opt)
}
pub fn iter_cf(&self, cf: &str, iter_opt: IterOption) -> Result<RegionIterator> {
Ok(RegionIterator::new_cf(
&self.snap,
Arc::clone(&self.region),
iter_opt,
cf,
))
}
// scan scans the database using an iterator over the range [start_key, end_key), calling function f for
// each key/value pair; if f returns false, the scan terminates.
pub fn scan<F>(&self, start_key: &[u8], end_key: &[u8], fill_cache: bool, f: F) -> Result<()>
where
F: FnMut(&[u8], &[u8]) -> Result<bool>,
{
let iter_opt =
IterOption::new(Some(start_key.to_vec()), Some(end_key.to_vec()), fill_cache);
self.scan_impl(self.iter(iter_opt), start_key, f)
}
// like `scan`, only on a specific column family.
pub fn scan_cf<F>(
&self,
cf: &str,
start_key: &[u8],
end_key: &[u8],
fill_cache: bool,
f: F,
) -> Result<()>
where
F: FnMut(&[u8], &[u8]) -> Result<bool>,
{
let iter_opt =
IterOption::new(Some(start_key.to_vec()), Some(end_key.to_vec()), fill_cache);
self.scan_impl(self.iter_cf(cf, iter_opt)?, start_key, f)
}
fn scan_impl<F>(&self, mut it: RegionIterator, start_key: &[u8], mut f: F) -> Result<()>
where
F: FnMut(&[u8], &[u8]) -> Result<bool>,
{
if !it.seek(start_key)? {
return Ok(());
}
while it.valid() {
let r = f(it.key(), it.value())?;
if !r || !it.next() {
break;
}
}
Ok(())
}
pub fn get_properties_cf(&self, cf: &str) -> Result<TablePropertiesCollection> {
util::get_region_properties_cf(&self.snap.get_db(), cf, self.get_region())
}
pub fn get_start_key(&self) -> &[u8] {
self.region.get_start_key()
}
pub fn get_end_key(&self) -> &[u8] {
self.region.get_end_key()
}
}
impl Clone for RegionSnapshot {
fn clone(&self) -> Self {
RegionSnapshot {
snap: self.snap.clone(),
region: Arc::clone(&self.region),
}
}
}
impl Peekable for RegionSnapshot {
fn get_value(&self, key: &[u8]) -> Result<Option<DBVector>> {
util::check_key_in_region(key, &self.region)?;
let data_key = keys::data_key(key);
self.snap.get_value(&data_key)
}
fn get_value_cf(&self, cf: &str, key: &[u8]) -> Result<Option<DBVector>> {
util::check_key_in_region(key, &self.region)?;
let data_key = keys::data_key(key);
self.snap.get_value_cf(cf, &data_key)
}
}
/// `RegionIterator` wraps a rocksdb iterator and only allows it to
/// iterate within the region. It behaves as if the underlying
/// db only contains one region.
pub struct RegionIterator {
iter: DBIterator<Arc<DB>>,
valid: bool,
region: Arc<Region>,
start_key: Vec<u8>,
end_key: Vec<u8>,
}
fn set_lower_bound(iter_opt: &mut IterOption, region: &Region) {
let region_start_key = keys::enc_start_key(region);
let lower_bound = match iter_opt.lower_bound() {
Some(k) if !k.is_empty() => {
let k = keys::data_key(k);
cmp::max(k, region_start_key)
}
_ => region_start_key,
};
iter_opt.set_lower_bound(lower_bound);
}
fn set_upper_bound(iter_opt: &mut IterOption, region: &Region) {
let region_end_key = keys::enc_end_key(region);
let upper_bound = match iter_opt.upper_bound() {
Some(k) if !k.is_empty() => {
let k = keys::data_key(k);
cmp::min(k, region_end_key)
}
_ => region_end_key,
};
iter_opt.set_upper_bound(upper_bound);
}
// We use a rocksdb-style iterator, so there is no need to impl the std Iterator trait.
impl RegionIterator {
pub fn new(snap: &Snapshot, region: Arc<Region>, mut iter_opt: IterOption) -> RegionIterator {
set_lower_bound(&mut iter_opt, ®ion);
set_upper_bound(&mut iter_opt, ®ion);
let start_key = iter_opt.lower_bound().unwrap().to_vec();
let end_key = iter_opt.upper_bound().unwrap().to_vec();
let iter = snap.db_iterator(iter_opt);
RegionIterator {
iter,
valid: false,
start_key,
end_key,
region,
}
}
pub fn new_cf(
snap: &Snapshot,
region: Arc<Region>,
mut iter_opt: IterOption,
cf: &str,
) -> RegionIterator {
set_lower_bound(&mut iter_opt, ®ion);
set_upper_bound(&mut iter_opt, ®ion);
let start_key = iter_opt.lower_bound().unwrap().to_vec();
let end_key = iter_opt.upper_bound().unwrap().to_vec();
let iter = snap.db_iterator_cf(cf, iter_opt).unwrap();
RegionIterator {
iter,
valid: false,
start_key,
end_key,
region,
}
}
pub fn seek_to_first(&mut self) -> bool {
self.valid = self.iter.seek(self.start_key.as_slice().into());
self.update_valid(true)
}
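/// Checks whether the iterator's current key is still inside the region bounds:
/// after a forward move the key must be below `end_key`, after a backward move it
/// must not be below `start_key`.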
#[inline]
fn update_valid(&mut self, forward: bool) -> bool {
if self.valid {
let key = self.iter.key();
self.valid = if forward {
key < self.end_key.as_slice()
} else {
key >= self.start_key.as_slice()
};
}
self.valid
}
pub fn seek_to_last(&mut self) -> bool {
if !self.iter.seek(self.end_key.as_slice().into()) && !self.iter.seek(SeekKey::End) {
self.valid = false;
return self.valid;
}
while self.iter.key() >= self.end_key.as_slice() && self.iter.prev() {}
self.valid = self.iter.valid();
self.update_valid(false)
}
pub fn seek(&mut self, key: &[u8]) -> Result<bool> {
self.should_seekable(key)?;
let key = keys::data_key(key);
if key == self.end_key {
self.valid = false;
} else {
self.valid = self.iter.seek(key.as_slice().into());
}
Ok(self.update_valid(true))
}
pub fn seek_for_prev(&mut self, key: &[u8]) -> Result<bool> {
self.should_seekable(key)?;
let key = keys::data_key(key);
self.valid = self.iter.seek_for_prev(key.as_slice().into());
if self.valid && self.iter.key() == self.end_key.as_slice() {
self.valid = self.iter.prev();
}
Ok(self.update_valid(false))
}
pub fn | (&mut self) -> bool {
if !self.valid {
return false;
}
self.valid = self.iter.prev();
self.update_valid(false)
}
pub fn next(&mut self) -> bool {
if !self.valid {
return false;
}
self.valid = self.iter.next();
self.update_valid(true)
}
#[inline]
pub fn key(&self) -> &[u8] {
assert!(self.valid);
keys::origin_key(self.iter.key())
}
#[inline]
pub fn value(&self) -> &[u8] {
assert!(self.valid);
self.iter.value()
}
#[inline]
pub fn valid(&self) -> bool {
self.valid
}
#[inline]
pub fn should_seekable(&self, key: &[u8]) -> Result<()> {
if let Err(e) = util::check_key_in_region_inclusive(key, &self.region) {
KEY_NOT_IN_REGION.inc();
if panic_when_key_exceed_bound() {
set_panic_mark();
panic!("key exceed bound: {:?}", e);
} else {
return Err(e);
}
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::path::Path;
use std::sync::Arc;
use kvproto::metapb::{Peer, Region};
use rocksdb::Writable;
use tempdir::TempDir;
use crate::raftstore::store::engine::*;
use crate::raftstore::store::keys::*;
use crate::raftstore::store::{Engines, PeerStorage};
use crate::raftstore::Result;
use crate::storage::{CFStatistics, Cursor, Key, ScanMode, ALL_CFS, CF_DEFAULT};
use crate::util::rocksdb_util::{self, compact_files_in_range};
use crate::util::{escape, worker};
use super::*;
type DataSet = Vec<(Vec<u8>, Vec<u8>)>;
fn new_temp_engine(path: &TempDir) -> Engines {
let raft_path = path.path().join(Path::new("raft"));
Engines::new(
Arc::new(
rocksdb_util::new_engine(path.path().to_str().unwrap(), None, ALL_CFS, None)
.unwrap(),
),
Arc::new(
rocksdb_util::new_engine(raft_path.to_str().unwrap(), None, &[CF_DEFAULT], None)
.unwrap(),
),
)
}
fn new_peer_storage(engines: Engines, r: &Region) -> PeerStorage {
PeerStorage::new(engines, r, worker::dummy_scheduler(), "".to_owned()).unwrap()
}
fn load_default_dataset(engines: Engines) -> (PeerStorage, DataSet) {
let mut r = Region::new();
r.mut_peers().push(Peer::new());
r.set_id(10);
r.set_start_key(b"a2".to_vec());
r.set_end_key(b"a7".to_vec());
let base_data = vec![
(b"a1".to_vec(), b"v1".to_vec()),
(b"a3".to_vec(), b"v3".to_vec()),
(b"a5".to_vec(), b"v5".to_vec()),
(b"a7".to_vec(), b"v7".to_vec()),
(b"a9".to_vec(), b"v9".to_vec()),
];
for &(ref k, ref v) in &base_data {
engines.kv.put(&data_key(k), v).unwrap();
}
let store = new_peer_storage(engines, &r);
(store, base_data)
}
fn load_multiple_levels_dataset(engines: Engines) -> (PeerStorage, DataSet) {
let mut r = Region::new();
r.mut_peers().push(Peer::new());
r.set_id(10);
r.set_start_key(b"a04".to_vec());
r.set_end_key(b"a15".to_vec());
let levels = vec![
(b"a01".to_vec(), 1),
(b"a02".to_vec(), 5),
(b"a03".to_vec(), 3),
(b"a04".to_vec(), 4),
(b"a05".to_vec(), 1),
(b"a06".to_vec(), 2),
(b"a07".to_vec(), 2),
(b"a08".to_vec(), 5),
(b"a09".to_vec(), 6),
(b"a10".to_vec(), 0),
(b"a11".to_vec(), 1),
(b"a12".to_vec(), 4),
(b"a13".to_vec(), 2),
(b"a14".to_vec(), 5),
(b"a15".to_vec(), 3),
(b"a16".to_vec(), 2),
(b"a17".to_vec(), 1),
(b"a18".to_vec(), 0),
];
let mut data = vec![];
{
let db = &engines.kv;
for &(ref k, level) in &levels {
db.put(&data_key(k), k).unwrap();
db.flush(true).unwrap();
data.push((k.to_vec(), k.to_vec()));
compact_files_in_range(&db, Some(&data_key(k)), Some(&data_key(k)), Some(level))
.unwrap();
}
}
let store = new_peer_storage(engines, &r);
(store, data)
}
#[test]
fn test_peekable() {
let path = TempDir::new("test-raftstore").unwrap();
let engines = new_temp_engine(&path);
let mut r = Region::new();
r.set_id(10);
r.set_start_key(b"key0".to_vec());
r.set_end_key(b"key4".to_vec());
let store = new_peer_storage(engines.clone(), &r);
let (key1, value1) = (b"key1", 2u64);
engines.kv.put_u64(&data_key(key1), value1).expect("");
let (key2, value2) = (b"key2", 2i64);
engines.kv.put_i64(&data_key(key2), value2).expect("");
let key3 = b"key3";
engines.kv.put_msg(&data_key(key3), &r).expect("");
let snap = RegionSnapshot::new(&store);
let v1 = snap.get_u64(key1).expect("");
assert_eq!(v1, Some(value1));
let v2 = snap.get_i64(key2).expect("");
assert_eq!(v2, Some(value2));
let v3 = snap.get_msg(key3).expect("");
assert_eq!(v3, Some(r));
let v0 = snap.get_value(b"key0").expect("");
assert!(v0.is_none());
let v4 = snap.get_value(b"key5");
assert!(v4.is_err());
}
#[allow(clippy::type_complexity)]
#[test]
fn test_seek_and_seek_prev() {
let path = TempDir::new("test-raftstore").unwrap();
let engines = new_temp_engine(&path);
let (store, _) = load_default_dataset(engines.clone());
let snap = RegionSnapshot::new(&store);
let check_seek_result = |snap: &RegionSnapshot,
lower_bound: Option<&[u8]>,
upper_bound: Option<&[u8]>,
seek_table: &Vec<(
&[u8],
bool,
Option<(&[u8], &[u8])>,
Option<(&[u8], &[u8])>,
)>| {
let iter_opt = IterOption::new(
lower_bound.map(|v| v.to_vec()),
upper_bound.map(|v| v.to_vec()),
true,
);
let mut iter = snap.iter(iter_opt);
for (seek_key, in_range, seek_exp, prev_exp) in seek_table.clone() {
let check_res =
|iter: &RegionIterator, res: Result<bool>, exp: Option<(&[u8], &[u8])>| {
if !in_range {
assert!(res.is_err(), "exp failed at {}", escape(seek_key));
return;
}
if exp.is_none() {
assert!(!res.unwrap(), "exp none at {}", escape(seek_key));
return;
}
assert!(res.unwrap(), "should succeed at {}", escape(seek_key));
let (exp_key, exp_val) = exp.unwrap();
assert_eq!(iter.key(), exp_key);
assert_eq!(iter.value(), exp_val);
};
let seek_res = iter.seek(seek_key);
check_res(&iter, seek_res, seek_exp);
let prev_res = iter.seek_for_prev(seek_key);
check_res(&iter, prev_res, prev_exp);
}
};
let mut seek_table: Vec<(&[u8], bool, Option<(&[u8], &[u8])>, Option<(&[u8], &[u8])>)> = vec![
(b"a1", false, None, None),
(b"a2", true, Some((b"a3", b"v3")), None),
(b"a3", true, Some((b"a3", b"v3")), Some((b"a3", b"v3"))),
(b"a4", true, Some((b"a5", b"v5")), Some((b"a3", b"v3"))),
(b"a6", true, None, Some((b"a5", b"v5"))),
(b"a7", true, None, Some((b"a5", b"v5"))),
(b"a9", false, None, None),
];
check_seek_result(&snap, None, None, &seek_table);
check_seek_result(&snap, None, Some(b"a9"), &seek_table);
check_seek_result(&snap, Some(b"a1"), None, &seek_table);
check_seek_result(&snap, Some(b""), Some(b""), &seek_table);
check_seek_result(&snap, Some(b"a1"), Some(b"a9"), &seek_table);
check_seek_result(&snap, Some(b"a2"), Some(b"a9"), &seek_table);
check_seek_result(&snap, Some(b"a2"), Some(b"a7"), &seek_table);
check_seek_result(&snap, Some(b"a1"), Some(b"a7"), &seek_table);
seek_table = vec![
(b"a1", false, None, None),
(b"a2", true, None, None),
(b"a3", true, None, None),
(b"a4", true, None, None),
(b"a6", true, None, None),
(b"a7", true, None, None),
(b"a9", false, None, None),
];
check_seek_result(&snap, None, Some(b"a1"), &seek_table);
check_seek_result(&snap, Some(b"a8"), None, &seek_table);
check_seek_result(&snap, Some(b"a7"), Some(b"a2"), &seek_table);
let path = TempDir::new("test-raftstore").unwrap();
let engines = new_temp_engine(&path);
let (store, _) = load_multiple_levels_dataset(engines.clone());
let snap = RegionSnapshot::new(&store);
seek_table = vec![
(b"a01", false, None, None),
(b"a03", false, None, None),
(b"a05", true, Some((b"a05", b"a05")), Some((b"a05", b"a05"))),
(b"a10", true, Some((b"a10", b"a10")), Some((b"a10", b"a10"))),
(b"a14", true, Some((b"a14", b"a14")), Some((b"a14", b"a14"))),
(b"a15", true, None, Some((b"a14", b"a14"))),
(b"a18", false, None, None),
(b"a19", false, None, None),
];
check_seek_result(&snap, None, None, &seek_table);
check_seek_result(&snap, None, Some(b"a20"), &seek_table);
check_seek_result(&snap, Some(b"a00"), None, &seek_table);
check_seek_result(&snap, Some(b""), Some(b""), &seek_table);
check_seek_result(&snap, Some(b"a00"), Some(b"a20"), &seek_table);
check_seek_result(&snap, Some(b"a01"), Some(b"a20"), &seek_table);
check_seek_result(&snap, Some(b"a01"), Some(b"a15"), &seek_table);
check_seek_result(&snap, Some(b"a00"), Some(b"a15"), &seek_table);
}
#[allow(clippy::type_complexity)]
#[test]
fn test_iterate() {
let path = TempDir::new("test-raftstore").unwrap();
let engines = new_temp_engine(&path);
let (store, base_data) = load_default_dataset(engines.clone());
let snap = RegionSnapshot::new(&store);
let mut data = vec![];
snap.scan(b"a2", &[0xFF, 0xFF], false, |key, value| {
data.push((key.to_vec(), value.to_vec()));
Ok(true)
})
.unwrap();
assert_eq!(data.len(), 2);
assert_eq!(data, &base_data[1..3]);
data.clear();
snap.scan(b"a2", &[0xFF, 0xFF], false, |key, value| {
data.push((key.to_vec(), value.to_vec()));
Ok(false)
})
.unwrap();
assert_eq!(data.len(), 1);
let mut iter = snap.iter(IterOption::default());
assert!(iter.seek_to_first());
let mut res = vec![];
loop {
res.push((iter.key().to_vec(), iter.value().to_vec()));
if !iter.next() {
break;
}
}
assert_eq!(res, base_data[1..3].to_vec());
// test last region
let mut region = Region::new();
region.mut_peers().push(Peer::new());
let store = new_peer_storage(engines.clone(), ®ion);
let snap = RegionSnapshot::new(&store);
data.clear();
snap.scan(b"", &[0xFF, 0xFF], false, |key, value| {
data.push((key.to_vec(), value.to_vec()));
Ok(true)
})
.unwrap();
assert_eq!(data.len(), 5);
assert_eq!(data, base_data);
let mut iter = snap.iter(IterOption::default());
assert!(iter.seek(b"a1").unwrap());
assert!(iter.seek_to_first());
let mut res = vec![];
loop {
res.push((iter.key().to_vec(), iter.value().to_vec()));
if !iter.next() {
break;
}
}
assert_eq!(res, base_data);
// test iterator with upper bound
let store = new_peer_storage(engines, ®ion);
let snap = RegionSnapshot::new(&store);
let mut iter = snap.iter(IterOption::new(None, Some(b"a5".to_vec()), true));
assert!(iter.seek_to_first());
let mut res = vec![];
loop {
res.push((iter.key().to_vec(), iter.value().to_vec()));
if !iter.next() {
break;
}
}
assert_eq!(res, base_data[0..2].to_vec());
}
#[test]
fn test_reverse_iterate() {
let path = TempDir::new("test-raftstore").unwrap();
let engines = new_temp_engine(&path);
let (store, test_data) = load_default_dataset(engines.clone());
let snap = RegionSnapshot::new(&store);
let mut statistics = CFStatistics::default();
let it = snap.iter(IterOption::default());
let mut iter = Cursor::new(it, ScanMode::Mixed);
assert!(!iter
.reverse_seek(&Key::from_encoded_slice(b"a2"), &mut statistics)
.unwrap());
assert!(iter
.reverse_seek(&Key::from_encoded_slice(b"a7"), &mut statistics)
.unwrap());
let mut pair = (
iter.key(&mut statistics).to_vec(),
iter.value(&mut statistics).to_vec(),
);
assert_eq!(pair, (b"a5".to_vec(), b"v5".to_vec()));
assert!(iter
.reverse_seek(&Key::from_encoded_slice(b"a5"), &mut statistics)
.unwrap());
pair = (
iter.key(&mut statistics).to_vec(),
iter.value(&mut statistics).to_vec(),
);
assert_eq!(pair, (b"a3".to_vec(), b"v3".to_vec()));
assert!(!iter
.reverse_seek(&Key::from_encoded_slice(b"a3"), &mut statistics)
.unwrap());
assert!(iter
.reverse_seek(&Key::from_encoded_slice(b"a1"), &mut statistics)
.is_err());
assert!(iter
.reverse_seek(&Key::from_encoded_slice(b"a8"), &mut statistics)
.is_err());
assert!(iter.seek_to_last(&mut statistics));
let mut res = vec![];
loop {
res.push((
iter.key(&mut statistics).to_vec(),
iter.value(&mut statistics).to_vec(),
));
if !iter.prev(&mut statistics) {
break;
}
}
let mut expect = test_data[1..3].to_vec();
expect.reverse();
assert_eq!(res, expect);
// test last region
let mut region = Region::new();
region.mut_peers().push(Peer::new());
let store = new_peer_storage(engines, ®ion);
let snap = RegionSnapshot::new(&store);
let it = snap.iter(IterOption::default());
let mut iter = Cursor::new(it, ScanMode::Mixed);
assert!(!iter
.reverse_seek(&Key::from_encoded_slice(b"a1"), &mut statistics)
.unwrap());
assert!(iter
.reverse_seek(&Key::from_encoded_slice(b"a2"), &mut statistics)
.unwrap());
let pair = (
iter.key(&mut statistics).to_vec(),
iter.value(&mut statistics).to_vec(),
);
assert_eq!(pair, (b"a1".to_vec(), b"v1".to_vec()));
for kv_pairs in test_data.windows(2) {
let seek_key = Key::from_encoded(kv_pairs[1].0.clone());
assert!(
iter.reverse_seek(&seek_key, &mut statistics).unwrap(),
"{}",
seek_key
);
let pair = (
iter.key(&mut statistics).to_vec(),
iter.value(&mut statistics).to_vec(),
);
assert_eq!(pair, kv_pairs[0]);
}
assert!(iter.seek_to_last(&mut statistics));
let mut res = vec![];
loop {
res.push((
iter.key(&mut statistics).to_vec(),
iter.value(&mut statistics).to_vec(),
));
if !iter.prev(&mut statistics) {
break;
}
}
let mut expect = test_data.clone();
expect.reverse();
assert_eq!(res, expect);
}
#[test]
fn test_reverse_iterate_with_lower_bound() {
let path = TempDir::new("test-raftstore").unwrap();
let engines = new_temp_engine(&path);
let (store, test_data) = load_default_dataset(engines);
let snap = RegionSnapshot::new(&store);
let mut iter_opt = IterOption::default();
iter_opt.set_lower_bound(b"a3".to_vec());
let mut iter = snap.iter(iter_opt);
assert!(iter.seek_to_last());
let mut res = vec![];
loop {
res.push((iter.key().to_vec(), iter.value().to_vec()));
if !iter.prev() {
break;
}
}
res.sort();
assert_eq!(res, test_data[1..3].to_vec());
}
}
| prev |
http-server.rs | #[macro_use] extern crate log;
extern crate http;
extern crate mio;
use http::HttpServer;
use mio::{EventLoop, EventSet, Token, Handler};
use log::{Log, LogRecord, LogLevel, LogMetadata, SetLoggerError, LogLevelFilter};
// Log everything at info level or above
struct InfoLogger;
impl InfoLogger {
pub fn init() -> Result<(), SetLoggerError> {
log::set_logger(|max_log_level| {
max_log_level.set(LogLevelFilter::Info);
Box::new(InfoLogger)
})
}
}
impl Log for InfoLogger {
fn enabled(&self, metadata: &LogMetadata) -> bool |
fn log(&self, record: &LogRecord) {
if self.enabled(record.metadata()) {
println!("{} - {}", record.level(), record.args());
}
}
}
const TOK_SERVER: Token = Token(0);
struct MyHandler(HttpServer);
impl Handler for MyHandler {
type Timeout = ();
type Message = ();
fn ready(&mut self, event_loop: &mut EventLoop<MyHandler>,
token: Token, _: EventSet) {
match token {
TOK_SERVER => {
let MyHandler(ref mut server) = *self;
match server.accept() {
Ok(Some(con)) => {
info!("Accepted connection from {}",
con.peer_addr().unwrap());
event_loop.shutdown();
},
Ok(None) => {
info!("Not actually ready");
},
Err(e) => {
error!("{:?}", e);
},
}
},
_ => panic!("Unrecognized token!"),
}
}
}
fn main() {
InfoLogger::init().unwrap();
let mut event_loop = mio::EventLoop::new().unwrap();
let server = http::HttpServer::bind("localhost:8080").unwrap();
match server.local_addr() {
Ok(addr) => { info!("Listening on {}", addr); },
Err(err) => {
warn!("{}", err);
info!("Listening on unknown address");
},
}
server.register_self(&mut event_loop, TOK_SERVER).unwrap();
event_loop.run(&mut MyHandler(server)).unwrap();
}
| {
metadata.level() <= LogLevel::Info
} |
Portal.d.ts | import * as React from 'react'
import { default as PortalInner } from 'semantic-ui-react/dist/commonjs/addons/Portal/PortalInner'
export interface PortalProps extends StrictPortalProps {
[key: string]: any
}
export interface StrictPortalProps {
/** Primary content. */
children?: React.ReactNode
/** Controls whether or not the portal should close on a click outside. */
closeOnDocumentClick?: boolean
/** Controls whether or not the portal should close when Escape is pressed while it is displayed. */
closeOnEscape?: boolean
/**
* Controls whether or not the portal should close when mousing out of the portal.
* NOTE: This will prevent `closeOnTriggerMouseLeave` when mousing over the
* gap from the trigger to the portal.
*/
closeOnPortalMouseLeave?: boolean
/** Controls whether or not the portal should close on blur of the trigger. */
closeOnTriggerBlur?: boolean
/** Controls whether or not the portal should close on click of the trigger. */
closeOnTriggerClick?: boolean
/** Controls whether or not the portal should close when mousing out of the trigger. */
closeOnTriggerMouseLeave?: boolean
/** Initial value of open. */
defaultOpen?: boolean
/** Event pool namespace that is used to handle component events. */
eventPool?: string
/** The node where the portal should mount. */
mountNode?: any
/** Milliseconds to wait before opening on mouse over */
mouseEnterDelay?: number
/** Milliseconds to wait before closing on mouse leave */
mouseLeaveDelay?: number
/**
* Called when a close event happens
*
* @param {SyntheticEvent} event - React's original SyntheticEvent.
* @param {object} data - All props.
*/
onClose?: (event: React.MouseEvent<HTMLElement>, data: PortalProps) => void
/**
* Called when the portal is mounted on the DOM
*
* @param {null}
* @param {object} data - All props.
*/
onMount?: (nothing: null, data: PortalProps) => void
/**
* Called when an open event happens
*
* @param {SyntheticEvent} event - React's original SyntheticEvent.
* @param {object} data - All props.
*/
onOpen?: (event: React.MouseEvent<HTMLElement>, data: PortalProps) => void
/**
* Called when the portal is unmounted from the DOM
*
* @param {null}
* @param {object} data - All props.
*/
onUnmount?: (nothing: null, data: PortalProps) => void
/** Controls whether or not the portal is displayed. */
open?: boolean
/** Controls whether or not the portal should open when the trigger is clicked. */
openOnTriggerClick?: boolean
/** Controls whether or not the portal should open on focus of the trigger. */
openOnTriggerFocus?: boolean
/** Controls whether or not the portal should open when mousing over the trigger. */
openOnTriggerMouseEnter?: boolean
/** Element to be rendered in-place where the portal is defined. */
trigger?: React.ReactNode
/** Called with a ref to the trigger node. */
triggerRef?: React.Ref<any>
}
declare class | extends React.Component<PortalProps, {}> {
static Inner: typeof PortalInner
}
export default Portal
| Portal |
net.py | from typing import Dict, List, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from .module import ConvBnReLU, depth_regression
from .patchmatch import PatchMatch
class FeatureNet(nn.Module):
"""Feature Extraction Network: to extract features of original images from each view"""
def __init__(self):
"""Initialize different layers in the network"""
super(FeatureNet, self).__init__()
self.conv0 = ConvBnReLU(3, 8, 3, 1, 1)
# [B,8,H,W]
self.conv1 = ConvBnReLU(8, 8, 3, 1, 1)
# [B,16,H/2,W/2]
self.conv2 = ConvBnReLU(8, 16, 5, 2, 2)
self.conv3 = ConvBnReLU(16, 16, 3, 1, 1)
self.conv4 = ConvBnReLU(16, 16, 3, 1, 1)
# [B,32,H/4,W/4]
self.conv5 = ConvBnReLU(16, 32, 5, 2, 2)
self.conv6 = ConvBnReLU(32, 32, 3, 1, 1)
self.conv7 = ConvBnReLU(32, 32, 3, 1, 1)
# [B,64,H/8,W/8]
self.conv8 = ConvBnReLU(32, 64, 5, 2, 2)
self.conv9 = ConvBnReLU(64, 64, 3, 1, 1)
self.conv10 = ConvBnReLU(64, 64, 3, 1, 1)
self.output1 = nn.Conv2d(64, 64, 1, bias=False)
self.inner1 = nn.Conv2d(32, 64, 1, bias=True)
self.inner2 = nn.Conv2d(16, 64, 1, bias=True)
self.output2 = nn.Conv2d(64, 32, 1, bias=False)
self.output3 = nn.Conv2d(64, 16, 1, bias=False)
def forward(self, x: torch.Tensor) -> Dict[int, torch.Tensor]:
"""Forward method
Args:
x: images from a single view, in the shape of [B, C, H, W]. Generally, C=3
Returns:
output_feature: a Python dictionary containing the extracted features from stage 1 to stage 3;
keys are 1, 2, and 3
"""
output_feature: Dict[int, torch.Tensor] = {}
conv1 = self.conv1(self.conv0(x))
conv4 = self.conv4(self.conv3(self.conv2(conv1)))
conv7 = self.conv7(self.conv6(self.conv5(conv4)))
conv10 = self.conv10(self.conv9(self.conv8(conv7)))
output_feature[3] = self.output1(conv10)
intra_feat = F.interpolate(conv10, scale_factor=2.0, mode="bilinear", align_corners=False) + self.inner1(conv7)
del conv7
del conv10
output_feature[2] = self.output2(intra_feat)
intra_feat = F.interpolate(
intra_feat, scale_factor=2.0, mode="bilinear", align_corners=False) + self.inner2(conv4)
del conv4
output_feature[1] = self.output3(intra_feat)
del intra_feat
return output_feature
class Refinement(nn.Module):
"""Depth map refinement network"""
def __init__(self):
"""Initialize"""
super(Refinement, self).__init__()
# img: [B,3,H,W]
self.conv0 = ConvBnReLU(in_channels=3, out_channels=8)
# depth map:[B,1,H/2,W/2]
self.conv1 = ConvBnReLU(in_channels=1, out_channels=8)
self.conv2 = ConvBnReLU(in_channels=8, out_channels=8)
self.deconv = nn.ConvTranspose2d(
in_channels=8, out_channels=8, kernel_size=3, padding=1, output_padding=1, stride=2, bias=False
)
self.bn = nn.BatchNorm2d(8)
self.conv3 = ConvBnReLU(in_channels=16, out_channels=8)
self.res = nn.Conv2d(in_channels=8, out_channels=1, kernel_size=3, padding=1, bias=False)
def | (
self, img: torch.Tensor, depth_0: torch.Tensor, depth_min: torch.Tensor, depth_max: torch.Tensor
) -> torch.Tensor:
"""Forward method
Args:
img: input reference images (B, 3, H, W)
depth_0: current depth map (B, 1, H//2, W//2)
depth_min: pre-defined minimum depth (B, )
depth_max: pre-defined maximum depth (B, )
Returns:
depth: refined depth map (B, 1, H, W)
"""
batch_size = depth_min.size()[0]
# pre-scale the depth map into [0,1]
depth = (depth_0 - depth_min.view(batch_size, 1, 1, 1)) / (depth_max - depth_min).view(batch_size, 1, 1, 1)
conv0 = self.conv0(img)
deconv = F.relu(self.bn(self.deconv(self.conv2(self.conv1(depth)))), inplace=True)
# depth residual
res = self.res(self.conv3(torch.cat((deconv, conv0), dim=1)))
del conv0
del deconv
depth = F.interpolate(depth, scale_factor=2.0, mode="nearest") + res
# convert the normalized depth back
return depth * (depth_max - depth_min).view(batch_size, 1, 1, 1) + depth_min.view(batch_size, 1, 1, 1)
class PatchmatchNet(nn.Module):
""" Implementation of complete structure of PatchmatchNet"""
def __init__(
self,
patchmatch_interval_scale: List[float],
propagation_range: List[int],
patchmatch_iteration: List[int],
patchmatch_num_sample: List[int],
propagate_neighbors: List[int],
evaluate_neighbors: List[int],
) -> None:
"""Initialize modules in PatchmatchNet
Args:
patchmatch_interval_scale: depth interval scale in patchmatch module
propagation_range: propagation range
patchmatch_iteration: patchmatch iteration number
patchmatch_num_sample: patchmatch number of samples
propagate_neighbors: number of propagation neighbors
evaluate_neighbors: number of propagation neighbors for evaluation
"""
super(PatchmatchNet, self).__init__()
self.stages = 4
self.feature = FeatureNet()
self.patchmatch_num_sample = patchmatch_num_sample
num_features = [16, 32, 64]
self.propagate_neighbors = propagate_neighbors
self.evaluate_neighbors = evaluate_neighbors
# number of groups for group-wise correlation
self.G = [4, 8, 8]
for i in range(self.stages - 1):
patchmatch = PatchMatch(
propagation_out_range=propagation_range[i],
patchmatch_iteration=patchmatch_iteration[i],
patchmatch_num_sample=patchmatch_num_sample[i],
patchmatch_interval_scale=patchmatch_interval_scale[i],
num_feature=num_features[i],
G=self.G[i],
propagate_neighbors=self.propagate_neighbors[i],
evaluate_neighbors=evaluate_neighbors[i],
stage=i + 1,
)
setattr(self, f"patchmatch_{i+1}", patchmatch)
self.upsample_net = Refinement()
def forward(
self,
images: List[torch.Tensor],
intrinsics: torch.Tensor,
extrinsics: torch.Tensor,
depth_min: torch.Tensor,
depth_max: torch.Tensor,
) -> Tuple[torch.Tensor, torch.Tensor, Dict[int, List[torch.Tensor]]]:
"""Forward method for PatchMatchNet
Args:
images: N images (B, 3, H, W) stored in list
intrinsics: intrinsic 3x3 matrices for all images (B, N, 3, 3)
extrinsics: extrinsic 4x4 matrices for all images (B, N, 4, 4)
depth_min: minimum virtual depth (B, 1)
depth_max: maximum virtual depth (B, 1)
Returns:
output tuple of PatchMatchNet, containing refined depthmap, depth patchmatch, and photometric confidence.
"""
assert len(images) == intrinsics.size()[1], "Different number of images and intrinsic matrices"
assert len(images) == extrinsics.size()[1], 'Different number of images and extrinsic matrices'
images, intrinsics, orig_height, orig_width = adjust_image_dims(images, intrinsics)
ref_image = images[0]
_, _, ref_height, ref_width = ref_image.size()
# step 1. Multi-scale feature extraction
features: List[Dict[int, torch.Tensor]] = []
for img in images:
output_feature = self.feature(img)
features.append(output_feature)
del images
ref_feature, src_features = features[0], features[1:]
depth_min = depth_min.float()
depth_max = depth_max.float()
# step 2. Learning-based patchmatch
device = intrinsics.device
depth = torch.empty(0, device=device)
depths: List[torch.Tensor] = []
score = torch.empty(0, device=device)
view_weights = torch.empty(0, device=device)
depth_patchmatch: Dict[int, List[torch.Tensor]] = {}
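# stage-3 features are at 1/8 of the input resolution; the scale doubles for each finer stage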
scale = 0.125
for stage in range(self.stages - 1, 0, -1):
src_features_l = [src_fea[stage] for src_fea in src_features]
# Create projection matrix for specific stage
intrinsics_l = intrinsics.clone()
intrinsics_l[:, :, :2] *= scale
proj = extrinsics.clone()
proj[:, :, :3, :4] = torch.matmul(intrinsics_l, extrinsics[:, :, :3, :4])
proj_l = torch.unbind(proj, 1)
ref_proj, src_proj = proj_l[0], proj_l[1:]
scale *= 2.0
# Need conditional since TorchScript only allows "getattr" access with string literals
if stage == 3:
depths, score, view_weights = self.patchmatch_3(
ref_feature=ref_feature[stage],
src_features=src_features_l,
ref_proj=ref_proj,
src_projs=src_proj,
depth_min=depth_min,
depth_max=depth_max,
depth=depth,
view_weights=view_weights,
)
elif stage == 2:
depths, score, view_weights = self.patchmatch_2(
ref_feature=ref_feature[stage],
src_features=src_features_l,
ref_proj=ref_proj,
src_projs=src_proj,
depth_min=depth_min,
depth_max=depth_max,
depth=depth,
view_weights=view_weights,
)
elif stage == 1:
depths, score, view_weights = self.patchmatch_1(
ref_feature=ref_feature[stage],
src_features=src_features_l,
ref_proj=ref_proj,
src_projs=src_proj,
depth_min=depth_min,
depth_max=depth_max,
depth=depth,
view_weights=view_weights,
)
depth_patchmatch[stage] = depths
depth = depths[-1].detach()
if stage > 1:
# upsampling the depth map and pixel-wise view weight for next stage
depth = F.interpolate(depth, scale_factor=2.0, mode="nearest")
view_weights = F.interpolate(view_weights, scale_factor=2.0, mode="nearest")
del ref_feature
del src_features
# step 3. Refinement
depth = self.upsample_net(ref_image, depth, depth_min, depth_max)
if ref_width != orig_width or ref_height != orig_height:
depth = F.interpolate(depth, size=[orig_height, orig_width], mode='bilinear', align_corners=False)
depth_patchmatch[0] = [depth]
if self.training:
return depth, torch.empty(0, device=device), depth_patchmatch
else:
num_depth = self.patchmatch_num_sample[0]
score_sum4 = 4 * F.avg_pool3d(
F.pad(score.unsqueeze(1), pad=(0, 0, 0, 0, 1, 2)), (4, 1, 1), stride=1, padding=0
).squeeze(1)
# [B, 1, H, W]
depth_index = depth_regression(
score, depth_values=torch.arange(num_depth, device=score.device, dtype=torch.float)
).long().clamp(0, num_depth - 1)
photometric_confidence = torch.gather(score_sum4, 1, depth_index)
photometric_confidence = F.interpolate(
photometric_confidence, size=[orig_height, orig_width], mode="nearest").squeeze(1)
return depth, photometric_confidence, depth_patchmatch
def adjust_image_dims(
images: List[torch.Tensor], intrinsics: torch.Tensor) -> Tuple[List[torch.Tensor], torch.Tensor, int, int]:
# stretch or compress image slightly to ensure width and height are multiples of 8
_, _, ref_height, ref_width = images[0].size()
for i in range(len(images)):
_, _, height, width = images[i].size()
new_height = int(round(height / 8)) * 8
new_width = int(round(width / 8)) * 8
if new_width != width or new_height != height:
intrinsics[:, i, 0] *= new_width / width
intrinsics[:, i, 1] *= new_height / height
images[i] = nn.functional.interpolate(
images[i], size=[new_height, new_width], mode='bilinear', align_corners=False)
return images, intrinsics, ref_height, ref_width
def patchmatchnet_loss(
depth_patchmatch: Dict[int, List[torch.Tensor]],
depth_gt: List[torch.Tensor],
mask: List[torch.Tensor],
) -> torch.Tensor:
"""Patchmatch Net loss function
Args:
depth_patchmatch: depth map predicted by patchmatch net
depth_gt: ground truth depth map
mask: mask for filter valid points
Returns:
loss: result loss value
"""
loss = 0
for i in range(0, 4):
gt_depth = depth_gt[i][mask[i]]
for depth in depth_patchmatch[i]:
loss = loss + F.smooth_l1_loss(depth[mask[i]], gt_depth, reduction="mean")
return loss
| forward |
template.go | package prepared_query
import (
"fmt"
"reflect"
"regexp"
"strings"
"github.com/hashicorp/consul/agent/consul/structs"
"github.com/hashicorp/hil"
"github.com/hashicorp/hil/ast"
"github.com/mitchellh/copystructure"
)
// IsTemplate returns true if the given query is a template.
func IsTemplate(query *structs.PreparedQuery) bool {
return query.Template.Type != ""
}
// CompiledTemplate is an opaque object that can be used later to render a
// prepared query template.
type CompiledTemplate struct {
// query keeps a copy of the original query for rendering.
query *structs.PreparedQuery
// trees maps paths of string fields in the structure to their
// parsed syntax trees, suitable for later evaluation.
trees map[string]ast.Node
// re is the compiled regexp, if they supplied one (this can be nil).
re *regexp.Regexp
}
// Compile validates a prepared query template and returns an opaque compiled
// object that can be used later to render the template.
func Compile(query *structs.PreparedQuery) (*CompiledTemplate, error) {
// Make sure it's a type we understand.
if query.Template.Type != structs.QueryTemplateTypeNamePrefixMatch {
return nil, fmt.Errorf("Bad Template.Type '%s'", query.Template.Type)
}
// Start compile.
ct := &CompiledTemplate{
trees: make(map[string]ast.Node),
}
// Make a copy of the query to use as the basis for rendering later.
dup, err := copystructure.Copy(query)
if err != nil {
return nil, err
}
var ok bool
ct.query, ok = dup.(*structs.PreparedQuery)
if !ok {
return nil, fmt.Errorf("Failed to copy query")
}
// Walk over all the string fields in the Service sub-structure and
// parse them as HIL.
parse := func(path string, v reflect.Value) error {
tree, err := hil.Parse(v.String())
if err != nil {
return fmt.Errorf("Bad format '%s' in Service%s: %s", v.String(), path, err)
}
ct.trees[path] = tree
return nil
}
if err := walk(&ct.query.Service, parse); err != nil {
return nil, err
}
// If they supplied a regexp then compile it.
if ct.query.Template.Regexp != "" {
var err error
ct.re, err = regexp.Compile(ct.query.Template.Regexp)
if err != nil |
}
// Finally do a test render with the supplied name prefix. This will
// help catch errors before run time, and this is the most minimal
// prefix it will be expected to run with. The results might not make
// sense or yield a valid service to look up, but it should render
// without any errors.
if _, err = ct.Render(ct.query.Name); err != nil {
return nil, err
}
return ct, nil
}
// Render takes a compiled template and renders it for the given name. For
// example, if the user looks up foobar.query.consul via DNS then we will call
// this function with "foobar" on the compiled template.
func (ct *CompiledTemplate) Render(name string) (*structs.PreparedQuery, error) {
// Make it "safe" to render a default structure.
if ct == nil {
return nil, fmt.Errorf("Cannot render an uncompiled template")
}
// Start with a fresh, detached copy of the original so we don't disturb
// the prototype.
dup, err := copystructure.Copy(ct.query)
if err != nil {
return nil, err
}
query, ok := dup.(*structs.PreparedQuery)
if !ok {
return nil, fmt.Errorf("Failed to copy query")
}
// Run the regular expression, if provided. We execute on a copy here
// to avoid internal lock contention because we expect this to be called
// from multiple goroutines.
var matches []string
if ct.re != nil {
re := ct.re.Copy()
matches = re.FindStringSubmatch(name)
}
// Create a safe match function that can't fail at run time. It will
// return an empty string for any invalid input.
match := ast.Function{
ArgTypes: []ast.Type{ast.TypeInt},
ReturnType: ast.TypeString,
Variadic: false,
Callback: func(inputs []interface{}) (interface{}, error) {
i, ok := inputs[0].(int)
if ok && i >= 0 && i < len(matches) {
return matches[i], nil
}
return "", nil
},
}
// Build up the HIL evaluation context.
config := &hil.EvalConfig{
GlobalScope: &ast.BasicScope{
VarMap: map[string]ast.Variable{
"name.full": ast.Variable{
Type: ast.TypeString,
Value: name,
},
"name.prefix": ast.Variable{
Type: ast.TypeString,
Value: query.Name,
},
"name.suffix": ast.Variable{
Type: ast.TypeString,
Value: strings.TrimPrefix(name, query.Name),
},
},
FuncMap: map[string]ast.Function{
"match": match,
},
},
}
// Run through the Service sub-structure and evaluate all the strings
// as HIL.
eval := func(path string, v reflect.Value) error {
tree, ok := ct.trees[path]
if !ok {
return nil
}
res, err := hil.Eval(tree, config)
if err != nil {
return fmt.Errorf("Bad evaluation for '%s' in Service%s: %s", v.String(), path, err)
}
if res.Type != hil.TypeString {
return fmt.Errorf("Expected Service%s field to be a string, got %s", path, res.Type)
}
v.SetString(res.Value.(string))
return nil
}
if err := walk(&query.Service, eval); err != nil {
return nil, err
}
return query, nil
}
| {
return nil, fmt.Errorf("Bad Regexp: %s", err)
} |
version.js | 'use strict';
angular.module('eventsApp.version', [ |
.value('version', '0.1'); | 'eventsApp.version.interpolate-filter',
'eventsApp.version.version-directive'
]) |
Combo_EngineList.py | # -*- coding: UTF-8 -*-
import os, sqlite3
from PySide2.QtWidgets import QComboBox
from moduels.component.NormalValue import 常量
# Add-preset dialog
class Combo_EngineList(QComboBox):
def __init__(self):
super().__init__()
self.initElements() # First initialize the individual widgets
self.initSlots() # Then connect the widgets to their signal slots
self.initLayouts() # Then do the layout
self.initValues() # Finally define the widgets' values
def initElements(self):
pass
def initSlots(self):
pass
def initLayouts(self):
pass
def initValues(self):
self.初始化列表()
def mousePressEvent(self, e):
self.列表更新()
self.showPopup()
def 初始化列表(self):
| = []
数据库连接 = 常量.数据库连接
cursor = 数据库连接.cursor()
result = cursor.execute(f'''select 引擎名称 from {常量.语音引擎表单名} order by id;''').fetchall()
if len(result) != 0:
for item in result:
self.列表项.append(item[0])
self.addItems(self.列表项)
# if not os.path.exists(常量.音效文件路径): os.makedirs(常量.音效文件路径)
# with os.scandir(常量.音效文件路径) as 目录条目:
# for entry in 目录条目:
# if not entry.name.startswith('.') and entry.is_dir():
# self.列表项.append(entry.name)
def 列表更新(self):
新列表 = []
数据库连接 = 常量.数据库连接
cursor = 数据库连接.cursor()
result = cursor.execute(f'''select 引擎名称 from {常量.语音引擎表单名} order by id;''').fetchall()
if len(result) != 0:
for item in result:
新列表.append(item[0])
if self.列表项 == 新列表: return True
self.clear()
self.列表项 = 新列表
self.addItems(self.列表项)
| self.列表项 |
backend.go | // Copyright 2016-2018, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package httpstate
import (
"context"
cryptorand "crypto/rand"
"encoding/hex"
"fmt"
"io"
"net"
"net/http"
"net/url"
"os"
"path"
"regexp"
"strconv"
"strings"
"time"
"github.com/opentracing/opentracing-go"
"github.com/pkg/errors"
"github.com/skratchdot/open-golang/open"
"github.com/pulumi/pulumi/pkg/apitype"
"github.com/pulumi/pulumi/pkg/backend"
"github.com/pulumi/pulumi/pkg/backend/display"
"github.com/pulumi/pulumi/pkg/backend/filestate"
"github.com/pulumi/pulumi/pkg/backend/httpstate/client"
"github.com/pulumi/pulumi/pkg/diag"
"github.com/pulumi/pulumi/pkg/diag/colors"
"github.com/pulumi/pulumi/pkg/engine"
"github.com/pulumi/pulumi/pkg/operations"
"github.com/pulumi/pulumi/pkg/resource"
"github.com/pulumi/pulumi/pkg/resource/config"
"github.com/pulumi/pulumi/pkg/resource/deploy"
"github.com/pulumi/pulumi/pkg/tokens"
"github.com/pulumi/pulumi/pkg/util/cmdutil"
"github.com/pulumi/pulumi/pkg/util/contract"
"github.com/pulumi/pulumi/pkg/util/logging"
"github.com/pulumi/pulumi/pkg/util/result"
"github.com/pulumi/pulumi/pkg/util/retry"
"github.com/pulumi/pulumi/pkg/workspace"
)
const (
// PulumiCloudURL is the Cloud URL used if no environment or explicit cloud is chosen.
PulumiCloudURL = "https://" + defaultAPIDomainPrefix + "pulumi.com"
// defaultAPIDomainPrefix is the assumed Cloud URL prefix for typical Pulumi Cloud API endpoints.
defaultAPIDomainPrefix = "api."
// defaultConsoleDomainPrefix is the assumed Cloud URL prefix typically used for the Pulumi Console.
defaultConsoleDomainPrefix = "app."
// defaultAPIEnvVar can be set to override the default cloud chosen, if `--cloud` is not present.
defaultURLEnvVar = "PULUMI_API"
// AccessTokenEnvVar is the environment variable used to bypass a prompt on login.
AccessTokenEnvVar = "PULUMI_ACCESS_TOKEN"
)
// DefaultURL returns the default cloud URL. This may be overridden using the PULUMI_API environment
// variable. If no override is found, and we are authenticated with a cloud, choose that. Otherwise,
// we will default to the https://api.pulumi.com/ endpoint.
func DefaultURL() string {
return ValueOrDefaultURL("")
}
// ValueOrDefaultURL returns the value if specified, or the default cloud URL otherwise.
func ValueOrDefaultURL(cloudURL string) string {
// If we have a cloud URL, just return it.
if cloudURL != "" {
return cloudURL
}
// Otherwise, respect the PULUMI_API override.
if cloudURL := os.Getenv(defaultURLEnvVar); cloudURL != "" {
return cloudURL
}
// If that didn't work, see if we have a current cloud, and use that. Note we need to be careful
// to ignore the local cloud.
if creds, err := workspace.GetStoredCredentials(); err == nil {
if creds.Current != "" && !filestate.IsFileStateBackendURL(creds.Current) {
return creds.Current
}
}
// If none of those led to a cloud URL, simply return the default.
return PulumiCloudURL
}
// Backend extends the base backend interface with specific information about cloud backends.
type Backend interface {
backend.Backend
CloudURL() string
CancelCurrentUpdate(ctx context.Context, stackRef backend.StackReference) error
StackConsoleURL(stackRef backend.StackReference) (string, error)
Client() *client.Client
}
type cloudBackend struct {
d diag.Sink
url string
client *client.Client
currentProject *workspace.Project
}
// New creates a new Pulumi backend for the given cloud API URL and token.
func New(d diag.Sink, cloudURL string) (Backend, error) {
cloudURL = ValueOrDefaultURL(cloudURL)
apiToken, err := workspace.GetAccessToken(cloudURL)
if err != nil {
return nil, errors.Wrap(err, "getting stored credentials")
}
// When stringifying backend references, we take the current project (if present) into account.
currentProject, err := workspace.DetectProject()
if err != nil {
currentProject = nil
}
return &cloudBackend{
d: d,
url: cloudURL,
client: client.NewClient(cloudURL, apiToken, d),
currentProject: currentProject,
}, nil
}
// loginWithBrowser uses a web-browser to log into the cloud and returns the cloud backend for it.
func loginWithBrowser(ctx context.Context, d diag.Sink, cloudURL string) (Backend, error) {
// Locally, we generate a nonce and spin up a web server listening on a random port on localhost. We then open a
// browser to a special endpoint on the Pulumi.com console, passing the generated nonce as well as the port of the
// webserver we launched. This endpoint does the OAuth flow and when it completes, redirects to localhost passing
// the nonce and the pulumi access token we created as part of the OAuth flow. If the nonces match, we set the
// access token that was passed to us and then redirect to a special welcome page on Pulumi.com
loginURL := cloudConsoleURL(cloudURL, "cli-login")
finalWelcomeURL := cloudConsoleURL(cloudURL, "welcome", "cli")
if loginURL == "" || finalWelcomeURL == "" {
return nil, errors.New("could not determine login url")
}
// Listen on localhost, have the kernel pick a random port for us
c := make(chan string)
l, err := net.Listen("tcp", "127.0.0.1:")
if err != nil {
return nil, errors.Wrap(err, "could not start listener")
}
// Extract the port
_, port, err := net.SplitHostPort(l.Addr().String())
if err != nil {
return nil, errors.Wrap(err, "could not determine port")
}
// Generate a nonce we'll send with the request.
nonceBytes := make([]byte, 32)
_, err = cryptorand.Read(nonceBytes)
contract.AssertNoErrorf(err, "could not get random bytes")
nonce := hex.EncodeToString(nonceBytes)
u, err := url.Parse(loginURL)
contract.AssertNoError(err)
// Generate a description to associate with the access token we'll generate, for display on the Account Settings
// page.
var tokenDescription string
if host, hostErr := os.Hostname(); hostErr == nil {
tokenDescription = fmt.Sprintf("Generated by pulumi login on %s at %s", host, time.Now().Format(time.RFC822))
} else {
tokenDescription = fmt.Sprintf("Generated by pulumi login at %s", time.Now().Format(time.RFC822))
}
// Pass our state around as query parameters on the URL we'll open the user's preferred browser to
q := u.Query()
q.Add("cliSessionPort", port)
q.Add("cliSessionNonce", nonce)
q.Add("cliSessionDescription", tokenDescription)
u.RawQuery = q.Encode()
// Start the webserver that listens for and handles the response
go serveBrowserLoginServer(l, nonce, finalWelcomeURL, c)
// Launch the web browser and navigate to the login URL.
if openErr := open.Run(u.String()); openErr != nil {
fmt.Printf("We couldn't launch your web browser for some reason. Please visit:\n\n%s\n\n"+
"to finish the login process.", u)
} else {
fmt.Println("We've launched your web browser to complete the login process.")
}
fmt.Println("\nWaiting for login to complete...")
accessToken := <-c
// Save the token and return the backend
if err = workspace.StoreAccessToken(cloudURL, accessToken, true); err != nil {
return nil, err
}
return New(d, cloudURL)
}
// Login logs into the target cloud URL and returns the cloud backend for it.
func Login(ctx context.Context, d diag.Sink, cloudURL string, opts display.Options) (Backend, error) {
cloudURL = ValueOrDefaultURL(cloudURL)
// If we have a saved access token, and it is valid, use it.
existingToken, err := workspace.GetAccessToken(cloudURL)
if err == nil && existingToken != "" {
if valid, _ := IsValidAccessToken(ctx, cloudURL, existingToken); valid {
// Save the token. While it hasn't changed, this also updates the current cloud we are logged into.
if err = workspace.StoreAccessToken(cloudURL, existingToken, true); err != nil {
return nil, err
}
return New(d, cloudURL)
}
}
// We intentionally don't accept command-line args for the user's access token. Having it in
// .bash_history is not great, and specifying it via flag isn't of much use.
accessToken := os.Getenv(AccessTokenEnvVar)
accountLink := cloudConsoleURL(cloudURL, "account", "tokens")
if accessToken != "" {
// If there's already a token from the environment, use it.
_, err = fmt.Fprintf(os.Stderr, "Logging in using access token from %s\n", AccessTokenEnvVar)
contract.IgnoreError(err)
} else if !cmdutil.Interactive() {
// If interactive mode isn't enabled, the only way to specify a token is through the environment variable.
// Fail the attempt to login.
return nil, errors.Errorf(
"%s must be set for login during non-interactive CLI sessions", AccessTokenEnvVar)
} else {
// If no access token is available from the environment, and we are interactive, prompt and offer to
// open a browser to make it easy to generate and use a fresh token.
line1 := fmt.Sprintf("Manage your Pulumi stacks by logging in.")
line1len := len(line1)
line1 = colors.Highlight(line1, "Pulumi stacks", colors.Underline+colors.Bold)
fmt.Printf(opts.Color.Colorize(line1) + "\n")
maxlen := line1len
line2 := "Run `pulumi login --help` for alternative login options."
line2len := len(line2)
fmt.Printf(opts.Color.Colorize(line2) + "\n")
if line2len > maxlen {
maxlen = line2len
}
// In the case where we could not construct a link to the pulumi console based on the API server's hostname,
// don't offer magic log-in or text about where to find your access token.
if accountLink == "" {
for {
if accessToken, err = cmdutil.ReadConsoleNoEcho("Enter your access token"); err != nil {
return nil, err
}
if accessToken != "" {
break
}
}
} else {
line3 := fmt.Sprintf("Enter your access token from %s", accountLink)
line3len := len(line3)
line3 = colors.Highlight(line3, "access token", colors.BrightCyan+colors.Bold)
line3 = colors.Highlight(line3, accountLink, colors.BrightBlue+colors.Underline+colors.Bold)
fmt.Printf(opts.Color.Colorize(line3) + "\n")
if line3len > maxlen {
maxlen = line3len
}
line4 := " or hit <ENTER> to log in using your browser"
var padding string
if pad := maxlen - len(line4); pad > 0 {
padding = strings.Repeat(" ", pad)
}
line4 = colors.Highlight(line4, "<ENTER>", colors.BrightCyan+colors.Bold)
if accessToken, err = cmdutil.ReadConsoleNoEcho(opts.Color.Colorize(line4) + padding); err != nil {
return nil, err
}
if accessToken == "" {
return loginWithBrowser(ctx, d, cloudURL)
}
}
}
// Try and use the credentials to see if they are valid.
valid, err := IsValidAccessToken(ctx, cloudURL, accessToken)
if err != nil {
return nil, err
} else if !valid {
return nil, errors.Errorf("invalid access token")
}
// Save them.
if err = workspace.StoreAccessToken(cloudURL, accessToken, true); err != nil {
return nil, err
}
return New(d, cloudURL)
}
func (b *cloudBackend) StackConsoleURL(stackRef backend.StackReference) (string, error) {
stackID, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return "", err
}
path := b.cloudConsoleStackPath(stackID)
url := b.CloudConsoleURL(path)
if url == "" {
return "", errors.New("could not determine clould console URL")
}
return url, nil
}
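// Name returns a friendly name for this backend: "pulumi.com" for the hosted service, otherwise the backend URL.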
func (b *cloudBackend) Name() string {
if b.url == PulumiCloudURL {
return "pulumi.com"
}
return b.url
}
func (b *cloudBackend) URL() string {
user, err := b.CurrentUser()
if err != nil {
return cloudConsoleURL(b.url)
}
return cloudConsoleURL(b.url, user)
}
func (b *cloudBackend) CurrentUser() (string, error) {
return b.client.GetPulumiAccountName(context.Background())
}
func (b *cloudBackend) CloudURL() string { return b.url }
func (b *cloudBackend) parsePolicyPackReference(s string) (backend.PolicyPackReference, error) {
split := strings.Split(s, "/")
var orgName string
var policyPackName string
switch len(split) {
case 2:
orgName = split[0]
policyPackName = split[1]
default:
return nil, errors.Errorf("could not parse policy pack name '%s'; must be of the form "+
"<orgName>/<policyPackName>", s)
}
return newCloudBackendPolicyPackReference(orgName, tokens.QName(policyPackName)), nil
}
func (b *cloudBackend) GetPolicyPack(ctx context.Context, policyPack string,
d diag.Sink) (backend.PolicyPack, error) {
policyPackRef, err := b.parsePolicyPackReference(policyPack)
if err != nil {
return nil, err
}
apiToken, err := workspace.GetAccessToken(b.CloudURL())
if err != nil {
return nil, err
}
return &cloudPolicyPack{
ref: newCloudBackendPolicyPackReference(
policyPackRef.OrgName(), policyPackRef.Name()),
b: b,
cl: client.NewClient(b.CloudURL(), apiToken, d)}, nil
}
// SupportsOrganizations tells whether a user can belong to multiple organizations in this backend.
func (b *cloudBackend) SupportsOrganizations() bool {
return true
}
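// ParseStackReference parses a stack reference of the form [<owner>/][<project>/]<stack>, defaulting the
// owner to the current Pulumi account and the project to the one detected in the local workspace.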
func (b *cloudBackend) ParseStackReference(s string) (backend.StackReference, error) {
split := strings.Split(s, "/")
var owner string
var projectName string
var stackName string
switch len(split) {
case 1:
stackName = split[0]
case 2:
owner = split[0]
stackName = split[1]
case 3:
owner = split[0]
projectName = split[1]
stackName = split[2]
default:
return nil, errors.Errorf("could not parse stack name '%s'", s)
}
if owner == "" {
currentUser, userErr := b.client.GetPulumiAccountName(context.Background())
if userErr != nil {
return nil, userErr
}
owner = currentUser
}
if projectName == "" {
currentProject, projectErr := workspace.DetectProject()
if projectErr != nil {
return nil, projectErr
}
projectName = currentProject.Name.String()
}
return cloudBackendReference{
owner: owner,
project: projectName,
name: tokens.QName(stackName),
b: b,
}, nil
}
// CloudConsoleURL returns a link to the cloud console with the given path elements. If a console link cannot be
// created, we return the empty string instead (this can happen if the endpoint isn't a recognized pattern).
func (b *cloudBackend) CloudConsoleURL(paths ...string) string {
return cloudConsoleURL(b.CloudURL(), paths...)
}
// serveBrowserLoginServer hosts the server that completes the browser based login flow.
func serveBrowserLoginServer(l net.Listener, expectedNonce string, destinationURL string, c chan<- string) {
handler := func(res http.ResponseWriter, req *http.Request) {
tok := req.URL.Query().Get("accessToken")
nonce := req.URL.Query().Get("nonce")
if tok == "" || nonce != expectedNonce {
res.WriteHeader(400)
return
}
http.Redirect(res, req, destinationURL, http.StatusTemporaryRedirect)
c <- tok
}
mux := &http.ServeMux{}
mux.HandleFunc("/", handler)
contract.IgnoreError(http.Serve(l, mux))
}
// cloudConsoleStackPath returns the stack path components for getting to a stack in the cloud console. This path
// must, of course, be combined with the actual console base URL by way of the CloudConsoleURL function above.
func (b *cloudBackend) cloudConsoleStackPath(stackID client.StackIdentifier) string {
return path.Join(stackID.Owner, stackID.Project, stackID.Stack)
}
// Logout logs out of the target cloud URL.
func (b *cloudBackend) Logout() error {
return workspace.DeleteAccessToken(b.CloudURL())
}
func (b *cloudBackend) GetStack(ctx context.Context, stackRef backend.StackReference) (backend.Stack, error) {
stackID, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return nil, err
}
stack, err := b.client.GetStack(ctx, stackID)
if err != nil {
// If this was a 404, return nil, nil as per this method's contract.
if errResp, ok := err.(*apitype.ErrorResponse); ok && errResp.Code == http.StatusNotFound {
return nil, nil
}
return nil, err
}
return newStack(stack, b), nil
}
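// CreateStack creates a new stack with the given reference, applying the environment tags for the current
// stack and returning a StackAlreadyExistsError if the service reports a conflict.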
func (b *cloudBackend) CreateStack(
ctx context.Context, stackRef backend.StackReference, _ interface{} /* No custom options for httpstate backend. */) (
backend.Stack, error) {
stackID, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return nil, err
}
tags, err := backend.GetEnvironmentTagsForCurrentStack()
if err != nil {
return nil, errors.Wrap(err, "error determining initial tags")
}
apistack, err := b.client.CreateStack(ctx, stackID, tags)
if err != nil {
// If the status is 409 Conflict (stack already exists), return StackAlreadyExistsError.
if errResp, ok := err.(*apitype.ErrorResponse); ok && errResp.Code == http.StatusConflict {
return nil, &backend.StackAlreadyExistsError{StackName: stackID.Stack}
}
return nil, err
}
stack := newStack(apistack, b)
fmt.Printf("Created stack '%s'\n", stack.Ref())
return stack, nil
}
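// ListStacks returns summaries of the stacks visible to the current user, optionally filtered by project name.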
func (b *cloudBackend) ListStacks(
ctx context.Context, projectFilter *tokens.PackageName) ([]backend.StackSummary, error) {
var cleanedProjectName *string
if projectFilter != nil {
clean := cleanProjectName(string(*projectFilter))
cleanedProjectName = &clean
}
apiSummaries, err := b.client.ListStacks(ctx, cleanedProjectName)
if err != nil {
return nil, err
}
// Convert []apitype.StackSummary into []backend.StackSummary.
var backendSummaries []backend.StackSummary
for _, apiSummary := range apiSummaries {
backendSummary := cloudStackSummary{
summary: apiSummary,
b: b,
}
backendSummaries = append(backendSummaries, backendSummary)
}
return backendSummaries, nil
}
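// RemoveStack deletes the given stack; the boolean result is forwarded from the service client's DeleteStack call.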
func (b *cloudBackend) RemoveStack(ctx context.Context, stackRef backend.StackReference, force bool) (bool, error) {
stack, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return false, err
}
return b.client.DeleteStack(ctx, stack, force)
}
func (b *cloudBackend) RenameStack(ctx context.Context, stackRef backend.StackReference, newName tokens.QName) error {
stack, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return err
}
return b.client.RenameStack(ctx, stack, string(newName))
}
func getStack(ctx context.Context, b *cloudBackend, stackRef backend.StackReference) (backend.Stack, error) {
stack, err := b.GetStack(ctx, stackRef)
if err != nil {
return nil, err
} else if stack == nil {
return nil, errors.New("stack not found")
}
return stack, nil
}
func (b *cloudBackend) Preview(ctx context.Context, stackRef backend.StackReference,
op backend.UpdateOperation) (engine.ResourceChanges, result.Result) {
stack, err := getStack(ctx, b, stackRef)
if err != nil {
return nil, result.FromError(err)
}
// We can skip PreviewThenPromptThenExecute and just go straight to Execute.
opts := backend.ApplierOptions{
DryRun: true,
ShowLink: true,
}
return b.apply(
ctx, apitype.PreviewUpdate, stack, op, opts, nil /*events*/)
}
func (b *cloudBackend) Update(ctx context.Context, stackRef backend.StackReference,
op backend.UpdateOperation) (engine.ResourceChanges, result.Result) {
stack, err := getStack(ctx, b, stackRef)
if err != nil {
return nil, result.FromError(err)
}
return backend.PreviewThenPromptThenExecute(ctx, apitype.UpdateUpdate, stack, op, b.apply)
}
func (b *cloudBackend) Refresh(ctx context.Context, stackRef backend.StackReference,
op backend.UpdateOperation) (engine.ResourceChanges, result.Result) {
stack, err := getStack(ctx, b, stackRef)
if err != nil {
return nil, result.FromError(err)
}
return backend.PreviewThenPromptThenExecute(ctx, apitype.RefreshUpdate, stack, op, b.apply)
}
func (b *cloudBackend) Destroy(ctx context.Context, stackRef backend.StackReference,
op backend.UpdateOperation) (engine.ResourceChanges, result.Result) {
stack, err := getStack(ctx, b, stackRef)
if err != nil {
return nil, result.FromError(err)
}
return backend.PreviewThenPromptThenExecute(ctx, apitype.DestroyUpdate, stack, op, b.apply)
}
func (b *cloudBackend) Query(ctx context.Context, stackRef backend.StackReference,
op backend.UpdateOperation) result.Result {
stack, err := b.GetStack(ctx, stackRef)
if err != nil {
return result.FromError(err)
}
return b.query(ctx, stack, op, nil /*events*/)
}
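// createAndStartUpdate creates an update of the given kind on the service, records any required policy packs
// on the engine options, and starts the update, returning its identifier, the new stack version, and the
// lease token used for subsequent calls.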
func (b *cloudBackend) createAndStartUpdate(
ctx context.Context, action apitype.UpdateKind, stack backend.Stack,
op *backend.UpdateOperation, dryRun bool) (client.UpdateIdentifier, int, string, error) {
stackRef := stack.Ref()
stackID, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return client.UpdateIdentifier{}, 0, "", err
}
metadata := apitype.UpdateMetadata{
Message: op.M.Message,
Environment: op.M.Environment,
}
update, reqdPolicies, err := b.client.CreateUpdate(
ctx, action, stackID, op.Proj, op.StackConfiguration.Config, metadata, op.Opts.Engine, dryRun)
if err != nil {
return client.UpdateIdentifier{}, 0, "", err
}
//
// TODO[pulumi-service#3745]: Move this to the plugin-gathering routine when we have a dedicated
// service API for getting the list of required policies to run.
//
// For now, this list is given to us when we start an update; yet, the list of analyzers to boot
// is given to us by CLI flag, and passed to the step generator (which lazily instantiates the
// plugins) via `op.Opts.Engine.Analyzers`. Since the "start update" API request is sent well
// after this field is populated, we instead populate the `RequiredPolicies` field here.
//
// Once this API is implemented, we can safely move these lines to the plugin-gathering code,
// which is much closer to being the "correct" place for this stuff.
//
for _, policy := range reqdPolicies {
op.Opts.Engine.RequiredPolicies = append(
op.Opts.Engine.RequiredPolicies, newCloudRequiredPolicy(b.client, policy))
}
// Start the update. We use this opportunity to pass new tags to the service, to pick up any
// metadata changes.
tags, err := backend.GetMergedStackTags(ctx, stack)
if err != nil {
return client.UpdateIdentifier{}, 0, "", errors.Wrap(err, "getting stack tags")
}
version, token, err := b.client.StartUpdate(ctx, update, tags)
if err != nil {
return client.UpdateIdentifier{}, 0, "", err
}
// Any non-preview update will be considered part of the stack's update history.
if action != apitype.PreviewUpdate {
logging.V(7).Infof("Stack %s being updated to version %d", stackRef, version)
}
return update, version, token, nil
}
// apply actually performs the provided type of update on a stack hosted in the Pulumi Cloud.
func (b *cloudBackend) apply(
ctx context.Context, kind apitype.UpdateKind, stack backend.Stack,
op backend.UpdateOperation, opts backend.ApplierOptions,
events chan<- engine.Event) (engine.ResourceChanges, result.Result) {
actionLabel := backend.ActionLabel(kind, opts.DryRun)
if !op.Opts.Display.JSONDisplay {
// Print a banner so it's clear this is going to the cloud.
fmt.Printf(op.Opts.Display.Color.Colorize(
colors.SpecHeadline+"%s (%s):"+colors.Reset+"\n"), actionLabel, stack.Ref())
}
// Create an update object to persist results.
update, version, token, err :=
b.createAndStartUpdate(ctx, kind, stack, &op, opts.DryRun)
if err != nil {
return nil, result.FromError(err)
}
if opts.ShowLink && !op.Opts.Display.JSONDisplay {
// Print a URL at the end of the update pointing to the Pulumi Service.
var link string
base := b.cloudConsoleStackPath(update.StackIdentifier)
if !opts.DryRun {
link = b.CloudConsoleURL(base, "updates", strconv.Itoa(version))
} else {
link = b.CloudConsoleURL(base, "previews", update.UpdateID)
}
if link != "" {
defer func() {
fmt.Printf(op.Opts.Display.Color.Colorize(
colors.SpecHeadline+"Permalink: "+
colors.Underline+colors.BrightBlue+"%s"+colors.Reset+"\n"), link)
}()
}
}
return b.runEngineAction(ctx, kind, stack.Ref(), op, update, token, events, opts.DryRun)
}
// query executes a query program against the resource outputs of a stack hosted in the Pulumi
// Cloud.
func (b *cloudBackend) query(
ctx context.Context, stack backend.Stack, op backend.UpdateOperation,
callerEventsOpt chan<- engine.Event) result.Result {
stackRef := stack.Ref()
q, err := b.newQuery(ctx, stackRef, op)
if err != nil {
return result.FromError(err)
}
// Render query output to CLI.
displayEvents := make(chan engine.Event)
displayDone := make(chan bool)
go display.ShowQueryEvents("running query", displayEvents, displayDone, op.Opts.Display)
// The engineEvents channel receives all events from the engine, which we then forward onto other
// channels for actual processing. (displayEvents and callerEventsOpt.)
engineEvents := make(chan engine.Event)
eventsDone := make(chan bool)
go func() {
for e := range engineEvents {
displayEvents <- e
if callerEventsOpt != nil {
callerEventsOpt <- e
}
}
close(eventsDone)
}()
// Depending on the action, kick off the relevant engine activity. Note that we don't immediately check and
// return error conditions, because we will do so below after waiting for the display channels to close.
cancellationScope := op.Scopes.NewScope(engineEvents, true /*dryRun*/)
engineCtx := &engine.Context{
Cancel: cancellationScope.Context(),
Events: engineEvents,
BackendClient: httpstateBackendClient{backend: b},
}
if parentSpan := opentracing.SpanFromContext(ctx); parentSpan != nil {
engineCtx.ParentSpan = parentSpan.Context()
}
res := engine.Query(engineCtx, q, op.Opts.Engine)
// Wait for dependent channels to finish processing engineEvents before closing.
<-displayDone
cancellationScope.Close() // Don't take any cancellations anymore, we're shutting down.
close(engineEvents)
// Make sure that the goroutine writing to displayEvents and callerEventsOpt
// has exited before proceeding
<-eventsDone
close(displayEvents)
return res
}
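// runEngineAction drives the local engine for the given update kind, forwarding engine events to the display
// (and to callerEventsOpt, if provided), persisting snapshots through the service, and marking the update as
// succeeded or failed when it finishes.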
func (b *cloudBackend) runEngineAction(
ctx context.Context, kind apitype.UpdateKind, stackRef backend.StackReference,
op backend.UpdateOperation, update client.UpdateIdentifier, token string,
callerEventsOpt chan<- engine.Event, dryRun bool) (engine.ResourceChanges, result.Result) {
contract.Assertf(token != "", "persisted actions require a token")
u, err := b.newUpdate(ctx, stackRef, op, update, token)
if err != nil {
return nil, result.FromError(err)
}
// displayEvents renders the events to the console and the Pulumi service. The processor signals that all
// events have been processed by writing a value to the displayDone channel.
displayEvents := make(chan engine.Event)
displayDone := make(chan bool)
go u.RecordAndDisplayEvents(
backend.ActionLabel(kind, dryRun), kind, stackRef, op,
displayEvents, displayDone, op.Opts.Display, dryRun)
// The engineEvents channel receives all events from the engine, which we then forward onto other
// channels for actual processing. (displayEvents and callerEventsOpt.)
engineEvents := make(chan engine.Event)
eventsDone := make(chan bool)
go func() {
for e := range engineEvents {
displayEvents <- e
if callerEventsOpt != nil {
callerEventsOpt <- e
}
}
close(eventsDone)
}()
// The backend.SnapshotManager and backend.SnapshotPersister will keep track of any changes to
// the Snapshot (checkpoint file) in the HTTP backend.
persister := b.newSnapshotPersister(ctx, u.update, u.tokenSource, op.SecretsManager)
snapshotManager := backend.NewSnapshotManager(persister, u.GetTarget().Snapshot)
// Depending on the action, kick off the relevant engine activity. Note that we don't immediately check and
// return error conditions, because we will do so below after waiting for the display channels to close.
cancellationScope := op.Scopes.NewScope(engineEvents, dryRun)
engineCtx := &engine.Context{
Cancel: cancellationScope.Context(),
Events: engineEvents,
SnapshotManager: snapshotManager,
BackendClient: httpstateBackendClient{backend: b},
}
if parentSpan := opentracing.SpanFromContext(ctx); parentSpan != nil {
engineCtx.ParentSpan = parentSpan.Context()
}
var changes engine.ResourceChanges
var res result.Result
switch kind {
case apitype.PreviewUpdate:
changes, res = engine.Update(u, engineCtx, op.Opts.Engine, true)
case apitype.UpdateUpdate:
changes, res = engine.Update(u, engineCtx, op.Opts.Engine, dryRun)
case apitype.RefreshUpdate:
changes, res = engine.Refresh(u, engineCtx, op.Opts.Engine, dryRun)
case apitype.DestroyUpdate:
changes, res = engine.Destroy(u, engineCtx, op.Opts.Engine, dryRun)
default:
contract.Failf("Unrecognized update kind: %s", kind)
}
// Wait for dependent channels to finish processing engineEvents before closing.
<-displayDone
cancellationScope.Close() // Don't take any cancellations anymore, we're shutting down.
close(engineEvents)
contract.IgnoreClose(snapshotManager)
// Make sure that the goroutine writing to displayEvents and callerEventsOpt
// has exited before proceeding
<-eventsDone
close(displayEvents)
// Mark the update as complete.
status := apitype.UpdateStatusSucceeded
if res != nil {
status = apitype.UpdateStatusFailed
}
completeErr := u.Complete(status)
if completeErr != nil {
res = result.Merge(res, result.FromError(errors.Wrap(completeErr, "failed to complete update")))
}
return changes, res
}
func (b *cloudBackend) CancelCurrentUpdate(ctx context.Context, stackRef backend.StackReference) error {
stackID, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return err
}
stack, err := b.client.GetStack(ctx, stackID)
if err != nil {
return err
}
if stack.ActiveUpdate == "" {
return errors.Errorf("stack %v has never been updated", stackRef)
}
// Compute the update identifier and attempt to cancel the update.
//
// NOTE: the update kind is not relevant; the same endpoint will work for updates of all kinds.
updateID := client.UpdateIdentifier{
StackIdentifier: stackID,
UpdateKind: apitype.UpdateUpdate,
UpdateID: stack.ActiveUpdate,
}
return b.client.CancelUpdate(ctx, updateID)
}
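// GetHistory returns the update history for the given stack, converted into backend.UpdateInfo values.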
func (b *cloudBackend) GetHistory(ctx context.Context, stackRef backend.StackReference) ([]backend.UpdateInfo, error) {
stack, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return nil, err
}
updates, err := b.client.GetStackUpdates(ctx, stack)
if err != nil {
return nil, err
}
// Convert apitype.UpdateInfo objects to the backend type.
var beUpdates []backend.UpdateInfo
for _, update := range updates {
// Convert types from the apitype package into their internal counterparts.
cfg, err := convertConfig(update.Config)
if err != nil {
return nil, errors.Wrap(err, "converting configuration")
}
beUpdates = append(beUpdates, backend.UpdateInfo{
Kind: update.Kind,
Message: update.Message,
Environment: update.Environment,
Config: cfg,
Result: backend.UpdateResult(update.Result),
StartTime: update.StartTime,
EndTime: update.EndTime,
ResourceChanges: convertResourceChanges(update.ResourceChanges),
})
}
return beUpdates, nil
}
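// GetLatestConfiguration fetches the configuration used by the most recent deployment of the stack, mapping
// the client's "no previous deployment" error to backend.ErrNoPreviousDeployment.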
func (b *cloudBackend) GetLatestConfiguration(ctx context.Context,
stackRef backend.StackReference) (config.Map, error) {
stackID, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return nil, err
}
cfg, err := b.client.GetLatestConfiguration(ctx, stackID)
switch {
case err == client.ErrNoPreviousDeployment:
return nil, backend.ErrNoPreviousDeployment
case err != nil:
return nil, err
default:
return cfg, nil
}
}
// convertResourceChanges converts the apitype version of engine.ResourceChanges into the internal version.
func convertResourceChanges(changes map[apitype.OpType]int) engine.ResourceChanges {
b := make(engine.ResourceChanges)
for k, v := range changes {
b[deploy.StepOp(k)] = v
}
return b
}
// convertConfig converts the apitype version of config.Map into the internal version.
func convertConfig(apiConfig map[string]apitype.ConfigValue) (config.Map, error) {
c := make(config.Map)
for rawK, rawV := range apiConfig {
k, err := config.ParseKey(rawK)
if err != nil {
return nil, err
}
if rawV.Secret {
c[k] = config.NewSecureValue(rawV.String)
} else {
c[k] = config.NewValue(rawV.String)
}
}
return c, nil
}
func (b *cloudBackend) GetLogs(ctx context.Context, stackRef backend.StackReference, cfg backend.StackConfiguration,
logQuery operations.LogQuery) ([]operations.LogEntry, error) {
stack, err := b.GetStack(ctx, stackRef)
if err != nil {
return nil, err
}
if stack == nil {
return nil, errors.New("stack not found")
}
target, targetErr := b.getTarget(ctx, stackRef, cfg.Config, cfg.Decrypter)
if targetErr != nil {
return nil, targetErr
}
return filestate.GetLogsForTarget(target, logQuery)
}
func (b *cloudBackend) ExportDeployment(ctx context.Context,
stackRef backend.StackReference) (*apitype.UntypedDeployment, error) {
stack, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return nil, err
}
deployment, err := b.client.ExportStackDeployment(ctx, stack)
if err != nil {
return nil, err
}
return &deployment, nil
}
func (b *cloudBackend) ImportDeployment(ctx context.Context, stackRef backend.StackReference,
deployment *apitype.UntypedDeployment) error {
stack, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return err
}
update, err := b.client.ImportStackDeployment(ctx, stack, deployment)
if err != nil {
return err
}
// Wait for the import to complete, which also polls and renders event output to STDOUT.
status, err := b.waitForUpdate(
ctx, backend.ActionLabel(apitype.ImportUpdate, false /*dryRun*/), update,
display.Options{Color: colors.Always})
if err != nil {
return errors.Wrap(err, "waiting for import")
} else if status != apitype.StatusSucceeded {
return errors.Errorf("import unsuccessful: status %v", status)
}
return nil
}
var (
projectNameCleanRegexp = regexp.MustCompile("[^a-zA-Z0-9-_.]")
)
// cleanProjectName replaces undesirable characters in project names with hyphens. At some point, these restrictions
// will be further enforced by the service, but for now we need to ensure that if we are making a REST call, we
// do this cleaning on our end.
func cleanProjectName(projectName string) string {
return projectNameCleanRegexp.ReplaceAllString(projectName, "-")
}
// getCloudStackIdentifier converts a backend.StackReference to a client.StackIdentifier for the same logical stack
func (b *cloudBackend) getCloudStackIdentifier(stackRef backend.StackReference) (client.StackIdentifier, error) {
cloudBackendStackRef, ok := stackRef.(cloudBackendReference)
if !ok {
return client.StackIdentifier{}, errors.New("bad stack reference type")
}
return client.StackIdentifier{
Owner: cloudBackendStackRef.owner,
Project: cleanProjectName(cloudBackendStackRef.project),
Stack: string(cloudBackendStackRef.name),
}, nil
}
// Client returns a client object that may be used to interact with this backend.
func (b *cloudBackend) Client() *client.Client {
return b.client
}
type DisplayEventType string
const (
UpdateEvent DisplayEventType = "UpdateEvent"
ShutdownEvent DisplayEventType = "Shutdown"
)
type displayEvent struct {
Kind DisplayEventType
Payload interface{}
}
// waitForUpdate waits for the current update of a Pulumi program to reach a terminal state, polling the
// service for new events as it goes, and returns the final update status.
func (b *cloudBackend) waitForUpdate(ctx context.Context, actionLabel string, update client.UpdateIdentifier,
displayOpts display.Options) (apitype.UpdateStatus, error) {
events, done := make(chan displayEvent), make(chan bool)
defer func() {
events <- displayEvent{Kind: ShutdownEvent, Payload: nil}
<-done
close(events)
close(done)
}()
go displayEvents(strings.ToLower(actionLabel), events, done, displayOpts)
// The UpdateEvents API returns a continuation token to only get events after the previous call.
var continuationToken *string
for {
// Query for the latest update results, including log entries so we can provide active status updates.
_, results, err := retry.Until(context.Background(), retry.Acceptor{
Accept: func(try int, nextRetryTime time.Duration) (bool, interface{}, error) {
return b.tryNextUpdate(ctx, update, continuationToken, try, nextRetryTime)
},
})
if err != nil {
return apitype.StatusFailed, err
}
// We got a result, print it out.
updateResults := results.(apitype.UpdateResults)
for _, event := range updateResults.Events {
events <- displayEvent{Kind: UpdateEvent, Payload: event}
}
continuationToken = updateResults.ContinuationToken
// A nil continuation token means there are no more events to read and the update has finished.
if continuationToken == nil {
return updateResults.Status, nil
}
}
}
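// displayEvents runs a spinner and prints the text of update events received on the events channel (to stdout
// or stderr, as indicated by the event) until a shutdown event arrives, then signals completion on done.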
func displayEvents(action string, events <-chan displayEvent, done chan<- bool, opts display.Options) {
prefix := fmt.Sprintf("%s%s...", cmdutil.EmojiOr("✨ ", "@ "), action)
spinner, ticker := cmdutil.NewSpinnerAndTicker(prefix, nil, 8 /*timesPerSecond*/)
defer func() {
spinner.Reset()
ticker.Stop()
done <- true
}()
for {
select {
case <-ticker.C:
spinner.Tick()
case event := <-events:
if event.Kind == ShutdownEvent {
return
}
// Pluck out the string.
payload := event.Payload.(apitype.UpdateEvent)
if raw, ok := payload.Fields["text"]; ok && raw != nil {
if text, ok := raw.(string); ok {
text = opts.Color.Colorize(text)
// Choose the stream to write to (by default stdout).
var stream io.Writer
if payload.Kind == apitype.StderrEvent {
stream = os.Stderr
} else {
stream = os.Stdout
}
if text != "" {
spinner.Reset()
fmt.Fprint(stream, text)
}
}
}
}
}
}
// tryNextUpdate tries to get the next update events for a Pulumi program. This may time out or fail, in which
// case false is returned as the first return value. If a non-nil error is returned, this operation should fail.
func (b *cloudBackend) tryNextUpdate(ctx context.Context, update client.UpdateIdentifier, continuationToken *string,
try int, nextRetryTime time.Duration) (bool, interface{}, error) {
// If there is no error, we're done.
results, err := b.client.GetUpdateEvents(ctx, update, continuationToken)
if err == nil {
return true, results, nil
}
// There are three kinds of errors we might see:
// 1) Expected HTTP errors (like timeouts); silently retry.
// 2) Unexpected HTTP errors (like Unauthorized, etc); exit with an error.
// 3) Anything else; this could be any number of things, including transient errors (flaky network).
// In this case, we warn the user and keep retrying; they can ^C if it's not transient.
warn := true
if errResp, ok := err.(*apitype.ErrorResponse); ok {
if errResp.Code == 504 {
// If our request to the Pulumi Service returned a 504 (Gateway Timeout), ignore it and keep
// continuing. The sole exception is if we've done this 10 times. At that point, we will have
// been waiting for many seconds, and want to let the user know something might be wrong.
if try < 10 {
warn = false
}
logging.V(3).Infof("Expected %s HTTP %d error after %d retries (retrying): %v",
b.CloudURL(), errResp.Code, try, err)
} else {
// Otherwise, we will issue an error.
logging.V(3).Infof("Unexpected %s HTTP %d error after %d retries (erroring): %v",
b.CloudURL(), errResp.Code, try, err)
return false, nil, err
}
} else {
logging.V(3).Infof("Unexpected %s error after %d retries (retrying): %v", b.CloudURL(), try, err)
}
// Issue a warning if appropriate.
if warn {
b.d.Warningf(diag.Message("" /*urn*/, "error querying update status: %v"), err)
b.d.Warningf(diag.Message("" /*urn*/, "retrying in %vs... ^C to stop (this will not cancel the update)"),
nextRetryTime.Seconds())
}
return false, nil, nil
}
// IsValidAccessToken tries to use the provided Pulumi access token and returns whether it is accepted.
// Returns an error on any unexpected failure.
func IsValidAccessToken(ctx context.Context, cloudURL, accessToken string) (bool, error) {
// Make a request to get the authenticated user. If it returns a successful response,
// we know the access token is legit. We also parse the response as JSON and confirm
// it has a githubLogin field that is non-empty (like the Pulumi Service would return).
_, err := client.NewClient(cloudURL, accessToken, cmdutil.Diag()).GetPulumiAccountName(ctx)
if err != nil {
if errResp, ok := err.(*apitype.ErrorResponse); ok && errResp.Code == 401 {
return false, nil
}
return false, errors.Wrapf(err, "getting user info from %v", cloudURL)
}
return true, nil
}
// GetStackTags fetches the stack's existing tags.
func (b *cloudBackend) GetStackTags(ctx context.Context,
stackRef backend.StackReference) (map[apitype.StackTagName]string, error) {
stack, err := b.GetStack(ctx, stackRef)
if err != nil {
return nil, err
}
if stack == nil {
return nil, errors.New("stack not found")
}
return stack.(Stack).Tags(), nil
}
// UpdateStackTags updates the stack's tags, replacing all existing tags.
func (b *cloudBackend) UpdateStackTags(ctx context.Context,
stackRef backend.StackReference, tags map[apitype.StackTagName]string) error {
stack, err := b.getCloudStackIdentifier(stackRef)
if err != nil {
return err
}
return b.client.UpdateStackTags(ctx, stack, tags)
}
type httpstateBackendClient struct {
backend Backend
}
func (c httpstateBackendClient) GetStackOutputs(ctx context.Context, name string) (resource.PropertyMap, error) {
// When using the cloud backend, require that stack references are fully qualified so they
// look like "<org>/<project>/<stack>"
if strings.Count(name, "/") != 2 {
return nil, errors.Errorf("a stack reference's name should be of the form " +
"'<organization>/<project>/<stack>'. See https://pulumi.io/help/stack-reference for more information.")
}
return backend.NewBackendClient(c.backend).GetStackOutputs(ctx, name)
}
func (c httpstateBackendClient) GetStackResourceOutputs(
ctx context.Context, name string) (resource.PropertyMap, error) {
return backend.NewBackendClient(c.backend).GetStackResourceOutputs(ctx, name)
}
test_simple_checkpoint.py
from unittest.mock import patch
import pytest
import great_expectations.exceptions as ge_exceptions
from great_expectations import DataContext
from great_expectations.checkpoint import SimpleCheckpointConfigurator
from great_expectations.checkpoint.checkpoint import (
Checkpoint,
CheckpointResult,
SimpleCheckpoint,
)
from great_expectations.core.batch import RuntimeBatchRequest
from great_expectations.data_context.types.base import CheckpointConfig
from great_expectations.util import filter_properties_dict
@pytest.fixture
def update_data_docs_action():
return {
"name": "update_data_docs",
"action": {"class_name": "UpdateDataDocsAction", "site_names": []},
}
@pytest.fixture
def store_eval_parameter_action():
return {
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
}
@pytest.fixture
def store_validation_result_action():
return {
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
}
@pytest.fixture
def webhook() -> str:
return "https://hooks.slack.com/foo/bar"
@pytest.fixture
def slack_notification_action(webhook):
return {
"name": "send_slack_notification",
"action": {
"class_name": "SlackNotificationAction",
"slack_webhook": webhook,
"notify_on": "all",
"notify_with": None,
"renderer": {
"module_name": "great_expectations.render.renderer.slack_renderer",
"class_name": "SlackRenderer",
},
},
}
@pytest.fixture
def context_with_data_source_and_empty_suite(
titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled,
):
context: DataContext = titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled
datasources = context.list_datasources()
assert datasources[0]["class_name"] == "Datasource"
assert "my_special_data_connector" in datasources[0]["data_connectors"].keys()
context.create_expectation_suite("one", overwrite_existing=True)
assert context.list_expectation_suite_names() == ["one"]
return context
@pytest.fixture
def context_with_data_source_and_empty_suite_with_templates(
titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
context: DataContext = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates
datasources = context.list_datasources()
assert datasources[0]["class_name"] == "Datasource"
assert "my_special_data_connector" in datasources[0]["data_connectors"].keys()
context.create_expectation_suite("one", overwrite_existing=True)
assert context.list_expectation_suite_names() == ["one"]
return context
@pytest.fixture
def simple_checkpoint_defaults(context_with_data_source_and_empty_suite):
return SimpleCheckpoint(
name="foo", data_context=context_with_data_source_and_empty_suite
)
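# Two validation dicts for the "users" data asset: the first targets expectation suite "one", the second "two".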
@pytest.fixture
def two_validations(one_validation):
return [
one_validation,
{
"batch_request": {
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
"expectation_suite_name": "two",
},
]
def test_simple_checkpoint_default_properties_with_no_optional_arguments(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
"""This demonstrates the simplest possible usage."""
checkpoint_config = SimpleCheckpointConfigurator(
"my_minimal_simple_checkpoint", empty_data_context
).build()
assert isinstance(checkpoint_config, CheckpointConfig)
assert checkpoint_config.name == "my_minimal_simple_checkpoint"
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
assert checkpoint_config.config_version == 1.0
assert checkpoint_config.class_name == "Checkpoint"
assert checkpoint_config.evaluation_parameters == {}
assert checkpoint_config.runtime_configuration == {}
assert checkpoint_config.validations == []
checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
"my_minimal_simple_checkpoint"
)
checkpoint_config = checkpoint_from_store.config
assert checkpoint_config.name == "my_minimal_simple_checkpoint"
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
assert checkpoint_config.config_version == 1.0
assert checkpoint_config.class_name == "Checkpoint"
assert checkpoint_config.evaluation_parameters == {}
assert checkpoint_config.runtime_configuration == {}
assert checkpoint_config.validations == []
def test_simple_checkpoint_raises_error_on_invalid_slack_webhook(
empty_data_context,
):
with pytest.raises(ValueError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, slack_webhook="bad"
).build()
def test_simple_checkpoint_has_slack_action_with_defaults_when_slack_webhook_is_present(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
slack_notification_action,
webhook,
titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
checkpoint_config = SimpleCheckpointConfigurator(
"foo", empty_data_context, slack_webhook=webhook
).build()
expected = [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
slack_notification_action,
]
assert checkpoint_config.action_list == expected
checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
"my_simple_checkpoint_with_slack"
)
checkpoint_config = checkpoint_from_store.config
assert checkpoint_config.name == "my_simple_checkpoint_with_slack"
assert checkpoint_config.action_list == expected
def test_simple_checkpoint_raises_error_on_invalid_notify_on(
empty_data_context,
):
for bad in [1, "bar", None, []]:
with pytest.raises(ValueError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, notify_on=bad
).build()
def test_simple_checkpoint_raises_error_on_missing_slack_webhook_when_notify_on_is_list(
empty_data_context, slack_notification_action, webhook
):
with pytest.raises(ValueError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, notify_with=["prod", "dev"]
).build()
def test_simple_checkpoint_raises_error_on_missing_slack_webhook_when_notify_on_is_not_default(
empty_data_context, slack_notification_action, webhook
):
for condition in ["failure", "success"]:
with pytest.raises(ValueError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, notify_on=condition
).build()
def test_simple_checkpoint_raises_error_on_invalid_notify_with(
empty_data_context,
):
for bad in [1, "bar", ["local_site", 3]]:
with pytest.raises(ValueError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, notify_with=bad
).build()
def test_simple_checkpoint_notify_with_all_has_data_docs_action_with_none_specified(
empty_data_context,
slack_notification_action,
webhook,
titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
"""
The underlying SlackNotificationAction and SlackRenderer default to
including links to all sites if the key notify_with is not present. We are
intentionally hiding this from users of SimpleCheckpoint by having a default
of "all" that sets the configuration appropriately.
"""
checkpoint_config = SimpleCheckpointConfigurator(
"foo", empty_data_context, slack_webhook=webhook, notify_with="all"
).build()
# set the config to include all sites
slack_notification_action["action"]["notify_with"] = None
assert slack_notification_action in checkpoint_config.action_list
checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
"my_simple_checkpoint_with_slack_and_notify_with_all"
)
checkpoint_config = checkpoint_from_store.config
assert slack_notification_action in checkpoint_config.action_list
def test_simple_checkpoint_has_slack_action_with_notify_adjustments_slack_webhook_is_present(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
slack_notification_action,
webhook,
):
checkpoint_config = SimpleCheckpointConfigurator(
"foo",
empty_data_context,
slack_webhook=webhook,
notify_on="failure",
notify_with=["local_site", "s3_prod"],
).build()
slack_notification_action["action"]["notify_on"] = "failure"
slack_notification_action["action"]["notify_with"] = ["local_site", "s3_prod"]
expected = [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
slack_notification_action,
]
assert checkpoint_config.action_list == expected
def test_simple_checkpoint_has_no_slack_action_when_no_slack_webhook_is_present(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
):
checkpoint_config = SimpleCheckpointConfigurator("foo", empty_data_context).build()
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
def test_simple_checkpoint_has_update_data_docs_action_that_should_update_all_sites_when_site_names_is_all(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
):
checkpoint_config = SimpleCheckpointConfigurator(
"foo", empty_data_context, site_names="all"
).build()
# This is confusing: the UpdateDataDocsAction default behavior is to update
# all sites if site_names=None
update_data_docs_action["action"]["site_names"] = []
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
def test_simple_checkpoint_raises_errors_on_invalid_site_name_types(
empty_data_context,
):
for junk_input in [[1, "local"], 1, ["local", None]]:
with pytest.raises(TypeError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, site_names=junk_input
).build()
def test_simple_checkpoint_raises_errors_on_site_name_that_does_not_exist_on_data_context(
empty_data_context,
):
# assert the fixture is adequate
assert "prod" not in empty_data_context.get_site_names()
with pytest.raises(TypeError):
SimpleCheckpointConfigurator(
"foo", empty_data_context, site_names=["prod"]
).build()
def test_simple_checkpoint_has_update_data_docs_action_that_should_update_selected_sites_when_sites_are_selected(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
# assert the fixture is adequate
assert "local_site" in empty_data_context.get_site_names()
checkpoint_config = SimpleCheckpointConfigurator(
"foo", empty_data_context, site_names=["local_site"]
).build()
# This is confusing: the UpdateDataDocsAction default behavior is to update
# all sites if site_names=None
update_data_docs_action["action"]["site_names"] = ["local_site"]
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
# assert the fixture is adequate
assert (
"local_site"
in titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_site_names()
)
checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
"my_simple_checkpoint_with_site_names"
)
checkpoint_config = checkpoint_from_store.config
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
def test_simple_checkpoint_has_no_update_data_docs_action_when_site_names_is_none(
empty_data_context,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
):
# assert the fixture is adequate
assert "local_site" in empty_data_context.get_site_names()
checkpoint_config = SimpleCheckpointConfigurator(
"foo", empty_data_context, site_names=None
).build()
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
]
def test_simple_checkpoint_persisted_to_store(
context_with_data_source_and_empty_suite, webhook, one_validation
):
assert context_with_data_source_and_empty_suite.list_checkpoints() == []
initial_checkpoint_config = SimpleCheckpointConfigurator(
"foo",
context_with_data_source_and_empty_suite,
site_names=None,
).build()
# TODO: this add_checkpoint will be user-facing, and it could be more ergonomic by accepting a
# Checkpoint. Maybe .add_checkpoint() should take a Checkpoint, and there should be a
# .create_checkpoint() that accepts all the current parameters.
context_with_data_source_and_empty_suite.add_checkpoint(
**initial_checkpoint_config.to_json_dict()
)
assert context_with_data_source_and_empty_suite.list_checkpoints() == ["foo"]
checkpoint = context_with_data_source_and_empty_suite.get_checkpoint("foo")
assert isinstance(checkpoint, Checkpoint)
assert isinstance(checkpoint.config, CheckpointConfig)
assert checkpoint.config.to_json_dict() == {
"action_list": [
{
"action": {"class_name": "StoreValidationResultAction"},
"name": "store_validation_result",
},
{
"action": {"class_name": "StoreEvaluationParametersAction"},
"name": "store_evaluation_params",
},
],
"batch_request": None,
"class_name": "Checkpoint",
"config_version": 1.0,
"evaluation_parameters": {},
"expectation_suite_ge_cloud_id": None,
"expectation_suite_name": None,
"ge_cloud_id": None,
"module_name": "great_expectations.checkpoint",
"name": "foo",
"profilers": [],
"run_name_template": None,
"runtime_configuration": {},
"template_name": None,
"validations": [],
}
results = checkpoint.run(validations=[one_validation])
assert results.success
def test_simple_checkpoint_defaults_run_and_no_run_params_raises_checkpoint_error(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults
):
with pytest.raises(ge_exceptions.CheckpointError) as cpe:
# noinspection PyUnusedLocal
result: CheckpointResult = simple_checkpoint_defaults.run()
assert 'Checkpoint "foo" does not contain any validations.' in str(cpe.value)
def test_simple_checkpoint_defaults_run_and_basic_run_params_without_persisting_checkpoint(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
# verify Checkpoint is not persisted in the data context
assert context_with_data_source_and_empty_suite.list_checkpoints() == []
result = simple_checkpoint_defaults.run(
run_name="bar",
validations=[one_validation],
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert result.list_expectation_suite_names() == ["one"]
assert len(result.list_validation_results()) == 1
assert result.success
def test_simple_checkpoint_runtime_kwargs_processing_site_names_only_without_persisting_checkpoint(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
# verify Checkpoint is not persisted in the data context
assert context_with_data_source_and_empty_suite.list_checkpoints() == []
expected_runtime_kwargs: dict = {
"name": "foo",
"config_version": 1.0,
"module_name": "great_expectations.checkpoint",
"class_name": "Checkpoint",
"template_name": None,
"run_name_template": None,
"expectation_suite_name": None,
"batch_request": None,
"action_list": [
{
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
},
{
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
},
{
"name": "update_data_docs",
"action": {
"class_name": "UpdateDataDocsAction",
"site_names": ["local_site"],
},
},
],
"evaluation_parameters": None,
"runtime_configuration": {},
"validations": [
{
"batch_request": {
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
"expectation_suite_name": "one",
},
],
"profilers": None,
}
result: CheckpointResult = simple_checkpoint_defaults.run(
run_name="bar",
validations=[one_validation],
site_names=["local_site"],
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert result.list_expectation_suite_names() == ["one"]
assert len(result.list_validation_results()) == 1
assert result.success
substituted_runtime_config: CheckpointConfig = (
simple_checkpoint_defaults.get_substituted_config(
runtime_kwargs=expected_runtime_kwargs
)
)
assert filter_properties_dict(
properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_slack_webhook_only_without_persisting_checkpoint(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
# verify Checkpoint is not persisted in the data context
assert context_with_data_source_and_empty_suite.list_checkpoints() == []
expected_runtime_kwargs: dict = {
"name": "foo",
"config_version": 1.0,
"module_name": "great_expectations.checkpoint",
"class_name": "Checkpoint",
"template_name": None,
"run_name_template": None,
"expectation_suite_name": None,
"batch_request": None,
"action_list": [
{
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
},
{
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
},
{
"name": "update_data_docs",
"action": {"class_name": "UpdateDataDocsAction", "site_names": []},
},
{
"name": "send_slack_notification",
"action": {
"class_name": "SlackNotificationAction",
"slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
"notify_on": "all",
"notify_with": None,
"renderer": {
"module_name": "great_expectations.render.renderer.slack_renderer",
"class_name": "SlackRenderer",
},
},
},
],
"evaluation_parameters": None,
"runtime_configuration": {},
"validations": [
{
"batch_request": {
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
"expectation_suite_name": "one",
}
],
"profilers": None,
}
result: CheckpointResult = simple_checkpoint_defaults.run(
run_name="bar",
validations=[one_validation],
slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert result.list_expectation_suite_names() == ["one"]
assert len(result.list_validation_results()) == 1
assert result.success
substituted_runtime_config: CheckpointConfig = (
simple_checkpoint_defaults.get_substituted_config(
runtime_kwargs=expected_runtime_kwargs
)
)
assert filter_properties_dict(
properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_all_special_kwargs_without_persisting_checkpoint(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
# verify Checkpoint is not persisted in the data context
assert context_with_data_source_and_empty_suite.list_checkpoints() == []
expected_runtime_kwargs: dict = {
"name": "foo",
"config_version": 1.0,
"module_name": "great_expectations.checkpoint",
"class_name": "Checkpoint",
"template_name": None,
"run_name_template": None,
"expectation_suite_name": None,
"batch_request": None,
"action_list": [
{
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
},
{
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
},
{
"name": "update_data_docs",
"action": {
"class_name": "UpdateDataDocsAction",
"site_names": ["local_site"],
},
},
{
"name": "send_slack_notification",
"action": {
"class_name": "SlackNotificationAction",
"slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
"notify_on": "failure",
"notify_with": ["local_site"],
"renderer": {
"module_name": "great_expectations.render.renderer.slack_renderer",
"class_name": "SlackRenderer",
},
},
},
],
"evaluation_parameters": None,
"runtime_configuration": {},
"validations": [
{
"batch_request": {
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
"expectation_suite_name": "one",
}
],
"profilers": None,
}
result: CheckpointResult = simple_checkpoint_defaults.run(
run_name="bar",
validations=[one_validation],
site_names=["local_site"],
notify_with=["local_site"],
notify_on="failure",
slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert result.list_expectation_suite_names() == ["one"]
assert len(result.list_validation_results()) == 1
assert result.success
substituted_runtime_config: CheckpointConfig = (
simple_checkpoint_defaults.get_substituted_config(
runtime_kwargs=expected_runtime_kwargs
)
)
assert filter_properties_dict(
properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_all_kwargs(
titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
simple_checkpoint_defaults,
one_validation,
monkeypatch,
):
monkeypatch.setenv("GE_ENVIRONMENT", "my_ge_environment")
monkeypatch.setenv("MY_PARAM", "1")
expected_runtime_kwargs: dict = {
"name": "foo",
"config_version": 1.0,
"module_name": "great_expectations.checkpoint",
"class_name": "Checkpoint",
"template_name": "my_simple_template_checkpoint",
"run_name_template": "my_runtime_run_name_template",
"expectation_suite_name": "my_runtime_suite",
"batch_request": {
"data_connector_query": {
"index": -1,
},
},
"action_list": [
{
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
},
{
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
},
{
"name": "update_data_docs",
"action": {
"class_name": "UpdateDataDocsAction",
"site_names": ["local_site"],
},
},
{
"name": "send_slack_notification",
"action": {
"class_name": "SlackNotificationAction",
"slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
"notify_on": "failure",
"notify_with": ["local_site"],
"renderer": {
"module_name": "great_expectations.render.renderer.slack_renderer",
"class_name": "SlackRenderer",
},
},
},
],
"evaluation_parameters": {
"aux_param_0": "1",
"aux_param_1": "1 + 1",
"environment": "my_ge_environment",
"my_runtime_key": "my_runtime_value",
"tolerance": 0.01,
},
"runtime_configuration": {
"my_runtime_key": "my_runtime_value",
"result_format": {
"result_format": "BASIC",
"partial_unexpected_count": 20,
},
},
"validations": [
{
"batch_request": {
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
"expectation_suite_name": "one",
}
],
"profilers": None,
}
result: CheckpointResult = simple_checkpoint_defaults.run(
run_name="bar",
template_name="my_simple_template_checkpoint",
run_name_template="my_runtime_run_name_template",
expectation_suite_name="my_runtime_suite",
batch_request={
"data_connector_query": {
"index": -1,
},
},
validations=[one_validation],
evaluation_parameters={"my_runtime_key": "my_runtime_value"},
runtime_configuration={"my_runtime_key": "my_runtime_value"},
site_names=["local_site"],
notify_with=["local_site"],
notify_on="failure",
slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert result.list_expectation_suite_names() == ["one"]
assert len(result.list_validation_results()) == 1
assert result.success
substituted_runtime_config: CheckpointConfig = (
simple_checkpoint_defaults.get_substituted_config(
runtime_kwargs=expected_runtime_kwargs
)
)
expected_runtime_kwargs.pop("template_name")
assert filter_properties_dict(
properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_defaults_run_and_basic_run_params_with_persisted_checkpoint_loaded_from_store(
context_with_data_source_and_empty_suite,
simple_checkpoint_defaults,
webhook,
one_validation,
):
context: DataContext = context_with_data_source_and_empty_suite
checkpoint_config = SimpleCheckpointConfigurator(
"foo", context_with_data_source_and_empty_suite, slack_webhook=webhook
).build()
context.add_checkpoint(**checkpoint_config.to_json_dict())
checkpoint_name = checkpoint_config.name
assert context.list_checkpoints() == [checkpoint_name]
del checkpoint_config
checkpoint = context.get_checkpoint(checkpoint_name)
assert isinstance(checkpoint, Checkpoint)
result = checkpoint.run(
run_name="bar",
validations=[one_validation],
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert result.list_expectation_suite_names() == ["one"]
assert len(result.list_validation_results()) == 1
assert result.success
@pytest.fixture
def one_validation():
return {
"batch_request": {
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
"expectation_suite_name": "one",
}
def test_simple_checkpoint_defaults_run_with_top_level_batch_request_and_suite(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults
):
result = simple_checkpoint_defaults.run(
run_name="bar",
batch_request={
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
"data_asset_name": "users",
},
expectation_suite_name="one",
validations=[{"expectation_suite_name": "one"}],
)
assert isinstance(result, CheckpointResult)
assert result.success
assert len(result.run_results) == 1
def test_simple_checkpoint_error_with_invalid_top_level_batch_request(
simple_checkpoint_defaults,
):
# raised by _validate_init_parameters() in BatchRequest.__init__()
with pytest.raises(TypeError):
# missing data_asset_name
result = simple_checkpoint_defaults.run(
run_name="bar",
batch_request={
"datasource_name": "my_datasource",
"data_connector_name": "my_special_data_connector",
},
expectation_suite_name="one",
validations=[{"expectation_suite_name": "one"}],
)
def test_simple_checkpoint_defaults_run_multiple_validations_without_persistence(
context_with_data_source_and_empty_suite,
simple_checkpoint_defaults,
two_validations,
):
context_with_data_source_and_empty_suite.create_expectation_suite("two")
assert len(context_with_data_source_and_empty_suite.list_expectation_suites()) == 2
result = simple_checkpoint_defaults.run(
run_name="bar",
validations=two_validations,
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert sorted(result.list_expectation_suite_names()) == sorted(["one", "two"])
assert len(result.list_validation_results()) == 2
assert result.success
def test_simple_checkpoint_defaults_run_multiple_validations_with_persisted_checkpoint_loaded_from_store(
context_with_data_source_and_empty_suite,
simple_checkpoint_defaults,
two_validations,
):
context: DataContext = context_with_data_source_and_empty_suite
context.create_expectation_suite("two")
assert len(context.list_expectation_suites()) == 2
# persist to store
context.add_checkpoint(**simple_checkpoint_defaults.config.to_json_dict())
checkpoint_name = simple_checkpoint_defaults.name
assert context.list_checkpoints() == [checkpoint_name]
# reload from store
del simple_checkpoint_defaults
checkpoint = context.get_checkpoint(checkpoint_name)
result = checkpoint.run(
run_name="bar",
validations=two_validations,
)
assert isinstance(result, CheckpointResult)
assert result.run_id.run_name == "bar"
assert sorted(result.list_expectation_suite_names()) == sorted(["one", "two"])
assert len(result.list_validation_results()) == 2
assert result.success
def test_simple_checkpoint_with_runtime_batch_request_and_runtime_data_connector_creates_config(
context_with_data_source_and_empty_suite,
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
):
context: DataContext = context_with_data_source_and_empty_suite
runtime_batch_request = RuntimeBatchRequest(
datasource_name="my_datasource",
data_connector_name="my_runtime_data_connector",
data_asset_name="users",
batch_identifiers={"pipeline_stage_name": "first"}, # defined in fixture
runtime_parameters={
"query": "SELECT * FROM taxi_data"
}, # not actually run, but used to test configuration
)
checkpoint = SimpleCheckpoint(
name="my_checkpoint", data_context=context, batch_request=runtime_batch_request
)
checkpoint_config = checkpoint.config
assert isinstance(checkpoint_config, CheckpointConfig)
assert checkpoint_config.name == "my_checkpoint"
assert checkpoint_config.action_list == [
store_validation_result_action,
store_eval_parameter_action,
update_data_docs_action,
]
assert checkpoint_config.batch_request == {
"batch_identifiers": {"pipeline_stage_name": "first"},
"data_asset_name": "users",
"data_connector_name": "my_runtime_data_connector",
"datasource_name": "my_datasource",
"runtime_parameters": {"query": "SELECT * FROM taxi_data"},
}
assert checkpoint_config.config_version == 1.0
assert checkpoint_config.class_name == "Checkpoint"
assert checkpoint_config.evaluation_parameters == {}
assert checkpoint_config.runtime_configuration == {}
assert checkpoint_config.validations == [] | |
install.js | module.exports = installSelenium
const merge = require('lodash').merge
const selenium = require('selenium-standalone')
function installSelenium (state, callback) {
if (state.seleniumAlreadyRunning) {
return callback()
}
state.debugSelenium('Installing selenium')
const options = merge(state.config.selenium.standalone, {
// https://github.com/vvo/selenium-standalone#seleniuminstallopts-cb
// progressCb: (totalLength, progressLength) => {
// console.log(progressLength, '/', totalLength)
// }
logger: (message) => {
// ignore empty messages or messages containing - only
if (/^[-]+$/.test(message.trim())) {
return
}
state.debugSelenium(message)
}
})
selenium.install(options, (error) => {
if (error) {
      if (error.message.indexOf('getaddrinfo') !== -1) {
// most likely there is no internet connectivity, so we try to just run
// tests as it might have been installed before | }
state.debugSelenium('Could not install selenium: ' + error.message)
return callback(error)
}
state.debugSelenium('Selenium installed')
callback()
})
} | state.debugSelenium('CONNECTION ERROR: could not install/update selenium. Will try to run tests either way')
return callback() |
integration.ts | // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
/**
* SignalFx VictorOps integration.
*
* > **NOTE** When managing integrations you'll need to use an admin token to authenticate the SignalFx provider. Otherwise you'll receive a 4xx error.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as signalfx from "@pulumi/signalfx";
*
* const vioctorOpsMyteam = new signalfx.victorops.Integration("vioctor_ops_myteam", {
* enabled: true,
* postUrl: "https://alert.victorops.com/integrations/generic/1234/alert/$key/$routing_key",
* });
* ```
*/
export class Integration extends pulumi.CustomResource {
/**
* Get an existing Integration resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: IntegrationState, opts?: pulumi.CustomResourceOptions): Integration {
return new Integration(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'signalfx:victorops/integration:Integration';
/**
* Returns true if the given object is an instance of Integration. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is Integration {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === Integration.__pulumiType;
}
/**
* Whether the integration is enabled.
*/
public readonly enabled!: pulumi.Output<boolean>;
/**
* Name of the integration.
*/
public readonly name!: pulumi.Output<string>;
/**
* Victor Ops REST API URL.
*/
public readonly postUrl!: pulumi.Output<string | undefined>;
/**
     * Create an Integration resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
constructor(name: string, args: IntegrationArgs, opts?: pulumi.CustomResourceOptions)
constructor(name: string, argsOrState?: IntegrationArgs | IntegrationState, opts?: pulumi.CustomResourceOptions) {
let inputs: pulumi.Inputs = {};
opts = opts || {};
if (opts.id) {
const state = argsOrState as IntegrationState | undefined;
inputs["enabled"] = state ? state.enabled : undefined;
inputs["name"] = state ? state.name : undefined;
inputs["postUrl"] = state ? state.postUrl : undefined;
} else {
const args = argsOrState as IntegrationArgs | undefined;
if ((!args || args.enabled === undefined) && !opts.urn) {
throw new Error("Missing required property 'enabled'");
}
inputs["enabled"] = args ? args.enabled : undefined;
inputs["name"] = args ? args.name : undefined;
inputs["postUrl"] = args ? args.postUrl : undefined;
}
if (!opts.version) {
opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
}
super(Integration.__pulumiType, name, inputs, opts);
}
}
/**
* Input properties used for looking up and filtering Integration resources.
*/
export interface IntegrationState {
/**
* Whether the integration is enabled.
*/
readonly enabled?: pulumi.Input<boolean>;
/**
* Name of the integration.
*/
readonly name?: pulumi.Input<string>;
/**
* Victor Ops REST API URL.
*/
readonly postUrl?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing an Integration resource.
*/
export interface IntegrationArgs {
/**
* Whether the integration is enabled.
*/
readonly enabled: pulumi.Input<boolean>;
/** | * Name of the integration.
*/
readonly name?: pulumi.Input<string>;
/**
* Victor Ops REST API URL.
*/
readonly postUrl?: pulumi.Input<string>;
} | |
student.type.ts | import { ObjectType, Field } from "@nestjs/graphql";
@ObjectType('Student')
export class StudentType {
@Field()
id:string;
@Field()
firstName:string;
@Field()
lastName: string; | } |
|
test_comparator.py | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess as sp
import numpy as np
import pytest
import tensorrt as trt
from polygraphy.backend.onnx import BytesFromOnnx, OnnxFromTfGraph, GsFromOnnx
from polygraphy.backend.onnxrt import OnnxrtRunner, SessionFromOnnx
from polygraphy.backend.pluginref import PluginRefRunner
from polygraphy.backend.tf import SessionFromGraph, TfRunner
from polygraphy.backend.trt import EngineFromNetwork, NetworkFromOnnxBytes, TrtRunner
from polygraphy.exception import PolygraphyException
from polygraphy.comparator import Comparator, CompareFunc, DataLoader, IterationResult, PostprocessFunc, RunResults
from polygraphy import mod
from tests.models.meta import ONNX_MODELS, TF_MODELS
class TestComparator(object):
def test_warmup_runs(self):
onnx_loader = ONNX_MODELS["identity"].loader
runner = OnnxrtRunner(SessionFromOnnx(onnx_loader))
run_results = Comparator.run([runner], warm_up=2)
assert len(run_results[runner.name]) == 1
def test_list_as_data_loader(self):
onnx_loader = ONNX_MODELS["identity"].loader
runner = OnnxrtRunner(SessionFromOnnx(onnx_loader), name="onnx_runner")
data = [{"x": np.ones((1, 1, 2, 2), dtype=np.float32)}] * 2
run_results = Comparator.run([runner], data_loader=data)
iter_results = run_results["onnx_runner"]
assert len(iter_results) == 2
for actual, expected in zip(iter_results, data):
assert np.all(actual["y"] == expected["x"])
def test_generator_as_data_loader(self):
onnx_loader = ONNX_MODELS["identity"].loader
runner = OnnxrtRunner(SessionFromOnnx(onnx_loader), name="onnx_runner")
def data():
for feed_dict in [{"x": np.ones((1, 1, 2, 2), dtype=np.float32)}] * 2:
yield feed_dict
run_results = Comparator.run([runner], data_loader=data())
iter_results = run_results["onnx_runner"]
assert len(iter_results) == 2
for actual, expected in zip(iter_results, data()):
assert np.all(actual["y"] == expected["x"])
def test_multiple_runners(self):
load_tf = TF_MODELS["identity"].loader
build_tf_session = SessionFromGraph(load_tf)
onnx_model = OnnxFromTfGraph(load_tf)
load_serialized_onnx = BytesFromOnnx(onnx_model)
build_onnxrt_session = SessionFromOnnx(load_serialized_onnx)
load_engine = EngineFromNetwork(NetworkFromOnnxBytes(load_serialized_onnx))
gs_graph = GsFromOnnx(onnx_model)
runners = [
TfRunner(build_tf_session),
OnnxrtRunner(build_onnxrt_session),
PluginRefRunner(gs_graph),
TrtRunner(load_engine),
]
run_results = Comparator.run(runners)
compare_func = CompareFunc.simple(check_shapes=mod.version(trt.__version__) >= mod.version("7.0"))
assert bool(Comparator.compare_accuracy(run_results, compare_func=compare_func))
assert len(list(run_results.values())[0]) == 1 # Default number of iterations
def test_postprocess(self):
onnx_loader = ONNX_MODELS["identity"].loader
run_results = Comparator.run([OnnxrtRunner(SessionFromOnnx(onnx_loader))], use_subprocess=True)
# Output shape is (1, 1, 2, 2)
postprocessed = Comparator.postprocess(run_results, postprocess_func=PostprocessFunc.topk_func(k=1, axis=-1))
for _, results in postprocessed.items():
for result in results:
for _, output in result.items():
assert output.shape == (1, 1, 2, 1)
def test_errors_do_not_hang(self):
# Should error because interface is not implemented correctly.
class FakeRunner(object):
def __init__(self):
self.name = "fake"
runners = [FakeRunner()]
with pytest.raises(PolygraphyException):
Comparator.run(runners, use_subprocess=True, subprocess_polling_interval=1)
def test_segfault_does_not_hang(self):
def | ():
class FakeSegfault(sp.CalledProcessError):
pass
raise FakeSegfault(-11, ["simulate", "segfault"])
runners = [TrtRunner(EngineFromNetwork(raise_called_process_error))]
with pytest.raises(PolygraphyException):
Comparator.run(runners, use_subprocess=True, subprocess_polling_interval=1)
def test_multirun_outputs_are_different(self):
onnx_loader = ONNX_MODELS["identity"].loader
runner = TrtRunner(EngineFromNetwork(NetworkFromOnnxBytes(onnx_loader)))
run_results = Comparator.run([runner], data_loader=DataLoader(iterations=2))
iteration0 = run_results[runner.name][0]
iteration1 = run_results[runner.name][1]
for name in iteration0.keys():
assert np.any(iteration0[name] != iteration1[name])
def test_validate_nan(self):
run_results = RunResults()
run_results["fake-runner"] = [IterationResult(outputs={"x": np.array(np.nan)})]
assert not Comparator.validate(run_results)
def test_validate_inf(self):
run_results = RunResults()
run_results["fake-runner"] = [IterationResult(outputs={"x": np.array(np.inf)})]
assert not Comparator.validate(run_results, check_inf=True)
def test_dim_param_trt_onnxrt(self):
load_onnx_bytes = ONNX_MODELS["dim_param"].loader
build_onnxrt_session = SessionFromOnnx(load_onnx_bytes)
load_engine = EngineFromNetwork(NetworkFromOnnxBytes(load_onnx_bytes))
runners = [
OnnxrtRunner(build_onnxrt_session),
TrtRunner(load_engine),
]
run_results = Comparator.run(runners)
compare_func = CompareFunc.simple(check_shapes=mod.version(trt.__version__) >= mod.version("7.0"))
assert bool(Comparator.compare_accuracy(run_results, compare_func=compare_func))
assert len(list(run_results.values())[0]) == 1 # Default number of iterations
| raise_called_process_error |
nulldummy.py | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment
from test_framework.script import CScript
from io import BytesIO
import time
NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
def trueDummy(tx):
scriptSig = CScript(tx.vin[0].scriptSig)
newscript = []
for i in scriptSig:
if (len(newscript) == 0):
assert(len(i) == 0)
newscript.append(b'\x51')
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
tx.rehash()
'''
This test is meant to exercise NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate 427 more blocks.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
'''
class NULLDUMMYTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 1
self.setup_clean_chain = True
def setup_network(self):
# Must set the blockversion for this test
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1', '-walletprematurewitness']])
def run_test(self):
self.address = self.nodes[0].getnewaddress()
self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address])
self.wit_address = self.nodes[0].addwitnessaddress(self.address)
self.wit_ms_address = self.nodes[0].addwitnessaddress(self.ms_address)
NetworkThread().start() # Start up network handling in another thread
self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
coinbase_txid = []
for i in self.coinbase_blocks:
coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
# We submit a couple of blocks that do not signal to delay activation until our coinbases have matured
for i in range(COINBASE_MATURITY):
block = create_block(int(self.nodes[0].getbestblockhash(), 16), create_coinbase(self.nodes[0].getblockcount() + 1), int(time.time())+2+i)
block.nVersion = 4
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.nodes[0].submitblock(bytes_to_hex_str(block.serialize()))
        # Generate the number of blocks that the continuation of the test case expects
self.nodes[0].generate(863-COINBASE_MATURITY-2-2)
self.lastblockhash = self.nodes[0].getbestblockhash()
self.tip = int("0x" + self.lastblockhash, 0)
self.lastblockheight = self.nodes[0].getblockcount()
self.lastblocktime = int(time.time()) + self.lastblockheight + 1
print ("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)]
txid1 = self.tx_submit(self.nodes[0], test1txs[0])
test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48))
txid2 = self.tx_submit(self.nodes[0], test1txs[1])
test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49))
txid3 = self.tx_submit(self.nodes[0], test1txs[2])
self.block_submit(self.nodes[0], test1txs, False, True)
print ("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 48)
trueDummy(test2tx)
txid4 = self.tx_submit(self.nodes[0], test2tx, NULLDUMMY_ERROR)
print ("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
self.block_submit(self.nodes[0], [test2tx], False, True)
print ("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
test4tx = self.create_transaction(self.nodes[0], txid4, self.address, 47)
test6txs=[CTransaction(test4tx)]
trueDummy(test4tx)
self.tx_submit(self.nodes[0], test4tx, NULLDUMMY_ERROR)
self.block_submit(self.nodes[0], [test4tx])
print ("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
test6txs.append(CTransaction(test5tx))
test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
self.tx_submit(self.nodes[0], test5tx, NULLDUMMY_ERROR)
self.block_submit(self.nodes[0], [test5tx], True)
| for i in test6txs:
self.tx_submit(self.nodes[0], i)
self.block_submit(self.nodes[0], test6txs, True, True)
def create_transaction(self, node, txid, to_address, amount):
inputs = [{ "txid" : txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
return tx
def tx_submit(self, node, tx, msg = ""):
tx.rehash()
try:
node.sendrawtransaction(bytes_to_hex_str(tx.serialize_with_witness()), True)
except JSONRPCException as exp:
assert_equal(exp.error["message"], msg)
else:
assert_equal('', msg)
return tx.hash
def block_submit(self, node, txs, witness = False, accept = False):
block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
block.nVersion = 4
for tx in txs:
tx.rehash()
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
witness and add_witness_commitment(block)
block.rehash()
block.solve()
node.submitblock(bytes_to_hex_str(block.serialize(True)))
if (accept):
assert_equal(node.getbestblockhash(), block.hash)
self.tip = block.sha256
self.lastblockhash = block.hash
self.lastblocktime += 1
self.lastblockheight += 1
else:
assert_equal(node.getbestblockhash(), self.lastblockhash)
if __name__ == '__main__':
NULLDUMMYTest().main() | print ("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]") |
comment.go | package usecase
import (
"bufio"
"bytes"
"github.com/masibw/gifc/domain/entity"
"github.com/masibw/gifc/domain/repository"
"github.com/pkg/errors"
"os"
"strings"
)
type CommentUseCase struct {
issueRepository repository.Issue
git *entity.Git
}
func NewCommentUseCase(issueRepository repository.Issue, git *entity.Git) *CommentUseCase {
return &CommentUseCase{
issueRepository: issueRepository,
git: git,
}
}
func (c *CommentUseCase) InspectFile(filePath string) (err error) {
var file *os.File
file, err = os.Open(filePath)
defer func() {
err = file.Close()
if err != nil {
err = errors.Wrap(err, "failed to Close")
}
}()
if err != nil {
err = errors.Wrap(err, "failed to open file")
return
}
var bs []byte
buf := bytes.NewBuffer(bs)
fileScanner := bufio.NewScanner(file)
for fileScanner.Scan() {
line := fileScanner.Text()
if isTodoComment(line) && notCreated(extractCommentContent(line)) {
commentContent := extractCommentContent(line)
todoContent := extractTodoContent(commentContent)
var createdIssue *entity.Issue
createdIssue, err = c.issueRepository.Create(entity.NewIssue(todoContent, todoContent), c.git)
if err != nil {
err = errors.Wrap(err, "failed to create issue")
return
}
newComment := createdIssue.GenerateTodoCommentWithGithubInfo()
err = writeLine(buf, newComment)
if err != nil {
err = errors.Wrap(err, "failed to writeLine")
return
}
} else {
err = writeLine(buf, line)
if err != nil {
err = errors.Wrap(err, "failed to writeLine")
return
}
}
}
if err = fileScanner.Err(); err != nil {
err = errors.Wrap(err, "error while reading file")
return
}
err = os.WriteFile(filePath, buf.Bytes(), 0644)
if err != nil {
err = errors.Wrap(err, "failed to write file")
return
}
return
}
func isTodoComment(line string) bool {
if !isComment(line) {
return false
}
return isTodo(extractCommentContent(line))
}
func isComment(line string) bool {
return strings.HasPrefix(strings.TrimSpace(line), "//")
}
func extractCommentContent(line string) string {
commentContent := strings.TrimPrefix(strings.TrimSpace(line), "//")
return strings.TrimSpace(commentContent)
}
// commentContent is the comment body with the leading comment prefix ('//') already stripped.
func isTodo(commentContent string) bool {
upperLine := strings.ToUpper(strings.TrimSpace(commentContent))
return strings.HasPrefix(upperLine, "TODO")
}
func notCreated(commentContent string) bool |
func extractTodoContent(commentContent string) string {
commentContent = strings.TrimSpace(commentContent)
// remove TODO(todo)
noTodo := strings.TrimSpace(commentContent[4:])
noTodo = strings.Trim(noTodo, ":")
return strings.TrimSpace(noTodo)
}
func writeLine(buf *bytes.Buffer, content string) (err error) {
_, err = buf.WriteString(content)
if err != nil {
err = errors.Wrap(err, "failed to WriteString")
return
}
_, err = buf.WriteString("\n")
if err != nil {
err = errors.Wrap(err, "failed to WriteString")
return
}
return
}
| {
return !strings.HasPrefix(strings.TrimSpace(commentContent), "TODO-#")
} |
vggish_audio_encoder.py | __copyright__ = "Copyright (c) 2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import os
from pathlib import Path
from typing import Any, Optional, List, Iterable
from jina import Executor, requests, DocumentArray
from jina.logging.logger import JinaLogger
import requests as _requests
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from .vggish.vggish_postprocess import *
from .vggish.vggish_slim import *
cur_dir = os.path.dirname(os.path.abspath(__file__))
class VggishAudioEncoder(Executor):
"""
Encode audio data with Vggish embeddings
:param model_path: path of the models directory
    :param default_traversal_paths: fallback traversal paths to use when the request does not specify any
"""
def __init__(self,
model_path: str = Path(cur_dir) / 'models',
default_traversal_paths: Optional[Iterable[str]] = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.default_traversal_paths = default_traversal_paths or ['r']
self.logger = JinaLogger(self.__class__.__name__)
self.model_path = Path(model_path)
self.vgg_model_path = self.model_path / 'vggish_model.ckpt'
self.pca_model_path = self.model_path / 'vggish_pca_params.ckpt'
self.model_path.mkdir(exist_ok=True) # Create the model directory if it does not exist yet
if not self.vgg_model_path.exists():
self.logger.info('VGGish model cannot be found from the given model path, downloading a new one...')
try:
r = _requests.get('https://storage.googleapis.com/audioset/vggish_model.ckpt')
r.raise_for_status()
except _requests.exceptions.HTTPError:
self.logger.error('received HTTP error response, cannot download vggish model')
raise
except _requests.exceptions.RequestException:
self.logger.error('Connection error, cannot download vggish model')
raise
with open(self.vgg_model_path, 'wb') as f:
f.write(r.content)
if not self.pca_model_path.exists():
self.logger.info('PCA model cannot be found from the given model path, downloading a new one...')
try:
r = _requests.get('https://storage.googleapis.com/audioset/vggish_pca_params.npz')
r.raise_for_status()
except _requests.exceptions.HTTPError:
self.logger.error('received HTTP error response, cannot download pca model')
raise
except _requests.exceptions.RequestException:
self.logger.error('Connection error, cannot download pca model')
raise
with open(self.pca_model_path, 'wb') as f:
f.write(r.content)
self.sess = tf.compat.v1.Session()
define_vggish_slim()
load_vggish_slim_checkpoint(self.sess, str(self.vgg_model_path))
self.feature_tensor = self.sess.graph.get_tensor_by_name(
INPUT_TENSOR_NAME)
self.embedding_tensor = self.sess.graph.get_tensor_by_name(
OUTPUT_TENSOR_NAME)
self.post_processor = Postprocessor(str(self.pca_model_path))
@requests
def encode(self, docs: Optional[DocumentArray], parameters: dict, **kwargs):
"""
Compute embeddings and store them in the `docs` array.
        :param docs: documents sent to the encoder. The docs must have the audio
        features stored in `blob`.
:param parameters: dictionary to define the `traversal_paths` and the `batch_size`. For example,
`parameters={'traversal_paths': ['r'], 'batch_size': 10}`.
:param kwargs: Additional key value arguments.
:return:
"""
if docs:
cleaned_document_array = self._get_input_data(docs, parameters)
self._create_embeddings(cleaned_document_array)
def _get_input_data(self, docs: DocumentArray, parameters: dict):
"""Create a filtered set of Documents to iterate over."""
traversal_paths = parameters.get('traversal_paths', self.default_traversal_paths)
        # traverse through all documents which have to be processed
flat_docs = docs.traverse_flat(traversal_paths)
        # filter out documents without blobs (nothing to encode)
filtered_docs = DocumentArray([doc for doc in flat_docs if doc.blob is not None])
return filtered_docs
def _create_embeddings(self, filtered_docs: Iterable):
"""Update the documents with the embeddings generated by VGGISH"""
for d in filtered_docs:
# Vggish broadcasts across different length audios, not batches
[embedding] = self.sess.run([self.embedding_tensor], feed_dict={self.feature_tensor: d.blob})
result = self.post_processor.postprocess(embedding)
d.embedding = np.mean((np.float32(result) - 128.) / 128., axis=0)
def | (self):
self.sess.close()
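    # A minimal usage sketch for driving the encoder directly (no Flow). The input shape is a
    # placeholder for VGGish log-mel features of form (num_examples, 96, 64); the embedding is
    # expected to come out as a 128-dimensional vector after PCA post-processing.
    #
    #   import numpy as np
    #   from jina import Document, DocumentArray
    #
    #   encoder = VggishAudioEncoder()
    #   doc = Document(blob=np.random.rand(5, 96, 64).astype('float32'))
    #   encoder.encode(DocumentArray([doc]), parameters={})
    #   print(doc.embedding.shape)  # expected: (128,)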
| close |
plural.go | package en_BS
| var pluralRule = "1" |
|
rz.rs | // Copyright 2019 Q1t BV
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::gates::Gate;
/// Rotation around `z` axis.
///
/// The `R`<sub>`Z`</sub>`(λ)` gate rotates the qubit around the `z` axis of the
/// Bloch sphere over an angle `λ`. It is equivalent to the `U`<sub>`1`</sub>
/// gate, up to an overall phase. The associated matrix is
/// ```text
/// ┌ ┐
/// │ exp(-iλ/2) 0 │
/// │ │
/// │ 0 exp(iλ/2)│
/// └ ┘
/// ```
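/// As a quick check of the phase relation mentioned above:
/// `U`<sub>`1`</sub>`(λ)` has matrix `diag(1, exp(iλ))`, so multiplying it by the global
/// phase `exp(-iλ/2)` gives `diag(exp(-iλ/2), exp(iλ/2))`, which is exactly the matrix of
/// `R`<sub>`Z`</sub>`(λ)` shown above.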
#[derive(Clone)]
pub struct RZ
{
lambda: crate::gates::Parameter,
desc: String
}
impl RZ
{
/// Create a new `R`<sub>`Z`</sub> gate with fixed angle `lambda`
pub fn new<T>(lambda: T) -> Self
where crate::gates::Parameter: From<T>
{
let param = crate::gates::Parameter::from(lambda);
let desc = format!("RZ({:.4})", param);
RZ { lambda: param, desc: desc }
}
}
impl crate::gates::Gate for RZ
{
fn cost(&self) -> f64
{
crate::gates::U1::cost()
}
fn description(&self) -> &str
{
&self.desc
}
fn nr_affected_bits(&self) -> usize
{
1
}
fn matrix(&self) -> crate::cmatrix::CMatrix
{
let z = crate::cmatrix::COMPLEX_ZERO;
let p = num_complex::Complex::from_polar(&1.0, &(0.5 * self.lambda.value()));
array![[p.conj(), z], [z, p]]
}
fn apply_slice(&self, mut state: crate::cmatrix::CVecSliceMut)
{
assert!(state.len() % 2 == 0, "Number of rows is not even.");
let n = state.len() / 2;
let hlambda = 0.5 * self.lambda.value();
{
let mut slice = state.slice_mut(s![..n]);
slice *= num_complex::Complex::from_polar(&1.0, &(-hlambda));
}
{
let mut slice = state.slice_mut(s![n..]);
slice *= num_complex::Complex::from_polar(&1.0, &( hlambda));
}
}
fn apply_mat_slice(&self, mut state: crate::cmatrix::CMatSliceMut)
{
assert!(state.len() % 2 == 0, "Number of rows is not even.");
let n = state.rows() / 2;
let hlambda = 0.5 * self.lambda.value();
{
let mut slice = state.slice_mut(s![..n, ..]);
slice *= num_complex::Complex::from_polar(&1.0, &(-hlambda));
}
{
let mut slice = state.slice_mut(s![n.., ..]);
slice *= num_complex::Complex::from_polar(&1.0, &( hlambda));
}
}
}
impl crate::export::OpenQasm for RZ
{
fn open_qasm(&self, bit_names: &[String], bits: &[usize])
-> crate::error::Result<String>
{
Ok(format!("rz({}) {}", self.lambda, bit_names[bits[0]]))
}
}
impl crate::export::CQasm for RZ
{
fn c_qasm(&self, bit_names: &[String], bits: &[usize])
-> crate::error::Result<String>
{
Ok(format!("rz | Latex for RZ
{
fn latex(&self, bits: &[usize], state: &mut crate::export::LatexExportState)
-> crate::error::Result<()>
{
self.check_nr_bits(bits.len())?;
let contents = format!("R_z({:.4})", self.lambda);
state.add_block_gate(bits, &contents)
}
}
impl crate::arithmetic::Square for RZ
{
type SqType = Self;
fn square(&self) -> crate::error::Result<Self::SqType>
{
match self.lambda
{
crate::gates::Parameter::Direct(x) => Ok(Self::new(2.0 * x)),
_ => Err(crate::error::Error::ReferenceArithmetic)
}
}
}
#[cfg(test)]
mod tests
{
use super::RZ;
use crate::arithmetic::Square;
use crate::gates::{gate_test, Gate};
use crate::export::{Latex, LatexExportState, OpenQasm, CQasm};
#[test]
fn test_description()
{
let gate = RZ::new(::std::f64::consts::FRAC_PI_4);
assert_eq!(gate.description(), "RZ(0.7854)");
}
#[test]
fn test_cost()
{
let gate = RZ::new(0.21675627161);
assert_eq!(gate.cost(), 7.0);
}
#[test]
fn test_matrix()
{
let gate = RZ::new(::std::f64::consts::PI);
let z = crate::cmatrix::COMPLEX_ZERO;
let i = crate::cmatrix::COMPLEX_I;
assert_complex_matrix_eq!(gate.matrix(), array![[-i, z], [z, i]]);
}
#[test]
fn test_apply()
{
let z = crate::cmatrix::COMPLEX_ZERO;
let o = crate::cmatrix::COMPLEX_ONE;
let x = crate::cmatrix::COMPLEX_HSQRT2;
let i = crate::cmatrix::COMPLEX_I;
let mut state = array![
[o, z, x, x],
[z, o, x, -x]
];
let result = array![
[x*(o-i), z, 0.5*(o-i), 0.5*(o-i)],
[z , x*(o+i), 0.5*(o+i), -0.5*(o+i)]
];
let gate = RZ::new(::std::f64::consts::FRAC_PI_2);
gate_test(gate, &mut state, &result);
}
#[test]
fn test_open_qasm()
{
let bit_names = [String::from("qb")];
let qasm = RZ::new(2.25).open_qasm(&bit_names, &[0]);
assert_eq!(qasm, Ok(String::from("rz(2.25) qb")));
}
#[test]
fn test_c_qasm()
{
let bit_names = [String::from("qb")];
let qasm = RZ::new(2.25).c_qasm(&bit_names, &[0]);
assert_eq!(qasm, Ok(String::from("rz qb, 2.25")));
}
#[test]
fn test_latex()
{
let gate = RZ::new(::std::f64::consts::FRAC_PI_2);
let mut state = LatexExportState::new(1, 0);
assert_eq!(gate.latex(&[0], &mut state), Ok(()));
assert_eq!(state.code(),
r#"\Qcircuit @C=1em @R=.7em {
\lstick{\ket{0}} & \gate{R_z(1.5708)} & \qw \\
}
"#);
let gate = RZ::new(-24.0);
let mut state = LatexExportState::new(1, 0);
assert_eq!(gate.latex(&[0], &mut state), Ok(()));
assert_eq!(state.code(),
r#"\Qcircuit @C=1em @R=.7em {
\lstick{\ket{0}} & \gate{R_z(-24.0000)} & \qw \\
}
"#);
}
#[test]
fn test_square()
{
let gate = RZ::new(0.0);
let mat = gate.matrix();
let sq_mat = mat.dot(&mat);
assert_complex_matrix_eq!(gate.square().unwrap().matrix(), &sq_mat);
let gate = RZ::new(1.3);
let mat = gate.matrix();
let sq_mat = mat.dot(&mat);
assert_complex_matrix_eq!(gate.square().unwrap().matrix(), &sq_mat);
let gate = RZ::new(-2.5);
let mat = gate.matrix();
let sq_mat = mat.dot(&mat);
assert_complex_matrix_eq!(gate.square().unwrap().matrix(), &sq_mat);
}
}
| {}, {}", bit_names[bits[0]], self.lambda))
}
}
impl crate::export:: |
Kernel_based_Regressions.py | # -*- coding: utf-8 -*-
"""
@author: Sushant
"""
import numpy as np
import scipy.linalg
######################################## Least Square Linear Regression ####################################
def LinearRegression(data,labels):
numdata = int( np.size(data,0) )
b1 = np.hstack(( data,np.ones((numdata,1)) ) )
XXT = np.matmul(b1.T,b1)
invXXT = np.linalg.pinv(XXT)
b2 = np.matmul(b1.T,labels)
w = np.matmul(invXXT,b2)
return w
###################################### Ridges Regression ###################################################
def | (data,labels,lamda):
numdata = int( np.size(data,0) )
b1 = np.hstack(( data,np.ones((numdata,1)) ) )
    XXT = np.matmul(b1.T,b1) + lamda*np.identity(np.size(b1,1))
b2 = np.matmul(b1.T,labels)
    # solved using Cholesky decomposition Ax = b
b3 = scipy.linalg.cho_factor( XXT )
w = scipy.linalg.cho_solve(b3,b2)
return w
###################################### Predict values and Least Square Error###############################
def PredictLabels(testdata,w):
numdata = int( np.size(testdata,0) )
b3 = np.hstack( ( testdata,np.ones((numdata,1)) ) )
pred = np.matmul(b3,w)
return pred
def ltsqerror(prelabels,actlabels):
return np.sum((prelabels-actlabels)**2)/int(np.size(prelabels,0))
####################################### Kernel Ridges Regression ########################################
def linear(x1,x2,p = None):
return np.dot(x1,x2)
def polynomial(x1,x2,d):
return ( 1+np.dot(x1,x2) )**d
def rbf(x1,x2,l):
return np.exp( -np.divide(np.dot(x1-x2,x1-x2), 2*(l**2 ) ) )
def KernelRidgeRegression(data,labels,lamda,kernel,p):
numdata = int( np.size(data,0) )
traindata = np.asarray(data)
#=========Kernel matrix======================
K = np.zeros((numdata,numdata))
for i in range(0,numdata):
for j in range(0,numdata):
K[i,j] = kernel(traindata[i,:],traindata[j,:],p)
    # solved using Cholesky decomposition Ax = b
b1 = scipy.linalg.cho_factor( K + lamda*np.identity(numdata) )
alphas = scipy.linalg.cho_solve(b1,labels)
return alphas
def KernelRidgesRegression_predict(traindata1,alphas,testdata1,kernel,p):
numtraindata = int( np.size(traindata1,0) )
numtestdata = int( np.size(testdata1,0) )
traindata = np.asarray(traindata1)
testdata = np.asarray(testdata1)
predlabels = np.zeros((numtestdata,1))
K = np.zeros((numtestdata,numtraindata))
for j in range(0,numtestdata):
for i in range(0,numtraindata):
K[j,i] = kernel(traindata[i,:],testdata[j,:],p)
predlabels = np.dot(K,alphas)
return predlabels
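# A minimal usage sketch (the toy data, length-scale and regularization values below are
# placeholders, not part of the original script). KernelRidgeRegression solves
# (K + lamda*I) alpha = y via a Cholesky factorization, and prediction is K(test, train) @ alpha.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X_train = rng.uniform(-3, 3, size=(50, 1))
    y_train = np.sin(X_train) + 0.1 * rng.randn(50, 1)
    X_test = np.linspace(-3, 3, 20).reshape(-1, 1)
    alphas = KernelRidgeRegression(X_train, y_train, 0.1, rbf, 0.5)
    y_pred = KernelRidgesRegression_predict(X_train, alphas, X_test, rbf, 0.5)
    print(ltsqerror(y_pred, np.sin(X_test)))  # mean squared error against the noiseless targets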
| RidgeRegression |
fields.py | import sys
from vyperlogix.misc import _utils
from django.utils.datastructures import SortedDict as SortedDictFromList
from vyperlogix.classes.SmartObject import SmartObject
def | (model, formfield_callback=lambda f: f.formfield()):
"""
Returns a list of fields for the given Django model class.
Provide ``formfield_callback`` if you want to define different logic for
determining the formfield for a given database field. It's a callable that
takes a database Field instance and returns a form Field instance.
"""
field_list = []
try:
opts = model._meta
for f in opts.fields + opts.many_to_many:
if not f.editable:
continue
formfield = formfield_callback(f)
if formfield:
field_list.append((f.name, formfield))
except Exception as details:
print >>sys.stderr, _utils.formattedException(details=details)
return SortedDictFromList(dict(field_list))
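# A minimal usage sketch (the model and field names are hypothetical, and Django settings must
# be configured before model fields can produce formfields):
#
#   from django import forms
#   from myapp.models import Book   # hypothetical model
#
#   # default callback: every editable field maps to its own formfield
#   fields = fields_for_model(Book)
#
#   # custom callback: force every field to render as a plain CharField
#   fields = fields_for_model(Book, formfield_callback=lambda f: forms.CharField(label=f.verbose_name))
#
# The result is a SortedDict of (field name -> form field), suitable for building a Form class.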
| fields_for_model |
DefaultDanmakuConfig.ts | import { IDanmakuConfig } from "../interface/IDanmakuConfig";
const DefaultDanmakuConfig: IDanmakuConfig = {
width:1280,
height:720
} | export default DefaultDanmakuConfig |
|
LayoutHero.tsx | import React, { ReactNode } from 'react';
import { AppBar, Toolbar, Box } from '@material-ui/core';
import styled from 'styled-components';
import { Flex, DarkModeToggle, LocaleToggle } from '@chia/core';
import AppTimeBombAlert from '../app/AppTimeBombAlert';
const StyledWrapper = styled(Box)`
padding-top: ${({ theme }) => `${theme.spacing(3)}px`};
display: flex;
flex-direction: column;
flex-grow: 1;
background: ${({ theme }) =>
theme.palette.type === 'dark'
? `linear-gradient(45deg, #222222 30%, #333333 90%)`
: `linear-gradient(45deg, #ffffff 30%, #fdfdfd 90%)`};
`;
const StyledBody = styled(Box)`
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
flex-grow: 1;
`;
type Props = {
children?: ReactNode;
header?: ReactNode;
};
export default function LayoutHero(props: Props) {
const { children, header } = props;
return (
<StyledWrapper>
<AppBar color="transparent" elevation={0}>
<Toolbar>
{header}
<Flex flexGrow={1} />
<LocaleToggle />
<DarkModeToggle />
</Toolbar>
</AppBar>
<StyledBody>
<Flex flexDirection="column" gap={2} alignItems="center">
<AppTimeBombAlert />
{children}
</Flex>
</StyledBody>
</StyledWrapper>
);
}
LayoutHero.defaultProps = {
header: undefined,
children: undefined, | }; |
|
element.mapper.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ElementMapper = void 0;
var kontent_core_1 = require("@kentico/kontent-core");
var config_1 = require("../config");
var elements_1 = require("../elements");
var models_1 = require("../models");
var resolvers_1 = require("../resolvers");
var ElementMapper = /** @class */ (function () {
function | (config, richTextHtmlParser) {
this.config = config;
this.richTextHtmlParser = richTextHtmlParser;
this.defaultLinkedItemWrapperTag = 'p';
this.defaultLinkedItemWrapperClasses = ['kc-linked-item-wrapper'];
}
/**
     * Maps all elements in the given content item and returns a strongly typed content item based on the resolver specified
* in DeliveryClientConfig
*/
ElementMapper.prototype.mapElements = function (data) {
var _this = this;
// return processed item if possible (to avoid infinite recursion)
var processedItem = data.processedItems[data.item.system.codename];
if (processedItem) {
// item was already resolved, return it
return {
item: processedItem,
processedItems: data.processedItems,
preparedItems: data.preparedItems,
processingStartedForCodenames: data.processingStartedForCodenames
};
}
var elementCodenames = Object.getOwnPropertyNames(data.item.elements);
var itemInstance = data.preparedItems[data.item.system.codename];
if (!itemInstance) {
// item is not present in response, no need to do any mapping
return undefined;
}
elementCodenames.forEach(function (elementCodename) {
var elementMap = _this.resolveElementMap(itemInstance, elementCodename);
var elementWrapper = {
contentItemSystem: data.item.system,
rawElement: data.item.elements[elementCodename],
propertyName: elementMap.resolvedName
};
if (elementMap.shouldMapElement) {
var mappedElement = _this.mapElement({
elementWrapper: elementWrapper,
item: itemInstance,
preparedItems: data.preparedItems,
processingStartedForCodenames: data.processingStartedForCodenames,
processedItems: data.processedItems,
queryConfig: data.queryConfig
});
// set mapped element to item instance
itemInstance[elementMap.resolvedName] = mappedElement;
}
});
return {
item: itemInstance,
processedItems: data.processedItems,
preparedItems: data.preparedItems,
processingStartedForCodenames: data.processingStartedForCodenames
};
};
ElementMapper.prototype.mapElement = function (data) {
var elementType = kontent_core_1.enumHelper.getEnumFromValue(elements_1.ElementType, data.elementWrapper.rawElement.type);
if (elementType) {
if (elementType === elements_1.ElementType.ModularContent) {
return this.mapLinkedItemsElement({
elementWrapper: data.elementWrapper,
preparedItems: data.preparedItems,
processingStartedForCodenames: data.processingStartedForCodenames,
processedItems: data.processedItems,
queryConfig: data.queryConfig
});
}
if (elementType === elements_1.ElementType.Text) {
return this.mapTextElement(data.elementWrapper);
}
if (elementType === elements_1.ElementType.Asset) {
return this.mapAssetsElement(data.elementWrapper);
}
if (elementType === elements_1.ElementType.Number) {
return this.mapNumberElement(data.elementWrapper);
}
if (elementType === elements_1.ElementType.MultipleChoice) {
return this.mapMultipleChoiceElement(data.elementWrapper);
}
if (elementType === elements_1.ElementType.DateTime) {
return this.mapDateTimeElement(data.elementWrapper);
}
if (elementType === elements_1.ElementType.RichText) {
return this.mapRichTextElement(data.item, data.elementWrapper, data.queryConfig, data.processedItems, data.processingStartedForCodenames, data.preparedItems);
}
if (elementType === elements_1.ElementType.UrlSlug) {
return this.mapUrlSlugElement(data.elementWrapper, data.item, data.queryConfig);
}
if (elementType === elements_1.ElementType.Taxonomy) {
return this.mapTaxonomyElement(data.elementWrapper);
}
if (elementType === elements_1.ElementType.Custom) {
return this.mapCustomElement(data.elementWrapper);
}
}
console.warn("Could not map element '" + data.elementWrapper.rawElement.name + "' of type '" + data.elementWrapper.rawElement.type + "'. Returning unknown element instead.");
return this.mapUnknowElement(data.elementWrapper);
};
ElementMapper.prototype.mapRichTextElement = function (item, elementWrapper, queryConfig, processedItems, processingStartedForCodenames, preparedItems) {
var _this = this;
// get all linked items nested in rich text
var richTextLinkedItems = [];
var rawElement = elementWrapper.rawElement;
if (rawElement.modular_content) {
if (Array.isArray(rawElement.modular_content)) {
rawElement.modular_content.forEach(function (codename) {
// get linked item and check if it exists (it might not be included in response due to 'Depth' parameter)
var preparedItem = preparedItems[codename];
// first try to get existing item
var existingLinkedItem = _this.getOrSaveLinkedItemForElement(codename, rawElement, queryConfig, processedItems, processingStartedForCodenames, preparedItems);
if (existingLinkedItem) {
// item was found, add it to linked items
richTextLinkedItems.push(existingLinkedItem);
}
else {
var throwErrorForMissingLinkedItems = false;
// check if errors should be thrown for missing linked items
if (queryConfig.throwErrorForMissingLinkedItems === false ||
queryConfig.throwErrorForMissingLinkedItems === true) {
// variable is a boolean
throwErrorForMissingLinkedItems = queryConfig.throwErrorForMissingLinkedItems;
}
// throw error if raw item is not available and errors are not skipped
if (!preparedItem) {
var msg = "Mapping RichTextElement element '" + rawElement.name + "' failed because referenced linked item with codename '" + codename + "' could not be found in Delivery response.\n Increasing 'depth' parameter may solve this issue as it will include nested items. Alternatively you may disable 'throwErrorForMissingLinkedItems' in your query";
if (throwErrorForMissingLinkedItems) {
throw Error(msg);
}
}
// item was not found or not yet resolved
if (preparedItem) {
var mappedLinkedItemResult = _this.mapElements({
item: preparedItem._raw,
preparedItems: preparedItems,
processingStartedForCodenames: processingStartedForCodenames,
processedItems: processedItems,
queryConfig: queryConfig
});
// add mapped linked item to result
if (mappedLinkedItemResult) {
richTextLinkedItems.push(mappedLinkedItemResult.item);
}
}
}
});
}
}
// extract and map links & images
var links = this.mapRichTextLinks(rawElement.links);
var images = this.mapRichTextImages(rawElement.images);
return new elements_1.Elements.RichTextElement(elementWrapper, rawElement.modular_content, {
links: links,
resolveRichTextFunc: function () {
return resolvers_1.richTextResolver.resolveData(item.system.codename, rawElement.value, elementWrapper.propertyName, {
enableAdvancedLogging: _this.config.isDeveloperMode ? _this.config.isDeveloperMode : false,
getGlobalUrlSlugResolver: function (type) { return _this.getGlobalUrlSlugResolverForType(type); },
images: images,
richTextHtmlParser: _this.richTextHtmlParser,
getLinkedItem: function (codename) {
return _this.getOrSaveLinkedItemForElement(codename, rawElement, queryConfig, processedItems, processingStartedForCodenames, preparedItems);
},
links: links,
queryConfig: queryConfig,
linkedItemWrapperTag: _this.config.linkedItemResolver && _this.config.linkedItemResolver.linkedItemWrapperTag
? _this.config.linkedItemResolver.linkedItemWrapperTag
: _this.defaultLinkedItemWrapperTag,
linkedItemWrapperClasses: _this.config.linkedItemResolver && _this.config.linkedItemResolver.linkedItemWrapperClasses
? _this.config.linkedItemResolver.linkedItemWrapperClasses
: _this.defaultLinkedItemWrapperClasses
});
},
images: images
});
};
ElementMapper.prototype.mapDateTimeElement = function (elementWrapper) {
return new elements_1.Elements.DateTimeElement(elementWrapper);
};
ElementMapper.prototype.mapMultipleChoiceElement = function (elementWrapper) {
return new elements_1.Elements.MultipleChoiceElement(elementWrapper);
};
ElementMapper.prototype.mapNumberElement = function (elementWrapper) {
return new elements_1.Elements.NumberElement(elementWrapper);
};
ElementMapper.prototype.mapTextElement = function (elementWrapper) {
return new elements_1.Elements.TextElement(elementWrapper);
};
ElementMapper.prototype.mapAssetsElement = function (elementWrapper) {
return new elements_1.Elements.AssetsElement(elementWrapper);
};
ElementMapper.prototype.mapTaxonomyElement = function (elementWrapper) {
return new elements_1.Elements.TaxonomyElement(elementWrapper);
};
ElementMapper.prototype.mapUnknowElement = function (elementWrapper) {
return new elements_1.Elements.UnknownElement(elementWrapper);
};
ElementMapper.prototype.mapCustomElement = function (elementWrapper) {
// try to find element resolver
if (this.config.elementResolver) {
var customElementClass = this.config.elementResolver(elementWrapper);
if (customElementClass) {
return customElementClass;
}
}
return new elements_1.Elements.DefaultCustomElement(elementWrapper);
};
ElementMapper.prototype.mapUrlSlugElement = function (elementWrapper, item, queryConfig) {
var _this = this;
var resolver = this.getUrlSlugResolverForElement(item, elementWrapper, queryConfig);
return new elements_1.Elements.UrlSlugElement(elementWrapper, {
resolveLinkFunc: function () {
return resolvers_1.urlSlugResolver.resolveUrl({
elementName: elementWrapper.propertyName,
elementValue: elementWrapper.rawElement.value,
item: item,
enableAdvancedLogging: _this.config.isDeveloperMode ? _this.config.isDeveloperMode : false,
resolver: resolver
}).url || '';
}
});
};
ElementMapper.prototype.mapLinkedItemsElement = function (data) {
var _this = this;
// prepare linked items
var linkedItems = [];
// value = array of item codenames
var linkedItemCodenames = data.elementWrapper.rawElement.value;
linkedItemCodenames.forEach(function (codename) {
var linkedItem = _this.getOrSaveLinkedItemForElement(codename, data.elementWrapper.rawElement, data.queryConfig, data.processedItems, data.processingStartedForCodenames, data.preparedItems);
if (linkedItem) {
// add item to result
linkedItems.push(linkedItem);
}
else {
// item was not found
if (_this.config.isDeveloperMode) {
// tslint:disable-next-line:max-line-length
console.warn("Linked item with codename '" + codename + "' in linked items element '" + data.elementWrapper.rawElement.name + "' of '" + data.elementWrapper.rawElement.type + "' type could not be found. If you require this item, consider increasing 'depth' of your query. This warning can be turned off by disabling 'enableAdvancedLogging' option.");
}
}
});
return new elements_1.Elements.LinkedItemsElement(data.elementWrapper, linkedItems);
};
ElementMapper.prototype.getUrlSlugResolverForElement = function (item, elementWrapper, queryConfig) {
// query `urlSlugResolver` has priority over global resolver
if (queryConfig.urlSlugResolver) {
return queryConfig.urlSlugResolver;
}
if (item._config && item._config.urlSlugResolver) {
return item._config.urlSlugResolver;
}
// resolve default link value
return function () { return elementWrapper.rawElement.value; };
};
ElementMapper.prototype.getOrSaveLinkedItemForElement = function (codename, element, queryConfig, processedItems, mappingStartedForCodenames, preparedItems) {
// first check if item was already resolved and return it if it was
var processedItem = processedItems[codename];
if (processedItem) {
// item was already resolved
return processedItem;
}
var preparedItem = preparedItems[codename];
if (mappingStartedForCodenames.includes(codename)) {
return preparedItem;
}
mappingStartedForCodenames.push(codename);
// by default errors are not thrown
var throwErrorForMissingLinkedItems = queryConfig.throwErrorForMissingLinkedItems === true ? true : false;
// throw error if item is not in response and errors are not skipped
if (!preparedItem) {
if (throwErrorForMissingLinkedItems) {
throw Error("Linked item with codename '" + codename + "' could not be found in Delivery response.\n This linked item was requested by '" + element.name + "' element of '" + element.type + "'.\n Error can usually be solved by increasing 'Depth' parameter of your query.\n Alternatively, you may prevent this error by disabling 'throwErrorForMissingLinkedItems' in query configuration.");
}
return undefined;
}
var mappedLinkedItem;
// original resolving if item is still undefined
var mappedLinkedItemResult = this.mapElements({
item: preparedItem._raw,
preparedItems: preparedItems,
processingStartedForCodenames: mappingStartedForCodenames,
processedItems: processedItems,
queryConfig: queryConfig
});
if (mappedLinkedItemResult) {
mappedLinkedItem = mappedLinkedItemResult.item;
// add to processed items
processedItems[codename] = mappedLinkedItem;
}
return mappedLinkedItem;
};
ElementMapper.prototype.mapRichTextLinks = function (linksJson) {
var links = [];
for (var _i = 0, _a = Object.keys(linksJson); _i < _a.length; _i++) {
var linkId = _a[_i];
var linkRaw = linksJson[linkId];
links.push(new models_1.Link({
codename: linkRaw.codename,
linkId: linkId,
urlSlug: linkRaw.url_slug,
type: linkRaw.type
}));
}
return links;
};
ElementMapper.prototype.mapRichTextImages = function (imagesJson) {
var images = [];
for (var _i = 0, _a = Object.keys(imagesJson); _i < _a.length; _i++) {
var imageId = _a[_i];
var imageRaw = imagesJson[imageId];
images.push(new models_1.RichTextImage({
description: imageRaw.description,
imageId: imageRaw.image_id,
url: imageRaw.url,
height: imageRaw.height,
width: imageRaw.width
}));
}
return images;
};
ElementMapper.prototype.resolveElementMap = function (item, originalElementCodename) {
var resolvedElementPropertyName = undefined;
// resolve using property resolver
if (item._config && item._config.propertyResolver) {
resolvedElementPropertyName = item._config.propertyResolver(originalElementCodename);
}
// if property hasn't been resolved, try getting name using decorator
if (resolvedElementPropertyName === originalElementCodename || !resolvedElementPropertyName) {
resolvedElementPropertyName = elements_1.ElementDecorators.getPropertyName(item, originalElementCodename);
}
if (!resolvedElementPropertyName) {
// use original element codename
resolvedElementPropertyName = originalElementCodename;
}
// check for collissions
if (this.collidesWithAnotherProperty(resolvedElementPropertyName, item)) {
// try to resolve collission using dedicated resolver
var collisionResolver = this.getCollisionResolver();
resolvedElementPropertyName = collisionResolver(resolvedElementPropertyName);
// verify again if the new element collides
if (this.collidesWithAnotherProperty(resolvedElementPropertyName, item)) {
console.warn("Element '" + resolvedElementPropertyName + "' collides with another element in same type. Element mapping is skipped. Source item: '" + item.system.codename + "'");
return {
shouldMapElement: false,
resolvedName: ''
};
}
}
return {
resolvedName: resolvedElementPropertyName,
shouldMapElement: true
};
};
ElementMapper.prototype.getGlobalUrlSlugResolverForType = function (type) {
var item = resolvers_1.stronglyTypedResolver.createEmptyItemInstanceOfType(type, this.config.typeResolvers || []);
if (item && item._config && item._config.urlSlugResolver) {
return item._config.urlSlugResolver;
}
return undefined;
};
ElementMapper.prototype.getCollisionResolver = function () {
return this.config.collisionResolver ? this.config.collisionResolver : config_1.defaultCollissionResolver;
};
ElementMapper.prototype.collidesWithAnotherProperty = function (elementName, item) {
return item[elementName] ? true : false;
};
return ElementMapper;
}());
exports.ElementMapper = ElementMapper;
//# sourceMappingURL=element.mapper.js.map | ElementMapper |
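The mapper above stores every mapped linked item in processedItems and tracks codenames whose mapping has already started, which is what keeps circular item references from recursing forever. A minimal Python sketch of that memoization/cycle-guard idea follows; the simplified item shape and the function name are illustrative only, not the Kentico SDK's API.

def resolve_item(codename, raw_items, processed=None, in_progress=None):
    """Map one raw item, reusing already-mapped items so circular links terminate."""
    processed = {} if processed is None else processed
    in_progress = set() if in_progress is None else in_progress
    if codename in processed:        # already fully mapped -> reuse the instance
        return processed[codename]
    if codename in in_progress:      # circular reference -> stop descending
        return None
    raw = raw_items.get(codename)
    if raw is None:                  # not present in the response (e.g. depth too small)
        return None
    in_progress.add(codename)
    item = {"codename": codename, "linked": []}
    for child in raw.get("linked_codenames", []):
        item["linked"].append(resolve_item(child, raw_items, processed, in_progress))
    processed[codename] = item
    return item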
models.rs | #![doc = "generated by AutoRust"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[doc = "EnabledResourceType definition."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct EnabledResourceType {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[doc = "Properties for EnabledResourceType of a custom location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<EnabledResourceTypeProperties>,
#[doc = "Metadata pertaining to creation and last modification of the resource."]
#[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
pub system_data: Option<SystemData>,
}
impl EnabledResourceType {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Properties for EnabledResourceType of a custom location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct EnabledResourceTypeProperties {
#[doc = "Cluster Extension ID"]
#[serde(rename = "clusterExtensionId", default, skip_serializing_if = "Option::is_none")]
pub cluster_extension_id: Option<String>,
#[doc = "Cluster Extension Type"]
#[serde(rename = "extensionType", default, skip_serializing_if = "Option::is_none")]
pub extension_type: Option<String>,
#[doc = "Metadata of the Resource Type"]
#[serde(rename = "typesMetadata", default, skip_serializing_if = "Vec::is_empty")]
pub types_metadata: Vec<serde_json::Value>,
}
impl EnabledResourceTypeProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "List of EnabledResourceTypes definition."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct EnabledResourceTypesListResult {
#[doc = "The URL to use for getting the next set of results."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
#[doc = "The list of EnabledResourceTypes available for a customLocation."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<EnabledResourceType>,
}
impl EnabledResourceTypesListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The resource management error additional info."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorAdditionalInfo {
#[doc = "The additional info type."]
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[doc = "The additional info."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub info: Option<serde_json::Value>,
}
impl ErrorAdditionalInfo {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The error detail."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorDetail {
#[doc = "The error code."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[doc = "The error message."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[doc = "The error target."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[doc = "The error details."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<ErrorDetail>,
#[doc = "The error additional info."]
#[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
pub additional_info: Vec<ErrorAdditionalInfo>,
}
impl ErrorDetail {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.)."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorResponse {
#[doc = "The error detail."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDetail>,
}
impl ErrorResponse {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ProxyResource {
#[serde(flatten)]
pub resource: Resource,
}
impl ProxyResource {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Common fields that are returned in the response for all Azure Resource Manager resources"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Resource {
#[doc = "Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[doc = "The name of the resource"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or \"Microsoft.Storage/storageAccounts\""]
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
impl Resource {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
#[serde(flatten)]
pub resource: Resource,
#[doc = "Resource tags."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
#[doc = "The geo-location where the resource lives"]
pub location: String,
}
impl TrackedResource {
pub fn new(location: String) -> Self {
Self {
resource: Resource::default(),
tags: None,
location,
}
}
}
#[doc = "Custom Locations definition."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomLocation {
#[serde(flatten)]
pub tracked_resource: TrackedResource,
#[doc = "Properties for a custom location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<CustomLocationProperties>,
#[doc = "Metadata pertaining to creation and last modification of the resource."]
#[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
pub system_data: Option<SystemData>,
}
impl CustomLocation {
pub fn new(tracked_resource: TrackedResource) -> Self {
Self {
tracked_resource,
properties: None,
system_data: None,
}
}
}
#[doc = "The List Custom Locations operation response."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CustomLocationListResult {
#[doc = "The URL to use for getting the next set of results."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
#[doc = "The list of Custom Locations."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<CustomLocation>,
}
impl CustomLocationListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Custom Locations operation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CustomLocationOperation {
#[doc = "Describes the properties of a Custom Locations Operation Value Display."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub display: Option<CustomLocationOperationValueDisplay>,
#[doc = "Is this Operation a data plane operation"]
#[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
pub is_data_action: Option<bool>,
#[doc = "The name of the compute operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "The origin of the compute operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub origin: Option<String>,
}
impl CustomLocationOperation {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Describes the properties of a Custom Locations Operation Value Display."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CustomLocationOperationValueDisplay {
#[doc = "The description of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[doc = "The display name of the compute operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub operation: Option<String>,
#[doc = "The resource provider for the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[doc = "The display name of the resource the operation applies to."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub resource: Option<String>,
}
impl CustomLocationOperationValueDisplay {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Lists of Custom Locations operations."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomLocationOperationsList {
#[doc = "Next page of operations."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
#[doc = "Array of customLocationOperation"]
pub value: Vec<CustomLocationOperation>,
}
impl CustomLocationOperationsList {
pub fn new(value: Vec<CustomLocationOperation>) -> Self {
Self { next_link: None, value }
}
}
#[doc = "Properties for a custom location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CustomLocationProperties {
#[doc = "This is optional input that contains the authentication that should be used to generate the namespace."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub authentication: Option<custom_location_properties::Authentication>,
#[doc = "Contains the reference to the add-on that contains charts to deploy CRDs and operators."]
#[serde(rename = "clusterExtensionIds", default, skip_serializing_if = "Vec::is_empty")]
pub cluster_extension_ids: Vec<String>,
#[doc = "Display name for the Custom Locations location."]
#[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
pub display_name: Option<String>,
#[doc = "Connected Cluster or AKS Cluster. The Custom Locations RP will perform a checkAccess API for listAdminCredentials permissions."]
#[serde(rename = "hostResourceId", default, skip_serializing_if = "Option::is_none")]
pub host_resource_id: Option<String>,
#[doc = "Type of host the Custom Locations is referencing (Kubernetes, etc...)."]
#[serde(rename = "hostType", default, skip_serializing_if = "Option::is_none")]
pub host_type: Option<custom_location_properties::HostType>,
#[doc = "Kubernetes namespace that will be created on the specified cluster."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub namespace: Option<String>,
#[doc = "Provisioning State for the Custom Location."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
}
impl CustomLocationProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod custom_location_properties {
use super::*;
#[doc = "This is optional input that contains the authentication that should be used to generate the namespace."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Authentication {
#[doc = "The type of the Custom Locations authentication"]
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[doc = "The kubeconfig value."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub value: Option<String>,
}
impl Authentication {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Type of host the Custom Locations is referencing (Kubernetes, etc...)."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HostType {
Kubernetes,
}
}
#[doc = "The Custom Locations patchable resource definition."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PatchableCustomLocations {
#[doc = "Properties for a custom location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<CustomLocationProperties>,
#[doc = "Resource tags"]
#[serde(default, skip_serializing_if = "Option::is_none")] | pub fn new() -> Self {
Self::default()
}
}
#[doc = "Metadata pertaining to creation and last modification of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SystemData {
#[doc = "The identity that created the resource."]
#[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
pub created_by: Option<String>,
#[doc = "The type of identity that created the resource."]
#[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
pub created_by_type: Option<system_data::CreatedByType>,
#[doc = "The timestamp of resource creation (UTC)."]
#[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
pub created_at: Option<String>,
#[doc = "The identity that last modified the resource."]
#[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
pub last_modified_by: Option<String>,
#[doc = "The type of identity that last modified the resource."]
#[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
pub last_modified_by_type: Option<system_data::LastModifiedByType>,
#[doc = "The timestamp of resource last modification (UTC)"]
#[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
pub last_modified_at: Option<String>,
}
impl SystemData {
pub fn new() -> Self {
Self::default()
}
}
pub mod system_data {
use super::*;
#[doc = "The type of identity that created the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CreatedByType {
User,
Application,
ManagedIdentity,
Key,
}
#[doc = "The type of identity that last modified the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum LastModifiedByType {
User,
Application,
ManagedIdentity,
Key,
}
} | pub tags: Option<serde_json::Value>,
}
impl PatchableCustomLocations { |
artemplate.py | from bika.lims.interfaces import IJSONReadExtender, IARTemplate
from zope.component import adapts
from zope.interface import implements
class JSONReadExtender(object):
"""- Place additional information about profile services
into the returned records.
Used in AR Add to prevent extra requests
"""
| def __init__(self, context):
self.context = context
def render_template_partitions(self):
"""
Supplies a more detailed view of the Partitions for this
template. It's built to mimic the partitions that are stored in the
ar_add form state variable, so that when a partition is chosen, there
is no further translation necessary.
It combines the Analyses and Partitions AT schema field values.
For some fields (separate, minvol) there is no information when partitions
are specified in the AR Template.
:return: a list of dictionaries like this:
container: []
container_titles: []
preservation: []
preservation_titles: []
separate: false
minvol: "0.0000 m3 "
services: ["2fdc040e05bb42ca8b52e41761fdb795", 6 more...]
service_titles: ["Copper", "Iron", "Magnesium", 4 more...]
"""
Analyses = self.context.Schema()['Analyses'].get(self.context)
Parts = self.context.Schema()['Partitions'].get(self.context)
if not Parts:
# default value copied in from content/artemplate.py
Parts = [{'part_id': 'part-1',
'Container': '',
'Preservation': '',
'container_uid': '',
'preservation_uid': ''}]
parts = []
not_found = set()
for Part in Parts:
part = {
'part_id': Part.get("part_id", "part-1"),
'container_titles': Part.get("Container", ""),
'container': Part.get("container_uid", ""),
'preservation_titles': Part.get("Preservation", ""),
'preservation': Part.get("preservation_uid", ""),
'services': [],
'service_titles': [],
}
for analysis in Analyses:
uid = analysis['service_uid']
partition = analysis['partition']
if partition == part['part_id']:
part['services'].append(uid)
part['service_titles'].append(uid)
not_found.discard(analysis['service_uid'])
else:
if uid in part['services']:
part['services'].remove(uid)
if uid in part['service_titles']:
part['service_titles'].remove(uid)
not_found.add(analysis['service_uid'])
parts.append(part)
# all others go into the first part. Mostly this will be due to
# partition info not being defined?
for uid in not_found:
if uid not in parts[0]['services']:
parts[0]['services'].append(uid)
if uid not in parts[0]['service_titles']:
parts[0]['service_titles'].append(uid)
return parts
def __call__(self, request, data):
bsc = self.context.bika_setup_catalog
service_data = []
for item in self.context.getAnalyses():
service_uid = item['service_uid']
service = bsc(UID=service_uid)[0].getObject()
this_service = {'UID': service.UID(),
'Title': service.Title(),
'Keyword': service.getKeyword(),
'Price': service.getPrice(),
'VAT': service.getVAT(),
'PointOfCapture': service.getPointOfCapture(),
'CategoryTitle': service.getCategory().Title()}
service_data.append(this_service)
data['service_data'] = service_data
data['Partitions'] = self.render_template_partitions() | implements(IJSONReadExtender)
adapts(IARTemplate)
|
input.py | #!/usr/bin/env python3
import argparse
import os
import subprocess
def naive_config_read(cfg):
for line in cfg:
key, val = line.strip().split(':')
if key == 'pipe':
return val.strip()
else:
print("Input pipe not found in config")
exit(0)
def main(config_file):
|
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", default="config.yml")
args = parser.parse_args()
main(args.config)
| if not os.path.exists(config_file):
os.chdir(os.path.dirname(__file__))
with open(config_file) as cfg:
input_pipe = naive_config_read(cfg)
result = subprocess.run("dmenu",
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE)
text = result.stdout.decode().strip()
if not text:
return
print("message", text, sep=':')
with open(input_pipe, 'w') as pipe:
print("message", text, sep=':', file=pipe) |
flags.rs | pub struct MethodFlags(pub u32);
pub struct TypeFlags(pub u32);
#[derive(Default)]
pub struct ParamFlags(pub u32);
impl MethodFlags {
pub fn special(&self) -> bool {
self.0 & 0b1000_0000_0000 != 0
}
}
impl TypeFlags {
pub fn windows_runtime(&self) -> bool {
self.0 & 0b100_0000_0000_0000 != 0
}
pub fn interface(&self) -> bool {
self.0 & 0b10_0000 != 0
}
}
impl ParamFlags {
pub fn input(&self) -> bool {
self.0 & 0b1 != 0
}
}
#[derive(PartialEq)]
pub enum TypeCategory {
Interface,
Class,
Enum,
Struct,
Delegate,
}
#[allow(dead_code)]
pub enum ParamCategory {
Array,
Enum,
Generic,
Object,
Primitive,
String,
Struct,
}
#[allow(dead_code)]
pub enum ElementType {
Bool,
Char,
I8,
U8,
I16,
U16,
I32,
U32,
I64,
U64, | String,
Object,
}
#[derive(Copy, Clone, PartialEq)]
pub enum MethodCategory {
Normal,
Get,
Set,
Add,
Remove,
} | F32,
F64, |
example_using_relations.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.ext import ndb
from ndb_relations.relations import OneToMany
class User2(ndb.Model):
|
class Order2(ndb.Model):
pass
class OrderItem2(ndb.Model):
name = ndb.StringProperty()
price = ndb.FloatProperty()
class OrderOwner(OneToMany):
origin = ndb.KeyProperty(User2)
destin = ndb.KeyProperty(Order2)
class Item(ndb.Model):
name = ndb.StringProperty()
class OrderItemRelation(OneToMany):
origin = ndb.KeyProperty(Order2)
destin = ndb.KeyProperty(Item)
| name = ndb.StringProperty() |
pg_test.go | package postgres_test
import (
"database/sql"
"testing"
"github.com/stretchr/testify/assert"
"github.com/ribice/gorsk"
"github.com/ribice/gorsk/pkg/utl/postgres"
"github.com/fortytw2/dockertest"
)
func TestNew(t *testing.T) | {
container, err := dockertest.RunContainer("postgres:alpine", "5432", func(addr string) error {
db, err := sql.Open("postgres", "postgres://postgres:postgres@"+addr+"?sslmode=disable")
if err != nil {
return err
}
return db.Ping()
}, "-e", "POSTGRES_PASSWORD=postgres", "-e", "POSTGRES_USER=postgres")
defer container.Shutdown()
if err != nil {
t.Fatalf("could not start postgres, %s", err)
}
_, err = postgres.New("PSN", 1, false)
if err == nil {
t.Error("Expected error")
}
_, err = postgres.New("postgres://postgres:postgres@localhost:1234/postgres?sslmode=disable", 0, false)
if err == nil {
t.Error("Expected error")
}
dbLogTest, err := postgres.New("postgres://postgres:postgres@"+container.Addr+"/postgres?sslmode=disable", 0, true)
if err != nil {
t.Fatalf("Error establishing connection %v", err)
}
dbLogTest.Close()
db, err := postgres.New("postgres://postgres:postgres@"+container.Addr+"/postgres?sslmode=disable", 1, true)
if err != nil {
t.Fatalf("Error establishing connection %v", err)
}
var user gorsk.User
db.Select(&user)
assert.NotNil(t, db)
db.Close()
} |
|
properties.rs | use indradb::NamedProperty;
use inexor_rgf_core_reactive::NamedProperties;
use serde_json::json; | #[allow(non_camel_case_types)]
#[derive(AsRefStr, IntoStaticStr, ToString)]
pub enum MqttBrokerProperties {
#[strum(serialize = "hostname")]
HOSTNAME,
#[strum(serialize = "port")]
PORT,
#[strum(serialize = "send_package")]
SEND_PACKAGE,
#[strum(serialize = "received_package")]
RECEIVED_PACKAGE,
}
impl MqttBrokerProperties {
pub fn default_value(&self) -> String {
match self {
MqttBrokerProperties::HOSTNAME => String::from("localhost"),
MqttBrokerProperties::PORT => String::from("1883"), // default MQTT port; TODO: i64
MqttBrokerProperties::SEND_PACKAGE => String::from("{}"),
MqttBrokerProperties::RECEIVED_PACKAGE => String::from("{}"),
}
}
pub fn properties() -> NamedProperties {
vec![
NamedProperty::from(MqttBrokerProperties::HOSTNAME),
NamedProperty::from(MqttBrokerProperties::PORT),
NamedProperty::from(MqttBrokerProperties::SEND_PACKAGE),
NamedProperty::from(MqttBrokerProperties::RECEIVED_PACKAGE),
]
}
}
impl From<MqttBrokerProperties> for NamedProperty {
fn from(p: MqttBrokerProperties) -> Self {
NamedProperty {
name: p.to_string(),
value: json!(p.default_value()),
}
}
}
impl From<MqttBrokerProperties> for String {
fn from(p: MqttBrokerProperties) -> Self {
p.to_string()
}
} | use strum_macros::{AsRefStr, IntoStaticStr, ToString};
|
ItemInfo_20220222011253.js | import React, { Component } from 'react';
import api, { getItemById } from '../api';
import { shared } from '../constants';
import styled from 'styled-components';
import
const Title = styled.h1.attrs({
className: 'h1',
})``;
const Wrapper = styled.div.attrs({
className: 'form-group',
})`
margin: 0 0 30px 30px;
float: left;
`;
const Label = styled.label`
margin: 5px;
max-width: 30%;
float: left;
`;
const InfoParagraph = styled.p`
margin-right:20px;
height:10%;
`;
class ItemInfo extends Component{
constructor(props){
super(props);
this.state = {
_id: '',
name: '',
gender: '',
age: '',
zip: '',
priority: '',
};
}
componentDidMount() {
const itemId = this.props.match.params.id;
this.fetchSingleItem(itemId).then(resp => {
const { item } = resp.data;
this.setState({ ...item });
});
}
fetchSingleItem = itemId => {
return api
.getItemById(itemId) | })
.catch(err => {
console.error(`ERROR in 'fetchSingleItem': ${err}`);
console.error(err);
return err;
});
};
render(){
const {_id, name, gender, age, zip, priority} = this.state;
// const item = {_id, name, gender, age, zip, priority};
return (
_id && (
<Wrapper>
<Title>Patient Info </Title>
<Label>Name:</Label>
<InfoParagraph>{name}</InfoParagraph>
<Label>ID:</Label>
<InfoParagraph>{_id}</InfoParagraph>
</Wrapper>
));
}
};
export default ItemInfo; | .then(resp => {
console.log('getItemById: resp');
console.log(resp);
return resp; |
skips.py | from __future__ import absolute_import
from django.conf import settings
from six.moves.urllib.parse import urlparse
import os
import socket
import pytest
_service_status = {}
def cassandra_is_available():
if "cassandra" in _service_status:
|
try:
socket.create_connection(("127.0.0.1", 9042), 1.0)
except socket.error:
_service_status["cassandra"] = False
else:
_service_status["cassandra"] = True
return _service_status["cassandra"]
requires_cassandra = pytest.mark.skipif(
not cassandra_is_available(), reason="requires cassandra server running"
)
def snuba_is_available():
if "snuba" in _service_status:
return _service_status["snuba"]
try:
parsed = urlparse(settings.SENTRY_SNUBA)
socket.create_connection((parsed.hostname, parsed.port), 1.0)
except socket.error:
_service_status["snuba"] = False
else:
_service_status["snuba"] = True
return _service_status["snuba"]
requires_snuba = pytest.mark.skipif(not snuba_is_available(), reason="requires snuba server running")
def xfail_if_not_postgres(reason):
def decorator(function):
return pytest.mark.xfail(os.environ.get("TEST_SUITE") != "postgres", reason=reason)(
function
)
return decorator
| return _service_status["cassandra"] |
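A hedged usage sketch for the markers defined above; the test names are made up and the import path assumes this module lives at sentry/testutils/skips.py.

from sentry.testutils.skips import requires_cassandra, requires_snuba, xfail_if_not_postgres

@requires_cassandra
def test_nodestore_roundtrip():
    ...  # skipped entirely unless a local Cassandra answers on port 9042

@requires_snuba
def test_snapshot_query():
    ...  # skipped unless the configured Snuba host accepts connections

@xfail_if_not_postgres("case-insensitive lookups rely on postgres-specific behaviour")
def test_case_insensitive_lookup():
    ...  # expected to fail whenever TEST_SUITE != "postgres"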
record.go | package models
| type NumericRecord struct {
ID uint `gorm:"primaryKey,autoIncrement" json:"id"`
Timestamp time.Time `gorm:"autoCreateTime" json:"timestamp"`
HobbitID uint `json:"hobbit_id,omitempty"`
Hobbit Hobbit `gorm:"foreignKey:HobbitID" json:"hobbit,omitempty"`
Value int64 `json:"value,omitempty"`
Comment string `json:"comment,omitempty"`
CreatedAt time.Time
UpdatedAt time.Time
} | import "time"
|
python-socketio.py | """
python-socketio.py
Sample Mcity OCTANE python socketio script
"""
import os
from dotenv import load_dotenv
import socketio
#Load environment variables
load_dotenv()
api_key = os.environ.get('MCITY_OCTANE_KEY', None)
server = os.environ.get('MCITY_OCTANE_SERVER', 'http://localhost:5000')
namespace = "/octane"
#If no API Key provided, exit.
if not api_key:
print ("No API KEY SPECIFIED. EXITING")
exit()
#Create a SocketIO Python client.
sio = socketio.Client()
# Async client is available also: sio = socketio.AsyncClient()
def send_auth():
"""
Emit an authentication event.
"""
sio.emit('auth', {'x-api-key': api_key}, namespace=namespace)
#Define event callbacks
@sio.on('connect', namespace=namespace)
def on_connect():
"""
Handle connection event and send authentication key
"""
send_auth()
@sio.on('join', namespace=namespace)
def on_join(data):
"""
Event fired when user joins a channel
"""
print('Join received with ', data)
@sio.on('channels', namespace=namespace)
def on_channels(data):
"""
Event fired when a user requests current channel information.
"""
print('Channel information', data)
@sio.on('disconnect', namespace=namespace)
def | ():
"""
Event fired on disconnect.
"""
print('disconnected from server')
#Make connection.
sio.connect(server, namespaces=[namespace])
sio.wait() | on_disconnect |
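The comment near the top notes that an asyncio client is also available. A hedged sketch of that variant, reusing api_key, server and namespace from this script, could look like the following; it mirrors the synchronous flow, only the entry points become coroutines.

import asyncio
import socketio

async def main():
    sio = socketio.AsyncClient()

    @sio.on('connect', namespace=namespace)
    async def on_connect():
        # authenticate as soon as the connection is established
        await sio.emit('auth', {'x-api-key': api_key}, namespace=namespace)

    await sio.connect(server, namespaces=[namespace])
    await sio.wait()

asyncio.run(main())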
US_BPA.py | #!/usr/bin/env python3
# Archive reason: No longer in use.
"""Parser for the Bonneville Power Administration area of the USA."""
import logging
from io import StringIO
import arrow
import pandas as pd
import requests
GENERATION_URL = "https://transmission.bpa.gov/business/operations/Wind/baltwg.txt"
GENERATION_MAPPING = {
"Wind": "wind",
"Hydro": "hydro",
"Fossil/Biomass": "unknown",
"Nuclear": "nuclear",
}
def get_data(url, session=None):
|
def timestamp_converter(timestamp):
"""Turns a timestamp str into an aware datetime object."""
arr_dt_naive = arrow.get(timestamp, "MM/DD/YYYY HH:mm")
dt_aware = arr_dt_naive.replace(tzinfo="America/Los_Angeles").datetime
return dt_aware
def data_processor(df, logger) -> list:
"""
Takes a dataframe and drops all generation rows that are empty or more than 1 day old.
Turns each row into a dictionary and removes any generation types that are unknown.
:return: list of tuples in the form of (datetime, production).
"""
df = df.dropna(thresh=2)
df.columns = df.columns.str.strip()
# 5min data for the last 24 hours.
df = df.tail(288)
df["Date/Time"] = df["Date/Time"].map(timestamp_converter)
known_keys = GENERATION_MAPPING.keys() | {"Date/Time", "Load"}
column_headers = set(df.columns)
unknown_keys = column_headers - known_keys
for k in unknown_keys:
logger.warning(
"New data {} seen in US-BPA data source".format(k), extra={"key": "US-BPA"}
)
keys_to_remove = unknown_keys | {"Load"}
processed_data = []
for index, row in df.iterrows():
production = row.to_dict()
dt = production.pop("Date/Time")
dt = dt.to_pydatetime()
mapped_production = {
GENERATION_MAPPING[k]: v
for k, v in production.items()
if k not in keys_to_remove
}
processed_data.append((dt, mapped_production))
return processed_data
def fetch_production(
zone_key="US-BPA",
session=None,
target_datetime=None,
logger=logging.getLogger(__name__),
) -> list:
"""Requests the last known production mix (in MW) of a given zone."""
if target_datetime:
raise NotImplementedError("This parser is not yet able to parse past dates")
raw_data = get_data(GENERATION_URL, session=session)
processed_data = data_processor(raw_data, logger)
data = []
for item in processed_data:
datapoint = {
"zoneKey": zone_key,
"datetime": item[0],
"production": item[1],
"storage": {},
"source": "bpa.gov",
}
data.append(datapoint)
return data
if __name__ == "__main__":
print("fetch_production() ->")
print(fetch_production())
| """Returns a pandas dataframe."""
s = session or requests.Session()
req = s.get(url)
df = pd.read_table(StringIO(req.text), skiprows=11)
return df |
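For orientation, each element of the list returned by fetch_production() has the shape below. The numbers are invented, and the timezone is shown as UTC only to keep the snippet self-contained; the parser itself attaches America/Los_Angeles.

from datetime import datetime, timezone

example_datapoint = {
    "zoneKey": "US-BPA",
    "datetime": datetime(2021, 3, 1, 12, 55, tzinfo=timezone.utc),
    "production": {"wind": 1234.0, "hydro": 8000.0, "unknown": 950.0, "nuclear": 1100.0},
    "storage": {},
    "source": "bpa.gov",
}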
reconcile_common.go | /*
Copyright 2020 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
| Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package reconciler
import (
"context"
"reflect"
"time"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"knative.dev/pkg/apis"
duckv1 "knative.dev/pkg/apis/duck/v1"
"knative.dev/pkg/logging"
)
const failedGenerationBump = "NewObservedGenFailure"
const (
DoReconcileKind = "ReconcileKind"
DoFinalizeKind = "FinalizeKind"
DoObserveKind = "ObserveKind"
DoObserveFinalizeKind = "ObserveFinalizeKind"
)
// PreProcessReconcile contains logic to apply before reconciliation of a resource.
func PreProcessReconcile(ctx context.Context, resource duckv1.KRShaped) {
newStatus := resource.GetStatus()
// We may be reading a version of the object that was stored at an older version
// and may not have had all of the assumed defaults specified. This won't result
// in this getting written back to the API Server, but lets downstream logic make
// assumptions about defaulting.
if d, ok := resource.(apis.Defaultable); ok {
d.SetDefaults(ctx)
}
// Ensure conditions are initialized before we modify.
condSet := resource.GetConditionSet()
manager := condSet.Manage(newStatus)
manager.InitializeConditions()
if newStatus.ObservedGeneration != resource.GetGeneration() {
// Reset Ready/Successful to unknown. The reconciler is expected to overwrite this.
manager.MarkUnknown(condSet.GetTopLevelConditionType(), failedGenerationBump, "unsuccessfully observed a new generation")
}
}
// PostProcessReconcile contains logic to apply after reconciliation of a resource.
func PostProcessReconcile(ctx context.Context, resource, oldResource duckv1.KRShaped) {
logger := logging.FromContext(ctx)
status := resource.GetStatus()
mgr := resource.GetConditionSet().Manage(status)
// Bump observed generation to denote that we have processed this
// generation regardless of success or failure.
status.ObservedGeneration = resource.GetGeneration()
if rc := mgr.GetTopLevelCondition(); rc == nil {
logger.Warn("A reconciliation included no top-level condition")
} else if rc.Reason == failedGenerationBump {
logger.Warn("A reconciler observed a new generation without updating the resource status")
}
groomConditionsTransitionTime(resource, oldResource)
}
// groomConditionsTransitionTime ensures that the LastTransitionTime only advances for resources
// where the condition has changed during reconciliation. This also ensures that all advanced
// conditions share the same timestamp.
func groomConditionsTransitionTime(resource, oldResource duckv1.KRShaped) {
now := apis.VolatileTime{Inner: metav1.NewTime(time.Now())}
sts := resource.GetStatus()
for i := range sts.Conditions {
cond := &sts.Conditions[i]
if oldCond := oldResource.GetStatus().GetCondition(cond.Type); oldCond != nil {
cond.LastTransitionTime = oldCond.LastTransitionTime
if reflect.DeepEqual(cond, oldCond) {
continue
}
}
cond.LastTransitionTime = now
}
} | |
curl.py | # -*- coding: utf-8 -*-
from datetime import datetime
from flask import g, jsonify, request
from flask_httpauth import HTTPBasicAuth
from app import app, db
from app.models import User, Post
auth = HTTPBasicAuth()
@auth.verify_password
def | (username, password):
user = db.session.query(User).filter_by(username=username).first()
if not user or not user.check_password(password):
return False
g.user = user
return True
@app.route('/curl/post', methods=['POST'])
@auth.login_required
def curl_post():
try:
content = request.json.get('content')
title = request.json.get('title')
post = Post(user=g.user, title=title, content=content, pub_date=datetime.now())
post.save()
return jsonify({'data': 'Hello, %s!' % g.user.username})
except:
return jsonify({'data': 'Something Went Wrong.'})
| verify_password |
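Despite the file name, the endpoint is just as easy to exercise from Python. A hedged usage example with placeholder host and credentials (the user must already exist in the database):

import requests

resp = requests.post(
    "http://localhost:5000/curl/post",
    json={"title": "First post", "content": "Posted through the API"},
    auth=("alice", "secret"),   # HTTP Basic credentials checked by verify_password
)
print(resp.json())              # {"data": "Hello, alice!"} on success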
hitbtc.py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
class hitbtc (Exchange):
def describe(self):
return self.deep_extend(super(hitbtc, self).describe(), {
'id': 'hitbtc',
'name': 'HitBTC',
'countries': ['HK'],
'rateLimit': 1500,
'version': '1',
'has': {
'CORS': False,
'fetchTrades': True,
'fetchTickers': True,
'fetchOrder': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchOrderTrades': True,
'withdraw': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766555-8eaec20e-5edc-11e7-9c5b-6dc69fc42f5e.jpg',
'api': 'https://api.hitbtc.com',
'www': 'https://hitbtc.com',
'referral': 'https://hitbtc.com/?ref_id=5a5d39a65d466',
'doc': 'https://github.com/hitbtc-com/hitbtc-api/blob/master/APIv1.md',
'fees': [
'https://hitbtc.com/fees-and-limits',
'https://support.hitbtc.com/hc/en-us/articles/115005148605-Fees-and-limits',
],
},
'api': {
'public': {
'get': [
'{symbol}/orderbook',
'{symbol}/ticker',
'{symbol}/trades',
'{symbol}/trades/recent',
'symbols',
'ticker',
'time',
],
},
'trading': {
'get': [
'balance',
'orders/active',
'orders/recent',
'order',
'trades/by/order',
'trades',
],
'post': [
'new_order',
'cancel_order',
'cancel_orders',
],
},
'payment': {
'get': [
'balance',
'address/{currency}',
'transactions',
'transactions/{transaction}',
],
'post': [
'transfer_to_trading',
'transfer_to_main',
'address/{currency}',
'payout',
],
},
},
# hardcoded fees are deprecated and should only be used when there's no other way to get fee info
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': -0.01 / 100,
'taker': 0.1 / 100,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'BTC': 0.001,
'BCC': 0.0018,
'ETH': 0.00215,
'BCH': 0.0018,
'USDT': 100,
'DASH': 0.03,
'BTG': 0.0005,
'LTC': 0.003,
'ZEC': 0.0001,
'XMR': 0.09,
'1ST': 0.84,
'ADX': 5.7,
'AE': 6.7,
'AEON': 0.01006,
'AIR': 565,
'AMP': 9,
'ANT': 6.7,
'ARDR': 1,
'ARN': 18.5,
'ART': 26,
'ATB': 0.0004,
'ATL': 27,
'ATM': 504,
'ATS': 860,
'AVT': 1.9,
'BAS': 113,
'BCN': 0.1,
'DAO.Casino': 124, # id = 'BET'
'BKB': 46,
'BMC': 32,
'BMT': 100,
'BNT': 2.57,
'BQX': 4.7,
'BTM': 40,
'BTX': 0.04,
'BUS': 0.004,
'CCT': 115,
'CDT': 100,
'CDX': 30,
'CFI': 61,
'CLD': 0.88,
'CND': 574,
'CNX': 0.04,
'COSS': 65,
'CSNO': 16,
'CTR': 15,
'CTX': 146,
'CVC': 8.46,
'DBIX': 0.0168,
'DCN': 120000,
'DCT': 0.02,
'DDF': 342,
'DENT': 6240,
'DGB': 0.4,
'DGD': 0.01,
'DICE': 0.32,
'DLT': 0.26,
'DNT': 0.21,
'DOGE': 2,
'DOV': 34,
'DRPU': 24,
'DRT': 240,
'DSH': 0.017,
'EBET': 84,
'EBTC': 20,
'EBTCOLD': 6.6,
'ECAT': 14,
'EDG': 2,
'EDO': 2.9,
'ELE': 0.00172,
'ELM': 0.004,
'EMC': 0.03,
'EMGO': 14,
'ENJ': 163,
'EOS': 1.5,
'ERO': 34,
'ETBS': 15,
'ETC': 0.002,
'ETP': 0.004,
'EVX': 5.4,
'EXN': 456,
'FRD': 65,
'FUEL': 123.00105,
'FUN': 202.9598309,
'FYN': 1.849,
'FYP': 66.13,
'GNO': 0.0034,
'GUP': 4,
'GVT': 1.2,
'HAC': 144,
'HDG': 7,
'HGT': 1082,
'HPC': 0.4,
'HVN': 120,
'ICN': 0.55,
'ICO': 34,
'ICOS': 0.35,
'IND': 76,
'INDI': 5913,
'ITS': 15.0012,
'IXT': 11,
'KBR': 143,
'KICK': 112,
'LA': 41,
'LAT': 1.44,
'LIFE': 13000,
'LRC': 27,
'LSK': 0.3,
'LUN': 0.34,
'MAID': 5,
'MANA': 143,
'MCAP': 5.44,
'MIPS': 43,
'MNE': 1.33,
'MSP': 121,
'MTH': 92,
'MYB': 3.9,
'NDC': 165,
'NEBL': 0.04,
'NET': 3.96,
'NTO': 998,
'NXC': 13.39,
'NXT': 3,
'OAX': 15,
'ODN': 0.004,
'OMG': 2,
'OPT': 335,
'ORME': 2.8,
'OTN': 0.57,
'PAY': 3.1,
'PIX': 96,
'PLBT': 0.33,
'PLR': 114,
'PLU': 0.87,
'POE': 784,
'POLL': 3.5,
'PPT': 2,
'PRE': 32,
'PRG': 39,
'PRO': 41,
'PRS': 60,
'PTOY': 0.5,
'QAU': 63,
'QCN': 0.03,
'QTUM': 0.04,
'QVT': 64,
'REP': 0.02,
'RKC': 15,
'RVT': 14,
'SAN': 2.24,
'SBD': 0.03,
'SCL': 2.6,
'SISA': 1640,
'SKIN': 407,
'SMART': 0.4,
'SMS': 0.0375,
'SNC': 36,
'SNGLS': 4,
'SNM': 48,
'SNT': 233,
'STEEM': 0.01,
'STRAT': 0.01,
'STU': 14,
'STX': 11,
'SUB': 17,
'SUR': 3,
'SWT': 0.51,
'TAAS': 0.91,
'TBT': 2.37,
'TFL': 15,
'TIME': 0.03,
'TIX': 7.1,
'TKN': 1,
'TKR': 84,
'TNT': 90,
'TRST': 1.6,
'TRX': 1395,
'UET': 480,
'UGT': 15,
'VEN': 14,
'VERI': 0.037,
'VIB': 50,
'VIBE': 145,
'VOISE': 618,
'WEALTH': 0.0168,
'WINGS': 2.4,
'WTC': 0.75,
'XAUR': 3.23,
'XDN': 0.01,
'XEM': 15,
'XUC': 0.9,
'YOYOW': 140,
'ZAP': 24,
'ZRX': 23,
'ZSC': 191,
},
'deposit': {
'BTC': 0.0006,
'ETH': 0.003,
'BCH': 0,
'USDT': 0,
'BTG': 0,
'LTC': 0,
'ZEC': 0,
'XMR': 0,
'1ST': 0,
'ADX': 0,
'AE': 0,
'AEON': 0,
'AIR': 0,
'AMP': 0,
'ANT': 0,
'ARDR': 0,
'ARN': 0,
'ART': 0,
'ATB': 0,
'ATL': 0,
'ATM': 0,
'ATS': 0,
'AVT': 0,
'BAS': 0,
'BCN': 0,
'DAO.Casino': 0, # id = 'BET'
'BKB': 0,
'BMC': 0,
'BMT': 0,
'BNT': 0,
'BQX': 0,
'BTM': 0,
'BTX': 0,
'BUS': 0,
'CCT': 0,
'CDT': 0,
'CDX': 0,
'CFI': 0,
'CLD': 0,
'CND': 0,
'CNX': 0,
'COSS': 0,
'CSNO': 0,
'CTR': 0,
'CTX': 0,
'CVC': 0,
'DBIX': 0,
'DCN': 0,
'DCT': 0,
'DDF': 0,
'DENT': 0,
'DGB': 0,
'DGD': 0,
'DICE': 0,
'DLT': 0,
'DNT': 0,
'DOGE': 0,
'DOV': 0,
'DRPU': 0,
'DRT': 0,
'DSH': 0,
'EBET': 0,
'EBTC': 0,
'EBTCOLD': 0,
'ECAT': 0,
'EDG': 0,
'EDO': 0,
'ELE': 0,
'ELM': 0,
'EMC': 0,
'EMGO': 0,
'ENJ': 0,
'EOS': 0,
'ERO': 0,
'ETBS': 0,
'ETC': 0,
'ETP': 0,
'EVX': 0,
'EXN': 0,
'FRD': 0,
'FUEL': 0,
'FUN': 0,
'FYN': 0,
'FYP': 0,
'GNO': 0,
'GUP': 0,
'GVT': 0,
'HAC': 0,
'HDG': 0,
'HGT': 0,
'HPC': 0,
'HVN': 0,
'ICN': 0,
'ICO': 0,
'ICOS': 0,
'IND': 0,
'INDI': 0,
'ITS': 0,
'IXT': 0,
'KBR': 0,
'KICK': 0,
'LA': 0,
'LAT': 0,
'LIFE': 0,
'LRC': 0,
'LSK': 0,
'LUN': 0,
'MAID': 0,
'MANA': 0,
'MCAP': 0,
'MIPS': 0,
'MNE': 0,
'MSP': 0,
'MTH': 0,
'MYB': 0,
'NDC': 0,
'NEBL': 0,
'NET': 0,
'NTO': 0,
'NXC': 0,
'NXT': 0,
'OAX': 0,
'ODN': 0,
'OMG': 0,
'OPT': 0,
'ORME': 0,
'OTN': 0,
'PAY': 0,
'PIX': 0,
'PLBT': 0,
'PLR': 0,
'PLU': 0,
'POE': 0,
'POLL': 0,
'PPT': 0,
'PRE': 0,
'PRG': 0,
'PRO': 0,
'PRS': 0,
'PTOY': 0,
'QAU': 0,
'QCN': 0,
'QTUM': 0,
'QVT': 0,
'REP': 0,
'RKC': 0,
'RVT': 0,
'SAN': 0,
'SBD': 0,
'SCL': 0,
'SISA': 0,
'SKIN': 0,
'SMART': 0,
'SMS': 0,
'SNC': 0,
'SNGLS': 0,
'SNM': 0,
'SNT': 0,
'STEEM': 0,
'STRAT': 0,
'STU': 0,
'STX': 0,
'SUB': 0,
'SUR': 0,
'SWT': 0,
'TAAS': 0,
'TBT': 0,
'TFL': 0,
'TIME': 0,
'TIX': 0,
'TKN': 0,
'TKR': 0,
'TNT': 0,
'TRST': 0,
'TRX': 0,
'UET': 0,
'UGT': 0,
'VEN': 0,
'VERI': 0,
'VIB': 0,
'VIBE': 0,
'VOISE': 0,
'WEALTH': 0,
'WINGS': 0,
'WTC': 0,
'XAUR': 0,
'XDN': 0,
'XEM': 0,
'XUC': 0,
'YOYOW': 0,
'ZAP': 0,
'ZRX': 0,
'ZSC': 0,
},
},
},
'commonCurrencies': {
'BET': 'DAO.Casino',
'CAT': 'BitClave',
'DRK': 'DASH',
'EMGO': 'MGO',
'GET': 'Themis',
'HSR': 'HC',
'LNC': 'LinkerCoin',
'UNC': 'Unigame',
'USD': 'USDT',
'XBT': 'BTC',
},
'options': {
'defaultTimeInForce': 'FOK',
},
})
def fetch_markets(self, params={}):
response = self.publicGetSymbols(params)
markets = self.safe_value(response, 'symbols')
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'commodity')
quoteId = self.safe_string(market, 'currency')
lot = self.safe_float(market, 'lot')
step = self.safe_float(market, 'step')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
result.append({
'info': market,
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'lot': lot,
'step': step,
'active': True,
'maker': self.safe_float(market, 'provideLiquidityRate'),
'taker': self.safe_float(market, 'takeLiquidityRate'),
'precision': {
'amount': self.precision_from_string(market['lot']),
'price': self.precision_from_string(market['step']),
},
'limits': {
'amount': {
'min': lot,
'max': None,
},
'price': {
'min': step,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
})
return result
def fetch_balance(self, params={}):
self.load_markets()
method = self.safe_string(params, 'type', 'trading')
method += 'GetBalance'
query = self.omit(params, 'type')
response = getattr(self, method)(query)
balances = self.safe_value(response, 'balance', [])
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency_code')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_float_2(balance, 'cash', 'balance')
account['used'] = self.safe_float(balance, 'reserved')
result[code] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
orderbook = self.publicGetSymbolOrderbook(self.extend({
'symbol': self.market_id(symbol),
}, params))
return self.parse_order_book(orderbook)
def | (self, ticker, market=None):
timestamp = self.safe_integer(ticker, 'timestamp')
symbol = None
if market:
symbol = market['symbol']
last = self.safe_float(ticker, 'last')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'ask'),
'askVolume': None,
'vwap': None,
'open': self.safe_float(ticker, 'open'),
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'volume'),
'quoteVolume': self.safe_float(ticker, 'volume_quote'),
'info': ticker,
}
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
tickers = self.publicGetTicker(params)
ids = list(tickers.keys())
result = {}
for i in range(0, len(ids)):
id = ids[i]
market = self.markets_by_id[id]
symbol = market['symbol']
ticker = tickers[id]
result[symbol] = self.parse_ticker(ticker, market)
return result
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.publicGetSymbolTicker(self.extend(request, params))
if 'message' in response:
raise ExchangeError(self.id + ' ' + response['message'])
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
if isinstance(trade, list):
return self.parse_public_trade(trade, market)
return self.parse_order_trade(trade, market)
def parse_public_trade(self, trade, market=None):
symbol = None
if market is not None:
symbol = market['symbol']
side = None
tradeLength = len(trade)
if tradeLength > 4:
side = trade[4]
price = float(trade[1])
amount = float(trade[2])
cost = price * amount
return {
'info': trade,
'id': str(trade[0]),
'timestamp': trade[3],
'datetime': self.iso8601(trade[3]),
'symbol': symbol,
'type': None,
'side': side,
'order': None,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
}
def parse_order_trade(self, trade, market=None):
symbol = None
if market is not None:
symbol = market['symbol']
amount = self.safe_float(trade, 'execQuantity')
if market:
amount *= market['lot']
price = self.safe_float(trade, 'execPrice')
cost = price * amount
fee = {
'cost': self.safe_float(trade, 'fee'),
'currency': None,
'rate': None,
}
timestamp = self.safe_integer(trade, 'timestamp')
id = self.safe_string(trade, 'tradeId')
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
orderId = self.safe_string(trade, 'clientOrderId')
side = self.safe_string(trade, 'side')
return {
'info': trade,
'id': id,
'order': orderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'from': 0,
# 'till': 100,
# 'by': 'ts', # or by trade_id
# 'sort': 'desc', # or asc
# 'start_index': 0,
# 'max_results': 1000,
# 'format_item': 'object',
# 'format_price': 'number',
# 'format_amount': 'number',
# 'format_tid': 'string',
# 'format_timestamp': 'millisecond',
# 'format_wrap': False,
# 'side': 'true',
}
if since is not None:
request['by'] = 'ts'
request['from'] = since
if limit is not None:
request['max_results'] = limit
response = self.publicGetSymbolTrades(self.extend(request, params))
return self.parse_trades(response['trades'], market, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
# check if amount can be evenly divided into lots
# they want integer quantity in lot units
quantity = float(amount) / market['lot']
wholeLots = int(round(quantity))
difference = quantity - wholeLots
if abs(difference) > market['step']:
raise ExchangeError(self.id + ' order amount should be evenly divisible by lot unit size of ' + str(market['lot']))
clientOrderId = self.milliseconds()
request = {
'clientOrderId': str(clientOrderId),
'symbol': market['id'],
'side': side,
'quantity': str(wholeLots), # quantity in integer lot units
'type': type,
}
if type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
else:
request['timeInForce'] = self.options['defaultTimeInForce']
response = self.tradingPostNewOrder(self.extend(request, params))
order = self.parse_order(response['ExecutionReport'], market)
if order['status'] == 'rejected':
raise InvalidOrder(self.id + ' order was rejected by the exchange ' + self.json(order))
return order
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
request = {
'clientOrderId': id,
}
return self.tradingPostCancelOrder(self.extend(request, params))
def parse_order_status(self, status):
statuses = {
'new': 'open',
'partiallyFilled': 'open',
'filled': 'closed',
'canceled': 'canceled',
'rejected': 'rejected',
'expired': 'expired',
}
return self.safe_string(statuses, status)
def parse_order(self, order, market=None):
timestamp = self.safe_integer(order, 'lastTimestamp')
if timestamp is None:
timestamp = self.safe_integer(order, 'timestamp')
symbol = None
if market is None:
market = self.markets_by_id[order['symbol']]
status = self.parse_order_status(self.safe_string(order, 'orderStatus'))
price = self.safe_float(order, 'orderPrice')
price = self.safe_float(order, 'price', price)
price = self.safe_float(order, 'avgPrice', price)
amount = self.safe_float(order, 'orderQuantity')
amount = self.safe_float(order, 'quantity', amount)
remaining = self.safe_float(order, 'quantityLeaves')
remaining = self.safe_float(order, 'leavesQuantity', remaining)
filled = None
cost = None
amountDefined = (amount is not None)
remainingDefined = (remaining is not None)
if market is not None:
symbol = market['symbol']
if amountDefined:
amount *= market['lot']
if remainingDefined:
remaining *= market['lot']
else:
marketId = self.safe_string(order, 'symbol')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
if amountDefined:
if remainingDefined:
filled = amount - remaining
if price is not None:
cost = price * filled
feeCost = self.safe_float(order, 'fee')
feeCurrency = None
if market is not None:
symbol = market['symbol']
feeCurrency = market['quote']
fee = {
'cost': feeCost,
'currency': feeCurrency,
'rate': None,
}
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
id = self.safe_string(order, 'clientOrderId')
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
return {
'id': id,
'info': order,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'fee': fee,
}
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
request = {
'clientOrderId': id,
}
response = self.tradingGetOrder(self.extend(request, params))
if response['orders'][0]:
return self.parse_order(response['orders'][0])
raise OrderNotFound(self.id + ' fetchOrder() error: ' + self.response)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
statuses = ['new', 'partiallyFilled']
market = None
request = {
'sort': 'desc',
'statuses': ','.join(statuses),
}
if symbol is not None:
market = self.market(symbol)
request['symbols'] = market['id']
response = self.tradingGetOrdersActive(self.extend(request, params))
return self.parse_orders(response['orders'], market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
statuses = ['filled', 'canceled', 'rejected', 'expired']
request = {
'sort': 'desc',
'statuses': ','.join(statuses),
'max_results': 1000,
}
if symbol is not None:
market = self.market(symbol)
request['symbols'] = market['id']
response = self.tradingGetOrdersRecent(self.extend(request, params))
return self.parse_orders(response['orders'], market, since, limit)
def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
request = {
'clientOrderId': id,
}
response = self.tradingGetTradesByOrder(self.extend(request, params))
return self.parse_trades(response['trades'], market, since, limit)
def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
self.load_markets()
currency = self.currency(code)
request = {
'currency_code': currency['id'],
'amount': amount,
'address': address,
}
if tag is not None:
request['extra_id'] = tag
response = self.paymentPostPayout(self.extend(request, params))
return {
'info': response,
'id': response['transaction'],
}
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = '/' + 'api' + '/' + self.version + '/' + api + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
nonce = self.nonce()
payload = {'nonce': nonce, 'apikey': self.apiKey}
query = self.extend(payload, query)
if method == 'GET':
url += '?' + self.urlencode(query)
else:
url += '?' + self.urlencode(payload)
auth = url
if method == 'POST':
if query:
body = self.urlencode(query)
auth += body
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'X-Signature': self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha512).lower(),
}
url = self.urls['api'] + url
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if 'code' in response:
if 'ExecutionReport' in response:
if response['ExecutionReport']['orderRejectReason'] == 'orderExceedsLimit':
raise InsufficientFunds(self.id + ' ' + self.json(response))
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| parse_ticker |
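A worked example of the lot-divisibility check in create_order() above; lot and step are illustrative values of the kind fetch_markets() returns.

lot, step = 0.1, 0.01               # minimum order unit and allowed increment
amount = 0.25                       # requested amount in base currency
quantity = amount / lot             # 2.5 lots
whole_lots = int(round(quantity))   # 2
difference = quantity - whole_lots  # 0.5
# abs(difference) > step, so the client raises ExchangeError locally instead of
# sending an order the exchange would reject; amounts of 0.2 or 0.3 would pass.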
add_guild_member.rs | use crate::{
client::Client,
error::Error as HttpError,
request::{Request, TryIntoRequest},
response::ResponseFuture,
routing::Route,
};
use serde::Serialize;
use twilight_model::{
guild::PartialMember,
id::{
marker::{GuildMarker, RoleMarker, UserMarker},
Id,
},
};
use twilight_validate::request::{nickname as validate_nickname, ValidationError};
#[derive(Serialize)]
struct AddGuildMemberFields<'a> {
pub access_token: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
pub deaf: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub mute: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub nick: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub roles: Option<&'a [Id<RoleMarker>]>,
}
#[must_use = "requests must be configured and executed"]
pub struct AddGuildMember<'a> {
fields: AddGuildMemberFields<'a>,
guild_id: Id<GuildMarker>,
http: &'a Client,
user_id: Id<UserMarker>,
}
/// Add a user to a guild.
///
/// An access token for the user with `guilds.join` scope is required. All other
/// fields are optional. See [Discord Docs/Add Guild Member].
///
/// [Discord Docs/Add Guild Member]: https://discord.com/developers/docs/resources/guild#add-guild-member
impl<'a> AddGuildMember<'a> {
pub(crate) const fn new(
http: &'a Client,
guild_id: Id<GuildMarker>,
user_id: Id<UserMarker>,
access_token: &'a str,
) -> Self {
Self {
fields: AddGuildMemberFields {
access_token,
deaf: None,
mute: None,
nick: None,
roles: None,
},
guild_id,
http,
user_id,
}
}
/// Whether the new member will be unable to hear audio when connected to a
/// voice channel.
pub const fn deaf(mut self, deaf: bool) -> Self |
/// Whether the new member will be unable to speak in voice channels.
pub const fn mute(mut self, mute: bool) -> Self {
self.fields.mute = Some(mute);
self
}
/// Set the user's initial nickname.
///
/// The minimum length is 1 UTF-16 character and the maximum is 32 UTF-16
/// characters.
///
/// # Errors
///
/// Returns an error of type [`Nickname`] if the nickname length is too
/// short or too long.
///
/// [`Nickname`]: twilight_validate::request::ValidationErrorType::Nickname
pub fn nick(mut self, nick: &'a str) -> Result<Self, ValidationError> {
validate_nickname(nick)?;
self.fields.nick.replace(nick);
Ok(self)
}
/// List of roles to assign the new member.
pub const fn roles(mut self, roles: &'a [Id<RoleMarker>]) -> Self {
self.fields.roles = Some(roles);
self
}
/// Execute the request, returning a future resolving to a [`Response`].
///
/// [`Response`]: crate::response::Response
pub fn exec(self) -> ResponseFuture<PartialMember> {
let http = self.http;
match self.try_into_request() {
Ok(request) => http.request(request),
Err(source) => ResponseFuture::error(source),
}
}
}
impl TryIntoRequest for AddGuildMember<'_> {
fn try_into_request(self) -> Result<Request, HttpError> {
let mut request = Request::builder(&Route::AddGuildMember {
guild_id: self.guild_id.get(),
user_id: self.user_id.get(),
});
request = request.json(&self.fields)?;
Ok(request.build())
}
}
| {
self.fields.deaf = Some(deaf);
self
} |
FamaProfilingServer.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import json
import os
import random as _random
import sys
import traceback
from getopt import getopt, GetoptError
from multiprocessing import Process
from os import environ
from wsgiref.simple_server import make_server
import requests as _requests
from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError, \
JSONRPCError, InvalidRequestError
from jsonrpcbase import ServerError as JSONServerError
from biokbase import log
from authclient import KBaseAuth as _KBaseAuth
try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser
DEPLOY = 'KB_DEPLOYMENT_CONFIG'
SERVICE = 'KB_SERVICE_NAME'
AUTH = 'auth-service-url'
# Note that the error fields do not match the 2.0 JSONRPC spec
def get_config_file():
return environ.get(DEPLOY, None)
def get_service_name():
return environ.get(SERVICE, None)
def get_config():
if not get_config_file():
return None
retconfig = {}
config = ConfigParser()
config.read(get_config_file())
for nameval in config.items(get_service_name() or 'FamaProfiling'):
retconfig[nameval[0]] = nameval[1]
return retconfig
config = get_config()
from FamaProfilingImpl import FamaProfiling # noqa @IgnorePep8
impl_FamaProfiling = FamaProfiling(config)
class JSONObjectEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, frozenset):
return list(obj)
if hasattr(obj, 'toJSONable'):
return obj.toJSONable()
return json.JSONEncoder.default(self, obj)
class JSONRPCServiceCustom(JSONRPCService):
def call(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in a JSON
string or None if there is none.
Arguments:
jsondata -- remote method call in jsonrpc format
"""
result = self.call_py(ctx, jsondata)
if result is not None:
return json.dumps(result, cls=JSONObjectEncoder)
return None
def _call_method(self, ctx, request):
"""Calls given method with given params and returns it value."""
method = self.method_data[request['method']]['method']
params = request['params']
result = None
try:
if isinstance(params, list):
# Does it have enough arguments?
if len(params) < self._man_args(method) - 1:
raise InvalidParamsError('not enough arguments')
# Does it have too many arguments?
if(not self._vargs(method) and len(params) >
self._max_args(method) - 1):
raise InvalidParamsError('too many arguments')
result = method(ctx, *params)
elif isinstance(params, dict):
# Do not accept keyword arguments if the jsonrpc version is
# not >=1.1.
if request['jsonrpc'] < 11:
raise KeywordError
result = method(ctx, **params)
else: # No params
result = method(ctx)
except JSONRPCError:
raise
except Exception as e:
# log.exception('method %s threw an exception' % request['method'])
# Exception was raised inside the method.
newerr = JSONServerError()
newerr.trace = traceback.format_exc()
if len(e.args) == 1:
newerr.data = repr(e.args[0])
else:
newerr.data = repr(e.args)
raise newerr
return result
def call_py(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in python
object format or None if there is none.
This method is same as call() except the return value is a python
object instead of JSON string. This method is mainly only useful for
debugging purposes.
"""
rdata = jsondata
# we already deserialize the json string earlier in the server code, no
# need to do it again
# try:
# rdata = json.loads(jsondata)
# except ValueError:
# raise ParseError
# set some default values for error handling
request = self._get_default_vals()
if isinstance(rdata, dict) and rdata:
# It's a single request.
self._fill_request(request, rdata)
respond = self._handle_request(ctx, request)
# Don't respond to notifications
if respond is None:
return None
return respond
elif isinstance(rdata, list) and rdata:
# It's a batch.
requests = []
responds = []
for rdata_ in rdata:
# set some default values for error handling
request_ = self._get_default_vals()
self._fill_request(request_, rdata_)
requests.append(request_)
for request_ in requests:
respond = self._handle_request(ctx, request_)
# Don't respond to notifications
if respond is not None:
responds.append(respond)
if responds:
return responds
# Nothing to respond.
return None
else:
# empty dict, list or wrong type
raise InvalidRequestError
def _handle_request(self, ctx, request):
"""Handles given request and returns its response."""
if 'types' in self.method_data[request['method']]:
self._validate_params_types(request['method'], request['params'])
result = self._call_method(ctx, request)
# Do not respond to notifications.
if request['id'] is None:
return None
respond = {}
self._fill_ver(request['jsonrpc'], respond)
respond['result'] = result
respond['id'] = request['id']
return respond
class MethodContext(dict):
|
class ServerError(Exception):
'''
The call returned an error. Fields:
name - the name of the error.
code - the error code.
message - a human readable error message.
data - the server side stacktrace.
'''
def __init__(self, name, code, message, data=None, error=None):
super(Exception, self).__init__(message)
self.name = name
self.code = code
self.message = message if message else ''
self.data = data or error or ''
# data = JSON RPC 2.0, error = 1.1
def __str__(self):
return self.name + ': ' + str(self.code) + '. ' + self.message + \
'\n' + self.data
def getIPAddress(environ):
xFF = environ.get('HTTP_X_FORWARDED_FOR')
realIP = environ.get('HTTP_X_REAL_IP')
trustXHeaders = config is None or \
config.get('dont_trust_x_ip_headers') != 'true'
if (trustXHeaders):
if (xFF):
return xFF.split(',')[0].strip()
if (realIP):
return realIP.strip()
return environ.get('REMOTE_ADDR')
class Application(object):
# Wrap the wsgi handler in a class definition so that we can
# do some initialization and avoid regenerating stuff over
# and over
def logcallback(self):
self.serverlog.set_log_file(self.userlog.get_log_file())
def log(self, level, context, message):
self.serverlog.log_message(level, message, context['client_ip'],
context['user_id'], context['module'],
context['method'], context['call_id'])
def __init__(self):
submod = get_service_name() or 'FamaProfiling'
self.userlog = log.log(
submod, ip_address=True, authuser=True, module=True, method=True,
call_id=True, changecallback=self.logcallback,
config=get_config_file())
self.serverlog = log.log(
submod, ip_address=True, authuser=True, module=True, method=True,
call_id=True, logfile=self.userlog.get_log_file())
self.serverlog.set_log_level(6)
self.rpc_service = JSONRPCServiceCustom()
self.method_authentication = dict()
self.rpc_service.add(impl_FamaProfiling.run_FamaReadProfiling,
name='FamaProfiling.run_FamaReadProfiling',
types=[dict])
self.method_authentication['FamaProfiling.run_FamaReadProfiling'] = 'required' # noqa
self.rpc_service.add(impl_FamaProfiling.run_FamaGenomeProfiling,
name='FamaProfiling.run_FamaGenomeProfiling',
types=[dict])
self.method_authentication['FamaProfiling.run_FamaGenomeProfiling'] = 'required' # noqa
self.rpc_service.add(impl_FamaProfiling.view_FamaFunctionalProfile,
name='FamaProfiling.view_FamaFunctionalProfile',
types=[dict])
self.method_authentication['FamaProfiling.view_FamaFunctionalProfile'] = 'required' # noqa
self.rpc_service.add(impl_FamaProfiling.status,
name='FamaProfiling.status',
types=[dict])
authurl = config.get(AUTH) if config else None
self.auth_client = _KBaseAuth(authurl)
def __call__(self, environ, start_response):
# Context object, equivalent to the perl impl CallContext
ctx = MethodContext(self.userlog)
ctx['client_ip'] = getIPAddress(environ)
status = '500 Internal Server Error'
try:
body_size = int(environ.get('CONTENT_LENGTH', 0))
except (ValueError):
body_size = 0
if environ['REQUEST_METHOD'] == 'OPTIONS':
# we basically do nothing and just return headers
status = '200 OK'
rpc_result = ""
else:
request_body = environ['wsgi.input'].read(body_size)
try:
req = json.loads(request_body)
except ValueError as ve:
err = {'error': {'code': -32700,
'name': "Parse error",
'message': str(ve),
}
}
rpc_result = self.process_error(err, ctx, {'version': '1.1'})
else:
ctx['module'], ctx['method'] = req['method'].split('.')
ctx['call_id'] = req['id']
ctx['rpc_context'] = {
'call_stack': [{'time': self.now_in_utc(),
'method': req['method']}
]
}
prov_action = {'service': ctx['module'],
'method': ctx['method'],
'method_params': req['params']
}
ctx['provenance'] = [prov_action]
try:
token = environ.get('HTTP_AUTHORIZATION')
# parse out the method being requested and check if it
# has an authentication requirement
method_name = req['method']
auth_req = self.method_authentication.get(
method_name, 'none')
if auth_req != 'none':
if token is None and auth_req == 'required':
err = JSONServerError()
err.data = (
'Authentication required for ' +
'FamaProfiling ' +
'but no authentication header was passed')
raise err
elif token is None and auth_req == 'optional':
pass
else:
try:
user = self.auth_client.get_user(token)
ctx['user_id'] = user
ctx['authenticated'] = 1
ctx['token'] = token
except Exception as e:
if auth_req == 'required':
err = JSONServerError()
err.data = \
"Token validation failed: %s" % e
raise err
if (environ.get('HTTP_X_FORWARDED_FOR')):
self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
environ.get('HTTP_X_FORWARDED_FOR'))
self.log(log.INFO, ctx, 'start method')
rpc_result = self.rpc_service.call(ctx, req)
self.log(log.INFO, ctx, 'end method')
status = '200 OK'
except JSONRPCError as jre:
err = {'error': {'code': jre.code,
'name': jre.message,
'message': jre.data
}
}
trace = jre.trace if hasattr(jre, 'trace') else None
rpc_result = self.process_error(err, ctx, req, trace)
except Exception:
err = {'error': {'code': 0,
'name': 'Unexpected Server Error',
'message': 'An unexpected server error ' +
'occurred',
}
}
rpc_result = self.process_error(err, ctx, req,
traceback.format_exc())
# print('Request method was %s\n' % environ['REQUEST_METHOD'])
# print('Environment dictionary is:\n%s\n' % pprint.pformat(environ))
# print('Request body was: %s' % request_body)
# print('Result from the method call is:\n%s\n' % \
# pprint.pformat(rpc_result))
if rpc_result:
response_body = rpc_result
else:
response_body = ''
response_headers = [
('Access-Control-Allow-Origin', '*'),
('Access-Control-Allow-Headers', environ.get(
'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
('content-type', 'application/json'),
('content-length', str(len(response_body)))]
start_response(status, response_headers)
return [response_body.encode('utf8')]
def process_error(self, error, context, request, trace=None):
if trace:
self.log(log.ERR, context, trace.split('\n')[0:-1])
if 'id' in request:
error['id'] = request['id']
if 'version' in request:
error['version'] = request['version']
e = error['error'].get('error')
if not e:
error['error']['error'] = trace
elif 'jsonrpc' in request:
error['jsonrpc'] = request['jsonrpc']
error['error']['data'] = trace
else:
error['version'] = '1.0'
error['error']['error'] = trace
return json.dumps(error)
def now_in_utc(self):
# noqa Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone @IgnorePep8
dtnow = datetime.datetime.now()
dtutcnow = datetime.datetime.utcnow()
delta = dtnow - dtutcnow
hh, mm = divmod((delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60,
60)
return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm)
application = Application()
# This is the uwsgi application dictionary. On startup uwsgi will look
# for this dict and pull its configuration from here.
# This simply lists where to "mount" the application in the URL path
#
# This uwsgi module "magically" appears when running the app within
# uwsgi and is not available otherwise, so wrap an exception handler
# around it
#
# To run this server in uwsgi with 4 workers listening on port 9999 use:
# uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_
# To run using the single-threaded python BaseHTTP service listening on
# port 9999 (the default), execute this file directly.
#
try:
import uwsgi
# Before we do anything with the application, see if the
# configs specify patching all std routines to be asynch
# *ONLY* use this if you are going to wrap the service in
# a wsgi container that has enabled gevent, such as
# uwsgi with the --gevent option
if config is not None and config.get('gevent_monkeypatch_all', False):
print("Monkeypatching std libraries for async")
from gevent import monkey
monkey.patch_all()
uwsgi.applications = {'': application}
except ImportError:
# Not available outside of wsgi, ignore
pass
_proc = None
def start_server(host='localhost', port=0, newprocess=False):
'''
By default, will start the server on localhost on a system assigned port
    in the main thread. Execution of the main thread will stay in the server
main loop until interrupted. To run the server in a separate process, and
thus allow the stop_server method to be called, set newprocess = True. This
will also allow returning of the port number.'''
global _proc
if _proc:
raise RuntimeError('server is already running')
httpd = make_server(host, port, application)
port = httpd.server_address[1]
print("Listening on port %s" % port)
if newprocess:
_proc = Process(target=httpd.serve_forever)
_proc.daemon = True
_proc.start()
else:
httpd.serve_forever()
return port
def stop_server():
global _proc
_proc.terminate()
_proc = None
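# Usage sketch (hypothetical, not part of the module): run the WSGI server in
# a separate process so execution can continue, then shut it down when done.
#
#   port = start_server(newprocess=True)   # returns the system-assigned port
#   ...                                    # exercise the service
#   stop_server()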
def process_async_cli(input_file_path, output_file_path, token):
exit_code = 0
with open(input_file_path) as data_file:
req = json.load(data_file)
if 'version' not in req:
req['version'] = '1.1'
if 'id' not in req:
req['id'] = str(_random.random())[2:]
ctx = MethodContext(application.userlog)
if token:
user = application.auth_client.get_user(token)
ctx['user_id'] = user
ctx['authenticated'] = 1
ctx['token'] = token
if 'context' in req:
ctx['rpc_context'] = req['context']
ctx['CLI'] = 1
ctx['module'], ctx['method'] = req['method'].split('.')
prov_action = {'service': ctx['module'], 'method': ctx['method'],
'method_params': req['params']}
ctx['provenance'] = [prov_action]
resp = None
try:
resp = application.rpc_service.call_py(ctx, req)
except JSONRPCError as jre:
trace = jre.trace if hasattr(jre, 'trace') else None
resp = {'id': req['id'],
'version': req['version'],
'error': {'code': jre.code,
'name': jre.message,
'message': jre.data,
'error': trace}
}
except Exception:
trace = traceback.format_exc()
resp = {'id': req['id'],
'version': req['version'],
'error': {'code': 0,
'name': 'Unexpected Server Error',
'message': 'An unexpected server error occurred',
'error': trace}
}
if 'error' in resp:
exit_code = 500
with open(output_file_path, "w") as f:
f.write(json.dumps(resp, cls=JSONObjectEncoder))
return exit_code
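# Example input file for process_async_cli (illustrative values only): a plain
# JSON-RPC request; "version" and "id" are filled in automatically if absent,
# and the parameter keys are defined by FamaProfilingImpl, not shown here.
#
#   {
#     "method": "FamaProfiling.run_FamaReadProfiling",
#     "params": [{ ... }]
#   }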
if __name__ == "__main__":
if (len(sys.argv) >= 3 and len(sys.argv) <= 4 and
os.path.isfile(sys.argv[1])):
token = None
if len(sys.argv) == 4:
if os.path.isfile(sys.argv[3]):
with open(sys.argv[3]) as token_file:
token = token_file.read()
else:
token = sys.argv[3]
sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token))
try:
opts, args = getopt(sys.argv[1:], "", ["port=", "host="])
except GetoptError as err:
# print help information and exit:
print(str(err)) # will print something like "option -a not recognized"
sys.exit(2)
port = 9999
host = 'localhost'
for o, a in opts:
if o == '--port':
port = int(a)
elif o == '--host':
host = a
print("Host set to %s" % host)
else:
assert False, "unhandled option"
start_server(host=host, port=port)
# print("Listening on port %s" % port)
# httpd = make_server( host, port, application)
#
# httpd.serve_forever()
| def __init__(self, logger):
self['client_ip'] = None
self['user_id'] = None
self['authenticated'] = None
self['token'] = None
self['module'] = None
self['method'] = None
self['call_id'] = None
self['rpc_context'] = None
self['provenance'] = None
self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3'])
self._logger = logger
def log_err(self, message):
self._log(log.ERR, message)
def log_info(self, message):
self._log(log.INFO, message)
def log_debug(self, message, level=1):
if level in self._debug_levels:
pass
else:
level = int(level)
if level < 1 or level > 3:
raise ValueError("Illegal log level: " + str(level))
level = level + 6
self._log(level, message)
def set_log_level(self, level):
self._logger.set_log_level(level)
def get_log_level(self):
return self._logger.get_log_level()
def clear_log_level(self):
self._logger.clear_user_log_level()
def _log(self, level, message):
self._logger.log_message(level, message, self['client_ip'],
self['user_id'], self['module'],
self['method'], self['call_id'])
def provenance(self):
callbackURL = os.environ.get('SDK_CALLBACK_URL')
if callbackURL:
# OK, there's a callback server from which we can get provenance
arg_hash = {'method': 'CallbackServer.get_provenance',
'params': [],
'version': '1.1',
'id': str(_random.random())[2:]
}
body = json.dumps(arg_hash)
response = _requests.post(callbackURL, data=body,
timeout=60)
response.encoding = 'utf-8'
if response.status_code == 500:
if ('content-type' in response.headers and
response.headers['content-type'] ==
'application/json'):
err = response.json()
if 'error' in err:
raise ServerError(**err['error'])
else:
raise ServerError('Unknown', 0, response.text)
else:
raise ServerError('Unknown', 0, response.text)
if not response.ok:
response.raise_for_status()
resp = response.json()
if 'result' not in resp:
raise ServerError('Unknown', 0,
'An unknown server error occurred')
return resp['result'][0]
else:
return self.get('provenance') |
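A minimal client sketch for the JSON-RPC server above, assuming a deployment reachable at http://localhost:9999 and a valid KBase auth token; the URL, the token, and the parameter dict passed to run_FamaReadProfiling are placeholders (the real parameter keys are defined by FamaProfilingImpl, which is not shown here).

import json
import requests

url = "http://localhost:9999"   # hypothetical endpoint
token = "<KBASE_AUTH_TOKEN>"    # hypothetical token

payload = {
    "version": "1.1",
    "id": "1",
    "method": "FamaProfiling.run_FamaReadProfiling",
    # the single dict argument expected by the method; keys are illustrative only
    "params": [{"workspace_name": "<workspace>", "input_ref": "<reads_ref>"}],
}

resp = requests.post(url, data=json.dumps(payload),
                     headers={"Authorization": token})
body = resp.json()
if "error" in body:
    raise RuntimeError(body["error"]["message"])
result = body["result"]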
p-ce7d4956.entry.js | import{r as t,h as e,F as a,g as i,c as n}from"./p-6b8b45ed.js";import{b as o,g as r,C as l}from"./p-f1686cee.js";import"./p-a4e6e35b.js";const s=class{constructor(e){t(this,e)}renderHeader(){return o(this.el,"header")?e("slot",{name:"header"}):null}renderContent(){return this.contentBehind?[e("div",{class:{content:!0,"content--behind":!!this.contentBehind}},e("slot",null)),e("slot",{name:"center-row"})]:[e("div",{class:"content"},e("slot",null),e("slot",{name:"center-row"}))]}renderFooter(){return o(this.el,"footer")?e("div",{class:"footer"},e("slot",{name:"footer"})):null}renderMain(){const t=o(this.el,"primary-panel");return e("div",{class:{main:!0,"main--reversed":"end"===(null==t?void 0:t.position)}},e("slot",{name:"primary-panel"}),this.renderContent(),e("slot",{name:"contextual-panel"}))}render(){return e(a,null,this.renderHeader(),this.renderMain(),this.renderFooter())}get el(){return i(this)}};s.style="@-webkit-keyframes in{0%{opacity:0}100%{opacity:1}}@keyframes in{0%{opacity:0}100%{opacity:1}}@-webkit-keyframes in-down{0%{opacity:0;-webkit-transform:translate3D(0, -5px, 0);transform:translate3D(0, -5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@keyframes in-down{0%{opacity:0;-webkit-transform:translate3D(0, -5px, 0);transform:translate3D(0, -5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@-webkit-keyframes in-up{0%{opacity:0;-webkit-transform:translate3D(0, 5px, 0);transform:translate3D(0, 5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@keyframes in-up{0%{opacity:0;-webkit-transform:translate3D(0, 5px, 0);transform:translate3D(0, 5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@-webkit-keyframes in-scale{0%{opacity:0;-webkit-transform:scale3D(0.95, 0.95, 1);transform:scale3D(0.95, 0.95, 1)}100%{opacity:1;-webkit-transform:scale3D(1, 1, 1);transform:scale3D(1, 1, 1)}}@keyframes in-scale{0%{opacity:0;-webkit-transform:scale3D(0.95, 0.95, 1);transform:scale3D(0.95, 0.95, 1)}100%{opacity:1;-webkit-transform:scale3D(1, 1, 1);transform:scale3D(1, 1, 1)}}:root{--calcite-animation-timing:300ms}.calcite-animate{opacity:0;-webkit-animation-fill-mode:both;animation-fill-mode:both;-webkit-animation-duration:var(--calcite-animation-timing);animation-duration:var(--calcite-animation-timing)}.calcite-animate__in{-webkit-animation-name:in;animation-name:in}.calcite-animate__in-down{-webkit-animation-name:in-down;animation-name:in-down}.calcite-animate__in-up{-webkit-animation-name:in-up;animation-name:in-up}.calcite-animate__in-scale{-webkit-animation-name:in-scale;animation-name:in-scale}:host{-webkit-box-sizing:border-box;box-sizing:border-box;background-color:var(--calcite-ui-foreground-1);color:var(--calcite-ui-text-2);font-size:var(--calcite-font-size--1)}:host *{-webkit-box-sizing:border-box;box-sizing:border-box}:root{--calcite-popper-transition:150ms ease-in-out}:host([hidden]){display:none}:host{width:100%;height:100%;position:absolute;display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;overflow:hidden;top:0;right:0;bottom:0;left:0;--calcite-shell-tip-spacing:26vw}.main{height:100%;width:100%;-ms-flex:1 1 auto;flex:1 1 
auto;display:-ms-flexbox;display:flex;-ms-flex-direction:row;flex-direction:row;position:relative;border-width:0;border-top-width:1px;border-bottom-width:1px;border-style:solid;border-color:var(--calcite-ui-border-3);-ms-flex-pack:justify;justify-content:space-between;overflow:hidden}.main--reversed{-ms-flex-direction:row-reverse;flex-direction:row-reverse}.content{display:-ms-flexbox;display:flex;height:100%;overflow:auto;width:100%;border-width:0;border-left-width:1px;border-right-width:1px;border-style:solid;border-color:var(--calcite-ui-border-3);-ms-flex-flow:column nowrap;flex-flow:column nowrap}.content ::slotted(calcite-shell-center-row),.content ::slotted(calcite-panel),.content ::slotted(calcite-flow){-ms-flex-item-align:stretch;align-self:stretch;-ms-flex:1 1 auto;flex:1 1 auto;max-height:unset}.content--behind{border-width:0;position:absolute;top:0;right:0;bottom:0;left:0;z-index:0;display:initial}::slotted(calcite-shell-center-row){width:unset}::slotted(.header .heading){font-weight:var(--calcite-font-weight-normal);font-size:var(--calcite-font-size--2);line-height:1.375}::slotted(calcite-shell-panel),::slotted(calcite-shell-center-row){position:relative;z-index:1}slot[name=center-row]::slotted(calcite-shell-center-row:not([detached])){border-left-width:1px;border-right-width:1px;border-color:var(--calcite-ui-border-3)}::slotted(calcite-tip-manager){border-radius:0.25rem;-webkit-box-shadow:0 6px 20px -4px rgba(0, 0, 0, 0.1), 0 4px 12px -2px rgba(0, 0, 0, 0.08);box-shadow:0 6px 20px -4px rgba(0, 0, 0, 0.1), 0 4px 12px -2px rgba(0, 0, 0, 0.08);position:absolute;-webkit-animation:in-up var(--calcite-animation-timing) ease-in-out;animation:in-up var(--calcite-animation-timing) ease-in-out;-webkit-box-sizing:border-box;box-sizing:border-box;bottom:0.5rem;left:var(--calcite-shell-tip-spacing);right:var(--calcite-shell-tip-spacing);z-index:2}";const c=class{constructor(e){t(this,e),this.detached=!1,this.heightScale="s",this.position="end"}render(){const{el:t}=this,i="rtl"===r(t),n=e("div",{class:{content:!0,[l.rtl]:i}},e("slot",null)),s=o(t,"action-bar"),c=[s?e("div",{class:{"action-bar-container":!0,[l.rtl]:i}},e("slot",{name:"action-bar"})):null,n];return"end"===(null==s?void 0:s.position)&&c.reverse(),e(a,null,c)}get el(){return i(this)}};c.style="@-webkit-keyframes in{0%{opacity:0}100%{opacity:1}}@keyframes in{0%{opacity:0}100%{opacity:1}}@-webkit-keyframes in-down{0%{opacity:0;-webkit-transform:translate3D(0, -5px, 0);transform:translate3D(0, -5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@keyframes in-down{0%{opacity:0;-webkit-transform:translate3D(0, -5px, 0);transform:translate3D(0, -5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@-webkit-keyframes in-up{0%{opacity:0;-webkit-transform:translate3D(0, 5px, 0);transform:translate3D(0, 5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@keyframes in-up{0%{opacity:0;-webkit-transform:translate3D(0, 5px, 0);transform:translate3D(0, 5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@-webkit-keyframes in-scale{0%{opacity:0;-webkit-transform:scale3D(0.95, 0.95, 1);transform:scale3D(0.95, 0.95, 1)}100%{opacity:1;-webkit-transform:scale3D(1, 1, 1);transform:scale3D(1, 1, 1)}}@keyframes in-scale{0%{opacity:0;-webkit-transform:scale3D(0.95, 0.95, 1);transform:scale3D(0.95, 0.95, 1)}100%{opacity:1;-webkit-transform:scale3D(1, 1, 1);transform:scale3D(1, 1, 
1)}}:root{--calcite-animation-timing:300ms}.calcite-animate{opacity:0;-webkit-animation-fill-mode:both;animation-fill-mode:both;-webkit-animation-duration:var(--calcite-animation-timing);animation-duration:var(--calcite-animation-timing)}.calcite-animate__in{-webkit-animation-name:in;animation-name:in}.calcite-animate__in-down{-webkit-animation-name:in-down;animation-name:in-down}.calcite-animate__in-up{-webkit-animation-name:in-up;animation-name:in-up}.calcite-animate__in-scale{-webkit-animation-name:in-scale;animation-name:in-scale}:host{-webkit-box-sizing:border-box;box-sizing:border-box;background-color:var(--calcite-ui-foreground-1);color:var(--calcite-ui-text-2);font-size:var(--calcite-font-size--1)}:host *{-webkit-box-sizing:border-box;box-sizing:border-box}:root{--calcite-popper-transition:150ms ease-in-out}:host([hidden]){display:none}:host{display:-ms-flexbox;display:flex;-ms-flex:1 1 auto;flex:1 1 auto;overflow:hidden;background-color:transparent}.content{display:-ms-flexbox;display:flex;height:100%;margin:0;overflow:hidden;width:100%;-ms-flex:1 0 0px;flex:1 0 0}.action-bar-container{display:-ms-flexbox;display:flex}:host([detached]){border-width:0;border-radius:0.25rem;-webkit-box-shadow:0 4px 8px -1px rgba(0, 0, 0, 0.08), 0 2px 4px -1px rgba(0, 0, 0, 0.04);box-shadow:0 4px 8px -1px rgba(0, 0, 0, 0.08), 0 2px 4px -1px rgba(0, 0, 0, 0.04);margin-top:0.5rem;margin-bottom:1.5rem;margin-left:0.5rem;margin-right:0.5rem;-webkit-animation:in-up var(--calcite-animation-timing) ease-in-out;animation:in-up var(--calcite-animation-timing) ease-in-out}:host([position=end]){-ms-flex-item-align:end;align-self:flex-end}:host([position=start]){-ms-flex-item-align:start;align-self:flex-start}:host([height-scale=s]){height:33.333333%}:host([height-scale=m]){height:70%}:host([height-scale=l]){height:100%}:host([height-scale=l][detached]){height:calc(100% - 2rem)}::slotted(calcite-panel){width:100%;height:100%}::slotted(calcite-action-bar){border-right-width:1px;border-style:solid;border-color:var(--calcite-ui-border-3)}::slotted(calcite-action-bar[position=end]){border-left-width:0;border-right-width:1px;border-style:solid;border-color:var(--calcite-ui-border-3)}.calcite--rtl ::slotted(calcite-action-bar){border-right-width:0;border-left-width:1px;border-style:solid;border-color:var(--calcite-ui-border-3)}.calcite--rtl ::slotted(calcite-action-bar[position=end]){border-left-width:0;border-right-width:1px;border-style:solid;border-color:var(--calcite-ui-border-3)}";const m=class{constructor(e){t(this,e),this.calciteShellPanelToggle=n(this,"calciteShellPanelToggle",7),this.collapsed=!1,this.detached=!1,this.detachedHeightScale="l",this.widthScale="m"}watchHandler(){this.calciteShellPanelToggle.emit()}renderHeader(){const{el:t}=this;return o(t,"header")?e("div",{class:"content__header"},e("slot",{name:"header"})):null}render(){const{collapsed:t,detached:i,position:n}=this,o=e("div",{class:{content:!0,"content--detached":i},hidden:t},this.renderHeader(),e("div",{class:"content__body"},e("slot",null))),r=[e("slot",{name:"action-bar"}),o];return"end"===n&&r.reverse(),e(a,null,r)}get el(){return i(this)}static get watchers(){return{collapsed:["watchHandler"]}}};m.style="@-webkit-keyframes in{0%{opacity:0}100%{opacity:1}}@keyframes in{0%{opacity:0}100%{opacity:1}}@-webkit-keyframes in-down{0%{opacity:0;-webkit-transform:translate3D(0, -5px, 0);transform:translate3D(0, -5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@keyframes 
in-down{0%{opacity:0;-webkit-transform:translate3D(0, -5px, 0);transform:translate3D(0, -5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@-webkit-keyframes in-up{0%{opacity:0;-webkit-transform:translate3D(0, 5px, 0);transform:translate3D(0, 5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@keyframes in-up{0%{opacity:0;-webkit-transform:translate3D(0, 5px, 0);transform:translate3D(0, 5px, 0)}100%{opacity:1;-webkit-transform:translate3D(0, 0, 0);transform:translate3D(0, 0, 0)}}@-webkit-keyframes in-scale{0%{opacity:0;-webkit-transform:scale3D(0.95, 0.95, 1);transform:scale3D(0.95, 0.95, 1)}100%{opacity:1;-webkit-transform:scale3D(1, 1, 1);transform:scale3D(1, 1, 1)}}@keyframes in-scale{0%{opacity:0;-webkit-transform:scale3D(0.95, 0.95, 1);transform:scale3D(0.95, 0.95, 1)}100%{opacity:1;-webkit-transform:scale3D(1, 1, 1);transform:scale3D(1, 1, 1)}}:root{--calcite-animation-timing:300ms}.calcite-animate{opacity:0;-webkit-animation-fill-mode:both;animation-fill-mode:both;-webkit-animation-duration:var(--calcite-animation-timing);animation-duration:var(--calcite-animation-timing)}.calcite-animate__in{-webkit-animation-name:in;animation-name:in}.calcite-animate__in-down{-webkit-animation-name:in-down;animation-name:in-down}.calcite-animate__in-up{-webkit-animation-name:in-up;animation-name:in-up}.calcite-animate__in-scale{-webkit-animation-name:in-scale;animation-name:in-scale}:host{-webkit-box-sizing:border-box;box-sizing:border-box;background-color:var(--calcite-ui-foreground-1);color:var(--calcite-ui-text-2);font-size:var(--calcite-font-size--1)}:host *{-webkit-box-sizing:border-box;box-sizing:border-box}:root{--calcite-popper-transition:150ms ease-in-out}:host([hidden]){display:none}:host{display:-ms-flexbox;display:flex;-ms-flex-align:stretch;align-items:stretch;background-color:transparent;pointer-events:none;--calcite-shell-panel-detached-max-height:unset}::slotted(calcite-panel),::slotted(calcite-flow){-ms-flex:1 1 auto;flex:1 1 auto;height:100%;width:100%;max-height:unset;max-width:unset}::slotted(.calcite-match-height){display:-ms-flexbox;display:flex;-ms-flex:1 1 auto;flex:1 1 auto;overflow:hidden}.content{-ms-flex-align:stretch;align-items:stretch;-ms-flex-item-align:stretch;align-self:stretch;background-color:var(--calcite-ui-background);display:-ms-flexbox;display:flex;padding:0;pointer-events:auto;-ms-flex-direction:column;flex-direction:column;-ms-flex-wrap:nowrap;flex-wrap:nowrap;width:var(--calcite-shell-panel-width);max-width:var(--calcite-shell-panel-max-width);min-width:var(--calcite-shell-panel-min-width);-webkit-transition:max-height 150ms ease-in-out, max-width 150ms ease-in-out;transition:max-height 150ms ease-in-out, max-width 150ms ease-in-out}.content__header{display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;-ms-flex:0 1 auto;flex:0 1 auto;-ms-flex-wrap:nowrap;flex-wrap:nowrap;-ms-flex-align:stretch;align-items:stretch}.content__body{display:-ms-flexbox;display:flex;-ms-flex:1 1 auto;flex:1 1 auto;-ms-flex-direction:column;flex-direction:column;overflow:hidden}:host([width-scale=s]) .content{--calcite-shell-panel-width:calc(var(--calcite-panel-width-multiplier) * 12vw);--calcite-shell-panel-max-width:calc(var(--calcite-panel-width-multiplier) * 300px);--calcite-shell-panel-min-width:calc(var(--calcite-panel-width-multiplier) * 150px)}:host([width-scale=m]) .content{--calcite-shell-panel-width:calc(var(--calcite-panel-width-multiplier) * 
20vw);--calcite-shell-panel-max-width:calc(var(--calcite-panel-width-multiplier) * 420px);--calcite-shell-panel-min-width:calc(var(--calcite-panel-width-multiplier) * 240px)}:host([width-scale=l]) .content{--calcite-shell-panel-width:calc(var(--calcite-panel-width-multiplier) * 45vw);--calcite-shell-panel-max-width:calc(var(--calcite-panel-width-multiplier) * 680px);--calcite-shell-panel-min-width:calc(var(--calcite-panel-width-multiplier) * 340px)}:host([detached-height-scale=s]) .content--detached{--calcite-shell-panel-detached-max-height:40vh}:host([detached-height-scale=m]) .content--detached{--calcite-shell-panel-detached-max-height:60vh}:host([detached-height-scale=l]) .content--detached{--calcite-shell-panel-detached-max-height:80vh}.content--detached{border-radius:0.25rem;-webkit-box-shadow:0 4px 8px -1px rgba(0, 0, 0, 0.08), 0 2px 4px -1px rgba(0, 0, 0, 0.04);box-shadow:0 4px 8px -1px rgba(0, 0, 0, 0.08), 0 2px 4px -1px rgba(0, 0, 0, 0.04);height:auto;overflow:hidden;margin-top:0.5rem;margin-bottom:auto;margin-left:0.5rem;margin-right:0.5rem;max-height:var(--calcite-shell-panel-detached-max-height)}.content--detached ::slotted(calcite-panel),.content--detached ::slotted(calcite-flow){max-height:unset}.content[hidden]{display:none}:host([position=start]) slot[name=action-bar]::slotted(calcite-action-bar){border-right-width:1px;border-right-color:var(--calcite-ui-border-3);border-right-style:solid}:host([position=end]) slot[name=action-bar]::slotted(calcite-action-bar){border-left-width:1px;border-left-color:var(--calcite-ui-border-3);border-left-style:solid}";export{s as calcite_shell,c as calcite_shell_center_row,m as calcite_shell_panel} |
||
flatten.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::handle_fields::{HANDLER_ARG_NAME, KEY_ARG_NAME};
use crate::match_::MATCH_CONSTANTS;
use crate::util::{
is_relay_custom_inline_fragment_directive, CustomMetadataDirectives, PointerAddress,
};
use graphql_ir::{
Argument, Condition, Directive, FragmentDefinition, InlineFragment, LinkedField,
OperationDefinition, Program, Selection, TransformedValue, ValidationMessage,
};
use interner::StringKey;
use schema::{Schema, Type};
use crate::node_identifier::{LocationAgnosticPartialEq, NodeIdentifier};
use common::{Diagnostic, DiagnosticsResult, Location, NamedItem};
use fnv::FnvHashMap;
use parking_lot::{Mutex, RwLock};
use rayon::prelude::*;
use schema::SDLSchema;
use std::sync::Arc;
type SeenLinkedFields = Arc<RwLock<FnvHashMap<PointerAddress, TransformedValue<Arc<LinkedField>>>>>;
type SeenInlineFragments =
Arc<RwLock<FnvHashMap<(PointerAddress, Type), TransformedValue<Arc<InlineFragment>>>>>;
/// Transform that flattens inline fragments and fragment spreads, and merges
/// linked field selections.
///
/// Inline fragments are inlined (replaced with their selections) when:
/// - their type condition, if any, matches the type of the parent, and
/// - either `is_for_codegen` is set or the inline fragment has no directives,
///
/// with the exception that inline fragments carrying Relay-specific
/// directives (e.g. @defer, @__clientExtensions) are never flattened.
pub fn flatten(program: &mut Program, is_for_codegen: bool) -> DiagnosticsResult<()> {
let transform = FlattenTransform::new(program, is_for_codegen);
let errors = Arc::new(Mutex::new(Vec::new()));
program.par_operations_mut().for_each(|operation| {
if let Err(err) = transform.transform_operation(operation) {
errors.lock().extend(err.into_iter());
}
});
program.par_fragments_mut().for_each(|fragment| {
if let Err(err) = transform.transform_fragment(fragment) {
errors.lock().extend(err.into_iter());
}
});
let is_errors_empty = { errors.lock().is_empty() };
if is_errors_empty {
Ok(())
} else {
Err(Arc::try_unwrap(errors).unwrap().into_inner())
}
}
struct FlattenTransform {
schema: Arc<SDLSchema>,
is_for_codegen: bool,
seen_linked_fields: SeenLinkedFields,
seen_inline_fragments: SeenInlineFragments,
}
impl FlattenTransform {
fn new(program: &'_ Program, is_for_codegen: bool) -> Self {
Self {
schema: Arc::clone(&program.schema),
is_for_codegen,
seen_linked_fields: Default::default(),
seen_inline_fragments: Default::default(),
}
}
fn transform_operation(
&self,
operation: &mut Arc<OperationDefinition>,
) -> DiagnosticsResult<()> {
let next_selections = self.transform_selections(&operation.selections, operation.type_)?;
if let TransformedValue::Replace(next_selections) = next_selections {
Arc::make_mut(operation).selections = next_selections
};
Ok(())
}
fn transform_fragment(&self, fragment: &mut Arc<FragmentDefinition>) -> DiagnosticsResult<()> {
let next_selections =
self.transform_selections(&fragment.selections, fragment.type_condition)?;
if let TransformedValue::Replace(next_selections) = next_selections {
Arc::make_mut(fragment).selections = next_selections
};
Ok(())
}
fn transform_selections(
&self,
selections: &[Selection],
parent_type: Type,
) -> DiagnosticsResult<TransformedValue<Vec<Selection>>> {
let mut next_selections = Vec::new();
let mut has_changes = false;
for (index, selection) in selections.iter().enumerate() {
let next_selection = self.transform_selection(selection, parent_type)?;
if let TransformedValue::Replace(next_selection) = next_selection {
if !has_changes {
has_changes = true;
next_selections.reserve(selections.len());
next_selections.extend(selections.iter().take(index).cloned())
}
next_selections.push(next_selection);
} else if has_changes {
next_selections.push(selection.clone());
}
}
let mut flattened_selections = Vec::with_capacity(selections.len());
has_changes = self.flatten_selections(
&mut flattened_selections,
if has_changes {
&next_selections
} else {
selections
},
parent_type,
)? || has_changes;
Ok(if has_changes {
TransformedValue::Replace(flattened_selections)
} else | )
}
fn transform_linked_field(
&self,
linked_field: &Arc<LinkedField>,
) -> DiagnosticsResult<TransformedValue<Arc<LinkedField>>> {
let should_cache = Arc::strong_count(linked_field) > 1;
let key = PointerAddress::new(Arc::as_ref(linked_field));
if should_cache {
let seen_linked_fields = self.seen_linked_fields.read();
if let Some(prev) = seen_linked_fields.get(&key) {
return Ok(prev.clone());
}
}
let type_ = self
.schema
.field(linked_field.definition.item)
.type_
.inner();
let result = self
.transform_selections(&linked_field.selections, type_)?
.map(|next_selections| {
Arc::new(LinkedField {
alias: linked_field.alias,
definition: linked_field.definition,
arguments: linked_field.arguments.clone(),
directives: linked_field.directives.clone(),
selections: next_selections,
})
});
if should_cache {
let mut seen_linked_fields = self.seen_linked_fields.write();
// If another thread computed this in the meantime, use that result
if let Some(prev) = seen_linked_fields.get(&key) {
return Ok(prev.clone());
}
seen_linked_fields.insert(key, result.clone());
}
Ok(result)
}
fn transform_inline_fragment(
&self,
fragment: &Arc<InlineFragment>,
parent_type: Type,
) -> DiagnosticsResult<TransformedValue<Arc<InlineFragment>>> {
let should_cache = Arc::strong_count(fragment) > 1;
let key = (PointerAddress::new(Arc::as_ref(fragment)), parent_type);
if should_cache {
let seen_inline_fragments = self.seen_inline_fragments.read();
if let Some(prev) = seen_inline_fragments.get(&key) {
return Ok(prev.clone());
}
}
let next_parent_type = match fragment.type_condition {
Some(type_condition) => type_condition,
None => parent_type,
};
let result = self
.transform_selections(&fragment.selections, next_parent_type)?
.map(|next_selections| {
Arc::new(InlineFragment {
type_condition: fragment.type_condition,
directives: fragment.directives.clone(),
selections: next_selections,
})
});
if should_cache {
let mut seen_inline_fragments = self.seen_inline_fragments.write();
// If another thread computed this in the meantime, use that result
if let Some(prev) = seen_inline_fragments.get(&key) {
return Ok(prev.clone());
}
seen_inline_fragments.insert(key, result.clone());
}
Ok(result)
}
fn transform_selection(
&self,
selection: &Selection,
parent_type: Type,
) -> DiagnosticsResult<TransformedValue<Selection>> {
Ok(match selection {
Selection::InlineFragment(node) => self
.transform_inline_fragment(node, parent_type)?
.map(Selection::InlineFragment),
Selection::LinkedField(node) => self
.transform_linked_field(node)?
.map(Selection::LinkedField),
Selection::Condition(node) => self
.transform_selections(&node.selections, parent_type)?
.map(|next_selections| {
Selection::Condition(Arc::new(Condition {
value: node.value.clone(),
passing_value: node.passing_value,
selections: next_selections,
}))
}),
Selection::FragmentSpread(_) | Selection::ScalarField(_) => TransformedValue::Keep,
})
}
fn flatten_selections(
&self,
flattened_selections: &mut Vec<Selection>,
selections: &[Selection],
parent_type: Type,
) -> DiagnosticsResult<bool> {
let mut has_changes = false;
for selection in selections {
if let Selection::InlineFragment(inline_fragment) = selection {
if should_flatten_inline_fragment(inline_fragment, parent_type, self.is_for_codegen)
{
has_changes = true;
self.flatten_selections(
flattened_selections,
&inline_fragment.selections,
parent_type,
)?;
continue;
}
}
let flattened_selection = flattened_selections
.iter_mut()
.find(|sel| NodeIdentifier::are_equal(&self.schema, sel, selection));
match flattened_selection {
None => {
flattened_selections.push(selection.clone());
}
Some(flattened_selection) => {
has_changes = true;
match flattened_selection {
Selection::InlineFragment(flattened_node) => {
let type_condition =
flattened_node.type_condition.unwrap_or(parent_type);
let node = match selection {
Selection::InlineFragment(node) => node,
_ => unreachable!("FlattenTransform: Expected an InlineFragment."),
};
if let Some(flattened_module_directive) = flattened_node
.directives
.named(MATCH_CONSTANTS.custom_module_directive_name)
{
if let Some(module_directive) = node
.directives
.named(MATCH_CONSTANTS.custom_module_directive_name)
{
if !flattened_module_directive.arguments[0].location_agnostic_eq(&module_directive.arguments[0]) || // key
!flattened_module_directive.arguments[2].location_agnostic_eq(&module_directive.arguments[2]) || // module
!flattened_module_directive.arguments[4].location_agnostic_eq(&module_directive.arguments[4])
// name
{
let error = Diagnostic::error(
ValidationMessage::ConflictingModuleSelections,
module_directive.name.location,
)
.annotate(
"conflicts with",
flattened_module_directive.name.location,
);
return Err(vec![error]);
}
}
}
let flattened_node = Arc::make_mut(flattened_node);
self.flatten_selections(
&mut flattened_node.selections,
&node.selections,
type_condition,
)?;
}
Selection::LinkedField(flattened_node) => {
let node = match selection {
Selection::LinkedField(node) => node,
_ => unreachable!("FlattenTransform: Expected a LinkedField."),
};
if !ignoring_type_and_location::arguments_equals(
&node.arguments,
&flattened_node.arguments,
) {
return Err(vec![self.create_conflicting_fields_error(
node.alias_or_name(&self.schema),
flattened_node.definition.location,
&flattened_node.arguments,
node.definition.location,
&node.arguments,
)]);
}
let type_ = self
.schema
.field(flattened_node.definition.item)
.type_
.inner();
let should_merge_handles = selection.directives().iter().any(|d| {
CustomMetadataDirectives::is_handle_field_directive(d.name.item)
});
let flattened_node = Arc::make_mut(flattened_node);
if should_merge_handles {
flattened_node.directives = merge_handle_directives(
&flattened_node.directives,
selection.directives(),
)
};
self.flatten_selections(
&mut flattened_node.selections,
&node.selections,
type_,
)?;
}
Selection::Condition(flattened_node) => {
let node_selections = match selection {
Selection::Condition(node) => &node.selections,
_ => unreachable!("FlattenTransform: Expected a Condition."),
};
let flattened_node = Arc::make_mut(flattened_node);
self.flatten_selections(
&mut flattened_node.selections,
&node_selections,
parent_type,
)?;
}
Selection::ScalarField(flattened_node) => {
let node = match selection {
Selection::ScalarField(node) => node,
_ => unreachable!("FlattenTransform: Expected a ScalarField."),
};
if !ignoring_type_and_location::arguments_equals(
&node.arguments,
&flattened_node.arguments,
) {
return Err(vec![self.create_conflicting_fields_error(
node.alias_or_name(&self.schema),
flattened_node.definition.location,
&flattened_node.arguments,
node.definition.location,
&node.arguments,
)]);
}
let should_merge_handles = node.directives.iter().any(|d| {
CustomMetadataDirectives::is_handle_field_directive(d.name.item)
});
if should_merge_handles {
let flattened_node = Arc::make_mut(flattened_node);
flattened_node.directives = merge_handle_directives(
&flattened_node.directives,
selection.directives(),
);
}
}
Selection::FragmentSpread(_) => {}
};
}
}
}
Ok(has_changes)
}
fn create_conflicting_fields_error(
&self,
field_name: StringKey,
location_a: Location,
arguments_a: &[Argument],
location_b: Location,
arguments_b: &[Argument],
) -> Diagnostic {
Diagnostic::error(
ValidationMessage::InvalidSameFieldWithDifferentArguments {
field_name,
arguments_a: graphql_text_printer::print_arguments(&self.schema, &arguments_a),
},
location_a,
)
.annotate(
format!(
"which conflicts with this field with applied argument values {}",
graphql_text_printer::print_arguments(&self.schema, &arguments_b),
),
location_b,
)
}
}
fn should_flatten_inline_fragment(
inline_fragment: &InlineFragment,
parent_type: Type,
is_for_codegen: bool,
) -> bool {
if let Some(type_condition) = inline_fragment.type_condition {
if type_condition != parent_type {
return false;
}
}
if is_for_codegen {
!inline_fragment
.directives
.iter()
.any(is_relay_custom_inline_fragment_directive)
} else {
inline_fragment.directives.is_empty()
}
}
fn merge_handle_directives(
directives_a: &[Directive],
directives_b: &[Directive],
) -> Vec<Directive> {
let (mut handles, mut directives): (Vec<_>, Vec<_>) =
directives_a.iter().cloned().partition(|directive| {
CustomMetadataDirectives::is_handle_field_directive(directive.name.item)
});
for directive in directives_b {
if CustomMetadataDirectives::is_handle_field_directive(directive.name.item) {
if handles.is_empty() {
handles.push(directive.clone());
} else {
let current_handler_arg = directive.arguments.named(*HANDLER_ARG_NAME);
let current_name_arg = directive.arguments.named(*KEY_ARG_NAME);
let is_duplicate_handle = handles.iter().any(|handle| {
current_handler_arg
.location_agnostic_eq(&handle.arguments.named(*HANDLER_ARG_NAME))
&& current_name_arg
.location_agnostic_eq(&handle.arguments.named(*KEY_ARG_NAME))
});
if !is_duplicate_handle {
handles.push(directive.clone());
}
}
}
}
directives.extend(handles.into_iter());
directives
}
mod ignoring_type_and_location {
use crate::node_identifier::LocationAgnosticPartialEq;
use graphql_ir::{Argument, Value};
/// Verify that two sets of arguments are equivalent - same argument names
/// and values. Notably, this ignores the types of arguments and values,
/// which may not always be inferred identically.
pub fn arguments_equals(a: &[Argument], b: &[Argument]) -> bool {
slice_equals(a, b, |a, b| {
a.name.location_agnostic_eq(&b.name) && value_equals(&a.value.item, &b.value.item)
})
}
fn value_equals(a: &Value, b: &Value) -> bool {
match (a, b) {
(Value::Constant(a), Value::Constant(b)) => a.location_agnostic_eq(b),
(Value::Variable(a), Value::Variable(b)) => a.name.location_agnostic_eq(&b.name),
(Value::List(a), Value::List(b)) => slice_equals(a, b, value_equals),
(Value::Object(a), Value::Object(b)) => arguments_equals(a, b),
_ => false,
}
}
fn slice_equals<T, F>(a: &[T], b: &[T], eq: F) -> bool
where
F: Fn(&T, &T) -> bool,
{
a.len() == b.len() && a.iter().zip(b).all(|(a, b)| eq(a, b))
}
}
| {
TransformedValue::Keep
} |
serializers.py | from rest_framework import serializers
# from django.contrib.auth.models import User
from .models import User
class | (serializers.ModelSerializer):
class Meta:
model = User
fields = ('username',)
| UserSerializer |
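# Usage sketch (assuming a User instance named `user` exists):
#   UserSerializer(user).data  # -> {"username": user.username}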
ordered_set.ts | import cached from "./fields/cached"
import ItemType from "./item"
import { IDFields } from "./object_identification"
import {
GraphQLString,
GraphQLObjectType,
GraphQLNonNull,
GraphQLList,
GraphQLFieldConfig, | import { ResolverContext } from "types/graphql"
const OrderedSetType = new GraphQLObjectType<any, ResolverContext>({
name: "OrderedSet",
fields: () => ({
...IDFields,
cached,
description: {
type: GraphQLString,
},
key: {
type: GraphQLString,
},
item_type: {
type: GraphQLString,
},
items: {
type: new GraphQLList(ItemType),
resolve: ({ id, item_type }, _options, { setItemsLoader }) => {
return setItemsLoader(id).then(({ body: items }) => {
return items.map(item => {
item.item_type = item_type // eslint-disable-line no-param-reassign
return item
})
})
},
},
name: {
type: GraphQLString,
},
}),
})
const OrderedSet: GraphQLFieldConfig<void, ResolverContext> = {
type: OrderedSetType,
description: "An OrderedSet",
args: {
id: {
type: new GraphQLNonNull(GraphQLString),
description: "The ID of the OrderedSet",
},
},
resolve: (_root, { id }, { setLoader }) => setLoader(id),
}
export default OrderedSet | } from "graphql" |
select.ts | import { IIterator } from "../intefaces/iterator.interface";
import { BaseIterator } from "../implements/baseIterator";
export class SelectClause<T> extends BaseIterator<T> implements IIterator<T> { | if (source) {
return (source as T[]).map(x => (this._iterator(x)));
}
return source;
}
constructor(func: (item: T) => any) {
super();
this._iterator = func;
}
} |
_iterator: (item: T) => boolean;
execute(source: T[]): any[]{ |
file_test.go | package telebot
import (
"io"
"testing"
"github.com/stretchr/testify/assert"
)
func | (t *testing.T) {
f := FromDisk("telebot.go")
g := FromURL("http://")
assert.True(t, f.OnDisk())
assert.True(t, (&File{FileID: "1"}).InCloud())
assert.Equal(t, File{FileLocal: "telebot.go"}, f)
assert.Equal(t, File{FileURL: "http://"}, g)
assert.Equal(t, File{FileReader: io.Reader(nil)}, FromReader(io.Reader(nil)))
g.stealRef(&f)
f.stealRef(&g)
assert.Equal(t, g.FileLocal, f.FileLocal)
assert.Equal(t, f.FileURL, g.FileURL)
}
| TestFile |
results.go | package trigger
import (
"github.com/chnsz/golangsdk"
"github.com/chnsz/golangsdk/pagination"
)
type commonResult struct {
golangsdk.Result
}
// CreateResult represents a result of the Create method.
type CreateResult struct {
commonResult
}
// GetResult represents a result of the Get method.
type GetResult struct {
commonResult
}
// UpdateResult represents a result of the Update method.
type UpdateResult struct {
golangsdk.ErrResult
}
// DeleteResult represents a result of the Delete method.
type DeleteResult struct {
golangsdk.ErrResult
}
// Trigger is a struct that represents the result of Create and Get methods.
type Trigger struct {
TriggerId string `json:"trigger_id"`
TriggerTypeCode string `json:"trigger_type_code"`
EventData map[string]interface{} `json:"event_data"`
EventTypeCode string `json:"event_type_code"`
Status string `json:"trigger_status"`
LastUpdatedTime string `json:"last_updated_time"`
CreatedTime string `json:"created_time"`
LastError string `json:"last_error"`
}
func (r commonResult) Extract() (*Trigger, error) {
var s Trigger
err := r.ExtractInto(&s)
return &s, err
}
// TriggerPage represents the response pages of the List method.
type TriggerPage struct {
pagination.SinglePageBase
}
// ExtractList is a method to extract a list of triggers from the response.
func | (r pagination.Page) ([]Trigger, error) {
var s []Trigger
err := (r.(TriggerPage)).ExtractInto(&s)
return s, err
}
| ExtractList |
io.py | """Tasks for reading and writing data.
Tasks
=====
.. autosummary::
:toctree:
LoadFiles
LoadMaps
LoadFilesFromParams
Save
Print
LoadBeamTransfer
File Groups
===========
Several tasks accept groups of files as arguments. These are specified in the YAML file as a dictionary like below.
.. code-block:: yaml
list_of_file_groups:
- tag: first_group # An optional tag naming the group
files:
- 'file1.h5'
- 'file[3-4].h5' # Globs are processed
- 'file7.h5'
- files: # No tag specified, implicitly gets the tag 'group_2'
- 'another_file1.h5'
- 'another_file2.h5'
single_group:
files: ['file1.h5', 'file2.h5']
"""
import os.path
import h5py
import numpy as np
from yaml import dump as yamldump
from caput import pipeline
from caput import config
from cora.util import units
from . import task
from ..util.truncate import bit_truncate_weights, bit_truncate_fixed
from .containers import SiderealStream, TimeStream, TrackBeam
TRUNC_SPEC = {
SiderealStream: {
"dataset": ["vis", "vis_weight"],
"weight_dataset": ["vis_weight", None],
"fixed_precision": 1e-4,
"variance_increase": 1e-3,
},
TimeStream: {
"dataset": ["vis", "vis_weight"],
"weight_dataset": ["vis_weight", None],
"fixed_precision": 1e-4,
"variance_increase": 1e-3,
},
TrackBeam: {
"dataset": ["beam", "weight"],
"weight_dataset": ["weight", None],
"fixed_precision": 1e-4,
"variance_increase": 1e-3,
},
}
def _list_of_filelists(files):
# Take in a list of lists/glob patterns of filenames
import glob
f2 = []
for filelist in files:
if isinstance(filelist, str):
filelist = glob.glob(filelist)
elif isinstance(filelist, list):
pass
else:
raise Exception("Must be list or glob pattern.")
f2.append(filelist)
return f2
def _list_or_glob(files):
# Take in a list of lists/glob patterns of filenames
import glob
if isinstance(files, str):
files = sorted(glob.glob(files))
elif isinstance(files, list):
pass
else:
raise ValueError("Argument must be list or glob pattern, got %s" % repr(files))
return files
def _list_of_filegroups(groups):
# Process a file group/groups
import glob
# Convert to list if the group was not included in a list
if not isinstance(groups, list):
groups = [groups]
# Iterate over groups, set the tag if needed, and process the file list
# through glob
for gi, group in enumerate(groups):
files = group["files"]
if "tag" not in group:
group["tag"] = "group_%i" % gi
flist = []
for fname in files:
flist += glob.glob(fname)
if not len(flist):
raise RuntimeError("No files in group exist (%s)." % files)
group["files"] = flist
return groups
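# A quick illustration (not part of the pipeline) of how the helper above
# resolves a file group, assuming hypothetical files map0.h5 and map1.h5
# exist in the working directory (glob order is not guaranteed):
#
#   _list_of_filegroups({"files": ["map*.h5"]})
#   # -> [{"files": ["map0.h5", "map1.h5"], "tag": "group_0"}]
#
#   _list_of_filegroups([{"tag": "maps", "files": ["map0.h5"]}])
#   # -> [{"files": ["map0.h5"], "tag": "maps"}]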
class LoadMaps(task.MPILoggedTask):
"""Load a series of maps from files given in the tasks parameters.
Maps are given as one, or a list of `File Groups` (see
:mod:`draco.core.io`). Maps within the same group are added together
before being passed on.
Attributes
----------
maps : list or dict
A dictionary specifying a file group, or a list of them.
"""
maps = config.Property(proptype=_list_of_filegroups)
def next(self):
"""Load the groups of maps from disk and pass them on.
Returns
-------
map : :class:`containers.Map`
"""
from . import containers
# Exit this task if we have eaten all the file groups
if len(self.maps) == 0:
raise pipeline.PipelineStopIteration
group = self.maps.pop(0)
map_stack = None
# Iterate over all the files in the group, load them into a Map
# container and add them all together
for mfile in group["files"]:
self.log.debug("Loading file %s", mfile)
current_map = containers.Map.from_file(mfile, distributed=True)
current_map.redistribute("freq")
# Start the stack if needed
if map_stack is None:
map_stack = current_map
# Otherwise, check that the new map has consistent frequencies,
# nside and pol and stack up.
else:
if (current_map.freq != map_stack.freq).all():
raise RuntimeError("Maps do not have consistent frequencies.")
if (current_map.index_map["pol"] != map_stack.index_map["pol"]).all():
raise RuntimeError("Maps do not have the same polarisations.")
if (
current_map.index_map["pixel"] != map_stack.index_map["pixel"]
).all():
raise RuntimeError("Maps do not have the same pixelisation.")
map_stack.map[:] += current_map.map[:]
# Assign a tag to the stack of maps
map_stack.attrs["tag"] = group["tag"]
return map_stack
class LoadFITSCatalog(task.SingleTask):
"""Load an SDSS-style FITS source catalog.
Catalogs are given as one, or a list of `File Groups` (see
:mod:`draco.core.io`). Catalogs within the same group are combined together
before being passed on.
Attributes
----------
catalogs : list or dict
A dictionary specifying a file group, or a list of them.
z_range : list, optional
Select only sources with a redshift within the given range.
freq_range : list, optional
Select only sources with a 21cm line freq within the given range. Overrides
`z_range`.
"""
catalogs = config.Property(proptype=_list_of_filegroups)
z_range = config.list_type(type_=float, length=2, default=None)
freq_range = config.list_type(type_=float, length=2, default=None)
def process(self):
"""Load the groups of catalogs from disk, concatenate them and pass them on.
Returns
-------
catalog : :class:`containers.SpectroscopicCatalog`
"""
from astropy.io import fits
from . import containers
# Exit this task if we have eaten all the file groups
if len(self.catalogs) == 0:
raise pipeline.PipelineStopIteration
group = self.catalogs.pop(0)
# Set the redshift selection
if self.freq_range:
zl = units.nu21 / self.freq_range[1] - 1
zh = units.nu21 / self.freq_range[0] - 1
self.z_range = (zl, zh)
if self.z_range:
zl, zh = self.z_range
self.log.info(f"Applying redshift selection {zl:.2f} <= z <= {zh:.2f}")
# Load the data only on rank=0 and then broadcast
if self.comm.rank == 0:
# Iterate over all the files in the group, load them into a Map
# container and add them all together
catalog_stack = []
for cfile in group["files"]:
self.log.debug("Loading file %s", cfile)
# TODO: read out the weights from the catalogs
with fits.open(cfile, mode="readonly") as cat:
pos = np.array([cat[1].data[col] for col in ["RA", "DEC", "Z"]])
# Apply any redshift selection to the objects
if self.z_range:
zsel = (pos[2] >= self.z_range[0]) & (pos[2] <= self.z_range[1])
pos = pos[:, zsel]
catalog_stack.append(pos)
            # NOTE: this one is tricky; for some reason the concatenate here
            # produces a non C contiguous array, so we need to make it contiguous,
            # otherwise the broadcasting below will get very confused
catalog_array = np.concatenate(catalog_stack, axis=-1).astype(np.float64)
catalog_array = np.ascontiguousarray(catalog_array)
num_objects = catalog_array.shape[-1]
else:
num_objects = None
catalog_array = None
# Broadcast the size of the catalog to all ranks, create the target array and
# broadcast into it
num_objects = self.comm.bcast(num_objects, root=0)
self.log.debug(f"Constructing catalog with {num_objects} objects.")
if self.comm.rank != 0:
catalog_array = np.zeros((3, num_objects), dtype=np.float64)
self.comm.Bcast(catalog_array, root=0)
catalog = containers.SpectroscopicCatalog(object_id=num_objects)
catalog["position"]["ra"] = catalog_array[0]
catalog["position"]["dec"] = catalog_array[1]
catalog["redshift"]["z"] = catalog_array[2]
catalog["redshift"]["z_error"] = 0
# Assign a tag to the stack of maps
catalog.attrs["tag"] = group["tag"]
return catalog
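# Hedged illustration, not part of the original source: how a `freq_range` selection is
# assumed to translate into a redshift cut in `process` above. The 21 cm rest frequency
# (units.nu21) is taken to be roughly 1420.406 MHz; the band edges are hypothetical.
_nu21 = 1420.406  # MHz
_freq_range = (400.0, 500.0)  # MHz
_z_range = (_nu21 / _freq_range[1] - 1, _nu21 / _freq_range[0] - 1)
# -> approximately (1.84, 2.55); only sources in this redshift interval are kept.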
class LoadFilesFromParams(task.SingleTask):
"""Load data from files given in the tasks parameters.
Attributes
----------
files : glob pattern, or list
Can either be a glob pattern, or lists of actual files.
distributed : bool, optional
Whether the file should be loaded distributed across ranks.
convert_strings : bool, optional
Convert strings to unicode when loading.
selections : dict, optional
A dictionary of axis selections. See the section below for details.
Selections
----------
Selections can be given to limit the data read to specified subsets. They can be
given for any named axis in the container.
Selections can be given as a slice with an `<axis name>_range` key with either
`[start, stop]` or `[start, stop, step]` as the value. Alternatively a list of
explicit indices to extract can be given with the `<axis name>_index` key, and
the value is a list of the indices. If both `<axis name>_range` and `<axis
name>_index` keys are given, the former takes precedence, but you should
avoid doing this.
Additionally, index-based selections currently don't work for distributed reads.
Here's an example in the YAML format that the pipeline uses:
.. code-block:: yaml
selections:
freq_range: [256, 512, 4] # A strided slice
stack_index: [1, 2, 4, 9, 16, 25, 36, 49, 64] # A sparse selection
stack_range: [1, 14] # Will override the selection above
"""
files = config.Property(proptype=_list_or_glob)
distributed = config.Property(proptype=bool, default=True)
convert_strings = config.Property(proptype=bool, default=True)
selections = config.Property(proptype=dict, default=None)
def setup(self):
"""Resolve the selections."""
self._sel = self._resolve_sel()
def process(self):
"""Load the given files in turn and pass on.
Returns
-------
cont : subclass of `memh5.BasicCont`
"""
from caput import memh5
# Garbage collect to work around leaking memory from containers.
# TODO: find actual source of leak
import gc
gc.collect()
if len(self.files) == 0:
raise pipeline.PipelineStopIteration
# Fetch and remove the first item in the list
file_ = self.files.pop(0)
self.log.info(f"Loading file {file_}")
self.log.debug(f"Reading with selections: {self._sel}")
# If we are applying selections we need to dispatch the `from_file` via the
# correct subclass, rather than relying on the internal detection of the
# subclass. To minimise the number of files being opened this is only done on
# rank=0 and is then broadcast
if self._sel:
if self.comm.rank == 0:
with h5py.File(file_, "r") as fh:
clspath = memh5.MemDiskGroup._detect_subclass_path(fh)
else:
clspath = None
clspath = self.comm.bcast(clspath, root=0)
new_cls = memh5.MemDiskGroup._resolve_subclass(clspath)
else:
new_cls = memh5.BasicCont
cont = new_cls.from_file(
file_,
distributed=self.distributed,
comm=self.comm,
convert_attribute_strings=self.convert_strings,
convert_dataset_strings=self.convert_strings,
**self._sel,
)
if "tag" not in cont.attrs:
# Get the first part of the actual filename and use it as the tag
tag = os.path.splitext(os.path.basename(file_))[0]
cont.attrs["tag"] = tag
return cont
def _resolve_sel(self):
# Turn the selection parameters into actual selectable types
sel = {}
sel_parsers = {"range": self._parse_range, "index": self._parse_index}
# To enforce the precedence of range vs index selections, we rely on the fact
# that a sort will place the axis_range keys after axis_index keys
for k in sorted(self.selections or []):
# Parse the key to get the axis name and type, accounting for the fact that the
# axis name may contain an underscore
*axis, type_ = k.split("_")
axis_name = "_".join(axis)
if type_ not in sel_parsers:
raise ValueError(
f'Unsupported selection type "{type_}", or invalid key "{k}"'
)
sel[f"{axis_name}_sel"] = sel_parsers[type_](self.selections[k])
return sel
def _parse_range(self, x):
# Parse and validate a range type selection
if not isinstance(x, (list, tuple)) or len(x) > 3 or len(x) < 2:
raise ValueError(
f"Range spec must be a length 2 or 3 list or tuple. Got {x}."
)
for v in x:
if not isinstance(v, int):
raise ValueError(f"All elements of range spec must be ints. Got {x}")
return slice(*x)
def _parse_index(self, x):
# Parse and validate an index type selection
if not isinstance(x, (list, tuple)) or len(x) == 0:
raise ValueError(f"Index spec must be a non-empty list or tuple. Got {x}.")
for v in x:
if not isinstance(v, int):
raise ValueError(f"All elements of index spec must be ints. Got {x}")
return list(x)
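# Hedged illustration, not part of the original source: given the YAML example in the
# class docstring, `_resolve_sel` above is expected to produce the following keyword
# arguments for `from_file`. `stack_range` wins over `stack_index` because the sorted
# keys place "_range" after "_index", so the later assignment overwrites the earlier one.
_example_selections = {
    "freq_range": [256, 512, 4],
    "stack_index": [1, 2, 4, 9, 16, 25, 36, 49, 64],
    "stack_range": [1, 14],
}
# -> {"freq_sel": slice(256, 512, 4), "stack_sel": slice(1, 14)}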
# Define alias for old code
LoadBasicCont = LoadFilesFromParams
class FindFiles(pipeline.TaskBase):
"""Take a glob or list of files specified as a parameter in the
configuration file and pass on to other tasks.
Parameters
----------
files : list or glob
"""
files = config.Property(proptype=_list_or_glob)
def setup(self):
"""Return list of files specified in the parameters."""
if not isinstance(self.files, (list, tuple)):
raise RuntimeError("Argument must be list of files.")
return self.files
class LoadFiles(LoadFilesFromParams):
"""Load data from files passed into the setup routine.
File must be a serialised subclass of :class:`memh5.BasicCont`.
"""
files = None
def setup(self, files):
"""Set the list of files to load.
Parameters
----------
files : list
"""
# Call the baseclass setup to resolve any selections
super().setup()
if not isinstance(files, (list, tuple)):
raise RuntimeError(f'Argument must be list of files. Got "{files}"')
self.files = files
class Save(pipeline.TaskBase):
"""Save out the input, and pass it on.
Assumes that the input has a `to_hdf5` method. Appends a *tag* if there is
a `tag` entry in the attributes, otherwise just uses a count.
Attributes
----------
root : str
Root of the file name to output to.
"""
root = config.Property(proptype=str)
count = 0
def next(self, data):
"""Write out the data file.
Assumes it has an MPIDataset interface.
Parameters
----------
data : mpidataset.MPIDataset
Data to write out.
"""
if "tag" not in data.attrs:
tag = self.count
self.count += 1
else:
tag = data.attrs["tag"]
fname = "%s_%s.h5" % (self.root, str(tag))
data.to_hdf5(fname)
return data
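# Hedged illustration, not part of the original source: with root "output/map" and an input
# whose attrs carry tag "night1", the file name built in `next` above would be
# "output/map_night1.h5"; without a tag the running count is used instead ("output/map_0.h5").
_example_fname = "%s_%s.h5" % ("output/map", "night1")  # -> "output/map_night1.h5"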
class Print(pipeline.TaskBase):
"""Stupid module which just prints whatever it gets. Good for debugging."""
def next(self, input_):
print(input_)
return input_
class LoadBeamTransfer(pipeline.TaskBase):
"""Loads a beam transfer manager from disk.
Attributes
----------
product_directory : str
Path to the saved Beam Transfer products.
"""
product_directory = config.Property(proptype=str)
def setup(self):
"""Load the beam transfer matrices.
Returns
-------
tel : TransitTelescope
Object describing the telescope.
bt : BeamTransfer
BeamTransfer manager.
feed_info : list, optional
Optional list providing additional information about each feed.
"""
import os
from drift.core import beamtransfer
if not os.path.exists(self.product_directory):
raise RuntimeError("BeamTransfers do not exist.")
bt = beamtransfer.BeamTransfer(self.product_directory)
tel = bt.telescope
try:
return tel, bt, tel.feeds
except AttributeError:
return tel, bt
class LoadProductManager(pipeline.TaskBase):
"""Loads a driftscan product manager from disk.
Attributes
----------
product_directory : str
Path to the root of the products. This is the same as the output
directory used by ``drift-makeproducts``.
"""
product_directory = config.Property(proptype=str)
def setup(self):
"""Load the beam transfer matrices.
Returns
-------
manager : ProductManager
Object wrapping the saved driftscan products.
"""
import os
from drift.core import manager
if not os.path.exists(self.product_directory):
raise RuntimeError("Products do not exist.")
# Load ProductManager and Timestream
pm = manager.ProductManager.from_config(self.product_directory)
return pm
class Truncate(task.SingleTask):
"""Precision truncate data prior to saving with bitshuffle compression.
If no configuration is provided, will look for preset values for the
input container. Any properties defined in the config will override the
presets.
If available, each specified dataset will be truncated relative to a
(specified) weight dataset with the truncation increasing the variance up
to the specified maximum in `variance_increase`. If there is no specified
weight dataset then the truncation falls back to using the
`fixed_precision`.
Attributes
----------
dataset : list of str
Datasets to truncate.
weight_dataset : list of str
Datasets to use as inverse variance for truncation precision.
fixed_precision : float
Relative precision to truncate to (default 1e-4).
variance_increase : float
Maximum fractional increase in variance from numerical truncation.
"""
dataset = config.Property(proptype=list, default=None)
weight_dataset = config.Property(proptype=list, default=None)
fixed_precision = config.Property(proptype=float, default=None)
variance_increase = config.Property(proptype=float, default=None)
def _get_params(self, container):
"""Load truncation parameters from config or container defaults."""
if container in TRUNC_SPEC:
self.log.info("Truncating from preset for container {}".format(container))
for key in [
"dataset",
"weight_dataset",
"fixed_precision",
"variance_increase",
]:
attr = getattr(self, key)
if attr is None:
setattr(self, key, TRUNC_SPEC[container][key])
else:
self.log.info("Overriding container default for '{}'.".format(key))
else:
if (
self.dataset is None
or self.fixed_precision is None
or self.variance_increase is None
):
raise pipeline.PipelineConfigError(
"Container {} has no preset values. You must define all of 'dataset', "
"'fixed_precision', and 'variance_increase' properties.".format(
container
)
)
# Factor of 3 for variance over uniform distribution of truncation errors
self.variance_increase *= 3
def process(self, data):
"""Truncate the incoming data.
The truncation is done *in place*.
Parameters
----------
data : containers.ContainerBase
Data to truncate.
Returns
-------
truncated_data : containers.ContainerBase
Truncated data.
"""
# get truncation parameters from config or container defaults
self._get_params(type(data))
if self.weight_dataset is None:
self.weight_dataset = [None] * len(self.dataset)
for dset, wgt in zip(self.dataset, self.weight_dataset):
old_shape = data[dset].local_shape
val = np.ndarray.reshape(data[dset][:], data[dset][:].size) | val.real, self.fixed_precision
).reshape(old_shape)
data[dset][:].imag = bit_truncate_fixed(
val.imag, self.fixed_precision
).reshape(old_shape)
else:
data[dset][:] = bit_truncate_fixed(
val, self.fixed_precision
).reshape(old_shape)
else:
if data[dset][:].shape != data[wgt][:].shape:
raise pipeline.PipelineRuntimeError(
"Dataset and weight arrays must have same shape ({} != {})".format(
data[dset].shape, data[wgt].shape
)
)
invvar = np.ndarray.reshape(data[wgt][:], data[dset][:].size)
if np.iscomplexobj(data[dset]):
data[dset][:].real = bit_truncate_weights(
val.real,
invvar * 2.0 / self.variance_increase,
self.fixed_precision,
).reshape(old_shape)
data[dset][:].imag = bit_truncate_weights(
val.imag,
invvar * 2.0 / self.variance_increase,
self.fixed_precision,
).reshape(old_shape)
else:
data[dset][:] = bit_truncate_weights(
val, invvar / self.variance_increase, self.fixed_precision
).reshape(old_shape)
return data
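# Hedged illustration, not part of the original source: a parameter sketch for the Truncate
# task when the container has no preset in TRUNC_SPEC. Dataset names and numbers are
# hypothetical; `weight_dataset` may be omitted, in which case `fixed_precision` is used.
_example_truncate_params = {
    "dataset": ["vis"],
    "weight_dataset": ["vis_weight"],
    "fixed_precision": 1e-4,     # relative precision when no weights are available
    "variance_increase": 1e-3,   # maximum fractional variance increase from truncation
}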
class SaveModuleVersions(task.SingleTask):
"""Write module versions to a YAML file.
The list of modules should be added to the configuration under key 'save_versions'.
The version strings are written to a YAML file.
Attributes
----------
root : str
Root of the file name to output to.
"""
root = config.Property(proptype=str)
done = True
def setup(self):
"""Save module versions."""
fname = "{}_versions.yml".format(self.root)
f = open(fname, "w")
f.write(yamldump(self.versions))
f.close()
self.done = True
def process(self):
"""Do nothing."""
self.done = True
return
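# Hedged illustration, not part of the original source: per the class docstring above, the
# module list is assumed to come from a `save_versions` entry in the pipeline config, e.g.
#   save_versions: ["numpy", "scipy", "caput", "draco"]
# Module names are hypothetical. With root "run1" the versions would then be written to:
_example_versions_fname = "{}_versions.yml".format("run1")  # -> "run1_versions.yml"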
class SaveConfig(task.SingleTask):
"""Write pipeline config to a text file.
Yaml configuration document is written to a text file.
Attributes
----------
root : str
Root of the file name to output to.
"""
root = config.Property(proptype=str)
done = True
def setup(self):
"""Save module versions."""
fname = "{}_config.yml".format(self.root)
f = open(fname, "w")
f.write(yamldump(self.pipeline_config))
f.close()
self.done = True
def process(self):
"""Do nothing."""
self.done = True
return
def get_telescope(obj):
"""Return a telescope object out of the input (either `ProductManager`,
`BeamTransfer` or `TransitTelescope`).
"""
from drift.core import telescope
try:
return get_beamtransfer(obj).telescope
except RuntimeError:
if isinstance(obj, telescope.TransitTelescope):
return obj
raise RuntimeError("Could not get telescope instance out of %s" % repr(obj))
def get_beamtransfer(obj):
"""Return a BeamTransfer object out of the input (either `ProductManager`,
`BeamTransfer`).
"""
from drift.core import manager, beamtransfer
if isinstance(obj, beamtransfer.BeamTransfer):
return obj
if isinstance(obj, manager.ProductManager):
return obj.beamtransfer
raise RuntimeError("Could not get BeamTransfer instance out of %s" % repr(obj)) | if wgt is None:
if np.iscomplexobj(data[dset]):
data[dset][:].real = bit_truncate_fixed( |
legacy_repository.py | import cgi
import re
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
try:
from html import unescape
except ImportError:
try:
from html.parser import HTMLParser
except ImportError:
from HTMLParser import HTMLParser
unescape = HTMLParser().unescape
from typing import Generator
from typing import Union
import html5lib
import requests
from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache
from cachy import CacheManager
import poetry.packages
from poetry.config import Config
from poetry.locations import CACHE_DIR
from poetry.masonry.publishing.uploader import wheel_file_re
from poetry.packages import Package
from poetry.packages import dependency_from_pep_508
from poetry.packages.utils.link import Link
from poetry.semver import parse_constraint
from poetry.semver import Version
from poetry.semver import VersionConstraint
from poetry.utils._compat import Path
from poetry.utils.helpers import canonicalize_name, get_http_basic_auth
from poetry.version.markers import InvalidMarker
from .pypi_repository import PyPiRepository
class Page:
VERSION_REGEX = re.compile(r"(?i)([a-z0-9_\-.]+?)-(?=\d)([a-z0-9_.!+-]+)")
SUPPORTED_FORMATS = [
".tar.gz",
".whl",
".zip",
".tar.bz2",
".tar.xz",
".tar.Z",
".tar",
]
def __init__(self, url, content, headers):
if not url.endswith("/"):
url += "/"
self._url = url
encoding = None
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
encoding = params["charset"]
self._content = content
if encoding is None:
self._parsed = html5lib.parse(content, namespaceHTMLElements=False)
else:
self._parsed = html5lib.parse(
content, transport_encoding=encoding, namespaceHTMLElements=False
)
@property
def versions(self): # type: () -> Generator[Version]
seen = set()
for link in self.links:
version = self.link_version(link)
if not version:
continue
if version in seen:
continue
seen.add(version)
yield version
@property
def links(self): # type: () -> Generator[Link]
for anchor in self._parsed.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = self.clean_link(urlparse.urljoin(self._url, href))
pyrequire = anchor.get("data-requires-python")
pyrequire = unescape(pyrequire) if pyrequire else None
link = Link(url, self, requires_python=pyrequire)
if link.ext not in self.SUPPORTED_FORMATS:
continue
yield link
def links_for_version(self, version): # type: (Version) -> Generator[Link]
for link in self.links:
if self.link_version(link) == version:
yield link
def link_version(self, link): # type: (Link) -> Union[Version, None]
m = wheel_file_re.match(link.filename)
if m:
version = m.group("ver")
else:
info, ext = link.splitext()
match = self.VERSION_REGEX.match(info)
if not match:
return
version = match.group(2)
try:
version = Version.parse(version)
except ValueError:
return
return version
_clean_re = re.compile(r"[^a-z0-9$&+,/:;=?@.#%_\\|-]", re.I)
def clean_link(self, url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
return self._clean_re.sub(lambda match: "%%%2x" % ord(match.group(0)), url)
class LegacyRepository(PyPiRepository):
def __init__(self, name, url, disable_cache=False):
if name == "pypi":
raise ValueError("The name [pypi] is reserved for repositories")
self._packages = []
self._name = name
self._url = url.rstrip("/")
self._cache_dir = Path(CACHE_DIR) / "cache" / "repositories" / name
self._cache = CacheManager(
{
"default": "releases",
"serializer": "json",
"stores": {
"releases": {"driver": "file", "path": str(self._cache_dir)},
"packages": {"driver": "dict"},
"matches": {"driver": "dict"},
},
}
)
self._session = CacheControl(
requests.session(), cache=FileCache(str(self._cache_dir / "_http"))
)
url_parts = urlparse.urlparse(self._url)
if not url_parts.username:
self._session.auth = get_http_basic_auth(
Config.create("auth.toml"), self.name
)
self._disable_cache = disable_cache
@property
def name(self):
return self._name
def find_packages(
self, name, constraint=None, extras=None, allow_prereleases=False
):
packages = []
if constraint is not None and not isinstance(constraint, VersionConstraint):
constraint = parse_constraint(constraint)
key = name
if constraint:
key = "{}:{}".format(key, str(constraint))
if self._cache.store("matches").has(key):
versions = self._cache.store("matches").get(key)
else:
page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
if page is None:
return []
versions = []
for version in page.versions:
if not constraint or (constraint and constraint.allows(version)):
versions.append(version)
self._cache.store("matches").put(key, versions, 5)
for version in versions:
package = Package(name, version)
package.source_type = "legacy"
package.source_url = self._url
if extras is not None:
package.requires_extras = extras
packages.append(package)
self._log(
"{} packages found for {} {}".format(len(packages), name, str(constraint)),
level="debug",
)
return packages
def package(
self, name, version, extras=None
): # type: (...) -> poetry.packages.Package
"""
Retrieve the release information.
This is a heavy task which takes time.
We have to download a package to get the dependencies.
We also need to download every file matching this release
to get the various hashes.
Note that this will be cached, so subsequent operations
should be much faster.
"""
try:
index = self._packages.index(
poetry.packages.Package(name, version, version)
)
return self._packages[index]
except ValueError:
if extras is None:
extras = []
release_info = self.get_release_info(name, version)
package = poetry.packages.Package(name, version, version)
package.source_type = "legacy"
package.source_url = self._url
package.source_reference = self.name
requires_dist = release_info["requires_dist"] or []
for req in requires_dist:
try:
dependency = dependency_from_pep_508(req)
except InvalidMarker:
# Invalid marker | req = req.split(";")[0]
dependency = dependency_from_pep_508(req)
if dependency.extras:
for extra in dependency.extras:
if extra not in package.extras:
package.extras[extra] = []
package.extras[extra].append(dependency)
if not dependency.is_optional():
package.requires.append(dependency)
# Adding description
package.description = release_info.get("summary", "")
# Adding hashes information
package.hashes = release_info["digests"]
# Activate extra dependencies
for extra in extras:
if extra in package.extras:
for dep in package.extras[extra]:
dep.activate()
package.requires += package.extras[extra]
self._packages.append(package)
return package
def _get_release_info(self, name, version): # type: (str, str) -> dict
page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
if page is None:
raise ValueError('No package named "{}"'.format(name))
data = {
"name": name,
"version": version,
"summary": "",
"requires_dist": [],
"requires_python": [],
"digests": [],
}
links = list(page.links_for_version(Version.parse(version)))
if not links:
raise ValueError(
'No valid distribution links found for package: "{}" version: "{}"'.format(
name, version
)
)
urls = {}
hashes = []
default_link = links[0]
for link in links:
if link.is_wheel:
urls["bdist_wheel"] = link.url
elif link.filename.endswith(".tar.gz"):
urls["sdist"] = link.url
elif (
link.filename.endswith((".zip", ".bz2", ".xz", ".Z", ".tar"))
and "sdist" not in urls
):
urls["sdist"] = link.url
hash = link.hash
if link.hash_name == "sha256":
hashes.append(hash)
data["digests"] = hashes
if not urls:
if default_link.is_wheel:
m = wheel_file_re.match(default_link.filename)
python = m.group("pyver")
platform = m.group("plat")
if python == "py2.py3" and platform == "any":
urls["bdist_wheel"] = default_link.url
elif default_link.filename.endswith(".tar.gz"):
urls["sdist"] = default_link.url
elif (
default_link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls
):
urls["sdist"] = default_link.url
else:
return data
info = self._get_info_from_urls(urls)
data["summary"] = info["summary"]
data["requires_dist"] = info["requires_dist"]
data["requires_python"] = info["requires_python"]
return data
def _download(self, url, dest): # type: (str, str) -> None
r = self._session.get(url, stream=True)
with open(dest, "wb") as f:
for chunk in r.raw.stream(1024):
if chunk:
f.write(chunk)
def _get(self, endpoint): # type: (str) -> Union[Page, None]
url = self._url + endpoint
response = self._session.get(url)
if response.status_code == 404:
return
return Page(url, response.content, response.headers) | # We strip the markers hoping for the best |
parser.go | // Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package feed
import (
"bytes"
"encoding/xml"
"io"
"strings"
"time"
"github.com/miniflux/miniflux/errors"
"github.com/miniflux/miniflux/logger"
"github.com/miniflux/miniflux/model"
"github.com/miniflux/miniflux/reader/atom"
"github.com/miniflux/miniflux/reader/encoding"
"github.com/miniflux/miniflux/reader/json"
"github.com/miniflux/miniflux/reader/rdf"
"github.com/miniflux/miniflux/reader/rss"
"github.com/miniflux/miniflux/timer"
)
// List of feed formats.
const (
FormatRDF = "rdf"
FormatRSS = "rss"
FormatAtom = "atom"
FormatJSON = "json"
FormatUnknown = "unknown"
)
// DetectFeedFormat detect feed format from input data.
func DetectFeedFormat(r io.Reader) string {
defer timer.ExecutionTime(time.Now(), "[Feed:DetectFeedFormat]")
var buffer bytes.Buffer
tee := io.TeeReader(r, &buffer)
decoder := xml.NewDecoder(tee)
decoder.CharsetReader = encoding.CharsetReader
for {
token, _ := decoder.Token()
if token == nil {
break
}
if element, ok := token.(xml.StartElement); ok {
switch element.Name.Local {
case "rss":
return FormatRSS
case "feed":
return FormatAtom
case "RDF":
return FormatRDF
}
}
}
if strings.HasPrefix(strings.TrimSpace(buffer.String()), "{") {
return FormatJSON
}
return FormatUnknown
}
func parseFeed(r io.Reader) (*model.Feed, *errors.LocalizedError) {
defer timer.ExecutionTime(time.Now(), "[Feed:ParseFeed]")
var buffer bytes.Buffer
size, _ := io.Copy(&buffer, r)
if size == 0 {
return nil, errors.NewLocalizedError("This feed is empty")
}
str := stripInvalidXMLCharacters(buffer.String())
reader := strings.NewReader(str)
format := DetectFeedFormat(reader)
reader.Seek(0, io.SeekStart)
switch format {
case FormatAtom:
return atom.Parse(reader)
case FormatRSS:
return rss.Parse(reader)
case FormatJSON:
return json.Parse(reader)
case FormatRDF:
return rdf.Parse(reader)
default:
return nil, errors.NewLocalizedError("Unsupported feed format")
}
}
func st | nput string) string {
return strings.Map(func(r rune) rune {
if isInCharacterRange(r) {
return r
}
logger.Debug("Strip invalid XML characters: %U", r)
return -1
}, input)
}
// Decide whether the given rune is in the XML Character Range, per
// the Char production of http://www.xml.com/axml/testaxml.htm,
// Section 2.2 Characters.
func isInCharacterRange(r rune) (inrange bool) {
return r == 0x09 ||
r == 0x0A ||
r == 0x0D ||
r >= 0x20 && r <= 0xDF77 ||
r >= 0xE000 && r <= 0xFFFD ||
r >= 0x10000 && r <= 0x10FFFF
}
| ripInvalidXMLCharacters(i |
environment_store.go | package etcd
import (
"context"
"encoding/json"
"errors"
"fmt"
v3 "github.com/coreos/etcd/clientv3"
"github.com/coreos/etcd/mvcc/mvccpb"
"github.com/sensu/sensu-go/backend/store"
"github.com/sensu/sensu-go/types"
)
var (
environmentsPathPrefix = "environments"
environmentKeyBuilder = store.NewKeyBuilder(environmentsPathPrefix)
)
func getEnvironmentsPath(org, env string) string {
return environmentKeyBuilder.WithOrg(org).Build(env)
}
// DeleteEnvironment deletes an environment
func (s *Store) DeleteEnvironment(ctx context.Context, env *types.Environment) error {
if err := env.Validate(); err != nil {
return err
}
org := env.Organization
ctx = context.WithValue(ctx, types.OrganizationKey, org)
ctx = context.WithValue(ctx, types.EnvironmentKey, env.Name)
// Validate whether there are any resources referencing the environment
getresp, err := s.client.Txn(ctx).Then(
v3.OpGet(checkKeyBuilder.WithContext(ctx).Build(), v3.WithPrefix(), v3.WithCountOnly()),
v3.OpGet(entityKeyBuilder.WithContext(ctx).Build(), v3.WithPrefix(), v3.WithCountOnly()),
v3.OpGet(handlerKeyBuilder.WithContext(ctx).Build(), v3.WithPrefix(), v3.WithCountOnly()),
v3.OpGet(mutatorKeyBuilder.WithContext(ctx).Build(), v3.WithPrefix(), v3.WithCountOnly()),
).Commit()
if err != nil {
return err
}
for _, r := range getresp.Responses {
if r.GetResponseRange().Count > 0 {
return errors.New("environment is not empty")
}
}
// Validate that there are no roles referencing the environment
roles, err := s.GetRoles(ctx)
if err != nil |
for _, role := range roles {
for _, rule := range role.Rules {
if rule.Organization == org && rule.Environment == env.Name {
return fmt.Errorf("environment is not empty; role '%s' references it", role.Name)
}
}
}
_, err = s.client.Delete(ctx, getEnvironmentsPath(org, env.Name), v3.WithPrefix())
return err
}
// GetEnvironment returns a single environment
func (s *Store) GetEnvironment(ctx context.Context, org, env string) (*types.Environment, error) {
resp, err := s.client.Get(
ctx,
getEnvironmentsPath(org, env),
v3.WithLimit(1),
)
if err != nil {
return nil, err
}
if len(resp.Kvs) != 1 {
// DNE, but not an error
return nil, nil
}
envs, err := unmarshalEnvironments(resp.Kvs)
if err != nil {
return &types.Environment{}, err
}
return envs[0], nil
}
// GetEnvironments returns all Environments.
func (s *Store) GetEnvironments(ctx context.Context, org string) ([]*types.Environment, error) {
// Support "*" as a wildcard (defined by constant types.OrganizationTypeAll)
if org == types.OrganizationTypeAll {
org = ""
}
resp, err := s.client.Get(ctx, getEnvironmentsPath(org, ""), v3.WithPrefix())
if err != nil {
return []*types.Environment{}, err
}
return unmarshalEnvironments(resp.Kvs)
}
// UpdateEnvironment updates an environment
func (s *Store) UpdateEnvironment(ctx context.Context, env *types.Environment) error {
if err := env.Validate(); err != nil {
return err
}
bytes, err := json.Marshal(env)
if err != nil {
return err
}
org := env.Organization
// We need to prepare a transaction to verify that the organization under
// which we are creating this environment exists
cmp := v3.Compare(v3.Version(getOrganizationsPath(org)), ">", 0)
req := v3.OpPut(getEnvironmentsPath(org, env.Name), string(bytes))
res, err := s.client.Txn(ctx).If(cmp).Then(req).Commit()
if err != nil {
return err
}
if !res.Succeeded {
return fmt.Errorf(
"the organization %s does not exist, cannot create the environment %s",
org, env.Name,
)
}
return nil
}
func unmarshalEnvironments(kvs []*mvccpb.KeyValue) ([]*types.Environment, error) {
s := make([]*types.Environment, len(kvs))
for i, kv := range kvs {
env := &types.Environment{}
s[i] = env
if err := json.Unmarshal(kv.Value, env); err != nil {
return nil, err
}
}
return s, nil
}
| {
return err
} |
main.rs | //! This implementation isn't based on anything in particular, although it's probably informed by a
//! lot of Rust's JSON encoding code. It should be very fast (both encoding and decoding the toy
//! example here takes under a microsecond on my machine) and tries to avoid unnecessary allocation.
//!
//! In a real implementation, most of this would be private, with only a few visible functions, and
//! there would be somewhat nicer signatures (in particular, the fact that `ParseContext` has to be
//! mutable would get annoying in real code pretty quickly, so it would probably be split out).
//!
//! It supports the ability to read individual atoms, not just lists, although whether this is
//! useful is questionable.
//!
//! Caveats: Does not support symbols vs. non-symbols (it wouldn't be hard, but it would greatly
//! complicate setting up our test structure since we'd have to force it to go through functions
//! that checked to make sure `Symbol`s couldn't have spaces, or slow down our parser by checking
//! for this information each time, which is obnoxious). Does not support string escaping, because
//! the decoding technique doesn't allocate extra space for strings. Does support numbers, but
//! only float types (supporting more types is possible but would complicate the code
//! significantly).
#![feature(rustc_private)]
#![feature(test)]
extern crate arena;
extern crate test;
use arena::TypedArena;
use std::io;
use std::num::FpCategory;
use std::str::FromStr;
use self::SExp::*;
use self::Error::*;
use self::Token::*;
/// The actual `SExp` structure. Supports `f64`s, lists, and string literals. Note that it takes
/// everything by reference, rather than owning it--this is mostly done just so we can allocate
/// `SExp`s statically (since we don't have to call `Vec`). It does complicate the code a bit,
/// requiring us to have a `ParseContext` that holds an arena where lists are actually allocated.
#[derive(PartialEq,Debug)]
enum SExp<'a> {
/// Float literal: 0.5
F64(f64),
/// List of SExps: ( a b c)
List(&'a [SExp<'a>]),
/// Plain old string literal: "abc"
Str(&'a str),
}
/// Errors that can be thrown by the parser.
#[derive(PartialEq, Debug)]
enum Error {
/// If the float is `NaN`, `Infinity`, etc.
NoReprForFloat,
/// Missing an end double quote during string parsing
UnterminatedStringLiteral,
/// Some other kind of I/O error
IoError,
/// ) appeared where it shouldn't (usually as the first token)
IncorrectCloseDelimiter,
/// Usually means a missing ), but could also mean there were no tokens at all.
UnexpectedEOF,
/// More tokens after the list is finished, or after a literal if there is no list.
ExpectedEOF,
}
impl From<io::Error> for Error {
fn from(_err: io::Error) -> Error {
Error::IoError
}
}
/// Tokens returned from the token stream.
#[derive(PartialEq, Debug)]
enum Token<'a> {
/// Left parenthesis
ListStart,
/// Right parenthesis
ListEnd,
/// String or float literal, quotes removed.
Literal(SExp<'a>),
/// Stream is out of tokens.
EOF,
}
/// An iterator over a string that yields a stream of Tokens.
///
/// Implementation note: it probably seems weird to store first, rest, AND string, since they should
/// all be derivable from string. But see below.
#[derive(Copy, Clone, Debug)]
struct Tokens<'a> {
/// The part of the string that still needs to be parsed
string: &'a str,
/// The first character to parse
first: Option<char>,
/// The rest of the string after the first character
rest: &'a str,
}
impl<'a> Tokens<'a> {
/// Initialize a token stream for a given string.
fn new(string: &str) -> Tokens {
let mut chars = string.chars();
match chars.next() {
Some(ch) => {
Tokens {
string: string,
first: Some(ch),
rest: chars.as_str(),
}
}
None => {
Tokens {
string: string,
first: None,
rest: string,
}
}
}
}
/// Utility function to update information in the iterator. It might not be performant to keep
/// rest cached, but there are times where we don't know exactly what string is (at least, not
/// in a way that we can *safely* reconstruct it without allocating), so we keep both here.
/// With some unsafe code we could probably get rid of one of them (and maybe first, too).
fn update(&mut self, string: &'a str) {
self.string = string;
let mut chars = self.string.chars();
if let Some(ch) = chars.next() {
self.first = Some(ch);
self.rest = chars.as_str();
} else {
self.first = None;
};
}
/// This is where the lexing happens. Note that it does not handle string escaping.
fn next_token(&mut self) -> Result<Token<'a>, Error> {
loop {
match self.first {
// List start
Some('(') => {
self.update(self.rest);
return Ok(ListStart);
}
// List end
Some(')') => {
self.update(self.rest);
return Ok(ListEnd);
}
// Quoted literal start
Some('"') => {
// Split the string at most once. This lets us get a
// reference to the next piece of the string without having
// to loop through the string again.
let mut iter = self.rest.splitn(2, '"');
// The first time splitn is run it will never return None, so this is safe.
let str = iter.next().unwrap();
match iter.next() {
// Extract the interior of the string without allocating. If we want to
// handle string escaping, we would have to allocate at some point though.
Some(s) => {
self.update(s);
return Ok(Literal(Str(str)));
}
None => return Err(UnterminatedStringLiteral),
}
}
// Plain old literal start
Some(c) => {
// Skip whitespace. This could probably be made more efficient.
if c.is_whitespace() {
self.update(self.rest);
continue;
}
// Since we've exhausted all other possibilities, this must be a real literal.
// Unlike the quoted case, it's not an error to encounter EOF before whitespace.
let mut end_ch = None;
let str = {
let mut iter = self.string.splitn(2, |ch: char| {
let term = ch == ')' || ch == '(';
if term {
end_ch = Some(ch)
}
term || ch.is_whitespace()
});
// The first time splitn is run it will never return None, so this is safe.
let str = iter.next().unwrap();
self.rest = iter.next().unwrap_or("");
str
};
match end_ch {
// self.string will be incorrect in the Some(_) case. The only reason it's
// okay is because the next time next() is called in this case, we know it
// will be '(' or ')', so it will never reach any code that actually looks
// at self.string. In a real implementation this would be enforced by
// visibility rules.
Some(_) => self.first = end_ch,
None => self.update(self.rest),
}
return Ok(Literal(parse_literal(str)));
}
None => return Ok(EOF),
}
}
}
}
/// This is not the most efficient way to do this, because we end up going over numeric literals
/// twice, but it avoids having to write our own number parsing logic.
fn parse_literal(literal: &str) -> SExp {
match literal.bytes().next() {
Some(b'0'...b'9') |
Some(b'-') => {
match f64::from_str(literal) {
Ok(f) => F64(f),
Err(_) => Str(literal),
}
}
_ => Str(literal),
}
}
/// Parse context, holds information required by the parser (and owns any allocations it makes)
struct ParseContext<'a> {
/// The string being parsed. Not required, but convenient.
string: &'a str,
/// Arena holding any allocations made by the parser.
arena: Option<TypedArena<Vec<SExp<'a>>>>,
/// Stored in the parse context so it can be reused once allocated.
stack: Vec<Vec<SExp<'a>>>,
}
impl<'a> ParseContext<'a> {
/// Create a new parse context from a given string
fn new(string: &'a str) -> ParseContext<'a> {
ParseContext {
string: string,
arena: None,
stack: Vec::new(),
}
}
}
impl<'a> SExp<'a> {
/// Serialize a SExp.
fn encode<T: io::Write>(&self, writer: &mut T) -> Result<(), Error> {
match *self {
F64(f) => {
match f.classify() {
// We don't want to identify NaN, Infinity, etc. as floats.
FpCategory::Normal | FpCategory::Zero => {
try!(write!(writer, "{}", f));
Ok(())
}
_ => Err(Error::NoReprForFloat),
}
}
List(ref l) => {
// Writing a list is very straightforward--write a left parenthesis, then
// recursively call encode on each member, and then write a right parenthesis. The
// only reason the logic is as long as it is is to make sure we don't write
// unnecessary spaces between parentheses in the zero or one element cases.
try!(write!(writer, "{}", '('));
let mut iter = l.iter();
if let Some(sexp) = iter.next() {
try!(sexp.encode(writer));
for sexp in iter {
try!(write!(writer, "{}", ' '));
try!(sexp.encode(writer));
}
}
try!(write!(writer, "{}", ')'));
Ok(())
}
Str(s) => {
try!(write!(writer, "\"{}\"", s));
Ok(())
}
}
}
/// Deserialize a SExp.
fn parse(ctx: &'a mut ParseContext<'a>) -> Result<SExp<'a>, Error> {
ctx.arena = Some(TypedArena::new());
// Hopefully this unreachable! gets optimized out, because it should literally be
// unreachable.
let arena = match ctx.arena {
Some(ref mut arena) => arena,
None => unreachable!(),
};
let ParseContext { string, ref mut stack, .. } = *ctx;
// Make sure the stack is cleared--we keep it in the context to avoid unnecessary
// reallocation between parses (if you need to remember old parse information for a new
// list, you can pass in a new context).
stack.clear();
let mut tokens = Tokens::new(string);
// First, we check the very first token to see if we're parsing a full list. It
// simplifies parsing a lot in the subsequent code if we can assume that.
let next = tokens.next_token();
let mut list = match try!(next) {
ListStart => Vec::new(),
Literal(s) => {
return if try!(tokens.next_token()) == EOF {
Ok(s)
} else {
Err(ExpectedEOF)
}
}
ListEnd => return Err(IncorrectCloseDelimiter),
EOF => return Err(UnexpectedEOF),
};
// We know we're in a list if we got this far.
loop {
let tok = tokens.next_token();
match try!(tok) {
ListStart => {
// We push the previous context onto our stack when we start reading a new list.
stack.push(list);
list = Vec::new()
}
Literal(s) => list.push(s), // Plain old literal, push it onto the current list
ListEnd => {
match stack.pop() { // Pop the old context off the stack on list end.
Some(mut l) => {
// We allocate a slot for the current list in our parse context (needed
// for safety) before pushing it onto its parent list.
l.push(List(&*arena.alloc(list)));
// Now reset the current list to the parent list
list = l;
}
// There was nothing on the stack, so we're at the end of the topmost list.
// The check to make sure there are no more tokens is required for
// correctness.
None => {
return match try!(tokens.next_token()) {
EOF => Ok(List(&*arena.alloc(list))),
_ => Err(ExpectedEOF),
}
}
}
}
// We encountered an EOF before the list ended--that's an error.
EOF => return Err(UnexpectedEOF),
}
}
}
/// Convenience method for the common case where you just want to encode a SExp as a String.
fn buffer_encode(&self) -> Result<String, Error> {
let mut m = Vec::new();
try!(self.encode(&mut m));
// Because encode() only ever writes valid UTF-8, we can safely skip the secondary check we
// normally have to do when converting from Vec<u8> to String. If we didn't know that the
// buffer was already UTF-8, we'd want to call container_as_str() here.
unsafe { Ok(String::from_utf8_unchecked(m)) }
}
}
const SEXP_STRUCT: SExp<'static> =
List(&[List(&[Str("data"), Str("quoted data"), F64(123.), F64(4.5)]),
List(&[Str("data"),
List(&[Str("!@#"), List(&[F64(4.5)]), Str("(more"), Str("data)")])])]);
fn try_encode() -> Result<String, Error> {
SEXP_STRUCT.buffer_encode()
}
const SEXP_STRING_IN: &'static str = r#"((data "quoted data" 123 4.5)
(data (!@# (4.5) "(more" "data)")))"#;
fn try_decode<'a>(ctx: &'a mut ParseContext<'a>) -> Result<SExp<'a>, Error> |
fn main() {
println!("{:?}", try_encode());
let ctx = &mut ParseContext::new(SEXP_STRING_IN);
println!("{:?}", try_decode(ctx));
}
#[bench]
fn bench_decode(b: &mut test::Bencher) {
b.iter(|| {
let ctx = &mut ParseContext::new(SEXP_STRING_IN);
assert!(try_decode(ctx).is_ok());
})
}
#[bench]
fn bench_encode(b: &mut test::Bencher) {
b.iter(|| {
assert!(try_encode().is_ok());
})
}
#[test]
fn test_sexp_encode() {
const SEXP_STRING: &'static str =
r#"(("data" "quoted data" 123 4.5) ("data" ("!@#" (4.5) "(more" "data)")))"#;
assert_eq!(Ok(SEXP_STRING.to_string()), try_encode());
}
#[test]
fn test_sexp_decode() {
let ctx = &mut ParseContext::new(SEXP_STRING_IN);
assert_eq!(Ok(SEXP_STRUCT), try_decode(ctx));
}
| {
SExp::parse(ctx)
} |
archive.go | package repository
import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"gitlab.com/gitlab-org/gitaly/internal/command"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/gitaly/service/commit"
"gitlab.com/gitlab-org/gitaly/internal/helper"
"gitlab.com/gitlab-org/gitaly/internal/log"
"gitlab.com/gitlab-org/gitaly/internal/storage"
"gitlab.com/gitlab-org/gitaly/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/streamio"
"gitlab.com/gitlab-org/labkit/correlation"
)
type archiveParams struct {
ctx context.Context
writer io.Writer
in *gitalypb.GetArchiveRequest
compressCmd *exec.Cmd
format string
archivePath string
exclude []string
internalCfg []byte
tlsCfg []byte
binDir string
loggingDir string
}
func (s *server) GetArchive(in *gitalypb.GetArchiveRequest, stream gitalypb.RepositoryService_GetArchiveServer) error {
ctx := stream.Context()
compressCmd, format := parseArchiveFormat(in.GetFormat())
repo := s.localrepo(in.GetRepository())
repoRoot, err := repo.Path()
if err != nil {
return err
}
path, err := storage.ValidateRelativePath(repoRoot, string(in.GetPath()))
if err != nil {
return helper.ErrInvalidArgument(err)
}
exclude := make([]string, len(in.GetExclude()))
for i, ex := range in.GetExclude() {
exclude[i], err = storage.ValidateRelativePath(repoRoot, string(ex))
if err != nil {
return helper.ErrInvalidArgument(err)
}
}
if err := validateGetArchiveRequest(in, format, path); err != nil {
return err
}
if err := s.validateGetArchivePrecondition(ctx, repo, in.GetCommitId(), path, exclude); err != nil {
return err
}
if in.GetElidePath() {
// `git archive <commit ID>:<path>` expects exclusions to be relative to path
pathSlash := path + string(os.PathSeparator)
for i := range exclude {
if !strings.HasPrefix(exclude[i], pathSlash) {
return helper.ErrInvalidArgumentf("invalid exclude: %q is not a subdirectory of %q", exclude[i], path)
}
exclude[i] = exclude[i][len(pathSlash):]
}
}
writer := streamio.NewWriter(func(p []byte) error {
return stream.Send(&gitalypb.GetArchiveResponse{Data: p})
})
gitlabConfig, err := json.Marshal(s.cfg.Gitlab)
if err != nil {
return err
}
tlsCfg, err := json.Marshal(s.cfg.TLS)
if err != nil {
return err
}
return s.handleArchive(archiveParams{
ctx: ctx,
writer: writer,
in: in,
compressCmd: compressCmd,
format: format,
archivePath: path,
exclude: exclude,
internalCfg: gitlabConfig,
tlsCfg: tlsCfg,
binDir: s.binDir,
loggingDir: s.loggingCfg.Dir,
})
}
func parseArchiveFormat(format gitalypb.GetArchiveRequest_Format) (*exec.Cmd, string) {
switch format {
case gitalypb.GetArchiveRequest_TAR:
return nil, "tar"
case gitalypb.GetArchiveRequest_TAR_GZ:
return exec.Command("gzip", "-c", "-n"), "tar"
case gitalypb.GetArchiveRequest_TAR_BZ2:
return exec.Command("bzip2", "-c"), "tar"
case gitalypb.GetArchiveRequest_ZIP:
return nil, "zip"
}
return nil, ""
}
func | (in *gitalypb.GetArchiveRequest, format string, path string) error {
if err := git.ValidateRevision([]byte(in.GetCommitId())); err != nil {
return helper.ErrInvalidArgumentf("invalid commitId: %v", err)
}
if len(format) == 0 {
return helper.ErrInvalidArgumentf("invalid format")
}
return nil
}
func (s *server) validateGetArchivePrecondition(
ctx context.Context,
repo git.RepositoryExecutor,
commitID string,
path string,
exclude []string,
) error {
c, err := s.catfileCache.BatchProcess(ctx, repo)
if err != nil {
return err
}
f := commit.NewTreeEntryFinder(c)
if path != "." {
if ok, err := findGetArchivePath(ctx, f, commitID, path); err != nil {
return err
} else if !ok {
return helper.ErrPreconditionFailedf("path doesn't exist")
}
}
for i, exclude := range exclude {
if ok, err := findGetArchivePath(ctx, f, commitID, exclude); err != nil {
return err
} else if !ok {
return helper.ErrPreconditionFailedf("exclude[%d] doesn't exist", i)
}
}
return nil
}
func findGetArchivePath(ctx context.Context, f *commit.TreeEntryFinder, commitID, path string) (ok bool, err error) {
treeEntry, err := f.FindByRevisionAndPath(ctx, commitID, path)
if err != nil {
return false, err
}
if treeEntry == nil || len(treeEntry.Oid) == 0 {
return false, nil
}
return true, nil
}
func (s *server) handleArchive(p archiveParams) error {
var args []string
pathspecs := make([]string, 0, len(p.exclude)+1)
if !p.in.GetElidePath() {
// git archive [options] <commit ID> -- <path> [exclude*]
args = []string{p.in.GetCommitId()}
pathspecs = append(pathspecs, p.archivePath)
} else if p.archivePath != "." {
// git archive [options] <commit ID>:<path> -- [exclude*]
args = []string{p.in.GetCommitId() + ":" + p.archivePath}
} else {
// git archive [options] <commit ID> -- [exclude*]
args = []string{p.in.GetCommitId()}
}
for _, exclude := range p.exclude {
pathspecs = append(pathspecs, ":(exclude)"+exclude)
}
env := []string{
fmt.Sprintf("GL_REPOSITORY=%s", p.in.GetRepository().GetGlRepository()),
fmt.Sprintf("GL_PROJECT_PATH=%s", p.in.GetRepository().GetGlProjectPath()),
fmt.Sprintf("GL_INTERNAL_CONFIG=%s", p.internalCfg),
fmt.Sprintf("GITALY_TLS=%s", p.tlsCfg),
fmt.Sprintf("CORRELATION_ID=%s", correlation.ExtractFromContext(p.ctx)),
fmt.Sprintf("%s=%s", log.GitalyLogDirEnvKey, p.loggingDir),
}
var config []git.ConfigPair
if p.in.GetIncludeLfsBlobs() {
binary := filepath.Join(p.binDir, "gitaly-lfs-smudge")
config = append(config, git.ConfigPair{Key: "filter.lfs.smudge", Value: binary})
}
archiveCommand, err := s.gitCmdFactory.New(p.ctx, p.in.GetRepository(), git.SubCmd{
Name: "archive",
Flags: []git.Option{git.ValueFlag{"--format", p.format}, git.ValueFlag{"--prefix", p.in.GetPrefix() + "/"}},
Args: args,
PostSepArgs: pathspecs,
}, git.WithEnv(env...), git.WithConfig(config...))
if err != nil {
return err
}
if p.compressCmd != nil {
command, err := command.New(p.ctx, p.compressCmd, archiveCommand, p.writer, nil)
if err != nil {
return err
}
if err := command.Wait(); err != nil {
return err
}
} else if _, err = io.Copy(p.writer, archiveCommand); err != nil {
return err
}
return archiveCommand.Wait()
}
| validateGetArchiveRequest |
temp_project.rs | use std::env;
use std::ffi::OsStr;
use std::fs::File;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use serde_json;
use notion_core::path;
use test_support::{self, paths, paths::PathExt, process::ProcessBuilder};
// catalog.toml
#[derive(PartialEq, Clone)]
pub struct FileBuilder {
path: PathBuf,
contents: String,
}
impl FileBuilder {
pub fn new(path: PathBuf, contents: &str) -> FileBuilder {
FileBuilder {
path,
contents: contents.to_string(),
}
}
pub fn build(&self) {
self.dirname().mkdir_p();
let mut file = File::create(&self.path)
.unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
ok_or_panic!{ file.write_all(self.contents.as_bytes()) };
}
fn dirname(&self) -> &Path {
self.path.parent().unwrap()
}
}
#[must_use]
pub struct TempProjectBuilder {
root: TempProject,
files: Vec<FileBuilder>,
}
impl TempProjectBuilder {
/// Root of the project, ex: `/path/to/cargo/target/smoke_test/t0/foo`
pub fn root(&self) -> PathBuf {
self.root.root()
}
pub fn new(root: PathBuf) -> TempProjectBuilder {
TempProjectBuilder {
root: TempProject { root },
files: vec![],
}
}
| let package_file = package_json_file(self.root());
self.files.push(FileBuilder::new(package_file, contents));
self
}
/// Create the project
pub fn build(self) -> TempProject {
// First, clean the temporary project directory if it already exists
self.rm_root();
// Create the empty directory
self.root.root().mkdir_p();
// make sure these directories exist and are empty
ok_or_panic!(path::node_cache_dir()).ensure_empty();
ok_or_panic!(path::shim_dir()).ensure_empty();
ok_or_panic!(path::node_inventory_dir()).ensure_empty();
ok_or_panic!(path::yarn_inventory_dir()).ensure_empty();
ok_or_panic!(path::package_inventory_dir()).ensure_empty();
ok_or_panic!(path::node_image_root_dir()).ensure_empty();
ok_or_panic!(path::yarn_image_root_dir()).ensure_empty();
ok_or_panic!(path::user_toolchain_dir()).ensure_empty();
// and these files do not exist
ok_or_panic!(path::notion_file()).rm();
ok_or_panic!(path::launchbin_file()).rm();
ok_or_panic!(path::launchscript_file()).rm();
ok_or_panic!(path::user_config_file()).rm();
ok_or_panic!(path::user_platform_file()).rm();
// write files
for file_builder in self.files {
file_builder.build();
}
let TempProjectBuilder { root, .. } = self;
root
}
fn rm_root(&self) {
self.root.root().rm_rf()
}
}
// files and dirs in the temporary project
fn package_json_file(mut root: PathBuf) -> PathBuf {
root.push("package.json");
root
}
pub struct TempProject {
root: PathBuf,
}
impl TempProject {
/// Root of the project, ex: `/path/to/cargo/target/integration_test/t0/foo`
pub fn root(&self) -> PathBuf {
self.root.clone()
}
/// Create a `ProcessBuilder` to run a program in the project.
/// Example:
/// assert_that(
/// p.process(&p.bin("foo")),
/// execs().with_stdout("bar\n"),
/// );
pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder {
let mut p = test_support::process::process(program);
p.cwd(self.root())
// setup the Notion environment
.env_remove("NOTION_NODE_VERSION")
.env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows
p
}
/// Create a `ProcessBuilder` to run notion.
/// Arguments can be separated by spaces.
/// Example:
/// assert_that(p.notion("use node 9.5"), execs());
pub fn notion(&self, cmd: &str) -> ProcessBuilder {
let mut p = self.process(¬ion_exe());
split_and_add_args(&mut p, cmd);
p
}
/// Create a `ProcessBuilder` to run Node.
pub fn node(&self, cmd: &str) -> ProcessBuilder {
let mut p = self.process(&node_exe());
split_and_add_args(&mut p, cmd);
p
}
/// Create a `ProcessBuilder` to run Yarn.
pub fn yarn(&self, cmd: &str) -> ProcessBuilder {
let mut p = self.process(&yarn_exe());
split_and_add_args(&mut p, cmd);
p
}
/// Verify that the input Node version has been fetched.
pub fn node_version_is_fetched(&self, version: &str) -> bool {
let distro_file_name = path::node_distro_file_name(version);
let inventory_dir = ok_or_panic!{ path::node_inventory_dir() };
inventory_dir.join(distro_file_name).exists()
}
/// Verify that the input Node version has been unpacked.
pub fn node_version_is_unpacked(&self, version: &str, npm_version: &str) -> bool {
let unpack_dir = ok_or_panic!{ path::node_image_bin_dir(version, npm_version) };
unpack_dir.exists()
}
/// Verify that the input Node version has been installed.
pub fn assert_node_version_is_installed(&self, version: &str, npm_version: &str) -> () {
let user_platform = ok_or_panic!{ path::user_platform_file() };
let platform_contents = read_file_to_string(user_platform);
let json_contents: serde_json::Value =
serde_json::from_str(&platform_contents).expect("could not parse platform.json");
assert_eq!(json_contents["node"]["runtime"], version);
assert_eq!(json_contents["node"]["npm"], npm_version);
}
/// Verify that the input Yarn version has been fetched.
pub fn yarn_version_is_fetched(&self, version: &str) -> bool {
let distro_file_name = path::yarn_distro_file_name(version);
let inventory_dir = ok_or_panic!{ path::yarn_inventory_dir() };
inventory_dir.join(distro_file_name).exists()
}
/// Verify that the input Yarn version has been unpacked.
pub fn yarn_version_is_unpacked(&self, version: &str) -> bool {
let unpack_dir = ok_or_panic!{ path::yarn_image_dir(version) };
unpack_dir.exists()
}
/// Verify that the input Yarn version has been installed.
pub fn assert_yarn_version_is_installed(&self, version: &str) -> () {
let user_platform = ok_or_panic!{ path::user_platform_file() };
let platform_contents = read_file_to_string(user_platform);
let json_contents: serde_json::Value =
serde_json::from_str(&platform_contents).expect("could not parse platform.json");
assert_eq!(json_contents["yarn"], version);
}
}
// Generates a temporary project environment
pub fn temp_project() -> TempProjectBuilder {
TempProjectBuilder::new(paths::root().join("temp-project"))
}
// Path to compiled executables
pub fn cargo_dir() -> PathBuf {
env::var_os("CARGO_BIN_PATH")
.map(PathBuf::from)
.or_else(|| {
env::current_exe().ok().map(|mut path| {
path.pop();
if path.ends_with("deps") {
path.pop();
}
path
})
})
.unwrap_or_else(|| panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test"))
}
fn notion_exe() -> PathBuf {
cargo_dir().join(format!("notion{}", env::consts::EXE_SUFFIX))
}
fn node_exe() -> PathBuf {
cargo_dir().join(format!("node{}", env::consts::EXE_SUFFIX))
}
fn yarn_exe() -> PathBuf {
cargo_dir().join(format!("yarn{}", env::consts::EXE_SUFFIX))
}
fn split_and_add_args(p: &mut ProcessBuilder, s: &str) {
for arg in s.split_whitespace() {
if arg.contains('"') || arg.contains('\'') {
panic!("shell-style argument parsing is not supported")
}
p.arg(arg);
}
}
fn read_file_to_string(file_path: PathBuf) -> String {
let mut contents = String::new();
let mut file = ok_or_panic!{ File::open(file_path) };
ok_or_panic!{ file.read_to_string(&mut contents) };
contents
} | /// Set the package.json for the temporary project (chainable)
pub fn package_json(mut self, contents: &str) -> Self { |
Types.py | # coding: utf-8
"""Python data types for IB Flex format XML data.
These class definitions are introspected by ibflex.parser to type-convert
IB data. They're dataclasses, made immutable by passing `Frozen=True` to the
class decorator. Class attributes are annotated with PEP 484 type hints.
Except for the top-level XML elements, i.e. <FlexQueryResponse>,
<FlexStatements>, and <FlexStatement>, the Flex format cleanly differentiates
between data-bearing elements and container elements. Data elements hold
their values in XML element attributes; container elements are sequences
of child elements (usually data elements, but sometimes other containers).
XML element attributes are represented by class attributes hinted with the
Python type to which their values should be converted. Almost all are marked
`Optional`, since Flex report configuration allows any of them to be included
or omitted individually. Default value is `None` for a single value, or an
empty tuple for a sequence.
Specifically defined enums are an exception; the parser handles missing values
for them, so you shouldn't specify a default value. The enums therefore need
to come first in the class definition to avoid offending dataclass.
Some data elements have XML attributes whose values are sequences delimited by
commas or semicolons. These are represented by class attributes hinted as
a variable-length `Tuple` of their sequence item type (`str` or an Enum type).
XML container elements are represented as variable-length `Tuple` of contained
child type.
TODO - need types for:
FdicInsuredDepositsByBank
ComplexPositions
HKIPOSubscriptionActivity
PendingExcercises
FxTransactions
UnbookedTrades
RoutingCommissions
IBGNoteTransactions
Adjustments
SoftDollars
CFDCharges
SLBOpenContracts
HKIPOOpenSubscriptions
"""
# PEP 563 compliance
# https://www.python.org/dev/peps/pep-0563/#resolving-type-hints-at-runtime
from __future__ import annotations
__all__ = [
"FlexElement",
"FlexQueryResponse",
"FlexStatement",
"AccountInformation",
"ChangeInNAV",
"MTMPerformanceSummaryUnderlying",
"EquitySummaryByReportDateInBase",
"MTDYTDPerformanceSummaryUnderlying",
"CashReportCurrency",
"FIFOPerformanceSummaryUnderlying",
"NetStockPosition",
"UnsettledTransfer",
"UnbundledCommissionDetail",
"StatementOfFundsLine",
"ChangeInPositionValue",
"OpenPosition",
"FxLot",
"Trade",
"TradeConfirm",
"OptionEAE",
"TradeTransfer",
"TierInterestDetail",
"HardToBorrowDetail",
"InterestAccrualsCurrency",
"SLBActivity",
"Transfer",
"CorporateAction",
"CashTransaction",
"ChangeInDividendAccrual",
"OpenDividendAccrual",
"SecurityInfo",
"ConversionRate",
"PriorPeriodPosition",
"ClientFee",
"ClientFeesDetail",
"SalesTax",
"DebitCardActivity",
"SymbolSummary",
"Order"
]
import datetime
import decimal
from dataclasses import dataclass, astuple
from typing import Tuple, Optional
from ibflex import enums
@dataclass(frozen=True)
class FlexElement:
""" Base class for data element types """
def __iter__(self):
return iter(astuple(self))
def items(self):
for attr, val in self.__dict__.items():
yield attr, val
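# Usage sketch: because FlexElement defines __iter__ via dataclasses.astuple()
# and items() via __dict__, a parsed element can be unpacked positionally or
# walked like a mapping. `elem` below stands for any parsed data element.
#
#   values = tuple(elem)               # all field values, in declaration order
#   for name, value in elem.items():   # (attribute name, converted value) pairs
#       print(name, value)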
@dataclass(frozen=True)
class FlexQueryResponse(FlexElement):
""" Root element """
queryName: str
type: str
FlexStatements: Tuple["FlexStatement", ...]
def __repr__(self):
repr = (
f"{type(self).__name__}("
f"queryName={self.queryName!r}, "
f"type={self.type!r}, "
f"len(FlexStatements)={len(self.FlexStatements)}"
")"
)
return repr
@dataclass(frozen=True)
class FlexStatement(FlexElement):
""" Wrapped in <FlexStatements> """
accountId: str
fromDate: datetime.date
toDate: datetime.date
period: str
whenGenerated: datetime.datetime
AccountInformation: Optional["_AccountInformation"] = None
ChangeInNAV: Optional["_ChangeInNAV"] = None
CashReport: Tuple["CashReportCurrency", ...] = ()
MTDYTDPerformanceSummary: Tuple["MTDYTDPerformanceSummaryUnderlying", ...] = ()
MTMPerformanceSummaryInBase: Tuple["MTMPerformanceSummaryUnderlying", ...] = ()
EquitySummaryInBase: Tuple["EquitySummaryByReportDateInBase", ...] = ()
FIFOPerformanceSummaryInBase: Tuple["FIFOPerformanceSummaryUnderlying", ...] = ()
FdicInsuredDepositsByBank: Tuple = () # TODO
StmtFunds: Tuple["StatementOfFundsLine", ...] = ()
ChangeInPositionValues: Tuple["ChangeInPositionValue", ...] = ()
OpenPositions: Tuple["OpenPosition", ...] = ()
NetStockPositionSummary: Tuple["NetStockPosition", ...] = ()
ComplexPositions: Tuple = () # TODO
FxPositions: Tuple["FxLot", ...] = () # N.B. FXLot wrapped in FxLots
Trades: Tuple["Trade", ...] = ()
HKIPOSubscriptionActivity: Tuple = () # TODO
TradeConfirms: Tuple["TradeConfirm", ...] = ()
TransactionTaxes: Tuple = ()
OptionEAE: Tuple["_OptionEAE", ...] = ()
# Not a typo - they really spell it "Excercises"
PendingExcercises: Tuple = () # TODO
TradeTransfers: Tuple["TradeTransfer", ...] = ()
FxTransactions: Tuple = () # TODO
UnbookedTrades: Tuple = () # TODO
RoutingCommissions: Tuple = () # TODO
IBGNoteTransactions: Tuple = () # TODO
UnsettledTransfers: Tuple["UnsettledTransfer", ...] = ()
UnbundledCommissionDetails: Tuple["UnbundledCommissionDetail", ...] = ()
Adjustments: Tuple = () # TODO
PriorPeriodPositions: Tuple["PriorPeriodPosition", ...] = ()
CorporateActions: Tuple["CorporateAction", ...] = ()
ClientFees: Tuple["ClientFee", ...] = ()
ClientFeesDetail: Tuple["_ClientFeesDetail", ...] = ()
DebitCardActivities: Tuple["DebitCardActivity", ...] = ()
SoftDollars: Tuple = () # TODO
CashTransactions: Tuple["CashTransaction", ...] = ()
SalesTaxes: Tuple["SalesTax", ...] = ()
CFDCharges: Tuple = () # TODO
InterestAccruals: Tuple["InterestAccrualsCurrency", ...] = ()
TierInterestDetails: Tuple["TierInterestDetail", ...] = ()
HardToBorrowDetails: Tuple["HardToBorrowDetail", ...] = ()
HardToBorrowMarkupDetails: Tuple = ()
SLBOpenContracts: Tuple = () # TODO
SLBActivities: Tuple["SLBActivity", ...] = ()
SLBFees: Tuple["SLBFee", ...] = ()
Transfers: Tuple["Transfer", ...] = ()
ChangeInDividendAccruals: Tuple["_ChangeInDividendAccrual", ...] = ()
OpenDividendAccruals: Tuple["OpenDividendAccrual", ...] = ()
SecuritiesInfo: Tuple["SecurityInfo", ...] = ()
ConversionRates: Tuple["ConversionRate", ...] = ()
HKIPOOpenSubscriptions: Tuple = () # TODO
CommissionCredits: Tuple = () # TODO
StockGrantActivities: Tuple = () # TODO
def __repr__(self):
repr = (
f"{type(self).__name__}("
f"accountId={self.accountId!r}, "
f"fromDate={self.fromDate!r}, "
f"toDate={self.toDate!r}, "
f"period={self.period!r}, "
f"whenGenerated={self.whenGenerated!r}"
)
sequences = (
(k, getattr(self, k))
for k, v in self.__annotations__.items()
if hasattr(v, "__origin__") and v.__origin__ is tuple
)
nonempty_sequences = ", ".join(
f"len({name})={len(value)}" for (name, value) in sequences if value
)
if nonempty_sequences:
repr += ", "
for seq in nonempty_sequences:
repr += seq
repr += ")"
return repr
@dataclass(frozen=True)
class AccountInformation(FlexElement):
""" Child of <FlexStatement> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
name: Optional[str] = None
accountType: Optional[str] = None
customerType: Optional[str] = None
accountCapabilities: Tuple[str, ...] = ()
tradingPermissions: Tuple[str, ...] = ()
registeredRepName: Optional[str] = None
registeredRepPhone: Optional[str] = None
dateOpened: Optional[datetime.date] = None
dateFunded: Optional[datetime.date] = None
dateClosed: Optional[datetime.date] = None
street: Optional[str] = None
street2: Optional[str] = None
city: Optional[str] = None
state: Optional[str] = None
country: Optional[str] = None
postalCode: Optional[str] = None
streetResidentialAddress: Optional[str] = None
street2ResidentialAddress: Optional[str] = None
cityResidentialAddress: Optional[str] = None
stateResidentialAddress: Optional[str] = None
countryResidentialAddress: Optional[str] = None
postalCodeResidentialAddress: Optional[str] = None
masterName: Optional[str] = None
ibEntity: Optional[str] = None
primaryEmail: Optional[str] = None
accountRepName: Optional[str] = None
accountRepPhone: Optional[str] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_AccountInformation = AccountInformation
@dataclass(frozen=True)
class ChangeInNAV(FlexElement):
""" Child of <FlexStatement> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
fromDate: Optional[datetime.date] = None
toDate: Optional[datetime.date] = None
startingValue: Optional[decimal.Decimal] = None
mtm: Optional[decimal.Decimal] = None
realized: Optional[decimal.Decimal] = None
changeInUnrealized: Optional[decimal.Decimal] = None
costAdjustments: Optional[decimal.Decimal] = None
transferredPnlAdjustments: Optional[decimal.Decimal] = None
depositsWithdrawals: Optional[decimal.Decimal] = None
internalCashTransfers: Optional[decimal.Decimal] = None
assetTransfers: Optional[decimal.Decimal] = None
debitCardActivity: Optional[decimal.Decimal] = None
billPay: Optional[decimal.Decimal] = None
dividends: Optional[decimal.Decimal] = None
withholdingTax: Optional[decimal.Decimal] = None
withholding871m: Optional[decimal.Decimal] = None
withholdingTaxCollected: Optional[decimal.Decimal] = None
changeInDividendAccruals: Optional[decimal.Decimal] = None
interest: Optional[decimal.Decimal] = None
changeInInterestAccruals: Optional[decimal.Decimal] = None
advisorFees: Optional[decimal.Decimal] = None
brokerFees: Optional[decimal.Decimal] = None
changeInBrokerFeeAccruals: Optional[decimal.Decimal] = None
clientFees: Optional[decimal.Decimal] = None
otherFees: Optional[decimal.Decimal] = None
feesReceivables: Optional[decimal.Decimal] = None
commissions: Optional[decimal.Decimal] = None
commissionReceivables: Optional[decimal.Decimal] = None
forexCommissions: Optional[decimal.Decimal] = None
transactionTax: Optional[decimal.Decimal] = None
taxReceivables: Optional[decimal.Decimal] = None
salesTax: Optional[decimal.Decimal] = None
softDollars: Optional[decimal.Decimal] = None
netFxTrading: Optional[decimal.Decimal] = None
fxTranslation: Optional[decimal.Decimal] = None
linkingAdjustments: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
endingValue: Optional[decimal.Decimal] = None
twr: Optional[decimal.Decimal] = None
corporateActionProceeds: Optional[decimal.Decimal] = None
commissionCreditsRedemption: Optional[decimal.Decimal] = None
grantActivity: Optional[decimal.Decimal] = None
excessFundSweep: Optional[decimal.Decimal] = None
billableSalesTax: Optional[decimal.Decimal] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_ChangeInNAV = ChangeInNAV
@dataclass(frozen=True)
class MTMPerformanceSummaryUnderlying(FlexElement):
""" Wrapped in <MTMPerformanceSummaryInBase> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
prevCloseQuantity: Optional[decimal.Decimal] = None
prevClosePrice: Optional[decimal.Decimal] = None
closeQuantity: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
transactionMtm: Optional[decimal.Decimal] = None
priorOpenMtm: Optional[decimal.Decimal] = None
commissions: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
total: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
corpActionMtm: Optional[decimal.Decimal] = None
dividends: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
otherWithAccruals: Optional[decimal.Decimal] = None
totalWithAccruals: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class EquitySummaryByReportDateInBase(FlexElement):
""" Wrapped in <EquitySummaryInBase> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
reportDate: Optional[datetime.date] = None
cash: Optional[decimal.Decimal] = None
cashLong: Optional[decimal.Decimal] = None
cashShort: Optional[decimal.Decimal] = None
slbCashCollateral: Optional[decimal.Decimal] = None
slbCashCollateralLong: Optional[decimal.Decimal] = None
slbCashCollateralShort: Optional[decimal.Decimal] = None
stock: Optional[decimal.Decimal] = None
stockLong: Optional[decimal.Decimal] = None
stockShort: Optional[decimal.Decimal] = None
slbDirectSecuritiesBorrowed: Optional[decimal.Decimal] = None
slbDirectSecuritiesBorrowedLong: Optional[decimal.Decimal] = None
slbDirectSecuritiesBorrowedShort: Optional[decimal.Decimal] = None
slbDirectSecuritiesLent: Optional[decimal.Decimal] = None
slbDirectSecuritiesLentLong: Optional[decimal.Decimal] = None
slbDirectSecuritiesLentShort: Optional[decimal.Decimal] = None
options: Optional[decimal.Decimal] = None
optionsLong: Optional[decimal.Decimal] = None
optionsShort: Optional[decimal.Decimal] = None
bonds: Optional[decimal.Decimal] = None
bondsLong: Optional[decimal.Decimal] = None
bondsShort: Optional[decimal.Decimal] = None
bondInterestAccrualsComponent: Optional[decimal.Decimal] = None
bondInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
bondInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
notes: Optional[decimal.Decimal] = None
notesLong: Optional[decimal.Decimal] = None
notesShort: Optional[decimal.Decimal] = None
interestAccruals: Optional[decimal.Decimal] = None
interestAccrualsLong: Optional[decimal.Decimal] = None
interestAccrualsShort: Optional[decimal.Decimal] = None
softDollars: Optional[decimal.Decimal] = None
softDollarsLong: Optional[decimal.Decimal] = None
softDollarsShort: Optional[decimal.Decimal] = None
dividendAccruals: Optional[decimal.Decimal] = None
dividendAccrualsLong: Optional[decimal.Decimal] = None
dividendAccrualsShort: Optional[decimal.Decimal] = None
total: Optional[decimal.Decimal] = None
totalLong: Optional[decimal.Decimal] = None
totalShort: Optional[decimal.Decimal] = None
commodities: Optional[decimal.Decimal] = None
commoditiesLong: Optional[decimal.Decimal] = None
commoditiesShort: Optional[decimal.Decimal] = None
funds: Optional[decimal.Decimal] = None
fundsLong: Optional[decimal.Decimal] = None
fundsShort: Optional[decimal.Decimal] = None
forexCfdUnrealizedPl: Optional[decimal.Decimal] = None
forexCfdUnrealizedPlLong: Optional[decimal.Decimal] = None
forexCfdUnrealizedPlShort: Optional[decimal.Decimal] = None
brokerInterestAccrualsComponent: Optional[decimal.Decimal] = None
brokerCashComponent: Optional[decimal.Decimal] = None
brokerFeesAccrualsComponent: Optional[decimal.Decimal] = None
brokerFeesAccrualsComponentLong: Optional[decimal.Decimal] = None
brokerFeesAccrualsComponentShort: Optional[decimal.Decimal] = None
cfdUnrealizedPl: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccount: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountLong: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountShort: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountCashComponent: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountCashComponentLong: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountCashComponentShort: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccruals: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsLong: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsShort: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsComponent: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
brokerCashComponentLong: Optional[decimal.Decimal] = None
brokerCashComponentShort: Optional[decimal.Decimal] = None
brokerInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
brokerInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
cfdUnrealizedPlLong: Optional[decimal.Decimal] = None
cfdUnrealizedPlShort: Optional[decimal.Decimal] = None
ipoSubscription: Optional[decimal.Decimal] = None
ipoSubscriptionLong: Optional[decimal.Decimal] = None
ipoSubscriptionShort: Optional[decimal.Decimal] = None
physDel: Optional[decimal.Decimal] = None
physDelLong: Optional[decimal.Decimal] = None
physDelShort: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class MTDYTDPerformanceSummaryUnderlying(FlexElement):
""" Wrapped in <MTDYTDPerformanceSummary> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
mtmMTD: Optional[decimal.Decimal] = None
mtmYTD: Optional[decimal.Decimal] = None
realSTMTD: Optional[decimal.Decimal] = None
realSTYTD: Optional[decimal.Decimal] = None
realLTMTD: Optional[decimal.Decimal] = None
realLTYTD: Optional[decimal.Decimal] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
realizedPnlMTD: Optional[decimal.Decimal] = None
realizedCapitalGainsPnlMTD: Optional[decimal.Decimal] = None
realizedFxPnlMTD: Optional[decimal.Decimal] = None
realizedPnlYTD: Optional[decimal.Decimal] = None
realizedCapitalGainsPnlYTD: Optional[decimal.Decimal] = None
realizedFxPnlYTD: Optional[decimal.Decimal] = None
brokerFees: Optional[decimal.Decimal] = None
brokerFeesSec: Optional[decimal.Decimal] = None
brokerFeesCom: Optional[decimal.Decimal] = None
brokerFeesMTD: Optional[decimal.Decimal] = None
brokerFeesYTD: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class CashReportCurrency(FlexElement):
""" Wrapped in <CashReport> """
accountId: Optional[str] = None
currency: Optional[str] = None
fromDate: Optional[datetime.date] = None
toDate: Optional[datetime.date] = None
startingCash: Optional[decimal.Decimal] = None
startingCashSec: Optional[decimal.Decimal] = None
startingCashCom: Optional[decimal.Decimal] = None
clientFees: Optional[decimal.Decimal] = None
clientFeesSec: Optional[decimal.Decimal] = None
clientFeesCom: Optional[decimal.Decimal] = None
commissions: Optional[decimal.Decimal] = None
commissionsSec: Optional[decimal.Decimal] = None
commissionsCom: Optional[decimal.Decimal] = None
billableCommissions: Optional[decimal.Decimal] = None
billableCommissionsSec: Optional[decimal.Decimal] = None
billableCommissionsCom: Optional[decimal.Decimal] = None
depositWithdrawals: Optional[decimal.Decimal] = None
depositWithdrawalsSec: Optional[decimal.Decimal] = None
depositWithdrawalsCom: Optional[decimal.Decimal] = None
deposits: Optional[decimal.Decimal] = None
depositsSec: Optional[decimal.Decimal] = None
depositsCom: Optional[decimal.Decimal] = None
withdrawals: Optional[decimal.Decimal] = None
withdrawalsSec: Optional[decimal.Decimal] = None
withdrawalsCom: Optional[decimal.Decimal] = None
accountTransfers: Optional[decimal.Decimal] = None
accountTransfersSec: Optional[decimal.Decimal] = None
accountTransfersCom: Optional[decimal.Decimal] = None
internalTransfers: Optional[decimal.Decimal] = None
internalTransfersSec: Optional[decimal.Decimal] = None
internalTransfersCom: Optional[decimal.Decimal] = None
dividends: Optional[decimal.Decimal] = None
dividendsSec: Optional[decimal.Decimal] = None
dividendsCom: Optional[decimal.Decimal] = None
brokerFees: Optional[decimal.Decimal] = None
brokerFeesSec: Optional[decimal.Decimal] = None
brokerFeesCom: Optional[decimal.Decimal] = None
brokerFeesMTD: Optional[decimal.Decimal] = None
brokerFeesYTD: Optional[decimal.Decimal] = None
brokerInterest: Optional[decimal.Decimal] = None
brokerInterestSec: Optional[decimal.Decimal] = None
brokerInterestCom: Optional[decimal.Decimal] = None
bondInterest: Optional[decimal.Decimal] = None
bondInterestSec: Optional[decimal.Decimal] = None
bondInterestCom: Optional[decimal.Decimal] = None
cashSettlingMtm: Optional[decimal.Decimal] = None
cashSettlingMtmSec: Optional[decimal.Decimal] = None
cashSettlingMtmCom: Optional[decimal.Decimal] = None
cfdCharges: Optional[decimal.Decimal] = None
cfdChargesSec: Optional[decimal.Decimal] = None
cfdChargesCom: Optional[decimal.Decimal] = None
netTradesSales: Optional[decimal.Decimal] = None
netTradesSalesSec: Optional[decimal.Decimal] = None
netTradesSalesCom: Optional[decimal.Decimal] = None
netTradesPurchases: Optional[decimal.Decimal] = None
netTradesPurchasesSec: Optional[decimal.Decimal] = None
netTradesPurchasesCom: Optional[decimal.Decimal] = None
feesReceivables: Optional[decimal.Decimal] = None
feesReceivablesSec: Optional[decimal.Decimal] = None
feesReceivablesCom: Optional[decimal.Decimal] = None
paymentInLieu: Optional[decimal.Decimal] = None
paymentInLieuSec: Optional[decimal.Decimal] = None
paymentInLieuCom: Optional[decimal.Decimal] = None
transactionTax: Optional[decimal.Decimal] = None
transactionTaxSec: Optional[decimal.Decimal] = None
transactionTaxCom: Optional[decimal.Decimal] = None
withholdingTax: Optional[decimal.Decimal] = None
withholdingTaxSec: Optional[decimal.Decimal] = None
withholdingTaxCom: Optional[decimal.Decimal] = None
fxTranslationGainLoss: Optional[decimal.Decimal] = None
fxTranslationGainLossSec: Optional[decimal.Decimal] = None
fxTranslationGainLossCom: Optional[decimal.Decimal] = None
otherFees: Optional[decimal.Decimal] = None
otherFeesSec: Optional[decimal.Decimal] = None
otherFeesCom: Optional[decimal.Decimal] = None
endingCash: Optional[decimal.Decimal] = None
endingCashSec: Optional[decimal.Decimal] = None
endingCashCom: Optional[decimal.Decimal] = None
endingSettledCash: Optional[decimal.Decimal] = None
endingSettledCashSec: Optional[decimal.Decimal] = None
endingSettledCashCom: Optional[decimal.Decimal] = None
clientFeesMTD: Optional[decimal.Decimal] = None
clientFeesYTD: Optional[decimal.Decimal] = None
commissionsMTD: Optional[decimal.Decimal] = None
commissionsYTD: Optional[decimal.Decimal] = None
billableCommissionsMTD: Optional[decimal.Decimal] = None
billableCommissionsYTD: Optional[decimal.Decimal] = None
depositWithdrawalsMTD: Optional[decimal.Decimal] = None
depositWithdrawalsYTD: Optional[decimal.Decimal] = None
depositsMTD: Optional[decimal.Decimal] = None
depositsYTD: Optional[decimal.Decimal] = None
withdrawalsMTD: Optional[decimal.Decimal] = None
withdrawalsYTD: Optional[decimal.Decimal] = None
accountTransfersMTD: Optional[decimal.Decimal] = None
accountTransfersYTD: Optional[decimal.Decimal] = None
internalTransfersMTD: Optional[decimal.Decimal] = None
internalTransfersYTD: Optional[decimal.Decimal] = None
excessFundSweep: Optional[decimal.Decimal] = None
excessFundSweepSec: Optional[decimal.Decimal] = None
excessFundSweepCom: Optional[decimal.Decimal] = None
excessFundSweepMTD: Optional[decimal.Decimal] = None
excessFundSweepYTD: Optional[decimal.Decimal] = None
dividendsMTD: Optional[decimal.Decimal] = None
dividendsYTD: Optional[decimal.Decimal] = None
insuredDepositInterestMTD: Optional[decimal.Decimal] = None
insuredDepositInterestYTD: Optional[decimal.Decimal] = None
brokerInterestMTD: Optional[decimal.Decimal] = None
brokerInterestYTD: Optional[decimal.Decimal] = None
bondInterestMTD: Optional[decimal.Decimal] = None
bondInterestYTD: Optional[decimal.Decimal] = None
cashSettlingMtmMTD: Optional[decimal.Decimal] = None
cashSettlingMtmYTD: Optional[decimal.Decimal] = None
realizedVmMTD: Optional[decimal.Decimal] = None
realizedVmYTD: Optional[decimal.Decimal] = None
cfdChargesMTD: Optional[decimal.Decimal] = None
cfdChargesYTD: Optional[decimal.Decimal] = None
netTradesSalesMTD: Optional[decimal.Decimal] = None
netTradesSalesYTD: Optional[decimal.Decimal] = None
advisorFeesMTD: Optional[decimal.Decimal] = None
advisorFeesYTD: Optional[decimal.Decimal] = None
feesReceivablesMTD: Optional[decimal.Decimal] = None
feesReceivablesYTD: Optional[decimal.Decimal] = None
netTradesPurchasesMTD: Optional[decimal.Decimal] = None
netTradesPurchasesYTD: Optional[decimal.Decimal] = None
paymentInLieuMTD: Optional[decimal.Decimal] = None
paymentInLieuYTD: Optional[decimal.Decimal] = None
transactionTaxMTD: Optional[decimal.Decimal] = None
transactionTaxYTD: Optional[decimal.Decimal] = None
taxReceivablesMTD: Optional[decimal.Decimal] = None
taxReceivablesYTD: Optional[decimal.Decimal] = None
withholdingTaxMTD: Optional[decimal.Decimal] = None
withholdingTaxYTD: Optional[decimal.Decimal] = None
withholding871mMTD: Optional[decimal.Decimal] = None
withholding871mYTD: Optional[decimal.Decimal] = None
withholdingCollectedTaxMTD: Optional[decimal.Decimal] = None
withholdingCollectedTaxYTD: Optional[decimal.Decimal] = None
salesTaxMTD: Optional[decimal.Decimal] = None
salesTaxYTD: Optional[decimal.Decimal] = None
otherFeesMTD: Optional[decimal.Decimal] = None
otherFeesYTD: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
avgCreditBalance: Optional[decimal.Decimal] = None
avgCreditBalanceSec: Optional[decimal.Decimal] = None
avgCreditBalanceCom: Optional[decimal.Decimal] = None
avgDebitBalance: Optional[decimal.Decimal] = None
avgDebitBalanceSec: Optional[decimal.Decimal] = None
avgDebitBalanceCom: Optional[decimal.Decimal] = None
linkingAdjustments: Optional[decimal.Decimal] = None
linkingAdjustmentsSec: Optional[decimal.Decimal] = None
linkingAdjustmentsCom: Optional[decimal.Decimal] = None
insuredDepositInterest: Optional[decimal.Decimal] = None
insuredDepositInterestSec: Optional[decimal.Decimal] = None
insuredDepositInterestCom: Optional[decimal.Decimal] = None
realizedVm: Optional[decimal.Decimal] = None
realizedVmSec: Optional[decimal.Decimal] = None
realizedVmCom: Optional[decimal.Decimal] = None
advisorFees: Optional[decimal.Decimal] = None
advisorFeesSec: Optional[decimal.Decimal] = None
advisorFeesCom: Optional[decimal.Decimal] = None
taxReceivables: Optional[decimal.Decimal] = None
taxReceivablesSec: Optional[decimal.Decimal] = None
taxReceivablesCom: Optional[decimal.Decimal] = None
withholding871m: Optional[decimal.Decimal] = None
withholding871mSec: Optional[decimal.Decimal] = None
withholding871mCom: Optional[decimal.Decimal] = None
withholdingCollectedTax: Optional[decimal.Decimal] = None
withholdingCollectedTaxSec: Optional[decimal.Decimal] = None
withholdingCollectedTaxCom: Optional[decimal.Decimal] = None
salesTax: Optional[decimal.Decimal] = None
salesTaxSec: Optional[decimal.Decimal] = None
salesTaxCom: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
otherSec: Optional[decimal.Decimal] = None
otherCom: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
debitCardActivity: Optional[decimal.Decimal] = None
debitCardActivitySec: Optional[decimal.Decimal] = None
debitCardActivityCom: Optional[decimal.Decimal] = None
debitCardActivityMTD: Optional[decimal.Decimal] = None
debitCardActivityYTD: Optional[decimal.Decimal] = None
billPay: Optional[decimal.Decimal] = None
billPaySec: Optional[decimal.Decimal] = None
billPayCom: Optional[decimal.Decimal] = None
billPayMTD: Optional[decimal.Decimal] = None
billPayYTD: Optional[decimal.Decimal] = None
realizedForexVm: Optional[decimal.Decimal] = None
realizedForexVmSec: Optional[decimal.Decimal] = None
realizedForexVmCom: Optional[decimal.Decimal] = None
realizedForexVmMTD: Optional[decimal.Decimal] = None
realizedForexVmYTD: Optional[decimal.Decimal] = None
ipoSubscription: Optional[decimal.Decimal] = None
ipoSubscriptionSec: Optional[decimal.Decimal] = None
ipoSubscriptionCom: Optional[decimal.Decimal] = None
ipoSubscriptionMTD: Optional[decimal.Decimal] = None
ipoSubscriptionYTD: Optional[decimal.Decimal] = None
billableSalesTax: Optional[decimal.Decimal] = None
billableSalesTaxSec: Optional[decimal.Decimal] = None
billableSalesTaxCom: Optional[decimal.Decimal] = None
billableSalesTaxMTD: Optional[decimal.Decimal] = None
billableSalesTaxYTD: Optional[decimal.Decimal] = None
commissionCreditsRedemption: Optional[decimal.Decimal] = None
commissionCreditsRedemptionSec: Optional[decimal.Decimal] = None
commissionCreditsRedemptionCom: Optional[decimal.Decimal] = None
commissionCreditsRedemptionMTD: Optional[decimal.Decimal] = None
commissionCreditsRedemptionYTD: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class StatementOfFundsLine(FlexElement):
""" Wrapped in <StmtFunds> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
balance: Optional[decimal.Decimal] = None
debit: Optional[decimal.Decimal] = None
credit: Optional[decimal.Decimal] = None
currency: Optional[str] = None
tradeID: Optional[str] = None
# Despite the name, `date` actually contains date/time data.
date: Optional[datetime.datetime] = None
reportDate: Optional[datetime.date] = None
activityDescription: Optional[str] = None
amount: Optional[decimal.Decimal] = None
buySell: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
fxRateToBase: Optional[decimal.Decimal] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
settleDate: Optional[datetime.date] = None
activityCode: Optional[str] = None # FIXME
orderID: Optional[str] = None
tradeQuantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeGross: Optional[decimal.Decimal] = None
tradeCommission: Optional[decimal.Decimal] = None
tradeTax: Optional[decimal.Decimal] = None
tradeCode: Optional[str] = None
levelOfDetail: Optional[str] = None
transactionID: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class ChangeInPositionValue(FlexElement):
""" Wrapped in <ChangeInPositionValues> """
assetCategory: Optional[enums.AssetClass] = None
currency: Optional[str] = None
priorPeriodValue: Optional[decimal.Decimal] = None
transactions: Optional[decimal.Decimal] = None
mtmPriorPeriodPositions: Optional[decimal.Decimal] = None
mtmTransactions: Optional[decimal.Decimal] = None
corporateActions: Optional[decimal.Decimal] = None
accountTransfers: Optional[decimal.Decimal] = None
fxTranslationPnl: Optional[decimal.Decimal] = None
futurePriceAdjustments: Optional[decimal.Decimal] = None
settledCash: Optional[decimal.Decimal] = None
endOfPeriodValue: Optional[decimal.Decimal] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
other: Optional[decimal.Decimal] = None
linkingAdjustments: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class OpenPosition(FlexElement):
""" Wrapped in <OpenPositions> """
side: Optional[enums.LongShort] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
position: Optional[decimal.Decimal] = None
markPrice: Optional[decimal.Decimal] = None
positionValue: Optional[decimal.Decimal] = None
openPrice: Optional[decimal.Decimal] = None
costBasisPrice: Optional[decimal.Decimal] = None
costBasisMoney: Optional[decimal.Decimal] = None
fifoPnlUnrealized: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
securityIDType: Optional[str] = None
issuer: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
originatingOrderID: Optional[str] = None
originatingTransactionID: Optional[str] = None
accruedInt: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
percentOfNAV: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
positionValueInBase: Optional[decimal.Decimal] = None
unrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
unrealizedlFxPnl: Optional[decimal.Decimal] = None
vestingDate: Optional[datetime.date] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class FxLot(FlexElement):
""" Wrapped in <FxLots>, which in turn is wrapped in <FxPositions> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
reportDate: Optional[datetime.date] = None
functionalCurrency: Optional[str] = None
fxCurrency: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
costPrice: Optional[decimal.Decimal] = None
costBasis: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
value: Optional[decimal.Decimal] = None
unrealizedPL: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
lotDescription: Optional[str] = None
lotOpenDateTime: Optional[datetime.datetime] = None
levelOfDetail: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
@dataclass(frozen=True)
class Trade(FlexElement):
""" Wrapped in <Trades> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
buySell: Optional[enums.BuySell] = None
orderType: Optional[enums.OrderType] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
netCash: Optional[decimal.Decimal] = None
netCashInBase: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
ibOrderID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
clearingFirmID: Optional[str] = None
# Effective 2013, every Trade has a `transactionID` attribute that can't
# be deselected in the Flex query template.
transactionID: Optional[str] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
ibExecID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
sedol: Optional[str] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
accruedInt: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class Lot(FlexElement):
""" Wrapped in <Trades> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
buySell: Optional[enums.BuySell] = None
orderType: Optional[enums.OrderType] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
netCash: Optional[decimal.Decimal] = None
netCashInBase: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
ibOrderID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
clearingFirmID: Optional[str] = None
# Effective 2013, every Trade has a `transactionID` attribute that can't
# be deselected in the Flex query template.
transactionID: Optional[str] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
ibExecID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
sedol: Optional[str] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
@dataclass(frozen=True)
class | (FlexElement):
""" Wrapped in <UnbundledCommissionDetails> """
buySell: Optional[enums.BuySell] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
orderReference: Optional[str] = None
totalCommission: Optional[decimal.Decimal] = None
brokerExecutionCharge: Optional[decimal.Decimal] = None
brokerClearingCharge: Optional[decimal.Decimal] = None
thirdPartyExecutionCharge: Optional[decimal.Decimal] = None
thirdPartyClearingCharge: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None
regFINRATradingActivityFee: Optional[decimal.Decimal] = None
regSection31TransactionFee: Optional[decimal.Decimal] = None
regOther: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class SymbolSummary(FlexElement):
""" Wrapped in <TradeConfirms> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
transactionType: Optional[enums.TradeType] = None
tradeID: Optional[str] = None
orderID: Optional[decimal.Decimal] = None
execID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
clearingFirmID: Optional[str] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
dateTime: Optional[datetime.datetime] = None
reportDate: Optional[datetime.date] = None
settleDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
exchange: Optional[str] = None
buySell: Optional[enums.BuySell] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
amount: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
commission: Optional[decimal.Decimal] = None
brokerExecutionCommission: Optional[decimal.Decimal] = None
brokerClearingCommission: Optional[decimal.Decimal] = None
thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
thirdPartyClearingCommission: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
otherCommission: Optional[decimal.Decimal] = None
commissionCurrency: Optional[str] = None
tax: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
orderType: Optional[enums.OrderType] = None
levelOfDetail: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
allocatedTo: Optional[str] = None
accruedInt: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class Order(FlexElement):
""" Wrapped in <TradeConfirms> or <Trades>"""
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
transactionType: Optional[enums.TradeType] = None
tradeID: Optional[str] = None
orderID: Optional[decimal.Decimal] = None
execID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
clearingFirmID: Optional[str] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
dateTime: Optional[datetime.datetime] = None
reportDate: Optional[datetime.date] = None
settleDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
exchange: Optional[str] = None
buySell: Optional[enums.BuySell] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
amount: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
commission: Optional[decimal.Decimal] = None
brokerExecutionCommission: Optional[decimal.Decimal] = None
brokerClearingCommission: Optional[decimal.Decimal] = None
thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
thirdPartyClearingCommission: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
otherCommission: Optional[decimal.Decimal] = None
commissionCurrency: Optional[str] = None
tax: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
orderType: Optional[enums.OrderType] = None
levelOfDetail: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
allocatedTo: Optional[str] = None
accruedInt: Optional[decimal.Decimal] = None
netCash: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibOrderID: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
settleDateTarget: Optional[datetime.date] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
closePrice: Optional[decimal.Decimal] = None
openCloseIndicator: Optional[enums.OpenClose] = None
notes: Optional[str] = None
cost: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origOrderID: Optional[str] = None
transactionID: Optional[str] = None
ibExecID: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class TradeConfirm(FlexElement):
""" Wrapped in <TradeConfirms> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
buySell: Optional[enums.BuySell] = None
orderType: Optional[enums.OrderType] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
netCash: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
clearingFirmID: Optional[str] = None
transactionID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
levelOfDetail: Optional[str] = None
commissionCurrency: Optional[str] = None
price: Optional[decimal.Decimal] = None
thirdPartyClearingCommission: Optional[decimal.Decimal] = None
orderID: Optional[decimal.Decimal] = None
allocatedTo: Optional[str] = None
thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
brokerExecutionCommission: Optional[decimal.Decimal] = None
thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
amount: Optional[decimal.Decimal] = None
otherCommission: Optional[decimal.Decimal] = None
commission: Optional[decimal.Decimal] = None
brokerClearingCommission: Optional[decimal.Decimal] = None
ibOrderID: Optional[str] = None
ibExecID: Optional[str] = None
execID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
code: Tuple[enums.Code, ...] = ()
tax: Optional[decimal.Decimal] = None
listingExchange: Optional[str] = None
underlyingListingExchange: Optional[str] = None
settleDate: Optional[datetime.date] = None
underlyingSecurityID: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
accruedInt: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class OptionEAE(FlexElement):
"""Option Exercise Assignment or Expiration
Wrapped in (identically-named) <OptionEAE>
"""
transactionType: Optional[enums.OptionAction] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
markPrice: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
commisionsAndTax: Optional[decimal.Decimal] = None
costBasis: Optional[decimal.Decimal] = None
realizedPnl: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_OptionEAE = OptionEAE
@dataclass(frozen=True)
class TradeTransfer(FlexElement):
""" Wrapped in <TradeTransfers> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
direction: Optional[enums.ToFrom] = None
deliveredReceived: Optional[enums.DeliveredReceived] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
brokerName: Optional[str] = None
brokerAccount: Optional[str] = None
awayBrokerCommission: Optional[decimal.Decimal] = None
regulatoryFee: Optional[decimal.Decimal] = None
netTradeMoney: Optional[decimal.Decimal] = None
netTradeMoneyInBase: Optional[decimal.Decimal] = None
netTradePrice: Optional[decimal.Decimal] = None
multiplier: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
securityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
netCash: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
# Oddly, `origTradeDate` appears to have hard-coded YYYYMMDD format
# instead of the date format from the report configuration.
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
clearingFirmID: Optional[str] = None
transactionID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
levelOfDetail: Optional[str] = None
securityIDType: Optional[str] = None
@dataclass(frozen=True)
class InterestAccrualsCurrency(FlexElement):
""" Wrapped in <InterestAccruals> """
accountId: Optional[str] = None
currency: Optional[str] = None
fromDate: Optional[datetime.date] = None
toDate: Optional[datetime.date] = None
startingAccrualBalance: Optional[decimal.Decimal] = None
interestAccrued: Optional[decimal.Decimal] = None
accrualReversal: Optional[decimal.Decimal] = None
endingAccrualBalance: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
fxTranslation: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class TierInterestDetail(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
interestType: Optional[str] = None
valueDate: Optional[datetime.date] = None
tierBreak: Optional[str] = None
balanceThreshold: Optional[decimal.Decimal] = None
securitiesPrincipal: Optional[decimal.Decimal] = None
commoditiesPrincipal: Optional[decimal.Decimal] = None
ibuklPrincipal: Optional[decimal.Decimal] = None
totalPrincipal: Optional[decimal.Decimal] = None
rate: Optional[decimal.Decimal] = None
securitiesInterest: Optional[decimal.Decimal] = None
commoditiesInterest: Optional[decimal.Decimal] = None
ibuklInterest: Optional[decimal.Decimal] = None
totalInterest: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
@dataclass(frozen=True)
class HardToBorrowDetail(FlexElement):
""" Wrapped in <HardToBorrowDetails> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
valueDate: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
value: Optional[decimal.Decimal] = None
borrowFeeRate: Optional[decimal.Decimal] = None
borrowFee: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
@dataclass(frozen=True)
class SLBActivity(FlexElement):
""" Wrapped in <SLBActivities> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
slbTransactionId: Optional[str] = None
activityDescription: Optional[str] = None
type: Optional[str] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
feeRate: Optional[decimal.Decimal] = None
collateralAmount: Optional[decimal.Decimal] = None
markQuantity: Optional[decimal.Decimal] = None
markPriorPrice: Optional[decimal.Decimal] = None
markCurrentPrice: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class SLBFee:
""" Wrapped in <SLBFees> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[str] = None
assetCategory: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
valueDate: Optional[datetime.date] = None
startDate: Optional[datetime.date] = None
type: Optional[str] = None # FIXME
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
collateralAmount: Optional[decimal.Decimal] = None
feeRate: Optional[decimal.Decimal] = None
fee: Optional[decimal.Decimal] = None
carryCharge: Optional[decimal.Decimal] = None
ticketCharge: Optional[decimal.Decimal] = None
totalCharges: Optional[decimal.Decimal] = None
marketFeeRate: Optional[decimal.Decimal] = None
grossLendFee: Optional[decimal.Decimal] = None
netLendFeeRate: Optional[decimal.Decimal] = None
netLendFee: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
@dataclass(frozen=True)
class Transfer(FlexElement):
""" Wrapped in <Transfers> """
type: Optional[enums.TransferType] = None
direction: Optional[enums.InOut] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
reportDate: Optional[datetime.date] = None
underlyingConid: Optional[str] = None
date: Optional[datetime.date] = None
dateTime: Optional[datetime.datetime] = None
account: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
transferPrice: Optional[decimal.Decimal] = None
positionAmount: Optional[decimal.Decimal] = None
positionAmountInBase: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
cashTransfer: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
clientReference: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
company: Optional[str] = None
accountName: Optional[str] = None
pnlAmount: Optional[decimal.Decimal] = None
pnlAmountInBase: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
transactionID: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class UnsettledTransfer(FlexElement):
""" Wrapped in <UnsettledTransfers> """
direction: Optional[enums.ToFrom] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
underlyingConid: Optional[str] = None
stage: Optional[str] = None
tradeDate: Optional[datetime.date] = None
targetSettlement: Optional[datetime.date] = None
contra: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeAmount: Optional[decimal.Decimal] = None
tradeAmountInBase: Optional[decimal.Decimal] = None
transactionID: Optional[str] = None
@dataclass(frozen=True)
class PriorPeriodPosition(FlexElement):
""" Wrapped in <PriorPeriodPositions> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
priorMtmPnl: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
price: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class CorporateAction(FlexElement):
""" Wrapped in <CorporateActions> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
actionDescription: Optional[str] = None
dateTime: Optional[datetime.datetime] = None
amount: Optional[decimal.Decimal] = None
quantity: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
# Effective 2010, CorporateAction has a `type` attribute
type: Optional[enums.Reorg] = None
code: Tuple[enums.Code, ...] = ()
sedol: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
proceeds: Optional[decimal.Decimal] = None
value: Optional[decimal.Decimal] = None
transactionID: Optional[str] = None
@dataclass(frozen=True)
class CashTransaction(FlexElement):
""" Wrapped in <CashTransactions> """
type: Optional[enums.CashAction] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
amount: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
sedol: Optional[str] = None
symbol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
transactionID: Optional[str] = None
reportDate: Optional[datetime.date] = None
clientReference: Optional[str] = None
settleDate: Optional[datetime.date] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
levelOfDetail: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class DebitCardActivity(FlexElement):
""" Wrapped in <DebitCardActivities> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
status: Optional[str] = None
reportDate: Optional[datetime.date] = None
postingDate: Optional[datetime.date] = None
transactionDateTime: Optional[datetime.datetime] = None
category: Optional[str] = None
merchantNameLocation: Optional[str] = None
amount: Optional[decimal.Decimal] = None
model: Optional[str] = None
@dataclass(frozen=True)
class ChangeInDividendAccrual(FlexElement):
""" Wrapped in <ChangeInDividendAccruals> """
date: Optional[datetime.date] = None
assetCategory: Optional[enums.AssetClass] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
accountId: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
reportDate: Optional[datetime.date] = None
underlyingConid: Optional[str] = None
exDate: Optional[datetime.date] = None
payDate: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
tax: Optional[decimal.Decimal] = None
fee: Optional[decimal.Decimal] = None
grossRate: Optional[decimal.Decimal] = None
grossAmount: Optional[decimal.Decimal] = None
netAmount: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_ChangeInDividendAccrual = ChangeInDividendAccrual
@dataclass(frozen=True)
class OpenDividendAccrual(FlexElement):
""" Wrapped in <OpenDividendAccruals> """
assetCategory: Optional[enums.AssetClass] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
accountId: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
exDate: Optional[datetime.date] = None
payDate: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
tax: Optional[decimal.Decimal] = None
fee: Optional[decimal.Decimal] = None
grossRate: Optional[decimal.Decimal] = None
grossAmount: Optional[decimal.Decimal] = None
netAmount: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class SecurityInfo(FlexElement):
""" Wrapped in <SecuritiesInfo> """
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingCategory: Optional[str] = None
subCategory: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
maturity: Optional[str] = None
issueDate: Optional[datetime.date] = None
type: Optional[str] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
currency: Optional[str] = None
settlementPolicyMethod: Optional[str] = None
@dataclass(frozen=True)
class ConversionRate(FlexElement):
""" Wrapped in <ConversionRates> """
reportDate: Optional[datetime.date] = None
fromCurrency: Optional[str] = None
toCurrency: Optional[str] = None
rate: Optional[decimal.Decimal] = None
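# Illustrative example (not part of the original module): instances of these
# frozen dataclasses are normally produced by the surrounding Flex-statement
# parser, but one can be built by hand for testing. Values are made up.
#
# >>> import datetime, decimal
# >>> ConversionRate(
# ...     reportDate=datetime.date(2021, 6, 30),
# ...     fromCurrency="EUR",
# ...     toCurrency="USD",
# ...     rate=decimal.Decimal("1.19"),
# ... )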
@dataclass(frozen=True)
class FIFOPerformanceSummaryUnderlying(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
listingExchange: Optional[str] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
realizedSTProfit: Optional[decimal.Decimal] = None
realizedSTLoss: Optional[decimal.Decimal] = None
realizedLTProfit: Optional[decimal.Decimal] = None
realizedLTLoss: Optional[decimal.Decimal] = None
totalRealizedPnl: Optional[decimal.Decimal] = None
unrealizedProfit: Optional[decimal.Decimal] = None
unrealizedLoss: Optional[decimal.Decimal] = None
totalUnrealizedPnl: Optional[decimal.Decimal] = None
totalFifoPnl: Optional[decimal.Decimal] = None
totalRealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
totalRealizedFxPnl: Optional[decimal.Decimal] = None
totalUnrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
totalUnrealizedFxPnl: Optional[decimal.Decimal] = None
totalCapitalGainsPnl: Optional[decimal.Decimal] = None
totalFxPnl: Optional[decimal.Decimal] = None
transferredPnl: Optional[decimal.Decimal] = None
transferredCapitalGainsPnl: Optional[decimal.Decimal] = None
transferredFxPnl: Optional[decimal.Decimal] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
unrealizedSTProfit: Optional[decimal.Decimal] = None
unrealizedSTLoss: Optional[decimal.Decimal] = None
unrealizedLTProfit: Optional[decimal.Decimal] = None
unrealizedLTLoss: Optional[decimal.Decimal] = None
costAdj: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class NetStockPosition(FlexElement):
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
sharesAtIb: Optional[decimal.Decimal] = None
sharesBorrowed: Optional[decimal.Decimal] = None
sharesLent: Optional[decimal.Decimal] = None
netShares: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class ClientFee(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
feeType: Optional[str] = None
date: Optional[datetime.datetime] = None
description: Optional[str] = None
expenseIndicator: Optional[str] = None
revenue: Optional[decimal.Decimal] = None
expense: Optional[decimal.Decimal] = None
net: Optional[decimal.Decimal] = None
revenueInBase: Optional[decimal.Decimal] = None
expenseInBase: Optional[decimal.Decimal] = None
netInBase: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
execID: Optional[str] = None
levelOfDetail: Optional[str] = None
@dataclass(frozen=True)
class ClientFeesDetail(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
date: Optional[datetime.datetime] = None
tradeID: Optional[str] = None
execID: Optional[str] = None
totalRevenue: Optional[decimal.Decimal] = None
totalCommission: Optional[decimal.Decimal] = None
brokerExecutionCharge: Optional[decimal.Decimal] = None
clearingCharge: Optional[decimal.Decimal] = None
thirdPartyExecutionCharge: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None
regFINRATradingActivityFee: Optional[decimal.Decimal] = None
regSection31TransactionFee: Optional[decimal.Decimal] = None
regOther: Optional[decimal.Decimal] = None
totalNet: Optional[decimal.Decimal] = None
totalNetInBase: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
other: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class TransactionTax(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.datetime] = None
taxDescription: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
taxAmount: Optional[decimal.Decimal] = None
tradeId: Optional[str] = None
tradePrice: Optional[decimal.Decimal] = None
source: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
levelOfDetail: Optional[str] = None
@dataclass(frozen=True)
class TransactionTaxDetail(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.datetime] = None
taxDescription: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
taxAmount: Optional[decimal.Decimal] = None
tradeId: Optional[str] = None
tradePrice: Optional[decimal.Decimal] = None
source: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
levelOfDetail: Optional[str] = None
@dataclass(frozen=True)
class SalesTax(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
country: Optional[str] = None
taxType: Optional[str] = None
payer: Optional[str] = None
taxableDescription: Optional[str] = None
taxableAmount: Optional[decimal.Decimal] = None
taxRate: Optional[decimal.Decimal] = None
salesTax: Optional[decimal.Decimal] = None
taxableTransactionID: Optional[str] = None
transactionID: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
# Type alias to work around https://github.com/python/mypy/issues/1775
_ClientFeesDetail = ClientFeesDetail
| UnbundledCommissionDetail |
goniometer.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Project: Fast Azimuthal integration
# https://github.com/silx-kit/pyFAI
#
# Copyright (C) 2017-2021 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Jérôme Kieffer ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# .
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# .
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Everything you need to calibrate a detector mounted on a goniometer or any
translation table
"""
__author__ = "Jérôme Kieffer"
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "19/08/2021"
__status__ = "development"
__docformat__ = 'restructuredtext'
import os
import logging
import json
import numpy
from collections import OrderedDict, namedtuple
from scipy.optimize import minimize
from silx.image import marchingsquares
from .massif import Massif
from .control_points import ControlPoints
from .detectors import detector_factory, Detector
from .geometry import Geometry
from .geometryRefinement import GeometryRefinement
from .azimuthalIntegrator import AzimuthalIntegrator
from .utils import StringTypes
from .multi_geometry import MultiGeometry
from .units import CONST_hc, CONST_q
logger = logging.getLogger(__name__)
try:
import numexpr
except ImportError:
logger.debug("Backtrace", exc_info=True)
numexpr = None
# Parameter set used in PyFAI:
PoniParam = namedtuple("PoniParam", ["dist", "poni1", "poni2", "rot1", "rot2", "rot3"])
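# Illustrative note (added for clarity, not in the original file): every
# transformation below returns such a 6-tuple, e.g.
# PoniParam(dist=0.8, poni1=0.02, poni2=0.02, rot1=0.0, rot2=0.1, rot3=0.0)
# with distances in meters and rotations in radians, as used throughout pyFAI.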
class BaseTransformation(object):
"""This class, once instanciated, behaves like a function (via the __call__
method). It is responsible for taking any input geometry and translate it
into a set of parameters compatible with pyFAI, i.e. a tuple with:
(dist, poni1, poni2, rot1, rot2, rot3)
This class relies on a user provided function which does the work.
"""
def __init__(self, funct, param_names, pos_names=None):
"""Constructor of the class
:param funct: function which takes as parameter the param_names and the pos_name
:param param_names: list of names of the parameters used in the model
:param pos_names: list of motor names for gonio with >1 degree of freedom
"""
self.callable = funct
self.variables = {}
self.param_names = tuple(param_names)
if pos_names is not None:
self.pos_names = tuple(pos_names)
else:
self.pos_names = ("pos",)
for key in self.param_names + self.pos_names:
if key in self.variables:
raise RuntimeError("The keyword %s is already defined, please chose another variable name")
self.variables[key] = numpy.NaN
self.codes = {}
def __call__(self, param, pos):
"""This makes the class instance behave like a function,
actually a function that translates the n parameters of the detector
positioning on the goniometer and the m goniometer positions into a pyFAI geometry.
:param param: parameter of the fit
:param pos: position of the goniometer (representation from the
goniometer)
:return: 6-tuple with (dist, poni1, poni2, rot1, rot2, rot3) as needed
for pyFAI.
"""
variables = self.variables.copy()
for name, value in zip(self.param_names, param):
variables[name] = value
if len(self.pos_names) == 1:
variables[self.pos_names[0]] = pos
else:
for name, value in zip(self.pos_names, pos):
variables[name] = value
res = self.callable(**variables)
return PoniParam(*res)
def __repr__(self):
return "BaseTransformation with param: %s and pos: %s" % (self.param_names, self.pos_names)
def to_dict(self):
"""Export the instance representation for serialization as a dictionary
"""
raise RuntimeError("BaseTransformation is not serializable")
class GeometryTransformation(object):
"""This class, once instanciated, behaves like a function (via the __call__
method). It is responsible for taking any input geometry and translate it
into a set of parameters compatible with pyFAI, i.e. a tuple with:
(dist, poni1, poni2, rot1, rot2, rot3)
This function uses numexpr for formula evaluation.
"""
def __init__(self, dist_expr, poni1_expr, poni2_expr,
rot1_expr, rot2_expr, rot3_expr,
param_names, pos_names=None, constants=None,
content=None):
"""Constructor of the class
:param dist_expr: formula (as string) providing with the dist
:param poni1_expr: formula (as string) providing with the poni1
:param poni2_expr: formula (as string) providing with the poni2
:param rot1_expr: formula (as string) providing with the rot1
:param rot2_expr: formula (as string) providing with the rot2
:param rot3_expr: formula (as string) providing with the rot3
:param param_names: list of names of the parameters used in the model
:param pos_names: list of motor names for gonio with >1 degree of freedom
:param constants: a dictionary with some constants the user may want to use
:param content: Should be None or the name of the class (may be used
in the future to dispatch to multiple derivative classes)
"""
if content is not None:
# Ensures we use the constructor of the right class
assert content in (self.__class__.__name__, "GeometryTransformation")
if numexpr is None:
raise RuntimeError("Geometry translation requires the *numexpr* package")
self.expressions = OrderedDict()
if dist_expr is not None:
self.expressions["dist"] = dist_expr
if poni1_expr is not None:
self.expressions["poni1"] = poni1_expr
if poni2_expr is not None:
self.expressions["poni2"] = poni2_expr
if rot1_expr is not None:
self.expressions["rot1"] = rot1_expr
if rot2_expr is not None:
self.expressions["rot2"] = rot2_expr
if rot3_expr is not None:
self.expressions["rot3"] = rot3_expr
self.variables = {"pi": numpy.pi}
if constants is not None:
self.variables.update(constants)
self.param_names = tuple(param_names)
if pos_names is not None:
self.pos_names = tuple(pos_names)
else:
self.pos_names = ("pos",)
for key in self.param_names + self.pos_names:
if key in self.variables:
raise RuntimeError(f"The keyword `{key}` is already defined, please chose another variable name")
self.variables[key] = numpy.NaN
self.codes = OrderedDict(((name, numexpr.NumExpr(expr)) for name, expr in self.expressions.items()))
@property
def dist_expr(self):
return self.expressions.get("dist")
@property
def poni1_expr(self):
return self.expressions.get("poni1")
@property
def poni2_expr(self):
return self.expressions.get("poni2")
@property
def rot1_expr(self):
return self.expressions.get("rot1")
@property
def rot2_expr(self):
return self.expressions.get("rot2")
@property
def rot3_expr(self):
return self.expressions.get("rot3")
def __call__(self, param, pos):
"""This makes the class instance behave like a function,
actually a function that translates the n parameters of the detector
positioning on the goniometer and the m goniometer positions into a pyFAI geometry.
:param param: parameter of the fit
:param pos: position of the goniometer (representation from the
goniometer)
:return: 6-tuple with (dist, poni1, poni2, rot1, rot2, rot3) as needed
for pyFAI.
"""
res = {}
variables = self.variables.copy()
for name, value in zip(self.param_names, param):
variables[name] = value
if len(self.pos_names) == 1:
variables[self.pos_names[0]] = pos
else:
for name, value in zip(self.pos_names, pos):
variables[name] = value
for name, code in self.codes.items():
signa = [variables.get(name, numpy.NaN) for name in code.input_names]
res[name] = (float(code(*signa)))
# could be done in a one-liner but harder to understand!
return PoniParam(**res)
def __repr__(self):
res = ["GeometryTransformation with param: %s and pos: %s" % (self.param_names, self.pos_names),
" dist= %s" % self.dist_expr,
" poni1= %s" % self.poni1_expr,
" poni2= %s" % self.poni2_expr,
" rot1= %s" % self.rot1_expr,
" rot2= %s" % self.rot2_expr,
" rot3= %s" % self.rot3_expr]
return os.linesep.join(res)
def to_dict(self):
"""Export the instance representation for serialization as a dictionary
"""
res = OrderedDict([("content", self.__class__.__name__),
("param_names", self.param_names),
("pos_names", self.pos_names),
("dist_expr", self.dist_expr),
("poni1_expr", self.poni1_expr),
("poni2_expr", self.poni2_expr),
("rot1_expr", self.rot1_expr),
("rot2_expr", self.rot2_expr),
("rot3_expr", self.rot3_expr),
])
constants = OrderedDict()
for key, val in self.variables.items():
if key in self.param_names:
continue
if self.pos_names and key in self.pos_names:
continue
constants[key] = val
res["constants"] = constants
return res
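# Illustrative usage of GeometryTransformation (a sketch, assuming a single
# rotation motor "pos" given in degrees and the numexpr package installed;
# the expressions and parameter names below are assumptions for the example):
#
# >>> trans = GeometryTransformation(
# ...     dist_expr="dist",
# ...     poni1_expr="poni1",
# ...     poni2_expr="poni2",
# ...     rot1_expr="0.0",
# ...     rot2_expr="pi * (rot2_offset + pos) / 180.0",
# ...     rot3_expr="0.0",
# ...     param_names=["dist", "poni1", "poni2", "rot2_offset"])
# >>> trans([0.8, 0.02, 0.02, -1.5], 30.0)   # -> PoniParam(..., rot2~=0.497, ...)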
class ExtendedTransformation(object):
"""This class behaves like GeometryTransformation and extends transformation
to the wavelength parameter.
This function uses numexpr for formula evaluation.
"""
def __init__(self, dist_expr=None, poni1_expr=None, poni2_expr=None,
rot1_expr=None, rot2_expr=None, rot3_expr=None, wavelength_expr=None,
param_names=None, pos_names=None, constants=None,
content=None):
"""Constructor of the class
:param dist_expr: formula (as string) providing with the dist
:param poni1_expr: formula (as string) providing with the poni1
:param poni2_expr: formula (as string) providing with the poni2
:param rot1_expr: formula (as string) providing with the rot1
:param rot2_expr: formula (as string) providing with the rot2
:param rot3_expr: formula (as string) providing with the rot3
:param wavelength_expr: formula (as a string) to calculate wavelength used in angstrom
:param param_names: list of names of the parameters used in the model
:param pos_names: list of motor names for gonio with >1 degree of freedom
:param constants: a dictionary with some constants the user may want to use
:param content: Should be None or the name of the class (may be used
in the future to dispatch to multiple derivative classes)
"""
if content is not None:
# Ensures we use the constructor of the right class
assert content in (self.__class__.__name__, "ExtendedTransformation")
if numexpr is None:
raise RuntimeError("This Transformation requires the *numexpr* package")
self.expressions = OrderedDict()
if dist_expr is not None:
self.expressions["dist"] = dist_expr
if poni1_expr is not None:
self.expressions["poni1"] = poni1_expr
if poni2_expr is not None:
self.expressions["poni2"] = poni2_expr
if rot1_expr is not None:
self.expressions["rot1"] = rot1_expr
if rot2_expr is not None:
self.expressions["rot2"] = rot2_expr
if rot3_expr is not None:
self.expressions["rot3"] = rot3_expr
if wavelength_expr is not None:
self.expressions["wavelength"] = wavelength_expr
self.ParamNT = namedtuple("ParamNT", list(self.expressions.keys()))
self.variables = {"pi": numpy.pi,
"hc": CONST_hc,
"q": CONST_q}
if constants is not None:
self.variables.update(constants)
self.param_names = tuple(param_names) if param_names is not None else tuple()
if pos_names is not None:
self.pos_names = tuple(pos_names)
else:
self.pos_names = ("pos",)
for key in self.param_names + self.pos_names:
if key in self.variables:
raise RuntimeError("The keyword %s is already defined, please chose another variable name")
self.variables[key] = numpy.NaN
self.codes = OrderedDict(((name, numexpr.NumExpr(expr)) for name, expr in self.expressions.items()))
def __call__(self, param, pos):
"""This makes the class instance behave like a function,
actually a function that translates the n parameters of the detector
positioning on the goniometer and the m goniometer positions into a pyFAI geometry.
:param param: parameter of the fit
:param pos: position of the goniometer (representation from the
goniometer)
:return: 6-tuple with (dist, poni1, poni2, rot1, rot2, rot3) as needed
for pyFAI.
"""
res = {}
variables = self.variables.copy()
for name, value in zip(self.param_names, param):
variables[name] = value
if len(self.pos_names) == 1:
variables[self.pos_names[0]] = pos
else:
for name, value in zip(self.pos_names, pos):
variables[name] = value
for name, code in self.codes.items():
signa = [variables.get(name, numpy.NaN) for name in code.input_names]
res[name] = (float(code(*signa)))
# could be done in a one-liner but harder to understand!
return self.ParamNT(**res)
def __repr__(self):
res = ["%s with param: %s and pos: %s" % (self.__class__.__name__, self.param_names, self.pos_names), ]
for name, expr in self.expressions.items():
res.append(" %s= %s" % (name, expr))
return os.linesep.join(res)
def to_dict(self):
"""Export the instance representation for serialization as a dictionary
"""
res = OrderedDict([("content", self.__class__.__name__),
("param_names", self.param_names),
("pos_names", self.pos_names),
])
for name, expr in self.expressions.items():
res[name + "_expr"] = expr
constants = OrderedDict()
for key, val in self.variables.items():
if key in self.param_names:
continue
if self.pos_names and key in self.pos_names:
continue
constants[key] = val
res["constants"] = constants
return res
GeometryTranslation = GeometryTransformation
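# Illustrative usage of ExtendedTransformation (a sketch; refining the beam
# energy and deriving the wavelength from it via the predefined "hc" constant
# is an assumption for the example, not mandated by this module):
#
# >>> trans = ExtendedTransformation(
# ...     dist_expr="dist", poni1_expr="poni1", poni2_expr="poni2",
# ...     rot1_expr="0.0", rot2_expr="pi * pos / 180.0", rot3_expr="0.0",
# ...     wavelength_expr="hc / energy * 1e-10",  # hc in keV.A -> wavelength in m
# ...     param_names=["dist", "poni1", "poni2", "energy"])
# >>> trans([0.8, 0.02, 0.02, 12.0], 30.0).wavelength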
class Goniometer(object):
"""This class represents the goniometer model. Unlike this name suggests,
it may include translation in addition to rotations
"""
_file_version_1_1 = "Goniometer calibration v1.1"
file_version = "Goniometer calibration v2"
def __init__(self, param, trans_function, detector="Detector",
wavelength=None, param_names=None, pos_names=None):
"""Constructor of the Goniometer class.
:param param: vector of parameter to refine for defining the detector
position on the goniometer
:param trans_function: function taking the parameters of the
goniometer and the goniometer position and return the
6 parameters [dist, poni1, poni2, rot1, rot2, rot3]
:param detector: detector mounted on the moving arm
:param wavelength: the wavelength used for the experiment
:param param_names: list of names to "label" the param vector.
:param pos_names: list of names to "label" the position vector of
the gonio.
"""
self.param = param
self.trans_function = trans_function
self.detector = detector_factory(detector)
self._wavelength = wavelength
if param_names is None and "param_names" in dir(trans_function):
param_names = trans_function.param_names
if param_names is not None:
if isinstance(param, dict):
self.param = [param.get(i, 0) for i in param_names]
self.nt_param = namedtuple("GonioParam", param_names)
else:
self.nt_param = lambda *x: tuple(x)
if pos_names is None and "pos_names" in dir(trans_function):
pos_names = trans_function.pos_names
self.nt_pos = namedtuple("GonioPos", pos_names) if pos_names else lambda *x: tuple(x)
def __repr__(self):
return "Goniometer with param %s %s with %s" % (self.nt_param(*self.param), os.linesep, self.detector)
@property
def wavelength(self):
wl_fct = self.trans_function.codes.get("wavelength")
if wl_fct is not None:
# check that wavelength does not depend on the motor position
params = wl_fct.input_names
for motor in self.trans_function.pos_names:
if motor in params:
logger.warning("Wavelength depends on motors, returning the default value")
return self._wavelength
dummy_position = [0] * len(self.nt_pos._fields)
return self.trans_function(self.param, dummy_position).wavelength
else:
return self._wavelength
@wavelength.setter
def wavelength(self, value):
if "wavelength" in self.trans_function.codes:
logger.warning("Wavelength is a fitted parameter, cannot be set. Please set fitted parameter")
else:
self._wavelength = value
def get_ai(self, position):
"""Creates an azimuthal integrator from the motor position
:param position: the goniometer position, a float for a 1 axis goniometer
:return: A freshly build AzimuthalIntegrator
"""
res = self.trans_function(self.param, position)
params = {"detector": self.detector,
"wavelength": self._wavelength}
for name, value in zip(res._fields, res):
params[name] = value
return AzimuthalIntegrator(**params)
def get_mg(self, positions):
"""C | def to_dict(self):
"""Export the goniometer configuration to a dictionary
:return: Ordered dictionary
"""
dico = OrderedDict([("content", self.file_version)])
dico["detector"] = self.detector.name
dico["detector_config"] = self.detector.get_config()
if self.wavelength:
dico["wavelength"] = self.wavelength
dico["param"] = tuple(self.param)
if "_fields" in dir(self.nt_param):
dico["param_names"] = self.nt_param._fields
if "_fields" in dir(self.nt_pos):
dico["pos_names"] = self.nt_pos._fields
if "to_dict" in dir(self.trans_function):
dico["trans_function"] = self.trans_function.to_dict()
else:
logger.warning("trans_function is not serializable")
return dico
def save(self, filename):
"""Save the goniometer configuration to file
:param filename: name of the file to save configuration to
"""
dico = self.to_dict()
try:
with open(filename, "w") as f:
f.write(json.dumps(dico, indent=2))
except IOError:
logger.error("IOError while writing to file %s", filename)
write = save
@classmethod
def _get_detector_from_dict(cls, dico):
file_version = dico["content"]
if file_version == cls._file_version_1_1:
# v1.1
# Try to extract useful keys
detector = Detector.factory(dico["detector"])
# This is not accurate, some keys could be missing
keys = detector.get_config().keys()
config = {}
for k in keys:
if k in dico:
config[k] = dico[k]
del dico[k]
detector = Detector.factory(dico["detector"], config)
else:
# v2
detector = Detector.factory(dico["detector"], dico.get("detector_config", None))
return detector
@classmethod
def sload(cls, filename):
"""Class method for instanciating a Goniometer object from a JSON file
:param filename: name of the JSON file
:return: Goniometer object
"""
with open(filename) as f:
dico = json.load(f)
assert "trans_function" in dico, "No translation function defined in JSON file"
file_version = dico["content"]
assert file_version in [cls.file_version, cls._file_version_1_1], "JSON file does not contain a valid goniometer calibration"
detector = cls._get_detector_from_dict(dico)
tansfun = dico.get("trans_function", {})
if "content" in tansfun:
content = tansfun.pop("content")
# May be adapted for other classes of GeometryTransformation functions
if content in ("GeometryTranslation", "GeometryTransformation"):
funct = GeometryTransformation(**tansfun)
elif content == "ExtendedTransformation":
funct = ExtendedTransformation(**tansfun)
else:
raise RuntimeError(f"content={content}, not in in (GeometryTranslation, GeometryTransformation, ExtendedTransformation)")
else: # assume GeometryTransformation
funct = GeometryTransformation(**tansfun)
gonio = cls(param=dico.get("param", []),
trans_function=funct,
detector=detector,
wavelength=dico.get("wavelength"))
return gonio
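# Illustrative usage of Goniometer (a sketch; the file name and motor values
# are assumptions for the example):
#
# >>> gonio = Goniometer.sload("detector_on_arm.json")
# >>> ai = gonio.get_ai(30.0)                  # AzimuthalIntegrator at pos = 30
# >>> mg = gonio.get_mg([0.0, 15.0, 30.0])     # MultiGeometry over several positions
# >>> gonio.save("detector_on_arm_copy.json")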
class SingleGeometry(object):
"""This class represents a single geometry of a detector position on a
goniometer arm
"""
def __init__(self, label, image=None, metadata=None, pos_function=None,
control_points=None, calibrant=None, detector=None, geometry=None):
"""Constructor of the SingleGeometry class, used for calibrating a
multi-geometry setup with a moving detector.
:param label: name of the geometry, a string or anything immutable
:param image: image with Debye-Scherrer rings as 2d numpy array
:param metadata: anything which contains the goniometer position
:param pos_function: a function which takes the metadata as input
and returns the goniometer arm position
:param control_points: a pyFAI.control_points.ControlPoints instance
(optional parameter)
:param calibrant: a pyFAI.calibrant.Calibrant instance.
Contains the wavelength to be used (optional parameter)
:param detector: a pyFAI.detectors.Detector instance or something similar.
Contains the mask to be used (optional parameter)
:param geometry: an azimuthal integrator or a ponifile
(or a dict with the geometry) (optional parameter)
"""
self.label = label
self.image = image
self.metadata = metadata # may be anything
self.calibrant = calibrant
if control_points is None or isinstance(control_points, ControlPoints):
self.control_points = control_points
else:
# Probably an NPT file
self.control_points = ControlPoints(control_points, calibrant=calibrant)
if detector is not None:
self.detector = detector_factory(detector)
else:
self.detector = None
if isinstance(geometry, Geometry):
dict_geo = geometry.getPyFAI()
elif isinstance(geometry, StringTypes) and os.path.exists(geometry):
dict_geo = Geometry.sload(geometry).getPyFAI()
elif isinstance(geometry, dict):
dict_geo = geometry
if self.detector is not None:
dict_geo["detector"] = self.detector
if self.control_points is not None:
dict_geo["data"] = self.control_points.getList()
if self.calibrant is not None:
dict_geo["calibrant"] = self.calibrant
if "max_shape" in dict_geo:
# not used in constructor
dict_geo.pop("max_shape")
self.geometry_refinement = GeometryRefinement(**dict_geo)
if self.detector is None:
self.detector = self.geometry_refinement.detector
self.pos_function = pos_function
self.massif = None
def get_position(self):
"""This method is in charge of calculating the motor position from metadata/label/..."""
return self.pos_function(self.metadata)
def extract_cp(self, max_rings=None, pts_per_deg=1.0, Imin=0):
"""Performs an automatic keypoint extraction and update the geometry refinement part
:param max_ring: extract at most N rings from the image
:param pts_per_deg: number of control points per azimuthal degree (increase for better precision)
"""
if self.massif is None:
self.massif = Massif(self.image)
tth = numpy.array([i for i in self.calibrant.get_2th() if i is not None])
tth = numpy.unique(tth)
tth_min = numpy.zeros_like(tth)
tth_max = numpy.zeros_like(tth)
delta = (tth[1:] - tth[:-1]) / 4.0
tth_max[:-1] = delta
tth_max[-1] = delta[-1]
tth_min[1:] = -delta
tth_min[0] = -delta[0]
tth_max += tth
tth_min += tth
shape = self.image.shape
ttha = self.geometry_refinement.twoThetaArray(shape)
chia = self.geometry_refinement.chiArray(shape)
rings = 0
cp = ControlPoints(calibrant=self.calibrant)
if max_rings is None:
max_rings = tth.size
ms = marchingsquares.MarchingSquaresMergeImpl(ttha,
mask=self.geometry_refinement.detector.mask,
use_minmax_cache=True)
for i in range(tth.size):
if rings >= max_rings:
break
mask = numpy.logical_and(ttha >= tth_min[i], ttha < tth_max[i])
if self.detector.mask is not None:
mask = numpy.logical_and(mask, numpy.logical_not(self.geometry_refinement.detector.mask))
size = mask.sum(dtype=int)
if (size > 0):
rings += 1
sub_data = self.image.ravel()[numpy.where(mask.ravel())]
mean = sub_data.mean(dtype=numpy.float64)
std = sub_data.std(dtype=numpy.float64)
upper_limit = mean + std
mask2 = numpy.logical_and(self.image > upper_limit, mask)
size2 = mask2.sum(dtype=int)
if size2 < 1000:
upper_limit = mean
mask2 = numpy.logical_and(self.image > upper_limit, mask)
size2 = mask2.sum()
# length of the arc:
points = ms.find_pixels(tth[i])
seeds = set((i[0], i[1]) for i in points if mask2[i[0], i[1]])
# max number of points: 360 points for a full circle
azimuthal = chia[points[:, 0].clip(0, shape[0]), points[:, 1].clip(0, shape[1])]
nb_deg_azim = numpy.unique(numpy.rad2deg(azimuthal).round()).size
keep = int(nb_deg_azim * pts_per_deg)
if keep == 0:
continue
dist_min = len(seeds) / 2.0 / keep
# heuristic spacing so that the kept keypoints spread over the available arc
logger.info("Extracting datapoint for ring %s (2theta = %.2f deg); " +
"searching for %i pts out of %i with I>%.1f, dmin=%.1f",
i, numpy.degrees(tth[i]), keep, size2, upper_limit, dist_min)
res = self.massif.peaks_from_area(mask2, Imin=Imin, keep=keep, dmin=dist_min, seed=seeds, ring=i)
cp.append(res, i)
self.control_points = cp
self.geometry_refinement.data = numpy.asarray(cp.getList(), dtype=numpy.float64)
return cp
def get_ai(self):
"""Create a new azimuthal integrator to be used.
:return: Azimuthal Integrator instance
"""
config = self.geometry_refinement.get_config()
ai = AzimuthalIntegrator()
ai.set_config(config)
return ai
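# Illustrative workflow for SingleGeometry (a sketch; the image, metadata,
# calibrant and starting PONI file are assumptions for the example):
#
# >>> sg = SingleGeometry("arm_30", image=img, metadata={"arm": 30.0},
# ...                     pos_function=lambda md: md["arm"],
# ...                     calibrant=my_calibrant, detector="Pilatus1M",
# ...                     geometry="initial_guess.poni")
# >>> sg.extract_cp(max_rings=5, pts_per_deg=1.0)  # pick control points on the rings
# >>> ai = sg.get_ai()                             # integrator for this position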
class GoniometerRefinement(Goniometer):
"""This class allow the translation of a goniometer geometry into a pyFAI
geometry using a set of parameter to refine.
"""
def __init__(self, param, pos_function, trans_function,
detector="Detector", wavelength=None, param_names=None, pos_names=None,
bounds=None):
"""Constructor of the GoniometerRefinement class
:param param: vector of parameter to refine for defining the detector
position on the goniometer
:param pos_function: a function taking metadata and extracting the
goniometer position
:param trans_function: function taking the parameters of the
goniometer and the goniometer position and return the
6/7 parameters [dist, poni1, poni2, rot1, rot2, rot3, wavelength]
:param detector: detector mounted on the moving arm
:param wavelength: the wavelength used for the experiment
:param param_names: list of names to "label" the param vector.
:param pos_names: list of names to "label" the position vector of the
gonio.
:param bounds: list of 2-tuples with the lower and upper bound of each parameter
"""
Goniometer.__init__(self, param, trans_function,
detector=detector, wavelength=wavelength,
param_names=param_names, pos_names=pos_names)
self.single_geometries = OrderedDict() # a dict of labels: SingleGeometry
if bounds is None:
self.bounds = [(None, None)] * len(self.param)
else:
if isinstance(bounds, dict) and "_fields" in dir(self.nt_param):
self.bounds = [bounds.get(i, (None, None))
for i in self.nt_param._fields]
else:
self.bounds = list(bounds)
self.pos_function = pos_function
self.fit_wavelength = "wavelength" in self.trans_function.codes
def new_geometry(self, label, image=None, metadata=None, control_points=None,
calibrant=None, geometry=None):
"""Add a new geometry for calibration
:param label: usually a string
:param image: 2D numpy array with the Debye scherrer rings
:param metadata: some metadata
:param control_points: an instance of ControlPoints
:param calibrant: the calibrant used for calibrating
:param geometry: poni or AzimuthalIntegrator instance.
"""
if geometry is None:
geometry = self.get_ai(self.pos_function(metadata))
sg = SingleGeometry(label=label,
image=image,
metadata=metadata,
control_points=control_points,
calibrant=calibrant,
detector=self.detector,
pos_function=self.pos_function,
geometry=geometry)
self.single_geometries[label] = sg
return sg
def __repr__(self):
name = self.__class__.__name__
count = len(self.single_geometries)
geometry_list = ", ".join(self.single_geometries.keys())
return "%s with %i geometries labeled: %s." % (name, count, geometry_list)
def residu2(self, param):
"Actually performs the calulation of the average of the error squared"
sumsquare = 0.0
npt = 0
for single in self.single_geometries.values():
motor_pos = single.get_position()
single_param = self.trans_function(param, motor_pos)._asdict()
pyFAI_param = [single_param.get(name, 0.0)
for name in ["dist", "poni1", "poni2", "rot1", "rot2", "rot3"]]
pyFAI_param.append(single_param.get("wavelength", self.wavelength) * 1e10)
if (single.geometry_refinement is not None) and (len(single.geometry_refinement.data) >= 1):
sumsquare += single.geometry_refinement.chi2_wavelength(pyFAI_param)
npt += single.geometry_refinement.data.shape[0]
return sumsquare / max(npt, 1)
def chi2(self, param=None):
"""Calculate the average of the square of the error for a given parameter set
"""
if param is not None:
return self.residu2(param)
else:
return self.residu2(self.param)
def refine2(self, method="slsqp", **options):
"""Geometry refinement tool
See https://docs.scipy.org/doc/scipy-0.18.1/reference/generated/scipy.optimize.minimize.html
:param method: name of the minimizer
:param options: options for the minimizer
"""
if method.lower() in ["simplex", "nelder-mead"]:
method = "Nelder-Mead"
bounds = None
else:
bounds = self.bounds
former_error = self.chi2()
print("Cost function before refinement: %s" % former_error)
param = numpy.asarray(self.param, dtype=numpy.float64)
print(param)
res = minimize(self.residu2, param, method=method,
bounds=bounds, tol=1e-12,
options=options)
print(res)
newparam = res.x
new_error = res.fun
print("Cost function after refinement: %s" % new_error)
print(self.nt_param(*newparam))
# print("Constrained Least square %s --> %s" % (former_error, new_error))
if new_error < former_error:
# print(param, newparam)
i = abs(param - newparam).argmax()
if "_fields" in dir(self.nt_param):
name = self.nt_param._fields[i]
print("maxdelta on: %s (%i) %s --> %s" % (name, i, self.param[i], newparam[i]))
else:
print("maxdelta on: %i %s --> %s" % (i, self.param[i], newparam[i]))
self.param = newparam
# update wavelength after successful optimization: not easy
# if self.fit_wavelength:
# self.wavelength = self.
elif self.fit_wavelength:
print("Restore wavelength and former parameters")
former_wavelength = self.wavelength
for sg in self.single_geometries.values():
sg.calibrant.setWavelength_change2th(former_wavelength)
print(self.nt_param(*self.param))
return self.param
def set_bounds(self, name, mini=None, maxi=None):
"""Redefines the bounds for the refinement
:param name: name of the parameter or index in the parameter set
:param mini: minimum value
:param maxi: maximum value
"""
if isinstance(name, StringTypes) and "_fields" in dir(self.nt_param):
idx = self.nt_param._fields.index(name)
else:
idx = int(name)
self.bounds[idx] = (mini, maxi)
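# Illustrative refinement loop (a sketch; `trans`, `images` and `my_calibrant`
# are assumptions for the example and must be provided by the caller):
#
# >>> gr = GoniometerRefinement(
# ...     param={"dist": 0.8, "poni1": 0.02, "poni2": 0.02, "rot2_offset": 0.0},
# ...     pos_function=lambda md: md["arm"],
# ...     trans_function=trans,                  # e.g. a GeometryTransformation
# ...     detector="Pilatus1M", wavelength=1e-10)
# >>> for label, (img, md) in images.items():
# ...     sg = gr.new_geometry(label, image=img, metadata=md, calibrant=my_calibrant)
# ...     sg.extract_cp(max_rings=5)
# >>> gr.refine2()                               # least-squares fit of the gonio model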
@classmethod
def sload(cls, filename, pos_function=None):
"""Class method for instanciating a Goniometer object from a JSON file
:param filename: name of the JSON file
:param pos_function: a function taking metadata and extracting the
goniometer position
:return: Goniometer object
"""
with open(filename) as f:
dico = json.load(f)
assert dico["content"] == cls.file_version, "JSON file contains a goniometer calibration"
assert "trans_function" in dico, "No translation function defined in JSON file"
detector = cls._get_detector_from_dict(dico)
tansfun = dico.get("trans_function", {})
if "content" in tansfun:
content = tansfun.pop("content")
# May be adapted for other classes of GeometryTransformation functions
if content in ("GeometryTranslation", "GeometryTransformation"):
funct = GeometryTransformation(**tansfun)
elif content == "ExtendedTranformation":
funct = ExtendedTransformation(**tansfun)
else:
raise RuntimeError("content= %s, not in in (GeometryTranslation, GeometryTransformation, ExtendedTranformation)")
else: # assume GeometryTransformation
funct = GeometryTransformation(**tansfun)
gonio = cls(param=dico.get("param", []),
trans_function=funct,
pos_function=pos_function,
detector=detector,
wavelength=dico.get("wavelength"))
return gonio
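    # Hypothetical usage sketch (file name, class name and metadata layout assumed):
    # rebuild a goniometer from a previously saved JSON calibration, e.g.
    #     gonio = Goniometer.sload("goniometer.json",
    #                              pos_function=lambda metadata: metadata["pos"])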
| reates a MultiGeometry integrator from a list of goniometer
positions.
:param positions: A list of goniometer positions
:return: A freshly build multi-geometry
"""
ais = [self.get_ai(pos) for pos in positions]
mg = MultiGeometry(ais)
return mg
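    # Hypothetical usage sketch (the method name and variable names are assumed):
    # combine several goniometer positions into one integrator and integrate a
    # matching stack of images, e.g.
    #     mg = gonio.get_mg([0, 10, 20, 30])
    #     result = mg.integrate1d(list_of_images, npt=1000)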
|
mod.rs | mod build;
mod clean;
mod init;
mod publish; | pub use self::build::build;
pub use self::clean::clean;
pub use self::init::init;
pub use self::publish::publish;
pub use self::serve::serve;
pub use self::update::update; | mod serve;
mod update;
|
flask_test.py | # Test cases for restful API
from app import app
import unittest
class FlaskappTests(unittest.TestCase):
def setUp(self):
# creates a test client
self.app = app.test_client()
# propagate the exceptions to the test client
self.app.testing = True
def test_users_status_code(self):
# sends HTTP GET request to the application
# on the specified path
result = self.app.get('/api/v1/users')
print (result)
# assert the status code of the response
self.assertEqual(result.status_code, 200)
def test_tweets_status_code(self):
# sends HTTP GET request to the application
# on the specified path
result = self.app.get('/api/v2/tweets')
# assert the status code of the response
self.assertEqual(result.status_code, 200)
def test_addusers_status_code(self):
# sends HTTP POST request to the application
# on the specified path
result = self.app.post('/api/v1/users', data='{ "username":"Ovestint", "email": "[email protected]", "password": "juzahpei6e", "name":"Ronald R. Vera"}', content_type='application/json')
print (result)
# assert the status code of the response
        self.assertEqual(result.status_code, 201)
def test_updusers_status_code(self):
# sends HTTP PUT request to the application
# on the specified path
result = self.app.put('/api/v1/users/5', data='{"username":"Tagning", "email": "[email protected]"}', content_type='application/json')
# assert the status code of the response
        self.assertEqual(result.status_code, 200)
def | (self):
        # sends HTTP POST request to the application
# on the specified path
result = self.app.post('/api/v2/tweets', data='{"username":"Tagning", "body": "It Works!#Awesome"}', content_type='application/json')
# assert the status code of the response
self.assertEqual(result.status_code, 201)
def test_delusers_status_code(self):
# sends HTTP Delete request to the application
# on the specified path
result = self.app.delete('/api/v1/users', data='{"username":"Ovestint"}', content_type='application/json')
print (result)
# assert the status code of the response
        self.assertEqual(result.status_code, 200)
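# Minimal runner sketch (assumed, not part of the original file): lets the suite be
# executed directly with `python flask_test.py`.
if __name__ == '__main__':
    unittest.main()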
| test_addtweets_status_code |
0011_auto_20201008_1533.py | # Generated by Django 3.1.1 on 2020-10-08 12:33
from django.db import migrations, models
class | (migrations.Migration):
dependencies = [
('publications', '0010_auto_20201007_2250'),
]
operations = [
migrations.AddField(
model_name='publication',
name='email',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='publication',
name='facebook',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='publication',
name='icon',
field=models.ImageField(blank=True, null=True, upload_to='publications/icon/'),
),
migrations.AddField(
model_name='publication',
name='instagram',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='publication',
name='patreon',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='publication',
name='twitter',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='publication',
name='youtube',
field=models.CharField(blank=True, max_length=100, null=True),
),
]
| Migration |
main.go | package errors
import (
"fmt"
"net/http"
"github.com/getsentry/raven-go"
"github.com/go-errors/errors"
)
// FromPanic extracts the err from the result of a recover() call.
func FromPanic(rec interface{}) error {
err, ok := rec.(error)
if !ok {
err = fmt.Errorf("%s", rec)
}
return errors.Wrap(err, 4)
}
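// Hypothetical usage sketch (handler wiring assumed): FromPanic is typically called
// from a deferred recover block, e.g.
//
//	defer func() {
//		if rec := recover(); rec != nil {
//			ReportToSentry(FromPanic(rec), nil)
//		}
//	}()
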
// ReportToSentry reports err to the configured sentry server. Optionally,
// specifying a non-nil `r` will include information in the report about the
// current http request.
func ReportToSentry(err error, r *http.Request) {
st := raven.NewStacktrace(4, 3, []string{"github.org/hcnet"})
exc := raven.NewException(err, st)
var packet *raven.Packet
if r != nil {
h := raven.NewHttp(r)
packet = raven.NewPacket(err.Error(), exc, h)
} else {
packet = raven.NewPacket(err.Error(), exc)
}
raven.Capture(packet, nil)
}
// Stack returns the stack, as a string, if one can be extracted from `err`.
func | (err error) string {
if stackProvider, ok := err.(*errors.Error); ok {
return string(stackProvider.Stack())
}
return "unknown"
}
| Stack |
gethostlatency.py | #!/usr/bin/python
#
# gethostlatency Show latency for getaddrinfo/gethostbyname[2] calls.
# For Linux, uses BCC, eBPF. Embedded C.
#
# This can be useful for identifying DNS latency, by identifying which
# remote host name lookups were slow, and by how much.
#
# This uses dynamic tracing of user-level functions and registers, and may
# need modifications to match your software and processor architecture.
#
# Copyright 2016 Netflix, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 28-Jan-2016 Brendan Gregg Created this.
# 30-Mar-2016 Allan McAleavy updated for BPF_PERF_OUTPUT
from __future__ import print_function
from bcc import BPF
from time import strftime
import argparse
examples = """examples:
./gethostlatency # time getaddrinfo/gethostbyname[2] calls
./gethostlatency -p 181 # only trace PID 181
"""
parser = argparse.ArgumentParser(
description="Show latency for getaddrinfo/gethostbyname[2] calls",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-p", "--pid", help="trace this PID only", type=int,
default=-1)
parser.add_argument("--ebpf", action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
# load BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <linux/sched.h>
struct val_t {
u32 pid;
char comm[TASK_COMM_LEN];
char host[80];
u64 ts;
};
struct data_t {
u32 pid;
u64 delta;
char comm[TASK_COMM_LEN];
char host[80];
};
BPF_HASH(start, u32, struct val_t);
BPF_PERF_OUTPUT(events);
int do_entry(struct pt_regs *ctx) {
if (!PT_REGS_PARM1(ctx))
return 0;
struct val_t val = {};
u32 pid = bpf_get_current_pid_tgid();
if (bpf_get_current_comm(&val.comm, sizeof(val.comm)) == 0) {
bpf_probe_read(&val.host, sizeof(val.host),
(void *)PT_REGS_PARM1(ctx));
val.pid = bpf_get_current_pid_tgid();
val.ts = bpf_ktime_get_ns();
start.update(&pid, &val);
}
return 0;
}
int do_return(struct pt_regs *ctx) {
struct val_t *valp;
struct data_t data = {};
u64 delta;
u32 pid = bpf_get_current_pid_tgid();
u64 tsp = bpf_ktime_get_ns();
valp = start.lookup(&pid);
if (valp == 0)
return 0; // missed start
bpf_probe_read(&data.comm, sizeof(data.comm), valp->comm);
bpf_probe_read(&data.host, sizeof(data.host), (void *)valp->host);
data.pid = valp->pid;
data.delta = tsp - valp->ts;
events.perf_submit(ctx, &data, sizeof(data));
start.delete(&pid);
return 0;
}
"""
if args.ebpf:
print(bpf_text)
exit()
b = BPF(text=bpf_text)
b.attach_uprobe(name="c", sym="getaddrinfo", fn_name="do_entry", pid=args.pid)
b.attach_uprobe(name="c", sym="gethostbyname", fn_name="do_entry",
pid=args.pid)
b.attach_uprobe(name="c", sym="gethostbyname2", fn_name="do_entry",
pid=args.pid)
b.attach_uretprobe(name="c", sym="getaddrinfo", fn_name="do_return",
pid=args.pid)
b.attach_uretprobe(name="c", sym="gethostbyname", fn_name="do_return",
pid=args.pid)
b.attach_uretprobe(name="c", sym="gethostbyname2", fn_name="do_return",
pid=args.pid)
# header
print("%-9s %-6s %-16s %10s %s" % ("TIME", "PID", "COMM", "LATms", "HOST"))
def print_event(cpu, data, size):
|
# loop with callback to print_event
b["events"].open_perf_buffer(print_event)
while 1:
try:
b.perf_buffer_poll()
except KeyboardInterrupt:
exit()
| event = b["events"].event(data)
print("%-9s %-6d %-16s %10.2f %s" % (strftime("%H:%M:%S"), event.pid,
event.comm.decode('utf-8', 'replace'), (float(event.delta) / 1000000),
event.host.decode('utf-8', 'replace'))) |
index.js | import vfs from 'vinyl-fs'
import fs from 'fs'
import through from 'through2'
import path from 'path'
import which from 'which'
import { findNpm, runCmd } from '../utils'
const { join, basename } = path
export default function server(cmd, options) {
createServer(
options.init ? (cmd || path.basename(process.cwd())) : cmd,
options.init ? process.cwd() : join(process.cwd(), cmd)
)
}
function | (serverName, dest) {
var cwd = join(__dirname, '../../assets/server/template')
vfs.src(['**/*', '!node_modules/**/*'], {cwd: cwd,cwdbase: true,dot: true})
.pipe(through.obj(function (file, enc, cb) {
if (!file.stat.isFile()) {
return cb()
}
this.push(file)
cb()
}))
.pipe(vfs.dest(dest))
.on('end', async () => {
var replaceNameFiles = [
path.join(dest, 'package.json'),
]
replaceNameFiles.forEach(o => {
fs.writeFileSync(o, fs.readFileSync(o, 'utf-8').replace(/\$\{serverName\}/g, serverName))
})
var npm = findNpm()
await runCmd(which.sync(npm), ['install', 'mk-server', '--save'], dest)
console.log("OK!")
}).resume();
}
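// Hypothetical usage sketch (argument values assumed): createServer('my-app',
// path.join(process.cwd(), 'my-app')) copies the bundled template, substitutes
// ${serverName} inside package.json, then installs the mk-server dependency in the
// destination folder.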
| createServer |
utils.rs | pub enum Option<T> {
None,
Some(T)
}
///
/// `overflow_correction` function handles 16 bit overflow correction.
/// This function is to handle issues with overflow conditions when it comes
/// to the few S16 bit registers.
///
/// # Arguments
///
/// - `a` - Value to overflow correct
///
/// # Results
///
/// - `u16` value which represents an `s15` signed one's complement register.
///
pub fn overflow_correction(a: u16) -> u16 {
let newa = match 0xC000 & a {
0x8000 => a | 0xC000,
0x4000 => a & 0x3FFF,
_ => a,
};
// Return the 15 bits
newa
}
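// Worked example (values mirror the tests at the bottom of this file): a positive
// overflow such as overflow_correction(0o040000) collapses to 0o000000, while a
// negative overflow such as overflow_correction(0o100000) becomes 0o140000.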
///
/// `sign_extend` function handles extending a one's complement S15 value into
/// an S16 one's complement register.
///
/// # Arguments
///
/// - `k` is an S15 one's complement value that will be sign extended to
///    an S16 one's complement value
///
pub fn sign_extend(k: u16) -> u16 {
let bit = k & 0x4000;
if bit != 0 {
let v = k | 0x8000;
v
} else {
let v = k & 0x7FFF;
v
}
}
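// Worked example (values assumed): sign_extend(0o40000) yields 0o140000, while a
// positive value such as sign_extend(0o37777) is returned unchanged.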
///
/// Converts an S15 one's complement value into an S15 two's complement value
///
/// # Arguments
///
/// - `val` is an S15 one's complement value that will be converted to an S15
///    two's complement value
///
#[allow(dead_code)]
pub fn s15_ones_to_twos(val: u16) -> u16 {
if val & 0x4000 == 0x4000 {
(val + 1) & 0x7FFF
} else {
val & 0x7FFF
}
}
pub fn s15_add(a: u16, b: u16) -> u16 {
let mut res = a as u32 + b as u32;
if res & 0o100000 == 0o100000 {
res += 1;
}
(res & 0o77777) as u16
}
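// Worked example (values mirror s15_add_tests below): the end-around carry makes
// s15_add(0o77777, 0o77777) return 0o77777 (minus zero stays minus zero), while
// s15_add(1, 1) is simply 2.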
pub fn s16_add(a: u16, b: u16) -> u16 {
let mut res = a as u32 + b as u32;
if res & 0xFFFF0000 != 0x00000000 {
res += 1;
}
(res & 0o177777) as u16
}
pub fn _dp_add(a: u32, b: u32) -> u32 {
let mut res = a + b;
if res & 0xE0000000 != 0x0 {
res += 1;
}
res
}
pub fn cpu_to_agc_sp(cpu_val: i16) -> u16 {
if cpu_val <= 0 {
!((cpu_val * -1) as u16)
} else {
cpu_val as u16
}
}
pub fn agc_sp_to_cpu(agc_val: u16) -> i16 {
if agc_val & 0o040000 != 0 {
-(((!agc_val) & 0o037777) as i16)
} else |
}
pub fn agc_dp_to_cpu(agc_val: u32) -> i32 {
if agc_val & 0o2000000000 != 0 {
-(((!agc_val) & 0o1777777777) as i32)
} else {
(agc_val & 0o1777777777) as i32
}
}
#[cfg(test)]
mod utils_tests {
use super::*;
#[test]
fn test_overflow_correction_pos() {
for test_val in 0o040000..0o077777 {
let result = overflow_correction(test_val);
assert_eq!(test_val & 0o37777, result);
}
}
#[test]
fn test_overflow_correction_neg() {
for test_val in 0o100000..0o137777 {
let result = overflow_correction(test_val);
assert_eq!(test_val | 0o40000, result);
}
}
#[test]
///
/// Tests the `sign_extend` function to check to see that there is no
/// sign extension for positive values.
///
fn test_sign_extend_positive() {
for test_val in 0o000000..=0o037777 {
let result = sign_extend(test_val);
assert_eq!(
test_val, result,
"Failed sign extension: Expected: {:o} | Result: {:o}",
test_val, result
);
}
}
#[test]
///
/// Tests the `sign_extend` function to check to see that there is a proper
/// sign extension.
///
fn test_sign_extend_negative() {
for test_val in 0o040000..=0o077777 {
let result = sign_extend(test_val);
assert_eq!(
test_val | 0o100000,
result,
"Failed sign extension: Expected: {:o} | Result: {:o}",
test_val,
result
);
}
}
#[test]
///
/// `s15_ones_to_twos` test to check the positive signed values
    /// are being properly converted from one's complement to two's
    /// complement.
///
/// The test will check all positive values for one's complement
    /// s15 from 0o00000 to 0o37777 (bit 15 set to 0)
///
fn s15_ones_to_twos_test_positive() {
for test_val in 0o00000..=0o37777 {
let result = s15_ones_to_twos(test_val);
assert_eq!(
test_val, result,
"s15_ones_to_twos failed. Expected {:o} | Result: {:o}",
test_val, result
);
}
}
#[test]
///
/// `s15_ones_to_twos` test to check the negative signed values
    /// are being properly converted from one's complement to two's
    /// complement.
///
/// The test will check all negative values for one's complement
/// s15 from 0o40000 to 0o77777 (bit 15 set to 1)
///
fn s15_ones_to_twos_test_negative() {
for test_val in 0o40000..=0o77777 {
let result = s15_ones_to_twos(test_val);
assert_eq!(
(test_val + 1) & 0o77777,
result,
"s15_ones_to_twos failed. Expected {:o} | Result: {:o}",
test_val,
result
);
}
}
#[test]
///
    /// Testing `s15_add` function to handle all the one's complement signed
/// 15 bit additions.
///
fn s15_add_tests() {
let test_vals = [
// Test the zero case
(0o77777, 0o77777, 0o77777),
(0, 0, 0),
// Test the basic math cases
(1, 1, 2),
(0o77776, 0o77776, 0o77775),
// IR: 47ff | INDEX: 3809 | Result: 0009
(0x47ff, 0x3809, 0x0009),
];
for (a, b, expect) in test_vals.iter() {
let res = s15_add(*a, *b);
assert_eq!(
*expect, res,
"Failed S15 Addition: {:o} + {:o} = {:o}, Result: {:o}",
a, b, expect, res
);
}
}
}
| {
(agc_val & 0o37777) as i16
} |