| file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k) |
---|---|---|---|
lib.rs | // Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! Macros needed by the `mz_lowertest` crate.
//!
//! TODO: eliminate macros in favor of using `walkabout`?
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use syn::{parse, Data, DeriveInput, Fields};
/// Types defined outside of Materialize used to build test objects.
const EXTERNAL_TYPES: &[&str] = &["String", "FixedOffset", "Tz", "NaiveDateTime", "Regex"];
const SUPPORTED_ANGLE_TYPES: &[&str] = &["Vec", "Box", "Option"];
/// Macro generating an implementation for the trait MzReflect
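///
/// As an illustrative sketch (the type here is hypothetical): deriving
/// `MzReflect` on `struct AddExpr { lhs: usize, rhs: usize }` registers
/// `"AddExpr"` in `rti.struct_dict` with the field names `["lhs", "rhs"]`
/// and the field type strings `["usize", "usize"]`; for an enum, each
/// variant gets an entry in `rti.enum_dict` instead.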
#[proc_macro_derive(MzReflect, attributes(mzreflect))]
pub fn mzreflect_derive(input: TokenStream) -> TokenStream {
// The intended trait implementation is
// ```
// impl MzReflect for #name {
// /// Adds the information required to create an object of this type
// /// to `enum_dict` if it is an enum and to `struct_dict` if it is a
// /// struct.
// fn add_to_reflected_type_info(
// rti: &mut mz_lowertest::ReflectedTypeInfo
// )
// {
// // if the object is an enum
// if rti.enum_dict.contains_key(#name) { return; }
// use std::collections::HashMap;
// let mut result = HashMap::new();
// // repeat line below for all variants
// result.insert(variant_name, (<field_names>, <field_types>));
// rti.enum_dict.insert(<enum_name>, result);
//
// // if the object is a struct
// if rti.struct_dict.contains_key(#name) { return ; }
// rti.struct_dict.insert(#name, (<field_names>, <field_types>));
//
// // for all object types, repeat line below for each field type
// // that should be recursively added to the reflected type info
// <field_type>::add_to_reflected_type_info(rti);
// }
// }
// ```
let ast: DeriveInput = parse(input).unwrap();
let object_name = &ast.ident;
let object_name_as_string = object_name.to_string();
let mut referenced_types = Vec::new();
let add_object_info = if let Data::Enum(enumdata) = &ast.data {
let variants = enumdata
.variants
.iter()
.map(|v| {
let variant_name = v.ident.to_string();
let (names, types_as_string, mut types_as_syn) = get_fields_names_types(&v.fields);
referenced_types.append(&mut types_as_syn);
quote! {
result.insert(#variant_name, (vec![#(#names),*], vec![#(#types_as_string),*]));
}
})
.collect::<Vec<_>>();
quote! {
if rti.enum_dict.contains_key(#object_name_as_string) { return; }
use std::collections::HashMap;
let mut result = HashMap::new();
#(#variants)*
rti.enum_dict.insert(#object_name_as_string, result);
}
} else if let Data::Struct(structdata) = &ast.data {
let (names, types_as_string, mut types_as_syn) = get_fields_names_types(&structdata.fields);
referenced_types.append(&mut types_as_syn);
quote! {
if rti.struct_dict.contains_key(#object_name_as_string) { return; }
rti.struct_dict.insert(#object_name_as_string,
(vec![#(#names),*], vec![#(#types_as_string),*]));
}
} else {
unreachable!("Not a struct or enum")
};
let referenced_types = referenced_types
.into_iter()
.flat_map(extract_reflected_type)
.map(|typ| quote! { #typ::add_to_reflected_type_info(rti); })
.collect::<Vec<_>>();
let gen = quote! {
impl mz_lowertest::MzReflect for #object_name {
fn add_to_reflected_type_info(
rti: &mut mz_lowertest::ReflectedTypeInfo
)
{
#add_object_info
#(#referenced_types)*
}
}
};
gen.into()
}
/* #region Helper methods */
/// Gets the names and the types of the fields of an enum variant or struct.
///
/// The result has three parts:
/// 1. The names of the fields. If the fields are unnamed, this is empty.
/// 2. The types of the fields as strings.
/// 3. The types of the fields as [syn::Type]
///
/// Fields with the attribute `#[mzreflect(ignore)]` are not returned.
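///
/// For example (hypothetical struct), `struct Coord { x: i32, #[mzreflect(ignore)] cached: bool }`
/// yields the names `["x"]`, the type strings `["i32"]`, and the matching
/// [syn::Type] values; the `cached` field is skipped because of the ignore
/// attribute.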
fn get_fields_names_types(f: &syn::Fields) -> (Vec<String>, Vec<String>, Vec<&syn::Type>) {
match f {
Fields::Named(named_fields) => {
let (names, types): (Vec<_>, Vec<_>) = named_fields
.named
.iter()
.flat_map(get_field_name_type)
.unzip();
let (types_as_string, types_as_syn) = types.into_iter().unzip();
(names, types_as_string, types_as_syn)
}
Fields::Unnamed(unnamed_fields) => {
let (types_as_string, types_as_syn): (Vec<_>, Vec<_>) = unnamed_fields
.unnamed
.iter()
.flat_map(get_field_name_type)
.map(|(_, (type_as_string, type_as_syn))| (type_as_string, type_as_syn))
.unzip();
(Vec::new(), types_as_string, types_as_syn)
}
Fields::Unit => (Vec::new(), Vec::new(), Vec::new()),
}
}
/// Gets the name and the type of a field of an enum variant or struct.
///
/// The result has three parts:
/// 1. The name of the field. If the field is unnamed, this is empty.
/// 2. The type of the field as a string.
/// 3. The type of the field as [syn::Type].
///
/// Returns None if the field has the attribute `#[mzreflect(ignore)]`.
fn get_field_name_type(f: &syn::Field) -> Option<(String, (String, &syn::Type))> {
for attr in f.attrs.iter() {
if let Ok(syn::Meta::List(meta_list)) = attr.parse_meta() {
if meta_list.path.segments.last().unwrap().ident == "mzreflect" {
for nested_meta in meta_list.nested.iter() {
if let syn::NestedMeta::Meta(syn::Meta::Path(path)) = nested_meta {
if path.segments.last().unwrap().ident == "ignore" {
return None;
}
}
}
}
}
}
let name = if let Some(name) = f.ident.as_ref() {
name.to_string()
} else {
"".to_string()
};
Some((name, (get_type_as_string(&f.ty), &f.ty)))
}
/// Gets the type name from the [`syn::Type`] object
fn get_type_as_string(t: &syn::Type) -> String |
/// If `t` is a supported type, extracts from `t` types defined in a
/// Materialize package.
///
/// Returns an empty vector if `t` is of an unsupported type.
///
/// Supported types are:
/// A plain path type A -> extracts A
/// Box<A>, Vec<A>, Option<A> -> extracts A
/// Tuple (A, (B, C)) -> extracts A, B, C.
/// A, B, and C are omitted from the results if they are primitive types or
/// listed in [EXTERNAL_TYPES].
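///
/// For instance (illustrative), `Vec<Box<MirScalarExpr>>` extracts
/// `MirScalarExpr`, while `Option<String>` and `(usize, bool)` extract
/// nothing, since `String` is in [EXTERNAL_TYPES] and `usize`/`bool` are
/// primitives.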
fn extract_reflected_type(t: &syn::Type) -> Vec<&syn::Type> {
match t {
syn::Type::Group(tg) => {
return extract_reflected_type(&tg.elem);
}
syn::Type::Path(tp) => {
let last_segment = tp.path.segments.last().unwrap();
let type_name = last_segment.ident.to_string();
match &last_segment.arguments {
syn::PathArguments::None => {
if EXTERNAL_TYPES.contains(&&type_name[..])
|| type_name.starts_with(|c: char| c.is_lowercase())
{
// Ignore primitive types and types defined outside of Materialize.
return Vec::new();
} else {
return vec![t];
}
}
syn::PathArguments::AngleBracketed(args) => {
if SUPPORTED_ANGLE_TYPES.contains(&&type_name[..]) {
return args
.args
.iter()
.flat_map(|arg| {
if let syn::GenericArgument::Type(typ) = arg {
extract_reflected_type(typ)
} else {
Vec::new()
}
})
.collect::<Vec<_>>();
}
}
_ => {}
}
}
syn::Type::Tuple(tt) => {
return tt
.elems
.iter()
.flat_map(extract_reflected_type)
.collect::<Vec<_>>();
}
_ => {}
}
Vec::new()
}
/* #endregion */
| {
// convert type back into a token stream and then into a string
let mut token_stream = proc_macro2::TokenStream::new();
t.to_tokens(&mut token_stream);
token_stream.to_string()
} |
shared_mempool.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
core_mempool::{CoreMempool, TimelineState, TxnPointer},
counters,
};
use admission_control_proto::{
proto::admission_control::{
submit_transaction_response::Status, AdmissionControlStatusCode, SubmitTransactionRequest,
SubmitTransactionResponse,
},
AdmissionControlStatus,
};
use anyhow::{format_err, Result};
use bounded_executor::BoundedExecutor;
use futures::{
channel::{
mpsc::{self, Receiver, UnboundedSender},
oneshot,
},
future::join_all,
stream::select_all,
Stream, StreamExt,
};
use libra_config::config::{MempoolConfig, NodeConfig};
use libra_logger::prelude::*;
use libra_mempool_shared_proto::proto::mempool_status::{
MempoolAddTransactionStatus as MempoolAddTransactionStatusProto,
MempoolAddTransactionStatusCode,
};
use libra_types::{
account_address::AccountAddress,
proto::types::{SignedTransaction as SignedTransactionProto, VmStatus as VmStatusProto},
transaction::SignedTransaction,
vm_error::{StatusCode::RESOURCE_DOES_NOT_EXIST, VMStatus},
PeerId,
};
use network::{
proto::MempoolSyncMsg,
validator_network::{Event, MempoolNetworkEvents, MempoolNetworkSender},
};
use std::{
cmp,
collections::{HashMap, HashSet},
convert::{TryFrom, TryInto},
ops::Deref,
pin::Pin,
sync::{Arc, Mutex},
time::Duration,
};
use storage_client::{StorageRead, StorageReadServiceClient};
use tokio::{
runtime::{Builder, Handle, Runtime},
time::interval,
};
use vm_validator::vm_validator::{get_account_state, TransactionValidation, VMValidator};
/// State of the last sync with a peer.
/// `timeline_id` is the position in the log of ready transactions.
/// `is_alive` indicates whether the connection is healthy.
#[derive(Clone)]
struct PeerSyncState {
timeline_id: u64,
is_alive: bool,
network_id: PeerId,
}
/// stores only peers that receive txns from this node
type PeerInfo = HashMap<PeerId, PeerSyncState>;
/// Outbound peer syncing event emitted by [`IntervalStream`].
#[derive(Debug)]
pub(crate) struct SyncEvent;
type IntervalStream = Pin<Box<dyn Stream<Item = SyncEvent> + Send + 'static>>;
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum SharedMempoolNotification {
Sync,
PeerStateChange,
NewTransactions,
}
/// Struct that owns all dependencies required by shared mempool routines
#[derive(Clone)]
struct SharedMempool<V>
where
V: TransactionValidation + 'static,
{
mempool: Arc<Mutex<CoreMempool>>,
config: MempoolConfig,
network_senders: HashMap<PeerId, MempoolNetworkSender>,
storage_read_client: Arc<dyn StorageRead>,
validator: Arc<V>,
peer_info: Arc<Mutex<PeerInfo>>,
subscribers: Vec<UnboundedSender<SharedMempoolNotification>>,
}
/// Message sent from consensus to mempool
pub enum ConsensusRequest {
/// request to pull block to submit to consensus
GetBlockRequest(
// max block size
u64,
// transactions to exclude from requested block
Vec<TransactionExclusion>,
// callback to send response back to sender
oneshot::Sender<Result<ConsensusResponse>>,
),
/// notifications about *rejected* committed txns
RejectNotification(
// committed transactions
Vec<CommittedTransaction>,
// callback to send response back to sender
oneshot::Sender<Result<ConsensusResponse>>,
),
}
/// Response sent from mempool to consensus
pub enum ConsensusResponse {
/// block to submit to consensus
GetBlockResponse(
// transactions in block
Vec<SignedTransaction>,
),
/// ACK for commit notification
CommitResponse(),
}
/// notification from state sync to mempool of commit event
/// This notifies mempool to remove committed txns
pub struct CommitNotification {
/// committed transactions
pub transactions: Vec<CommittedTransaction>,
/// timestamp of committed block
pub block_timestamp_usecs: u64,
/// callback to send back response from mempool to State Sync
pub callback: oneshot::Sender<Result<CommitResponse>>,
}
/// ACK response to commit notification
#[derive(Debug)]
pub struct CommitResponse {
/// error msg if applicable - empty string if commit was processed successfully by mempool
pub msg: String,
}
/// successfully executed and committed txn
pub struct CommittedTransaction {
/// sender
pub sender: AccountAddress,
/// sequence number
pub sequence_number: u64,
}
/// excluded txn
pub struct TransactionExclusion {
/// sender
pub sender: AccountAddress,
/// sequence number
pub sequence_number: u64,
}
fn notify_subscribers(
event: SharedMempoolNotification,
subscribers: &[UnboundedSender<SharedMempoolNotification>],
) {
for subscriber in subscribers {
let _ = subscriber.unbounded_send(event);
}
}
fn default_timer(tick_ms: u64) -> IntervalStream {
interval(Duration::from_millis(tick_ms))
.map(|_| SyncEvent)
.boxed()
}
/// new peer discovery handler
/// adds new entry to `peer_info`
/// `network_id` is the ID of the mempool network the peer belongs to
fn new_peer(peer_info: &Mutex<PeerInfo>, peer_id: PeerId, network_id: PeerId) {
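    // Note: for a reconnecting peer, `or_insert` keeps the existing entry
    // (and therefore its `timeline_id`); only the liveness flag is reset.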
peer_info
.lock()
.expect("[shared mempool] failed to acquire peer_info lock")
.entry(peer_id)
.or_insert(PeerSyncState {
timeline_id: 0,
is_alive: true,
network_id,
})
.is_alive = true;
}
/// lost peer handler. Marks connection as dead
fn lost_peer(peer_info: &Mutex<PeerInfo>, peer_id: PeerId) {
if let Some(state) = peer_info
.lock()
.expect("[shared mempool] failed to acquire peer_info lock")
.get_mut(&peer_id)
{
state.is_alive = false;
}
}
/// sync routine
/// used to periodically broadcast ready to go transactions to peers
async fn sync_with_peers<'a>(
peer_info: &'a Mutex<PeerInfo>,
mempool: &'a Mutex<CoreMempool>,
mut network_senders: HashMap<PeerId, MempoolNetworkSender>,
batch_size: usize,
) {
// Clone the underlying peer_info map and use this to sync and collect
// state updates. We do this instead of holding the lock for the whole
// function since that would hold the lock across await points which is bad.
let peer_info_copy = peer_info
.lock()
.expect("[shared mempool] failed to acquire peer_info lock")
.deref()
.clone();
let mut state_updates = vec![];
for (peer_id, peer_state) in peer_info_copy.into_iter() {
if peer_state.is_alive {
let timeline_id = peer_state.timeline_id;
let (transactions, new_timeline_id) = mempool
.lock()
.expect("[shared mempool] failed to acquire mempool lock")
.read_timeline(timeline_id, batch_size);
if !transactions.is_empty() {
counters::SHARED_MEMPOOL_TRANSACTION_BROADCAST.inc_by(transactions.len() as i64);
let mut msg = MempoolSyncMsg::default();
msg.peer_id = peer_id.into();
msg.transactions = transactions
.into_iter()
.map(|txn| txn.try_into().unwrap())
.collect();
// Since this is a direct-send, this will only error if the network
// module has unexpectedly crashed or shutdown.
let network_sender = network_senders.get_mut(&peer_state.network_id).unwrap();
network_sender
.clone()
.send_to(peer_id, msg)
.expect("[shared mempool] failed to direct-send mempool sync message");
}
state_updates.push((peer_id, new_timeline_id));
}
}
// Lock the shared peer_info and apply state updates.
let mut peer_info = peer_info
.lock()
.expect("[shared mempool] failed to acquire peer_info lock");
for (peer_id, new_timeline_id) in state_updates {
peer_info
.entry(peer_id)
.and_modify(|t| t.timeline_id = new_timeline_id);
}
}
fn convert_txn_from_proto(txn_proto: SignedTransactionProto) -> Option<SignedTransaction> {
match SignedTransaction::try_from(txn_proto.clone()) {
Ok(txn) => Some(txn),
Err(e) => {
security_log(SecurityEvent::InvalidTransactionMP)
.error(&e)
.data(&txn_proto)
.log();
None
}
}
}
/// Submits a list of SignedTransactions to the local mempool
/// and returns a vector of per-transaction `Status` results
async fn process_incoming_transactions<V>(
smp: SharedMempool<V>,
transactions: Vec<SignedTransaction>,
timeline_state: TimelineState,
) -> Vec<Status>
where
V: TransactionValidation,
{
let mut statuses = vec![];
let account_states = join_all(
transactions
.iter()
.map(|t| get_account_state(smp.storage_read_client.clone(), t.sender())),
)
.await;
// eagerly filter out transactions that were already committed
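    // (an account's stored sequence number is the next one it expects, so an
    // incoming txn with a smaller sequence number must already be committed)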
let transactions: Vec<_> = transactions
.into_iter()
.enumerate()
.filter_map(|(idx, t)| {
if let Ok((sequence_number, balance)) = account_states[idx] {
if t.sequence_number() >= sequence_number {
return Some((t, sequence_number, balance));
}
} else {
// failed to get the sender's account state
statuses.push(Status::VmStatus(VmStatusProto::from(
VMStatus::new(RESOURCE_DOES_NOT_EXIST)
.with_message("[shared mempool] failed to get account state".to_string()),
)));
}
None
})
.collect();
let validations = join_all(
transactions |
{
let mut mempool = smp
.mempool
.lock()
.expect("[shared mempool] failed to acquire mempool lock");
for (idx, (transaction, sequence_number, balance)) in transactions.into_iter().enumerate() {
if let Ok(None) = validations[idx] {
let gas_cost = transaction.max_gas_amount();
let mempool_status = mempool.add_txn(
transaction,
gas_cost,
sequence_number,
balance,
timeline_state,
);
if mempool_status.code == MempoolAddTransactionStatusCode::Valid {
statuses.push(Status::AcStatus(AdmissionControlStatus::Accepted.into()));
} else {
statuses.push(Status::MempoolStatus(
MempoolAddTransactionStatusProto::from(mempool_status),
));
}
} else if let Ok(Some(validation_status)) = &validations[idx] {
statuses.push(Status::VmStatus(VmStatusProto::from(
validation_status.clone(),
)));
}
}
}
notify_subscribers(SharedMempoolNotification::NewTransactions, &smp.subscribers);
statuses
}
async fn process_client_transaction_submission<V>(
smp: SharedMempool<V>,
req: SubmitTransactionRequest,
callback: oneshot::Sender<Result<SubmitTransactionResponse>>,
) where
V: TransactionValidation,
{
let mut response = SubmitTransactionResponse::default();
let txn_proto = req
.transaction
.clone()
.unwrap_or_else(SignedTransactionProto::default);
// get status from attempt to submit txns
match convert_txn_from_proto(txn_proto) {
None => {
response.status = Some(Status::AcStatus(
AdmissionControlStatus::Rejected("submit txn rejected".to_string()).into(),
));
}
Some(txn) => {
let mut statuses =
process_incoming_transactions(smp.clone(), vec![txn], TimelineState::NotReady)
.await;
log_txn_process_results(statuses.clone(), None);
if statuses.is_empty() {
error!("[shared mempool] unexpected error happened");
} else {
response.status = Some(statuses.remove(0));
}
}
}
if let Err(e) = callback
.send(Ok(response))
.map_err(|_| format_err!("[shared mempool] timeout on callback send to AC endpoint"))
{
error!("[shared mempool] failed to send back transaction submission result to AC endpoint with error: {:?}", e);
}
}
fn log_txn_process_results(results: Vec<Status>, sender: Option<PeerId>) {
let sender = match sender {
Some(peer) => peer.to_string(),
None => "client".to_string(),
};
for result in results.iter() {
match result {
Status::AcStatus(ac_status) => {
// log success
if ac_status.code() == AdmissionControlStatusCode::Accepted {
counters::SHARED_MEMPOOL_TRANSACTIONS_PROCESSED
.with_label_values(&["success".to_string().deref(), &sender])
.inc();
}
}
Status::VmStatus(_) => {
// log vm validation failure
counters::SHARED_MEMPOOL_TRANSACTIONS_PROCESSED
.with_label_values(&["validation_failed".to_string().deref(), &sender])
.inc();
}
Status::MempoolStatus(mempool_status_proto) => {
// log mempool status failure
counters::SHARED_MEMPOOL_TRANSACTIONS_PROCESSED
.with_label_values(&[
format!("{:?}", mempool_status_proto.code).deref(),
&sender,
])
.inc();
}
}
}
}
async fn process_transaction_broadcast<V>(
smp: SharedMempool<V>,
transactions: Vec<SignedTransaction>,
timeline_state: TimelineState,
peer_id: PeerId,
) where
V: TransactionValidation,
{
let results = process_incoming_transactions(smp, transactions, timeline_state).await;
log_txn_process_results(results, Some(peer_id));
}
/// This task handles [`SyncEvent`], which is periodically emitted for us to
/// broadcast ready to go transactions to peers.
async fn outbound_sync_task<V>(smp: SharedMempool<V>, mut interval: IntervalStream)
where
V: TransactionValidation,
{
let peer_info = smp.peer_info;
let mempool = smp.mempool;
let network_senders = smp.network_senders;
let batch_size = smp.config.shared_mempool_batch_size;
let subscribers = smp.subscribers;
while let Some(sync_event) = interval.next().await {
trace!("SyncEvent: {:?}", sync_event);
sync_with_peers(&peer_info, &mempool, network_senders.clone(), batch_size).await;
notify_subscribers(SharedMempoolNotification::Sync, &subscribers);
}
crit!("SharedMempool outbound_sync_task terminated");
}
async fn commit_txns<V>(
smp: SharedMempool<V>,
transactions: Vec<CommittedTransaction>,
block_timestamp_usecs: u64,
is_rejected: bool,
) where
V: TransactionValidation,
{
let mut pool = smp
.mempool
.lock()
.expect("[shared mempool] failed to get mempool lock");
for transaction in transactions {
pool.remove_transaction(
&transaction.sender,
transaction.sequence_number,
is_rejected,
);
}
if block_timestamp_usecs > 0 {
pool.gc_by_expiration_time(Duration::from_micros(block_timestamp_usecs));
}
}
async fn process_state_sync_request<V>(smp: SharedMempool<V>, req: CommitNotification)
where
V: TransactionValidation,
{
commit_txns(smp, req.transactions, req.block_timestamp_usecs, false).await;
// send back to callback
if let Err(e) = req
.callback
.send(Ok(CommitResponse {
msg: "".to_string(),
}))
.map_err(|_| {
format_err!("[shared mempool] timeout on callback sending response to Mempool request")
})
{
error!(
"[shared mempool] failed to send back CommitResponse with error: {:?}",
e
);
}
}
async fn process_consensus_request<V>(smp: SharedMempool<V>, req: ConsensusRequest)
where
V: TransactionValidation,
{
let (resp, callback) = match req {
ConsensusRequest::GetBlockRequest(max_block_size, transactions, callback) => {
let block_size = cmp::max(max_block_size, 1);
counters::MEMPOOL_SERVICE
.with_label_values(&["get_block", "requested"])
.inc_by(block_size as i64);
let exclude_transactions: HashSet<TxnPointer> = transactions
.iter()
.map(|txn| (txn.sender, txn.sequence_number))
.collect();
let mut txns = smp
.mempool
.lock()
.expect("[get_block] acquire mempool lock")
.get_block(block_size, exclude_transactions);
let transactions = txns.drain(..).map(SignedTransaction::into).collect();
(ConsensusResponse::GetBlockResponse(transactions), callback)
}
ConsensusRequest::RejectNotification(transactions, callback) => {
// handle rejected txns
commit_txns(smp, transactions, 0, true).await;
(ConsensusResponse::CommitResponse(), callback)
}
};
// send back to callback
if let Err(e) = callback.send(Ok(resp)).map_err(|_| {
format_err!("[shared mempool] timeout on callback sending response to Mempool request")
}) {
error!(
"[shared mempool] failed to send back mempool response with error: {:?}",
e
);
}
}
/// This task handles inbound network events.
async fn inbound_network_task<V>(
smp: SharedMempool<V>,
executor: Handle,
network_events: Vec<(PeerId, MempoolNetworkEvents)>,
mut client_events: mpsc::Receiver<(
SubmitTransactionRequest,
oneshot::Sender<Result<SubmitTransactionResponse>>,
)>,
mut consensus_requests: mpsc::Receiver<ConsensusRequest>,
mut state_sync_requests: mpsc::Receiver<CommitNotification>,
node_config: NodeConfig,
) where
V: TransactionValidation,
{
let peer_info = smp.peer_info.clone();
let subscribers = smp.subscribers.clone();
let smp_events: Vec<_> = network_events
.into_iter()
.map(|(network_id, events)| events.map(move |e| (network_id, e)))
.collect();
let mut events = select_all(smp_events).fuse();
// Use a BoundedExecutor to allow at most `workers_available` concurrent
// worker tasks to process incoming transactions.
let workers_available = smp.config.shared_mempool_max_concurrent_inbound_syncs;
let bounded_executor = BoundedExecutor::new(workers_available, executor);
loop {
::futures::select! {
(mut msg, callback) = client_events.select_next_some() => {
bounded_executor
.spawn(process_client_transaction_submission(
smp.clone(),
msg,
callback,
))
.await;
},
msg = consensus_requests.select_next_some() => {
process_consensus_request(smp.clone(), msg).await;
}
msg = state_sync_requests.select_next_some() => {
tokio::spawn(process_state_sync_request(smp.clone(), msg));
},
(network_id, event) = events.select_next_some() => {
match event {
Ok(network_event) => {
match network_event {
Event::NewPeer(peer_id) => {
counters::SHARED_MEMPOOL_EVENTS
.with_label_values(&["new_peer".to_string().deref()])
.inc();
if node_config.is_upstream_peer(peer_id, network_id) {
new_peer(&peer_info, peer_id, network_id);
}
notify_subscribers(SharedMempoolNotification::PeerStateChange, &subscribers);
}
Event::LostPeer(peer_id) => {
counters::SHARED_MEMPOOL_EVENTS
.with_label_values(&["lost_peer".to_string().deref()])
.inc();
if node_config.is_upstream_peer(peer_id, network_id) {
lost_peer(&peer_info, peer_id);
}
notify_subscribers(SharedMempoolNotification::PeerStateChange, &subscribers);
}
Event::Message((peer_id, msg)) => {
counters::SHARED_MEMPOOL_EVENTS
.with_label_values(&["message".to_string().deref()])
.inc();
let transactions: Vec<_> = msg
.transactions
.clone()
.into_iter()
.filter_map(|txn| convert_txn_from_proto(txn))
.collect();
counters::SHARED_MEMPOOL_TRANSACTIONS_PROCESSED
.with_label_values(&["received".to_string().deref(), peer_id.to_string().deref()])
.inc_by(transactions.len() as i64);
let smp_clone = smp.clone();
let timeline_state = match node_config.is_upstream_peer(peer_id, network_id) {
true => TimelineState::NonQualified,
false => TimelineState::NotReady,
};
bounded_executor
.spawn(process_transaction_broadcast(
smp_clone,
transactions,
timeline_state,
peer_id
))
.await;
}
_ => {
security_log(SecurityEvent::InvalidNetworkEventMP)
.error("UnexpectedNetworkEvent")
.data(&network_event)
.log();
debug_assert!(false, "Unexpected network event");
}
}
},
Err(e) => {
security_log(SecurityEvent::InvalidNetworkEventMP)
.error(&e)
.log();
}
};
},
complete => break,
}
}
crit!("[shared mempool] inbound_network_task terminated");
}
/// GC all expired transactions by SystemTTL
async fn gc_task(mempool: Arc<Mutex<CoreMempool>>, gc_interval_ms: u64) {
let mut interval = interval(Duration::from_millis(gc_interval_ms));
while let Some(_interval) = interval.next().await {
mempool
.lock()
.expect("[shared mempool] failed to acquire mempool lock")
.gc_by_system_ttl();
}
crit!("SharedMempool gc_task terminated");
}
/// bootstrap of SharedMempool
/// creates a separate Tokio Runtime that runs the following routines:
/// - outbound_sync_task (task that periodically broadcasts transactions to peers)
/// - inbound_network_task (task that handles inbound mempool messages and network events)
/// - gc_task (task that performs GC of all expired transactions by SystemTTL)
pub(crate) fn start_shared_mempool<V>(
executor: &Handle,
config: &NodeConfig,
mempool: Arc<Mutex<CoreMempool>>,
// First element in tuple is the network ID
// See `NodeConfig::is_upstream_peer` for the definition of network ID
mempool_network_handles: Vec<(PeerId, MempoolNetworkSender, MempoolNetworkEvents)>,
client_events: mpsc::Receiver<(
SubmitTransactionRequest,
oneshot::Sender<Result<SubmitTransactionResponse>>,
)>,
consensus_requests: mpsc::Receiver<ConsensusRequest>,
state_sync_requests: mpsc::Receiver<CommitNotification>,
storage_read_client: Arc<dyn StorageRead>,
validator: Arc<V>,
subscribers: Vec<UnboundedSender<SharedMempoolNotification>>,
timer: Option<IntervalStream>,
) where
V: TransactionValidation + 'static,
{
let peer_info = Arc::new(Mutex::new(PeerInfo::new()));
let config_clone = config.clone_for_template();
let mut all_network_events = vec![];
let mut network_senders = HashMap::new();
for (network_id, network_sender, network_events) in mempool_network_handles.into_iter() {
all_network_events.push((network_id, network_events));
network_senders.insert(network_id, network_sender);
}
let smp = SharedMempool {
mempool: mempool.clone(),
config: config.mempool.clone(),
network_senders,
storage_read_client,
validator,
peer_info,
subscribers,
};
let interval_ms = config.mempool.shared_mempool_tick_interval_ms;
let smp_outbound = smp.clone();
let f = async move {
let interval = timer.unwrap_or_else(|| default_timer(interval_ms));
outbound_sync_task(smp_outbound, interval).await
};
executor.spawn(f);
executor.spawn(inbound_network_task(
smp,
executor.clone(),
all_network_events,
client_events,
consensus_requests,
state_sync_requests,
config_clone,
));
executor.spawn(gc_task(
mempool,
config.mempool.system_transaction_gc_interval_ms,
));
}
/// method used to bootstrap shared mempool for a node
pub fn bootstrap(
config: &NodeConfig,
// The first element in the tuple is the ID of the network that this network is a handle to
// See `NodeConfig::is_upstream_peer` for the definition of network ID
mempool_network_handles: Vec<(PeerId, MempoolNetworkSender, MempoolNetworkEvents)>,
client_events: Receiver<(
SubmitTransactionRequest,
oneshot::Sender<Result<SubmitTransactionResponse>>,
)>,
consensus_requests: Receiver<ConsensusRequest>,
state_sync_requests: Receiver<CommitNotification>,
) -> Runtime {
let runtime = Builder::new()
.thread_name("shared-mem-")
.threaded_scheduler()
.enable_all()
.build()
.expect("[shared mempool] failed to create runtime");
let executor = runtime.handle();
let mempool = Arc::new(Mutex::new(CoreMempool::new(&config)));
let storage_client: Arc<dyn StorageRead> =
Arc::new(StorageReadServiceClient::new(&config.storage.address));
let vm_validator = Arc::new(VMValidator::new(
&config,
Arc::clone(&storage_client),
executor.clone(),
));
start_shared_mempool(
runtime.handle(),
config,
mempool,
mempool_network_handles,
client_events,
consensus_requests,
state_sync_requests,
storage_client,
vm_validator,
vec![],
None,
);
runtime
} | .iter()
.map(|t| smp.validator.validate_transaction(t.0.clone())),
)
.await; |
html2json.py | from bs4 import BeautifulSoup
import os
import json
import datetime
import random
import difflib
import time
import openWeather as OW
def unix_time(dt):
epoch = datetime.datetime.utcfromtimestamp(0)
delta = dt - epoch | # add on some random milliseconds to the end so we don't get exact matches
# add on hours for difference between MST and UTC
ofst = time.altzone/3600
return int(unix_time(dt) * 1000.0) + ofst*60*60*1000 + random.randint(0,1000)
def check_for_duplicate(millis, dir):
# my export had duplicates, so maybe others will have them as well?
files = os.listdir(dir)
for file in files:
#get the stuff before the '-'
short = int(file.strip('-').split('-')[0])
# if we are within 1000 ms then these are duplicates
if abs(abs(millis) - short) <= 1000:
return True
return False
#fname = sys.argv[1]
#output = sys.argv[2]
fname = 'memories.html'
outdir = 'output/'
timezone = 'MST'
with open (fname, "r") as myfile:
data=myfile.readlines()
for line in data:
if line.startswith('\t<p> </p>\t'):
entryJSON = {}
line_soup = BeautifulSoup(line,'html.parser')
# force to UTC timezone so the millis calculation works out correctly
dateText = line_soup.select('p.sDateCreated')[0].getText() + ' ' + timezone
dateTime = datetime.datetime.strptime(dateText, '%A, %B %d, %Y at %H:%M %Z')
milli = unix_time_millis(dateTime, timezone)
# check to see if this is a duplicate entry
if check_for_duplicate(milli, outdir):
print('duplicate -- skipping')
continue
randKey = ''.join(random.choice('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') for i in range(16))
entryJSON['date_journal'] = milli
entryJSON['date_modified'] = milli + 1000*120 #make this up, 2 minutes
id = str(milli) + '-' + randKey
entryJSON['id'] = id
entryJSON['music_artist'] = ''
entryJSON['music_title'] = ''
entryText = line_soup.select('span.sNote')[0].getText('\n\n',strip=True).replace('"','\"')
entryJSON['text'] = entryText
if len(entryText) > 512:
prevEnd = 512
else:
prevEnd = len(entryText)
entryJSON['preview_text']=entryText[0:prevEnd]
entryJSON['mood']=0
# I will do photos by hand, because Journey can only handle 1... I don't want to make that choice with an algorithm
entryJSON['photos'] = []
# get list of tags, lowercase and no spaces
tagText = line_soup.select('span.sTags')[0].getText().lower().split(',')
tags = [x.strip().replace(' ','') for x in tagText]
if len(tags) > 1:
entryJSON['tags'] = tags
else:
if tags[0] == '':
entryJSON['tags'] = []
else:
entryJSON['tags'] = tags
geoText = line_soup.select('p.sLocation')[0].get_text('&&').split('&&')
entryJSON['address'] = geoText[1]
# default weather information
entryJSON['weather']={'id':-1, 'icon':'','place':'','degree_c':1.7976931348623157E308, 'description':''}
entryJSON['lat'] = 1.7976931348623157E308
entryJSON['lon'] = 1.7976931348623157E308
# If there is data for weather, lat/lon etc..
if len(geoText) == 8:
lat, lon, ele = [float(x) for x in geoText[3].replace(',',' ').replace('alt:',' ').replace('m','').split()]
entryJSON['lat'] = lat * 10**4
entryJSON['lon'] = lon * 10**4
weatherText = geoText[5]
degC = float(weatherText.split()[0].replace('C',''))
s_ind = weatherText.find('C')
e_ind = weatherText.find(',')
desc = weatherText[s_ind+1:e_ind].strip().lower()
entryJSON['weather']['degree_c'] = degC
# get the closest matching "open weather description, id, and icon"
x = difflib.get_close_matches(desc,list(OW.weather.keys()),n=1)
if x:
# if something was returned
entryJSON['weather']['description'] = x[0]
entryJSON['weather']['id'] = OW.weather[x[0]]['id']
entryJSON['weather']['icon'] = OW.weather[x[0]]['icon']
with open(outdir + id + '.json', 'w') as outfile:
json.dump(entryJSON, outfile, sort_keys=True, indent=2) | return delta.total_seconds()
def unix_time_millis(dt, tz): |
App.js | import React, { Component } from 'react'
import './App.css'
import Section from '../Components/Section/Section'
import FeedbackOptions from '../Components/FeedbackOptions/FeedbackOptions'
import Statistics from '../Components/Statistics/Statistics'
import Notification from '../Components/Notification/Notifacation'
export default class | extends Component {
state = {
good: 0,
neutral: 0,
bad: 0,
}
handleClick = (e) => {
const name = e.target.name
console.log(name)
this.setState((prevState) => ({
[name]: prevState[name] + 1,
}))
console.log(this.state[name])
}
feedbacksCounter = () => {
const { good, neutral, bad } = this.state
return good + neutral + bad
}
positivePercentageCalculator = () => {
const result = this.feedbacksCounter()
const { good } = this.state
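    // When there is no feedback yet, result is 0 and the division below yields NaN;
    // render() guards this case by checking total === 0 before showing Statistics.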
const percentage = (good * 100) / result
return Math.round(percentage)
}
render() {
const keys = Object.keys(this.state)
const total = this.feedbacksCounter()
const positivePercentage = this.positivePercentageCalculator()
return (
<div className="App">
<Section title="Feedback:">
<FeedbackOptions
options={keys}
handleClick={this.handleClick}
></FeedbackOptions>
</Section>
{total === 0 ? (
<Notification message="No feedback given" />
) : (
<Section title="Statistics">
<Statistics
good={this.state.good}
neutral={this.state.neutral}
bad={this.state.bad}
total={total}
positivePercentage={positivePercentage}
></Statistics>
</Section>
)}
</div>
)
}
}
| App |
Animal.ts | import {Command} from '../Command';
import {MemeInterface, TypeEnum, AlignEnum, TemplateEnum, FrameInterface, TextInterface} from '../Meme';
export class | extends Command implements MemeInterface {
public template = TemplateEnum.DefaultHTML;
public meme = {
w: 1446,
h: 1500,
image: {
url: "images/animal.jpg",
align: "center right",
},
};
public defaultFrameIndex = 0;
public frames: FrameInterface[] = [
{
type: TypeEnum.ImageOrText,
x: 30.2,
y: 482.9,
w: 697.1,
h: 514.3,
a: 0,
text: {
valign: AlignEnum.Center,
halign: AlignEnum.FlexEnd,
color: "black",
shadow: "white",
strokeSize: "2px",
strokeColor: "rgba(255,255,255,0.3)",
value: "",
},
}
];
}
| Animal |
mod.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
# check.rs
Within the check phase of type check, we check each item one at a time
(bodies of function expressions are checked as part of the containing
function). Inference is used to supply types wherever they are
unknown.
By far the most complex case is checking the body of a function. This
can be broken down into several distinct phases:
- gather: creates type variables to represent the type of each local
variable and pattern binding.
- main: the main pass does the lion's share of the work: it
determines the types of all expressions, resolves
methods, checks for most invalid conditions, and so forth. In
some cases, where a type is unknown, it may create a type or region
variable and use that as the type of an expression.
In the process of checking, various constraints will be placed on
these type variables through the subtyping relationships requested
through the `demand` module. The `typeck::infer` module is in charge
of resolving those constraints.
- regionck: after main is complete, the regionck pass goes over all
types looking for regions and making sure that they did not escape
into places where they are not in scope. This may also influence the
final assignments of the various region variables if there is some
flexibility.
- vtable: finds and records the impls to use for each trait bound that
appears on a type parameter.
- writeback: writes the final types within a function body, replacing
type variables with their final inferred types. These final types
are written into the `tcx.node_types` table, which should *never* contain
any reference to a type variable.
## Intermediate types
While type checking a function, the intermediate types for the
expressions, blocks, and so forth contained within the function are
stored in `fcx.node_types` and `fcx.node_type_substs`. These types
may contain unresolved type variables. After type checking is
complete, the functions in the writeback module are used to take the
types from this table, resolve them, and then write them into their
permanent home in the type context `ccx.tcx`.
This means that during inferencing you should use `fcx.write_ty()`
and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
nodes within the function.
The types of top-level items, which never contain unbound type
variables, are stored directly into the `tcx` tables.
n.b.: A type variable is not the same thing as a type parameter. A
type variable is rather an "instance" of a type parameter: that is,
given a generic function `fn foo<T>(t: T)`: while checking the
function `foo`, the type `ty_param(0)` refers to the type `T`, which
is treated in abstract. When `foo()` is called, however, `T` will be
substituted for a fresh type variable `N`. This variable will
eventually be resolved to some concrete type (which might itself be
a type parameter).
*/
use middle::const_eval;
use middle::pat_util::pat_id_map;
use middle::pat_util;
use middle::lint::unreachable_code;
use middle::subst::Subst;
use middle::ty::{FnSig, VariantInfo};
use middle::ty::{ty_param_bounds_and_ty, ty_param_substs_and_ty};
use middle::ty::{substs, param_ty, Disr, ExprTyProvider};
use middle::ty;
use middle::ty_fold::TypeFolder;
use middle::typeck::astconv::AstConv;
use middle::typeck::astconv::{ast_region_to_region, ast_ty_to_ty};
use middle::typeck::astconv;
use middle::typeck::check::_match::pat_ctxt;
use middle::typeck::check::method::{AutoderefReceiver};
use middle::typeck::check::method::{AutoderefReceiverFlag};
use middle::typeck::check::method::{CheckTraitsAndInherentMethods};
use middle::typeck::check::method::{CheckTraitsOnly, DontAutoderefReceiver};
use middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig;
use middle::typeck::check::regionmanip::relate_free_regions;
use middle::typeck::check::vtable::{LocationInfo, VtableContext};
use middle::typeck::CrateCtxt;
use middle::typeck::infer::{resolve_type, force_tvar};
use middle::typeck::infer;
use middle::typeck::rscope::RegionScope;
use middle::typeck::{lookup_def_ccx};
use middle::typeck::no_params;
use middle::typeck::{require_same_types, method_map, vtable_map};
use middle::lang_items::TypeIdLangItem;
use util::common::{block_query, indenter, loop_query};
use util::ppaux::UserString;
use util::ppaux;
use std::hashmap::HashMap;
use std::result;
use std::util::replace;
use std::vec;
use syntax::abi::AbiSet;
use syntax::ast::{provided, required};
use syntax::ast;
use syntax::ast_map;
use syntax::ast_util::local_def;
use syntax::ast_util;
use syntax::attr;
use syntax::codemap::Span;
use syntax::codemap;
use syntax::opt_vec::OptVec;
use syntax::opt_vec;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::visit;
use syntax::visit::Visitor;
use syntax;
pub mod _match;
pub mod vtable;
pub mod writeback;
pub mod regionmanip;
pub mod regionck;
pub mod demand;
pub mod method;
pub struct SelfInfo {
self_ty: ty::t,
self_id: ast::NodeId,
span: Span
}
/// Fields that are part of a `FnCtxt` which are inherited by
/// closures defined within the function. For example:
///
/// fn foo() {
/// do bar() { ... }
/// }
///
/// Here, the function `foo()` and the closure passed to
/// `bar()` will each have their own `FnCtxt`, but they will
/// share the inherited fields.
pub struct Inherited {
infcx: @mut infer::InferCtxt,
locals: @mut HashMap<ast::NodeId, ty::t>,
param_env: ty::ParameterEnvironment,
// Temporary tables:
node_types: @mut HashMap<ast::NodeId, ty::t>,
node_type_substs: @mut HashMap<ast::NodeId, ty::substs>,
adjustments: @mut HashMap<ast::NodeId, @ty::AutoAdjustment>,
method_map: method_map,
vtable_map: vtable_map,
}
#[deriving(Clone)]
pub enum FnKind {
// A do-closure.
DoBlock,
// A normal closure or fn item.
Vanilla
}
#[deriving(Clone)]
pub struct PurityState {
def: ast::NodeId,
purity: ast::purity,
priv from_fn: bool
}
impl PurityState {
pub fn function(purity: ast::purity, def: ast::NodeId) -> PurityState {
PurityState { def: def, purity: purity, from_fn: true }
}
pub fn recurse(&mut self, blk: &ast::Block) -> PurityState {
match self.purity {
// If this is unsafe, then if the outer function was already marked as
// unsafe we shouldn't attribute the unsafe'ness to the block. This
// way the block can be warned about instead of ignoring this
// extraneous block (functions are never warned about).
ast::unsafe_fn if self.from_fn => *self,
purity => {
let (purity, def) = match blk.rules {
ast::UnsafeBlock(..) => (ast::unsafe_fn, blk.id),
ast::DefaultBlock => (purity, self.def),
};
PurityState{ def: def,
purity: purity,
from_fn: false }
}
}
}
}
/// Whether `check_binop` allows overloaded operators to be invoked.
#[deriving(Eq)]
enum AllowOverloadedOperatorsFlag {
AllowOverloadedOperators,
DontAllowOverloadedOperators,
}
#[deriving(Clone)]
pub struct FnCtxt {
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
// have been reported, we will skip regionck and other work that
// expects the types within the function to be consistent.
err_count_on_creation: uint,
ret_ty: ty::t,
ps: PurityState,
// Sometimes we generate region pointers where the precise region
// to use is not known. For example, an expression like `&x.f`
// where `x` is of type `@T`: in this case, we will be rooting
// `x` onto the stack frame, and we could choose to root it until
// the end of (almost) any enclosing block or expression. We
// want to pick the narrowest block that encompasses all uses.
//
// What we do in such cases is to generate a region variable with
// `region_lb` as a lower bound. The regionck pass then adds
// other constraints based on how the variable is used and region
// inference selects the ultimate value. Finally, borrowck is
// charged with guaranteeing that the value whose address was taken
// can actually be made to live as long as it needs to live.
region_lb: ast::NodeId,
// Says whether we're inside a for loop, in a do block
// or neither. Helps with error messages involving the
// function return type.
fn_kind: FnKind,
inh: @Inherited,
ccx: @mut CrateCtxt,
}
impl Inherited {
fn new(tcx: ty::ctxt,
param_env: ty::ParameterEnvironment)
-> Inherited {
Inherited {
infcx: infer::new_infer_ctxt(tcx),
locals: @mut HashMap::new(),
param_env: param_env,
node_types: @mut HashMap::new(),
node_type_substs: @mut HashMap::new(),
adjustments: @mut HashMap::new(),
method_map: @mut HashMap::new(),
vtable_map: @mut HashMap::new(),
}
}
}
// Used by check_const and check_enum_variants
pub fn blank_fn_ctxt(ccx: @mut CrateCtxt,
rty: ty::t,
region_bnd: ast::NodeId)
-> @mut FnCtxt {
// It's kind of a kludge to manufacture a fake function context
// and statement context, but we might as well write the code only once
let param_env = ty::ParameterEnvironment { free_substs: substs::empty(),
self_param_bound: None,
type_param_bounds: ~[] };
@mut FnCtxt {
err_count_on_creation: ccx.tcx.sess.err_count(),
ret_ty: rty,
ps: PurityState::function(ast::impure_fn, 0),
region_lb: region_bnd,
fn_kind: Vanilla,
inh: @Inherited::new(ccx.tcx, param_env),
ccx: ccx
}
}
impl ExprTyProvider for FnCtxt {
fn expr_ty(&self, ex: &ast::Expr) -> ty::t {
self.expr_ty(ex)
}
fn ty_ctxt(&self) -> ty::ctxt {
self.ccx.tcx
}
}
struct CheckItemTypesVisitor { ccx: @mut CrateCtxt }
impl Visitor<()> for CheckItemTypesVisitor {
fn visit_item(&mut self, i:@ast::item, _:()) {
check_item(self.ccx, i);
visit::walk_item(self, i, ());
}
}
pub fn check_item_types(ccx: @mut CrateCtxt, crate: &ast::Crate) {
let mut visit = CheckItemTypesVisitor { ccx: ccx };
visit::walk_crate(&mut visit, crate, ());
}
pub fn check_bare_fn(ccx: @mut CrateCtxt,
decl: &ast::fn_decl,
body: ast::P<ast::Block>,
id: ast::NodeId,
self_info: Option<SelfInfo>,
fty: ty::t,
param_env: ty::ParameterEnvironment) {
match ty::get(fty).sty {
ty::ty_bare_fn(ref fn_ty) => {
let fcx =
check_fn(ccx, self_info, fn_ty.purity,
&fn_ty.sig, decl, id, body, Vanilla,
@Inherited::new(ccx.tcx, param_env));
vtable::resolve_in_block(fcx, body);
regionck::regionck_fn(fcx, body);
writeback::resolve_type_vars_in_fn(fcx, decl, body, self_info);
}
_ => ccx.tcx.sess.impossible_case(body.span,
"check_bare_fn: function type expected")
}
}
struct GatherLocalsVisitor {
fcx: @mut FnCtxt,
tcx: ty::ctxt,
}
impl GatherLocalsVisitor {
fn assign(&mut self, nid: ast::NodeId, ty_opt: Option<ty::t>) {
match ty_opt {
None => {
// infer the variable's type
let var_id = self.fcx.infcx().next_ty_var_id();
let var_ty = ty::mk_var(self.fcx.tcx(), var_id);
self.fcx.inh.locals.insert(nid, var_ty);
}
Some(typ) => {
// take type that the user specified
self.fcx.inh.locals.insert(nid, typ);
}
}
}
}
impl Visitor<()> for GatherLocalsVisitor {
// Add explicitly-declared locals.
fn visit_local(&mut self, local:@ast::Local, _:()) {
let o_ty = match local.ty.node {
ast::ty_infer => None,
_ => Some(self.fcx.to_ty(local.ty))
};
self.assign(local.id, o_ty);
debug!("Local variable {} is assigned type {}",
self.fcx.pat_to_str(local.pat),
self.fcx.infcx().ty_to_str(
self.fcx.inh.locals.get_copy(&local.id)));
visit::walk_local(self, local, ());
}
// Add pattern bindings.
fn visit_pat(&mut self, p:&ast::Pat, _:()) {
match p.node {
ast::PatIdent(_, ref path, _)
if pat_util::pat_is_binding(self.fcx.ccx.tcx.def_map, p) => {
self.assign(p.id, None);
debug!("Pattern binding {} is assigned to {}",
self.tcx.sess.str_of(path.segments[0].identifier),
self.fcx.infcx().ty_to_str(
self.fcx.inh.locals.get_copy(&p.id)));
}
_ => {}
}
visit::walk_pat(self, p, ());
}
fn visit_block(&mut self, b:ast::P<ast::Block>, _:()) {
// non-obvious: the `blk` variable maps to region lb, so
// we have to keep this up-to-date. This
// is... unfortunate. It'd be nice to not need this.
self.fcx.with_region_lb(b.id, || visit::walk_block(self, b, ()));
}
// Don't descend into fns and items
fn visit_fn(&mut self, _:&visit::fn_kind, _:&ast::fn_decl,
_:ast::P<ast::Block>, _:Span, _:ast::NodeId, _:()) { }
fn visit_item(&mut self, _:@ast::item, _:()) { }
}
pub fn | (ccx: @mut CrateCtxt,
opt_self_info: Option<SelfInfo>,
purity: ast::purity,
fn_sig: &ty::FnSig,
decl: &ast::fn_decl,
id: ast::NodeId,
body: ast::P<ast::Block>,
fn_kind: FnKind,
inherited: @Inherited) -> @mut FnCtxt
{
/*!
* Helper used by check_bare_fn and check_expr_fn. Does the
* grungy work of checking a function body and returns the
* function context used for that purpose, since in the case of a
* fn item there is still a bit more to do.
*
* - ...
* - inherited: other fields inherited from the enclosing fn (if any)
*/
let tcx = ccx.tcx;
let err_count_on_creation = tcx.sess.err_count();
// First, we have to replace any bound regions in the fn and self
// types with free ones. The free region references will be bound
// the node_id of the body block.
let (opt_self_info, fn_sig) = {
let opt_self_ty = opt_self_info.map(|i| i.self_ty);
let (_, opt_self_ty, fn_sig) =
replace_bound_regions_in_fn_sig(
tcx, opt_self_ty, fn_sig,
|br| ty::ReFree(ty::FreeRegion {scope_id: body.id,
bound_region: br}));
let opt_self_info =
opt_self_info.map(
|si| SelfInfo {self_ty: opt_self_ty.unwrap(), .. si});
(opt_self_info, fn_sig)
};
relate_free_regions(tcx, opt_self_info.map(|s| s.self_ty), &fn_sig);
let arg_tys = fn_sig.inputs.map(|a| *a);
let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys={:?}, ret_ty={:?}, opt_self_ty={:?})",
arg_tys.map(|&a| ppaux::ty_to_str(tcx, a)),
ppaux::ty_to_str(tcx, ret_ty),
opt_self_info.map(|si| ppaux::ty_to_str(tcx, si.self_ty)));
// Create the function context. This is either derived from scratch or,
// in the case of function expressions, based on the outer context.
let fcx: @mut FnCtxt = {
@mut FnCtxt {
err_count_on_creation: err_count_on_creation,
ret_ty: ret_ty,
ps: PurityState::function(purity, id),
region_lb: body.id,
fn_kind: fn_kind,
inh: inherited,
ccx: ccx
}
};
gather_locals(fcx, decl, body, arg_tys, opt_self_info);
check_block_with_expected(fcx, body, Some(ret_ty));
// We unify the tail expr's type with the
// function result type, if there is a tail expr.
match body.expr {
Some(tail_expr) => {
let tail_expr_ty = fcx.expr_ty(tail_expr);
// Special case: we print a special error if there appears
// to be do-block/for-loop confusion
demand::suptype_with_fn(fcx, tail_expr.span, false,
fcx.ret_ty, tail_expr_ty,
|sp, e, a, s| {
fcx.report_mismatched_return_types(sp, e, a, s) });
}
None => ()
}
for self_info in opt_self_info.iter() {
fcx.write_ty(self_info.self_id, self_info.self_ty);
}
for (input, arg) in decl.inputs.iter().zip(arg_tys.iter()) {
fcx.write_ty(input.id, *arg);
}
return fcx;
fn gather_locals(fcx: @mut FnCtxt,
decl: &ast::fn_decl,
body: ast::P<ast::Block>,
arg_tys: &[ty::t],
opt_self_info: Option<SelfInfo>) {
let tcx = fcx.ccx.tcx;
let mut visit = GatherLocalsVisitor { fcx: fcx, tcx: tcx, };
// Add the self parameter
for self_info in opt_self_info.iter() {
visit.assign(self_info.self_id, Some(self_info.self_ty));
debug!("self is assigned to {}",
fcx.infcx().ty_to_str(
fcx.inh.locals.get_copy(&self_info.self_id)));
}
// Add formal parameters.
for (arg_ty, input) in arg_tys.iter().zip(decl.inputs.iter()) {
// Create type variables for each argument.
pat_util::pat_bindings(tcx.def_map,
input.pat,
|_bm, pat_id, _sp, _path| {
visit.assign(pat_id, None);
});
// Check the pattern.
let pcx = pat_ctxt {
fcx: fcx,
map: pat_id_map(tcx.def_map, input.pat),
};
_match::check_pat(&pcx, input.pat, *arg_ty);
}
visit.visit_block(body, ());
}
}
pub fn check_no_duplicate_fields(tcx: ty::ctxt,
fields: ~[(ast::Ident, Span)]) {
let mut field_names = HashMap::new();
for p in fields.iter() {
let (id, sp) = *p;
let orig_sp = field_names.find(&id).map(|x| *x);
match orig_sp {
Some(orig_sp) => {
tcx.sess.span_err(sp, format!("Duplicate field name {} in record type declaration",
tcx.sess.str_of(id)));
tcx.sess.span_note(orig_sp, "First declaration of this field occurred here");
break;
}
None => {
field_names.insert(id, sp);
}
}
}
}
pub fn check_struct(ccx: @mut CrateCtxt, id: ast::NodeId, span: Span) {
let tcx = ccx.tcx;
// Check that the class is instantiable
check_instantiable(tcx, span, id);
if ty::lookup_simd(tcx, local_def(id)) {
check_simd(tcx, span, id);
}
}
pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
debug!("check_item(it.id={}, it.ident={})",
it.id,
ty::item_path_str(ccx.tcx, local_def(it.id)));
let _indenter = indenter();
match it.node {
ast::item_static(_, _, e) => check_const(ccx, it.span, e, it.id),
ast::item_enum(ref enum_definition, _) => {
check_enum_variants(ccx,
it.span,
enum_definition.variants,
it.id);
}
ast::item_fn(decl, _, _, _, body) => {
let fn_tpt = ty::lookup_item_type(ccx.tcx, ast_util::local_def(it.id));
// FIXME(#5121) -- won't work for lifetimes that appear in type bounds
let param_env = ty::construct_parameter_environment(
ccx.tcx,
None,
*fn_tpt.generics.type_param_defs,
[],
[],
body.id);
check_bare_fn(ccx, decl, body, it.id, None, fn_tpt.ty, param_env);
}
ast::item_impl(_, ref opt_trait_ref, _, ref ms) => {
debug!("item_impl {} with id {}", ccx.tcx.sess.str_of(it.ident), it.id);
let impl_tpt = ty::lookup_item_type(ccx.tcx, ast_util::local_def(it.id));
for m in ms.iter() {
check_method_body(ccx, &impl_tpt.generics, None, *m);
}
match *opt_trait_ref {
Some(ref ast_trait_ref) => {
let impl_trait_ref =
ty::node_id_to_trait_ref(ccx.tcx, ast_trait_ref.ref_id);
check_impl_methods_against_trait(ccx,
it.span,
&impl_tpt.generics,
ast_trait_ref,
impl_trait_ref,
*ms);
vtable::resolve_impl(ccx, it, &impl_tpt.generics,
impl_trait_ref);
}
None => { }
}
}
ast::item_trait(_, _, ref trait_methods) => {
let trait_def = ty::lookup_trait_def(ccx.tcx, local_def(it.id));
for trait_method in (*trait_methods).iter() {
match *trait_method {
required(..) => {
// Nothing to do, since required methods don't have
// bodies to check.
}
provided(m) => {
check_method_body(ccx, &trait_def.generics,
Some(trait_def.trait_ref), m);
}
}
}
}
ast::item_struct(..) => {
check_struct(ccx, it.id, it.span);
}
ast::item_ty(ref t, ref generics) => {
let tpt_ty = ty::node_id_to_type(ccx.tcx, it.id);
check_bounds_are_used(ccx, t.span, &generics.ty_params, tpt_ty);
}
ast::item_foreign_mod(ref m) => {
if m.abis.is_intrinsic() {
for item in m.items.iter() {
check_intrinsic_type(ccx, *item);
}
} else {
for item in m.items.iter() {
let tpt = ty::lookup_item_type(ccx.tcx, local_def(item.id));
if tpt.generics.has_type_params() {
ccx.tcx.sess.span_err(item.span, "foreign items may not have type parameters");
}
match item.node {
ast::foreign_item_fn(ref fn_decl, _) => {
if fn_decl.variadic && !m.abis.is_c() {
ccx.tcx.sess.span_err(
item.span, "variadic function must have C calling convention");
}
}
_ => {}
}
}
}
}
_ => {/* nothing to do */ }
}
}
fn check_method_body(ccx: @mut CrateCtxt,
item_generics: &ty::Generics,
self_bound: Option<@ty::TraitRef>,
method: @ast::method) {
/*!
* Type checks a method body.
*
* # Parameters
* - `item_generics`: generics defined on the impl/trait that contains
* the method
* - `self_bound`: bound for the `Self` type parameter, if any
* - `method`: the method definition
*/
debug!("check_method_body(item_generics={}, \
self_bound={}, \
method.id={})",
item_generics.repr(ccx.tcx),
self_bound.repr(ccx.tcx),
method.id);
let method_def_id = local_def(method.id);
let method_ty = ty::method(ccx.tcx, method_def_id);
let method_generics = &method_ty.generics;
let param_env =
ty::construct_parameter_environment(
ccx.tcx,
self_bound,
*item_generics.type_param_defs,
*method_generics.type_param_defs,
item_generics.region_param_defs,
method.body.id);
// Compute the self type and fty from point of view of inside fn
let opt_self_info = method_ty.transformed_self_ty.map(|ty| {
SelfInfo {self_ty: ty.subst(ccx.tcx, ¶m_env.free_substs),
self_id: method.self_id,
span: method.explicit_self.span}
});
let fty = ty::node_id_to_type(ccx.tcx, method.id);
let fty = fty.subst(ccx.tcx, ¶m_env.free_substs);
check_bare_fn(
ccx,
method.decl,
method.body,
method.id,
opt_self_info,
fty,
param_env);
}
fn check_impl_methods_against_trait(ccx: @mut CrateCtxt,
impl_span: Span,
impl_generics: &ty::Generics,
ast_trait_ref: &ast::trait_ref,
impl_trait_ref: &ty::TraitRef,
impl_methods: &[@ast::method]) {
// Locate trait methods
let tcx = ccx.tcx;
let trait_methods = ty::trait_methods(tcx, impl_trait_ref.def_id);
// Check existing impl methods to see if they are both present in trait
// and compatible with trait signature
for impl_method in impl_methods.iter() {
let impl_method_def_id = local_def(impl_method.id);
let impl_method_ty = ty::method(ccx.tcx, impl_method_def_id);
// If this is an impl of a trait method, find the corresponding
// method definition in the trait.
let opt_trait_method_ty =
trait_methods.iter().
find(|tm| tm.ident.name == impl_method_ty.ident.name);
match opt_trait_method_ty {
Some(trait_method_ty) => {
compare_impl_method(ccx.tcx,
impl_generics,
impl_method_ty,
impl_method.span,
impl_method.body.id,
*trait_method_ty,
&impl_trait_ref.substs);
}
None => {
tcx.sess.span_err(
impl_method.span,
format!("method `{}` is not a member of trait `{}`",
tcx.sess.str_of(impl_method_ty.ident),
pprust::path_to_str(&ast_trait_ref.path,
tcx.sess.intr())));
}
}
}
// Check for missing methods from trait
let provided_methods = ty::provided_trait_methods(tcx,
impl_trait_ref.def_id);
let mut missing_methods = ~[];
for trait_method in trait_methods.iter() {
let is_implemented =
impl_methods.iter().any(
|m| m.ident.name == trait_method.ident.name);
let is_provided =
provided_methods.iter().any(
|m| m.ident.name == trait_method.ident.name);
if !is_implemented && !is_provided {
missing_methods.push(
format!("`{}`", ccx.tcx.sess.str_of(trait_method.ident)));
}
}
if !missing_methods.is_empty() {
tcx.sess.span_err(
impl_span,
format!("not all trait methods implemented, missing: {}",
missing_methods.connect(", ")));
}
}
/**
* Checks that a method from an impl/class conforms to the signature of
* the same method as declared in the trait.
*
* # Parameters
*
* - impl_generics: the generics declared on the impl itself (not the method!)
* - impl_m: type of the method we are checking
* - impl_m_span: span to use for reporting errors
* - impl_m_body_id: id of the method body
* - trait_m: the method in the trait
* - trait_substs: the substitutions used on the type of the trait
*/
pub fn compare_impl_method(tcx: ty::ctxt,
impl_generics: &ty::Generics,
impl_m: @ty::Method,
impl_m_span: Span,
impl_m_body_id: ast::NodeId,
trait_m: &ty::Method,
trait_substs: &ty::substs) {
debug!("compare_impl_method()");
let infcx = infer::new_infer_ctxt(tcx);
let impl_tps = impl_generics.type_param_defs.len();
// Try to give more informative error messages about self typing
// mismatches. Note that any mismatch will also be detected
// below, where we construct a canonical function type that
// includes the self parameter as a normal parameter. It's just
// that the error messages you get out of this code are a bit more
// inscrutable, particularly for cases where one method has no
// self.
match (&trait_m.explicit_self, &impl_m.explicit_self) {
(&ast::sty_static, &ast::sty_static) => {}
(&ast::sty_static, _) => {
tcx.sess.span_err(
impl_m_span,
format!("method `{}` has a `{}` declaration in the impl, \
but not in the trait",
tcx.sess.str_of(trait_m.ident),
pprust::explicit_self_to_str(&impl_m.explicit_self,
tcx.sess.intr())));
return;
}
(_, &ast::sty_static) => {
tcx.sess.span_err(
impl_m_span,
format!("method `{}` has a `{}` declaration in the trait, \
but not in the impl",
tcx.sess.str_of(trait_m.ident),
pprust::explicit_self_to_str(&trait_m.explicit_self,
tcx.sess.intr())));
return;
}
_ => {
// Let the type checker catch other errors below
}
}
let num_impl_m_type_params = impl_m.generics.type_param_defs.len();
let num_trait_m_type_params = trait_m.generics.type_param_defs.len();
if num_impl_m_type_params != num_trait_m_type_params {
tcx.sess.span_err(
impl_m_span,
format!("method `{}` has {} type parameter(s), but its trait \
declaration has {} type parameter(s)",
tcx.sess.str_of(trait_m.ident),
num_impl_m_type_params,
num_trait_m_type_params));
return;
}
if impl_m.fty.sig.inputs.len() != trait_m.fty.sig.inputs.len() {
tcx.sess.span_err(
impl_m_span,
format!("method `{}` has {} parameter{} \
but the declaration in trait `{}` has {}",
tcx.sess.str_of(trait_m.ident),
impl_m.fty.sig.inputs.len(),
if impl_m.fty.sig.inputs.len() == 1 { "" } else { "s" },
ty::item_path_str(tcx, trait_m.def_id),
trait_m.fty.sig.inputs.len()));
return;
}
for (i, trait_param_def) in trait_m.generics.type_param_defs.iter().enumerate() {
// For each of the corresponding impl ty param's bounds...
let impl_param_def = &impl_m.generics.type_param_defs[i];
// Check that the impl does not require any builtin-bounds
// that the trait does not guarantee:
let extra_bounds =
impl_param_def.bounds.builtin_bounds -
trait_param_def.bounds.builtin_bounds;
if !extra_bounds.is_empty() {
tcx.sess.span_err(
impl_m_span,
format!("in method `{}`, \
type parameter {} requires `{}`, \
which is not required by \
the corresponding type parameter \
in the trait declaration",
tcx.sess.str_of(trait_m.ident),
i,
extra_bounds.user_string(tcx)));
return;
}
// FIXME(#2687)---we should be checking that the bounds of the
// trait imply the bounds of the subtype, but it appears we
// are...not checking this.
if impl_param_def.bounds.trait_bounds.len() !=
trait_param_def.bounds.trait_bounds.len()
{
tcx.sess.span_err(
impl_m_span,
format!("in method `{}`, \
type parameter {} has {} trait bound(s), but the \
corresponding type parameter in \
the trait declaration has {} trait bound(s)",
tcx.sess.str_of(trait_m.ident),
i, impl_param_def.bounds.trait_bounds.len(),
trait_param_def.bounds.trait_bounds.len()));
return;
}
}
// Create a substitution that maps the type parameters on the impl
// to themselves and which replace any references to bound regions
// in the self type with free regions. So, for example, if the
// impl type is "&'a str", then this would replace the self
// type with a free region `self`.
let dummy_impl_tps: ~[ty::t] =
impl_generics.type_param_defs.iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i, t.def_id)).
collect();
let dummy_method_tps: ~[ty::t] =
impl_m.generics.type_param_defs.iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i + impl_tps, t.def_id)).
collect();
let dummy_impl_regions: OptVec<ty::Region> =
impl_generics.region_param_defs.iter().
map(|l| ty::ReFree(ty::FreeRegion {
scope_id: impl_m_body_id,
bound_region: ty::BrNamed(l.def_id, l.ident)})).
collect();
let dummy_substs = ty::substs {
tps: vec::append(dummy_impl_tps, dummy_method_tps),
regions: ty::NonerasedRegions(dummy_impl_regions),
self_ty: None };
// We are going to create a synthetic fn type that includes
// both the method's self argument and its normal arguments.
// So a method like `fn(&self, a: uint)` would be converted
// into a function `fn(self: &T, a: uint)`.
let mut trait_fn_args = ~[];
let mut impl_fn_args = ~[];
// For both the trait and the impl, create an argument to
// represent the self argument (unless this is a static method).
// This argument will have the *transformed* self type.
for &t in trait_m.transformed_self_ty.iter() {
trait_fn_args.push(t);
}
for &t in impl_m.transformed_self_ty.iter() {
impl_fn_args.push(t);
}
// Add in the normal arguments.
trait_fn_args.push_all(trait_m.fty.sig.inputs);
impl_fn_args.push_all(impl_m.fty.sig.inputs);
// Create a bare fn type for trait/impl that includes self argument
let trait_fty =
ty::mk_bare_fn(tcx,
ty::BareFnTy {
purity: trait_m.fty.purity,
abis: trait_m.fty.abis,
sig: ty::FnSig {
binder_id: trait_m.fty.sig.binder_id,
inputs: trait_fn_args,
output: trait_m.fty.sig.output,
variadic: false
}
});
let impl_fty =
ty::mk_bare_fn(tcx,
ty::BareFnTy {
purity: impl_m.fty.purity,
abis: impl_m.fty.abis,
sig: ty::FnSig {
binder_id: impl_m.fty.sig.binder_id,
inputs: impl_fn_args,
output: impl_m.fty.sig.output,
variadic: false
}
});
// Perform substitutions so that the trait/impl methods are expressed
// in terms of the same set of type/region parameters:
// - replace trait type parameters with those from `trait_substs`,
// except with any reference to bound self replaced with `dummy_self_r`
// - replace method parameters on the trait with fresh, dummy parameters
// that correspond to the parameters we will find on the impl
// - replace self region with a fresh, dummy region
let impl_fty = {
debug!("impl_fty (pre-subst): {}", ppaux::ty_to_str(tcx, impl_fty));
impl_fty.subst(tcx, &dummy_substs)
};
debug!("impl_fty (post-subst): {}", ppaux::ty_to_str(tcx, impl_fty));
let trait_fty = {
let substs { regions: trait_regions,
tps: trait_tps,
self_ty: self_ty } = trait_substs.subst(tcx, &dummy_substs);
let substs = substs {
regions: trait_regions,
tps: vec::append(trait_tps, dummy_method_tps),
self_ty: self_ty,
};
debug!("trait_fty (pre-subst): {} substs={}",
trait_fty.repr(tcx), substs.repr(tcx));
trait_fty.subst(tcx, &substs)
};
debug!("trait_fty (post-subst): {}", trait_fty.repr(tcx));
match infer::mk_subty(infcx, false, infer::MethodCompatCheck(impl_m_span),
impl_fty, trait_fty) {
result::Ok(()) => {}
result::Err(ref terr) => {
tcx.sess.span_err(
impl_m_span,
format!("method `{}` has an incompatible type: {}",
tcx.sess.str_of(trait_m.ident),
ty::type_err_to_str(tcx, terr)));
ty::note_and_explain_type_err(tcx, terr);
}
}
}
impl AstConv for FnCtxt {
fn tcx(&self) -> ty::ctxt { self.ccx.tcx }
fn get_item_ty(&self, id: ast::DefId) -> ty::ty_param_bounds_and_ty {
ty::lookup_item_type(self.tcx(), id)
}
fn get_trait_def(&self, id: ast::DefId) -> @ty::TraitDef {
ty::lookup_trait_def(self.tcx(), id)
}
fn ty_infer(&self, _span: Span) -> ty::t {
self.infcx().next_ty_var()
}
}
impl FnCtxt {
pub fn infcx(&self) -> @mut infer::InferCtxt {
self.inh.infcx
}
pub fn err_count_since_creation(&self) -> uint {
self.ccx.tcx.sess.err_count() - self.err_count_on_creation
}
pub fn vtable_context<'a>(&'a self) -> VtableContext<'a> {
VtableContext {
infcx: self.infcx(),
param_env: &self.inh.param_env
}
}
}
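// Using an inference context as a region scope means that anonymous
// regions (e.g. elided lifetimes) are filled in with fresh region
// inference variables.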
impl RegionScope for @mut infer::InferCtxt {
fn anon_regions(&self,
span: Span,
count: uint) -> Result<~[ty::Region], ()> {
Ok(vec::from_fn(
count,
|_| self.next_region_var(infer::MiscVariable(span))))
}
}
impl FnCtxt {
pub fn tag(&self) -> ~str {
unsafe {
format!("{}", self as *FnCtxt)
}
}
pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> ty::t {
match self.inh.locals.find(&nid) {
Some(&t) => t,
None => {
self.tcx().sess.span_bug(
span,
format!("No type for local variable {:?}", nid));
}
}
}
pub fn block_region(&self) -> ty::Region {
ty::ReScope(self.region_lb)
}
#[inline]
pub fn write_ty(&self, node_id: ast::NodeId, ty: ty::t) {
debug!("write_ty({}, {}) in fcx {}",
node_id, ppaux::ty_to_str(self.tcx(), ty), self.tag());
self.inh.node_types.insert(node_id, ty);
}
pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::substs) {
if !ty::substs_is_noop(&substs) {
debug!("write_substs({}, {}) in fcx {}",
node_id,
ty::substs_to_str(self.tcx(), &substs),
self.tag());
self.inh.node_type_substs.insert(node_id, substs);
}
}
pub fn write_ty_substs(&self,
node_id: ast::NodeId,
ty: ty::t,
substs: ty::substs) {
let ty = ty::subst(self.tcx(), &substs, ty);
self.write_ty(node_id, ty);
self.write_substs(node_id, substs);
}
pub fn write_autoderef_adjustment(&self,
node_id: ast::NodeId,
derefs: uint) {
if derefs == 0 { return; }
self.write_adjustment(
node_id,
@ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: derefs,
autoref: None })
);
}
pub fn write_adjustment(&self,
node_id: ast::NodeId,
adj: @ty::AutoAdjustment) {
debug!("write_adjustment(node_id={:?}, adj={:?})", node_id, adj);
self.inh.adjustments.insert(node_id, adj);
}
pub fn write_nil(&self, node_id: ast::NodeId) {
self.write_ty(node_id, ty::mk_nil());
}
pub fn write_bot(&self, node_id: ast::NodeId) {
self.write_ty(node_id, ty::mk_bot());
}
pub fn write_error(@mut self, node_id: ast::NodeId) {
self.write_ty(node_id, ty::mk_err());
}
pub fn to_ty(&self, ast_t: &ast::Ty) -> ty::t {
ast_ty_to_ty(self, &self.infcx(), ast_t)
}
pub fn pat_to_str(&self, pat: @ast::Pat) -> ~str {
pat.repr(self.tcx())
}
pub fn expr_ty(&self, ex: &ast::Expr) -> ty::t {
match self.inh.node_types.find(&ex.id) {
Some(&t) => t,
None => {
self.tcx().sess.bug(format!("no type for expr in fcx {}",
self.tag()));
}
}
}
pub fn node_ty(&self, id: ast::NodeId) -> ty::t {
match self.inh.node_types.find(&id) {
Some(&t) => t,
None => {
self.tcx().sess.bug(
format!("no type for node {}: {} in fcx {}",
id, ast_map::node_id_to_str(
self.tcx().items, id,
token::get_ident_interner()),
self.tag()));
}
}
}
pub fn node_ty_substs(&self, id: ast::NodeId) -> ty::substs {
match self.inh.node_type_substs.find(&id) {
Some(ts) => (*ts).clone(),
None => {
self.tcx().sess.bug(
format!("no type substs for node {}: {} in fcx {}",
id, ast_map::node_id_to_str(self.tcx().items, id,
token::get_ident_interner()),
self.tag()));
}
}
}
pub fn opt_node_ty_substs(&self,
id: ast::NodeId,
f: |&ty::substs| -> bool)
-> bool {
match self.inh.node_type_substs.find(&id) {
Some(s) => f(s),
None => true
}
}
pub fn mk_subty(&self,
a_is_expected: bool,
origin: infer::TypeOrigin,
sub: ty::t,
sup: ty::t)
-> Result<(), ty::type_err> {
infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
}
pub fn can_mk_subty(&self, sub: ty::t, sup: ty::t)
-> Result<(), ty::type_err> {
infer::can_mk_subty(self.infcx(), sub, sup)
}
pub fn mk_assignty(&self,
expr: @ast::Expr,
sub: ty::t,
sup: ty::t)
-> Result<(), ty::type_err> {
match infer::mk_coercety(self.infcx(),
false,
infer::ExprAssignable(expr),
sub,
sup) {
Ok(None) => result::Ok(()),
Err(ref e) => result::Err((*e)),
Ok(Some(adjustment)) => {
self.write_adjustment(expr.id, adjustment);
Ok(())
}
}
}
pub fn can_mk_assignty(&self, sub: ty::t, sup: ty::t)
-> Result<(), ty::type_err> {
infer::can_mk_coercety(self.infcx(), sub, sup)
}
pub fn mk_eqty(&self,
a_is_expected: bool,
origin: infer::TypeOrigin,
sub: ty::t,
sup: ty::t)
-> Result<(), ty::type_err> {
infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
}
pub fn mk_subr(&self,
a_is_expected: bool,
origin: infer::SubregionOrigin,
sub: ty::Region,
sup: ty::Region) {
infer::mk_subr(self.infcx(), a_is_expected, origin, sub, sup)
}
pub fn with_region_lb<R>(@mut self, lb: ast::NodeId, f: || -> R) -> R {
let old_region_lb = self.region_lb;
self.region_lb = lb;
let v = f();
self.region_lb = old_region_lb;
v
}
pub fn type_error_message(&self,
sp: Span,
mk_msg: |~str| -> ~str,
actual_ty: ty::t,
err: Option<&ty::type_err>) {
self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
}
pub fn report_mismatched_return_types(&self,
sp: Span,
e: ty::t,
a: ty::t,
err: &ty::type_err) {
// Derived error
if ty::type_is_error(e) || ty::type_is_error(a) {
return;
}
self.infcx().report_mismatched_types(sp, e, a, err)
}
pub fn report_mismatched_types(&self,
sp: Span,
e: ty::t,
a: ty::t,
err: &ty::type_err) {
self.infcx().report_mismatched_types(sp, e, a, err)
}
}
pub fn do_autoderef(fcx: @mut FnCtxt, sp: Span, t: ty::t) -> (ty::t, uint) {
/*!
*
* Autoderefs the type `t` as many times as possible, returning
* a new type and a counter for how many times the type was
* deref'd. If the counter is non-zero, the receiver is responsible
* for inserting an AutoAdjustment record into `tcx.adjustments`
* so that trans/borrowck/etc know about this autoderef. */
let mut t1 = t;
let mut enum_dids = ~[];
let mut autoderefs = 0;
loop {
let sty = structure_of(fcx, sp, t1);
// Some extra checks to detect weird cycles and so forth:
match *sty {
ty::ty_box(inner) | ty::ty_uniq(inner) |
ty::ty_rptr(_, inner) => {
match ty::get(t1).sty {
ty::ty_infer(ty::TyVar(v1)) => {
ty::occurs_check(fcx.ccx.tcx, sp, v1,
ty::mk_box(fcx.ccx.tcx, inner));
}
_ => ()
}
}
ty::ty_enum(ref did, _) => {
// Watch out for a type like `enum t = @t`. Such a
// type would otherwise infinitely auto-deref. Only
// autoderef loops during typeck (basically, this one
// and the loops in typeck::check::method) need to be
// concerned with this, as an error will be reported
// on the enum definition as well because the enum is
// not instantiable.
if enum_dids.contains(did) {
return (t1, autoderefs);
}
enum_dids.push(*did);
}
_ => { /*ok*/ }
}
// Otherwise, deref if type is derefable:
match ty::deref_sty(fcx.ccx.tcx, sty, false) {
None => {
return (t1, autoderefs);
}
Some(mt) => {
autoderefs += 1;
t1 = mt.ty
}
}
};
}
// AST fragment checking
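// Determines the type of a literal: string and binary literals get static
// str/vector types, suffixed numeric literals get their declared machine
// type, and unsuffixed numeric literals get fresh integral or floating
// point type variables.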
pub fn check_lit(fcx: @mut FnCtxt, lit: @ast::lit) -> ty::t {
let tcx = fcx.ccx.tcx;
match lit.node {
ast::lit_str(..) => ty::mk_estr(tcx, ty::vstore_slice(ty::ReStatic)),
ast::lit_binary(..) => {
ty::mk_evec(tcx, ty::mt{ ty: ty::mk_u8(), mutbl: ast::MutImmutable },
ty::vstore_slice(ty::ReStatic))
}
ast::lit_char(_) => ty::mk_char(),
ast::lit_int(_, t) => ty::mk_mach_int(t),
ast::lit_uint(_, t) => ty::mk_mach_uint(t),
ast::lit_int_unsuffixed(_) => {
// An unsuffixed integer literal could have any integral type,
// so we create an integral type variable for it.
ty::mk_int_var(tcx, fcx.infcx().next_int_var_id())
}
ast::lit_float(_, t) => ty::mk_mach_float(t),
ast::lit_float_unsuffixed(_) => {
// An unsuffixed floating point literal could have any floating point
// type, so we create a floating point type variable for it.
ty::mk_float_var(tcx, fcx.infcx().next_float_var_id())
}
ast::lit_nil => ty::mk_nil(),
ast::lit_bool(_) => ty::mk_bool()
}
}
pub fn valid_range_bounds(ccx: @mut CrateCtxt,
from: @ast::Expr,
to: @ast::Expr)
-> Option<bool> {
match const_eval::compare_lit_exprs(ccx.tcx, from, to) {
Some(val) => Some(val <= 0),
None => None
}
}
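// The check_expr_* variants below all delegate to check_expr_with_unifier;
// they differ only in whether an expected type is supplied and whether it
// is enforced (by subtyping or coercion) or merely used as a hint.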
pub fn check_expr_has_type(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
check_expr_with_unifier(fcx, expr, Some(expected), || {
demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr));
});
}
pub fn check_expr_coercable_to_type(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
check_expr_with_unifier(fcx, expr, Some(expected), || {
demand::coerce(fcx, expr.span, expected, expr)
});
}
pub fn check_expr_with_hint(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
check_expr_with_unifier(fcx, expr, Some(expected), || ())
}
pub fn check_expr_with_opt_hint(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: Option<ty::t>) {
check_expr_with_unifier(fcx, expr, expected, || ())
}
pub fn check_expr(fcx: @mut FnCtxt, expr: @ast::Expr) {
check_expr_with_unifier(fcx, expr, None, || ())
}
// determine the `self` type, using fresh type variables for all type
// parameters declared on the impl; e.g., `impl<A,B> for ~[(A,B)]`
// would return ($0, $1), where $0 and $1 are freshly instantiated type
// variables.
pub fn impl_self_ty(vcx: &VtableContext,
location_info: &LocationInfo, // (potential) receiver for
// this impl
did: ast::DefId)
-> ty_param_substs_and_ty {
let tcx = vcx.tcx();
let (n_tps, n_rps, raw_ty) = {
let ity = ty::lookup_item_type(tcx, did);
(ity.generics.type_param_defs.len(),
ity.generics.region_param_defs.len(),
ity.ty)
};
let rps =
vcx.infcx.next_region_vars(
infer::BoundRegionInTypeOrImpl(location_info.span),
n_rps);
let tps = vcx.infcx.next_ty_vars(n_tps);
let substs = substs {regions: ty::NonerasedRegions(opt_vec::from(rps)),
self_ty: None,
tps: tps};
let substd_ty = ty::subst(tcx, &substs, raw_ty);
ty_param_substs_and_ty { substs: substs, ty: substd_ty }
}
// Only for fields! Returns <none> for methods.
// Indifferent to privacy flags
pub fn lookup_field_ty(tcx: ty::ctxt,
class_id: ast::DefId,
items: &[ty::field_ty],
fieldname: ast::Name,
substs: &ty::substs) -> Option<ty::t> {
let o_field = items.iter().find(|f| f.name == fieldname);
o_field.map(|f| ty::lookup_field_type(tcx, class_id, f.id, substs))
}
// Controls whether the arguments are automatically referenced. This is useful
// for overloaded binary and unary operators.
pub enum DerefArgs {
DontDerefArgs,
DoDerefArgs
}
// Given the provenance of a static method, returns the generics of the static
// method's container.
fn generics_of_static_method_container(type_context: ty::ctxt,
provenance: ast::MethodProvenance)
-> ty::Generics {
match provenance {
ast::FromTrait(trait_def_id) => {
ty::lookup_trait_def(type_context, trait_def_id).generics
}
ast::FromImpl(impl_def_id) => {
ty::lookup_item_type(type_context, impl_def_id).generics
}
}
}
// Verifies that type parameters supplied in paths are in the right
// locations.
fn check_type_parameter_positions_in_path(function_context: @mut FnCtxt,
path: &ast::Path,
def: ast::Def) {
// We only care about checking the case in which the path has two or
// more segments.
if path.segments.len() < 2 {
return
}
// Verify that no lifetimes or type parameters are present anywhere
// except the final two elements of the path.
for i in range(0, path.segments.len() - 2) {
for lifetime in path.segments[i].lifetimes.iter() {
function_context.tcx()
.sess
.span_err(lifetime.span,
"lifetime parameters may not \
appear here");
break;
}
for typ in path.segments[i].types.iter() {
function_context.tcx()
.sess
.span_err(typ.span,
"type parameters may not appear here");
break;
}
}
// If there are no parameters at all, there is nothing more to do; the
// rest of typechecking will (attempt to) infer everything.
if path.segments
.iter()
.all(|s| s.lifetimes.is_empty() && s.types.is_empty()) {
return
}
match def {
// If this is a static method of a trait or implementation, then
// ensure that the segment of the path which names the trait or
// implementation (the penultimate segment) is annotated with the
// right number of type parameters.
ast::DefStaticMethod(_, provenance, _) => {
let generics =
generics_of_static_method_container(function_context.ccx.tcx,
provenance);
let name = match provenance {
ast::FromTrait(_) => "trait",
ast::FromImpl(_) => "impl",
};
let trait_segment = &path.segments[path.segments.len() - 2];
// Make sure lifetime parameterization agrees with the trait or
// implementation type.
let trait_region_parameter_count = generics.region_param_defs.len();
let supplied_region_parameter_count = trait_segment.lifetimes.len();
if trait_region_parameter_count != supplied_region_parameter_count
&& supplied_region_parameter_count != 0 {
function_context.tcx()
.sess
.span_err(path.span,
format!("expected {} lifetime parameter(s), \
found {} lifetime parameter(s)",
trait_region_parameter_count,
supplied_region_parameter_count));
}
// Make sure the number of type parameters supplied on the trait
// or implementation segment equals the number of type parameters
// on the trait or implementation definition.
let trait_type_parameter_count = generics.type_param_defs.len();
let supplied_type_parameter_count = trait_segment.types.len();
if trait_type_parameter_count != supplied_type_parameter_count {
let trait_count_suffix = if trait_type_parameter_count == 1 {
""
} else {
"s"
};
let supplied_count_suffix =
if supplied_type_parameter_count == 1 {
""
} else {
"s"
};
function_context.tcx()
.sess
.span_err(path.span,
format!("the {} referenced by this \
path has {} type \
parameter{}, but {} type \
parameter{} were supplied",
name,
trait_type_parameter_count,
trait_count_suffix,
supplied_type_parameter_count,
supplied_count_suffix))
}
}
_ => {
// Verify that no lifetimes or type parameters are present on
// the penultimate segment of the path.
let segment = &path.segments[path.segments.len() - 2];
for lifetime in segment.lifetimes.iter() {
function_context.tcx()
.sess
.span_err(lifetime.span,
"lifetime parameters may not
appear here");
break;
}
for typ in segment.types.iter() {
function_context.tcx()
.sess
.span_err(typ.span,
"type parameters may not appear \
here");
break;
}
}
}
}
/// Invariant:
/// If an expression has any sub-expressions that result in a type error,
/// inspecting that expression's type with `ty::type_is_error` will return
/// true. Likewise, if an expression is known to diverge, inspecting its
/// type with `ty::type_is_bot` will return true (n.b.: since Rust is
/// strict, _|_ can appear in the type of an expression that does not,
/// itself, diverge: for example, fn() -> _|_.)
/// Note that inspecting a type's structure *directly* may expose the fact
/// that there are actually multiple representations for both `ty_err` and
/// `ty_bot`, so avoid that when err and bot need to be handled differently.
pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
expr: @ast::Expr,
expected: Option<ty::t>,
unifier: ||) {
debug!(">> typechecking");
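// Checks the arguments of a method call against the method's bare fn
// type and returns the method's output type (or the error type if the
// method type itself was an error).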
fn check_method_argument_types(
fcx: @mut FnCtxt,
sp: Span,
method_fn_ty: ty::t,
callee_expr: @ast::Expr,
args: &[@ast::Expr],
sugar: ast::CallSugar,
deref_args: DerefArgs) -> ty::t
{
if ty::type_is_error(method_fn_ty) {
let err_inputs = err_args(args.len());
check_argument_types(fcx, sp, err_inputs, callee_expr,
args, sugar, deref_args, false);
method_fn_ty
} else {
match ty::get(method_fn_ty).sty {
ty::ty_bare_fn(ref fty) => {
check_argument_types(fcx, sp, fty.sig.inputs, callee_expr,
args, sugar, deref_args, fty.sig.variadic);
fty.sig.output
}
_ => {
fcx.tcx().sess.span_bug(
sp,
format!("Method without bare fn type"));
}
}
}
}
fn check_argument_types(fcx: @mut FnCtxt,
sp: Span,
fn_inputs: &[ty::t],
callee_expr: @ast::Expr,
args: &[@ast::Expr],
sugar: ast::CallSugar,
deref_args: DerefArgs,
variadic: bool) {
/*!
*
* Generic function that factors out common logic from
* function calls, method calls and overloaded operators.
*/
let tcx = fcx.ccx.tcx;
// Grab the argument types, supplying fresh type variables
// if the wrong number of arguments were supplied
let supplied_arg_count = args.len();
let expected_arg_count = fn_inputs.len();
let formal_tys = if expected_arg_count == supplied_arg_count {
fn_inputs.map(|a| *a)
} else if variadic {
if supplied_arg_count >= expected_arg_count {
fn_inputs.map(|a| *a)
} else {
let msg = format!(
"this function takes at least {0, plural, =1{# parameter} \
other{# parameters}} but {1, plural, =1{# parameter was} \
other{# parameters were}} supplied", expected_arg_count, supplied_arg_count);
tcx.sess.span_err(sp, msg);
err_args(supplied_arg_count)
}
} else {
let suffix = match sugar {
ast::NoSugar => "",
ast::DoSugar => " (including the closure passed by \
the `do` keyword)",
ast::ForSugar => " (including the closure passed by \
the `for` keyword)"
};
let msg = format!(
"this function takes {0, plural, =1{# parameter} \
other{# parameters}} but {1, plural, =1{# parameter was} \
other{# parameters were}} supplied{2}",
expected_arg_count, supplied_arg_count, suffix);
tcx.sess.span_err(sp, msg);
err_args(supplied_arg_count)
};
debug!("check_argument_types: formal_tys={:?}",
formal_tys.map(|t| fcx.infcx().ty_to_str(*t)));
// Check the arguments.
// We do this in a pretty awful way: first we typecheck any arguments
// that are not anonymous functions, then we typecheck the anonymous
// functions. This is so that we have more information about the types
// of arguments when we typecheck the functions. This isn't really the
// right way to do this.
let xs = [false, true];
for check_blocks in xs.iter() {
let check_blocks = *check_blocks;
debug!("check_blocks={}", check_blocks);
// More awful hacks: before we check the blocks, try to do
// an "opportunistic" vtable resolution of any trait
// bounds on the call.
if check_blocks {
vtable::early_resolve_expr(callee_expr, fcx, true);
}
// For variadic functions, we don't have a declared type for all of
// the arguments, hence we only do our usual type checking with
// the arguments whose types we do know.
let t = if variadic {
expected_arg_count
} else {
supplied_arg_count
};
for (i, arg) in args.iter().take(t).enumerate() {
let is_block = match arg.node {
ast::ExprFnBlock(..) |
ast::ExprProc(..) |
ast::ExprDoBody(..) => true,
_ => false
};
if is_block == check_blocks {
debug!("checking the argument");
let mut formal_ty = formal_tys[i];
match deref_args {
DoDerefArgs => {
match ty::get(formal_ty).sty {
ty::ty_rptr(_, mt) => formal_ty = mt.ty,
ty::ty_err => (),
_ => {
fcx.ccx.tcx.sess.span_bug(arg.span, "no ref");
}
}
}
DontDerefArgs => {}
}
check_expr_coercable_to_type(fcx, *arg, formal_ty);
}
}
}
// We also need to make sure we at least write the ty of the other
// arguments which we skipped above.
if variadic {
for arg in args.iter().skip(expected_arg_count) {
check_expr(fcx, *arg);
// There are a few types which get autopromoted when passed via varargs
// in C, but we just error out instead and require explicit casts.
let arg_ty = structurally_resolved_type(fcx, arg.span, fcx.expr_ty(*arg));
match ty::get(arg_ty).sty {
ty::ty_float(ast::ty_f32) => {
fcx.type_error_message(arg.span,
|t| format!("can't pass an {} to variadic function, \
cast to c_double", t), arg_ty, None);
}
ty::ty_int(ast::ty_i8) | ty::ty_int(ast::ty_i16) | ty::ty_bool => {
fcx.type_error_message(arg.span,
|t| format!("can't pass {} to variadic function, cast to c_int",
t), arg_ty, None);
}
ty::ty_uint(ast::ty_u8) | ty::ty_uint(ast::ty_u16) => {
fcx.type_error_message(arg.span,
|t| format!("can't pass {} to variadic function, cast to c_uint",
t), arg_ty, None);
}
_ => {}
}
}
}
}
fn err_args(len: uint) -> ~[ty::t] {
vec::from_fn(len, |_| ty::mk_err())
}
// A generic function for checking assignment expressions
fn check_assignment(fcx: @mut FnCtxt,
lhs: @ast::Expr,
rhs: @ast::Expr,
id: ast::NodeId) {
check_expr(fcx, lhs);
let lhs_type = fcx.expr_ty(lhs);
check_expr_has_type(fcx, rhs, lhs_type);
fcx.write_ty(id, ty::mk_nil());
// The callee checks for bot / err, we don't need to
}
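// Records the result type of a call expression. For `for`-sugar calls the
// closure is required to return `bool` and the call expression itself has
// type nil.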
fn write_call(fcx: @mut FnCtxt,
call_expr: @ast::Expr,
output: ty::t,
sugar: ast::CallSugar) {
let ret_ty = match sugar {
ast::ForSugar => {
match ty::get(output).sty {
ty::ty_bool => {}
_ => fcx.type_error_message(call_expr.span, |actual| {
format!("expected `for` closure to return `bool`, \
but found `{}`", actual) },
output, None)
}
ty::mk_nil()
}
_ => output
};
fcx.write_ty(call_expr.id, ret_ty);
}
// A generic function for doing all of the checking for call expressions
fn check_call(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
call_expr: @ast::Expr,
f: @ast::Expr,
args: &[@ast::Expr],
sugar: ast::CallSugar) {
// Index expressions need to be handled separately, to inform them
// that they appear in call position.
check_expr(fcx, f);
// Store the type of `f` as the type of the callee
let fn_ty = fcx.expr_ty(f);
// FIXME(#6273) should write callee type AFTER regions have
// been subst'd. However, it is awkward to deal with this
// now. Best thing would I think be to just have a separate
// "callee table" that contains the FnSig and not a general
// purpose ty::t
fcx.write_ty(callee_id, fn_ty);
// Extract the function signature from `in_fty`.
let fn_sty = structure_of(fcx, f.span, fn_ty);
// This is the "default" function signature, used in case of error.
// In that case, we check each argument against "error" in order to
// set up all the node type bindings.
let error_fn_sig = FnSig {
binder_id: ast::CRATE_NODE_ID,
inputs: err_args(args.len()),
output: ty::mk_err(),
variadic: false
};
let fn_sig = match *fn_sty {
ty::ty_bare_fn(ty::BareFnTy {sig: ref sig, ..}) |
ty::ty_closure(ty::ClosureTy {sig: ref sig, ..}) => sig,
_ => {
fcx.type_error_message(call_expr.span, |actual| {
format!("expected function but \
found `{}`", actual) }, fn_ty, None);
&error_fn_sig
}
};
// Replace any bound regions that appear in the function
// signature with region variables
let (_, _, fn_sig) =
replace_bound_regions_in_fn_sig(fcx.tcx(),
None,
fn_sig,
|br| fcx.infcx()
.next_region_var(
infer::BoundRegionInFnCall(call_expr.span, br)));
// Call the generic checker.
check_argument_types(fcx, call_expr.span, fn_sig.inputs, f,
args, sugar, DontDerefArgs, fn_sig.variadic);
write_call(fcx, call_expr, fn_sig.output, sugar);
}
// Checks a method call.
fn check_method_call(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
expr: @ast::Expr,
rcvr: @ast::Expr,
method_name: ast::Ident,
args: &[@ast::Expr],
tps: &[ast::P<ast::Ty>],
sugar: ast::CallSugar) {
check_expr(fcx, rcvr);
// no need to check for bot/err -- callee does that
let expr_t = structurally_resolved_type(fcx,
expr.span,
fcx.expr_ty(rcvr));
let tps = tps.map(|&ast_ty| fcx.to_ty(ast_ty));
match method::lookup(fcx,
expr,
rcvr,
callee_id,
method_name.name,
expr_t,
tps,
DontDerefArgs,
CheckTraitsAndInherentMethods,
AutoderefReceiver) {
Some(ref entry) => {
let method_map = fcx.inh.method_map;
method_map.insert(expr.id, (*entry));
}
None => {
debug!("(checking method call) failing expr is {}", expr.id);
fcx.type_error_message(expr.span,
|actual| {
format!("type `{}` does not implement any method in scope \
named `{}`",
actual,
fcx.ccx.tcx.sess.str_of(method_name))
},
expr_t,
None);
// Add error type for the result
fcx.write_error(expr.id);
fcx.write_error(callee_id);
}
}
// Call the generic checker.
let fn_ty = fcx.node_ty(callee_id);
let ret_ty = check_method_argument_types(fcx, expr.span,
fn_ty, expr, args, sugar,
DontDerefArgs);
write_call(fcx, expr, ret_ty, sugar);
}
// A generic function for checking the then and else in an if
// or if-check
fn check_then_else(fcx: @mut FnCtxt,
cond_expr: @ast::Expr,
then_blk: &ast::Block,
opt_else_expr: Option<@ast::Expr>,
id: ast::NodeId,
sp: Span,
expected: Option<ty::t>) {
check_expr_has_type(fcx, cond_expr, ty::mk_bool());
let branches_ty = match opt_else_expr {
Some(else_expr) => {
check_block_with_expected(fcx, then_blk, expected);
let then_ty = fcx.node_ty(then_blk.id);
check_expr_with_opt_hint(fcx, else_expr, expected);
let else_ty = fcx.expr_ty(else_expr);
infer::common_supertype(fcx.infcx(),
infer::IfExpression(sp),
true,
then_ty,
else_ty)
}
None => {
check_block_no_value(fcx, then_blk);
ty::mk_nil()
}
};
let cond_ty = fcx.expr_ty(cond_expr);
let if_ty = if ty::type_is_error(cond_ty) {
ty::mk_err()
} else if ty::type_is_bot(cond_ty) {
ty::mk_bot()
} else {
branches_ty
};
fcx.write_ty(id, if_ty);
}
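// Looks up the method that implements an overloaded operator for the
// given receiver type. On success the method is recorded in the method
// map and its arguments are checked; otherwise `unbound_method` is
// invoked and the error type is returned.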
fn lookup_op_method(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
op_ex: @ast::Expr,
self_ex: @ast::Expr,
self_t: ty::t,
opname: ast::Name,
args: ~[@ast::Expr],
deref_args: DerefArgs,
autoderef_receiver: AutoderefReceiverFlag,
unbound_method: ||,
_expected_result: Option<ty::t>
)
-> ty::t {
match method::lookup(fcx, op_ex, self_ex,
callee_id, opname, self_t, [],
deref_args, CheckTraitsOnly, autoderef_receiver) {
Some(ref origin) => {
let method_ty = fcx.node_ty(callee_id);
let method_map = fcx.inh.method_map;
method_map.insert(op_ex.id, *origin);
check_method_argument_types(fcx, op_ex.span,
method_ty, op_ex, args,
ast::NoSugar, deref_args)
}
_ => {
unbound_method();
// Check the args anyway
// so we get all the error messages
let expected_ty = ty::mk_err();
check_method_argument_types(fcx, op_ex.span,
expected_ty, op_ex, args,
ast::NoSugar, deref_args);
ty::mk_err()
}
}
}
// could be either an expr_binop or an expr_assign_binop
fn check_binop(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
expr: @ast::Expr,
op: ast::BinOp,
lhs: @ast::Expr,
rhs: @ast::Expr,
// Used only in the error case
expected_result: Option<ty::t>,
allow_overloaded_operators: AllowOverloadedOperatorsFlag
) {
let tcx = fcx.ccx.tcx;
check_expr(fcx, lhs);
// Callee does bot / err checking
let lhs_t = structurally_resolved_type(fcx, lhs.span,
fcx.expr_ty(lhs));
if ty::type_is_integral(lhs_t) && ast_util::is_shift_binop(op) {
// Shift is a special case: rhs can be any integral type
check_expr(fcx, rhs);
let rhs_t = fcx.expr_ty(rhs);
require_integral(fcx, rhs.span, rhs_t);
fcx.write_ty(expr.id, lhs_t);
return;
}
if ty::is_binopable(tcx, lhs_t, op) {
let tvar = fcx.infcx().next_ty_var();
demand::suptype(fcx, expr.span, tvar, lhs_t);
check_expr_has_type(fcx, rhs, tvar);
let result_t = match op {
ast::BiEq | ast::BiNe | ast::BiLt | ast::BiLe | ast::BiGe |
ast::BiGt => {
ty::mk_bool()
}
_ => {
lhs_t
}
};
fcx.write_ty(expr.id, result_t);
return;
}
if op == ast::BiOr || op == ast::BiAnd {
// This is an error; one of the operands must have the wrong
// type
fcx.write_error(expr.id);
fcx.write_error(rhs.id);
fcx.type_error_message(expr.span, |actual| {
format!("binary operation {} cannot be applied \
to type `{}`",
ast_util::binop_to_str(op), actual)},
lhs_t, None)
}
// Check for overloaded operators if allowed.
let result_t;
if allow_overloaded_operators == AllowOverloadedOperators {
result_t = check_user_binop(fcx,
callee_id,
expr,
lhs,
lhs_t,
op,
rhs,
expected_result);
} else {
fcx.type_error_message(expr.span,
|actual| {
format!("binary operation {} cannot be \
applied to type `{}`",
ast_util::binop_to_str(op),
actual)
},
lhs_t,
None);
result_t = ty::mk_err();
}
fcx.write_ty(expr.id, result_t);
if ty::type_is_error(result_t) {
fcx.write_ty(rhs.id, result_t);
}
}
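// Handles a binary operator that is not built in for the LHS type by
// looking up the corresponding trait method (via binop_to_method_name);
// reports an error if the operator cannot be overloaded or is unbound.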
fn check_user_binop(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
ex: @ast::Expr,
lhs_expr: @ast::Expr,
lhs_resolved_t: ty::t,
op: ast::BinOp,
rhs: @ast::Expr,
expected_result: Option<ty::t>) -> ty::t {
let tcx = fcx.ccx.tcx;
match ast_util::binop_to_method_name(op) {
Some(ref name) => {
let if_op_unbound = || {
fcx.type_error_message(ex.span, |actual| {
format!("binary operation {} cannot be applied \
to type `{}`",
ast_util::binop_to_str(op), actual)},
lhs_resolved_t, None)
};
return lookup_op_method(fcx, callee_id, ex, lhs_expr, lhs_resolved_t,
token::intern(*name),
~[rhs], DoDerefArgs, DontAutoderefReceiver, if_op_unbound,
expected_result);
}
None => ()
};
check_expr(fcx, rhs);
// If the or operator is used, it might be that the user forgot to
// supply the do keyword. Let's be more helpful in that situation.
if op == ast::BiOr {
match ty::get(lhs_resolved_t).sty {
ty::ty_bare_fn(_) | ty::ty_closure(_) => {
tcx.sess.span_note(
ex.span, "did you forget the `do` keyword for the call?");
}
_ => ()
}
}
ty::mk_err()
}
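// As above, but for the overloadable unary operators `!` and `-`.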
fn check_user_unop(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
op_str: &str,
mname: &str,
ex: @ast::Expr,
rhs_expr: @ast::Expr,
rhs_t: ty::t,
expected_t: Option<ty::t>)
-> ty::t {
lookup_op_method(
fcx, callee_id, ex, rhs_expr, rhs_t,
token::intern(mname), ~[],
DoDerefArgs, DontAutoderefReceiver,
|| {
fcx.type_error_message(ex.span, |actual| {
format!("cannot apply unary operator `{}` to type `{}`",
op_str, actual)
}, rhs_t, None);
}, expected_t)
}
// Resolves `expected` by a single level if it is a variable and passes it
// through the `unpack` function. If there is no expected type or
// resolution is not possible (e.g., no constraints yet present), just
// returns `None`.
fn unpack_expected<O>(
fcx: @mut FnCtxt,
expected: Option<ty::t>,
unpack: |&ty::sty| -> Option<O>)
-> Option<O> {
match expected {
Some(t) => {
match resolve_type(fcx.infcx(), t, force_tvar) {
Ok(t) => unpack(&ty::get(t).sty),
_ => None
}
}
_ => None
}
}
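// Checks a closure or `proc` expression. If the expected type is a
// closure type it supplies an expected signature, purity, sigil, onceness
// and bounds; otherwise these are inferred or defaulted. The body is then
// checked against the resulting signature via check_fn.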
fn check_expr_fn(fcx: @mut FnCtxt,
expr: @ast::Expr,
ast_sigil_opt: Option<ast::Sigil>,
decl: &ast::fn_decl,
body: ast::P<ast::Block>,
fn_kind: FnKind,
expected: Option<ty::t>) {
let tcx = fcx.ccx.tcx;
// Find the expected input/output types (if any). Substitute
// fresh bound regions for any bound regions we find in the
// expected types so as to avoid capture.
//
// Also try to pick up inferred purity and sigil, defaulting
// to impure and block. Note that we only will use those for
// block syntax lambdas; that is, lambdas without explicit
// sigils.
let expected_sty = unpack_expected(fcx,
expected,
|x| Some((*x).clone()));
let error_happened = false;
let (expected_sig,
expected_purity,
expected_sigil,
expected_onceness,
expected_bounds) = {
match expected_sty {
Some(ty::ty_closure(ref cenv)) => {
let (_, _, sig) =
replace_bound_regions_in_fn_sig(
tcx, None, &cenv.sig,
|_| fcx.inh.infcx.fresh_bound_region(expr.id));
(Some(sig), cenv.purity, cenv.sigil,
cenv.onceness, cenv.bounds)
}
_ => {
// Not an error! Means we're inferring the closure type
let mut sigil = ast::BorrowedSigil;
let mut onceness = ast::Many;
let mut bounds = ty::EmptyBuiltinBounds();
match expr.node {
ast::ExprProc(..) => {
sigil = ast::OwnedSigil;
onceness = ast::Once;
bounds.add(ty::BoundSend);
}
_ => ()
}
(None, ast::impure_fn, sigil,
onceness, bounds)
}
}
};
// If the proto is specified, use that, otherwise select a
// proto based on inference.
let (sigil, purity) = match ast_sigil_opt {
Some(p) => (p, ast::impure_fn),
None => (expected_sigil, expected_purity)
};
// construct the function type
let fn_ty = astconv::ty_of_closure(fcx,
&fcx.infcx(),
expr.id,
sigil,
purity,
expected_onceness,
expected_bounds,
&None,
decl,
expected_sig,
expr.span);
let fty_sig;
let fty = if error_happened {
fty_sig = FnSig {
binder_id: ast::CRATE_NODE_ID,
inputs: fn_ty.sig.inputs.map(|_| ty::mk_err()),
output: ty::mk_err(),
variadic: false
};
ty::mk_err()
} else {
let fn_ty_copy = fn_ty.clone();
fty_sig = fn_ty.sig.clone();
ty::mk_closure(tcx, fn_ty_copy)
};
debug!("check_expr_fn_with_unifier fty={}",
fcx.infcx().ty_to_str(fty));
fcx.write_ty(expr.id, fty);
let (inherited_purity, id) =
ty::determine_inherited_purity((fcx.ps.purity, fcx.ps.def),
(purity, expr.id),
sigil);
check_fn(fcx.ccx, None, inherited_purity, &fty_sig,
decl, id, body, fn_kind, fcx.inh);
}
// Check field access expressions
fn check_field(fcx: @mut FnCtxt,
expr: @ast::Expr,
base: @ast::Expr,
field: ast::Name,
tys: &[ast::P<ast::Ty>]) {
let tcx = fcx.ccx.tcx;
let bot = check_expr(fcx, base);
let expr_t = structurally_resolved_type(fcx, expr.span,
fcx.expr_ty(base));
let (base_t, derefs) = do_autoderef(fcx, expr.span, expr_t);
match *structure_of(fcx, expr.span, base_t) {
ty::ty_struct(base_id, ref substs) => {
// This is just for fields -- the same code handles
// methods in both classes and traits
// (1) verify that the class id actually has a field called
// field
debug!("class named {}", ppaux::ty_to_str(tcx, base_t));
let cls_items = ty::lookup_struct_fields(tcx, base_id);
match lookup_field_ty(tcx, base_id, cls_items,
field, &(*substs)) {
Some(field_ty) => {
// (2) look up what field's type is, and return it
fcx.write_ty(expr.id, field_ty);
fcx.write_autoderef_adjustment(base.id, derefs);
return bot;
}
None => ()
}
}
_ => ()
}
let tps : ~[ty::t] = tys.iter().map(|&ty| fcx.to_ty(ty)).collect();
match method::lookup(fcx,
expr,
base,
expr.id,
field,
expr_t,
tps,
DontDerefArgs,
CheckTraitsAndInherentMethods,
AutoderefReceiver) {
Some(_) => {
fcx.type_error_message(
expr.span,
|actual| {
format!("attempted to take value of method `{}` on type `{}` \
(try writing an anonymous function)",
token::interner_get(field), actual)
},
expr_t, None);
}
None => {
fcx.type_error_message(
expr.span,
|actual| {
format!("attempted access of field `{}` on type `{}`, \
but no field with that name was found",
token::interner_get(field), actual)
},
expr_t, None);
}
}
fcx.write_error(expr.id);
}
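// Typechecks the field initializers of a struct literal or struct-like
// enum variant: every named field must exist and be specified at most
// once, and each initializer must be coercible to the declared field
// type. When `check_completeness` is true (i.e. there is no base
// expression), unspecified fields are reported as missing.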
fn check_struct_or_variant_fields(fcx: @mut FnCtxt,
struct_ty: ty::t,
span: Span,
class_id: ast::DefId,
node_id: ast::NodeId,
substitutions: ty::substs,
field_types: &[ty::field_ty],
ast_fields: &[ast::Field],
check_completeness: bool) {
let tcx = fcx.ccx.tcx;
let mut class_field_map = HashMap::new();
let mut fields_found = 0;
for field in field_types.iter() {
class_field_map.insert(field.name, (field.id, false));
}
let mut error_happened = false;
// Typecheck each field.
for field in ast_fields.iter() {
let mut expected_field_type = ty::mk_err();
let pair = class_field_map.find(&field.ident.node.name).map(|x| *x);
match pair {
None => {
fcx.type_error_message(
field.ident.span,
|actual| {
format!("structure `{}` has no field named `{}`",
actual, tcx.sess.str_of(field.ident.node))
}, struct_ty, None);
error_happened = true;
}
Some((_, true)) => {
tcx.sess.span_err(
field.ident.span,
format!("field `{}` specified more than once",
tcx.sess.str_of(field.ident.node)));
error_happened = true;
}
Some((field_id, false)) => {
expected_field_type =
ty::lookup_field_type(
tcx, class_id, field_id, &substitutions);
class_field_map.insert(
field.ident.node.name, (field_id, true));
fields_found += 1;
}
}
// Make sure to give a type to the field even if there's
// an error, so we can continue typechecking
check_expr_coercable_to_type(
fcx,
field.expr,
expected_field_type);
}
if error_happened {
fcx.write_error(node_id);
}
if check_completeness && !error_happened {
// Make sure the programmer specified all the fields.
assert!(fields_found <= field_types.len());
if fields_found < field_types.len() {
let mut missing_fields = ~[];
for class_field in field_types.iter() {
let name = class_field.name;
let (_, seen) = *class_field_map.get(&name);
if !seen {
missing_fields.push(
~"`" + token::interner_get(name) + "`");
}
}
tcx.sess.span_err(span,
format!("missing field{}: {}",
if missing_fields.len() == 1 {
""
} else {
"s"
},
missing_fields.connect(", ")));
}
}
if !error_happened {
fcx.write_ty(node_id, ty::mk_struct(fcx.ccx.tcx,
class_id, substitutions));
}
}
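// Typechecks a struct literal such as `Foo {a: 1, b: 2}`, optionally with
// a functional-update base expression.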
fn check_struct_constructor(fcx: @mut FnCtxt,
id: ast::NodeId,
span: codemap::Span,
class_id: ast::DefId,
fields: &[ast::Field],
base_expr: Option<@ast::Expr>) {
let tcx = fcx.ccx.tcx;
// Look up the number of type parameters and the raw type, and
// determine whether the class is region-parameterized.
let item_type = ty::lookup_item_type(tcx, class_id);
let type_parameter_count = item_type.generics.type_param_defs.len();
let region_parameter_count = item_type.generics.region_param_defs.len();
let raw_type = item_type.ty;
// Generate the struct type.
let regions = fcx.infcx().next_region_vars(
infer::BoundRegionInTypeOrImpl(span),
region_parameter_count);
let type_parameters = fcx.infcx().next_ty_vars(type_parameter_count);
let substitutions = substs {
regions: ty::NonerasedRegions(opt_vec::from(regions)),
self_ty: None,
tps: type_parameters
};
let mut struct_type = ty::subst(tcx, &substitutions, raw_type);
// Look up and check the fields.
let class_fields = ty::lookup_struct_fields(tcx, class_id);
check_struct_or_variant_fields(fcx,
struct_type,
span,
class_id,
id,
substitutions,
class_fields,
fields,
base_expr.is_none());
if ty::type_is_error(fcx.node_ty(id)) {
struct_type = ty::mk_err();
}
// Check the base expression if necessary.
match base_expr {
None => {}
Some(base_expr) => {
check_expr_has_type(fcx, base_expr, struct_type);
if ty::type_is_bot(fcx.node_ty(base_expr.id)) {
struct_type = ty::mk_bot();
}
}
}
// Write in the resulting type.
fcx.write_ty(id, struct_type);
}
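// Typechecks a struct-like enum variant literal; the resulting expression
// has the enum type, with fresh type and region variables for the enum's
// parameters.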
fn check_struct_enum_variant(fcx: @mut FnCtxt,
id: ast::NodeId,
span: codemap::Span,
enum_id: ast::DefId,
variant_id: ast::DefId,
fields: &[ast::Field]) {
let tcx = fcx.ccx.tcx;
// Look up the number of type parameters and the raw type, and
// determine whether the enum is region-parameterized.
let item_type = ty::lookup_item_type(tcx, enum_id);
let type_parameter_count = item_type.generics.type_param_defs.len();
let region_parameter_count = item_type.generics.region_param_defs.len();
let raw_type = item_type.ty;
// Generate the enum type.
let regions = fcx.infcx().next_region_vars(
infer::BoundRegionInTypeOrImpl(span),
region_parameter_count);
let type_parameters = fcx.infcx().next_ty_vars(type_parameter_count);
let substitutions = substs {
regions: ty::NonerasedRegions(opt_vec::from(regions)),
self_ty: None,
tps: type_parameters
};
let enum_type = ty::subst(tcx, &substitutions, raw_type);
// Look up and check the enum variant fields.
let variant_fields = ty::lookup_struct_fields(tcx, variant_id);
check_struct_or_variant_fields(fcx,
enum_type,
span,
variant_id,
id,
substitutions,
variant_fields,
fields,
true);
fcx.write_ty(id, enum_type);
}
let tcx = fcx.ccx.tcx;
let id = expr.id;
match expr.node {
ast::ExprVstore(ev, vst) => {
let typ = match ev.node {
ast::ExprLit(@codemap::Spanned { node: ast::lit_str(..), .. }) => {
let tt = ast_expr_vstore_to_vstore(fcx, ev, vst);
ty::mk_estr(tcx, tt)
}
ast::ExprVec(ref args, mutbl) => {
let tt = ast_expr_vstore_to_vstore(fcx, ev, vst);
let mutability;
let mut any_error = false;
let mut any_bot = false;
match vst {
ast::ExprVstoreMutBox | ast::ExprVstoreMutSlice => {
mutability = ast::MutMutable
}
_ => mutability = mutbl
}
let t: ty::t = fcx.infcx().next_ty_var();
for e in args.iter() {
check_expr_has_type(fcx, *e, t);
let arg_t = fcx.expr_ty(*e);
if ty::type_is_error(arg_t) {
any_error = true;
}
else if ty::type_is_bot(arg_t) {
any_bot = true;
}
}
if any_error {
ty::mk_err()
}
else if any_bot {
ty::mk_bot()
}
else {
ty::mk_evec(tcx, ty::mt {ty: t, mutbl: mutability}, tt)
}
}
ast::ExprRepeat(element, count_expr, mutbl) => {
check_expr_with_hint(fcx, count_expr, ty::mk_uint());
let _ = ty::eval_repeat_count(fcx, count_expr);
let tt = ast_expr_vstore_to_vstore(fcx, ev, vst);
let mutability = match vst {
ast::ExprVstoreMutBox | ast::ExprVstoreMutSlice => {
ast::MutMutable
}
_ => mutbl
};
let t: ty::t = fcx.infcx().next_ty_var();
check_expr_has_type(fcx, element, t);
let arg_t = fcx.expr_ty(element);
if ty::type_is_error(arg_t) {
ty::mk_err()
} else if ty::type_is_bot(arg_t) {
ty::mk_bot()
} else {
ty::mk_evec(tcx, ty::mt {ty: t, mutbl: mutability}, tt)
}
}
_ =>
tcx.sess.span_bug(expr.span, "vstore modifier on non-sequence")
};
fcx.write_ty(ev.id, typ);
fcx.write_ty(id, typ);
}
ast::ExprLit(lit) => {
let typ = check_lit(fcx, lit);
fcx.write_ty(id, typ);
}
ast::ExprBinary(callee_id, op, lhs, rhs) => {
check_binop(fcx,
callee_id,
expr,
op,
lhs,
rhs,
expected,
AllowOverloadedOperators);
let lhs_ty = fcx.expr_ty(lhs);
let rhs_ty = fcx.expr_ty(rhs);
if ty::type_is_error(lhs_ty) ||
ty::type_is_error(rhs_ty) {
fcx.write_error(id);
}
else if ty::type_is_bot(lhs_ty) ||
(ty::type_is_bot(rhs_ty) && !ast_util::lazy_binop(op)) {
fcx.write_bot(id);
}
}
ast::ExprAssignOp(callee_id, op, lhs, rhs) => {
check_binop(fcx,
callee_id,
expr,
op,
lhs,
rhs,
expected,
DontAllowOverloadedOperators);
let lhs_t = fcx.expr_ty(lhs);
let result_t = fcx.expr_ty(expr);
demand::suptype(fcx, expr.span, result_t, lhs_t);
let tcx = fcx.tcx();
if !ty::expr_is_lval(tcx, fcx.ccx.method_map, lhs) {
tcx.sess.span_err(lhs.span, "illegal left-hand side expression");
}
// Overwrite result of check_binop...this preserves existing behavior
// but seems quite dubious with regard to user-defined methods
// and so forth. - Niko
if !ty::type_is_error(result_t)
&& !ty::type_is_bot(result_t) {
fcx.write_nil(expr.id);
}
}
ast::ExprUnary(callee_id, unop, oprnd) => {
let exp_inner = unpack_expected(fcx, expected, |sty| {
match unop {
ast::UnBox(_) | ast::UnUniq => match *sty {
ty::ty_box(ref mt) | ty::ty_uniq(ref mt) => Some(mt.ty),
_ => None
},
ast::UnNot | ast::UnNeg => expected,
ast::UnDeref => None
}
});
check_expr_with_opt_hint(fcx, oprnd, exp_inner);
let mut oprnd_t = fcx.expr_ty(oprnd);
if !ty::type_is_error(oprnd_t) &&
!ty::type_is_bot(oprnd_t) {
match unop {
ast::UnBox(mutbl) => {
oprnd_t = ty::mk_box(tcx,
ty::mt {ty: oprnd_t, mutbl: mutbl});
}
ast::UnUniq => {
oprnd_t = ty::mk_uniq(tcx,
ty::mt {ty: oprnd_t,
mutbl: ast::MutImmutable});
}
ast::UnDeref => {
let sty = structure_of(fcx, expr.span, oprnd_t);
let operand_ty = ty::deref_sty(tcx, sty, true);
match operand_ty {
Some(mt) => {
oprnd_t = mt.ty
}
None => {
match *sty {
ty::ty_enum(..) => {
tcx.sess.span_err(
expr.span,
"can only dereference enums with a single variant which \
has a single argument");
}
ty::ty_struct(..) => {
tcx.sess.span_err(
expr.span,
"can only dereference structs with one anonymous field");
}
_ => {
fcx.type_error_message(expr.span,
|actual| {
format!("type {} cannot be dereferenced", actual)
}, oprnd_t, None);
}
}
}
}
}
ast::UnNot => {
oprnd_t = structurally_resolved_type(fcx, oprnd.span,
oprnd_t);
if !(ty::type_is_integral(oprnd_t) ||
ty::get(oprnd_t).sty == ty::ty_bool) {
oprnd_t = check_user_unop(fcx, callee_id,
"!", "not", expr, oprnd, oprnd_t,
expected);
}
}
ast::UnNeg => {
oprnd_t = structurally_resolved_type(fcx, oprnd.span,
oprnd_t);
if !(ty::type_is_integral(oprnd_t) ||
ty::type_is_fp(oprnd_t)) {
oprnd_t = check_user_unop(fcx, callee_id,
"-", "neg", expr, oprnd, oprnd_t, expected);
}
}
}
}
fcx.write_ty(id, oprnd_t);
}
ast::ExprAddrOf(mutbl, oprnd) => {
let hint = unpack_expected(
fcx, expected,
|sty| match *sty { ty::ty_rptr(_, ref mt) => Some(mt.ty),
_ => None });
check_expr_with_opt_hint(fcx, oprnd, hint);
// Note: at this point, we cannot say what the best lifetime
// is to use for resulting pointer. We want to use the
// shortest lifetime possible so as to avoid spurious borrowck
// errors. Moreover, the longest lifetime will depend on the
// precise details of the value whose address is being taken
// (and how long it is valid), which we don't know yet until type
// inference is complete.
//
// Therefore, here we simply generate a region variable. The
// region inferencer will then select the ultimate value.
// Finally, borrowck is charged with guaranteeing that the
// value whose address was taken can actually be made to live
// as long as it needs to live.
let region = fcx.infcx().next_region_var(
infer::AddrOfRegion(expr.span));
let tm = ty::mt { ty: fcx.expr_ty(oprnd), mutbl: mutbl };
let oprnd_t = if ty::type_is_error(tm.ty) {
ty::mk_err()
} else if ty::type_is_bot(tm.ty) {
ty::mk_bot()
}
else {
ty::mk_rptr(tcx, region, tm)
};
fcx.write_ty(id, oprnd_t);
}
ast::ExprPath(ref pth) => {
let defn = lookup_def(fcx, pth.span, id);
check_type_parameter_positions_in_path(fcx, pth, defn);
let tpt = ty_param_bounds_and_ty_for_def(fcx, expr.span, defn);
instantiate_path(fcx, pth, tpt, defn, expr.span, expr.id);
}
ast::ExprSelf => {
let definition = lookup_def(fcx, expr.span, id);
let ty_param_bounds_and_ty =
ty_param_bounds_and_ty_for_def(fcx, expr.span, definition);
fcx.write_ty(id, ty_param_bounds_and_ty.ty);
}
ast::ExprInlineAsm(ref ia) => {
for &(_, input) in ia.inputs.iter() {
check_expr(fcx, input);
}
for &(_, out) in ia.outputs.iter() {
check_expr(fcx, out);
}
fcx.write_nil(id);
}
ast::ExprMac(_) => tcx.sess.bug("unexpanded macro"),
ast::ExprBreak(_) => { fcx.write_bot(id); }
ast::ExprAgain(_) => { fcx.write_bot(id); }
ast::ExprRet(expr_opt) => {
let ret_ty = fcx.ret_ty;
match expr_opt {
None => match fcx.mk_eqty(false, infer::Misc(expr.span),
ret_ty, ty::mk_nil()) {
result::Ok(_) => { /* fall through */ }
result::Err(_) => {
tcx.sess.span_err(
expr.span,
"`return;` in function returning non-nil");
}
},
Some(e) => {
check_expr_has_type(fcx, e, ret_ty);
}
}
fcx.write_bot(id);
}
ast::ExprLogLevel => {
fcx.write_ty(id, ty::mk_u32())
}
ast::ExprParen(a) => {
check_expr_with_opt_hint(fcx, a, expected);
fcx.write_ty(id, fcx.expr_ty(a));
}
ast::ExprAssign(lhs, rhs) => {
check_assignment(fcx, lhs, rhs, id);
let tcx = fcx.tcx();
if !ty::expr_is_lval(tcx, fcx.ccx.method_map, lhs) {
tcx.sess.span_err(lhs.span, "illegal left-hand side expression");
}
let lhs_ty = fcx.expr_ty(lhs);
let rhs_ty = fcx.expr_ty(rhs);
if ty::type_is_error(lhs_ty) || ty::type_is_error(rhs_ty) {
fcx.write_error(id);
}
else if ty::type_is_bot(lhs_ty) || ty::type_is_bot(rhs_ty) {
fcx.write_bot(id);
}
else {
fcx.write_nil(id);
}
}
ast::ExprIf(cond, then_blk, opt_else_expr) => {
check_then_else(fcx, cond, then_blk, opt_else_expr,
id, expr.span, expected);
}
ast::ExprWhile(cond, body) => {
check_expr_has_type(fcx, cond, ty::mk_bool());
check_block_no_value(fcx, body);
let cond_ty = fcx.expr_ty(cond);
let body_ty = fcx.node_ty(body.id);
if ty::type_is_error(cond_ty) || ty::type_is_error(body_ty) {
fcx.write_error(id);
}
else if ty::type_is_bot(cond_ty) {
fcx.write_bot(id);
}
else {
fcx.write_nil(id);
}
}
ast::ExprForLoop(..) =>
fail!("non-desugared expr_for_loop"),
ast::ExprLoop(body, _) => {
check_block_no_value(fcx, (body));
if !may_break(tcx, expr.id, body) {
fcx.write_bot(id);
}
else {
fcx.write_nil(id);
}
}
ast::ExprMatch(discrim, ref arms) => {
_match::check_match(fcx, expr, discrim, *arms);
}
ast::ExprFnBlock(decl, body) => {
check_expr_fn(fcx,
expr,
Some(ast::BorrowedSigil),
decl,
body,
Vanilla,
expected);
}
ast::ExprProc(decl, body) => {
check_expr_fn(fcx,
expr,
Some(ast::OwnedSigil),
decl,
body,
Vanilla,
expected);
}
ast::ExprDoBody(b) => {
let expected_sty = unpack_expected(fcx,
expected,
|x| Some((*x).clone()));
let inner_ty = match expected_sty {
Some(ty::ty_closure(ref closure_ty))
if closure_ty.sigil == ast::OwnedSigil => {
expected.unwrap()
}
_ => match expected {
Some(expected_t) => {
fcx.type_error_message(expr.span, |actual| {
format!("last argument in `do` call \
has non-procedure type: {}",
actual)
}, expected_t, None);
let err_ty = ty::mk_err();
fcx.write_ty(id, err_ty);
err_ty
}
None => {
fcx.tcx().sess.impossible_case(
expr.span,
"do body must have expected type")
}
}
};
match b.node {
ast::ExprFnBlock(decl, body) => {
check_expr_fn(fcx, b, None,
decl, body, DoBlock, Some(inner_ty));
demand::suptype(fcx, b.span, inner_ty, fcx.expr_ty(b));
}
// argh
_ => fail!("expected fn ty")
}
fcx.write_ty(expr.id, fcx.node_ty(b.id));
}
ast::ExprBlock(b) => {
check_block_with_expected(fcx, b, expected);
fcx.write_ty(id, fcx.node_ty(b.id));
}
ast::ExprCall(f, ref args, sugar) => {
check_call(fcx, expr.id, expr, f, *args, sugar);
let f_ty = fcx.expr_ty(f);
let (args_bot, args_err) = args.iter().fold((false, false),
|(rest_bot, rest_err), a| {
// is this not working?
let a_ty = fcx.expr_ty(*a);
(rest_bot || ty::type_is_bot(a_ty),
rest_err || ty::type_is_error(a_ty))});
if ty::type_is_error(f_ty) || args_err {
fcx.write_error(id);
}
else if ty::type_is_bot(f_ty) || args_bot {
fcx.write_bot(id);
}
}
ast::ExprMethodCall(callee_id, rcvr, ident, ref tps, ref args, sugar) => {
check_method_call(fcx, callee_id, expr, rcvr, ident, *args, *tps, sugar);
let f_ty = fcx.expr_ty(rcvr);
let arg_tys = args.map(|a| fcx.expr_ty(*a));
let (args_bot, args_err) = arg_tys.iter().fold((false, false),
|(rest_bot, rest_err), a| {
(rest_bot || ty::type_is_bot(*a),
rest_err || ty::type_is_error(*a))});
if ty::type_is_error(f_ty) || args_err {
fcx.write_error(id);
}
else if ty::type_is_bot(f_ty) || args_bot {
fcx.write_bot(id);
}
}
ast::ExprCast(e, t) => {
check_expr(fcx, e);
let t_1 = fcx.to_ty(t);
let t_e = fcx.expr_ty(e);
debug!("t_1={}", fcx.infcx().ty_to_str(t_1));
debug!("t_e={}", fcx.infcx().ty_to_str(t_e));
if ty::type_is_error(t_e) {
fcx.write_error(id);
}
else if ty::type_is_bot(t_e) {
fcx.write_bot(id);
}
else {
match ty::get(t_1).sty {
// This will be looked up later on
ty::ty_trait(..) => (),
_ => {
if ty::type_is_nil(t_e) {
fcx.type_error_message(expr.span, |actual| {
format!("cast from nil: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
} else if ty::type_is_nil(t_1) {
fcx.type_error_message(expr.span, |actual| {
format!("cast to nil: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
}
let t1 = structurally_resolved_type(fcx, e.span, t_1);
let te = structurally_resolved_type(fcx, e.span, t_e);
let t_1_is_scalar = type_is_scalar(fcx, expr.span, t_1);
let t_1_is_char = type_is_char(fcx, expr.span, t_1);
let t_1_is_bare_fn = type_is_bare_fn(fcx, expr.span, t_1);
// casts to scalars other than `char` and `bare fn` are trivial
let t_1_is_trivial = t_1_is_scalar &&
!t_1_is_char && !t_1_is_bare_fn;
if type_is_c_like_enum(fcx, expr.span, t_e) && t_1_is_trivial {
// casts from C-like enums are allowed
} else if t_1_is_char {
if ty::get(te).sty != ty::ty_uint(ast::ty_u8) {
fcx.type_error_message(expr.span, |actual| {
format!("only `u8` can be cast as `char`, not `{}`", actual)
}, t_e, None);
}
} else if ty::get(t1).sty == ty::ty_bool {
fcx.tcx().sess.span_err(expr.span,
"cannot cast as `bool`, compare with zero instead");
} else if type_is_region_ptr(fcx, expr.span, t_e) &&
type_is_unsafe_ptr(fcx, expr.span, t_1) {
fn is_vec(t: ty::t) -> bool {
match ty::get(t).sty {
ty::ty_evec(_,_) => true,
_ => false
}
}
fn types_compatible(fcx: @mut FnCtxt, sp: Span,
t1: ty::t, t2: ty::t) -> bool {
if !is_vec(t1) {
false
} else {
let el = ty::sequence_element_type(fcx.tcx(),
t1);
infer::mk_eqty(fcx.infcx(), false,
infer::Misc(sp), el, t2).is_ok()
}
}
// Due to the limitations of LLVM global constants,
// region pointers end up pointing at copies of
// vector elements instead of the original values.
// To allow unsafe pointers to work correctly, we
// need to special-case obtaining an unsafe pointer
// from a region pointer to a vector.
/* this cast is only allowed from &[T] to *T or
&T to *T. */
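                        // (Illustration: a cast such as `v as *u8`, where
                        // `v: &[u8]`, should take the first arm below because
                        // the slice's element type unifies with the pointee
                        // type; a plain `&T as *T` cast instead falls through
                        // to `demand::coerce`.)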
match (&ty::get(te).sty, &ty::get(t_1).sty) {
(&ty::ty_rptr(_, mt1), &ty::ty_ptr(mt2))
if types_compatible(fcx, e.span,
mt1.ty, mt2.ty) => {
/* this case is allowed */
}
_ => {
demand::coerce(fcx, e.span, t_1, e);
}
}
} else if !(type_is_scalar(fcx,expr.span,t_e)
&& t_1_is_trivial) {
/*
If more type combinations should be supported than are
supported here, then file an enhancement issue and
record the issue number in this comment.
*/
fcx.type_error_message(expr.span, |actual| {
format!("non-scalar cast: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
}
}
}
fcx.write_ty(id, t_1);
}
}
ast::ExprVec(ref args, mutbl) => {
let t: ty::t = fcx.infcx().next_ty_var();
for e in args.iter() {
check_expr_has_type(fcx, *e, t);
}
let typ = ty::mk_evec(tcx, ty::mt {ty: t, mutbl: mutbl},
ty::vstore_fixed(args.len()));
fcx.write_ty(id, typ);
}
ast::ExprRepeat(element, count_expr, mutbl) => {
check_expr_with_hint(fcx, count_expr, ty::mk_uint());
let count = ty::eval_repeat_count(fcx, count_expr);
let t: ty::t = fcx.infcx().next_ty_var();
check_expr_has_type(fcx, element, t);
let element_ty = fcx.expr_ty(element);
if ty::type_is_error(element_ty) {
fcx.write_error(id);
}
else if ty::type_is_bot(element_ty) {
fcx.write_bot(id);
}
else {
let t = ty::mk_evec(tcx, ty::mt {ty: t, mutbl: mutbl},
ty::vstore_fixed(count));
fcx.write_ty(id, t);
}
}
ast::ExprTup(ref elts) => {
let flds = unpack_expected(fcx, expected, |sty| {
match *sty {
ty::ty_tup(ref flds) => Some((*flds).clone()),
_ => None
}
});
let mut bot_field = false;
let mut err_field = false;
let elt_ts = elts.iter().enumerate().map(|(i, e)| {
let opt_hint = match flds {
Some(ref fs) if i < fs.len() => Some(fs[i]),
_ => None
};
check_expr_with_opt_hint(fcx, *e, opt_hint);
let t = fcx.expr_ty(*e);
err_field = err_field || ty::type_is_error(t);
bot_field = bot_field || ty::type_is_bot(t);
t
}).collect();
if bot_field {
fcx.write_bot(id);
} else if err_field {
fcx.write_error(id);
} else {
let typ = ty::mk_tup(tcx, elt_ts);
fcx.write_ty(id, typ);
}
}
ast::ExprStruct(ref path, ref fields, base_expr) => {
// Resolve the path.
match tcx.def_map.find(&id) {
Some(&ast::DefStruct(type_def_id)) => {
check_struct_constructor(fcx, id, expr.span, type_def_id,
*fields, base_expr);
}
Some(&ast::DefVariant(enum_id, variant_id, _)) => {
check_struct_enum_variant(fcx, id, expr.span, enum_id,
variant_id, *fields);
}
_ => {
tcx.sess.span_bug(path.span,
"structure constructor does not name a structure type");
}
}
}
ast::ExprField(base, field, ref tys) => {
check_field(fcx, expr, base, field.name, *tys);
}
ast::ExprIndex(callee_id, base, idx) => {
check_expr(fcx, base);
check_expr(fcx, idx);
let raw_base_t = fcx.expr_ty(base);
let idx_t = fcx.expr_ty(idx);
if ty::type_is_error(raw_base_t) || ty::type_is_bot(raw_base_t) {
fcx.write_ty(id, raw_base_t);
} else if ty::type_is_error(idx_t) || ty::type_is_bot(idx_t) {
fcx.write_ty(id, idx_t);
} else {
let (base_t, derefs) = do_autoderef(fcx, expr.span, raw_base_t);
let base_sty = structure_of(fcx, expr.span, base_t);
match ty::index_sty(base_sty) {
Some(mt) => {
require_integral(fcx, idx.span, idx_t);
fcx.write_ty(id, mt.ty);
fcx.write_autoderef_adjustment(base.id, derefs);
}
None => {
let resolved = structurally_resolved_type(fcx,
expr.span,
raw_base_t);
let index_ident = tcx.sess.ident_of("index");
let error_message = || {
fcx.type_error_message(expr.span,
|actual| {
format!("cannot index a value \
of type `{}`",
actual)
},
base_t,
None);
};
let ret_ty = lookup_op_method(fcx,
callee_id,
expr,
base,
resolved,
index_ident.name,
~[idx],
DoDerefArgs,
AutoderefReceiver,
error_message,
expected);
fcx.write_ty(id, ret_ty);
}
}
}
}
}
debug!("type of expr({}) {} is...", expr.id,
syntax::print::pprust::expr_to_str(expr, tcx.sess.intr()));
debug!("... {}, expected is {}",
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)),
match expected {
Some(t) => ppaux::ty_to_str(tcx, t),
_ => ~"empty"
});
unifier();
}
pub fn require_integral(fcx: @mut FnCtxt, sp: Span, t: ty::t) {
if !type_is_integral(fcx, sp, t) {
fcx.type_error_message(sp, |actual| {
format!("mismatched types: expected integral type but found `{}`",
actual)
}, t, None);
}
}
pub fn check_decl_initializer(fcx: @mut FnCtxt,
nid: ast::NodeId,
init: @ast::Expr)
{
let local_ty = fcx.local_ty(init.span, nid);
check_expr_coercable_to_type(fcx, init, local_ty)
}
pub fn check_decl_local(fcx: @mut FnCtxt, local: @ast::Local) {
let tcx = fcx.ccx.tcx;
let t = fcx.local_ty(local.span, local.id);
fcx.write_ty(local.id, t);
match local.init {
Some(init) => {
check_decl_initializer(fcx, local.id, init);
let init_ty = fcx.expr_ty(init);
if ty::type_is_error(init_ty) || ty::type_is_bot(init_ty) {
fcx.write_ty(local.id, init_ty);
}
}
_ => {}
}
let pcx = pat_ctxt {
fcx: fcx,
map: pat_id_map(tcx.def_map, local.pat),
};
_match::check_pat(&pcx, local.pat, t);
let pat_ty = fcx.node_ty(local.pat.id);
if ty::type_is_error(pat_ty) || ty::type_is_bot(pat_ty) {
fcx.write_ty(local.id, pat_ty);
}
}
pub fn check_stmt(fcx: @mut FnCtxt, stmt: @ast::Stmt) {
let node_id;
let mut saw_bot = false;
let mut saw_err = false;
match stmt.node {
ast::StmtDecl(decl, id) => {
node_id = id;
match decl.node {
ast::DeclLocal(ref l) => {
check_decl_local(fcx, *l);
let l_t = fcx.node_ty(l.id);
saw_bot = saw_bot || ty::type_is_bot(l_t);
saw_err = saw_err || ty::type_is_error(l_t);
}
ast::DeclItem(_) => {/* ignore for now */ }
}
}
ast::StmtExpr(expr, id) => {
node_id = id;
// Check with expected type of ()
check_expr_has_type(fcx, expr, ty::mk_nil());
let expr_ty = fcx.expr_ty(expr);
saw_bot = saw_bot || ty::type_is_bot(expr_ty);
saw_err = saw_err || ty::type_is_error(expr_ty);
}
ast::StmtSemi(expr, id) => {
node_id = id;
check_expr(fcx, expr);
let expr_ty = fcx.expr_ty(expr);
saw_bot |= ty::type_is_bot(expr_ty);
saw_err |= ty::type_is_error(expr_ty);
}
ast::StmtMac(..) => fcx.ccx.tcx.sess.bug("unexpanded macro")
}
if saw_bot {
fcx.write_bot(node_id);
}
else if saw_err {
fcx.write_error(node_id);
}
else {
fcx.write_nil(node_id)
}
}
pub fn check_block_no_value(fcx: @mut FnCtxt, blk: &ast::Block) {
check_block_with_expected(fcx, blk, Some(ty::mk_nil()));
let blkty = fcx.node_ty(blk.id);
if ty::type_is_error(blkty) {
fcx.write_error(blk.id);
}
else if ty::type_is_bot(blkty) {
fcx.write_bot(blk.id);
}
else {
let nilty = ty::mk_nil();
demand::suptype(fcx, blk.span, nilty, blkty);
}
}
pub fn check_block(fcx0: @mut FnCtxt, blk: &ast::Block) {
check_block_with_expected(fcx0, blk, None)
}
pub fn check_block_with_expected(fcx: @mut FnCtxt,
blk: &ast::Block,
expected: Option<ty::t>) {
let purity_state = fcx.ps.recurse(blk);
let prev = replace(&mut fcx.ps, purity_state);
fcx.with_region_lb(blk.id, || {
let mut warned = false;
let mut last_was_bot = false;
let mut any_bot = false;
let mut any_err = false;
for s in blk.stmts.iter() {
check_stmt(fcx, *s);
let s_id = ast_util::stmt_id(*s);
let s_ty = fcx.node_ty(s_id);
if last_was_bot && !warned && match s.node {
ast::StmtDecl(@codemap::Spanned { node: ast::DeclLocal(_),
..}, _) |
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => {
true
}
_ => false
} {
fcx.ccx.tcx.sess.add_lint(unreachable_code, s_id, s.span,
~"unreachable statement");
warned = true;
}
if ty::type_is_bot(s_ty) {
last_was_bot = true;
}
any_bot = any_bot || ty::type_is_bot(s_ty);
any_err = any_err || ty::type_is_error(s_ty);
}
match blk.expr {
None => if any_err {
fcx.write_error(blk.id);
}
else if any_bot {
fcx.write_bot(blk.id);
}
else {
fcx.write_nil(blk.id);
},
Some(e) => {
if any_bot && !warned {
fcx.ccx.tcx.sess.add_lint(unreachable_code, e.id, e.span,
~"unreachable expression");
}
check_expr_with_opt_hint(fcx, e, expected);
let ety = fcx.expr_ty(e);
fcx.write_ty(blk.id, ety);
if any_err {
fcx.write_error(blk.id);
}
else if any_bot {
fcx.write_bot(blk.id);
}
}
};
});
fcx.ps = prev;
}
pub fn check_const(ccx: @mut CrateCtxt,
sp: Span,
e: @ast::Expr,
id: ast::NodeId) {
let rty = ty::node_id_to_type(ccx.tcx, id);
let fcx = blank_fn_ctxt(ccx, rty, e.id);
let declty = fcx.ccx.tcx.tcache.get(&local_def(id)).ty;
check_const_with_ty(fcx, sp, e, declty);
}
pub fn check_const_with_ty(fcx: @mut FnCtxt,
_: Span,
e: @ast::Expr,
declty: ty::t) {
check_expr(fcx, e);
let cty = fcx.expr_ty(e);
demand::suptype(fcx, e.span, declty, cty);
regionck::regionck_expr(fcx, e);
writeback::resolve_type_vars_in_expr(fcx, e);
}
/// Checks whether a type can be created without an instance of itself.
/// This is similar but different from the question of whether a type
/// can be represented. For example, the following type:
///
/// enum foo { None, Some(foo) }
///
/// is instantiable but is not representable. Similarly, the type
///
/// enum foo { Some(@foo) }
///
/// is representable, but not instantiable.
pub fn check_instantiable(tcx: ty::ctxt,
sp: Span,
item_id: ast::NodeId) {
let item_ty = ty::node_id_to_type(tcx, item_id);
if !ty::is_instantiable(tcx, item_ty) {
tcx.sess.span_err(sp, format!("this type cannot be instantiated \
without an instance of itself; \
consider using `Option<{}>`",
ppaux::ty_to_str(tcx, item_ty)));
}
}
pub fn check_simd(tcx: ty::ctxt, sp: Span, id: ast::NodeId) {
let t = ty::node_id_to_type(tcx, id);
if ty::type_needs_subst(t) {
tcx.sess.span_err(sp, "SIMD vector cannot be generic");
return;
}
match ty::get(t).sty {
ty::ty_struct(did, ref substs) => {
let fields = ty::lookup_struct_fields(tcx, did);
if fields.is_empty() {
tcx.sess.span_err(sp, "SIMD vector cannot be empty");
return;
}
let e = ty::lookup_field_type(tcx, did, fields[0].id, substs);
if !fields.iter().all(
|f| ty::lookup_field_type(tcx, did, f.id, substs) == e) {
tcx.sess.span_err(sp, "SIMD vector should be homogeneous");
return;
}
if !ty::type_is_machine(e) {
tcx.sess.span_err(sp, "SIMD vector element type should be \
machine type");
return;
}
}
_ => ()
}
}
pub fn check_enum_variants(ccx: @mut CrateCtxt,
sp: Span,
vs: &[ast::P<ast::variant>],
id: ast::NodeId) {
fn disr_in_range(ccx: @mut CrateCtxt,
ty: attr::IntType,
disr: ty::Disr) -> bool {
fn uint_in_range(ccx: @mut CrateCtxt, ty: ast::uint_ty, disr: ty::Disr) -> bool {
match ty {
ast::ty_u8 => disr as u8 as Disr == disr,
ast::ty_u16 => disr as u16 as Disr == disr,
ast::ty_u32 => disr as u32 as Disr == disr,
ast::ty_u64 => disr as u64 as Disr == disr,
ast::ty_u => uint_in_range(ccx, ccx.tcx.sess.targ_cfg.uint_type, disr)
}
}
fn int_in_range(ccx: @mut CrateCtxt, ty: ast::int_ty, disr: ty::Disr) -> bool {
match ty {
ast::ty_i8 => disr as i8 as Disr == disr,
ast::ty_i16 => disr as i16 as Disr == disr,
ast::ty_i32 => disr as i32 as Disr == disr,
ast::ty_i64 => disr as i64 as Disr == disr,
ast::ty_i => int_in_range(ccx, ccx.tcx.sess.targ_cfg.int_type, disr)
}
}
match ty {
attr::UnsignedInt(ty) => uint_in_range(ccx, ty, disr),
attr::SignedInt(ty) => int_in_range(ccx, ty, disr)
}
}
fn do_check(ccx: @mut CrateCtxt,
vs: &[ast::P<ast::variant>],
id: ast::NodeId,
hint: attr::ReprAttr)
-> ~[@ty::VariantInfo] {
let rty = ty::node_id_to_type(ccx.tcx, id);
let mut variants: ~[@ty::VariantInfo] = ~[];
let mut disr_vals: ~[ty::Disr] = ~[];
let mut prev_disr_val: Option<ty::Disr> = None;
for &v in vs.iter() {
            // If the discriminant value is specified explicitly in the enum, check whether the
            // initialization expression is valid; otherwise use the last value plus one.
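            // (For example, `enum Foo { A = 5, B, C }` assigns 5, 6, and 7.)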
let mut current_disr_val = match prev_disr_val {
Some(prev_disr_val) => prev_disr_val + 1,
None => ty::INITIAL_DISCRIMINANT_VALUE
};
match v.node.disr_expr {
Some(e) => {
debug!("disr expr, checking {}", pprust::expr_to_str(e, ccx.tcx.sess.intr()));
let fcx = blank_fn_ctxt(ccx, rty, e.id);
let declty = ty::mk_int_var(ccx.tcx, fcx.infcx().next_int_var_id());
check_const_with_ty(fcx, e.span, e, declty);
// check_expr (from check_const pass) doesn't guarantee
                    // that the expression is in a form that eval_const_expr can
// handle, so we may still get an internal compiler error
match const_eval::eval_const_expr_partial(&ccx.tcx, e) {
Ok(const_eval::const_int(val)) => current_disr_val = val as Disr,
Ok(const_eval::const_uint(val)) => current_disr_val = val as Disr,
Ok(_) => {
ccx.tcx.sess.span_err(e.span, "expected signed integer constant");
}
Err(ref err) => {
ccx.tcx.sess.span_err(e.span, format!("expected constant: {}", (*err)));
}
}
},
None => ()
};
// Check for duplicate discriminant values
            if disr_vals.contains(&current_disr_val) {
ccx.tcx.sess.span_err(v.span, "discriminant value already exists");
}
// Check for unrepresentable discriminant values
match hint {
attr::ReprAny | attr::ReprExtern => (),
attr::ReprInt(sp, ity) => {
if !disr_in_range(ccx, ity, current_disr_val) {
ccx.tcx.sess.span_err(v.span,
"discriminant value outside specified type");
ccx.tcx.sess.span_note(sp, "discriminant type specified here");
}
}
}
disr_vals.push(current_disr_val);
let variant_info = @VariantInfo::from_ast_variant(ccx.tcx, v, current_disr_val);
prev_disr_val = Some(current_disr_val);
variants.push(variant_info);
}
return variants;
}
let rty = ty::node_id_to_type(ccx.tcx, id);
let hint = ty::lookup_repr_hint(ccx.tcx, ast::DefId { crate: ast::LOCAL_CRATE, node: id });
if hint != attr::ReprAny && vs.len() <= 1 {
ccx.tcx.sess.span_err(sp, format!("unsupported representation for {}variant enum",
if vs.len() == 1 { "uni" } else { "zero-" }))
}
let variants = do_check(ccx, vs, id, hint);
// cache so that ty::enum_variants won't repeat this work
ccx.tcx.enum_var_cache.insert(local_def(id), @variants);
// Check that it is possible to represent this enum:
let mut outer = true;
let did = local_def(id);
if ty::type_structurally_contains(ccx.tcx, rty, |sty| {
match *sty {
ty::ty_enum(id, _) if id == did => {
if outer { outer = false; false }
else { true }
}
_ => false
}
}) {
ccx.tcx.sess.span_err(sp,
"illegal recursive enum type; \
wrap the inner value in a box to make it representable");
}
// Check that it is possible to instantiate this enum:
//
    // This *sounds* like the same thing as representable, but it's
// not. See def'n of `check_instantiable()` for details.
check_instantiable(ccx.tcx, sp, id);
}
pub fn lookup_def(fcx: @mut FnCtxt, sp: Span, id: ast::NodeId) -> ast::Def {
lookup_def_ccx(fcx.ccx, sp, id)
}
// Returns the type parameter count and the type for the given definition.
pub fn ty_param_bounds_and_ty_for_def(fcx: @mut FnCtxt,
sp: Span,
defn: ast::Def)
-> ty_param_bounds_and_ty {
match defn {
ast::DefArg(nid, _) | ast::DefLocal(nid, _) | ast::DefSelf(nid, _) |
ast::DefBinding(nid, _) => {
let typ = fcx.local_ty(sp, nid);
return no_params(typ);
}
ast::DefFn(id, _) | ast::DefStaticMethod(id, _, _) |
ast::DefStatic(id, _) | ast::DefVariant(_, id, _) |
ast::DefStruct(id) => {
return ty::lookup_item_type(fcx.ccx.tcx, id);
}
ast::DefUpvar(_, inner, _, _) => {
return ty_param_bounds_and_ty_for_def(fcx, sp, *inner);
}
ast::DefTrait(_) |
ast::DefTy(_) |
ast::DefPrimTy(_) |
ast::DefTyParam(..)=> {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found type");
}
ast::DefMod(..) | ast::DefForeignMod(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found module");
}
ast::DefUse(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found use");
}
ast::DefRegion(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found region");
}
ast::DefTyParamBinder(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found type parameter");
}
ast::DefLabel(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found label");
}
ast::DefSelfTy(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found self ty");
}
ast::DefMethod(..) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found method");
}
}
}
// Instantiates the given path, which must refer to an item with the given
// number of type parameters and type.
pub fn instantiate_path(fcx: @mut FnCtxt,
pth: &ast::Path,
tpt: ty_param_bounds_and_ty,
def: ast::Def,
span: Span,
node_id: ast::NodeId) {
debug!(">>> instantiate_path");
let ty_param_count = tpt.generics.type_param_defs.len();
let mut ty_substs_len = 0;
for segment in pth.segments.iter() {
ty_substs_len += segment.types.len()
}
debug!("tpt={} ty_param_count={:?} ty_substs_len={:?}",
tpt.repr(fcx.tcx()),
ty_param_count,
ty_substs_len);
// determine the region parameters, using the value given by the user
// (if any) and otherwise using a fresh region variable
let num_expected_regions = tpt.generics.region_param_defs.len();
let num_supplied_regions = pth.segments.last().lifetimes.len();
let regions = if num_expected_regions == num_supplied_regions {
pth.segments.last().lifetimes.map(
|l| ast_region_to_region(fcx.tcx(), l))
} else {
if num_supplied_regions != 0 {
fcx.ccx.tcx.sess.span_err(
span,
format!("expected {} lifetime parameter(s), \
found {} lifetime parameter(s)",
num_expected_regions, num_supplied_regions));
}
opt_vec::from(fcx.infcx().next_region_vars(
infer::BoundRegionInTypeOrImpl(span),
num_expected_regions))
};
// Special case: If there is a self parameter, omit it from the list of
// type parameters.
//
// Here we calculate the "user type parameter count", which is the number
// of type parameters actually manifest in the AST. This will differ from
// the internal type parameter count when there are self types involved.
let (user_type_parameter_count, self_parameter_index) = match def {
ast::DefStaticMethod(_, provenance @ ast::FromTrait(_), _) => {
let generics = generics_of_static_method_container(fcx.ccx.tcx,
provenance);
(ty_param_count - 1, Some(generics.type_param_defs.len()))
}
_ => (ty_param_count, None),
};
// determine values for type parameters, using the values given by
// the user (if any) and otherwise using fresh type variables
let tps = if ty_substs_len == 0 {
fcx.infcx().next_ty_vars(ty_param_count)
} else if ty_param_count == 0 {
fcx.ccx.tcx.sess.span_err
(span, "this item does not take type parameters");
fcx.infcx().next_ty_vars(ty_param_count)
} else if ty_substs_len > user_type_parameter_count {
fcx.ccx.tcx.sess.span_err
(span,
format!("too many type parameters provided: expected {}, found {}",
user_type_parameter_count, ty_substs_len));
fcx.infcx().next_ty_vars(ty_param_count)
} else if ty_substs_len < user_type_parameter_count {
fcx.ccx.tcx.sess.span_err
(span,
format!("not enough type parameters provided: expected {}, found {}",
user_type_parameter_count, ty_substs_len));
fcx.infcx().next_ty_vars(ty_param_count)
} else {
// Build up the list of type parameters, inserting the self parameter
// at the appropriate position.
let mut result = ~[];
let mut pushed = false;
for (i, &ast_type) in pth.segments
.iter()
.flat_map(|segment| segment.types.iter())
.enumerate() {
match self_parameter_index {
Some(index) if index == i => {
result.push(fcx.infcx().next_ty_vars(1)[0]);
pushed = true;
}
_ => {}
}
result.push(fcx.to_ty(ast_type))
}
// If the self parameter goes at the end, insert it there.
if !pushed && self_parameter_index.is_some() {
result.push(fcx.infcx().next_ty_vars(1)[0])
}
assert_eq!(result.len(), ty_param_count)
result
};
let substs = substs {
regions: ty::NonerasedRegions(regions),
self_ty: None,
tps: tps
};
fcx.write_ty_substs(node_id, tpt.ty, substs);
debug!("<<<");
}
// Resolves `typ` by a single level if `typ` is a type variable. If no
// resolution is possible, then an error is reported.
pub fn structurally_resolved_type(fcx: @mut FnCtxt, sp: Span, tp: ty::t)
-> ty::t {
match infer::resolve_type(fcx.infcx(), tp, force_tvar) {
Ok(t_s) if !ty::type_is_ty_var(t_s) => t_s,
_ => {
fcx.type_error_message(sp, |_actual| {
~"the type of this value must be known in this context"
}, tp, None);
demand::suptype(fcx, sp, ty::mk_err(), tp);
tp
}
}
}
// Returns the one-level-deep structure of the given type.
pub fn structure_of<'a>(fcx: @mut FnCtxt, sp: Span, typ: ty::t)
-> &'a ty::sty {
&ty::get(structurally_resolved_type(fcx, sp, typ)).sty
}
pub fn type_is_integral(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_integral(typ_s);
}
pub fn type_is_scalar(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_scalar(typ_s);
}
pub fn type_is_char(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_char(typ_s);
}
pub fn type_is_bare_fn(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_bare_fn(typ_s);
}
pub fn type_is_unsafe_ptr(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_unsafe_ptr(typ_s);
}
pub fn type_is_region_ptr(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_region_ptr(typ_s);
}
pub fn type_is_c_like_enum(fcx: @mut FnCtxt, sp: Span, typ: ty::t) -> bool {
let typ_s = structurally_resolved_type(fcx, sp, typ);
return ty::type_is_c_like_enum(fcx.ccx.tcx, typ_s);
}
pub fn ast_expr_vstore_to_vstore(fcx: @mut FnCtxt,
e: @ast::Expr,
v: ast::ExprVstore)
-> ty::vstore {
match v {
ast::ExprVstoreUniq => ty::vstore_uniq,
ast::ExprVstoreBox | ast::ExprVstoreMutBox => ty::vstore_box,
ast::ExprVstoreSlice | ast::ExprVstoreMutSlice => {
let r = fcx.infcx().next_region_var(infer::AddrOfSlice(e.span));
ty::vstore_slice(r)
}
}
}
// Returns true if b contains a break that can exit from b
pub fn may_break(cx: ty::ctxt, id: ast::NodeId, b: ast::P<ast::Block>) -> bool {
// First: is there an unlabeled break immediately
// inside the loop?
(loop_query(b, |e| {
match *e {
ast::ExprBreak(_) => true,
_ => false
}
})) ||
// Second: is there a labeled break with label
// <id> nested anywhere inside the loop?
(block_query(b, |e| {
match e.node {
ast::ExprBreak(Some(_)) =>
match cx.def_map.find(&e.id) {
Some(&ast::DefLabel(loop_id)) if id == loop_id => true,
_ => false,
},
_ => false
}}))
}
pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
span: Span,
tps: &OptVec<ast::TyParam>,
ty: ty::t) {
debug!("check_bounds_are_used(n_tps={}, ty={})",
tps.len(), ppaux::ty_to_str(ccx.tcx, ty));
// make a vector of booleans initially false, set to true when used
if tps.len() == 0u { return; }
let mut tps_used = vec::from_elem(tps.len(), false);
ty::walk_ty(ty, |t| {
match ty::get(t).sty {
ty::ty_param(param_ty {idx, ..}) => {
debug!("Found use of ty param \\#{}", idx);
tps_used[idx] = true;
}
_ => ()
}
});
for (i, b) in tps_used.iter().enumerate() {
if !*b {
ccx.tcx.sess.span_err(
span, format!("type parameter `{}` is unused",
ccx.tcx.sess.str_of(tps.get(i).ident)));
}
}
}
pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
fn param(ccx: @mut CrateCtxt, n: uint) -> ty::t {
ty::mk_param(ccx.tcx, n, local_def(0))
}
let tcx = ccx.tcx;
let nm = ccx.tcx.sess.str_of(it.ident);
let name = nm.as_slice();
let (n_tps, inputs, output) = if name.starts_with("atomic_") {
let split : ~[&str] = name.split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic not correct format");
        // We only care about the operation here
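        // e.g. "atomic_cxchg_acq" splits into ["atomic", "cxchg", "acq"], so split[1] is "cxchg"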
match split[1] {
"cxchg" => (0, ~[ty::mk_mut_rptr(tcx,
ty::ReLateBound(it.id, ty::BrAnon(0)),
ty::mk_int()),
ty::mk_int(),
ty::mk_int()
], ty::mk_int()),
"load" => (0,
~[
ty::mk_imm_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), ty::mk_int())
],
ty::mk_int()),
"store" => (0,
~[
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), ty::mk_int()),
ty::mk_int()
],
ty::mk_nil()),
"xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" |
"min" | "umax" | "umin" => {
(0, ~[ty::mk_mut_rptr(tcx,
ty::ReLateBound(it.id, ty::BrAnon(0)),
ty::mk_int()), ty::mk_int() ], ty::mk_int())
}
"fence" => {
(0, ~[], ty::mk_nil())
}
op => {
tcx.sess.span_err(it.span,
format!("unrecognized atomic operation function: `{}`",
op));
return;
}
}
} else {
match name {
"abort" => (0, ~[], ty::mk_bot()),
"breakpoint" => (0, ~[], ty::mk_nil()),
"size_of" |
"pref_align_of" | "min_align_of" => (1u, ~[], ty::mk_uint()),
"init" => (1u, ~[], param(ccx, 0u)),
"uninit" => (1u, ~[], param(ccx, 0u)),
"forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()),
"transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)),
"move_val" | "move_val_init" => {
(1u,
~[
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), param(ccx, 0)),
param(ccx, 0u)
],
ty::mk_nil())
}
"needs_drop" => (1u, ~[], ty::mk_bool()),
"owns_managed" => (1u, ~[], ty::mk_bool()),
"atomic_xchg" | "atomic_xadd" | "atomic_xsub" |
"atomic_xchg_acq" | "atomic_xadd_acq" | "atomic_xsub_acq" |
"atomic_xchg_rel" | "atomic_xadd_rel" | "atomic_xsub_rel" => {
(0,
~[
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), ty::mk_int()),
ty::mk_int()
],
ty::mk_int())
}
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
Err(s) => { tcx.sess.span_fatal(it.span, s); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,
mutbl: ast::MutImmutable
});
(1u, ~[], td_ptr)
}
"type_id" => {
let langid = ccx.tcx.lang_items.require(TypeIdLangItem);
match langid {
Ok(did) => (1u, ~[], ty::mk_struct(ccx.tcx, did, substs {
self_ty: None,
tps: ~[],
regions: ty::NonerasedRegions(opt_vec::Empty)
}) ),
Err(msg) => { tcx.sess.span_fatal(it.span, msg); }
}
},
"visit_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
Err(s) => { tcx.sess.span_fatal(it.span, s); }
};
let region = ty::ReLateBound(it.id, ty::BrAnon(0));
let visitor_object_ty = match ty::visitor_object_ty(tcx, region) {
Ok((_, vot)) => vot,
Err(s) => { tcx.sess.span_fatal(it.span, s); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,
mutbl: ast::MutImmutable
});
(0, ~[ td_ptr, visitor_object_ty ], ty::mk_nil())
}
"morestack_addr" => {
(0u, ~[], ty::mk_nil_ptr(ccx.tcx))
}
"offset" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ty::mk_int()
],
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}))
}
"copy_nonoverlapping_memory" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ty::mk_uint()
],
ty::mk_nil())
}
"copy_memory" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ty::mk_uint()
],
ty::mk_nil())
}
"set_memory" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
ty::mk_u8(),
ty::mk_uint()
],
ty::mk_nil())
}
"sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"powif32" => {
(0,
~[ ty::mk_f32(), ty::mk_i32() ],
ty::mk_f32())
}
"powif64" => {
(0,
~[ ty::mk_f64(), ty::mk_i32() ],
ty::mk_f64())
}
"sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"powf32" => {
(0,
~[ ty::mk_f32(), ty::mk_f32() ],
ty::mk_f32())
}
"powf64" => {
(0,
~[ ty::mk_f64(), ty::mk_f64() ],
ty::mk_f64())
}
"expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"fmaf32" => {
(0,
~[ ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ],
ty::mk_f32())
}
"fmaf64" => {
(0,
~[ ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ],
ty::mk_f64())
}
"fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"copysignf32" => (0, ~[ ty::mk_f32(), ty::mk_f32() ], ty::mk_f32()),
"copysignf64" => (0, ~[ ty::mk_f64(), ty::mk_f64() ], ty::mk_f64()),
"floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"rintf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"rintf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"nearbyintf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"nearbyintf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"roundf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"roundf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
"ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
"ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
"cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" =>
(0, ~[ty::mk_i8(), ty::mk_i8()],
ty::mk_tup(tcx, ~[ty::mk_i8(), ty::mk_bool()])),
"i16_add_with_overflow" | "i16_sub_with_overflow" | "i16_mul_with_overflow" =>
(0, ~[ty::mk_i16(), ty::mk_i16()],
ty::mk_tup(tcx, ~[ty::mk_i16(), ty::mk_bool()])),
"i32_add_with_overflow" | "i32_sub_with_overflow" | "i32_mul_with_overflow" =>
(0, ~[ty::mk_i32(), ty::mk_i32()],
ty::mk_tup(tcx, ~[ty::mk_i32(), ty::mk_bool()])),
"i64_add_with_overflow" | "i64_sub_with_overflow" | "i64_mul_with_overflow" =>
(0, ~[ty::mk_i64(), ty::mk_i64()],
ty::mk_tup(tcx, ~[ty::mk_i64(), ty::mk_bool()])),
"u8_add_with_overflow" | "u8_sub_with_overflow" | "u8_mul_with_overflow" =>
(0, ~[ty::mk_u8(), ty::mk_u8()],
ty::mk_tup(tcx, ~[ty::mk_u8(), ty::mk_bool()])),
"u16_add_with_overflow" | "u16_sub_with_overflow" | "u16_mul_with_overflow" =>
(0, ~[ty::mk_u16(), ty::mk_u16()],
ty::mk_tup(tcx, ~[ty::mk_u16(), ty::mk_bool()])),
"u32_add_with_overflow" | "u32_sub_with_overflow" | "u32_mul_with_overflow"=>
(0, ~[ty::mk_u32(), ty::mk_u32()],
ty::mk_tup(tcx, ~[ty::mk_u32(), ty::mk_bool()])),
"u64_add_with_overflow" | "u64_sub_with_overflow" | "u64_mul_with_overflow" =>
(0, ~[ty::mk_u64(), ty::mk_u64()],
ty::mk_tup(tcx, ~[ty::mk_u64(), ty::mk_bool()])),
ref other => {
tcx.sess.span_err(it.span,
format!("unrecognized intrinsic function: `{}`",
*other));
return;
}
}
};
let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {
purity: ast::unsafe_fn,
abis: AbiSet::Intrinsic(),
sig: FnSig {binder_id: it.id,
inputs: inputs,
output: output,
variadic: false}
});
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.type_param_defs.len();
if i_n_tps != n_tps {
tcx.sess.span_err(it.span, format!("intrinsic has wrong number \
of type parameters: found {}, \
expected {}", i_n_tps, n_tps));
} else {
require_same_types(
tcx, None, false, it.span, i_ty.ty, fty,
|| format!("intrinsic has wrong type: \
expected `{}`",
ppaux::ty_to_str(ccx.tcx, fty)));
}
}
| check_fn |
main.rs | #![type_length_limit="1884939"]
use std::{convert::Infallible, env, net::SocketAddr, str};
use hyper::{Body, Request, Response, Server, service::{make_service_fn, service_fn}};
use serenity::{
async_trait,
model::{channel::Message, gateway::Ready},
prelude::*,
};
use rand::Rng;
use select::{document::Document, node::Node};
use select::predicate::Name;
const HELP_MESSAGE: &str = "
Hello there, I'm a friendly llama!";
const HELP_COMMAND: &str = "%help";
const SHOW_COMMAND: &str = "%pokazlame";
const YESNO_COMMAND: &str = "lame czy";
const CHOOSE_COMMAND: &str = "lame wybierz";
const YESNO_RESPONSES: [&str; 17] = ["Tak.", "Nie.", "Oczywiście!", "Zapomnij!", "да!", "Нет!", "Jeszcze jak!", "No chyba nie.", "Ja!", "Nein!", "Oui~", "Nee", "Spoczko", "No chyba ty", "是的", "いいえ", "Ya, m8!"];
struct Handler;
#[async_trait]
impl EventHandler for Handler {
async fn message(&self, ctx: Context, msg: Message) {
// println!("Got {}", msg.content);
let mut response = String::new();
match msg.content.as_str() {
HELP_COMMAND => {
response = String::from(HELP_MESSAGE);
}
SHOW_COMMAND => {
response = String::from(findimage().await.unwrap());
}
&_ => {
if msg.content.starts_with(YESNO_COMMAND) {
let reply = format!(":8ball: {}", YESNO_RESPONSES[rand::thread_rng().gen_range(0..17)]);
if let Err(why) = msg.reply(&ctx.http, reply).await {
println!("Error sending message: {:?}", why);
}
} else if msg.content.starts_with(CHOOSE_COMMAND) {
let mut choices = Vec::new();
for word in msg.content.split_whitespace() {
                        if word != "lame" && word != "wybierz" && word != "czy" {
choices.push(word);
}
}
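                    // e.g. "lame wybierz pizza czy sushi" leaves choices = ["pizza", "sushi"];
                    // note that if nothing is left in choices, gen_range(0..0) below would panic.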
let mut length = choices.len();
let reply = format!("wybieram {}", choices[rand::thread_rng().gen_range(0..length)]);
if let Err(why) = msg.reply(&ctx.http, reply).await {
println!("Error sending message: {:?}", why);
}
}
}
}
if response.len() == 0 {
return;
}
println!("Sending response {}", response);
if let Err(why) = msg.channel_id.say(&ctx.http, response).await {
println!("Error sending message: {:?}", why);
}
}
async fn ready(&self, _: Context, ready: Ready) {
println!("{} is connected!", ready.user.name);
}
}
fn get_absolute_uri(n: Node) -> Option<&str> {
match n.attr("src") {
Some(link) => {
if link.starts_with("http") {
return Some(link);
}
}
None => {
return None;
}
}
return None
}
async fn findimage() -> R | ing, Box<dyn std::error::Error>> {
let res = reqwest::get("https://www.google.com/search?q=llama&sclient=img&tbm=isch").await;
let body = res.unwrap().text().await.unwrap();
let body = Document::from(body.as_str());
let image_iter = body
.find(Name("img"))
.filter_map(|n| get_absolute_uri(n)); // this is an iterator
let random_num = rand::thread_rng().gen_range(0..image_iter.count());
    // TODO: find out how we can reuse the iterator instead of having to regenerate it
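    // (One possible approach, untested sketch: collect the matches once, e.g.
    //     let images: Vec<&str> = body.find(Name("img")).filter_map(|n| get_absolute_uri(n)).collect();
    // then pick an index with gen_range(0..images.len()) and use images[idx]
    // instead of walking the document a second time.)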
let mut image_iter = body
.find(Name("img"))
.filter_map(|n| get_absolute_uri(n)); // this is an iterator
let image = image_iter.nth(random_num); // TODO: handle None scenario
return Ok(String::from(image.unwrap()));
}
async fn hello_world(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
Ok(Response::new("Hello, World".into()))
}
#[tokio::main]
async fn main() {
let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment");
let mut client = Client::new(&token)
.event_handler(Handler)
.await
.expect("Err creating client");
// run simple http server to satisfy healthchecks
let addr = SocketAddr::from(([0, 0, 0, 0], 8080));
// A `Service` is needed for every connection, so this
// creates one from our `hello_world` function.
let make_svc = make_service_fn(|_conn| async {
// service_fn converts our function into a `Service`
Ok::<_, Infallible>(service_fn(hello_world))
});
let server = Server::bind(&addr).serve(make_svc);
let (server_result, client_result) = tokio::join!(server, client.start());
if let Err(why) = client_result {
println!("Client error: {:?}", why);
}
if let Err(why) = server_result {
println!("Server error: {:?}", why);
}
}
| esult<Str |
test_v2.py | from secrets import token_bytes
import pytest
import pyseto
from pyseto import DecryptError, EncryptError, Key, VerifyError
from pyseto.versions.v2 import V2Local, V2Public
from .utils import get_path, load_key
class TestV2Local:
"""
Tests for v2.local.
"""
@pytest.mark.parametrize(
"key, msg",
[
(b"", "key must be specified."),
(token_bytes(1), "key must be 32 bytes long."),
(token_bytes(8), "key must be 32 bytes long."),
(token_bytes(16), "key must be 32 bytes long."),
(token_bytes(31), "key must be 32 bytes long."),
(token_bytes(33), "key must be 32 bytes long."),
],
)
def test_v2_local_new_with_invalid_arg(self, key, msg):
with pytest.raises(ValueError) as err:
Key.new(2, "local", key)
pytest.fail("Key.new() should fail.")
assert msg in str(err.value)
@pytest.mark.parametrize(
"key",
[
None,
0,
token_bytes(65),
],
)
def test_v2_local__generate_hash_with_invalid_arg(self, key):
|
@pytest.mark.parametrize(
"ptk",
[
None,
0,
],
)
def test_v2_local__encode_pie_with_invalid_ptk(self, ptk):
with pytest.raises(EncryptError) as err:
V2Local._encode_pie("v2.local-wrap.pie.", token_bytes(32), ptk)
pytest.fail("V2Local._encode_pie() should fail.")
assert "Failed to encrypt." in str(err.value)
def test_v2_local_decrypt_via_decode_with_wrong_key(self):
k1 = Key.new(2, "local", token_bytes(32))
k2 = Key.new(2, "local", token_bytes(32))
token = pyseto.encode(k1, b"Hello world!")
with pytest.raises(DecryptError) as err:
pyseto.decode(k2, token)
pytest.fail("pyseto.decode() should fail.")
assert "Failed to decrypt." in str(err.value)
def test_v2_local_encrypt_with_invalid_arg(self):
k = Key.new(2, "local", token_bytes(32))
with pytest.raises(EncryptError) as err:
k.encrypt(None)
pytest.fail("pyseto.encrypt() should fail.")
assert "Failed to generate internal nonce." in str(err.value)
@pytest.mark.parametrize(
"nonce",
[
token_bytes(1),
token_bytes(8),
token_bytes(23),
token_bytes(25),
token_bytes(32),
],
)
def test_v2_local_encrypt_via_encode_with_wrong_nonce(self, nonce):
k = Key.new(2, "local", token_bytes(32))
with pytest.raises(ValueError) as err:
pyseto.encode(k, b"Hello world!", nonce=nonce)
pytest.fail("pyseto.encode() should fail.")
assert "nonce must be 24 bytes long." in str(err.value)
@pytest.mark.parametrize(
"paserk, msg",
[
("xx.local.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
("k3.local.AAAAAAAAAAAAAAAA", "Invalid PASERK version: k3."),
("k2.local.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
("k2.public.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
("k2.public.AAAAAAAAAAAAAAAA", "Invalid PASERK type: public."),
(
"k2.local-wrap.AAAAAAAAAAAAAAAA",
"local-wrap needs wrapping_key.",
),
(
"k2.secret-wrap.AAAAAAAAAAAAAAAA",
"Invalid PASERK type: secret-wrap.",
),
(
"k2.local-pw.AAAAAAAAAAAAAAAA",
"local-pw needs password.",
),
(
"k2.seal.AAAAAAAAAAAAAAAA",
"seal needs unsealing_key.",
),
],
)
def test_v2_local_from_paserk_with_invalid_args(self, paserk, msg):
with pytest.raises(ValueError) as err:
V2Local.from_paserk(paserk)
pytest.fail("Key.from_paserk should fail.")
assert msg in str(err.value)
@pytest.mark.parametrize(
"paserk, msg",
[
("xx.local-wrap.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
("k2.local-wrap.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
("k2.local-wrap.xxx.AAAAAAAAAAAAAAAA", "Unknown wrapping algorithm: xxx."),
("k2.xxx.pie.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
],
)
def test_v2_local_from_paserk_with_wrapping_key_and_invalid_args(self, paserk, msg):
with pytest.raises(ValueError) as err:
V2Local.from_paserk(paserk, wrapping_key=token_bytes(32))
pytest.fail("Key.from_paserk should fail.")
assert msg in str(err.value)
@pytest.mark.parametrize(
"paserk, msg",
[
("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
("k2.seal.AAAAAAAAAAAAAAAA", "Invalid or unsupported PEM format."),
],
)
def test_v2_local_from_paserk_with_unsealing_key_and_invalid_args(self, paserk, msg):
with pytest.raises(ValueError) as err:
V2Local.from_paserk(paserk, unsealing_key=token_bytes(32))
pytest.fail("Key.from_paserk should fail.")
assert msg in str(err.value)
def test_v2_local_to_paserk_with_invalid_sealing_key(self):
k = Key.new(2, "local", token_bytes(32))
with pytest.raises(ValueError) as err:
k.to_paserk(sealing_key=b"not-PEM-formatted-key")
pytest.fail("Key.from_paserk should fail.")
assert "Invalid or unsupported PEM format." in str(err.value)
def test_v2_local_from_paserk_with_wrong_unsealing_key(self):
k = Key.new(2, "local", token_bytes(32))
with open(get_path("keys/public_key_x25519.pem")) as key_file:
sealed_key = k.to_paserk(sealing_key=key_file.read())
with open(get_path("keys/private_key_x25519_2.pem")) as key_file:
unsealing_key = key_file.read()
with pytest.raises(DecryptError) as err:
Key.from_paserk(sealed_key, unsealing_key=unsealing_key)
pytest.fail("Key.from_paserk should fail.")
assert "Failed to unseal a key." in str(err.value)
class TestV2Public:
"""
Tests for v2.public.
"""
def test_v2_public_to_paserk_id(self):
sk = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
pk = Key.new(2, "public", load_key("keys/public_key_ed25519.pem"))
assert sk.to_peer_paserk_id() == pk.to_paserk_id()
assert pk.to_peer_paserk_id() == ""
def test_v2_public_verify_via_encode_with_wrong_key(self):
sk = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
pk = Key.new(2, "public", load_key("keys/public_key_ed25519_2.pem"))
token = pyseto.encode(sk, b"Hello world!")
with pytest.raises(VerifyError) as err:
pyseto.decode(pk, token)
pytest.fail("pyseto.decode() should fail.")
assert "Failed to verify." in str(err.value)
def test_v2_public_to_paserk_with_sealing_key(self):
k = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
with pytest.raises(ValueError) as err:
k.to_paserk(sealing_key=b"xxx")
pytest.fail("pyseto.to_paserk() should fail.")
assert "Key sealing can only be used for local key." in str(err.value)
# def test_v2_public_from_paserk_with_wrong_unsealing_key(self):
# key = Key.new(2, "local", token_bytes(32))
# pk = Key.new(2, "public", load_key("keys/public_key_ed25519.pem"))
# sealing_key = pk.public_bytes(Encoding.Raw, PublicFormat.Raw)
# sealed = key.to_paserk(sealing_key=sealing_key)
# sk = Key.new(2, "public", load_key("keys/private_key_ed25519_2.pem"))
# with pytest.raises(ValueError) as err:
# Key.from_paserk(unsealing_key=unsealing_key)
# pytest.fail("pyseto.from_paserk() should fail.")
# assert "Failed to unseal a key." in str(err.value)
@pytest.mark.parametrize(
"paserk, msg",
[
("xx.public.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
("k3.public.AAAAAAAAAAAAAAAA", "Invalid PASERK version: k3."),
("k2.public.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
("k2.local.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
("k2.local.AAAAAAAAAAAAAAAA", "Invalid PASERK type: local."),
(
"k2.local-wrap.AAAAAAAAAAAAAAAA",
"Invalid PASERK type: local-wrap.",
),
(
"k2.secret-wrap.AAAAAAAAAAAAAAAA",
"secret-wrap needs wrapping_key.",
),
(
"k2.secret-pw.AAAAAAAAAAAAAAAA",
"secret-pw needs password.",
),
],
)
def test_v2_public_from_paserk_with_invalid_args(self, paserk, msg):
with pytest.raises(ValueError) as err:
V2Public.from_paserk(paserk)
pytest.fail("Key.from_paserk should fail.")
assert msg in str(err.value)
| with pytest.raises(EncryptError) as err:
V2Local._generate_hash(key, b"Hello world!", 32)
pytest.fail("V2Local._generate_hash() should fail.")
assert "Failed to generate hash." in str(err.value) |
ssh.go | package ssh
import (
"fmt"
"io"
"io/ioutil"
"log"
"net"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/unknwon/com"
"golang.org/x/crypto/ssh"
)
type Config struct {
HOST string
SSH_PORT int
SSH_SERVER_MACS []string
SSH_SERVER_CIPHERS []string
GitBinPath string
ProjectRoot string
}
var (
DefaultConfig = Config{
SSH_PORT: 8722,
HOST: "0.0.0.0",
GitBinPath: "/usr/local/bin/git",
SSH_SERVER_CIPHERS: []string{"aes128-ctr", "aes192-ctr", "aes256-ctr", "[email protected]", "arcfour256", "arcfour128"},
SSH_SERVER_MACS: []string{"[email protected]", "hmac-sha2-256", "hmac-sha1"},
}
)
func | () {
dir, err := os.Getwd()
if err == nil {
DefaultConfig.ProjectRoot = fmt.Sprintf("%s/repo", dir)
}
}
func cleanCommand(cmd string) string {
i := strings.Index(cmd, "git")
if i == -1 {
return cmd
}
return cmd[i:]
}
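// Illustrative example (assumed input shape): parseSSHCmd("git-upload-pack '/user/repo.git'")
// returns ("git-upload-pack", "'user/repo.git'"); the slash after the opening
// quote is dropped so the path can later be joined onto ProjectRoot.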
func parseSSHCmd(cmd string) (string, string) {
ss := strings.SplitN(cmd, " ", 2)
if len(ss) != 2 {
return "", ""
}
return ss[0], strings.Replace(ss[1], "'/", "'", 1)
}
func handleServerConn(keyID string, chans <-chan ssh.NewChannel) {
for newChan := range chans {
if newChan.ChannelType() != "session" {
_ = newChan.Reject(ssh.UnknownChannelType, "unknown channel type")
continue
}
ch, reqs, err := newChan.Accept()
if err != nil {
log.Printf("Error accepting channel: %v", err)
continue
}
go func(in <-chan *ssh.Request) {
defer func() {
_ = ch.Close()
}()
for req := range in {
payload := cleanCommand(string(req.Payload))
switch req.Type {
case "env":
var env struct {
Name string
Value string
}
if err := ssh.Unmarshal(req.Payload, &env); err != nil {
log.Printf("SSH: Invalid env payload %q: %v", req.Payload, err)
continue
}
if env.Name == "" || env.Value == "" {
log.Printf("SSH: Invalid env arguments: %+v", env)
continue
}
_, stderr, err := com.ExecCmd("env", fmt.Sprintf("%s=%s", env.Name, env.Value))
if err != nil {
log.Printf("env: %v - %s", err, stderr)
return
}
case "exec":
cmdName := strings.TrimLeft(payload, "'()")
verb, cmdArgs := parseSSHCmd(cmdName)
repoFullName := strings.ToLower(strings.Trim(cmdArgs, "'"))
repoPath := fmt.Sprintf("%s/%s", DefaultConfig.ProjectRoot, repoFullName)
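                        // verb is expected to look like "git-upload-pack" or "git-receive-pack";
                        // verb[4:] drops the "git-" prefix, so the spawned command is
                        // e.g. `git upload-pack <repoPath>`.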
cmd := exec.Command("git", verb[4:], repoPath)
stdout, err := cmd.StdoutPipe()
if err != nil {
log.Printf("SSH: StdoutPipe: %v", err)
return
}
stderr, err := cmd.StderrPipe()
if err != nil {
log.Printf("SSH: StderrPipe: %v", err)
return
}
input, err := cmd.StdinPipe()
if err != nil {
log.Printf("SSH: StdinPipe: %v", err)
return
}
// FIXME: check timeout
if err = cmd.Start(); err != nil {
log.Printf("SSH: Start: %v", err)
return
}
_ = req.Reply(true, nil)
go func() {
_, _ = io.Copy(input, ch)
}()
_, _ = io.Copy(ch, stdout)
_, _ = io.Copy(ch.Stderr(), stderr)
if err = cmd.Wait(); err != nil {
log.Printf("SSH: Wait: %v", err)
return
}
_, _ = ch.SendRequest("exit-status", false, []byte{0, 0, 0, 0})
return
default:
}
}
}(reqs)
}
}
func portListen(config *ssh.ServerConfig, host string, port int) {
link := fmt.Sprintf("%s:%d", host, port)
fmt.Println(link)
listener, err := net.Listen("tcp", link)
if err != nil {
log.Printf("Failed to start SSH server: %v", err)
}
for {
// Once a ServerConfig has been configured, connections can be accepted.
conn, err := listener.Accept()
if err != nil {
log.Printf("SSH: Error accepting incoming connection: %v", err)
continue
}
// Before use, a handshake must be performed on the incoming net.Conn.
// It must be handled in a separate goroutine,
        // otherwise one user could easily block the entire loop.
        // For example, a user could be asked to trust the server key fingerprint and hang.
go func() {
log.Printf("SSH: Handshaking for %s", conn.RemoteAddr())
sConn, chans, reqs, err := ssh.NewServerConn(conn, config)
if err != nil {
if err == io.EOF {
log.Printf("SSH: Handshaking was terminated: %v", err)
} else {
log.Printf("SSH: Error on handshaking: %v", err)
}
return
}
log.Printf("SSH: Connection from %s (%s)", sConn.RemoteAddr(), sConn.ClientVersion())
// The incoming Request channel must be serviced.
go ssh.DiscardRequests(reqs)
go handleServerConn(sConn.Permissions.Extensions["key-id"], chans)
}()
}
}
func Listen(host string, port int, ciphers, macs []string) {
config := &ssh.ServerConfig{
Config: ssh.Config{
Ciphers: ciphers,
MACs: macs,
},
PublicKeyCallback: func(conn ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {
content := strings.TrimSpace(string(ssh.MarshalAuthorizedKey(key)))
fmt.Println(key)
fmt.Println(content)
// pkey, err := db.SearchPublicKeyByContent(strings.TrimSpace(string(ssh.MarshalAuthorizedKey(key))))
// if err != nil {
// log.Printf("SearchPublicKeyByContent: %v", err)
// return nil, err
// }
return &ssh.Permissions{Extensions: map[string]string{"key-id": "1"}}, nil
},
}
keyPath := filepath.Join("data", "ssh", "facegit.rsa")
if !com.IsExist(keyPath) {
if err := os.MkdirAll(filepath.Dir(keyPath), os.ModePerm); err != nil {
panic(err)
}
_, stderr, err := com.ExecCmd("ssh-keygen", "-f", keyPath, "-t", "rsa", "-m", "PEM", "-N", "")
if err != nil {
panic(fmt.Sprintf("Failed to generate private key: %v - %s", err, stderr))
}
log.Printf("SSH: New private key is generateed: %s", keyPath)
}
privateBytes, err := ioutil.ReadFile(keyPath)
if err != nil {
panic("SSH: Failed to load private key: " + err.Error())
}
private, err := ssh.ParsePrivateKey(privateBytes)
if err != nil {
panic("SSH: Failed to parse private key: " + err.Error())
}
config.AddHostKey(private)
fmt.Println("start ssh service")
portListen(config, host, port)
}
| init |
index.js | const approuter = require('@sap/approuter');
const config = require('./xs-app.json');
config.routes.forEach((oRoute) => {
if (oRoute.localDir && oRoute.localDir === "webapp") {
oRoute.localDir = "../webapp"
} | })
approuter().start({ xsappConfig: config }); |
|
test.py | import argparse
from tqdm import tqdm
import numpy as np
import cv2
from config import cfg
import torch
from base import Tester
from utils.vis import vis_keypoints
from utils.pose_utils import flip
import torch.backends.cudnn as cudnn
def | ():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=str, dest='gpu_ids')
parser.add_argument('--test_epoch', type=str, dest='test_epoch')
args = parser.parse_args()
# test gpus
if not args.gpu_ids:
assert 0, "Please set proper gpu ids"
if '-' in args.gpu_ids:
gpus = args.gpu_ids.split('-')
gpus[0] = int(gpus[0])
gpus[1] = int(gpus[1]) + 1
args.gpu_ids = ','.join(map(lambda x: str(x), list(range(*gpus))))
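        # e.g. "--gpu 0-2" expands to gpu_ids "0,1,2"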
assert args.test_epoch, 'Test epoch is required.'
return args
def main():
args = parse_args()
cfg.set_args(args.gpu_ids)
cudnn.fastest = True
cudnn.benchmark = True
cudnn.deterministic = False
cudnn.enabled = True
tester = Tester(args.test_epoch)
tester._make_batch_generator()
tester._make_model()
preds = []
with torch.no_grad():
for itr, input_img in enumerate(tqdm(tester.batch_generator)):
# forward
coord_out = tester.model(input_img)
if cfg.flip_test:
flipped_input_img = flip(input_img, dims=3)
flipped_coord_out = tester.model(flipped_input_img)
flipped_coord_out[:, :, 0] = cfg.output_shape[1] - flipped_coord_out[:, :, 0] - 1
for pair in tester.flip_pairs:
flipped_coord_out[:, pair[0], :], flipped_coord_out[:, pair[1], :] = flipped_coord_out[:, pair[1], :].clone(), flipped_coord_out[:, pair[0], :].clone()
coord_out = (coord_out + flipped_coord_out)/2.
vis = False
if vis:
filename = str(itr)
tmpimg = input_img[0].cpu().numpy()
tmpimg = tmpimg * np.array(cfg.pixel_std).reshape(3,1,1) + np.array(cfg.pixel_mean).reshape(3,1,1)
tmpimg = tmpimg.astype(np.uint8)
tmpimg = tmpimg[::-1, :, :]
tmpimg = np.transpose(tmpimg,(1,2,0)).copy()
tmpkps = np.zeros((3,tester.joint_num))
tmpkps[:2,:] = coord_out[0,:,:2].cpu().numpy().transpose(1,0) / cfg.output_shape[0] * cfg.input_shape[0]
tmpkps[2,:] = 1
tmpimg = vis_keypoints(tmpimg, tmpkps, tester.skeleton)
cv2.imwrite(filename + '_output.jpg', tmpimg)
coord_out = coord_out.cpu().numpy()
preds.append(coord_out)
# evaluate
preds = np.concatenate(preds, axis=0)
tester._evaluate(preds, cfg.result_dir)
if __name__ == "__main__":
main()
| parse_args |
Wrapper.js | import React, { Component } from "react";
import DefaultWrapper from "react-styleguidist/lib/rsg-components/Wrapper/Wrapper";
import { ComponentsProvider } from "@reactioncommerce/components-context";
import { StripeProvider } from "react-stripe-elements";
import appComponents from "../appComponents";
class Wrapper extends Component {
render() {
return (
<StripeProvider apiKey="pk_test_zggzXnHNapGS1EKUV7BSLn3p">
<ComponentsProvider value={appComponents}>
<DefaultWrapper {...this.props} />
</ComponentsProvider> | </StripeProvider>
);
}
}
export default Wrapper; | |
help_test.go | // Copyright (c) 2014 The DiviProject developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package btcjson_test
import (
"reflect"
"testing"
"github.com/DiviProject/divid/btcjson"
)
// TestHelpReflectInternals ensures the various help functions which deal with
// reflect types work as expected for various Go types.
func TestHelpReflectInternals(t *testing.T) {
t.Parallel()
tests := []struct {
name string
reflectType reflect.Type
indentLevel int
key string
examples []string
isComplex bool
help string
isInvalid bool
}{
{
name: "int",
reflectType: reflect.TypeOf(int(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "*int",
reflectType: reflect.TypeOf((*int)(nil)), | examples: []string{"n"},
help: "n (json-type-value) fdk",
isInvalid: true,
},
{
name: "int8",
reflectType: reflect.TypeOf(int8(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "int16",
reflectType: reflect.TypeOf(int16(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "int32",
reflectType: reflect.TypeOf(int32(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "int64",
reflectType: reflect.TypeOf(int64(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "uint",
reflectType: reflect.TypeOf(uint(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "uint8",
reflectType: reflect.TypeOf(uint8(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "uint16",
reflectType: reflect.TypeOf(uint16(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "uint32",
reflectType: reflect.TypeOf(uint32(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "uint64",
reflectType: reflect.TypeOf(uint64(0)),
key: "json-type-numeric",
examples: []string{"n"},
help: "n (json-type-numeric) fdk",
},
{
name: "float32",
reflectType: reflect.TypeOf(float32(0)),
key: "json-type-numeric",
examples: []string{"n.nnn"},
help: "n.nnn (json-type-numeric) fdk",
},
{
name: "float64",
reflectType: reflect.TypeOf(float64(0)),
key: "json-type-numeric",
examples: []string{"n.nnn"},
help: "n.nnn (json-type-numeric) fdk",
},
{
name: "string",
reflectType: reflect.TypeOf(""),
key: "json-type-string",
examples: []string{`"json-example-string"`},
help: "\"json-example-string\" (json-type-string) fdk",
},
{
name: "bool",
reflectType: reflect.TypeOf(true),
key: "json-type-bool",
examples: []string{"json-example-bool"},
help: "json-example-bool (json-type-bool) fdk",
},
{
name: "array of int",
reflectType: reflect.TypeOf([1]int{0}),
key: "json-type-arrayjson-type-numeric",
examples: []string{"[n,...]"},
help: "[n,...] (json-type-arrayjson-type-numeric) fdk",
},
{
name: "slice of int",
reflectType: reflect.TypeOf([]int{0}),
key: "json-type-arrayjson-type-numeric",
examples: []string{"[n,...]"},
help: "[n,...] (json-type-arrayjson-type-numeric) fdk",
},
{
name: "struct",
reflectType: reflect.TypeOf(struct{}{}),
key: "json-type-object",
examples: []string{"{", "}\t\t"},
isComplex: true,
help: "{\n} ",
},
{
name: "struct indent level 1",
reflectType: reflect.TypeOf(struct{ field int }{}),
indentLevel: 1,
key: "json-type-object",
examples: []string{
" \"field\": n,\t(json-type-numeric)\t-field",
" },\t\t",
},
help: "{\n" +
" \"field\": n, (json-type-numeric) -field\n" +
"} ",
isComplex: true,
},
{
name: "array of struct indent level 0",
reflectType: func() reflect.Type {
type s struct {
field int
}
return reflect.TypeOf([]s{})
}(),
key: "json-type-arrayjson-type-object",
examples: []string{
"[{",
" \"field\": n,\t(json-type-numeric)\ts-field",
"},...]",
},
help: "[{\n" +
" \"field\": n, (json-type-numeric) s-field\n" +
"},...]",
isComplex: true,
},
{
name: "array of struct indent level 1",
reflectType: func() reflect.Type {
type s struct {
field int
}
return reflect.TypeOf([]s{})
}(),
indentLevel: 1,
key: "json-type-arrayjson-type-object",
examples: []string{
" \"field\": n,\t(json-type-numeric)\ts-field",
" },...],\t\t",
},
help: "[{\n" +
" \"field\": n, (json-type-numeric) s-field\n" +
"},...]",
isComplex: true,
},
{
name: "map",
reflectType: reflect.TypeOf(map[string]string{}),
key: "json-type-object",
examples: []string{"{",
" \"fdk--key\": fdk--value, (json-type-object) fdk--desc",
" ...", "}",
},
help: "{\n" +
" \"fdk--key\": fdk--value, (json-type-object) fdk--desc\n" +
" ...\n" +
"}",
isComplex: true,
},
{
name: "complex",
reflectType: reflect.TypeOf(complex64(0)),
key: "json-type-value",
examples: []string{"json-example-unknown"},
help: "json-example-unknown (json-type-value) fdk",
isInvalid: true,
},
}
xT := func(key string) string {
return key
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Ensure the description key is the expected value.
key := btcjson.TstReflectTypeToJSONType(xT, test.reflectType)
if key != test.key {
t.Errorf("Test #%d (%s) unexpected key - got: %v, "+
"want: %v", i, test.name, key, test.key)
continue
}
// Ensure the generated example is as expected.
examples, isComplex := btcjson.TstReflectTypeToJSONExample(xT,
test.reflectType, test.indentLevel, "fdk")
if isComplex != test.isComplex {
t.Errorf("Test #%d (%s) unexpected isComplex - got: %v, "+
"want: %v", i, test.name, isComplex,
test.isComplex)
continue
}
if len(examples) != len(test.examples) {
t.Errorf("Test #%d (%s) unexpected result length - "+
"got: %v, want: %v", i, test.name, len(examples),
len(test.examples))
continue
}
for j, example := range examples {
if example != test.examples[j] {
t.Errorf("Test #%d (%s) example #%d unexpected "+
"example - got: %v, want: %v", i,
test.name, j, example, test.examples[j])
continue
}
}
// Ensure the generated result type help is as expected.
helpText := btcjson.TstResultTypeHelp(xT, test.reflectType, "fdk")
if helpText != test.help {
t.Errorf("Test #%d (%s) unexpected result help - "+
"got: %v, want: %v", i, test.name, helpText,
test.help)
continue
}
isValid := btcjson.TstIsValidResultType(test.reflectType.Kind())
if isValid != !test.isInvalid {
t.Errorf("Test #%d (%s) unexpected result type validity "+
"- got: %v", i, test.name, isValid)
continue
}
}
}
// TestResultStructHelp ensures the expected help text format is returned for
// various Go struct types.
func TestResultStructHelp(t *testing.T) {
t.Parallel()
tests := []struct {
name string
reflectType reflect.Type
expected []string
}{
{
name: "empty struct",
reflectType: func() reflect.Type {
type s struct{}
return reflect.TypeOf(s{})
}(),
expected: nil,
},
{
name: "struct with primitive field",
reflectType: func() reflect.Type {
type s struct {
field int
}
return reflect.TypeOf(s{})
}(),
expected: []string{
"\"field\": n,\t(json-type-numeric)\ts-field",
},
},
{
name: "struct with primitive field and json tag",
reflectType: func() reflect.Type {
type s struct {
Field int `json:"f"`
}
return reflect.TypeOf(s{})
}(),
expected: []string{
"\"f\": n,\t(json-type-numeric)\ts-f",
},
},
{
name: "struct with array of primitive field",
reflectType: func() reflect.Type {
type s struct {
field []int
}
return reflect.TypeOf(s{})
}(),
expected: []string{
"\"field\": [n,...],\t(json-type-arrayjson-type-numeric)\ts-field",
},
},
{
name: "struct with sub-struct field",
reflectType: func() reflect.Type {
type s2 struct {
subField int
}
type s struct {
field s2
}
return reflect.TypeOf(s{})
}(),
expected: []string{
"\"field\": {\t(json-type-object)\ts-field",
"{",
" \"subfield\": n,\t(json-type-numeric)\ts2-subfield",
"}\t\t",
},
},
{
name: "struct with sub-struct field pointer",
reflectType: func() reflect.Type {
type s2 struct {
subField int
}
type s struct {
field *s2
}
return reflect.TypeOf(s{})
}(),
expected: []string{
"\"field\": {\t(json-type-object)\ts-field",
"{",
" \"subfield\": n,\t(json-type-numeric)\ts2-subfield",
"}\t\t",
},
},
{
name: "struct with array of structs field",
reflectType: func() reflect.Type {
type s2 struct {
subField int
}
type s struct {
field []s2
}
return reflect.TypeOf(s{})
}(),
expected: []string{
"\"field\": [{\t(json-type-arrayjson-type-object)\ts-field",
"[{",
" \"subfield\": n,\t(json-type-numeric)\ts2-subfield",
"},...]",
},
},
}
xT := func(key string) string {
return key
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
results := btcjson.TstResultStructHelp(xT, test.reflectType, 0)
if len(results) != len(test.expected) {
t.Errorf("Test #%d (%s) unexpected result length - "+
"got: %v, want: %v", i, test.name, len(results),
len(test.expected))
continue
}
for j, result := range results {
if result != test.expected[j] {
t.Errorf("Test #%d (%s) result #%d unexpected "+
"result - got: %v, want: %v", i,
test.name, j, result, test.expected[j])
continue
}
}
}
}
// TestHelpArgInternals ensures the various help functions which deal with
// arguments work as expected for various argument types.
func TestHelpArgInternals(t *testing.T) {
t.Parallel()
tests := []struct {
name string
method string
reflectType reflect.Type
defaults map[int]reflect.Value
help string
}{
{
name: "command with no args",
method: "test",
reflectType: func() reflect.Type {
type s struct{}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "",
},
{
name: "command with one required arg",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Field int
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "1. field (json-type-numeric, help-required) test-field\n",
},
{
name: "command with one optional arg, no default",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Optional *int
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "1. optional (json-type-numeric, help-optional) test-optional\n",
},
{
name: "command with one optional arg with default",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Optional *string
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: func() map[int]reflect.Value {
defVal := "test"
return map[int]reflect.Value{
0: reflect.ValueOf(&defVal),
}
}(),
help: "1. optional (json-type-string, help-optional, help-default=\"test\") test-optional\n",
},
{
name: "command with struct field",
method: "test",
reflectType: func() reflect.Type {
type s2 struct {
F int8
}
type s struct {
Field s2
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "1. field (json-type-object, help-required) test-field\n" +
"{\n" +
" \"f\": n, (json-type-numeric) s2-f\n" +
"} \n",
},
{
name: "command with map field",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Field map[string]float64
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "1. field (json-type-object, help-required) test-field\n" +
"{\n" +
" \"test-field--key\": test-field--value, (json-type-object) test-field--desc\n" +
" ...\n" +
"}\n",
},
{
name: "command with slice of primitives field",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Field []int64
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "1. field (json-type-arrayjson-type-numeric, help-required) test-field\n",
},
{
name: "command with slice of structs field",
method: "test",
reflectType: func() reflect.Type {
type s2 struct {
F int64
}
type s struct {
Field []s2
}
return reflect.TypeOf((*s)(nil))
}(),
defaults: nil,
help: "1. field (json-type-arrayjson-type-object, help-required) test-field\n" +
"[{\n" +
" \"f\": n, (json-type-numeric) s2-f\n" +
"},...]\n",
},
}
xT := func(key string) string {
return key
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
help := btcjson.TstArgHelp(xT, test.reflectType, test.defaults,
test.method)
if help != test.help {
t.Errorf("Test #%d (%s) unexpected help - got:\n%v\n"+
"want:\n%v", i, test.name, help, test.help)
continue
}
}
}
// TestMethodHelp ensures the method help function works as expected for various
// command structs.
func TestMethodHelp(t *testing.T) {
t.Parallel()
tests := []struct {
name string
method string
reflectType reflect.Type
defaults map[int]reflect.Value
resultTypes []interface{}
help string
}{
{
name: "command with no args or results",
method: "test",
reflectType: func() reflect.Type {
type s struct{}
return reflect.TypeOf((*s)(nil))
}(),
help: "test\n\ntest--synopsis\n\n" +
"help-arguments:\nhelp-arguments-none\n\n" +
"help-result:\nhelp-result-nothing\n",
},
{
name: "command with no args and one primitive result",
method: "test",
reflectType: func() reflect.Type {
type s struct{}
return reflect.TypeOf((*s)(nil))
}(),
resultTypes: []interface{}{(*int64)(nil)},
help: "test\n\ntest--synopsis\n\n" +
"help-arguments:\nhelp-arguments-none\n\n" +
"help-result:\nn (json-type-numeric) test--result0\n",
},
{
name: "command with no args and two results",
method: "test",
reflectType: func() reflect.Type {
type s struct{}
return reflect.TypeOf((*s)(nil))
}(),
resultTypes: []interface{}{(*int64)(nil), nil},
help: "test\n\ntest--synopsis\n\n" +
"help-arguments:\nhelp-arguments-none\n\n" +
"help-result (test--condition0):\nn (json-type-numeric) test--result0\n\n" +
"help-result (test--condition1):\nhelp-result-nothing\n",
},
{
name: "command with primitive arg and no results",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Field bool
}
return reflect.TypeOf((*s)(nil))
}(),
help: "test field\n\ntest--synopsis\n\n" +
"help-arguments:\n1. field (json-type-bool, help-required) test-field\n\n" +
"help-result:\nhelp-result-nothing\n",
},
{
name: "command with primitive optional and no results",
method: "test",
reflectType: func() reflect.Type {
type s struct {
Field *bool
}
return reflect.TypeOf((*s)(nil))
}(),
help: "test (field)\n\ntest--synopsis\n\n" +
"help-arguments:\n1. field (json-type-bool, help-optional) test-field\n\n" +
"help-result:\nhelp-result-nothing\n",
},
}
xT := func(key string) string {
return key
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
		help := btcjson.TstMethodHelp(xT, test.reflectType,
test.defaults, test.method, test.resultTypes)
if help != test.help {
t.Errorf("Test #%d (%s) unexpected help - got:\n%v\n"+
"want:\n%v", i, test.name, help, test.help)
continue
}
}
}
// TestGenerateHelpErrors ensures the GenerateHelp function returns the expected
// errors.
func TestGenerateHelpErrors(t *testing.T) {
t.Parallel()
tests := []struct {
name string
method string
resultTypes []interface{}
err btcjson.Error
}{
{
name: "unregistered command",
method: "boguscommand",
err: btcjson.Error{ErrorCode: btcjson.ErrUnregisteredMethod},
},
{
name: "non-pointer result type",
method: "help",
resultTypes: []interface{}{0},
err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType},
},
{
name: "invalid result type",
method: "help",
resultTypes: []interface{}{(*complex64)(nil)},
err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType},
},
{
name: "missing description",
method: "help",
resultTypes: []interface{}{(*string)(nil), nil},
err: btcjson.Error{ErrorCode: btcjson.ErrMissingDescription},
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
_, err := btcjson.GenerateHelp(test.method, nil,
test.resultTypes...)
if reflect.TypeOf(err) != reflect.TypeOf(test.err) {
t.Errorf("Test #%d (%s) wrong error - got %T (%v), "+
"want %T", i, test.name, err, err, test.err)
continue
}
gotErrorCode := err.(btcjson.Error).ErrorCode
if gotErrorCode != test.err.ErrorCode {
t.Errorf("Test #%d (%s) mismatched error code - got "+
"%v (%v), want %v", i, test.name, gotErrorCode,
err, test.err.ErrorCode)
continue
}
}
}
// TestGenerateHelp performs a very basic test to ensure GenerateHelp is working
// as expected. The internals are tested much more thoroughly in other tests, so
// there is no need to add more tests here.
func TestGenerateHelp(t *testing.T) {
t.Parallel()
descs := map[string]string{
"help--synopsis": "test",
"help-command": "test",
}
help, err := btcjson.GenerateHelp("help", descs)
if err != nil {
t.Fatalf("GenerateHelp: unexpected error: %v", err)
}
wantHelp := "help (\"command\")\n\n" +
"test\n\nArguments:\n1. command (string, optional) test\n\n" +
"Result:\nNothing\n"
if help != wantHelp {
t.Fatalf("GenerateHelp: unexpected help - got\n%v\nwant\n%v",
help, wantHelp)
}
} | key: "json-type-value", |
void_type.rs | use llvm_sys::prelude::LLVMTypeRef;
use crate::context::ContextRef;
use crate::types::traits::AsTypeRef;
use crate::types::{Type, BasicTypeEnum, FunctionType};
/// A `VoidType` is a special type with no possible direct instances. It's only
/// useful as a function return type.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct VoidType<'ctx> {
void_type: Type<'ctx>,
}
impl<'ctx> VoidType<'ctx> {
pub(crate) fn new(void_type: LLVMTypeRef) -> Self {
assert!(!void_type.is_null());
VoidType {
void_type: Type::new(void_type),
}
}
// REVIEW: Always false -> const fn?
    /// Gets whether or not this `VoidType` is sized. This may always
    /// be false, and as such this function may be removed in the future.
///
/// # Example
///
/// ```no_run
/// use inkwell::context::Context;
///
/// let context = Context::create();
/// let void_type = context.void_type();
///
/// assert!(void_type.is_sized());
/// ```
pub fn is_sized(self) -> bool {
self.void_type.is_sized()
}
/// Gets a reference to the `Context` this `VoidType` was created in.
///
/// # Example
///
/// ```no_run
/// use inkwell::context::Context;
///
/// let context = Context::create();
/// let void_type = context.void_type();
///
/// assert_eq!(*void_type.get_context(), context);
/// ```
pub fn get_context(self) -> ContextRef<'ctx> {
self.void_type.get_context()
}
/// Creates a `FunctionType` with this `VoidType` for its return type.
    /// This means the function does not return a value.
///
/// # Example
///
/// ```no_run
/// use inkwell::context::Context;
///
/// let context = Context::create();
/// let void_type = context.void_type();
/// let fn_type = void_type.fn_type(&[], false);
/// ```
pub fn fn_type(self, param_types: &[BasicTypeEnum<'ctx>], is_var_args: bool) -> FunctionType<'ctx> {
self.void_type.fn_type(param_types, is_var_args)
}
// See Type::print_to_stderr note on 5.0+ status
/// Prints the definition of a `VoidType` to stderr. Not available in newer LLVM versions.
#[llvm_versions(3.7..=4.0)]
pub fn print_to_stderr(self) {
self.void_type.print_to_stderr()
}
}
impl AsTypeRef for VoidType<'_> {
fn as_type_ref(&self) -> LLVMTypeRef |
}
| {
self.void_type.ty
} |
refs.py | # Copyright 2017,2018,2019,2020,2021 Sony Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from six.moves import range
import itertools
import numpy as np
def get_conv_out_size(w, k, p, s, d=1):
return (w + 2 * p - (d * (k - 1) + 1)) // s + 1
def get_deconv_out_size(w, k, p, s, d):
return s * (w - 1) - 2 * p + (d * (k - 1) + 1)
def get_pool_out_size(w, k, p, s, ignore_border):
return (w + p - ((k - p) if ignore_border else 1)) // s + 1
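# The helpers above implement the usual output-size formulas for convolution,
# deconvolution and pooling. The function below is only a hedged, illustrative
# sanity check added for clarity (it is not part of the original reference
# code): with a 32-wide input, kernel 3, pad 1, stride 1 and dilation 1, the
# spatial size is preserved by convolution and recovered by deconvolution.
def _example_output_sizes():
    # conv: (32 + 2*1 - (1*(3-1) + 1)) // 1 + 1 == 32
    assert get_conv_out_size(32, 3, 1, 1) == 32
    # deconv: 1*(32 - 1) - 2*1 + (1*(3-1) + 1) == 32
    assert get_deconv_out_size(32, 3, 1, 1, 1) == 32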
class ChannelLastToFirstTranspose(object):
|
def convolution_1d(x, w, b, pad, stride, dilation, group, dtype=np.float32):
"""
"""
C, H = x.shape
K, Cg, M = w.shape
Ho = get_conv_out_size(H, M, pad[0], stride[0], dilation[0])
x_pad = np.zeros((C, H + pad[0] * 2), dtype=dtype)
x_pad[:, pad[0]:pad[0] + H] = x
y = np.zeros((K, Ho), dtype=dtype)
for k in range(K):
g = int(k // (K // group))
for ho in range(Ho):
hi = ho * stride[0] + np.arange(0, M) * dilation[0]
ci = np.arange(g * Cg, (g + 1) * Cg)
y[k, ho] = (w[k] * x_pad[np.ix_(ci, hi)]).sum()
if b is not None:
y += b[..., np.newaxis]
return y
def convolution_2d(x, w, b, pad, stride, dilation, group, dtype=np.float32):
"""
"""
C, H, W = x.shape
K, Cg, M, N = w.shape
Ho = get_conv_out_size(H, M, pad[0], stride[0], dilation[0])
Wo = get_conv_out_size(W, N, pad[1], stride[1], dilation[1])
x_pad = np.zeros((C, H + pad[0] * 2, W + pad[1] * 2), dtype=dtype)
x_pad[:, pad[0]:pad[0] + H, pad[1]:pad[1] + W] = x
y = np.zeros((K, Ho, Wo), dtype=dtype)
for k in range(K):
g = int(k // (K // group))
for ho in range(Ho):
for wo in range(Wo):
hi = ho * stride[0] + np.arange(0, M) * dilation[0]
wi = wo * stride[1] + np.arange(0, N) * dilation[1]
ci = np.arange(g * Cg, (g + 1) * Cg)
y[k, ho, wo] = (w[k] * x_pad[np.ix_(ci, hi, wi)]).sum()
if b is not None:
y += b[..., np.newaxis, np.newaxis]
return y
def convolution_nd(x, w, b, pad, stride, dilation, group, dtype=np.float32):
"""
"""
C = x.shape[0]
inshape = x.shape[1:]
ndim = len(inshape)
assert w.ndim == ndim + 2
K, Cg = w.shape[:2]
kshape = w.shape[2:]
def get_conv_out_size_recursive(d, ndim):
if d == ndim:
return []
s = get_conv_out_size(
inshape[d], kshape[d], pad[d], stride[d], dilation[d])
return [s] + get_conv_out_size_recursive(d + 1, ndim)
outshape = get_conv_out_size_recursive(0, ndim)
inshape_pad = [C] + [inshape[d] + 2 * pad[d] for d in range(ndim)]
x_pad = np.zeros(inshape_pad, dtype=dtype)
x_pad[[slice(None,)] + [slice(pad[d], pad[d] + inshape[d])
for d in range(ndim)]] = x
y = np.zeros([K] + outshape, dtype=dtype)
for k in range(K):
g = int(k // (K // group))
for outindex in itertools.product(*map(range, outshape)):
inindex = [outindex[d] * stride[d] +
np.arange(0, kshape[d]) * dilation[d] for d in range(ndim)]
ci = np.arange(g * Cg, (g + 1) * Cg)
y[(k,) + tuple(outindex)] = (w[k] *
x_pad[np.ix_(ci, *inindex)]).sum()
if b is not None:
y += b[[Ellipsis] + [np.newaxis for d in range(ndim)]]
return y
def deconvolution_1d(x, w, b, pad, stride, dilation, group, dtype=np.float32,
output_padding=(0,)):
y = x
K, Ho = y.shape
K, Cg, M = w.shape
C = Cg * group
H = (get_deconv_out_size(Ho, M, pad[0], stride[0], dilation[0])
+ output_padding[0])
x_pad = np.zeros((C, H + pad[0] * 2), dtype=dtype)
for k in range(K):
g = int(k // (K // group))
for ho in range(Ho):
hi = ho * stride[0] + np.arange(0, M) * dilation[0]
ci = np.arange(g * Cg, (g + 1) * Cg)
x_pad[np.ix_(ci, hi)] += w[k] * y[k, ho]
x = x_pad[:, pad[0]:pad[0] + H]
if b is not None:
x += b[..., np.newaxis]
return x
def deconvolution_2d(x, w, b, pad, stride, dilation, group, dtype=np.float32,
output_padding=(0, 0)):
y = x
K, Ho, Wo = y.shape
K, Cg, M, N = w.shape
C = Cg * group
H = (get_deconv_out_size(Ho, M, pad[0], stride[0], dilation[0])
+ output_padding[0])
W = (get_deconv_out_size(Wo, N, pad[1], stride[1], dilation[1])
+ output_padding[1])
x_pad = np.zeros((C, H + pad[0] * 2, W + pad[1] * 2), dtype=dtype)
for k in range(K):
g = int(k // (K // group))
for ho in range(Ho):
for wo in range(Wo):
hi = ho * stride[0] + np.arange(0, M) * dilation[0]
wi = wo * stride[1] + np.arange(0, N) * dilation[1]
ci = np.arange(g * Cg, (g + 1) * Cg)
x_pad[np.ix_(ci, hi, wi)] += w[k] * y[k, ho, wo]
x = x_pad[:, pad[0]:pad[0] + H, pad[1]:pad[1] + W]
if b is not None:
x += b[..., np.newaxis, np.newaxis]
return x
def deformable_convolution_2d(x, w, offset, mask, b, pad, stride,
dilation, group, deformable_group,
channel_last, dtype=np.float32):
"""
    Deformable convolution 2D for a single sample (no batch dimension)
"""
C, H, W = x.shape # without batch dimension
K, Cg, M, N = w.shape
assert C == Cg * \
group, "Wrong shape, x: {}, w: {}".format(x.shape, w.shape)
assert offset.shape[0] == 2 * deformable_group * M * N, \
"Wrong shape offset: {}, 2 * deformable_group * Kw * Kh: {}".format(
offset.shape, 2 * deformable_group * M * N)
assert offset.shape[1:] == (
H, W), "Wrong shape, offset: {}, w: {}".format(offset.shape, w.shape)
assert mask.shape[0] == deformable_group * M * N, \
"Wrong shape mask: {}, deformable_group * Kw * Kh: {}".format(
mask.shape, deformable_group * M * N)
assert mask.shape[1:] == (
H, W), "Wrong shape, mask: {}, w: {}".format(mask.shape, w.shape)
assert pad[0] < (w.shape[2] + 1)//2 and pad[1] < (w.shape[3] +
1)//2, "Wrong shape, kernel: {}, pad: {}".format(w.shape[2:], pad)
# Zero padding
x_pad = np.zeros((C, H + pad[0] * 2, W + pad[1] * 2), dtype=dtype)
x_pad[:, pad[0]:pad[0] + H, pad[1]:pad[1] + W] = x
# Create and initialize output variable
Ho = get_conv_out_size(H, M, pad[0], stride[0], dilation[0])
Wo = get_conv_out_size(W, N, pad[1], stride[1], dilation[1])
y = np.zeros((K, Ho, Wo), dtype=dtype)
_, Hp, Wp = x_pad.shape
# Deformable Convolution
for k in range(K):
for c in range(C//group):
g = k // (K//group)
ci = Cg * g + c
dg = ci // (C // deformable_group)
for ho in range(Ho):
for wo in range(Wo):
# Get the input coordinates {(hi, wi)} which are
# mapped to the output coordinate (ho, wo) by the kernel.
hi = ho * stride[0] + np.arange(0, M) * dilation[0]
wi = wo * stride[1] + np.arange(0, N) * dilation[1]
# Apply the kernel
modulated_x = np.zeros((M, N), dtype=dtype)
for m in range(M):
for n in range(N):
# Shift (hi, wi) to (ph, pw) by using offset
ph = hi[m] + offset[2*((dg*M*N) + (m * N) + n),
ho * stride[0], wo * stride[1]]
pw = wi[n] + offset[2*((dg*M*N) + (m * N) + n) + 1,
ho * stride[0], wo * stride[1]]
# Bilinear interpolation
h_low = int(np.floor(ph))
w_low = int(np.floor(pw))
h_high = h_low + 1
w_high = w_low + 1
if h_low >= Hp or w_low >= Wp or \
h_high < 0 or w_high < 0:
# Out of bounds.
                                # Interpolation cannot be performed.
val = 0
else:
v1 = 0 # (h_low, w_low)
v2 = 0 # (h_low, w_high)
v3 = 0 # (h_high, w_low)
v4 = 0 # (h_high, w_high)
if h_low >= 0 and w_low >= 0:
v1 = x_pad[ci, h_low, w_low]
if h_low >= 0 and w_high < Wp:
v2 = x_pad[ci, h_low, w_high]
if h_high < Hp and w_low >= 0:
v3 = x_pad[ci, h_high, w_low]
if h_high < Hp and w_high < Wp:
v4 = x_pad[ci, h_high, w_high]
lh = ph - h_low
lw = pw - w_low
hh = 1 - lh
hw = 1 - lw
w1 = hh * hw
w2 = hh * lw
w3 = lh * hw
w4 = lh * lw
val = w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4
# Apply mask
val *= mask[(dg*M*N) + (m * N) + n,
ho * stride[0], wo * stride[1]]
modulated_x[m, n] = val
y[k, ho, wo] += (w[k, c] * modulated_x).sum()
if b is not None:
y += b[..., np.newaxis, np.newaxis]
return y
def pooling_2d(x, mode, kernel, stride, pad, ignore_border=True,
including_pad=True, dtype=np.float32):
"""
"""
assert mode in ['average', 'sum', 'max']
C, H, W = x.shape
Ho = get_pool_out_size(H, kernel[0], pad[0], stride[0], ignore_border)
Wo = get_pool_out_size(W, kernel[1], pad[1], stride[1], ignore_border)
Hi = H + pad[0] + (pad[0] if ignore_border else kernel[0] - 1)
Wi = W + pad[1] + (pad[1] if ignore_border else kernel[1] - 1)
x_pad = np.ones((C, Hi, Wi), dtype=dtype)
x_pad *= x.min() if mode == 'max' else 0
x_pad[:, pad[0]:pad[0] + H, pad[1]:pad[1] + W] = x
if mode == 'average':
b_pad = np.zeros((C, Hi, Wi), dtype=np.uint)
h_beg = int(not including_pad) * pad[0]
w_beg = int(not including_pad) * pad[1]
h_end = H + (1 + int(including_pad)) * pad[0]
w_end = W + (1 + int(including_pad)) * pad[1]
b_pad[:, h_beg:h_end, w_beg:w_end] = 1
y = np.zeros((C, Ho, Wo), dtype=dtype)
for c in range(C):
for ho in range(Ho):
for wo in range(Wo):
hi = ho * stride[0] + np.arange(0, kernel[0])
wi = wo * stride[1] + np.arange(0, kernel[1])
yy = y[c]
xx = x_pad[c]
if mode == "max":
yy[ho, wo] = xx[np.ix_(hi, wi)].max()
elif mode == "sum":
yy[ho, wo] = xx[np.ix_(hi, wi)].sum()
elif mode == "average":
pad_sum = xx[np.ix_(hi, wi)].sum()
pad_cnt = b_pad[c][np.ix_(hi, wi)].sum()
yy[ho, wo] = pad_sum / pad_cnt
return y
def pooling_3d(x, mode, kernel, stride, pad, ignore_border=True,
including_pad=True, dtype=np.float32):
"""
"""
assert mode in ['average', 'sum', 'max']
C, Z, H, W = x.shape
Zo = get_pool_out_size(Z, kernel[0], pad[0], stride[0], ignore_border)
Ho = get_pool_out_size(H, kernel[1], pad[1], stride[1], ignore_border)
Wo = get_pool_out_size(W, kernel[2], pad[2], stride[2], ignore_border)
Zi = Z + pad[0] + (pad[0] if ignore_border else kernel[0] - 1)
Hi = H + pad[1] + (pad[1] if ignore_border else kernel[1] - 1)
Wi = W + pad[2] + (pad[2] if ignore_border else kernel[2] - 1)
x_pad = np.ones((C, Zi, Hi, Wi), dtype=dtype)
x_pad *= x.min() if mode == 'max' else 0
x_pad[:, pad[0]:pad[0] + Z, pad[1]:pad[1] + H, pad[2]:pad[2] + W] = x
if mode == 'average':
b_pad = np.zeros((C, Zi, Hi, Wi), dtype=np.uint)
z_beg = int(not including_pad) * pad[0]
h_beg = int(not including_pad) * pad[1]
w_beg = int(not including_pad) * pad[2]
z_end = Z + (1 + int(including_pad)) * pad[0]
h_end = H + (1 + int(including_pad)) * pad[1]
w_end = W + (1 + int(including_pad)) * pad[2]
b_pad[:, z_beg:z_end, h_beg:h_end, w_beg:w_end] = 1
#b_pad[:, pad[0]:pad[0] + Z, pad[1]:pad[1] + H, pad[2]:pad[2] + W] = 1
y = np.zeros((C, Zo, Ho, Wo), dtype=dtype)
for c in range(C):
for zo in range(Zo):
for ho in range(Ho):
for wo in range(Wo):
zi = zo * stride[0] + np.arange(0, kernel[0])
hi = ho * stride[1] + np.arange(0, kernel[1])
wi = wo * stride[2] + np.arange(0, kernel[2])
yy = y[c]
xx = x_pad[c]
if mode == "max":
yy[zo, ho, wo] = xx[np.ix_(zi, hi, wi)].max()
elif mode == "sum":
yy[zo, ho, wo] = xx[np.ix_(zi, hi, wi)].sum()
elif mode == "average":
pool_sum = xx[np.ix_(zi, hi, wi)].sum()
pool_cnt = b_pad[c][np.ix_(zi, hi, wi)].sum()
yy[zo, ho, wo] = pool_sum / pool_cnt
return y
def generate_rotation_2d(rng, B):
rotates = []
for i in range(B):
degree = 2 * np.pi * (2.0 * rng.rand() - 1.0)
c, s = np.cos(degree), np.sin(degree)
rotate = np.asarray([[c, -s],
[s, c]])
rotates.append(rotate)
return np.asarray(rotates)
def generate_rotation_3d(rng, B):
rotates = []
for i in range(B):
alpha = np.pi * (2.0 * rng.rand() - 1.0)
beta = np.pi / 2.0 * (2.0 * rng.rand() - 1.0)
gamma = np.pi * (2.0 * rng.rand() - 1.0)
c, s = np.cos(alpha), np.sin(alpha)
Ra = np.asarray([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
c, s = np.cos(beta), np.sin(beta)
Rb = np.asarray([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
c, s = np.cos(gamma), np.sin(gamma)
Rg = np.asarray([[1, 0, 0],
[0, c, -s],
[0, s, c]])
rotate = Ra.dot(Rb).dot(Rg)
rotates.append(rotate)
return np.asarray(rotates)
def generate_transformation_2d(rng, batch_size):
rotate = generate_rotation_2d(rng, batch_size)
translate = (2.0 * rng.rand(batch_size, 2, 1) - 1.0) * 0.001
theta = np.concatenate([rotate, translate], axis=2)
return theta.astype(np.float32)
def generate_transformation_3d(rng, batch_size):
rotate = generate_rotation_3d(rng, batch_size)
translate = (2.0 * rng.rand(batch_size, 3, 1) - 1.0) * 0.001
theta = np.concatenate([rotate, translate], axis=2)
return theta.astype(np.float32)
def generate_normalized_grid_2d(B, size, align_corners):
H, W = size
hgrid = np.linspace(-1.0, 1.0, H)
wgrid = np.linspace(-1.0, 1.0, W)
hgrid = hgrid if align_corners else hgrid * (H - 1) / H
wgrid = wgrid if align_corners else wgrid * (W - 1) / W
w, h = np.meshgrid(wgrid, hgrid)
x = w.reshape(-1)
y = h.reshape(-1)
t = np.ones(len(x))
normalized_grid = np.stack((x, y, t), axis=1)
normalized_grid = normalized_grid.reshape(H, W, 3)
normalized_grid = np.repeat(
normalized_grid[np.newaxis, :, :, :], B, axis=0)
return normalized_grid.astype(np.float32)
def generate_normalized_grid_3d(B, size, align_corners):
D, H, W = size
dgrid = np.linspace(-1.0, 1.0, D)
hgrid = np.linspace(-1.0, 1.0, H)
wgrid = np.linspace(-1.0, 1.0, W)
dgrid = dgrid if align_corners else dgrid * (D - 1) / D
hgrid = hgrid if align_corners else hgrid * (H - 1) / H
wgrid = wgrid if align_corners else wgrid * (W - 1) / W
h, d, w = np.meshgrid(hgrid, dgrid, wgrid)
x = w.reshape(-1)
y = h.reshape(-1)
z = d.reshape(-1)
t = np.ones(len(x))
normalized_grid = np.stack((x, y, z, t), axis=1)
normalized_grid = normalized_grid.reshape(D, H, W, 4)
normalized_grid = np.repeat(
normalized_grid[np.newaxis, :, :, :, :], B, axis=0)
return normalized_grid.astype(np.float32)
def affine_grid_2d(affine, size, align_corners):
B = affine.shape[0]
H, W = size
grid_t = generate_normalized_grid_2d(B, size, align_corners)
grid_s = np.matmul(grid_t.reshape(B, H * W, 3),
affine.transpose((0, 2, 1)))
grid_s = grid_s.reshape(B, H, W, 2)
return grid_s.astype(np.float32)
def affine_grid_3d(affine, size, align_corners):
B = affine.shape[0]
D, H, W = size
grid_t = generate_normalized_grid_3d(B, size, align_corners)
grid_s = np.matmul(grid_t.reshape(B, D * H * W, 4),
affine.transpose((0, 2, 1)))
grid_s = grid_s.reshape(B, D, H, W, 3)
return grid_s.astype(np.float32)
def pad_sequence(sequences, batch_first):
# sequences: list of nparray
# sequences[i]: (T_i, D_1, ..., D_M)
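    # Hedged example for clarity (not in the original code): two sequences with
    # shapes (3, 2) and (5, 2) give a zero-padded result of shape (2, 5, 2)
    # when batch_first=True, and (5, 2, 2) otherwise.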
Ds = () if len(sequences[0].shape) == 1 else sequences[0].shape[1:]
B = len(sequences)
T = max([seq.shape[0] for seq in sequences])
data = np.zeros((B, T) + Ds) if batch_first else np.zeros((T, B) + Ds)
for b, seq in enumerate(sequences):
l = seq.shape[0]
if batch_first:
data[b, :l] = seq
else:
data[:l, b] = seq
return data
| def __init__(self, dim, kdim):
base_axis = dim - kdim - 1
up_to_base = tuple(range(0, base_axis))
self.axes = up_to_base + (dim - 1,) + tuple(range(base_axis, dim - 1))
self.inv_axes = up_to_base + \
tuple(range(base_axis + 1, dim)) + (base_axis,)
def __call__(self, x):
return x.transpose(self.axes).copy()
def inv(self, x):
return x.transpose(self.inv_axes).copy() |
block_test.py | from __future__ import annotations
from numpy import int32
from pentagram.interpret.block import interpret_block
from pentagram.interpret.test import init_test_frame_stack
from pentagram.machine import MachineExpressionStack
from pentagram.machine import MachineFrameStack
from pentagram.machine import MachineNumber
from pentagram.syntax import SyntaxBlock
from pentagram.syntax import SyntaxExpression
from pentagram.syntax import SyntaxNumber
def test_interpret_block_enter() -> None:
|
def test_interpret_block_exit() -> None:
block = SyntaxBlock(
[SyntaxExpression([SyntaxNumber(int32(4))])]
)
frame_stack = init_test_frame_stack(
block, MachineExpressionStack([]), statement_index=1
)
interpret_block(frame_stack)
assert frame_stack == MachineFrameStack([])
| block = SyntaxBlock(
[SyntaxExpression([SyntaxNumber(int32(4))])]
)
frame_stack = init_test_frame_stack(
block, MachineExpressionStack([])
)
interpret_block(frame_stack)
assert frame_stack == init_test_frame_stack(
block,
MachineExpressionStack([MachineNumber(int32(4))]),
term_index=1,
) |
service.rs | // Copyright 2015-2020 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.
// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
//! Tests for the snapshot service.
use std::fs;
use std::sync::Arc;
use tempdir::TempDir;
use blockchain::BlockProvider;
use ethcore::client::{Client, ClientConfig};
use client_traits::{BlockInfo, ImportBlock};
use common_types::{
io_message::ClientIoMessage,
ids::BlockId,
snapshot::Progress,
verification::Unverified,
snapshot::{ManifestData, RestorationStatus},
};
use snapshot::{
chunk_state, chunk_secondary, SnapshotService,
io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter},
service::{Service, ServiceParams, Guard, Restoration, RestorationParams},
PowSnapshot,
};
use spec;
use ethcore::{
miner,
test_helpers::{new_db, new_temp_db, generate_dummy_client_with_spec_and_data, restoration_db_handler}
};
use parking_lot::{Mutex, RwLock};
use ethcore_io::{IoChannel, IoService};
use kvdb_rocksdb::DatabaseConfig;
use journaldb::Algorithm;
#[test]
fn sends_async_messages() {
let gas_prices = vec![1.into(), 2.into(), 3.into(), 999.into()];
let client = generate_dummy_client_with_spec_and_data(spec::new_null, 400, 5, &gas_prices, false);
let service = IoService::<ClientIoMessage<Client>>::start().unwrap();
let spec = spec::new_test();
let tempdir = TempDir::new("").unwrap();
let dir = tempdir.path().join("snapshot");
let snapshot_params = ServiceParams {
engine: spec.engine.clone(),
genesis_block: spec.genesis_block(),
restoration_db_handler: restoration_db_handler(Default::default()),
pruning: Algorithm::Archive,
channel: service.channel(),
snapshot_root: dir,
client,
};
let service = Service::new(snapshot_params).unwrap();
assert!(service.manifest().is_none());
assert!(service.chunk(Default::default()).is_none());
assert_eq!(service.status(), RestorationStatus::Inactive);
let manifest = ManifestData {
version: 2,
state_hashes: vec![],
block_hashes: vec![],
state_root: Default::default(),
block_number: 0,
block_hash: Default::default(),
};
service.begin_restore(manifest);
service.abort_restore();
service.restore_state_chunk(Default::default(), vec![]);
service.restore_block_chunk(Default::default(), vec![]);
}
#[test]
fn cannot_finish_with_invalid_chunks() {
use ethereum_types::H256;
use kvdb_rocksdb::DatabaseConfig;
let spec = spec::new_test();
let tempdir = TempDir::new("").unwrap();
let state_hashes: Vec<_> = (0..5).map(|_| H256::random()).collect();
let block_hashes: Vec<_> = (0..5).map(|_| H256::random()).collect();
let db_config = DatabaseConfig::with_columns(ethcore_db::NUM_COLUMNS);
let gb = spec.genesis_block();
let flag = ::std::sync::atomic::AtomicBool::new(true);
let engine = &*spec.engine.clone();
let params = RestorationParams::new(
ManifestData {
version: 2,
state_hashes: state_hashes.clone(),
block_hashes: block_hashes.clone(),
state_root: H256::zero(),
block_number: 100000,
block_hash: H256::zero(),
},
Algorithm::Archive,
restoration_db_handler(db_config).open(&tempdir.path().to_owned()).unwrap(),
None,
&gb,
Guard::benign(),
engine,
);
let mut restoration = Restoration::new(params).unwrap();
let definitely_bad_chunk = [1, 2, 3, 4, 5];
for hash in state_hashes {
assert!(restoration.feed_state(hash, &definitely_bad_chunk, &flag).is_err());
assert!(!restoration.is_done());
}
for hash in block_hashes {
assert!(restoration.feed_blocks(hash, &definitely_bad_chunk, &*spec.engine, &flag).is_err());
assert!(!restoration.is_done());
}
}
#[test]
fn restored_is_equivalent() {
let _ = ::env_logger::try_init();
const NUM_BLOCKS: u32 = 400;
const TX_PER: usize = 5;
let gas_prices = vec![1.into(), 2.into(), 3.into(), 999.into()];
let client = generate_dummy_client_with_spec_and_data(spec::new_null, NUM_BLOCKS, TX_PER, &gas_prices, false);
let tempdir = TempDir::new("").unwrap();
let client_db = tempdir.path().join("client_db");
let path = tempdir.path().join("snapshot");
let db_config = DatabaseConfig::with_columns(ethcore_db::NUM_COLUMNS);
let restoration = restoration_db_handler(db_config);
let blockchain_db = restoration.open(&client_db).unwrap();
let spec = spec::new_null();
let client2 = Client::new(
Default::default(),
&spec,
blockchain_db,
Arc::new(miner::Miner::new_for_tests(&spec, None)),
IoChannel::disconnected(),
).unwrap();
let service_params = ServiceParams {
engine: spec.engine.clone(),
genesis_block: spec.genesis_block(),
restoration_db_handler: restoration,
pruning: ::journaldb::Algorithm::Archive,
channel: IoChannel::disconnected(),
snapshot_root: path,
client: client2.clone(),
};
let service = Service::new(service_params).unwrap();
service.take_snapshot(&*client, NUM_BLOCKS as u64).unwrap();
let manifest = service.manifest().unwrap();
service.init_restore(manifest.clone(), true).unwrap();
assert!(service.init_restore(manifest.clone(), true).is_ok());
for hash in manifest.state_hashes {
let chunk = service.chunk(hash).unwrap();
service.feed_state_chunk(hash, &chunk);
}
for hash in manifest.block_hashes {
let chunk = service.chunk(hash).unwrap();
service.feed_block_chunk(hash, &chunk);
}
assert_eq!(service.status(), RestorationStatus::Inactive);
for x in 0..NUM_BLOCKS {
let block1 = client.block(BlockId::Number(x as u64)).unwrap();
let block2 = client2.block(BlockId::Number(x as u64)).unwrap();
assert_eq!(block1, block2);
}
}
// On Windows the guards deletion (remove_dir_all)
// does not happen (the directory-not-empty error occurs),
// so the test is disabled until the Windows API behaves.
#[cfg(not(target_os = "windows"))]
#[test]
fn guards_delete_folders() {
let gas_prices = vec![1.into(), 2.into(), 3.into(), 999.into()];
let client = generate_dummy_client_with_spec_and_data(spec::new_null, 400, 5, &gas_prices, false);
let spec = spec::new_null();
let tempdir = TempDir::new("").unwrap();
let service_params = ServiceParams {
engine: spec.engine.clone(),
genesis_block: spec.genesis_block(),
restoration_db_handler: restoration_db_handler(DatabaseConfig::with_columns(ethcore_db::NUM_COLUMNS)),
pruning: ::journaldb::Algorithm::Archive,
channel: IoChannel::disconnected(),
snapshot_root: tempdir.path().to_owned(),
client: client,
};
let service = Service::new(service_params).unwrap();
let path = tempdir.path().join("restoration");
let manifest = ManifestData {
version: 2,
state_hashes: vec![],
block_hashes: vec![],
block_number: 0,
block_hash: Default::default(),
state_root: Default::default(),
};
service.init_restore(manifest.clone(), true).unwrap();
assert!(path.exists());
// The `db` folder should have been deleted,
	// while the `temp` one is kept
service.abort_restore();
assert!(!path.join("db").exists());
assert!(path.join("temp").exists());
service.init_restore(manifest.clone(), true).unwrap();
assert!(path.exists());
drop(service);
assert!(!path.join("db").exists());
assert!(path.join("temp").exists());
}
#[test]
fn keep_ancient_blocks() {
let _ = ::env_logger::try_init();
// Test variables | // Temporary folders
let tempdir = TempDir::new("").unwrap();
let snapshot_path = tempdir.path().join("SNAP");
// Generate blocks
let gas_prices = vec![1.into(), 2.into(), 3.into(), 999.into()];
let spec_f = spec::new_null;
let spec = spec_f();
let client = generate_dummy_client_with_spec_and_data(spec_f, NUM_BLOCKS as u32, 5, &gas_prices, false);
let bc = client.chain();
// Create the Snapshot
let best_hash = bc.best_block_hash();
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
let block_hashes = chunk_secondary(
Box::new(SNAPSHOT_MODE),
&bc,
best_hash,
&writer,
&RwLock::new(Progress::new())
).unwrap();
let state_db = client.state_db().journal_db().boxed_clone();
let start_header = bc.block_header_data(&best_hash).unwrap();
let state_root = start_header.state_root();
let state_hashes = chunk_state(
state_db.as_hash_db(),
&state_root,
&writer,
&RwLock::new(Progress::new()),
None,
0
).unwrap();
let manifest = ManifestData {
version: 2,
state_hashes,
state_root,
block_hashes,
block_number: NUM_BLOCKS,
block_hash: best_hash,
};
writer.into_inner().finish(manifest.clone()).unwrap();
// Initialize the Client
let db_config = DatabaseConfig::with_columns(ethcore_db::NUM_COLUMNS);
let client_db = new_temp_db(&tempdir.path());
let client2 = Client::new(
ClientConfig::default(),
&spec,
client_db,
Arc::new(miner::Miner::new_for_tests(&spec, None)),
IoChannel::disconnected(),
).unwrap();
// Add some ancient blocks
for block_number in 1..50 {
let block_hash = bc.block_hash(block_number).unwrap();
let block = bc.block(&block_hash).unwrap();
client2.import_block(Unverified::from_rlp(block.into_inner()).unwrap()).unwrap();
}
client2.flush_queue();
// Restore the Snapshot
let reader = PackedReader::new(&snapshot_path).unwrap().unwrap();
let service_params = ServiceParams {
engine: spec.engine.clone(),
genesis_block: spec.genesis_block(),
restoration_db_handler: restoration_db_handler(db_config),
pruning: ::journaldb::Algorithm::Archive,
channel: IoChannel::disconnected(),
snapshot_root: tempdir.path().to_owned(),
client: client2.clone(),
};
let service = Service::new(service_params).unwrap();
service.init_restore(manifest.clone(), false).unwrap();
for hash in &manifest.block_hashes {
let chunk = reader.chunk(*hash).unwrap();
service.feed_block_chunk(*hash, &chunk);
}
for hash in &manifest.state_hashes {
let chunk = reader.chunk(*hash).unwrap();
service.feed_state_chunk(*hash, &chunk);
}
match service.status() {
RestorationStatus::Inactive => (),
RestorationStatus::Failed => panic!("Snapshot Restoration has failed."),
RestorationStatus::Ongoing { .. } => panic!("Snapshot Restoration should be done."),
_ => panic!("Invalid Snapshot Service status."),
}
// Check that the latest block number is the right one
assert_eq!(client2.block(BlockId::Latest).unwrap().number(), NUM_BLOCKS as u64);
// Check that we have blocks in [NUM_BLOCKS - NUM_SNAPSHOT_BLOCKS + 1 ; NUM_BLOCKS]
// but none before
assert!(client2.block(BlockId::Number(NUM_BLOCKS - NUM_SNAPSHOT_BLOCKS + 1)).is_some());
assert!(client2.block(BlockId::Number(100)).is_none());
// Check that the first 50 blocks have been migrated
for block_number in 1..49 {
assert!(client2.block(BlockId::Number(block_number)).is_some());
}
}
#[test]
fn recover_aborted_recovery() {
let _ = env_logger::try_init();
const NUM_BLOCKS: u32 = 400;
let gas_prices = vec![1.into(), 2.into(), 3.into(), 999.into()];
let client = generate_dummy_client_with_spec_and_data(spec::new_null, NUM_BLOCKS, 5, &gas_prices, false);
let spec = spec::new_null();
let tempdir = TempDir::new("").unwrap();
let db_config = DatabaseConfig::with_columns(ethcore_db::NUM_COLUMNS);
let client_db = new_db();
let client2 = Client::new(
Default::default(),
&spec,
client_db,
Arc::new(miner::Miner::new_for_tests(&spec, None)),
IoChannel::disconnected(),
).unwrap();
let service_params = ServiceParams {
engine: spec.engine.clone(),
genesis_block: spec.genesis_block(),
restoration_db_handler: restoration_db_handler(db_config),
pruning: ::journaldb::Algorithm::Archive,
channel: IoChannel::disconnected(),
snapshot_root: tempdir.path().to_owned(),
client: client2.clone(),
};
let service = Service::new(service_params).unwrap();
service.take_snapshot(&*client, NUM_BLOCKS as u64).unwrap();
let manifest = service.manifest().unwrap();
service.init_restore(manifest.clone(), true).unwrap();
// Restore only the state chunks
for hash in &manifest.state_hashes {
let chunk = service.chunk(*hash).unwrap();
service.feed_state_chunk(*hash, &chunk);
}
match service.status() {
RestorationStatus::Ongoing { block_chunks_done, state_chunks_done, .. } => {
assert_eq!(state_chunks_done, manifest.state_hashes.len() as u32);
assert_eq!(block_chunks_done, 0);
},
e => panic!("Snapshot restoration must be ongoing ; {:?}", e),
}
// Abort the restore...
service.abort_restore();
// And try again!
service.init_restore(manifest.clone(), true).unwrap();
match service.status() {
RestorationStatus::Ongoing { block_chunks_done, state_chunks_done, .. } => {
assert_eq!(state_chunks_done, manifest.state_hashes.len() as u32);
assert_eq!(block_chunks_done, 0);
},
e => panic!("Snapshot restoration must be ongoing ; {:?}", e),
}
// Remove the snapshot directory, and restart the restoration
// It shouldn't have restored any previous blocks
fs::remove_dir_all(tempdir.path()).unwrap();
// And try again!
service.init_restore(manifest.clone(), true).unwrap();
match service.status() {
RestorationStatus::Ongoing { block_chunks_done, state_chunks_done, .. } => {
assert_eq!(block_chunks_done, 0);
assert_eq!(state_chunks_done, 0);
},
_ => panic!("Snapshot restoration must be ongoing"),
}
} | const NUM_BLOCKS: u64 = 500;
const NUM_SNAPSHOT_BLOCKS: u64 = 300;
const SNAPSHOT_MODE: PowSnapshot = PowSnapshot { blocks: NUM_SNAPSHOT_BLOCKS, max_restore_blocks: NUM_SNAPSHOT_BLOCKS };
|
query_authorizer.go | package meta
import (
"fmt"
"github.com/influxdata/influxql"
)
// QueryAuthorizer determines whether a user is authorized to execute a given query.
type QueryAuthorizer struct {
Client *Client
}
// NewQueryAuthorizer returns a new instance of QueryAuthorizer.
func NewQueryAuthorizer(c *Client) *QueryAuthorizer |
// AuthorizeQuery authorizes u to execute q on database.
// Database can be "" for queries that do not require a database.
// If no user is provided it will return an error unless the query's first statement is to create
// a root user.
func (a *QueryAuthorizer) AuthorizeQuery(u User, query *influxql.Query, database string) error {
// Special case if no users exist.
if n := a.Client.UserCount(); n == 0 {
// Ensure there is at least one statement.
if len(query.Statements) > 0 {
// First statement in the query must create a user with admin privilege.
cu, ok := query.Statements[0].(*influxql.CreateUserStatement)
if ok && cu.Admin == true {
return nil
}
}
return &ErrAuthorize{
Query: query,
Database: database,
Message: "create admin user first or disable authentication",
}
}
if u == nil {
return &ErrAuthorize{
Query: query,
Database: database,
Message: "no user provided",
}
}
return u.AuthorizeQuery(database, query)
}
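// Hedged usage sketch (not part of the original file): a caller holding a
// *QueryAuthorizer would typically parse the statement and then gate it, e.g.
//
//	q, err := influxql.ParseQuery("SELECT value FROM cpu")
//	if err == nil {
//		err = authorizer.AuthorizeQuery(user, q, "telegraf")
//	}
//
// where `authorizer`, `user` and the "telegraf" database name are assumed
// placeholders, not values defined in this package.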
func (u *UserInfo) AuthorizeQuery(database string, query *influxql.Query) error {
// Admin privilege allows the user to execute all statements.
if u.Admin {
return nil
}
// Check each statement in the query.
for _, stmt := range query.Statements {
// Get the privileges required to execute the statement.
privs, err := stmt.RequiredPrivileges()
if err != nil {
return err
}
// Make sure the user has the privileges required to execute
// each statement.
for _, p := range privs {
if p.Admin {
// Admin privilege already checked so statement requiring admin
// privilege cannot be run.
return &ErrAuthorize{
Query: query,
User: u.Name,
Database: database,
Message: fmt.Sprintf("statement '%s', requires admin privilege", stmt),
}
}
// Use the db name specified by the statement or the db
// name passed by the caller if one wasn't specified by
// the statement.
db := p.Name
if db == "" {
db = database
}
if !u.AuthorizeDatabase(p.Privilege, db) {
return &ErrAuthorize{
Query: query,
User: u.Name,
Database: database,
Message: fmt.Sprintf("statement '%s', requires %s on %s", stmt, p.Privilege.String(), db),
}
}
}
}
return nil
}
// ErrAuthorize represents an authorization error.
type ErrAuthorize struct {
Query *influxql.Query
User string
Database string
Message string
}
// Error returns the text of the error.
func (e ErrAuthorize) Error() string {
if e.User == "" {
return fmt.Sprint(e.Message)
}
return fmt.Sprintf("%s not authorized to execute %s", e.User, e.Message)
}
| {
return &QueryAuthorizer{
Client: c,
}
} |
show_test.go | package cmd
import (
"errors"
"testing"
"github.com/stretchr/testify/assert" | func TestGetSearchQueries(t *testing.T) {
stationsConfig := map[string]utils.Stations{
"Bhaisipati, Ktm, Nepal": {"Bhaisipati, KTM, Nepal", 10495, "kathmandu"},
"Us Embassy, Kathmandu, Nepal": {"US Embassy, Kathmandu, Nepal", 9468, "kathmandu"},
}
config := utils.Config{ApiKey: "abcdef", Stations: stationsConfig}
want := map[string][]int{"kathmandu": {10495, 9468}}
got := groupSearchQueries(config)
assert.Equal(t, want, got)
}
type MockRequest struct {
handleRequest func() (*aqi.Response, error)
}
func (req *MockRequest) Fetch(query string) (*aqi.Response, error) {
return req.handleRequest()
}
func TestGetFilteredStations(t *testing.T) {
res := &aqi.Response{Data: []aqi.Weather{
{
AirQuality: "100", UID: 11, Station: aqi.Station{Name: "Kathmandu"},
},
{
AirQuality: "80", UID: 55, Station: aqi.Station{Name: "Bhaktapur"},
},
}}
req := &MockRequest{}
aqi.HTTPClient = req
req.handleRequest = func() (*aqi.Response, error) {
return res, nil
}
groupedStations := map[string][]int{"kathmandu": {11, 55}}
got, _ := getFilteredResponse(groupedStations)
assert.Equal(t, res, got)
}
func TestGetFilteredStationsReturnsError(t *testing.T) {
req := &MockRequest{}
aqi.HTTPClient = req
req.handleRequest = func() (*aqi.Response, error) {
return nil, errors.New("api is not responsive")
}
groupedStations := map[string][]int{"kathmandu": {11, 55}}
_, err := getFilteredResponse(groupedStations)
want := "api is not responsive"
assert.Equal(t, want, err.Error())
} | "github.com/yankeexe/air-quality-cli/pkg/aqi"
"github.com/yankeexe/air-quality-cli/pkg/utils"
)
|
opentsdb_test.go | package opentsdb
import (
"reflect"
"testing"
"github.com/influxdb/telegraf/testutil"
"github.com/stretchr/testify/require"
)
func TestBuildTagsTelnet(t *testing.T) {
var tagtests = []struct {
ptIn map[string]string
outTags []string
}{
{
map[string]string{"one": "two", "three": "four"},
[]string{"one=two", "three=four"},
},
{
map[string]string{"aaa": "bbb"},
[]string{"aaa=bbb"},
},
{
map[string]string{"one": "two", "aaa": "bbb"},
[]string{"aaa=bbb", "one=two"},
},
{
map[string]string{},
[]string{},
},
} | t.Errorf("\nexpected %+v\ngot %+v\n", tt.outTags, tags)
}
}
}
func TestWrite(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
o := &OpenTSDB{
Host: testutil.GetLocalHost(),
Port: 4242,
Prefix: "prefix.test.",
}
// Verify that we can connect to the OpenTSDB instance
err := o.Connect()
require.NoError(t, err)
// Verify that we can successfully write data to OpenTSDB
err = o.Write(testutil.MockBatchPoints().Points())
require.NoError(t, err)
	// Verify positive and negative test cases of writing data
bp := testutil.MockBatchPoints()
bp.AddPoint(testutil.TestPoint(float64(1.0), "justametric.float"))
bp.AddPoint(testutil.TestPoint(int64(123456789), "justametric.int"))
bp.AddPoint(testutil.TestPoint(uint64(123456789012345), "justametric.uint"))
bp.AddPoint(testutil.TestPoint("Lorem Ipsum", "justametric.string"))
bp.AddPoint(testutil.TestPoint(float64(42.0), "justametric.anotherfloat"))
err = o.Write(bp.Points())
require.NoError(t, err)
} | for _, tt := range tagtests {
tags := buildTags(tt.ptIn)
if !reflect.DeepEqual(tags, tt.outTags) { |
data_utils.py | import os
import sys
try:
import cPickle as pickle
except ImportError:
import _pickle as pickle
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
def _read_data(data_path, train_files):
"""Reads CIFAR-10 format data. Always returns NHWC format.
Returns:
images: np tensor of size [N, H, W, C]
labels: np tensor of size [N]
"""
images, labels = [], []
for file_name in train_files:
print(file_name)
full_name = os.path.join(data_path, file_name)
with open(full_name, 'rb') as finp:
data = pickle.load(finp, encoding='bytes')
batch_images = data[b"data"].astype(np.float32) / 255.0
batch_labels = np.array(data[b"labels"], dtype=np.int32)
images.append(batch_images)
labels.append(batch_labels)
images = np.concatenate(images, axis=0)
labels = np.concatenate(labels, axis=0)
images = np.reshape(images, [-1, 3, 32, 32])
images = np.transpose(images, [0, 2, 3, 1])
return images, labels
def _read_fmnist_data(data_path):
"""Reads Fashion-Mnist data. Returns NHWC format.
Returns:
images: np tensor of size [N, H, W, C]
labels: np tensor of size [N]
"""
images, labels = {},{}
data = input_data.read_data_sets(data_path)
images["train"] = data.train.images.reshape(-1, 1, 28, 28) / 255.0
images["test"] = data.test.images.reshape(-1, 1, 28, 28) / 255.0
images["train"] = np.transpose(images["train"], [0, 2, 3, 1])
images["test"] = np.transpose(images["test"], [0, 2, 3, 1])
labels["train"] = np.array(data.train.labels, dtype = np.int32)
labels["test"] = np.array(data.test.labels, dtype = np.int32)
print("Read and processed data..")
print(labels["test"])
return images, labels
def valid_split_data(images, labels, num_valids=5000):
if num_valids:
images["valid"] = images["train"][-num_valids:]
labels["valid"] = labels["train"][-num_valids:]
images["train"] = images["train"][:-num_valids]
labels["train"] = labels["train"][:-num_valids]
else:
images["valid"], labels["valid"] = None, None
return images, labels
def read_data(data_path, num_valids=5000, dataset = "cifar"):
print("-" * 80)
print("Reading data")
print(os.getcwd())
images, labels = {}, {}
if(dataset == "fmnist"):
print("Fashion-Mnist")
images, labels = _read_fmnist_data(data_path) |
if dataset == "stacking":
images["path"] = data_path
return images, labels
else:
train_files = [
"data_batch_1",
"data_batch_2",
"data_batch_3",
"data_batch_4",
"data_batch_5",
]
test_file = [
"test_batch",
]
images["train"], labels["train"] = _read_data(data_path, train_files)
images, labels = valid_split_data(images, labels, num_valids)
images["test"], labels["test"] = _read_data(data_path, test_file)
print("Prepropcess: [subtract mean], [divide std]")
mean = np.mean(images["train"], axis=(0, 1, 2), keepdims=True)
std = np.std(images["train"], axis=(0, 1, 2), keepdims=True)
print("mean: {}".format(np.reshape(mean * 255.0, [-1])))
print("std: {}".format(np.reshape(std * 255.0, [-1])))
images["train"] = (images["train"] - mean) / std
if num_valids:
images["valid"] = (images["valid"] - mean) / std
images["test"] = (images["test"] - mean) / std
return images, labels | images, labels = valid_split_data(images, labels, num_valids)
return images, labels |
decode.go | package main
import (
"encoding/binary"
"log"
"math"
"github.com/jovandeginste/medisana-bs/structs"
)
func decodePerson(data []byte) (person structs.Person) |
func decodeWeight(data []byte) (weight structs.Weight) {
/*
fixed: byte: 0 [0x1d]
weight: byte: 1 & 2 [kg*100]
timestamp: byte 5-8 Unix timestamp
person: byte 13 [1..8]
*/
weight.Valid = (data[0] == 0x1d)
weight.Weight = float32(decode16(data, 1)) / 100.0
weight.Timestamp = sanitizeTimestamp(decode32(data, 5))
weight.Person = decode8(data, 13)
return
}
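// Hedged worked example (illustrative, not part of the original file): given
// the layout above, a frame with byte 0 = 0x1d, bytes 1-2 = 0x4c 0x1d
// (little-endian 0x1d4c = 7500) and byte 13 = 0x02 decodes to a valid
// measurement of 75.00 kg for person 2.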
func decodeBody(data []byte) (body structs.Body) {
/*
fixed: byte 0 [0x6f]
timestamp: byte 1-4 Unix timestamp
person: byte 5 [1..8]
kcal: byte 6 & 7 first nibble = 0xf, [kcal]
fat: byte 8 & 9 first nibble = 0xf, [fat*10]
tbw: byte 10 & 11 first nibble = 0xf, [tbw*10]
muscle: byte 12 & 13 first nibble = 0xf, [muscle*10]
bone: byte 14 & 15 first nibble = 0xf, [bone*10]
*/
body.Valid = (data[0] == 0x6f)
body.Timestamp = sanitizeTimestamp(decode32(data, 1))
body.Person = decode8(data, 5)
body.Kcal = decode16(data, 6)
body.Fat = smallValue(decode16(data, 8))
body.Tbw = smallValue(decode16(data, 10))
body.Muscle = smallValue(decode16(data, 12))
body.Bone = smallValue(decode16(data, 14))
return
}
func smallValue(value int) float32 {
return float32(0x0fff&value) / 10.0
}
func decode8(data []byte, firstByte int) int {
myUint := data[firstByte]
return int(myUint)
}
func decode16(data []byte, firstByte int) int {
myUint := binary.LittleEndian.Uint16(data[firstByte:(firstByte + 2)])
return int(myUint)
}
func decode32(data []byte, firstByte int) int {
myUint := binary.LittleEndian.Uint32(data[firstByte:(firstByte + 4)])
return int(myUint)
}
func sanitizeTimestamp(timestamp int) int {
retTS := 0
if timestamp+config.TimeOffset < math.MaxInt32 {
retTS = timestamp + config.TimeOffset
} else {
retTS = timestamp
}
if timestamp >= math.MaxInt32 {
retTS = 0
}
return retTS
}
func decodeData(req []byte) {
result := new(structs.PartialMetric)
switch req[0] {
case 0x84:
person := decodePerson(req)
result.Person = person
case 0x1D:
weight := decodeWeight(req)
result.Weight = weight
case 0x6F:
body := decodeBody(req)
result.Body = body
default:
log.Printf("[DECODE] Unhandled data encountered: [% X]\n", req)
}
metricChan <- result
}
| {
/*
fixed: byte 0 [0x84]
person: byte 2 [1..8]
gender: byte 4 (1=male, 2=female) [1|2]
age: byte 5 [year]
size: byte 6 [cm]
activity: byte 8 (0=normal, 3=high) [0|3]
*/
person.Valid = (data[0] == 0x84)
person.Person = decode8(data, 2)
if data[4] == 1 {
person.Gender = "male"
} else {
person.Gender = "female"
}
person.Age = decode8(data, 5)
person.Size = decode8(data, 6)
if data[8] == 3 {
person.Activity = "high"
} else {
person.Activity = "normal"
}
return
} |
PostsRepository.ts | import { getMongoRepository, MongoRepository } from 'typeorm';
import IPostsRepository from '@modules/posts/repositories/IPostsRepository';
import ICreatePostDTO from '@modules/posts/dtos/ICreatePostDTO';
import { ObjectID } from 'mongodb';
import IUpdatePostDTO from '@modules/posts/dtos/IUpdatePostDTO';
import Post from '../schemas/Post';
class PostsRepository implements IPostsRepository {
private ormRepository: MongoRepository<Post>;
constructor() {
this.ormRepository = getMongoRepository(Post, 'mongo');
}
public async create(postData: ICreatePostDTO): Promise<Post> {
const post = this.ormRepository.create(postData);
await this.ormRepository.save(post);
return post;
}
public async findById(id: string): Promise<Post | undefined> {
const findPost = await this.ormRepository.findOne(id);
return findPost;
}
public async findAllByUser(user_id: string): Promise<Post[]> {
const findPosts = await this.ormRepository.find({ where: { user_id } });
return findPosts;
}
public async delete(post_id: string): Promise<void> {
await this.ormRepository.findOneAndDelete({
_id: new ObjectID(post_id),
});
}
public async update({ post_id, content }: IUpdatePostDTO): Promise<Post> {
const post = await this.ormRepository.findOneAndUpdate(
{
_id: new ObjectID(post_id), | },
);
post.value.content = content;
return post.value;
}
}
export default PostsRepository; | },
{
$set: { content, updated_at: new Date() }, |
urlhint.rs | // Copyright 2015-2020 Parity Technologies (UK) Ltd.
// This file is part of Open Ethereum.
// Open Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Open Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Open Ethereum. If not, see <http://www.gnu.org/licenses/>.
//! URLHint Contract
use std::sync::Weak;
use rustc_hex::ToHex;
use mime::{self, Mime};
use mime_guess;
use ethereum_types::{H256, Address};
use registrar::RegistrarClient;
use types::ids::BlockId;
use_contract!(urlhint, "res/urlhint.json");
const COMMIT_LEN: usize = 20;
const GITHUB_HINT: &'static str = "githubhint";
/// GithubHint entries with commit set as `0x0..01` should be treated
/// as a Github Dapp: a downloadable zip file that can be extracted and
/// contains the manifest.json file along with the dapp.
static GITHUB_DAPP_COMMIT: &[u8; COMMIT_LEN] = &[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1];
/// Github-hosted dapp.
#[derive(Debug, PartialEq)]
pub struct GithubApp {
/// Github Account
pub account: String,
/// Github Repository
pub repo: String,
/// Commit on Github
pub commit: [u8; COMMIT_LEN],
/// Dapp owner address
pub owner: Address,
}
impl GithubApp {
/// Returns URL of this Github-hosted dapp package.
pub fn url(&self) -> String {
// Since https fetcher doesn't support redirections we use direct link
// format!("https://github.com/{}/{}/archive/{}.zip", self.account, self.repo, self.commit.to_hex())
format!("https://codeload.github.com/{}/{}/zip/{}", self.account, self.repo, self.commit.to_hex::<String>())
}
fn commit(bytes: &[u8]) -> Option<[u8;COMMIT_LEN]> {
if bytes.len() < COMMIT_LEN {
return None;
}
let mut commit = [0; COMMIT_LEN];
for i in 0..COMMIT_LEN {
commit[i] = bytes[i];
}
Some(commit)
}
}
/// Hash-Addressed Content
#[derive(Debug, PartialEq)]
pub struct Content {
/// URL of the content
pub url: String,
/// MIME type of the content
pub mime: Mime,
/// Content owner address
pub owner: Address,
}
/// Result of resolving id to URL
#[derive(Debug, PartialEq)]
pub enum URLHintResult {
/// Dapp
Dapp(GithubApp),
/// GithubDapp
GithubDapp(Content),
/// Content
Content(Content),
}
/// URLHint Contract interface
pub trait URLHint: Send + Sync {
/// Resolves given id to registrar entry.
fn resolve(&self, id: H256) -> Result<Option<URLHintResult>, String>;
}
/// `URLHintContract` API
pub struct URLHintContract {
client: Weak<dyn RegistrarClient>,
}
impl URLHintContract {
/// Creates new `URLHintContract`
pub fn new(client: Weak<dyn RegistrarClient>) -> Self {
URLHintContract {
client: client,
}
}
}
fn get_urlhint_content(account_slash_repo: String, owner: Address) -> Content {
let mime = guess_mime_type(&account_slash_repo).unwrap_or(mime::APPLICATION_JSON);
Content {
url: account_slash_repo,
mime,
owner,
}
}
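// Interprets the raw `entries` output: a zero owner means no entry, an
// all-zero commit is plain hash-addressed content, the special
// GITHUB_DAPP_COMMIT marker is a Github-hosted dapp archive, and anything
// else is a regular Github dapp resolved from the "account/repo" string.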
fn decode_urlhint_output(
account_slash_repo: String,
commit: [u8; 20],
owner: Address
) -> Option<URLHintResult> {
if owner == Address::zero() {
return None;
}
let commit = GithubApp::commit(&commit);
if commit == Some(Default::default()) {
let content = get_urlhint_content(account_slash_repo, owner);
return Some(URLHintResult::Content(content));
}
if commit == Some(*GITHUB_DAPP_COMMIT) {
let content = get_urlhint_content(account_slash_repo, owner);
return Some(URLHintResult::GithubDapp(content));
}
let (account, repo) = {
let mut it = account_slash_repo.split('/');
match (it.next(), it.next()) {
(Some(account), Some(repo)) => (account.into(), repo.into()),
_ => return None,
}
};
commit.map(|commit| URLHintResult::Dapp(GithubApp {
account: account,
repo: repo,
commit: commit,
owner: owner,
}))
}
impl URLHint for URLHintContract {
fn resolve(&self, id: H256) -> Result<Option<URLHintResult>, String> {
use urlhint::urlhint::functions::entries::{encode_input, decode_output};
let client = self.client.clone().upgrade()
.ok_or_else(|| "Registrar/contract client unavailable".to_owned())?;
let returned_address = client.get_address(GITHUB_HINT, BlockId::Latest)?;
if let Some(address) = returned_address {
let data = encode_input(id);
let output_bytes = client.call_contract(BlockId::Latest, address, data)?;
let (account_slash_repo, commit, owner) = decode_output(&output_bytes)
.map_err(|e| e.to_string())?;
let url_hint = decode_urlhint_output(account_slash_repo, commit, owner);
Ok(url_hint)
} else {
Ok(None)
}
}
}
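// Guesses the MIME type of `url`: an explicit `content-type=` entry in the
// URL fragment takes precedence, otherwise the type is inferred from the
// file extension.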
fn guess_mime_type(url: &str) -> Option<Mime> {
const CONTENT_TYPE: &'static str = "content-type=";
let mut it = url.split('#');
// skip url
let url = it.next();
// get meta headers
let metas = it.next();
if let Some(metas) = metas {
for meta in metas.split('&') {
let meta = meta.to_lowercase();
if meta.starts_with(CONTENT_TYPE) {
return meta[CONTENT_TYPE.len()..].parse().ok();
}
}
}
url.and_then(|url| {
url.split('.').last()
}).and_then(|extension| {
mime_guess::from_ext(extension).first()
})
}
#[cfg(test)]
pub mod tests {
use std::sync::Arc;
use std::str::FromStr;
use rustc_hex::FromHex;
use super::*;
use super::guess_mime_type;
use parking_lot::Mutex;
use ethereum_types::Address;
use bytes::{Bytes, ToPretty};
use call_contract::CallContract;
pub struct FakeRegistrar {
pub calls: Arc<Mutex<Vec<(String, String)>>>,
pub responses: Mutex<Vec<Result<Bytes, String>>>,
}
pub const REGISTRAR: &'static str = "8e4e9b13d4b45cb0befc93c3061b1408f67316b2";
pub const URLHINT: &'static str = "deadbeefcafe0000000000000000000000000000";
impl FakeRegistrar {
pub fn new() -> Self {
FakeRegistrar {
calls: Arc::new(Mutex::new(Vec::new())),
responses: Mutex::new(
vec![
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
Ok(Vec::new())
]
),
}
}
}
impl CallContract for FakeRegistrar {
fn call_contract(
&self,
_block: BlockId,
address: Address,
data: Bytes
) -> Result<Bytes, String> {
self.calls.lock().push((address.to_hex(), data.to_hex()));
let res = self.responses.lock().remove(0);
res
}
}
impl RegistrarClient for FakeRegistrar {
fn registrar_address(&self) -> Option<Address> {
Some(REGISTRAR.parse().unwrap())
}
}
fn h256_from_short_str(s: &str) -> H256 {
let mut bytes = s.as_bytes().to_vec();
bytes.resize(32usize, 0u8);
H256::from_slice(bytes.as_ref())
}
#[test]
fn should_call_registrar_and_urlhint_contracts() {
// given
let registrar = FakeRegistrar::new();
let resolve_result = {
use ethabi::{encode, Token};
encode(&[Token::String(String::new()), Token::FixedBytes(vec![0; 20]), Token::Address([0; 20].into())])
};
registrar.responses.lock()[1] = Ok(resolve_result);
let calls = registrar.calls.clone();
let registrar = Arc::new(registrar) as Arc<dyn RegistrarClient>;
let urlhint = URLHintContract::new(Arc::downgrade(®istrar));
// when
let res = urlhint.resolve(h256_from_short_str("test")).unwrap();
let calls = calls.lock();
let call0 = calls.get(0).expect("Registrar resolve called");
let call1 = calls.get(1).expect("URLHint Resolve called");
// then
assert!(res.is_none());
assert_eq!(call0.0, REGISTRAR);
assert_eq!(call0.1,
"6795dbcd058740ee9a5a3fb9f1cfa10752baec87e09cc45cd7027fd54708271aca300c75000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000000".to_owned()
);
assert_eq!(call1.0, URLHINT);
assert_eq!(call1.1,
"267b69227465737400000000000000000000000000000000000000000000000000000000".to_owned()
);
}
#[test]
fn should_decode_urlhint_output() {
// given
let mut registrar = FakeRegistrar::new();
registrar.responses = Mutex::new(vec![
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
Ok("0000000000000000000000000000000000000000000000000000000000000060ec4c1fe06c808fe3739858c347109b1f5f1ed4b5000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff0000000000000000000000000000000000000000000000000000000000000011657468636f72652f64616f2e636c61696d000000000000000000000000000000".from_hex().unwrap()),
]);
let registrar = Arc::new(registrar) as Arc<dyn RegistrarClient>;
let urlhint = URLHintContract::new(Arc::downgrade(®istrar));
// when
let res = urlhint.resolve(h256_from_short_str("test")).unwrap();
let c: Vec<u8> = "ec4c1fe06c808fe3739858c347109b1f5f1ed4b5".from_hex().unwrap();
// then
assert_eq!(res, Some(URLHintResult::Dapp(GithubApp {
account: "ethcore".into(),
repo: "dao.claim".into(),
commit: GithubApp::commit(&c).unwrap(),
owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
})))
}
#[test]
fn should_decode_urlhint_content_output() {
// given
let mut registrar = FakeRegistrar::new();
registrar.responses = Mutex::new(vec![
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
Ok("00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff000000000000000000000000000000000000000000000000000000000000003c68747470733a2f2f7061726974792e696f2f6173736574732f696d616765732f657468636f72652d626c61636b2d686f72697a6f6e74616c2e706e6700000000".from_hex().unwrap()),
]); | let urlhint = URLHintContract::new(Arc::downgrade(®istrar));
// when
let res = urlhint.resolve(h256_from_short_str("test")).unwrap();
// then
assert_eq!(res, Some(URLHintResult::Content(Content {
url: "https://parity.io/assets/images/ethcore-black-horizontal.png".into(),
mime: mime::IMAGE_PNG,
owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
})))
}
#[test]
fn should_return_valid_url() {
// given
let app = GithubApp {
account: "test".into(),
repo: "xyz".into(),
commit: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
owner: Address::zero(),
};
// when
let url = app.url();
// then
assert_eq!(url, "https://codeload.github.com/test/xyz/zip/000102030405060708090a0b0c0d0e0f10111213".to_owned());
}
#[test]
fn should_guess_mime_type_from_url() {
let url1 = "https://parity.io/parity";
let url2 = "https://parity.io/parity#content-type=image/png";
let url3 = "https://parity.io/parity#something&content-type=image/png";
let url4 = "https://parity.io/parity.png#content-type=image/jpeg";
let url5 = "https://parity.io/parity.png";
assert_eq!(guess_mime_type(url1), None);
assert_eq!(guess_mime_type(url2), Some(mime::IMAGE_PNG));
assert_eq!(guess_mime_type(url3), Some(mime::IMAGE_PNG));
assert_eq!(guess_mime_type(url4), Some(mime::IMAGE_JPEG));
assert_eq!(guess_mime_type(url5), Some(mime::IMAGE_PNG));
}
} |
let registrar = Arc::new(registrar) as Arc<dyn RegistrarClient>; |
test.rs | // OptParseError
mod test_0 {
use optpaerr_5::OptParseError;
#[cfg(feature = "option_argument")]
use optpaerr_5::OptParseErrorKind;
//
#[test]
fn test_invalid_option() {
let err = OptParseError::invalid_option("--abc");
let thing = format!("{}", err);
let expect = "Invalid option: --abc";
assert_eq!(thing, expect);
}
#[test]
fn test_missing_option() {
let err = OptParseError::missing_option("--abc");
let thing = format!("{}", err);
let expect = "Missing option: --abc";
assert_eq!(thing, expect);
}
//
#[cfg(feature = "option_argument")]
#[test]
fn test_attr() {
let err = OptParseError::unexpected_option_argument("--abc", "defg");
assert_eq!(err.kind(), OptParseErrorKind::UnexpectedOptionArgument);
assert_eq!(err.desc1_str(), "--abc");
assert_eq!(err.to_string(), "Unexpected option argument: --abc: defg");
}
#[cfg(feature = "option_argument")]
#[test]
fn test_invalid_option_argument() {
let err = OptParseError::invalid_option_argument("--abc", "invalid float literal");
let thing = format!("{}", err);
let expect = "Invalid option argument: --abc: invalid float literal";
assert_eq!(thing, expect);
}
#[cfg(feature = "option_argument")]
#[test]
fn test_unexpected_option_argument() {
let err = OptParseError::unexpected_option_argument("--abc", "defg");
let thing = format!("{}", err);
let expect = "Unexpected option argument: --abc: defg";
assert_eq!(thing, expect);
}
#[cfg(feature = "option_argument")]
#[test]
fn test_missing_option_argument() {
let err = OptParseError::missing_option_argument("--abc");
let thing = format!("{}", err);
let expect = "Missing option argument: --abc";
assert_eq!(thing, expect);
}
//
#[cfg(feature = "argument")]
#[test]
fn | () {
let err = OptParseError::missing_argument("<input>");
let thing = format!("{}", err);
let expect = "Missing argument: <input>";
assert_eq!(thing, expect);
}
#[cfg(feature = "argument")]
#[test]
fn test_unexpected_argument() {
let err = OptParseError::unexpected_argument("xyz");
let thing = format!("{}", err);
let expect = "Unexpected argument: xyz";
assert_eq!(thing, expect);
}
//
#[cfg(feature = "subcommand")]
#[test]
fn test_invalid_subcommand() {
let err = OptParseError::invalid_subcommand("new");
let thing = format!("{}", err);
let expect = "Invalid subcommand: new";
assert_eq!(thing, expect);
}
#[cfg(feature = "subcommand")]
#[test]
fn test_missing_subcommand() {
let err = OptParseError::missing_subcommand("<command>");
let thing = format!("{}", err);
let expect = "Missing subcommand: <command>";
assert_eq!(thing, expect);
}
//
#[test]
#[cfg(feature = "ambiguous")]
fn test_ambiguous_option() {
let err = OptParseError::ambiguous_option("--abc", "abcd, abce");
let thing = format!("{}", err);
let expect = "Ambiguous option: --abc: abcd, abce";
assert_eq!(thing, expect);
}
#[test]
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
fn test_ambiguous_subcommand() {
let err = OptParseError::ambiguous_subcommand("new", "new-first, new-second");
let thing = format!("{}", err);
let expect = "Ambiguous subcommand: new: new-first, new-second";
assert_eq!(thing, expect);
}
}
// OptParseErrors
mod test_1 {
use optpaerr_5::OptParseError;
use optpaerr_5::OptParseErrors;
//
#[test]
fn test_errors() {
let mut errs = OptParseErrors::new();
assert_eq!(errs.is_empty(), true);
assert_eq!(errs.len(), 0);
//
errs.push(OptParseError::invalid_option("--abc"));
errs.push(OptParseError::missing_option("--abc"));
#[cfg(feature = "option_argument")]
{
errs.push(OptParseError::invalid_option_argument(
"--abc",
"invalid float literal",
));
errs.push(OptParseError::unexpected_option_argument("--abc", "defg"));
errs.push(OptParseError::missing_option_argument("--abc"));
}
#[cfg(feature = "argument")]
{
errs.push(OptParseError::missing_argument("<input>"));
errs.push(OptParseError::unexpected_argument("xyz"));
}
#[cfg(feature = "subcommand")]
{
errs.push(OptParseError::invalid_subcommand("new"));
errs.push(OptParseError::missing_subcommand("<command>"));
}
#[cfg(feature = "ambiguous")]
errs.push(OptParseError::ambiguous_option("--abc", "abcd, abce"));
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
errs.push(OptParseError::ambiguous_subcommand(
"new",
"new-first, new-second",
));
//
assert_eq!(errs.is_empty(), false);
//
let len = 2;
#[cfg(feature = "option_argument")]
let len = len + 3;
#[cfg(feature = "argument")]
let len = len + 2;
#[cfg(feature = "ambiguous")]
let len = len + 1;
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
let len = len + 1;
#[cfg(feature = "subcommand")]
let len = len + 2;
assert_eq!(errs.len(), len);
//
let thing = format!("{}", errs);
let expect = concat!("Invalid option: --abc\n", "Missing option: --abc",);
#[cfg(feature = "option_argument")]
let expect = expect.to_string()
+ concat!(
"\n",
"Invalid option argument: --abc: invalid float literal\n",
"Unexpected option argument: --abc: defg\n",
"Missing option argument: --abc",
);
#[cfg(feature = "argument")]
let expect = expect.to_string()
+ concat!(
"\n",
"Missing argument: <input>\n",
"Unexpected argument: xyz",
);
#[cfg(feature = "subcommand")]
let expect = expect.to_string()
+ concat!(
"\n",
"Invalid subcommand: new\n",
"Missing subcommand: <command>",
);
#[cfg(feature = "ambiguous")]
let expect = expect.to_string() + concat!("\n", "Ambiguous option: --abc: abcd, abce",);
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
let expect =
expect.to_string() + concat!("\n", "Ambiguous subcommand: new: new-first, new-second",);
assert_eq!(thing, expect);
}
#[test]
fn test_errors_append() {
let mut errs = OptParseErrors::new();
errs.push(OptParseError::invalid_option("--abc"));
errs.push(OptParseError::missing_option("--abc"));
#[cfg(feature = "option_argument")]
{
errs.push(OptParseError::invalid_option_argument(
"--abc",
"invalid float literal",
));
errs.push(OptParseError::unexpected_option_argument("--abc", "defg"));
}
#[cfg(feature = "ambiguous")]
errs.push(OptParseError::ambiguous_option("--abc", "abcd, abce"));
//
let mut errs2 = OptParseErrors::new();
errs2.push(OptParseError::invalid_option("--abcd"));
errs2.push(OptParseError::missing_option("--abcd"));
#[cfg(feature = "option_argument")]
errs2.push(OptParseError::missing_option_argument("--abc"));
#[cfg(feature = "argument")]
{
errs2.push(OptParseError::missing_argument("<input>"));
errs2.push(OptParseError::unexpected_argument("xyz"));
}
#[cfg(feature = "subcommand")]
{
errs2.push(OptParseError::invalid_subcommand("new"));
errs2.push(OptParseError::missing_subcommand("<command>"));
}
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
errs2.push(OptParseError::ambiguous_subcommand(
"new",
"new-first, new-second",
));
//
errs.append(errs2);
//
let thing = format!("{}", errs);
let expect1 = concat!("Invalid option: --abc\n", "Missing option: --abc\n",);
#[cfg(feature = "option_argument")]
let expect1 = expect1.to_string()
+ concat!(
"Invalid option argument: --abc: invalid float literal\n",
"Unexpected option argument: --abc: defg\n",
);
#[cfg(feature = "ambiguous")]
let expect1 = expect1.to_string() + concat!("Ambiguous option: --abc: abcd, abce\n",);
let expect2 = concat!("Invalid option: --abcd\n", "Missing option: --abcd\n",);
#[cfg(feature = "option_argument")]
let expect2 = expect2.to_string() + "Missing option argument: --abc\n";
#[cfg(feature = "argument")]
let expect2 = expect2.to_string()
+ concat!("Missing argument: <input>\n", "Unexpected argument: xyz\n",);
#[cfg(feature = "subcommand")]
let expect2 = expect2.to_string()
+ concat!(
"Invalid subcommand: new\n",
"Missing subcommand: <command>\n",
);
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
let expect2 =
expect2.to_string() + concat!("Ambiguous subcommand: new: new-first, new-second\n",);
let expect = expect1.to_string() + &expect2;
assert_eq!(thing + "\n", expect);
}
#[test]
fn test_errors_iter() {
let mut errs = OptParseErrors::new();
errs.push(OptParseError::invalid_option("--abc"));
errs.push(OptParseError::missing_option("--abc"));
#[cfg(feature = "option_argument")]
{
errs.push(OptParseError::invalid_option_argument(
"--abc",
"invalid float literal",
));
errs.push(OptParseError::unexpected_option_argument("--abc", "defg"));
errs.push(OptParseError::missing_option_argument("--abc"));
}
#[cfg(feature = "argument")]
{
errs.push(OptParseError::missing_argument("<input>"));
errs.push(OptParseError::unexpected_argument("xyz"));
}
#[cfg(feature = "subcommand")]
{
errs.push(OptParseError::invalid_subcommand("new"));
errs.push(OptParseError::missing_subcommand("<command>"));
}
#[cfg(feature = "ambiguous")]
errs.push(OptParseError::ambiguous_option("--abc", "abcd, abce"));
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
errs.push(OptParseError::ambiguous_subcommand(
"new",
"new-first, new-second",
));
//
let thing = {
let mut s = String::new();
let mut it = errs.iter();
while let Some(err) = it.next() {
let ss = format!("{}\n", err);
s.push_str(ss.as_str());
}
s
};
let expect = concat!("Invalid option: --abc\n", "Missing option: --abc\n",);
#[cfg(feature = "option_argument")]
let expect = expect.to_string()
+ concat!(
"Invalid option argument: --abc: invalid float literal\n",
"Unexpected option argument: --abc: defg\n",
"Missing option argument: --abc\n",
);
#[cfg(feature = "argument")]
let expect = expect.to_string()
+ concat!("Missing argument: <input>\n", "Unexpected argument: xyz\n",);
#[cfg(feature = "subcommand")]
let expect = expect.to_string()
+ concat!(
"Invalid subcommand: new\n",
"Missing subcommand: <command>\n",
);
#[cfg(feature = "ambiguous")]
let expect = expect.to_string() + concat!("Ambiguous option: --abc: abcd, abce\n",);
#[cfg(all(feature = "ambiguous", feature = "subcommand"))]
let expect =
expect.to_string() + concat!("Ambiguous subcommand: new: new-first, new-second\n",);
assert_eq!(thing, expect);
}
}
| test_missing_argument |
cli.py | """payu.cli
========
Command line interface tools
:copyright: Copyright 2011 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details
"""
import argparse
from distutils import sysconfig
import importlib
import os
import pkgutil
import shlex
import subprocess
import sys
import payu
import payu.envmod as envmod
from payu.models import index as supported_models
import payu.subcommands
# Default configuration
DEFAULT_CONFIG = 'config.yaml'
def parse():
|
def get_model_type(model_type, config):
"""Determine and validate the active model type."""
# If no model type is given, then check the config file
if not model_type:
model_type = config.get('model')
# If there is still no model type, try the parent directory
if not model_type:
model_type = os.path.basename(os.path.abspath(os.pardir))
print('payu: warning: Assuming model is {0} based on parent directory '
'name.'.format(model_type))
if model_type not in supported_models:
print('payu: error: Unknown model {0}'.format(model_type))
sys.exit(-1)
def set_env_vars(init_run=None, n_runs=None, lab_path=None, dir_path=None,
reproduce=None):
"""Construct the environment variables used by payu for resubmissions."""
payu_env_vars = {}
# Setup Python dynamic library link
lib_paths = sysconfig.get_config_vars('LIBDIR')
payu_env_vars['LD_LIBRARY_PATH'] = ':'.join(lib_paths)
if 'PYTHONPATH' in os.environ:
payu_env_vars['PYTHONPATH'] = os.environ['PYTHONPATH']
# Set (or import) the path to the PAYU scripts (PAYU_PATH)
# NOTE: We may be able to use sys.path[0] here.
payu_binpath = os.environ.get('PAYU_PATH')
if not payu_binpath or not os.path.isdir(payu_binpath):
payu_binpath = os.path.dirname(sys.argv[0])
payu_env_vars['PAYU_PATH'] = payu_binpath
# Set the run counters
if init_run:
init_run = int(init_run)
assert init_run >= 0
payu_env_vars['PAYU_CURRENT_RUN'] = init_run
if n_runs:
n_runs = int(n_runs)
assert n_runs > 0
payu_env_vars['PAYU_N_RUNS'] = n_runs
# Import explicit project paths
if lab_path:
payu_env_vars['PAYU_LAB_PATH'] = os.path.normpath(lab_path)
if dir_path:
payu_env_vars['PAYU_DIR_PATH'] = os.path.normpath(dir_path)
if reproduce:
payu_env_vars['PAYU_REPRODUCE'] = reproduce
return payu_env_vars
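# Illustrative sketch only (not part of the original module): a driver could
# build the resubmission environment and hand it to the scheduler helper below,
# e.g.
#
#     pbs_vars = set_env_vars(init_run=1, n_runs=5)
#     submit_job('payu-run', {'queue': 'normal', 'project': 'x00',
#                             'walltime': '1:00:00'}, pbs_vars)
#
# 'payu-run', 'x00' and the resource values are hypothetical placeholders.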
def submit_job(pbs_script, pbs_config, pbs_vars=None):
"""Submit a userscript the scheduler."""
# Initialisation
if pbs_vars is None:
pbs_vars = {}
pbs_flags = []
pbs_queue = pbs_config.get('queue', 'normal')
pbs_flags.append('-q {queue}'.format(queue=pbs_queue))
pbs_project = pbs_config.get('project', os.environ['PROJECT'])
pbs_flags.append('-P {project}'.format(project=pbs_project))
pbs_resources = ['walltime', 'ncpus', 'mem', 'jobfs']
for res_key in pbs_resources:
res_flags = []
res_val = pbs_config.get(res_key)
if res_val:
res_flags.append('{key}={val}'.format(key=res_key, val=res_val))
if res_flags:
pbs_flags.append('-l {res}'.format(res=','.join(res_flags)))
# TODO: Need to pass lab.config_path somehow...
pbs_jobname = pbs_config.get('jobname', os.path.basename(os.getcwd()))
if pbs_jobname:
# PBSPro has a 15-character jobname limit
pbs_flags.append('-N {name}'.format(name=pbs_jobname[:15]))
pbs_priority = pbs_config.get('priority')
if pbs_priority:
pbs_flags.append('-p {priority}'.format(priority=pbs_priority))
pbs_flags.append('-l wd')
pbs_join = pbs_config.get('join', 'n')
if pbs_join not in ('oe', 'eo', 'n'):
print('payu: error: unknown qsub IO stream join setting.')
sys.exit(-1)
else:
pbs_flags.append('-j {join}'.format(join=pbs_join))
# Append environment variables to qsub command
# TODO: Support full export of environment variables: `qsub -V`
pbs_vstring = ','.join('{0}={1}'.format(k, v)
for k, v in pbs_vars.items())
pbs_flags.append('-v ' + pbs_vstring)
# Append any additional qsub flags here
pbs_flags_extend = pbs_config.get('qsub_flags')
if pbs_flags_extend:
pbs_flags.append(pbs_flags_extend)
if not os.path.isabs(pbs_script):
# NOTE: PAYU_PATH is always set if `set_env_vars` was called.
# This is currently always true, but is not explicitly enforced.
# So this conditional check is a bit redundant.
payu_bin = pbs_vars.get('PAYU_PATH', os.path.dirname(sys.argv[0]))
pbs_script = os.path.join(payu_bin, pbs_script)
assert os.path.isfile(pbs_script)
# Set up environment modules here for PBS.
envmod.setup()
envmod.module('load', 'pbs')
# Construct job submission command
cmd = 'qsub {flags} -- {python} {script}'.format(
flags=' '.join(pbs_flags),
python=sys.executable,
script=pbs_script
)
print(cmd)
subprocess.check_call(shlex.split(cmd))
| """Parse the command line inputs and execute the subcommand."""
# Build the list of subcommand modules
modnames = [mod for (_, mod, _)
in pkgutil.iter_modules(payu.subcommands.__path__,
prefix=payu.subcommands.__name__ + '.')
if mod.endswith('_cmd')]
subcmds = [importlib.import_module(mod) for mod in modnames]
# Construct the subcommand parser
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version',
version='payu {0}'.format(payu.__version__))
subparsers = parser.add_subparsers()
for cmd in subcmds:
cmd_parser = subparsers.add_parser(cmd.title, **cmd.parameters)
cmd_parser.set_defaults(run_cmd=cmd.runcmd)
for arg in cmd.arguments:
cmd_parser.add_argument(*arg['flags'], **arg['parameters'])
# Display help if no arguments are provided
if len(sys.argv) == 1:
parser.print_help()
else:
args = vars(parser.parse_args())
run_cmd = args.pop('run_cmd')
run_cmd(**args) |
lib.rs | extern crate proc_macro;
extern crate proc_macro2;
use proc_macro2::Span;
use quote::quote;
use syn::{parse_macro_input, DeriveInput, Ident};
#[proc_macro_derive(AutoConfig, attributes(location))]
pub fn | (input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let ident = &ast.ident;
let generics = &ast.generics;
let where_clause = &ast.generics.where_clause;
let path_attr = ast
.attrs
.iter()
.filter(|a| a.path.get_ident().unwrap().to_string() == "location")
.next()
.unwrap_or_else(|| panic!("missing the #[location = ?] attribute"));
let path_value: String = match path_attr
.parse_meta()
.unwrap_or_else(|_| panic!("Failed to parse meta"))
{
syn::Meta::NameValue(value) => match value.lit {
syn::Lit::Str(i) => i.value().to_string(),
_ => panic!("unsupported type for the location value"),
},
_ => {
panic!("unsupported type for the location value")
}
};
let uppercase_indent = Ident::new(&ident.to_string().to_ascii_uppercase(), Span::call_site());
let expanded: proc_macro2::TokenStream = quote! {
pub static #uppercase_indent: once_cell::sync::Lazy<#ident> = once_cell::sync::Lazy::new(|| {
use std::path::Path;
let path:&str= #path_value;
let path = Path::new(path);
let config = #ident::read_or_create_config(path).unwrap();
config
});
impl #ident #generics #where_clause {
#[allow(unused_variables)]
pub fn save(&self) {
use std::path::Path;
use std::fs;
use log::info;
let path:&str= #path_value;
let path = Path::new(&path);
let ser = serde_yaml::to_string(self).unwrap();
info!("Configuration file saved");
fs::create_dir_all(path.parent().unwrap_or(Path::new("./"))).unwrap();
fs::write(path, ser).unwrap();
}
#[allow(unused_variables)]
pub fn default_string() -> String {
match serde_yaml::to_string(&Self::default()) {
Ok(v) => v,
Err(_) => "serde_yaml error".to_string(),
}
}
#[allow(unused_variables)]
fn read_or_create_config(path: &Path) -> Result<Self, anyhow::Error> {
use std::fs;
use log::error;
use yaml_rust::{YamlLoader, YamlEmitter};
use yaml_rust::Yaml;
use std::mem::discriminant;
use linked_hash_map::LinkedHashMap;
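// Merges two YAML hashes: starts from `latter` (the freshly generated
// defaults) and, for keys present in both with matching types, keeps the
// value from `former` (the existing file), recursing into nested hashes.
// Keys only in `former` are dropped; keys only in `latter` keep their
// default values.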
fn merge_hash(former: &LinkedHashMap<Yaml, Yaml>,latter: &LinkedHashMap<Yaml, Yaml>) -> anyhow::Result<LinkedHashMap<Yaml, Yaml>>{
// if it's empty hash
if latter.len() == 0 {
return Ok(former.clone());
}
let mut res = latter.clone();
for section in latter {
match former.contains_key(section.0) {
// if former dont's have this key
false => {},
// if former have this key, then merge
true => {
let former_value = former.get(section.0).unwrap().clone();
let latter_value = section.1.clone();
// if they are the same type, then merge
if discriminant(&former_value) == discriminant(&latter_value){
match latter_value.as_hash() {
// if it's hash type
Some(_) => {
let res_value = merge_hash(&former_value.as_hash().unwrap(), &latter_value.as_hash().unwrap())?;
res.insert(section.0.clone(), Yaml::Hash(res_value));
},
// if it's not a hash type
None => {
res.insert(section.0.clone(), former_value);
},
}
}
},
};
}
Ok(res)
}
if !path.exists() {
fs::create_dir_all(path.parent().unwrap_or(Path::new("./")))?;
fs::write(path, Self::default_string())?;
};
let data = fs::read(path)?;
let result: Result<Self, serde_yaml::Error> = serde_yaml::from_slice(&data);
let result = match result {
Ok(val) => val,
Err(_) => {
let latter_string = Self::default_string();
let renamed_path = format!("{}.old", path.clone().to_string_lossy());
log::warn!("Failed to deserialize the configuration file.");
log::warn!("This is likely caused by an incompatible configuration file after a version update.");
log::warn!("The original file has been renamed to {}; attempting to merge the configuration files automatically.", renamed_path);
let rename_path = Path::new(&renamed_path);
let former_str = String::from_utf8_lossy(&data);
let former = YamlLoader::load_from_str(&former_str).unwrap()[0].as_hash().unwrap().clone();
let latter = YamlLoader::load_from_str(&latter_string).unwrap()[0].as_hash().unwrap().clone();
let res = yaml_rust::Yaml::Hash(merge_hash(&former,&latter).unwrap());
let mut res_string = String::new();
{
let mut emitter = YamlEmitter::new(&mut res_string);
emitter.dump(&res).unwrap(); // dump the YAML object to a String
}
fs::rename(path, rename_path)?;
fs::write(path, res_string.clone())?;
match serde_yaml::from_slice::<Self>(res_string.as_bytes()) {
Err(_) => {
log::warn!("Configuration merge failed; please merge the configuration files manually");
Self::default()
}
Ok(val) => {
log::warn!("Configuration merge completed, but reviewing the merged file is still recommended");
val
}
}
}
};
Ok(result)
}
}
}.into();
proc_macro::TokenStream::from(expanded)
}
use proc_macro2::TokenStream;
#[proc_macro_attribute]
pub fn basic_derive(
_metadata: proc_macro::TokenStream,
input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let input: TokenStream = input.into();
let output = quote! {
#[derive(Debug, Serialize, Deserialize,Educe)]
#[educe(Default)]
#input
};
output.into()
}
| auto_config_derive |
EvoNN.py | """
A module to implement the evolutionary algorithm for
a feedforward neural network.
Crossover and mutation
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
import math
import csv
import warnings
import numpy as np
import random
import copy
from datetime import datetime
warnings.filterwarnings("ignore") # never print matching warnings
sys.path.append("/Users/Payu/Desktop/EvoNN_package/EvoNN_DNN") #thrid party's libararies, absolute path
"""Constant"""
NUM_LAYERS = 5 # Assertion test number of layers
"""Activation function"""
def sigmoid(x):
return 1/(1+np.exp(-x))
def tanh(x):
return np.tanh(x)
"""Loss function"""
def | (y_predicted, y_true):
y_predicted = y_predicted.reshape((y_predicted.shape[0],))
return np.sqrt(np.mean((y_predicted - y_true)**2))
"""Return predicted value array"""
def Identity(final_layer_values):
return final_layer_values[:]
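# Illustrative usage (assumed, not part of the original module). The NUM_LAYERS
# constant above is asserted against during evolution, so node_per_layer should
# list exactly five hidden layers here:
#
#     evo = Evolver(G=50, node_per_layer=[10, 10, 10, 10, 10], random_state=42)
#     evo.fit(X_train, Y_train, X_val, Y_val)
#     predictions = evo.predict(X_test)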
class Evolver:
def __init__( self,
G=10, # Maximum iteration
early_stopping=10, # Minimum iteration
node_per_layer = [10], # Number of nodes per layer
MU=10, # Number of parents
LAMBDA=10, # Number of offspring
P_m=0.1, # Weight mutation probability
P_mf=0.1, # Function mutation probability
R_m=1.0, # Weight mutation radius
P_c=0.5, # Crossover proportion
P_b=0.01, # Bias mutation probability
R_b=1.0, # Bias mutation radius
elitism=True, # Elitism involves copying a small proportion of the fittest candidates, unchanged, into the next generation.
tournament_size=2, # Selecting individuals from a population
fitness_function=RMSE,
final_activation_function=Identity,
additional_functions=[],
random_state=None,
verbose=0):
self.generation_number = G
self.early_stopping = early_stopping
self.node_per_layer = node_per_layer
self.mu = MU
self.lam = LAMBDA
self.P_M = P_m
self.P_MF = P_mf
self.P_C = P_c
self.R_M = R_m
self.P_B = P_b
self.R_B = R_b
self.ELITISM = elitism
self.TOURNAMENT_SIZE = tournament_size
self.fitness = fitness_function
self.final_activation = final_activation_function
self.functions = {0: sigmoid,
1: tanh} # Using a dictionary to select function
if (random_state is not None):
time_seconds = int(datetime.now().timestamp()) # Python 3.3+ only
np.random.seed(random_state + time_seconds) # add system time to generate random number
random.seed(random_state + time_seconds)
self.verbose = verbose
self.final_population = None
self.best_individual = None
key = len(self.functions) # add additional activation functions
for additional_function in additional_functions:
self.functions[key] = additional_function
key += 1
print("Network has {} layers, they are {}.".format(len(self.node_per_layer), self.node_per_layer))
######################################################################################
"""Train the EvoNN"""
def fit(self, X_train, Y_train, X_val = None, Y_val = None):
#initialization
if (self.verbose >= 1):
print("Input is a {} X {} matrix".format(X_train.shape[0], X_train.shape[1]))
if (X_val is not None):
print("Validation is a {} X {} matrix".format(X_val.shape[0], X_val.shape[1]))
self.X_train = X_train
self.Y_train = Y_train
self.X_val = X_val
self.Y_val = Y_val
self.feature_number = X_train.shape[1]
try:
self.output_number = Y_train.shape[1] # more than one column
except IndexError:
self.output_number = 1
offspring = []
population = self.initialize_population() # "mu" used at start to create a population pool of networks
average_fitness, average_fitness_validate, best_fitness_train, best_fitness_validate, best_individual = self.evaluate_population(population)
validate_timer = 0
best_fitness_validate_of_all_generations = best_fitness_validate # the smaller the better
best_individual_validate = best_individual
curr_generation_number = 1
while ((curr_generation_number < self.generation_number + 1) and (self.early_stopping > validate_timer)):
if (curr_generation_number % 5 == 0):
print("run for {} generations".format(curr_generation_number))
if (self.verbose >= 1):
printout_statement = "Generation "+str(curr_generation_number)
printout_statement += "\tTrain "
printout_statement += "\tbest fitness train: "+str(best_fitness_train)
if (self.X_val is not None):
printout_statement += "\tValidate "
printout_statement += "\tbest fitness: "+str(best_fitness_validate_of_all_generations)
else:
print("Please specify validate set.")
exit()
print(printout_statement)
offspring = self.make_offspring(population) # a list of offspring; use lam to generate
for theOffspring in offspring:
theOffspring.mutate(self.P_M, self.P_MF, self.R_M, self.P_B, self.R_B)
population = [] # generate new population
# Only one best individual
if (self.ELITISM == True):
copy_individual = EvoNN.copyIndividual(best_individual)
population.append(copy_individual)
init_range = 1
else:
init_range = 0
"""Generate next parent generation"""
for i in range(init_range, self.mu):
theOriginal = self.tournament_selection(offspring, self.TOURNAMENT_SIZE)
copy_individual = EvoNN.copyIndividual(theOriginal)
population.append(copy_individual)
average_fitness, average_fitness_validate, best_fitness_train, best_fitness_validate, best_individual = self.evaluate_population(population)
if (self.X_val is not None):
if (best_fitness_validate < best_fitness_validate_of_all_generations):
best_fitness_validate_of_all_generations = best_fitness_validate
best_individual_validate = copy.deepcopy(best_individual)
validate_timer = 0
else:
validate_timer += 1 # if no improvement
else:
print("Please specify validate set.")
exit()
curr_generation_number += 1
self.best_individual = copy.deepcopy(best_individual_validate)
self.final_population = copy.deepcopy(population)
if (self.verbose >= 1):
print(self.best_individual)
######################################################################################
""""Predict on test dataset"""
def predict_proba(self, X_test):
return self.best_individual.get_output(X_test)
######################################################################################
"""Predict on test dataset"""
def predict(self, X_test):
return self.best_individual.get_output(X_test)
######################################################################################
def initialize_population(self):
if (self.verbose >= 1):
print("Initializing population...")
my_population = []
for i in range(self.mu):
theIndividual = EvoNN.newIndividual(self.feature_number, self.output_number, self.final_activation, hidden_size = self.node_per_layer, function_dictionary = self.functions)
my_population.append(theIndividual) # theIndividual is a standalone network
if (self.verbose >= 1):
print("\t\t\t {}".format(my_population[i]))
if (self.verbose >= 1):
print("Population initialized")
return my_population
######################################################################################
def evaluate_population(self, the_population):
if (self.verbose >= 1):
print("Evaluating population")
""""Initialize parameters"""
average_fitness_train = 0.0 # the whole population
average_fitness_validate = 0.0
population_count_train = 0
population_count_validate = 0
best_fitness_train = the_population[0].fitness
best_fitness_validate = the_population[0].fitness
best_individual = the_population[0]
for individual in the_population:
Y_predict = individual.get_output(self.X_train)
fitness_value_train = self.fitness(Y_predict, self.Y_train) # Y_train is a 2d one-hot coding matrix
individual.fitness = fitness_value_train
if not (math.isnan(fitness_value_train)):
average_fitness_train += fitness_value_train
population_count_train += 1
"""best_fitness_train: the smaller the better"""
if (fitness_value_train < best_fitness_train):
best_fitness_train = fitness_value_train
best_individual = individual
if (self.X_val is not None):
Y_val_predict = individual.get_output(self.X_val)
fitness_value_validate = self.fitness(Y_val_predict, self.Y_val)
average_fitness_validate += fitness_value_validate
population_count_validate += 1
else:
print("Please speficy validate dataset")
exit()
Y_val_predict = best_individual.get_output(self.X_val)
best_fitness_validate = self.fitness(Y_val_predict, self.Y_val)
average_fitness_train /= population_count_train
average_fitness_validate /= population_count_validate
if (self.verbose >= 1):
print("Population evaluated")
return average_fitness_train, average_fitness_validate, best_fitness_train, best_fitness_validate, best_individual
######################################################################################
def make_offspring(self, the_population):
if (self.verbose >= 1):
print("Making offspring")
offspring_population = []
for i in range(self.lam):
offspring_population.append(self.create_offspring(the_population))
if (self.verbose >= 1):
print("Made offspring")
return offspring_population
######################################################################################
def create_offspring(self, the_population):
random_chance = random.random()
if (random_chance <= self.P_C): # crossover proportion
parent1 = self.tournament_selection(the_population)
parent2 = self.tournament_selection(the_population)
theIndividual = EvoNN.crossoverIndividual(parent1, parent2)
assert len(theIndividual.hidden_layer_size) == NUM_LAYERS # test number of layers is correct
return theIndividual
else:
original = self.tournament_selection(the_population)
theIndividual = EvoNN.copyIndividual(original)
assert len(theIndividual.hidden_layer_size) == NUM_LAYERS # test number of layers is correct
return theIndividual
######################################################################################
""""Tournament selection"""
def tournament_selection(self, the_population, tournament_size=2):
population_size = len(the_population)
the_tournament = []
for i in range(tournament_size):
the_tournament.append(the_population[random.randint(0, population_size-1)])
"""Initialization"""
best_fitness = the_tournament[0].fitness
best_individual = the_tournament[0]
for i in range(1, tournament_size):
if (the_tournament[i].fitness < best_fitness):
best_fitness = the_tournament[i].fitness
best_individual = the_tournament[i]
return best_individual
##########################################################################################
class EvoNN:
default_function_dictionary = {0: sigmoid,
1: tanh}
##########################################################################################
def __init__(self):
pass
##########################################################################################
""""Generate new standalone feedforward network"""
@classmethod
def newIndividual(cls, input_size, output_size, final_activation_function, hidden_size=[10], function_dictionary = None):
theIndividual = cls() # theIndividual is a new instance of the class
if (function_dictionary is None):
theIndividual.function_dictionary = cls.default_function_dictionary
else:
theIndividual.function_dictionary = function_dictionary
theIndividual.fitness = float('inf') # initial fitness is inf
theIndividual.input_size = input_size
theIndividual.hidden_layer_size = hidden_size # number of layers, a list
num_hidden_layers = len(theIndividual.hidden_layer_size)
theIndividual.hidden_layer_bias = [] # a list of numpy 1d array
theIndividual.hidden_layer_functions = [] # a list of numpy 1d array
for node_size in hidden_size: # hidden_size is a list
theIndividual.hidden_layer_bias.append(np.random.uniform(size=(node_size)))
theIndividual.hidden_layer_functions.append(np.random.randint( len(theIndividual.function_dictionary.keys()), size=node_size ))
theIndividual.output_size = output_size
theIndividual.final_activation = final_activation_function # softmax, probability function
theIndividual.input_to_hidden_matrix = np.random.uniform(size=( input_size, hidden_size[0]))
if (num_hidden_layers > 1):
theIndividual.hidden_to_hidden_matrix = []
for curr_layer in range(num_hidden_layers - 1):
theIndividual.hidden_to_hidden_matrix.append(np.random.uniform(size=(hidden_size[curr_layer], hidden_size[curr_layer + 1])))
theIndividual.hidden_to_output_matrix = np.random.uniform(size=( hidden_size[-1], output_size))
return theIndividual
##########################################################################################
@classmethod
def crossoverIndividual(cls, individual1, individual2):
theIndividual = cls() # the offspring individual
theIndividual.function_dictionary = individual1.function_dictionary
input_size = individual1.input_to_hidden_matrix.shape[0]
output_size = individual1.hidden_to_output_matrix.shape[1]
theIndividual.fitness = float('inf')
theIndividual.input_size = input_size
hidden_size = individual1.hidden_layer_size # a list array
num_hidden_layers = len(hidden_size)
# generate offspring arch
theIndividual.hidden_layer_size = copy.deepcopy(hidden_size)
theIndividual.hidden_layer_bias = []
theIndividual.hidden_layer_functions = []
for node_size in hidden_size:
theIndividual.hidden_layer_bias.append(np.zeros(node_size))
theIndividual.hidden_layer_functions.append(np.zeros(node_size))
theIndividual.output_size = output_size
theIndividual.final_activation = individual1.final_activation
"""crossover activation function and bias"""
for layer in range(num_hidden_layers):
# crossover activation function
probablity_matrix = np.random.uniform(size=(hidden_size[layer]))
theIndividual.hidden_layer_functions[layer][probablity_matrix <= 0.5] = individual1.hidden_layer_functions[layer][probablity_matrix <= 0.5]
theIndividual.hidden_layer_functions[layer][probablity_matrix > 0.5] = individual2.hidden_layer_functions[layer][probablity_matrix > 0.5]
# crossover bias
probablity_matrix = np.random.uniform(size=(hidden_size[layer]))
theIndividual.hidden_layer_bias[layer][probablity_matrix <= 0.5] = individual1.hidden_layer_bias[layer][probablity_matrix <= 0.5]
theIndividual.hidden_layer_bias[layer][probablity_matrix > 0.5] = individual2.hidden_layer_bias[layer][probablity_matrix > 0.5]
"""crossover weight matrix"""
# input to hidden matrix
theIndividual.input_to_hidden_matrix = np.zeros((input_size, hidden_size[0]))
probablity_matrix = np.random.uniform(size=(input_size, hidden_size[0]))
theIndividual.input_to_hidden_matrix[probablity_matrix <= 0.5] = individual1.input_to_hidden_matrix[probablity_matrix <= 0.5]
theIndividual.input_to_hidden_matrix[probablity_matrix > 0.5] = individual2.input_to_hidden_matrix[probablity_matrix > 0.5]
# hidden to hidden matrix
if (num_hidden_layers > 1):
theIndividual.hidden_to_hidden_matrix = []
for curr_layer in range(num_hidden_layers - 1):
new_hidden_to_hidden_matrix = np.zeros((hidden_size[curr_layer], hidden_size[curr_layer + 1]))
probablity_matrix = np.random.uniform(size=(hidden_size[curr_layer], hidden_size[curr_layer + 1]))
new_hidden_to_hidden_matrix[probablity_matrix <= 0.5] = individual1.hidden_to_hidden_matrix[curr_layer][probablity_matrix <= 0.5]
new_hidden_to_hidden_matrix[probablity_matrix > 0.5] = individual2.hidden_to_hidden_matrix[curr_layer][probablity_matrix > 0.5]
theIndividual.hidden_to_hidden_matrix.append(new_hidden_to_hidden_matrix)
# hidden to output matrix
theIndividual.hidden_to_output_matrix = np.zeros((hidden_size[-1], output_size))
probablity_matrix = np.random.uniform(size=((hidden_size[-1], output_size)))
theIndividual.hidden_to_output_matrix[probablity_matrix <= 0.5] = individual1.hidden_to_output_matrix[probablity_matrix <= 0.5]
theIndividual.hidden_to_output_matrix[probablity_matrix > 0.5] = individual2.hidden_to_output_matrix[probablity_matrix > 0.5]
return theIndividual
##########################################################################################
""""Deep copy individual"""
@classmethod
def copyIndividual(cls, theOriginal):
theIndividual = cls()
theIndividual.function_dictionary = theOriginal.function_dictionary
input_size = theOriginal.input_to_hidden_matrix.shape[0]
output_size = theOriginal.hidden_to_output_matrix.shape[1]
theIndividual.fitness = float('inf')
theIndividual.input_size = input_size
theIndividual.hidden_layer_size = copy.deepcopy(theOriginal.hidden_layer_size)
# deep copy bias and activation function
theIndividual.hidden_layer_bias = copy.deepcopy(theOriginal.hidden_layer_bias)
theIndividual.hidden_layer_functions = copy.deepcopy(theOriginal.hidden_layer_functions)
theIndividual.output_size = output_size
theIndividual.final_activation = theOriginal.final_activation
# deep copy weight matrix
theIndividual.input_to_hidden_matrix = copy.deepcopy(theOriginal.input_to_hidden_matrix)
if (len(theIndividual.hidden_layer_size) > 1):
theIndividual.hidden_to_hidden_matrix = copy.deepcopy(theOriginal.hidden_to_hidden_matrix)
theIndividual.hidden_to_output_matrix = copy.deepcopy(theOriginal.hidden_to_output_matrix)
return theIndividual
##########################################################################################
def mutate_matrix(self, the_matrix, probablity, radius):
probablity_matrix = np.random.uniform(size=(the_matrix.shape))
mutation_matrix = np.random.uniform(low = -radius, high=radius, size=(the_matrix.shape))
the_matrix[probablity_matrix <= probablity] += mutation_matrix[probablity_matrix <= probablity]
return the_matrix
##########################################################################################
def mutate(self, P_m, P_mf, R_m, P_b, R_b):
input_size = self.input_size
hidden_size= self.hidden_layer_size # a list
num_hidden_layers = len(self.hidden_layer_size)
output_size = self.hidden_to_output_matrix.shape[1]
""""Mutate input to hidden matrix"""
self.input_to_hidden_matrix = self.mutate_matrix(self.input_to_hidden_matrix, P_m, R_m)
""""Mutate activation function and bias"""
function_number = len(self.function_dictionary.keys())
for layer in range(num_hidden_layers):
# mutate activation function
probablity_matrix = np.random.uniform(size=(hidden_size[layer]))
function_mutation_matrix = np.random.randint(0, function_number, size=(hidden_size[layer])) # high bound is exclusive, so use function_number to reach every function index
self.hidden_layer_functions[layer][probablity_matrix <= P_mf] = function_mutation_matrix[probablity_matrix <= P_mf]
# mutate bias
self.hidden_layer_bias[layer] = self.mutate_matrix(self.hidden_layer_bias[layer], P_b, R_b)
"""Mutate hidden to hidden matrix"""
if (num_hidden_layers > 1):
for layer in range(num_hidden_layers - 1):
self.hidden_to_hidden_matrix[layer] = self.mutate_matrix(self.hidden_to_hidden_matrix[layer], P_m, R_m)
"""Mutate hidden to output matrix"""
self.hidden_to_output_matrix = self.mutate_matrix(self.hidden_to_output_matrix, P_m, R_m)
##########################################################################################
"""Output is a 2d (sample_size, classification_number) array"""
def get_output(self, X_train):
sample_size = X_train.shape[0]
hidden_layer_input = np.dot(X_train, self.input_to_hidden_matrix) + np.tile(self.hidden_layer_bias[0], (sample_size, 1)) # y = wx+b
for i in range(hidden_layer_input.shape[1]): # z = f(wx+b)
functionIndex = self.hidden_layer_functions[0][i]
myFunction = self.function_dictionary[functionIndex]
hidden_layer_input[:, i] = myFunction(hidden_layer_input[:, i])
hidden_layer_matrix = np.copy(hidden_layer_input) # deep copy
if (len(self.hidden_layer_size) > 1):
for i in range(len(self.hidden_layer_size) - 1): # aw+b
hidden_layer_matrix = np.dot(hidden_layer_matrix, self.hidden_to_hidden_matrix[i]) + np.tile(self.hidden_layer_bias[i+1],(sample_size, 1)) # y = wx+b
for j in range(hidden_layer_matrix.shape[1]): # z = f(wx+b)
functionIndex = self.hidden_layer_functions[i+1][j]
myFunction = self.function_dictionary[functionIndex]
hidden_layer_matrix[:, j] = myFunction(hidden_layer_matrix[:, j])
output_layer_input = np.dot(hidden_layer_matrix, self.hidden_to_output_matrix)
output = self.final_activation(output_layer_input)
return output
| RMSE |
sync-rwlock-write-mode-shouldnt-escape.rs | // error-pattern: reference is not valid outside of its lifetime
use std;
import std::sync;
fn main() { | y = some(write_mode);
}
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).write { }
} | let x = ~sync::rwlock();
let mut y = none;
do x.write_downgrade |write_mode| { |
contacts_api.go | /*
* DialMyCalls API
*
* The DialMyCalls API
*
* OpenAPI spec version: 2.0.1
* Contact: [email protected]
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dialmycalls
import (
"strings"
"fmt"
"errors"
"net/url"
"encoding/json"
)
type Contacts struct {
Configuration Configuration
}
func | () *Contacts {
configuration := NewConfiguration()
return &Contacts{
Configuration: *configuration,
}
}
func NewContactsWithBasePath(basePath string) *Contacts {
configuration := NewConfiguration()
configuration.BasePath = basePath
return &Contacts{
Configuration: *configuration,
}
}
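// Illustrative usage (assumed, not part of the generated client):
//
//     api := NewContactsWithBasePath("https://api.dialmycalls.com/2.0")
//     contact, resp, err := api.GetContactById("CONTACT_ID")
//
// The base path and "CONTACT_ID" are placeholders for illustration only.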
/**
* Add Contact
* Add a contact to your contact list. <br><br> Returns a contact object on success, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X POST -d \"{\\\"phone\\\": \\\"5555555555\\\"}\" https://[email protected]/2.0/contact ```
*
* @param createContactParameters Request body
* @return *Object
*/
func (a Contacts) CreateContact(createContactParameters CreateContactParameters) (*Object, *APIResponse, error) {
var httpMethod = "Post"
// create path and map variables
path := a.Configuration.BasePath + "/contact"
// verify the required parameter 'createContactParameters' is set
if &createContactParameters == nil {
return new(Object), nil, errors.New("Missing required parameter 'createContactParameters' when calling Contacts->CreateContact")
}
headerParams := make(map[string]string)
queryParams := url.Values{}
formParams := make(map[string]string)
var postBody interface{}
var fileName string
var fileBytes []byte
// authentication (api_key) required
// set key with prefix in header
headerParams["X-Auth-ApiKey"] = a.Configuration.GetAPIKeyWithPrefix("X-Auth-ApiKey")
// add default headers if any
for key := range a.Configuration.DefaultHeader {
headerParams[key] = a.Configuration.DefaultHeader[key]
}
// to determine the Content-Type header
localVarHttpContentTypes := []string{ "application/json", "application/xml", }
// set Content-Type header
localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
headerParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{
"application/json",
"application/xml",
}
// set Accept header
localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
headerParams["Accept"] = localVarHttpHeaderAccept
}
// body params
postBody = &createContactParameters
var successPayload = new(Object)
httpResponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, fileName, fileBytes)
if err != nil {
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
err = json.Unmarshal(httpResponse.Body(), &successPayload)
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
/**
* Delete Contact
* Delete a contact from your contact list. <br><br> Returns the following if a valid identifier was provided, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X DELETE https://[email protected]/2.0/contact/$CONTACT_ID ```
*
* @param contactId ContactId
* @return *Object
*/
func (a Contacts) DeleteContactById(contactId string) (*Object, *APIResponse, error) {
var httpMethod = "Delete"
// create path and map variables
path := a.Configuration.BasePath + "/contact/{ContactId}"
path = strings.Replace(path, "{"+"ContactId"+"}", fmt.Sprintf("%v", contactId), -1)
// verify the required parameter 'contactId' is set
if &contactId == nil {
return new(Object), nil, errors.New("Missing required parameter 'contactId' when calling Contacts->DeleteContactById")
}
headerParams := make(map[string]string)
queryParams := url.Values{}
formParams := make(map[string]string)
var postBody interface{}
var fileName string
var fileBytes []byte
// authentication (api_key) required
// set key with prefix in header
headerParams["X-Auth-ApiKey"] = a.Configuration.GetAPIKeyWithPrefix("X-Auth-ApiKey")
// add default headers if any
for key := range a.Configuration.DefaultHeader {
headerParams[key] = a.Configuration.DefaultHeader[key]
}
// to determine the Content-Type header
localVarHttpContentTypes := []string{ "application/json", "application/xml", }
// set Content-Type header
localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
headerParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{
"application/json",
"application/xml",
}
// set Accept header
localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
headerParams["Accept"] = localVarHttpHeaderAccept
}
var successPayload = new(Object)
httpResponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, fileName, fileBytes)
if err != nil {
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
err = json.Unmarshal(httpResponse.Body(), &successPayload)
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
/**
* Get Contact
 * Retrieve a contact from your contact list. <br><br> Returns a contact object if a valid identifier was provided, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://[email protected]/2.0/contact/$CONTACT_ID ```
*
* @param contactId ContactId
* @return *Object
*/
func (a Contacts) GetContactById(contactId string) (*Object, *APIResponse, error) {
var httpMethod = "Get"
// create path and map variables
path := a.Configuration.BasePath + "/contact/{ContactId}"
path = strings.Replace(path, "{"+"ContactId"+"}", fmt.Sprintf("%v", contactId), -1)
// verify the required parameter 'contactId' is set
if contactId == "" {
return new(Object), nil, errors.New("Missing required parameter 'contactId' when calling Contacts->GetContactById")
}
headerParams := make(map[string]string)
queryParams := url.Values{}
formParams := make(map[string]string)
var postBody interface{}
var fileName string
var fileBytes []byte
// authentication (api_key) required
// set key with prefix in header
headerParams["X-Auth-ApiKey"] = a.Configuration.GetAPIKeyWithPrefix("X-Auth-ApiKey")
// add default headers if any
for key := range a.Configuration.DefaultHeader {
headerParams[key] = a.Configuration.DefaultHeader[key]
}
// to determine the Content-Type header
localVarHttpContentTypes := []string{ "application/json", "application/xml", }
// set Content-Type header
localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
headerParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{
"application/json",
"application/xml",
}
// set Accept header
localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
headerParams["Accept"] = localVarHttpHeaderAccept
}
var successPayload = new(Object)
httpResponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, fileName, fileBytes)
if err != nil {
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
err = json.Unmarshal(httpResponse.Body(), &successPayload)
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
/**
* List Contacts
* Retrieve a list of contacts. <br><br> Returns a list of contact objects. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://[email protected]/2.0/contacts ```
*
* @param range_ Range (ie \"records=201-300\") of contacts requested
* @return *Object
*/
func (a Contacts) GetContacts(range_ string) (*Object, *APIResponse, error) {
var httpMethod = "Get"
// create path and map variables
path := a.Configuration.BasePath + "/contacts"
headerParams := make(map[string]string)
queryParams := url.Values{}
formParams := make(map[string]string)
var postBody interface{}
var fileName string
var fileBytes []byte
// authentication (api_key) required
// set key with prefix in header
headerParams["X-Auth-ApiKey"] = a.Configuration.GetAPIKeyWithPrefix("X-Auth-ApiKey")
// add default headers if any
for key := range a.Configuration.DefaultHeader {
headerParams[key] = a.Configuration.DefaultHeader[key]
}
// to determine the Content-Type header
localVarHttpContentTypes := []string{ "application/json", "application/xml", }
// set Content-Type header
localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
headerParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{
"application/json",
"application/xml",
}
// set Accept header
localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
headerParams["Accept"] = localVarHttpHeaderAccept
}
// header params "Range"
headerParams["Range"] = range_
var successPayload = new(Object)
httpResponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, fileName, fileBytes)
if err != nil {
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
err = json.Unmarshal(httpResponse.Body(), &successPayload)
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
/**
* List Contacts in Group
* Retrieve a list of contacts in a contact group. <br><br> Returns a list of contact objects. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://[email protected]/2.0/contacts/$GROUP_ID ```
*
* @param groupId GroupId
* @return *Object
*/
func (a Contacts) GetContactsByGroupId(groupId string) (*Object, *APIResponse, error) {
var httpMethod = "Get"
// create path and map variables
path := a.Configuration.BasePath + "/contacts/{GroupId}"
path = strings.Replace(path, "{"+"GroupId"+"}", fmt.Sprintf("%v", groupId), -1)
// verify the required parameter 'groupId' is set
if groupId == "" {
return new(Object), nil, errors.New("Missing required parameter 'groupId' when calling Contacts->GetContactsByGroupId")
}
headerParams := make(map[string]string)
queryParams := url.Values{}
formParams := make(map[string]string)
var postBody interface{}
var fileName string
var fileBytes []byte
// authentication (api_key) required
// set key with prefix in header
headerParams["X-Auth-ApiKey"] = a.Configuration.GetAPIKeyWithPrefix("X-Auth-ApiKey")
// add default headers if any
for key := range a.Configuration.DefaultHeader {
headerParams[key] = a.Configuration.DefaultHeader[key]
}
// to determine the Content-Type header
localVarHttpContentTypes := []string{ "application/json", "application/xml", }
// set Content-Type header
localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
headerParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{
"application/json",
"application/xml",
}
// set Accept header
localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
headerParams["Accept"] = localVarHttpHeaderAccept
}
var successPayload = new(Object)
httpResponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, fileName, fileBytes)
if err != nil {
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
err = json.Unmarshal(httpResponse.Body(), &successPayload)
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
/**
* Update Contact
* Update an existing contact in your contact list. <br><br> Returns a contact object if a valid identifier was provided and input validation passed, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X PUT -d \"{\\\"phone\\\": \\\"5555555555\\\"}\" https://[email protected]/2.0/contact/$CONTACT_ID ```
*
* @param updateContactByIdParameters Request body
* @param contactId ContactId
* @return *Object
*/
func (a Contacts) UpdateContactById(updateContactByIdParameters UpdateContactByIdParameters, contactId string) (*Object, *APIResponse, error) {
var httpMethod = "Put"
// create path and map variables
path := a.Configuration.BasePath + "/contact/{ContactId}"
path = strings.Replace(path, "{"+"ContactId"+"}", fmt.Sprintf("%v", contactId), -1)
// verify the required parameter 'updateContactByIdParameters' is set
if &updateContactByIdParameters == nil {
return new(Object), nil, errors.New("Missing required parameter 'updateContactByIdParameters' when calling Contacts->UpdateContactById")
}
// verify the required parameter 'contactId' is set
if contactId == "" {
return new(Object), nil, errors.New("Missing required parameter 'contactId' when calling Contacts->UpdateContactById")
}
headerParams := make(map[string]string)
queryParams := url.Values{}
formParams := make(map[string]string)
var postBody interface{}
var fileName string
var fileBytes []byte
// authentication (api_key) required
// set key with prefix in header
headerParams["X-Auth-ApiKey"] = a.Configuration.GetAPIKeyWithPrefix("X-Auth-ApiKey")
// add default headers if any
for key := range a.Configuration.DefaultHeader {
headerParams[key] = a.Configuration.DefaultHeader[key]
}
// to determine the Content-Type header
localVarHttpContentTypes := []string{ "application/json", "application/xml", }
// set Content-Type header
localVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
headerParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{
"application/json",
"application/xml",
}
// set Accept header
localVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
headerParams["Accept"] = localVarHttpHeaderAccept
}
// body params
postBody = &updateContactByIdParameters
var successPayload = new(Object)
httpResponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, fileName, fileBytes)
if err != nil {
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
err = json.Unmarshal(httpResponse.Body(), &successPayload)
return successPayload, NewAPIResponse(httpResponse.RawResponse), err
}
| NewContacts |
twopulsephotonecho_spyrelet.py | import numpy as np
import pyqtgraph as pg
import time
import csv
import os
from PyQt5.Qsci import QsciScintilla, QsciLexerPython
import matplotlib.pyplot as plt
from spyre import Spyrelet, Task, Element
from spyre.widgets.task import TaskWidget
from spyre.plotting import LinePlotWidget
from spyre.widgets.rangespace import Rangespace
from spyre.widgets.param_widget import ParamWidget
from spyre.widgets.repository_widget import RepositoryWidget
from lantz import Q_
from lantz.drivers.keysight import Arbseq_Class
from lantz.drivers.keysight.seqbuild import SeqBuild
from lantz.drivers.keysight import Keysight_33622A
class TwoPulsePhotonEcho(Spyrelet):
requires = {
'fungen': Keysight_33622A
# 'srs': SRS900
}
qutag = None
xs = np.array([])
ys= np.array([])
hist=[]
def configureQutag(self):
qutagparams = self.qutag_params.widget.get()
start = qutagparams['Start Channel']
stop = qutagparams['Stop Channel']
##True = rising edge, False = falling edge. Final value is threshold voltage
self.qutag.setSignalConditioning(start,self.qutag.SIGNALCOND_MISC,True,1)
self.qutag.setSignalConditioning(stop,self.qutag.SIGNALCOND_MISC,True,0.1)
self.qutag.enableChannels((start,stop))
def createHistogram(self,stoparray, timebase, bincount, totalWidth, tau):
lowBound=1.9*tau
highBound=2.1*tau
hist = [0]*bincount
for stoptime in stoparray:
# convert the stop time (in quTAG timebase units) into a bin index over totalWidth
binNumber = int(stoptime*timebase*bincount/(totalWidth))
if binNumber >= bincount:
print('error: stop event fell outside the histogram range')
continue
else:
hist[binNumber]+=1
out_name = "D:\\Data\\12.18.2019\\230_20dB"
x=[]
for i in range(bincount):
x.append(i*totalWidth/bincount)
np.savez(os.path.join(out_name,str(int(round(tau*1e6,0)))),hist,x)
print('Data stored under File Name: ' + str(int(round(tau*1e6,0))))
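# createHistogram writes one .npz archive under out_name per call; the file
# name is the pulse separation tau rounded to the nearest microsecond, and the
# archive holds the bin counts together with the corresponding bin start times.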
def createPlottingHist(self, stoparray, timebase, bincount, totalWidth):
for stoptime in stoparray:
binNumber = int(stoptime*timebase*bincount/(totalWidth))
if binNumber >= bincount:
continue
else:
self.hist[binNumber]+=1
def | (self, bincount):
self.hist=[0]*bincount
@Task()
def startpulse(self, timestep=1e-9):
params = self.pulse_parameters.widget.get()
tau = params['start tau']
period = params['period'].magnitude
repeat_unit = params['repeat unit'].magnitude
pulse_width = params['pulse width'].magnitude
buffer_time = params['buffer time'].magnitude
shutter_offset = params['shutter offset'].magnitude
wholeRange=params['measuring range'].magnitude
self.configureQutag()
for i in range(int((params['stop tau']-params['start tau'])/params['step tau'])+1):
xs = np.array([])
ys= np.array([])
hist=[]
self.dataset.clear()
self.fungen.output[1] = 'OFF'
self.fungen.output[2] = 'OFF'
self.fungen.clear_mem(1)
self.fungen.clear_mem(2)
self.fungen.wait()
# self.srs.module_reset[5]
# self.srs.SIM928_voltage[5]=params['srs bias'].magnitude+0.000000001*i
# self.srs.SIM928_on[5]
## build pulse sequence for AWG channel 1
chn1buffer = Arbseq_Class('chn1buffer', timestep)
chn1buffer.delays = [0]
chn1buffer.heights = [0]
chn1buffer.widths = [repeat_unit]
chn1buffer.totaltime = repeat_unit
chn1buffer.nrepeats = buffer_time/repeat_unit
chn1buffer.repeatstring = 'repeat'
chn1buffer.markerstring = 'lowAtStart'
chn1buffer.markerloc = 0
chn1bufferwidth = repeat_unit*chn1buffer.nrepeats
chn1buffer.create_sequence()
chn1pulse = Arbseq_Class('chn1pulse', timestep)
chn1pulse.delays = [0]
chn1pulse.heights = [1]
chn1pulse.widths = [pulse_width]
chn1pulse.totaltime = pulse_width
chn1pulse.nrepeats = 0
chn1pulse.repeatstring = 'once'
chn1pulse.markerstring = 'highAtStartGoLow'
chn1pulse.markerloc = 0
chn1pulsewidth = pulse_width
chn1pulse.create_sequence()
chn1dc = Arbseq_Class('chn1dc', timestep)
chn1dc.delays = [0]
chn1dc.heights = [0]
chn1dc.widths = [repeat_unit]
chn1dc.totaltime = repeat_unit
chn1dc.repeatstring = 'repeat'
chn1dc.markerstring = 'lowAtStart'
chn1dc.markerloc = 0
chn1dcrepeats = int((tau.magnitude-1.5*pulse_width)/repeat_unit)
chn1dc.nrepeats = chn1dcrepeats
chn1dcwidth = repeat_unit*chn1dcrepeats
print(tau.magnitude, pulse_width, chn1dcrepeats)
chn1dc.create_sequence()
chn1pulse2 = Arbseq_Class('chn1pulse2', timestep)
chn1pulse2.delays = [0]
chn1pulse2.heights = [0]
chn1pulse2.widths = [pulse_width*2]
chn1pulse2.totaltime = pulse_width*2
chn1pulse2width = pulse_width*2
chn1pulse2.nrepeats = 0
chn1pulse2.repeatstring = 'once'
chn1pulse2.markerstring = 'lowAtStart'
chn1pulse2.markerloc = 0
chn1pulse2.create_sequence()
chn1pulse3 = Arbseq_Class('chn1pulse3', timestep)
chn1pulse3.delays = [0]
chn1pulse3.heights = [0]
chn1pulse3.widths = [repeat_unit]
chn1pulse3.totaltime = repeat_unit
chn1pulse3width = shutter_offset
chn1pulse3.nrepeats = shutter_offset/repeat_unit
chn1pulse3.repeatstring = 'repeat'
chn1pulse3.markerstring = 'lowAtStart'
chn1pulse3.markerloc = 0
chn1pulse3.create_sequence()
chn1dc2 = Arbseq_Class('chn1dc2', timestep)
chn1dc2.delays = [0]
chn1dc2.heights = [0]
chn1dc2.widths = [repeat_unit]
chn1dc2.totaltime = repeat_unit
chn1dc2.repeatstring = 'repeat'
chn1dc2.markerstring = 'lowAtStart'
chn1dc2repeats = int((period-chn1bufferwidth-chn1pulsewidth-chn1dcwidth-chn1pulse2width-chn1pulse3width)/repeat_unit)
chn1dc2.nrepeats = chn1dc2repeats
chn1dc2.markerloc = 0
#print((chn1dc2repeats*params['repeat unit'].magnitude) + tau.magnitude + params['pulse width'].magnitude)
print(params['repeat unit'].magnitude*chn1dc2.nrepeats)
chn1dc2.create_sequence()
## build pulse sequence for AWG channel 2
chn2buffer = Arbseq_Class('chn2buffer', timestep)
chn2buffer.delays = [0]
chn2buffer.heights = [1]
chn2buffer.widths = [repeat_unit]
chn2buffer.totaltime = repeat_unit
chn2buffer.nrepeats = buffer_time/repeat_unit
chn2buffer.repeatstring = 'repeat'
chn2buffer.markerstring = 'lowAtStart'
chn2buffer.markerloc = 0
chn2bufferwidth = repeat_unit*chn2buffer.nrepeats
chn2buffer.create_sequence()
chn2pulse1 = Arbseq_Class('chn2pulse1', timestep)
chn2pulse1.delays = [0]
chn2pulse1.heights = [1]
chn2pulse1.widths = [pulse_width]
chn2pulse1.totaltime = pulse_width
chn2pulse1width = pulse_width
chn2pulse1.nrepeats = 0
chn2pulse1.repeatstring = 'once'
chn2pulse1.markerstring = 'highAtStartGoLow'
chn2pulse1.markerloc = 0
chn2pulse1.create_sequence()
chn2dc1 = Arbseq_Class('chn2dc1', timestep)
chn2dc1.delays = [0]
chn2dc1.heights = [1]
chn2dc1.widths = [repeat_unit]
chn2dc1.totaltime = repeat_unit
chn2dc1.repeatstring = 'repeat'
chn2dc1.markerstring = 'lowAtStart'
chn2dc1.markerloc = 0
chn2dc1repeats = int((tau.magnitude-1.5*pulse_width)/repeat_unit)
chn2dc1.nrepeats = chn2dc1repeats
chn2dc1width = repeat_unit*chn2dc1repeats
chn2dc1.create_sequence()
chn2pulse2 = Arbseq_Class('chn2pulse2', timestep)
chn2pulse2.delays = [0]
chn2pulse2.heights = [1]
chn2pulse2.widths = [pulse_width*2]
chn2pulse2.totaltime = pulse_width*2
chn2pulse2width = pulse_width*2
chn2pulse2.nrepeats = 0
chn2pulse2.repeatstring = 'once'
chn2pulse2.markerstring = 'lowAtStart'
chn2pulse2.markerloc = 0
chn2pulse2.create_sequence()
chn2pulse3 = Arbseq_Class('chn2pulse3', timestep)
chn2pulse3.delays = [0]
chn2pulse3.heights = [1]
chn2pulse3.widths = [repeat_unit]
chn2pulse3.totaltime = repeat_unit
chn2pulse3width = shutter_offset
chn2pulse3.nrepeats = shutter_offset/repeat_unit
chn2pulse3.repeatstring = 'repeat'
chn2pulse3.markerstring = 'lowAtStart'
chn2pulse3.markerloc = 0
chn2pulse3.create_sequence()
chn2dc2 = Arbseq_Class('chn2dc2', timestep)
chn2dc2.delays = [0]
chn2dc2.heights = [-1]
chn2dc2.widths = [repeat_unit]
chn2dc2.totaltime = repeat_unit
chn2dc2.repeatstring = 'repeat'
chn2dc2.markerstring = 'lowAtStart'
chn2dc2repeats = int((period-chn2bufferwidth-chn2pulse1width-chn2dc1width-chn2pulse2width-chn2pulse3width)/repeat_unit)
chn2dc2.nrepeats = chn2dc2repeats
chn2dc2.markerloc = 0
print(repeat_unit*chn2dc2.nrepeats)
chn2dc2.create_sequence()
self.fungen.send_arb(chn1buffer, 1)
self.fungen.send_arb(chn1pulse, 1)
self.fungen.send_arb(chn1dc, 1)
self.fungen.send_arb(chn1pulse2, 1)
self.fungen.send_arb(chn1pulse3, 1)
self.fungen.send_arb(chn1dc2, 1)
self.fungen.send_arb(chn2buffer, 2)
self.fungen.send_arb(chn2pulse1, 2)
self.fungen.send_arb(chn2dc1, 2)
self.fungen.send_arb(chn2pulse2, 2)
self.fungen.send_arb(chn2pulse3, 2)
self.fungen.send_arb(chn2dc2, 2)
seq = [chn1buffer, chn1pulse, chn1dc, chn1pulse2, chn1pulse3, chn1dc2]
seq2 = [chn2buffer, chn2pulse1, chn2dc1, chn2pulse2, chn2pulse3, chn2dc2]
self.fungen.create_arbseq('twoPulse', seq, 1)
self.fungen.create_arbseq('shutter', seq2, 2)
self.fungen.wait()
self.fungen.voltage[1] = params['pulse height'].magnitude+0.000000000001*i
self.fungen.voltage[2] = 7.1+0.0000000000001*i
print(self.fungen.voltage[1], self.fungen.voltage[2])
self.fungen.output[2] = 'OFF'
self.fungen.trigger_delay(1,shutter_offset)
self.fungen.sync()
time.sleep(1)
self.fungen.output[1] = 'ON'
#self.fungen.output[2] = 'OFF'
time.sleep(1)
##Qutag Part
self.configureQutag()
qutagparams = self.qutag_params.widget.get()
lost = self.qutag.getLastTimestamps(True) # clear Timestamp buffer
stoptimestamp = 0
synctimestamp = 0
bincount = qutagparams['Bin Count']
timebase = self.qutag.getTimebase()
start = qutagparams['Start Channel']
stop = qutagparams['Stop Channel']
stoparray = []
tempStopArray = []
histCounter = 0
quenchCounter = 0
self.initHist(bincount)
for j in range(int(self.exp_parameters.widget.get()['# of Passes'])):
lost = self.qutag.getLastTimestamps(True)
time.sleep(period)
timestamps = self.qutag.getLastTimestamps(True)
tstamp = timestamps[0] # array of timestamps
tchannel = timestamps[1] # array of channels
values = timestamps[2] # number of recorded timestamps
# print(values)
for k in range(values):
# output all stop events together with the latest start event
# if tchannel[k] == start:
# synctimestamp = tstamp[k]
if tchannel[k]==stop:
#stoptimestamp = tstamp[k]
# if tstamp[k]*1e-6>2*tau.magnitude-1 and tstamp[k]*1e-6<2*tau.magnitude+2:
stoparray.append(tstamp[k])
#tempStopArray.append(stoptimestamp)
# histCounter+=1
# if histCounter%20==0:
# self.createPlottingHist(tempStopArray, timebase, bincount,qutagparams['Total Hist Width Multiplier']*tau.magnitude)
# self.xs = np.asarray(range(len(self.hist)))
# self.ys=np.asarray(self.hist)
# values = {
# 't': np.asarray(range(len(self.hist))),
# 'y': np.asarray(self.hist),
# }
# self.startpulse.acquire(values)
# tempStopArray = []
# TODO: quench protection
# if self.srs.SIM928_voltage[???] >= qunech threshold and quenchCounter<=10:
# self.srs.SIM928_off[6]
# time.sleep(period*10)
# self.srs.SIM928_on[6]
# quenchCounter+=1
# elif quenchCounter>10:
# print('quenched more than 10 times')
# break
# else:
# continue
self.createHistogram(stoparray, timebase, bincount,wholeRange,tau.magnitude)
print("here")
tau+=params['step tau']
#self.fungen.output[1] = 'OFF'
@Task()
def qutagInit(self):
print('qutag successfully initialized')
@Element(name='QuTAG Parameters')
def qutag_params(self):
params = [
# ('arbname', {'type': str, 'default': 'arbitrary_name'}),,
('Start Channel', {'type': int, 'default': 0}),
('Stop Channel', {'type': int, 'default': 2}),
('Total Hist Width Multiplier', {'type': int, 'default': 5}),
('Bin Count', {'type': int, 'default': 1000})
]
w = ParamWidget(params)
return w
@Element(name='Experiment Parameters')
def exp_parameters(self):
params = [
# ('arbname', {'type': str, 'default': 'arbitrary_name'}),,
('# of Passes', {'type': int, 'default': 100}),
# ('File Name', {'type': str})
]
w = ParamWidget(params)
return w
@Element(name='Histogram')
def averaged(self):
p = LinePlotWidget()
p.plot('Channel 1')
return p
@averaged.on(startpulse.acquired)
def averaged_update(self, ev):
w = ev.widget
xs = self.xs
ys = self.ys
w.set('Channel 1', xs=xs, ys=ys)
return
@Element(name='Pulse parameters')
def pulse_parameters(self):
params = [
# ('arbname', {'type': str, 'default': 'arbitrary_name'}),,
('pulse height', {'type': float, 'default': 3, 'units':'V'}),
('pulse width', {'type': float, 'default': 300e-9, 'units':'s'}),
('period', {'type': float, 'default': 0.1, 'units':'s'}),
('repeat unit', {'type': float, 'default': 50e-9, 'units':'s'}),
('start tau', {'type': float, 'default': 3e-6, 'units':'s'}),
('stop tau', {'type': float, 'default': 10e-6, 'units':'s'}),
('step tau', {'type': float, 'default': 1e-6, 'units':'s'}),
# ('srs bias', {'type': float, 'default': 1.2, 'units':'V'}),
('shutter offset', {'type': float, 'default': 500e-9, 'units':'s'}),
('measuring range', {'type': float, 'default': 70e-6, 'units':'s'}),
('buffer time', {'type': float, 'default': 100e-6, 'units':'s'}),
]
w = ParamWidget(params)
return w
@qutagInit.initializer
def initialize(self):
from lantz.drivers.qutools import QuTAG
self.qutag = QuTAG()
devType = self.qutag.getDeviceType()
if (devType == self.qutag.DEVTYPE_QUTAG):
print("found quTAG!")
else:
print("no suitable device found - demo mode activated")
print("Device timebase:" + str(self.qutag.getTimebase()))
return
@qutagInit.finalizer
def finalize(self):
return
@startpulse.initializer
def initialize(self):
self.fungen.output[1] = 'OFF'
self.fungen.output[2] = 'OFF'
self.fungen.clear_mem(1)
self.fungen.clear_mem(2)
self.fungen.wait()
@startpulse.finalizer
def finalize(self):
self.fungen.output[1] = 'OFF'
self.fungen.output[2] = 'OFF'
print('Two Pulse measurements complete.')
return | initHist |
learners.py | """
Deploy semi-supervised PU machine learning models.
This module provides classes for training, testing, and deploying a PU
learning model for predicting material synthesizability. Utility functions
for plotting aid in visualizing and analyzing results.
References:
[1] DOI: 10.1021/acsnano.8b08014
[2] DOI: 10.1145/1401890.1401920
[3] DOI: 10.1016/j.patrec.2013.06.010
"""
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import precision_recall_fscore_support, precision_recall_curve
from sklearn.cluster import KMeans
from sklearn.mixture import GaussianMixture, BayesianGaussianMixture
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import RepeatedKFold
from sklearn.utils import resample
from mpl_toolkits.mplot3d import Axes3D
from monty.serialization import dumpfn
import pandas as pd
import seaborn as sns
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from pylab import rcParams
__author__ = "Nathan C. Frey, Jin Wang"
__copyright__ = "MIT License"
__version__ = "0.0.1"
__maintainer__ = "Nathan C. Frey"
__email__ = "[email protected]"
__status__ = "Development"
__date__ = "Aug 2017"
class PULearner:
def __init__(self):
"""A machine learning model that predicts material synthesizability.
Positive samples are experimentally synthesized materials. Unlabeled
samples are not-yet synthesized materials.
Features for training data might be generated by first-principles
(density functional theory) calculations, or structural or chemical
data looked up from a table.
Hyperparameters are initialized with sensible defaults, but any newly
trained model should have hyperparams carefully converged.
Attributes:
pu_stats (dict): Outputs of cv_baggingDT
df_U (DataFrame): Unlabeled data.
df_P (DataFrame): Positive data.
synth_scores (list): Synthesizability scores (between 0 and 1) of
unlabeled samples.
labels (list): Likely synthesizable (1) or not (0)
feat_importances (DataFrame): Feature importances from trained
decision tree classifiers. Index corresponds to feature index
in original data.
"""
self.pu_stats = None
self.df_U = None
self.df_P = None
self.synth_scores = None
self.labels = None
self.feat_importances = None
def cv_baggingDT(self, pu_data, splits=10, repeats=10, bags=100, filename=""):
"""
Train bagged decision tree base classifiers and do repeated
k-fold CV.
Synthesizability scores (0 = not synthesizable, 1 = already
synthesized) are generated for an unlabeled sample by averaging
the scores from the ensemble of decision tree classifiers that
have not been trained on that sample.
Args:
pu_data (json): A file where each row describes a material.
There MUST be a column called "PU_label" where a 1 value
indicates a synthesized (positive) compound and a 0 value
indicates an unlabeled compound.
splits (int): Number of splits in k-fold CV.
repeats (int): Number of repeated k-fold CV.
bags (int): Number of bags in bootstrap aggregation.
filename (string): Save model training results to file with
filename ending in .json or .pkl.
Returns:
pu_stats (dict): Metrics and outputs of PU learning model
training.
"""
print("Start PU Learning.")
# Preprocess data and set attributes
df = pd.read_json(pu_data)
df_P, df_U, X_P, X_U = self._process_pu_data(df)
self.df_P = df_P
self.df_U = df_U
# Split data into training and test splits for k-fold CV
kfold = RepeatedKFold(n_splits=splits, n_repeats=repeats, random_state=42)
# Scores for PU learning (tpr = True Positive Rate)
scores = []
tprs = []
# Predicted synthesis probability of CVed P and U sets
prob_P = np.ones(shape=(X_P.shape[0], splits * repeats))
prob_U = -np.ones(shape=(X_U.shape[0], splits * repeats))
# Feature importance
feat_rank = np.zeros(shape=(X_P.shape[1], splits * repeats))
idsp = 0 # index of repeated k splits
# Loop over P and U training/test samples
for (ptrain, ptest), (utrain, utest) in zip(kfold.split(X_P), kfold.split(X_U)):
# Number of P and U training samples
N_ptrain = X_P[ptrain].shape[0]
N_utrain = X_U[utrain].shape[0]
d = X_P.shape[1]
K = N_ptrain
train_label = np.zeros(shape=(N_ptrain + K,))
train_label[:N_ptrain] = 1.0 # Synthesized (positive)
# Out of bag samples
n_oob = np.zeros(shape=(N_utrain,))
f_oob = np.zeros(shape=(N_utrain, 2))
# Sums of probabilities of test sets
f_ptest = np.zeros(shape=(X_P[ptest].shape[0], 2))
f_utest = np.zeros(shape=(X_U[utest].shape[0], 2))
# Bootstrap resampling for each bag
for i in range(bags):
bootstrap_sample = np.random.choice(
np.arange(N_utrain), replace=True, size=K
)
# Positive samples and bootstrapped unlabeled samples
data_bootstrap = np.concatenate(
(X_P[ptrain], X_U[bootstrap_sample, :]), axis=0
)
# Train decision tree classifier
model = DecisionTreeClassifier(
max_depth=None,
max_features=None,
criterion="gini",
class_weight="balanced",
)
model.fit(data_bootstrap, train_label)
# Index for the oob samples
idx_oob = sorted(
set(range(N_utrain)) - set(np.unique(bootstrap_sample))
)
# Transductive learning on oob samples
f_oob[idx_oob] += model.predict_proba(X_U[utrain][idx_oob])
n_oob[idx_oob] += 1
f_ptest += model.predict_proba(X_P[ptest])
f_utest += model.predict_proba(X_U[utest])
feat_rank[:, idsp] = model.feature_importances_
# Predicted synthesis probabilities of unlabeled samples
predict_utrain = f_oob[:, 1] / n_oob
# Predicted probabilities for P and U test sets
predict_ptest = f_ptest[:, 1] / bags
predict_utest = f_utest[:, 1] / bags
# Find predicted positives
true_pos = predict_ptest[np.where(predict_ptest > 0.5)].shape[0]
u_pos = predict_utest[np.where(predict_utest > 0.5)].shape[0]
N_ptest = X_P[ptest].shape[0]
N_utest = X_U[utest].shape[0]
# Predicted positive ratio in test set
p_pred_pos = (true_pos + u_pos) / (N_ptest + N_utest) + 0.0001
# Compute PU recall (TPR) and score metrics
recall = true_pos / N_ptest
score = recall ** 2 / p_pred_pos
scores.append(score)
tprs.append(recall)
# Predicted probabilities
prob_P[ptest, idsp] = predict_ptest
prob_U[utrain, idsp] = predict_utrain
prob_U[utest, idsp] = predict_utest
idsp += 1
# Progress update after each completed repeat of the k-fold CV
if idsp % splits == 0:
tpr_tmp = np.asarray(tprs[-splits:])
print(
"Performed Repeated "
+ str(splits)
+ "-fold: "
+ str(idsp // splits)
+ " out of "
+ str(repeats)
)
print(
"True Positive Rate: %0.2f (+/- %0.2f)"
% (tpr_tmp.mean(), tpr_tmp.std() * 2)
)
# Predicted labels from k-fold CV
label_U = np.zeros(shape=(X_U.shape[0], splits * repeats + 1), dtype=int)
label_U[:, : splits * repeats][np.where(prob_U > 0.5)] = 1
label_U[:, splits * repeats] = np.sum(
label_U[:, : splits * repeats + 1], axis=1
)
tprs = np.asarray(tprs)
scores = np.asarray(scores)
# Metrics for each model in the k-folds
label_U_rp = np.zeros(shape=(X_U.shape[0], repeats), dtype=int)
prob_U_rp = np.zeros(shape=(X_U.shape[0], repeats))
feat_rank_rp = np.zeros(shape=(X_U.shape[1], repeats))
tpr_rp = np.zeros(shape=(repeats,))
scores_rp = np.zeros(shape=(repeats,))
labels = np.zeros(shape=(X_U.shape[0],))
for i in range(repeats):
prob_U_rp[:, i] = prob_U[:, i * splits : (i + 1) * splits].mean(axis=1)
feat_rank_rp[:, i] = feat_rank[:, i * splits : (i + 1) * splits].mean(
axis=1
)
tpr_rp[i] = tprs[i * splits : (i + 1) * splits].mean()
scores_rp[i] = scores[i * splits : (i + 1) * splits].mean()
label_U_rp[np.where(prob_U_rp > 0.5)] = 1
prob = prob_U_rp.mean(axis=1)
labels[np.where(prob > 0.5)] = 1
# Get confidence interval of TPR for each kfold
tpr_low, tpr_up = self.bootstrapCI(tpr_rp)
scores_low, scores_up = self.bootstrapCI(scores_rp)
# PU learning metrics
metrics = np.asarray(
[tpr_rp.mean(), tpr_low, tpr_up, scores_rp.mean(), scores_low, scores_up]
)
print("Accuracy: %0.2f" % (tpr_rp.mean()))
print("95%% confidence interval: [%0.2f, %0.2f]" % (tpr_low, tpr_up))
# Metrics and results from training / testing
pu_stats = {
"prob": prob,
"labels": labels,
"metrics": metrics,
"prob_rp": prob_U_rp,
"label_rp": label_U_rp,
"tpr_rp": tpr_rp,
"scores_rp": scores_rp,
"feat_rank_rp": feat_rank_rp,
}
# Save results
if filename:
if filename.endswith(".json"):
dumpfn(pu_stats, filename)
if filename.endswith(".pkl"):
with open(filename, "wb") as file:
pickle.dump(pu_stats, file, protocol=pickle.HIGHEST_PROTOCOL)
self.pu_stats = pu_stats
return pu_stats
def bootstrapCI(self, data, ci=95, ns=10000):
"""Compute confidence interval of the TPR.
Args:
data (array): Array of TPRs for each kfold.
ci (int): Confidence interval.
ns (int): Number of bootstrap resamplings.
Returns:
lower (float): Lower endpoint of CI.
upper (float): Upper endpoint of CI.
"""
bs_rsample = []
for _ in range(ns):
rsample = resample(data, n_samples=len(data))
bs_rsample.append(np.mean(rsample))
bs_rsample = np.asarray(bs_rsample)
lower = np.percentile(bs_rsample, (100 - ci) / 2)
upper = np.percentile(bs_rsample, ci + (100 - ci) / 2)
return lower, upper
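# With the default ci=95, (lower, upper) are the 2.5th and 97.5th percentiles
# of the `ns` bootstrap-resampled means of the input TPR values.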
def corr_heatmap(self, num_feats=10, fname=""):
"""Plot correlation matrix between synthesizability and features.
cv_baggingDT must be run first.
Args:
num_feats (int): How many features to consider.
fname (str): Filename if correlation plot should be saved.
Returns:
None (generates plots)
"""
pu_stats = self.pu_stats
df_U = self.df_U
df_U_copy = df_U.drop(columns=["PU_label"])
# Get normalized, sorted & ranked list of most important features
synth_scores = pu_stats["prob"]
df_U_copy["synth_score"] = synth_scores
# Make correlation matrix of top "num_feats" features
corrmat = df_U_copy.corr()
cols = corrmat.nlargest(num_feats, "synth_score")["synth_score"].index
cm = np.corrcoef(df_U_copy[cols].values.T)
sns.set(style='ticks')
rcParams['figure.dpi'] = 300
fig, ax = plt.subplots(1, 1)
hm = sns.heatmap(
cm,
ax=ax,
cbar=True,
annot=True,
square=True,
fmt=".2f",
annot_kws={"size": 7},
yticklabels=cols.values,
xticklabels=cols.values,
)
if fname:
|
def get_feat_importances(self, plot_format=""):
"""Process output from PU learning k-fold cross validation.
cv_baggingDT must be run first.
If plot_format is specified, a feature importance plot will
be saved.
Args:
plot_format (str): svg, png, or pdf file format for saving simple
visualizations of feature importance and correlation.
"""
pu_stats = self.pu_stats
# Feature importances for individual repetitions of kfold CV
feat_rank_rp = pu_stats["feat_rank_rp"]
feat_importances = np.sum(feat_rank_rp, axis=1)
df_U = self.df_U
df_U = df_U._get_numeric_data()
df_U_copy = df_U.drop(columns=["PU_label"])
feat_names = df_U_copy.columns
# Index corresponds to feature in original data
df_feat = pd.DataFrame(columns=["feature", "importance"])
df_feat["feature"] = feat_names
df_feat["importance"] = feat_importances
# Sort by importance
df_feat_sort = df_feat.sort_values(by="importance", ascending=False)
max_value = df_feat["importance"].max()
# Normalize to 1
df_feat_sort["importance"] = df_feat_sort["importance"] / max_value
# Set feature importance attribute
self.feat_importances = df_feat
if plot_format in ["svg", "pdf", "png"]:
# Feature importance plot
fig, ax = plt.subplots(figsize=(10, 4))
with sns.axes_style(style="ticks"):
sns.barplot(x="feature", y="importance", data=df_feat_sort)
ax.set_xticklabels(
ax.get_xticklabels(), rotation=45, ha="right", fontsize=7
)
filename = "feat_importance." + plot_format
self.save_plot(filename, fig, ax)
@staticmethod
def _process_pu_data(data):
"""Utility method for processing input data.
Args:
data (DataFrame): Data with positive and unlabeled samples.
Returns:
X_P (array): Positive sample set.
X_U (array): Unlabeled sample set.
"""
df_P = data.query("PU_label == 1") # Positive value is 1
df_U = data.query("PU_label == 0") # Unlabeled value is 0
# Chop off PU label and drop non-numeric columns for sklearn
X_P = np.asarray(df_P.drop(columns=["PU_label"])._get_numeric_data())
X_U = np.asarray(df_U.drop(columns=["PU_label"])._get_numeric_data())
return df_P, df_U, X_P, X_U
@staticmethod
def save_plot(filename, fig, ax):
"""Utility method for saving simple visualizations.
Args:
filename (str): Name ending in .svg, .png, or .pdf
fig, ax (objects): Matplotlib objects.
Returns:
None
"""
sns.set_style("ticks")
fig.tight_layout()
fig.savefig(filename)
class PUInteract:
def __init__(self, df_parent, pu_parent, df_child, pu_child, merge_on=(), feats=()):
"""Consider parent and child phase PU learning scores.
This class looks at PU learning scores for parent bulk
compounds (e.g. layered h-BN) and scores of the child phases
along with descriptors like exfoliation energy and changes
in structural/electronic properties to predict (parent, child)
pairs that can be synthesized.
Parent and child must be linked by a column that allows the
dataframes to be merged. There should also be additional features
that characterize the structural and chemical differences between
parents and children, e.g. changes in bond lengths, etc.
Unsupervised clustering models are used to identify synthesizable
(parent/child) pairs.
Args:
df_parent (str): Parent data filename.
pu_parent (dict): Output from PULearner.cv_baggingDT.
df_child (str): Child data filename.
pu_child (dict): Output from PULearner.cv_baggingDT.
merge_on (tuple): Column name(s) on which to merge.
feats (tuple): Column names to use as features. If empty, use all
possible columns.
Attributes:
merged_df (DataFrame): (Parent, child) pair data.
X (array): Array representation of merged_df.
Returns:
None
"""
df_parent = pd.read_json(df_parent)
df_child = pd.read_json(df_child)
# Set scores from PULearner
df_parent["synth_score"] = 1
df_child["synth_score"] = 1
df_parent.loc[df_parent.eval("PU_label == 0"), "synth_score"] = pu_parent[
"prob"
]
df_child.loc[df_child.eval("PU_label == 0"), "synth_score"] = pu_child["prob"]
# Merge parent and child dfs
merge_on = list(merge_on)
df = pd.merge(
df_parent, df_child, on=merge_on, how="outer", suffixes=["_p", "_c"]
)
df.drop(columns=["PU_label_p", "PU_label_c"], inplace=True, axis=1)
if feats:
feat_names = [f + "_p" for f in feats] + [f + "_c" for f in feats]
df = df[feat_names]
self.merged_df = df
self.X = np.array(df)
def do_kmeans(self, n_clusters=2, seed=42):
"""Do k-means clustering on (parent, child) pairs.
Args:
n_clusters (int): Number of clusters.
seed (int): Fix random seed for kmeans reproducibility.
Returns:
kmeans_output (dict): kmeans cluster centers, cluster labels for
each (parent, child)
"""
np.random.seed(seed)
km = KMeans(n_clusters=n_clusters, random_state=seed)
km.fit(self.X)
kmeans_output = {
"cluster_centers": km.cluster_centers_,
"cluster_labels": km.labels_,
}
return kmeans_output
def do_gmixture(self, n_components=2, seed=42):
"""
Estimate parameters of a Gaussian mixture distribution of (parent,
child) data.
Args:
n_components (int): Number of components in GMM.
seed (int): Random seed.
Returns:
gmm_output (dict): Predicted labels of (parent, child) pairs and
predicted posterior probabilities of each component.
"""
np.random.seed(seed)
gmm = GaussianMixture(
n_components=n_components, random_state=seed, covariance_type="full"
)
gmm.fit(self.X)
gmm_labels = gmm.predict(self.X)
gmm_prob = gmm.predict_proba(self.X)[:, 0]
gmm_output = {"gmm_labels": gmm_labels, "gmm_prob": gmm_prob}
return gmm_output
def do_bgm(self, n_components=6, seed=42):
"""Bayesian Gaussian Mixture.
Infer the effective number of components in a Gaussian Mixture Model
via variational Bayesian estimation.
n_effective_components < n_components if the model sets some
weights close to 0.
Args:
n_components (int): Number of components in GMM.
seed (int): Random seed.
Returns:
bgm_output (dict): Labels and probabilities.
"""
np.random.seed(seed)
bgm = BayesianGaussianMixture(
n_components=n_components,
covariance_type="full",
weight_concentration_prior=1e-2,
weight_concentration_prior_type="dirichlet_process",
mean_precision_prior=1e-2,
init_params="random",
max_iter=100,
random_state=seed,
)
bgm.fit(self.X)
bgm_labels = bgm.predict(self.X)
bgm_prob = bgm.predict_proba(self.X)[:, 0]
bgm_output = {"bgm_labels": bgm_labels, "bgm_prob": bgm_prob}
return bgm_output
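# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the original module).
# It assumes a JSON file "pu_training_data.json" with one row per material and
# a "PU_label" column (1 = synthesized, 0 = unlabeled), as described in
# PULearner.cv_baggingDT; the path and hyperparameters below are placeholders.
if __name__ == "__main__":
    learner = PULearner()
    # Small splits/repeats/bags keep the sketch quick; converged values should
    # be used for real training runs.
    stats = learner.cv_baggingDT(
        "pu_training_data.json", splits=5, repeats=2, bags=20
    )
    print("Mean true positive rate over repeats:", stats["tpr_rp"].mean())
    # Feature importances and a correlation heatmap of the top-ranked features.
    learner.get_feat_importances(plot_format="png")
    learner.corr_heatmap(num_feats=5, fname="synth_corr")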
| self.save_plot(fname + ".png", fig, ax) |
remotejit.py | """RemoteJIT client/server config functions
"""
__all__ = ['RemoteJIT', 'Signature', 'Caller']
import os
import inspect
import warnings
import ctypes
from contextlib import nullcontext
from . import irtools
from .typesystem import Type, get_signature
from .thrift import Server, Dispatcher, dispatchermethod, Data, Client
from .utils import get_local_ip
from .targetinfo import TargetInfo
from .rbclib import tracing_allocator
# XXX WIP: the OmnisciCompilerPipeline is no longer omnisci-specific because
# we support Arrays even without omnisci, so it must be renamed and moved
# somewhere else.
from .omnisci_backend import OmnisciCompilerPipeline
def isfunctionlike(obj):
"""Return True if object is function alike.
"""
if obj is None or isinstance(obj, (Signature, list, tuple, str, Caller)):
return False
return True
def extract_templates(options):
"""Extract templates mapping data from options dictionary.
If options does not contain "templates", it will be constructed
from all unknown options that have list values. Otherwise, the
corresponding value is returned with no further processing of
options content.
Parameters
----------
options : dict
Returns
-------
options : dict
A copy of input without templates mapping data.
templates : dict
Templates mapping which is a collections of pairs of template
name and a list of concrete types. Template name cannot
correspond to a concrete type.
"""
known_options = ['devices', 'local']
new_options = {}
templates = options.get('templates')
if templates is not None:
new_options.update(options)
del new_options['templates']
else:
templates = {}
for k, v in options.items():
if (isinstance(k, str) and isinstance(v, list) and k not in known_options):
templates[k] = v
else:
new_options[k] = v
return new_options, templates
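# Example: extract_templates({'devices': ['cpu'], 'T': ['int32', 'float64'],
# 'local': True}) returns ({'devices': ['cpu'], 'local': True},
# {'T': ['int32', 'float64']}) because unknown list-valued options are treated
# as template mappings while known options and non-list values pass through.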
class Signature(object):
"""Signature decorator for Python functions.
A Signature decorator may contain many signature objects
representing the prototypes of functions.
Signature decorators are re-usable and composeable. For example:
.. highlight:: python
.. code-block:: python
rjit = RemoteJIT(host='localhost', port=6274)
# remotebinaryfunc is Signature instance
remotebinaryfunc = rjit('int32(int32, int32)',
'float32(float32, float32)', ...)
# add will be Caller instance
@remotebinaryfunc
def add(a, b):
return a + b
# sub will be Caller instance
@remotebinaryfunc
def sub(a, b):
return a - b
add(1, 2) # returns 3
sub(1.0, 2.0) # returns -1.0
"""
def __init__(self, remotejit):
|
@property
def debug(self):
return self.remotejit.debug
@property
def local(self):
sig = Signature(self.remotejit.local)
sig.signatures.extend(self.signatures)
assert not self.signature_devices
assert not self.signature_templates
return sig
def __str__(self):
lst = ["'%s'" % (s,) for s in self.signatures]
return '%s(%s)' % (self.__class__.__name__, ', '.join(lst))
def __call__(self, obj, **options):
"""Decorate signatures or a function.
Parameters
----------
obj : {str, Signature, function, ...}
Specify object that represents a function type.
Keyword parameters
------------------
devices : list
Specify device names for the given set of signatures.
templates : dict
Specify template types mapping.
Returns
-------
result : {Signature, Caller}
If obj is a function, return Caller. Otherwise return self
that is extended with new signatures from obj.
Note
----
The validity of the input argument is not checked here. This
is because the bit-size of certain C types (e.g. size_t, long,
etc) depend on the target device which information will be
available at the compile stage. The target dependent
signatures can be retrieved using
`signature.get_signatures()`.
"""
if obj is None:
return self
options, templates = extract_templates(options)
devices = options.get('devices')
if isinstance(obj, Signature):
self.signatures.extend(obj.signatures)
self.signature_devices.update(obj.signature_devices)
self.remotejit.discard_last_compile()
if devices is not None:
for s in obj.signatures:
self.signature_devices[s] = devices
assert not templates
for s in obj.signatures:
t = obj.signature_templates.get(s)
if t is not None:
self.signature_templates[s] = t
return self
if isinstance(obj, Caller):
# return new Caller with extended signatures set
assert obj.remotejit is self.remotejit
final = Signature(self.remotejit)
final(self) # copies the signatures from self to final
final(obj.signature) # copies the signatures from obj to final
assert devices is None
assert not templates
return Caller(obj.func, final)
if isfunctionlike(obj):
final = Signature(self.remotejit)
final(self) # copies the signatures from self to final
assert devices is None
assert not templates
return Caller(obj, final)
self.signatures.append(obj)
self.remotejit.discard_last_compile()
if devices is not None:
self.signature_devices[obj] = devices
if templates:
self.signature_templates[obj] = templates
return self
def best_match(self, func, atypes: tuple) -> Type:
"""Return function type from signatures that matches best with given
argument types.
If no match is found, raise TypeError.
Parameters
----------
atypes : Type-tuple
Specify a tuple of argument types.
Returns
-------
ftype : Type
Function type that arguments match best with given argument
types.
"""
ftype = None
match_penalty = None
available_types = self.normalized(func).signatures
for typ in available_types:
penalty = typ.match(atypes)
if penalty is not None:
if ftype is None or penalty < match_penalty:
ftype = typ
match_penalty = penalty
if ftype is None:
satypes = ', '.join(map(str, atypes))
available = '; '.join(map(str, available_types))
raise TypeError(
f'found no matching function type to given argument types'
f' `{satypes}`. Available function types: {available}')
return ftype
def normalized(self, func=None):
"""Return a copy of Signature object where all signatures are
normalized to Type instances using the current target device
information.
Parameters
----------
func : {None, callable}
Python function that annotations are attached to signature.
Returns
-------
signature : Signature
"""
signature = Signature(self.remotejit)
fsig = Type.fromcallable(func) if func is not None else None
nargs = fsig.arity if func is not None else None
target_info = TargetInfo()
for sig in self.signatures:
devices = self.signature_devices.get(sig)
if not target_info.check_enabled(devices):
if self.debug:
print(f'{type(self).__name__}.normalized: skipping {sig} as'
f' not supported by devices: {devices}')
continue
templates = self.signature_templates.get(sig, {})
sig = Type.fromobject(sig)
if not sig.is_complete:
warnings.warn(f'Incomplete signature {sig} will be ignored')
continue
if not sig.is_function:
raise ValueError(
'expected signature representing function type,'
f' got `{sig}`')
if nargs is None:
nargs = sig.arity
elif sig.arity != nargs:
raise ValueError(f'signature `{sig}` must have arity {nargs}'
f' but got {len(sig[1])}')
if fsig is not None:
sig.inherit_annotations(fsig)
if not sig.is_concrete:
for csig in sig.apply_templates(templates):
assert isinstance(csig, Type), (sig, csig, type(csig))
if csig not in signature.signatures:
signature.signatures.append(csig)
else:
if sig not in signature.signatures:
signature.signatures.append(sig)
if fsig is not None and fsig.is_complete:
if fsig not in signature.signatures:
signature.signatures.append(fsig)
return signature
class Caller(object):
"""Remote JIT caller, holds the decorated function that can be
executed remotely.
"""
def __init__(self, func, signature: Signature):
"""Construct remote JIT caller instance.
Parameters
----------
func : callable
Specify a Python function that is used as a template to
remotely JIT compiled functions.
signature : Signature
Specify a collection of signatures.
local : bool
When True, local process will be interpreted as
remote. Useful for debugging.
"""
self.remotejit = signature.remotejit
self.signature = signature
func = self.remotejit.preprocess_callable(func)
self.func = func
self.nargs = len(get_signature(func).parameters)
# Attributes used in RBC user-interface
self._is_compiled = set() # items are (fname, ftype)
self._client = None
self.remotejit.add_caller(self)
@property
def local(self):
"""Return Caller instance that executes function calls on the local
host. Useful for debugging.
"""
return Caller(self.func, self.signature.local)
def __repr__(self):
return '%s(%s, %s, local=%s)' % (type(self).__name__, self.func,
self.signature, self.local)
def __str__(self):
return self.describe()
def describe(self):
"""Return LLVM IRs of all target devices.
"""
lst = ['']
fid = 0
for device, target_info in self.remotejit.targets.items():
with Type.alias(**self.remotejit.typesystem_aliases):
with target_info:
lst.append(f'{device:-^80}')
signatures = self.get_signatures()
signatures_map = {}
for sig in signatures:
fid += 1
signatures_map[fid] = sig
llvm_module, succesful_fids = irtools.compile_to_LLVM(
[(self.func, signatures_map)],
target_info,
pipeline_class=OmnisciCompilerPipeline,
debug=self.remotejit.debug)
lst.append(str(llvm_module))
lst.append(f'{"":-^80}')
return '\n'.join(lst)
def get_signatures(self):
"""Return a list of normalized signatures for given target device.
"""
return self.signature.normalized(self.func).signatures
# RBC user-interface
def __call__(self, *arguments, **options):
"""Return the result of a remote JIT compiled function call.
"""
device = options.get('device')
targets = self.remotejit.targets
if device is None:
if len(targets) > 1:
raise TypeError(
f'specifying device is required when target has more than'
f' one device. Available devices: {", ".join(targets)}')
device = tuple(targets)[0]
target_info = targets[device]
with target_info:
atypes = tuple(map(Type.fromvalue, arguments))
ftype = self.signature.best_match(self.func, atypes)
key = self.func.__name__, ftype
if key not in self._is_compiled:
self.remotejit.remote_compile(self.func, ftype, target_info)
self._is_compiled.add(key)
return self.remotejit.remote_call(self.func, ftype, arguments)
class RemoteJIT(object):
"""RemoteJIT is a decorator generator for user functions to be
remotely JIT compiled.
To use, define
.. highlight:: python
.. code-block:: python
rjit = RemoteJIT(host='localhost', port=6274)
@rjit
def foo(a: int, b: int) -> int:
return a + b
@rjit('double(double, double)',
'int64(int64, int64)')
def bar(a, b):
return a + b
# Finally, call
c = foo(1, 2) # c = 3
b = bar(7.0, 1.0) # b = 8.0
The sum will be evaluated in the remote host.
"""
multiplexed = True
thrift_content = None
typesystem_aliases = dict()
def __init__(self, host='localhost', port=11532,
local=False, debug=False, use_tracing_allocator=False):
"""Construct remote JIT function decorator.
The decorator is re-usable for different functions.
Parameters
----------
host : str
Specify the host name of IP of JIT server
port : {int, str}
Specify the service port of the JIT server
local : bool
When True, use local client. Useful for debugging.
debug : bool
When True, output debug messages.
use_tracing_allocator : bool
When True, enable the automatic detection of memory leaks.
"""
if host == 'localhost':
host = get_local_ip()
if use_tracing_allocator and not local:
raise ValueError('use_tracing_allocator=True can be used only with local=True')
self.debug = debug
self.use_tracing_allocator = use_tracing_allocator
self.host = host
self.port = int(port)
self.server_process = None
# A collection of Caller instances. Each represents a function
# that has many argument-type-dependent implementations.
self._callers = []
self._last_compile = None
self._targets = None
if local:
self._client = LocalClient(debug=debug,
use_tracing_allocator=use_tracing_allocator)
else:
self._client = None
@property
def local(self):
localjit = type(self)(local=True, debug=self.debug)
localjit._callers.extend(self._callers)
return localjit
def add_caller(self, caller):
self._callers.append(caller)
self.discard_last_compile()
def get_callers(self):
return self._callers
def reset(self):
"""Drop all callers definitions and compilation results.
"""
self._callers.clear()
self.discard_last_compile()
@property
def have_last_compile(self):
"""Check if compile data exists.
See `set_last_compile` method for more information.
"""
return self._last_compile is not None
def discard_last_compile(self):
"""Discard compile data.
See `set_last_compile` method for more information.
"""
self._last_compile = None
def set_last_compile(self, compile_data):
"""Save compile data.
The caller is responsible for discarding previous compiler
data by calling `discard_last_compile` method.
Parameters
----------
compile_data : object
Compile data can be any Python object. When None, it is
interpreted as no compile data is available.
Notes
-----
The have/discard/set_last_compile methods provide a way to
avoid unnecessary compilations when the remote server supports
registration of compiled functions. The corresponding
`register` method is expected to use the following pattern:
.. code-block:: python
def register(self):
if self.have_last_compile:
return
<compile defined functions>
self.set_last_compile(<compilation results>)
The `discard_last_compile()` method is called when the compile
data becomes obsolete or needs to be discarded. For instance,
the compile data will be discarded when calling the following
methods: `reset`, `add_caller`. Note that the `add_caller`
call is triggered when applying the remotejit decorator to a
Python function to be compiled.
"""
assert self._last_compile is None
self._last_compile = compile_data
def get_pending_names(self):
"""Return the names of functions that have not been registered to the
remote server.
"""
names = set()
if not self.have_last_compile:
for caller in reversed(self.get_callers()):
names.add(caller.func.__name__)
return names
def retrieve_targets(self):
"""Retrieve target device information from remote client.
Redefine this method if remote client is not native.
Returns
-------
targets : dict
Map of target device names and their information.
"""
# TODO: rename thrift API targets to get_device_parameters?
response = self.client(remotejit=dict(targets=()))
targets = {}
for device, data in response['remotejit']['targets'].items():
targets[device] = TargetInfo.fromjson(data)
return targets
@property
def targets(self):
"""Return device-target_info mapping of the remote server.
"""
if self._targets is None:
self._targets = self.retrieve_targets()
return self._targets
def __call__(self, *signatures, **options):
"""Define a remote JIT function signatures and template.
Parameters
----------
signatures : tuple
Specify signatures of a remote JIT function, or a Python
function as a template from which the remote JIT function
will be compiled.
Keyword parameters
------------------
local : bool
devices : list
Specify device names for the given set of signatures.
templates : dict
Specify template types mapping.
Returns
-------
sig: {Signature, Caller}
Signature decorator or Caller
Notes
-----
The signatures can be strings in the following form:
"<return type>(<argument type 1>, <argument type 2>, ...)"
or any other object that can be converted to function type,
see `Type.fromobject` for more information.
"""
if options.get('local'):
s = Signature(self.local)
else:
s = Signature(self)
devices = options.get('devices')
options, templates = extract_templates(options)
for sig in signatures:
s = s(sig, devices=devices, templates=templates)
return s
def start_server(self, background=False):
"""Start remotejit server from client.
"""
thrift_file = os.path.join(os.path.dirname(__file__),
'remotejit.thrift')
print('starting rpc.thrift server: %s' % (thrift_file), end='',
flush=True)
if self.debug:
print(flush=True)
dispatcher = DebugDispatcherRJIT
else:
dispatcher = DispatcherRJIT
if background:
ps = Server.run_bg(dispatcher, thrift_file,
dict(host=self.host, port=self.port,
debug=self.debug))
self.server_process = ps
else:
Server.run(dispatcher, thrift_file,
dict(host=self.host, port=self.port,
debug=self.debug))
print('... rpc.thrift server stopped', flush=True)
def stop_server(self):
"""Stop remotejit server from client.
"""
if self.server_process is not None and self.server_process.is_alive():
print('... stopping rpc.thrift server')
self.server_process.terminate()
self.server_process = None
@property
def client(self):
"""Return remote host connection as Client instance.
"""
if self._client is None:
self._client = Client(
host=self.host,
port=self.port,
multiplexed=self.multiplexed,
thrift_content=self.thrift_content,
socket_timeout=60000)
return self._client
def remote_compile(self, func, ftype: Type, target_info: TargetInfo):
"""Remote compile function and signatures to machine code.
The input function `func` is compiled to LLVM IR module, the
LLVM IR module is sent to remote host where the remote host is
expected to complete the compilation process.
Return the corresponding LLVM IR module instance which may be
useful for debugging.
"""
if self.debug:
print(f'remote_compile({func}, {ftype})')
llvm_module, succesful_fids = irtools.compile_to_LLVM(
[(func, {0: ftype})],
target_info,
pipeline_class=OmnisciCompilerPipeline,
debug=self.debug)
ir = str(llvm_module)
mangled_signatures = ';'.join([s.mangle() for s in [ftype]])
response = self.client(remotejit=dict(
compile=(func.__name__, mangled_signatures, ir)))
assert response['remotejit']['compile'], response
return llvm_module
def remote_call(self, func, ftype: Type, arguments: tuple):
"""Call function remotely on given arguments.
The input function `func` is called remotely by sending the
arguments data to remote host where the previously compiled
function (see `remote_compile` method) is applied to the
arguments, and the result is returned to local process.
"""
if self.debug:
print(f'remote_call({func}, {ftype}, {arguments})')
fullname = func.__name__ + ftype.mangle()
response = self.client(remotejit=dict(call=(fullname, arguments)))
return response['remotejit']['call']
def python(self, statement):
"""Execute Python statement remotely.
"""
response = self.client(remotejit=dict(python=(statement,)))
return response['remotejit']['python']
def preprocess_callable(self, func):
"""Preprocess func to be used as a remotejit function definition.
Parameters
----------
func : callable
Returns
-------
func : callable
Preprocessed func.
"""
return func
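# Local-mode sketch (illustrative only): with local=True the decorator compiles
# and executes functions in the current process through LocalClient, which is
# useful for debugging without starting a thrift server. For example:
#
#     rjit = RemoteJIT(local=True)
#
#     @rjit('int64(int64, int64)')
#     def add(a, b):
#         return a + b
#
#     add(1, 2)  # compiled via irtools and run in-process, returns 3
#
# The signature string and call pattern follow the class docstring above; only
# the local=True flag and the example values are chosen here for illustration.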
class DispatcherRJIT(Dispatcher):
"""Implements remotejit service methods.
"""
def __init__(self, server, debug=False, use_tracing_allocator=False):
super().__init__(server, debug=debug)
self.use_tracing_allocator = use_tracing_allocator
self.compiled_functions = dict()
self.engines = dict()
self.python_globals = dict()
self.python_locals = dict()
@dispatchermethod
def targets(self) -> dict:
"""Retrieve target device information.
Returns
-------
info : dict
Map of target devices and their properties.
"""
if self.use_tracing_allocator:
target_info = TargetInfo.host(name='host_cpu_tracing_allocator',
use_tracing_allocator=True)
else:
target_info = TargetInfo.host()
target_info.set('has_numba', True)
target_info.set('has_cpython', True)
return dict(cpu=target_info.tojson())
@dispatchermethod
def compile(self, name: str, signatures: str, ir: str) -> int:
"""JIT compile function.
Parameters
----------
name : str
Specify the function name.
signatures : str
Specify semi-colon separated list of mangled signatures.
ir : str
Specify LLVM IR representation of the function.
"""
engine = irtools.compile_IR(ir)
for msig in signatures.split(';'):
sig = Type.demangle(msig)
ctypes_sig = sig.toctypes()
assert sig.is_function
if sig[0].is_aggregate:
raise RuntimeError(
f'Functions with aggregate return type values are not supported,'
f' got function `{name}` with `{sig}` signature')
fullname = name + msig
addr = engine.get_function_address(fullname)
if self.debug:
print(f'compile({name}, {sig}) -> {hex(addr)}')
# storing engine as the owner of function addresses
if addr:
self.compiled_functions[fullname] = engine, ctypes_sig(addr), sig, ctypes_sig
else:
warnings.warn(f'No compilation result for {name}|{sig=}')
return True
@dispatchermethod
def call(self, fullname: str, arguments: tuple) -> Data:
"""Call JIT compiled function
Parameters
----------
fullname : str
Specify the full name of the function that is in form
"<name><mangled signature>"
arguments : tuple
Specify the arguments to the function.
"""
# if we are using a tracing allocator, automatically detect memory leaks
# at each call.
if self.use_tracing_allocator:
leak_detector = tracing_allocator.new_leak_detector()
else:
leak_detector = nullcontext()
with leak_detector:
return self._do_call(fullname, arguments)
def _do_call(self, fullname, arguments):
if self.debug:
print(f'call({fullname}, {arguments})')
ef = self.compiled_functions.get(fullname)
if ef is None:
raise RuntimeError(
f'no such compiled function `{fullname}`. Available functions:\n'
f' {"; ".join(list(self.compiled_functions))}\n.')
sig = ef[2]
ctypes_sig = ef[3]
if len(arguments) == 0:
assert sig.arity == 1 and sig[1][0].is_void, sig
else:
assert len(arguments) == sig.arity, (len(arguments), sig.arity)
ctypes_arguments = []
for typ, ctypes_typ, value in zip(sig[1], ctypes_sig._argtypes_, arguments):
if typ.is_custom:
typ = typ.get_struct_type()
if typ.is_struct:
if isinstance(value, tuple):
member_values = [t.toctypes()(value[i]) for i, t in enumerate(typ)]
else:
member_values = [t.toctypes()(getattr(value, t.name)) for t in typ]
ctypes_arguments.extend(member_values)
elif typ.is_pointer:
if isinstance(value, ctypes.c_void_p):
value = ctypes.cast(value, ctypes_typ)
else:
value = ctypes.cast(value, ctypes_typ)
ctypes_arguments.append(value)
else:
ctypes_arguments.append(value)
r = ef[1](*ctypes_arguments)
if sig[0].is_pointer and sig[0][0].is_void and isinstance(r, int):
r = ctypes.c_void_p(r)
if self.debug:
print(f'-> {r}')
if hasattr(r, 'topython'):
return r.topython()
return r
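# Illustrative note (an assumption drawn from the branches above, not original
# documentation): a struct-typed argument may be passed either as a plain tuple of
# member values or as an object exposing the members as named attributes; both
# forms are flattened into per-member ctypes values before the call.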
@dispatchermethod
def python(self, statement: str) -> int:
"""Execute Python statement.
"""
if self.debug:
print(f'python({statement!r})')
exec(statement, self.python_globals, self.python_locals)
return True
class DebugDispatcherRJIT(DispatcherRJIT):
"""
Enables debug messages.
"""
debug = True
class LocalClient(object):
"""Pretender of thrift.Client.
All calls will be made in a local process. Useful for debugging.
"""
def __init__(self, debug=False, use_tracing_allocator=False):
self.dispatcher = DispatcherRJIT(None, debug=debug,
use_tracing_allocator=use_tracing_allocator)
def __call__(self, **services):
results = {}
for service_name, query_dict in services.items():
results[service_name] = {}
for mthname, args in query_dict.items():
mth = getattr(self.dispatcher, mthname)
mth = inspect.unwrap(mth)
results[service_name][mthname] = mth(self.dispatcher, *args)
return results
| assert isinstance(remotejit, RemoteJIT), type(remotejit)
self.remotejit = remotejit
self.signatures = []
self.signature_devices = {}
self.signature_templates = {} |
api_another_fake.go | /*
* OpenAPI Petstore
*
* This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
*
* API version: 1.0.0
* Generated by: OpenAPI Generator (https://openapi-generator.tech)
*/
package petstore
import (
"context"
"io/ioutil"
"net/http"
"net/url"
"strings"
)
// Linger please
var (
_ context.Context
)
type AnotherFakeApiService service
/*
AnotherFakeApiService To test special tags
To test special tags and operation ID starting with number
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background().
* @param client client model
@return Client
*/
func (a *AnotherFakeApiService) Call123TestSpecialTags(ctx context.Context, client Client) (Client, *http.Response, error) {
var (
localVarHttpMethod = strings.ToUpper("Patch")
localVarPostBody interface{}
localVarFormFileName string
localVarFileName string
localVarFileBytes []byte
localVarReturnValue Client
)
// create path and map variables
localVarPath := a.client.cfg.BasePath + "/another-fake/dummy"
localVarHeaderParams := make(map[string]string)
localVarQueryParams := url.Values{}
localVarFormParams := url.Values{} | // set Content-Type header
localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)
if localVarHttpContentType != "" {
localVarHeaderParams["Content-Type"] = localVarHttpContentType
}
// to determine the Accept header
localVarHttpHeaderAccepts := []string{"application/json"}
// set Accept header
localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)
if localVarHttpHeaderAccept != "" {
localVarHeaderParams["Accept"] = localVarHttpHeaderAccept
}
// body params
localVarPostBody = &client
r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)
if err != nil {
return localVarReturnValue, nil, err
}
localVarHttpResponse, err := a.client.callAPI(r)
if err != nil || localVarHttpResponse == nil {
return localVarReturnValue, localVarHttpResponse, err
}
localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)
localVarHttpResponse.Body.Close()
if err != nil {
return localVarReturnValue, localVarHttpResponse, err
}
if localVarHttpResponse.StatusCode >= 300 {
newErr := GenericOpenAPIError{
body: localVarBody,
error: localVarHttpResponse.Status,
}
if localVarHttpResponse.StatusCode == 200 {
var v Client
err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type"))
if err != nil {
newErr.error = err.Error()
return localVarReturnValue, localVarHttpResponse, newErr
}
newErr.model = v
return localVarReturnValue, localVarHttpResponse, newErr
}
return localVarReturnValue, localVarHttpResponse, newErr
}
err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type"))
if err != nil {
newErr := GenericOpenAPIError{
body: localVarBody,
error: err.Error(),
}
return localVarReturnValue, localVarHttpResponse, newErr
}
return localVarReturnValue, localVarHttpResponse, nil
} |
// to determine the Content-Type header
localVarHttpContentTypes := []string{"application/json"}
|
mempool.rs | use arraydeque::{ArrayDeque, Wrapping};
use bitcoin::consensus::encode::deserialize;
use bitcoin::Txid;
use itertools::Itertools;
#[cfg(not(feature = "liquid"))]
use bitcoin::consensus::encode::serialize;
#[cfg(feature = "liquid")]
use elements::{encode::serialize, AssetId};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::iter::FromIterator;
use std::sync::Arc;
use std::time::{Duration, Instant};
use crate::chain::{Network, OutPoint, Transaction, TxOut};
use crate::config::Config;
use crate::daemon::Daemon;
use crate::errors::*;
use crate::metrics::{GaugeVec, HistogramOpts, HistogramVec, MetricOpts, Metrics};
use crate::new_index::{
compute_script_hash, schema::FullHash, ChainQuery, FundingInfo, ScriptStats, SpendingInfo,
SpendingInput, TxHistoryInfo, Utxo,
};
use crate::util::fees::{make_fee_histogram, TxFeeInfo};
use crate::util::{extract_tx_prevouts, full_hash, has_prevout, is_spendable, Bytes};
#[cfg(feature = "liquid")]
use crate::elements::asset;
const RECENT_TXS_SIZE: usize = 10;
const BACKLOG_STATS_TTL: u64 = 10;
pub struct Mempool {
chain: Arc<ChainQuery>,
config: Arc<Config>,
txstore: HashMap<Txid, Transaction>,
feeinfo: HashMap<Txid, TxFeeInfo>,
history: HashMap<FullHash, Vec<TxHistoryInfo>>, // ScriptHash -> {history_entries}
edges: HashMap<OutPoint, (Txid, u32)>, // OutPoint -> (spending_txid, spending_vin)
recent: ArrayDeque<[TxOverview; RECENT_TXS_SIZE], Wrapping>, // The N most recent txs to enter the mempool
backlog_stats: (BacklogStats, Instant),
// monitoring
latency: HistogramVec, // mempool requests latency
delta: HistogramVec, // # of added/removed txs
count: GaugeVec, // current state of the mempool
// elements only
#[cfg(feature = "liquid")]
pub asset_history: HashMap<AssetId, Vec<TxHistoryInfo>>,
#[cfg(feature = "liquid")]
pub asset_issuance: HashMap<AssetId, asset::AssetRow>,
}
// A simplified transaction view used for the list of most recent transactions
#[derive(Serialize)]
pub struct TxOverview {
txid: Txid,
fee: u64,
vsize: u32,
#[cfg(not(feature = "liquid"))]
value: u64,
}
impl Mempool {
pub fn new(chain: Arc<ChainQuery>, metrics: &Metrics, config: Arc<Config>) -> Self {
Mempool {
chain,
config,
txstore: HashMap::new(),
feeinfo: HashMap::new(),
history: HashMap::new(),
edges: HashMap::new(),
recent: ArrayDeque::new(),
backlog_stats: (
BacklogStats::default(),
Instant::now() - Duration::from_secs(BACKLOG_STATS_TTL),
),
latency: metrics.histogram_vec(
HistogramOpts::new("mempool_latency", "Mempool requests latency (in seconds)"),
&["part"],
),
delta: metrics.histogram_vec(
HistogramOpts::new("mempool_delta", "# of transactions added/removed"),
&["type"],
),
count: metrics.gauge_vec(
MetricOpts::new("mempool_count", "# of elements currently at the mempool"),
&["type"],
),
#[cfg(feature = "liquid")]
asset_history: HashMap::new(),
#[cfg(feature = "liquid")]
asset_issuance: HashMap::new(),
}
}
pub fn network(&self) -> Network {
self.config.network_type
}
pub fn lookup_txn(&self, txid: &Txid) -> Option<Transaction> {
self.txstore.get(txid).cloned()
}
pub fn lookup_raw_txn(&self, txid: &Txid) -> Option<Bytes> {
self.txstore.get(txid).map(serialize)
}
pub fn lookup_spend(&self, outpoint: &OutPoint) -> Option<SpendingInput> {
self.edges.get(outpoint).map(|(txid, vin)| SpendingInput {
txid: *txid,
vin: *vin,
confirmed: None,
})
}
pub fn has_spend(&self, outpoint: &OutPoint) -> bool {
self.edges.contains_key(outpoint)
}
pub fn get_tx_fee(&self, txid: &Txid) -> Option<u64> {
Some(self.feeinfo.get(txid)?.fee)
}
pub fn has_unconfirmed_parents(&self, txid: &Txid) -> bool {
let tx = match self.txstore.get(txid) {
Some(tx) => tx,
None => return false,
};
tx.input
.iter()
.any(|txin| self.txstore.contains_key(&txin.previous_output.txid))
}
pub fn history(&self, scripthash: &[u8], limit: usize) -> Vec<Transaction> {
let _timer = self.latency.with_label_values(&["history"]).start_timer();
self.history
.get(scripthash)
.map_or_else(|| vec![], |entries| self._history(entries, limit))
}
fn _history(&self, entries: &[TxHistoryInfo], limit: usize) -> Vec<Transaction> {
entries
.iter()
.map(|e| e.get_txid())
.unique()
.take(limit)
.map(|txid| self.txstore.get(&txid).expect("missing mempool tx"))
.cloned()
.collect()
}
pub fn history_txids(&self, scripthash: &[u8], limit: usize) -> Vec<Txid> {
let _timer = self
.latency
.with_label_values(&["history_txids"])
.start_timer();
match self.history.get(scripthash) {
None => vec![],
Some(entries) => entries
.iter()
.map(|e| e.get_txid())
.unique()
.take(limit)
.collect(),
}
}
pub fn utxo(&self, scripthash: &[u8]) -> Vec<Utxo> {
let _timer = self.latency.with_label_values(&["utxo"]).start_timer();
let entries = match self.history.get(scripthash) {
None => return vec![],
Some(entries) => entries,
};
entries
.iter()
.filter_map(|entry| match entry {
TxHistoryInfo::Funding(info) => {
// Liquid requires some additional information from the txo that's not available in the TxHistoryInfo index.
#[cfg(feature = "liquid")]
let txo = self
.lookup_txo(&entry.get_funded_outpoint())
.expect("missing txo");
Some(Utxo {
txid: deserialize(&info.txid).expect("invalid txid"),
vout: info.vout as u32,
value: info.value,
confirmed: None,
#[cfg(feature = "liquid")]
asset: txo.asset,
#[cfg(feature = "liquid")]
nonce: txo.nonce,
#[cfg(feature = "liquid")]
witness: txo.witness,
})
}
TxHistoryInfo::Spending(_) => None,
#[cfg(feature = "liquid")]
TxHistoryInfo::Issuing(_)
| TxHistoryInfo::Burning(_)
| TxHistoryInfo::Pegin(_)
| TxHistoryInfo::Pegout(_) => unreachable!(),
})
.filter(|utxo| !self.has_spend(&OutPoint::from(utxo)))
.collect()
}
// @XXX avoid code duplication with ChainQuery::stats()?
pub fn stats(&self, scripthash: &[u8]) -> ScriptStats {
let _timer = self.latency.with_label_values(&["stats"]).start_timer();
let mut stats = ScriptStats::default();
let mut seen_txids = HashSet::new();
let entries = match self.history.get(scripthash) {
None => return stats,
Some(entries) => entries,
};
for entry in entries {
if seen_txids.insert(entry.get_txid()) {
stats.tx_count += 1;
}
match entry {
#[cfg(not(feature = "liquid"))]
TxHistoryInfo::Funding(info) => {
stats.funded_txo_count += 1;
stats.funded_txo_sum += info.value;
}
#[cfg(not(feature = "liquid"))]
TxHistoryInfo::Spending(info) => {
stats.spent_txo_count += 1;
stats.spent_txo_sum += info.value;
}
// Elements
#[cfg(feature = "liquid")]
TxHistoryInfo::Funding(_) => {
stats.funded_txo_count += 1;
}
#[cfg(feature = "liquid")]
TxHistoryInfo::Spending(_) => {
stats.spent_txo_count += 1;
}
#[cfg(feature = "liquid")]
TxHistoryInfo::Issuing(_)
| TxHistoryInfo::Burning(_)
| TxHistoryInfo::Pegin(_)
| TxHistoryInfo::Pegout(_) => unreachable!(),
};
}
stats
}
// Get all txids in the mempool
pub fn txids(&self) -> Vec<&Txid> {
let _timer = self.latency.with_label_values(&["txids"]).start_timer();
self.txstore.keys().collect()
}
// Get an overview of the most recent transactions
pub fn recent_txs_overview(&self) -> Vec<&TxOverview> {
// We don't bother ever deleting elements from the recent list.
// It may contain outdated txs that are no longer in the mempool,
// until they get pushed out by newer transactions.
self.recent.iter().collect()
}
pub fn backlog_stats(&self) -> &BacklogStats {
&self.backlog_stats.0
}
pub fn update(&mut self, daemon: &Daemon) -> Result<()> {
let _timer = self.latency.with_label_values(&["update"]).start_timer();
let new_txids = daemon
.getmempooltxids()
.chain_err(|| "failed to update mempool from daemon")?;
let old_txids = HashSet::from_iter(self.txstore.keys().cloned());
let to_remove: HashSet<&Txid> = old_txids.difference(&new_txids).collect();
// Download and add new transactions from bitcoind's mempool
let txids: Vec<&Txid> = new_txids.difference(&old_txids).collect();
let to_add = match daemon.gettransactions(&txids) {
Ok(txs) => txs,
Err(err) => {
warn!("failed to get {} transactions: {}", txids.len(), err); // e.g. new block or RBF
return Ok(()); // keep the mempool until next update()
}
};
// Add new transactions
self.add(to_add);
// Remove missing transactions
self.remove(to_remove);
self.count
.with_label_values(&["txs"])
.set(self.txstore.len() as f64);
// Update cached backlog stats (if expired)
if self.backlog_stats.1.elapsed() > Duration::from_secs(BACKLOG_STATS_TTL) {
let _timer = self
.latency
.with_label_values(&["update_backlog_stats"])
.start_timer();
self.backlog_stats = (BacklogStats::new(&self.feeinfo), Instant::now());
}
Ok(())
}
pub fn add_by_txid(&mut self, daemon: &Daemon, txid: &Txid) {
if self.txstore.get(txid).is_none() {
if let Ok(tx) = daemon.getmempooltx(&txid) {
self.add(vec![tx])
}
}
}
fn add(&mut self, txs: Vec<Transaction>) {
self.delta
.with_label_values(&["add"])
.observe(txs.len() as f64);
let _timer = self.latency.with_label_values(&["add"]).start_timer();
let mut txids = vec![];
// Phase 1: add to txstore
for tx in txs {
let txid = tx.txid();
txids.push(txid);
self.txstore.insert(txid, tx);
}
// Phase 2: index history and spend edges (can fail if some txos cannot be found)
let txos = match self.lookup_txos(&self.get_prevouts(&txids)) {
Ok(txos) => txos,
Err(err) => {
warn!("lookup txouts failed: {}", err);
// TODO: should we remove txids from txstore?
return;
}
};
for txid in txids {
let tx = self.txstore.get(&txid).expect("missing mempool tx");
let txid_bytes = full_hash(&txid[..]);
let prevouts = extract_tx_prevouts(&tx, &txos, false);
// Get feeinfo for caching and recent tx overview
let feeinfo = TxFeeInfo::new(&tx, &prevouts, self.config.network_type);
// recent is an ArrayDeque that automatically evicts the oldest elements
self.recent.push_front(TxOverview {
txid,
fee: feeinfo.fee,
vsize: feeinfo.vsize,
#[cfg(not(feature = "liquid"))]
value: prevouts.values().map(|prevout| prevout.value).sum(),
});
self.feeinfo.insert(txid, feeinfo);
// An iterator over (ScriptHash, TxHistoryInfo)
let spending = prevouts.into_iter().map(|(input_index, prevout)| {
let txi = tx.input.get(input_index as usize).unwrap();
(
compute_script_hash(&prevout.script_pubkey),
TxHistoryInfo::Spending(SpendingInfo {
txid: txid_bytes,
vin: input_index as u16,
prev_txid: full_hash(&txi.previous_output.txid[..]),
prev_vout: txi.previous_output.vout as u16,
value: prevout.value,
}),
)
});
let config = &self.config;
// An iterator over (ScriptHash, TxHistoryInfo)
let funding = tx
.output
.iter()
.enumerate()
.filter(|(_, txo)| is_spendable(txo) || config.index_unspendables)
.map(|(index, txo)| {
(
compute_script_hash(&txo.script_pubkey),
TxHistoryInfo::Funding(FundingInfo {
txid: txid_bytes,
vout: index as u16,
value: txo.value,
}),
)
});
// Index funding/spending history entries and spend edges
for (scripthash, entry) in funding.chain(spending) {
self.history
.entry(scripthash)
.or_insert_with(Vec::new)
.push(entry);
}
for (i, txi) in tx.input.iter().enumerate() {
self.edges.insert(txi.previous_output, (txid, i as u32));
}
// Index issued assets & native asset pegins/pegouts/burns
#[cfg(feature = "liquid")]
asset::index_mempool_tx_assets(
&tx,
self.config.network_type,
self.config.parent_network,
&mut self.asset_history,
&mut self.asset_issuance,
);
}
}
pub fn lookup_txo(&self, outpoint: &OutPoint) -> Result<TxOut> {
let mut outpoints = BTreeSet::new();
outpoints.insert(*outpoint);
Ok(self.lookup_txos(&outpoints)?.remove(outpoint).unwrap())
}
pub fn lookup_txos(&self, outpoints: &BTreeSet<OutPoint>) -> Result<HashMap<OutPoint, TxOut>> {
let _timer = self
.latency
.with_label_values(&["lookup_txos"])
.start_timer();
let confirmed_txos = self.chain.lookup_avail_txos(outpoints);
let mempool_txos = outpoints
.iter()
.filter(|outpoint| !confirmed_txos.contains_key(outpoint))
.map(|outpoint| {
self.txstore
.get(&outpoint.txid)
.and_then(|tx| tx.output.get(outpoint.vout as usize).cloned())
.map(|txout| (*outpoint, txout))
.chain_err(|| format!("missing outpoint {:?}", outpoint))
})
.collect::<Result<HashMap<OutPoint, TxOut>>>()?;
let mut txos = confirmed_txos;
txos.extend(mempool_txos);
Ok(txos)
}
fn get_prevouts(&self, txids: &[Txid]) -> BTreeSet<OutPoint> {
let _timer = self
.latency
.with_label_values(&["get_prevouts"])
.start_timer();
txids
.iter()
.map(|txid| self.txstore.get(txid).expect("missing mempool tx"))
.flat_map(|tx| {
tx.input
.iter()
.filter(|txin| has_prevout(txin))
.map(|txin| txin.previous_output)
})
.collect()
}
fn remove(&mut self, to_remove: HashSet<&Txid>) {
self.delta
.with_label_values(&["remove"])
.observe(to_remove.len() as f64);
let _timer = self.latency.with_label_values(&["remove"]).start_timer();
for txid in &to_remove {
self.txstore
.remove(*txid)
.unwrap_or_else(|| panic!("missing mempool tx {}", txid));
self.feeinfo.remove(*txid).or_else(|| {
warn!("missing mempool tx feeinfo {}", txid);
None
});
}
// TODO: make it more efficient (currently it takes O(|mempool|) time)
self.history.retain(|_scripthash, entries| {
entries.retain(|entry| !to_remove.contains(&entry.get_txid()));
!entries.is_empty()
});
#[cfg(feature = "liquid")]
asset::remove_mempool_tx_assets(
&to_remove,
&mut self.asset_history,
&mut self.asset_issuance,
);
self.edges
.retain(|_outpoint, (txid, _vin)| !to_remove.contains(txid));
}
#[cfg(feature = "liquid")]
pub fn asset_history(&self, asset_id: &AssetId, limit: usize) -> Vec<Transaction> {
let _timer = self
.latency
.with_label_values(&["asset_history"])
.start_timer();
self.asset_history
.get(asset_id)
.map_or_else(|| vec![], |entries| self._history(entries, limit))
}
}
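// Illustrative usage sketch (not part of the original source): a minimal polling
// sequence over the public methods defined above. The surrounding values
// (`chain`, `metrics`, `config`, `daemon`, `scripthash`) are assumed to be
// constructed elsewhere.
//
//     let mut mempool = Mempool::new(chain, &metrics, config);
//     mempool.update(&daemon)?;                    // sync with the daemon's mempool
//     let recent = mempool.recent_txs_overview();  // most recent transactions
//     let txs = mempool.history(&scripthash, 25);  // mempool txs touching a script hash
//     let utxos = mempool.utxo(&scripthash);       // unspent mempool outputs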
#[derive(Serialize)]
pub struct BacklogStats {
pub count: u32,
pub vsize: u32, // in virtual bytes (= weight/4)
pub total_fee: u64, // in satoshis
pub fee_histogram: Vec<(f32, u32)>,
}
impl BacklogStats {
fn default() -> Self |
fn new(feeinfo: &HashMap<Txid, TxFeeInfo>) -> Self {
let (count, vsize, total_fee) = feeinfo
.values()
.fold((0, 0, 0), |(count, vsize, fee), feeinfo| {
(count + 1, vsize + feeinfo.vsize, fee + feeinfo.fee)
});
BacklogStats {
count,
vsize,
total_fee,
fee_histogram: make_fee_histogram(feeinfo.values().collect()),
}
}
}
| {
BacklogStats {
count: 0,
vsize: 0,
total_fee: 0,
fee_histogram: vec![(0.0, 0)],
}
} |
main_test.go | // Copyright 2015 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at | //
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
package roachpb_test
import _ "github.com/cockroachdb/cockroach/pkg/util/log" // for flags | //
// http://www.apache.org/licenses/LICENSE-2.0 |
Dictionaries.tsx | import React from 'react';
import {useDictionaries} from "../../hooks/useDictionaries";
export const Dictionaries: React.FC = () => {
useDictionaries(); |
return null;
}; | |
f2029.go | package internal
import (
"math"
"unsafe"
)
func | (ctx *Context, l0 int32, l1 int32, l2 int32, l3 int32) {
var l4 int32
_ = l4
var l5 int32
_ = l5
var l6 int32
_ = l6
var l7 int64
_ = l7
var l8 int64
_ = l8
var l9 int64
_ = l9
var l10 int64
_ = l10
var l11 int64
_ = l11
var l12 int64
_ = l12
var l13 int64
_ = l13
var l14 int64
_ = l14
var l15 int64
_ = l15
var s0i32 int32
_ = s0i32
var s1i32 int32
_ = s1i32
var s2i32 int32
_ = s2i32
var s3i32 int32
_ = s3i32
var s4i32 int32
_ = s4i32
var s5i32 int32
_ = s5i32
var s6i32 int32
_ = s6i32
var s7i32 int32
_ = s7i32
var s8i32 int32
_ = s8i32
var s1i64 int64
_ = s1i64
var s2i64 int64
_ = s2i64
var s3i64 int64
_ = s3i64
var s4i64 int64
_ = s4i64
var s5i64 int64
_ = s5i64
var s6i64 int64
_ = s6i64
var s7i64 int64
_ = s7i64
var s8i64 int64
_ = s8i64
var s1f32 float32
_ = s1f32
var s2f32 float32
_ = s2f32
var s3f32 float32
_ = s3f32
var s4f32 float32
_ = s4f32
var s5f32 float32
_ = s5f32
s0i32 = l3
s1i32 = 1
if s0i32 >= s1i32 {
s0i32 = 1
} else {
s0i32 = 0
}
if s0i32 != 0 {
s0i32 = l1
s1i32 = l2
s0i32 = s0i32 + s1i32
l2 = s0i32
lbl1:
s0i32 = l0
s1i32 = l6
s2i32 = 3
s1i32 = s1i32 << (uint32(s2i32) & 31)
s0i32 = s0i32 + s1i32
s1i32 = l1
s1i64 = *(*int64)(unsafe.Pointer(&ctx.Mem[int(s1i32+0)]))
l7 = s1i64
s2i64 = 48
s1i64 = int64(uint64(s1i64) >> (uint64(s2i64) & 63))
s1i32 = int32(s1i64)
s2i32 = 32767
s1i32 = s1i32 & s2i32
l4 = s1i32
s2i32 = 13
s1i32 = s1i32 << (uint32(s2i32) & 31)
s2i32 = 939524096
s1i32 = s1i32 + s2i32
s2i32 = 0
s3i32 = l4
s4i32 = 1023
if uint32(s3i32) > uint32(s4i32) {
s3i32 = 1
} else {
s3i32 = 0
}
if s3i32 != 0 {
// s1i32 = s1i32
} else {
s1i32 = s2i32
}
s2i64 = l7
s3i64 = 32
s2i64 = int64(uint64(s2i64) >> (uint64(s3i64) & 63))
l12 = s2i64
s2i32 = int32(s2i64)
s3i32 = -2147483648
s2i32 = s2i32 & s3i32
s1i32 = s1i32 | s2i32
s1f32 = math.Float32frombits(uint32(s1i32))
s2i32 = l2
s2i64 = *(*int64)(unsafe.Pointer(&ctx.Mem[int(s2i32+0)]))
l8 = s2i64
s3i64 = 48
s2i64 = int64(uint64(s2i64) >> (uint64(s3i64) & 63))
s2i32 = int32(s2i64)
s3i32 = 32767
s2i32 = s2i32 & s3i32
l4 = s2i32
s3i32 = 13
s2i32 = s2i32 << (uint32(s3i32) & 31)
s3i32 = 939524096
s2i32 = s2i32 + s3i32
s3i32 = 0
s4i32 = l4
s5i32 = 1023
if uint32(s4i32) > uint32(s5i32) {
s4i32 = 1
} else {
s4i32 = 0
}
if s4i32 != 0 {
// s2i32 = s2i32
} else {
s2i32 = s3i32
}
s3i64 = l8
s4i64 = 32
s3i64 = int64(uint64(s3i64) >> (uint64(s4i64) & 63))
l13 = s3i64
s3i32 = int32(s3i64)
s4i32 = -2147483648
s3i32 = s3i32 & s4i32
s2i32 = s2i32 | s3i32
s2f32 = math.Float32frombits(uint32(s2i32))
s1f32 = s1f32 + s2f32
s2i32 = l1
s2i64 = *(*int64)(unsafe.Pointer(&ctx.Mem[int(s2i32+8)]))
l9 = s2i64
s3i64 = 48
s2i64 = int64(uint64(s2i64) >> (uint64(s3i64) & 63))
s2i32 = int32(s2i64)
s3i32 = 32767
s2i32 = s2i32 & s3i32
l4 = s2i32
s3i32 = 13
s2i32 = s2i32 << (uint32(s3i32) & 31)
s3i32 = 939524096
s2i32 = s2i32 + s3i32
s3i32 = 0
s4i32 = l4
s5i32 = 1023
if uint32(s4i32) > uint32(s5i32) {
s4i32 = 1
} else {
s4i32 = 0
}
if s4i32 != 0 {
// s2i32 = s2i32
} else {
s2i32 = s3i32
}
s3i64 = l9
s4i64 = 32
s3i64 = int64(uint64(s3i64) >> (uint64(s4i64) & 63))
l14 = s3i64
s3i32 = int32(s3i64)
s4i32 = -2147483648
s3i32 = s3i32 & s4i32
s2i32 = s2i32 | s3i32
s2f32 = math.Float32frombits(uint32(s2i32))
s1f32 = s1f32 + s2f32
s2i32 = l2
s2i64 = *(*int64)(unsafe.Pointer(&ctx.Mem[int(s2i32+8)]))
l10 = s2i64
s3i64 = 48
s2i64 = int64(uint64(s2i64) >> (uint64(s3i64) & 63))
s2i32 = int32(s2i64)
s3i32 = 32767
s2i32 = s2i32 & s3i32
l4 = s2i32
s3i32 = 13
s2i32 = s2i32 << (uint32(s3i32) & 31)
s3i32 = 939524096
s2i32 = s2i32 + s3i32
s3i32 = 0
s4i32 = l4
s5i32 = 1023
if uint32(s4i32) > uint32(s5i32) {
s4i32 = 1
} else {
s4i32 = 0
}
if s4i32 != 0 {
// s2i32 = s2i32
} else {
s2i32 = s3i32
}
s3i64 = l10
s4i64 = 32
s3i64 = int64(uint64(s3i64) >> (uint64(s4i64) & 63))
l15 = s3i64
s3i32 = int32(s3i64)
s4i32 = -2147483648
s3i32 = s3i32 & s4i32
s2i32 = s2i32 | s3i32
s2f32 = math.Float32frombits(uint32(s2i32))
s1f32 = s1f32 + s2f32
s2f32 = 0.25
s1f32 = s1f32 * s2f32
s1i32 = int32(math.Float32bits(s1f32))
l4 = s1i32
s2i32 = 3
s1i32 = s1i32 << (uint32(s2i32) & 31)
s2i32 = 1073741824
s1i32 = s1i32 + s2i32
s2i32 = 0
s3i32 = l4
s4i32 = 2147475456
s3i32 = s3i32 & s4i32
s4i32 = 947904511
if uint32(s3i32) > uint32(s4i32) {
s3i32 = 1
} else {
s3i32 = 0
}
if s3i32 != 0 {
// s1i32 = s1i32
} else {
s1i32 = s2i32
}
s2i32 = l4
s3i32 = -2147483648
s2i32 = s2i32 & s3i32
s1i32 = s1i32 | s2i32
s2i32 = 16
s1i32 = int32(uint32(s1i32) >> (uint32(s2i32) & 31))
s1i64 = int64(uint32(s1i32))
s2i64 = 48
s1i64 = s1i64 << (uint64(s2i64) & 63)
s2i64 = 2147475456
s3i64 = 0
s4i64 = l7
s5i64 = 32767
s4i64 = s4i64 & s5i64
l11 = s4i64
s4i32 = int32(s4i64)
s5i32 = 1023
if uint32(s4i32) > uint32(s5i32) {
s4i32 = 1
} else {
s4i32 = 0
}
if s4i32 != 0 {
// s2i64 = s2i64
} else {
s2i64 = s3i64
}
s3i64 = l11
s4i64 = 13
s3i64 = s3i64 << (uint64(s4i64) & 63)
s4i64 = 939524096
s3i64 = s3i64 + s4i64
s2i64 = s2i64 & s3i64
s3i64 = l7
s4i64 = 16
s3i64 = s3i64 << (uint64(s4i64) & 63)
s4i64 = 2147483648
s3i64 = s3i64 & s4i64
s2i64 = s2i64 | s3i64
s2i32 = int32(s2i64)
s2f32 = math.Float32frombits(uint32(s2i32))
s3i64 = 2147475456
s4i64 = 0
s5i64 = l8
s6i64 = 32767
s5i64 = s5i64 & s6i64
l11 = s5i64
s5i32 = int32(s5i64)
s6i32 = 1023
if uint32(s5i32) > uint32(s6i32) {
s5i32 = 1
} else {
s5i32 = 0
}
if s5i32 != 0 {
// s3i64 = s3i64
} else {
s3i64 = s4i64
}
s4i64 = l11
s5i64 = 13
s4i64 = s4i64 << (uint64(s5i64) & 63)
s5i64 = 939524096
s4i64 = s4i64 + s5i64
s3i64 = s3i64 & s4i64
s4i64 = l8
s5i64 = 16
s4i64 = s4i64 << (uint64(s5i64) & 63)
s5i64 = 2147483648
s4i64 = s4i64 & s5i64
s3i64 = s3i64 | s4i64
s3i32 = int32(s3i64)
s3f32 = math.Float32frombits(uint32(s3i32))
s2f32 = s2f32 + s3f32
s3i64 = 2147475456
s4i64 = 0
s5i64 = l9
s6i64 = 32767
s5i64 = s5i64 & s6i64
l11 = s5i64
s5i32 = int32(s5i64)
s6i32 = 1023
if uint32(s5i32) > uint32(s6i32) {
s5i32 = 1
} else {
s5i32 = 0
}
if s5i32 != 0 {
// s3i64 = s3i64
} else {
s3i64 = s4i64
}
s4i64 = l11
s5i64 = 13
s4i64 = s4i64 << (uint64(s5i64) & 63)
s5i64 = 939524096
s4i64 = s4i64 + s5i64
s3i64 = s3i64 & s4i64
s4i64 = l9
s5i64 = 16
s4i64 = s4i64 << (uint64(s5i64) & 63)
s5i64 = 2147483648
s4i64 = s4i64 & s5i64
s3i64 = s3i64 | s4i64
s3i32 = int32(s3i64)
s3f32 = math.Float32frombits(uint32(s3i32))
s2f32 = s2f32 + s3f32
s3i64 = 2147475456
s4i64 = 0
s5i64 = l10
s6i64 = 32767
s5i64 = s5i64 & s6i64
l11 = s5i64
s5i32 = int32(s5i64)
s6i32 = 1023
if uint32(s5i32) > uint32(s6i32) {
s5i32 = 1
} else {
s5i32 = 0
}
if s5i32 != 0 {
// s3i64 = s3i64
} else {
s3i64 = s4i64
}
s4i64 = l11
s5i64 = 13
s4i64 = s4i64 << (uint64(s5i64) & 63)
s5i64 = 939524096
s4i64 = s4i64 + s5i64
s3i64 = s3i64 & s4i64
s4i64 = l10
s5i64 = 16
s4i64 = s4i64 << (uint64(s5i64) & 63)
s5i64 = 2147483648
s4i64 = s4i64 & s5i64
s3i64 = s3i64 | s4i64
s3i32 = int32(s3i64)
s3f32 = math.Float32frombits(uint32(s3i32))
s2f32 = s2f32 + s3f32
s3f32 = 0.25
s2f32 = s2f32 * s3f32
s2i32 = int32(math.Float32bits(s2f32))
l4 = s2i32
s3i32 = 16
s2i32 = int32(uint32(s2i32) >> (uint32(s3i32) & 31))
s3i32 = 32768
s2i32 = s2i32 & s3i32
s3i64 = l7
s3i32 = int32(s3i64)
s4i32 = -2147483648
s3i32 = s3i32 & s4i32
s4i64 = l7
s5i64 = 16
s4i64 = int64(uint64(s4i64) >> (uint64(s5i64) & 63))
l7 = s4i64
s4i32 = int32(s4i64)
s5i32 = 32767
s4i32 = s4i32 & s5i32
l5 = s4i32
s5i32 = 13
s4i32 = s4i32 << (uint32(s5i32) & 31)
s5i32 = 939524096
s4i32 = s4i32 + s5i32
s5i32 = 0
s6i32 = l5
s7i32 = 1023
if uint32(s6i32) > uint32(s7i32) {
s6i32 = 1
} else {
s6i32 = 0
}
if s6i32 != 0 {
// s4i32 = s4i32
} else {
s4i32 = s5i32
}
s3i32 = s3i32 | s4i32
s3f32 = math.Float32frombits(uint32(s3i32))
s4i64 = l8
s4i32 = int32(s4i64)
s5i32 = -2147483648
s4i32 = s4i32 & s5i32
s5i64 = l8
s6i64 = 16
s5i64 = int64(uint64(s5i64) >> (uint64(s6i64) & 63))
l8 = s5i64
s5i32 = int32(s5i64)
s6i32 = 32767
s5i32 = s5i32 & s6i32
l5 = s5i32
s6i32 = 13
s5i32 = s5i32 << (uint32(s6i32) & 31)
s6i32 = 939524096
s5i32 = s5i32 + s6i32
s6i32 = 0
s7i32 = l5
s8i32 = 1023
if uint32(s7i32) > uint32(s8i32) {
s7i32 = 1
} else {
s7i32 = 0
}
if s7i32 != 0 {
// s5i32 = s5i32
} else {
s5i32 = s6i32
}
s4i32 = s4i32 | s5i32
s4f32 = math.Float32frombits(uint32(s4i32))
s3f32 = s3f32 + s4f32
s4i64 = l9
s4i32 = int32(s4i64)
s5i32 = -2147483648
s4i32 = s4i32 & s5i32
s5i64 = l9
s6i64 = 16
s5i64 = int64(uint64(s5i64) >> (uint64(s6i64) & 63))
l9 = s5i64
s5i32 = int32(s5i64)
s6i32 = 32767
s5i32 = s5i32 & s6i32
l5 = s5i32
s6i32 = 13
s5i32 = s5i32 << (uint32(s6i32) & 31)
s6i32 = 939524096
s5i32 = s5i32 + s6i32
s6i32 = 0
s7i32 = l5
s8i32 = 1023
if uint32(s7i32) > uint32(s8i32) {
s7i32 = 1
} else {
s7i32 = 0
}
if s7i32 != 0 {
// s5i32 = s5i32
} else {
s5i32 = s6i32
}
s4i32 = s4i32 | s5i32
s4f32 = math.Float32frombits(uint32(s4i32))
s3f32 = s3f32 + s4f32
s4i64 = l10
s4i32 = int32(s4i64)
s5i32 = -2147483648
s4i32 = s4i32 & s5i32
s5i64 = l10
s6i64 = 16
s5i64 = int64(uint64(s5i64) >> (uint64(s6i64) & 63))
l10 = s5i64
s5i32 = int32(s5i64)
s6i32 = 32767
s5i32 = s5i32 & s6i32
l5 = s5i32
s6i32 = 13
s5i32 = s5i32 << (uint32(s6i32) & 31)
s6i32 = 939524096
s5i32 = s5i32 + s6i32
s6i32 = 0
s7i32 = l5
s8i32 = 1023
if uint32(s7i32) > uint32(s8i32) {
s7i32 = 1
} else {
s7i32 = 0
}
if s7i32 != 0 {
// s5i32 = s5i32
} else {
s5i32 = s6i32
}
s4i32 = s4i32 | s5i32
s4f32 = math.Float32frombits(uint32(s4i32))
s3f32 = s3f32 + s4f32
s4f32 = 0.25
s3f32 = s3f32 * s4f32
s3i32 = int32(math.Float32bits(s3f32))
l5 = s3i32
s4i32 = -2147483648
s3i32 = s3i32 & s4i32
s2i32 = s2i32 | s3i32
s3i32 = l5
s4i32 = 3
s3i32 = s3i32 << (uint32(s4i32) & 31)
s4i32 = 1073741824
s3i32 = s3i32 + s4i32
s4i32 = -65536
s3i32 = s3i32 & s4i32
s4i32 = 0
s5i32 = l5
s6i32 = 2147475456
s5i32 = s5i32 & s6i32
s6i32 = 947904511
if uint32(s5i32) > uint32(s6i32) {
s5i32 = 1
} else {
s5i32 = 0
}
if s5i32 != 0 {
// s3i32 = s3i32
} else {
s3i32 = s4i32
}
s2i32 = s2i32 | s3i32
s3i32 = l4
s4i32 = -134217728
s3i32 = s3i32 - s4i32
s4i32 = 13
s3i32 = int32(uint32(s3i32) >> (uint32(s4i32) & 31))
s4i32 = 65535
s5i32 = 0
s6i32 = l4
s7i32 = 2147475456
s6i32 = s6i32 & s7i32
s7i32 = 947904511
if uint32(s6i32) > uint32(s7i32) {
s6i32 = 1
} else {
s6i32 = 0
}
if s6i32 != 0 {
// s4i32 = s4i32
} else {
s4i32 = s5i32
}
s3i32 = s3i32 & s4i32
s2i32 = s2i32 | s3i32
s2i64 = int64(uint32(s2i32))
s1i64 = s1i64 | s2i64
s2i32 = 65535
s3i32 = 0
s4i64 = 2147475456
s5i64 = 0
s6i64 = l12
s7i64 = 32767
s6i64 = s6i64 & s7i64
l12 = s6i64
s6i32 = int32(s6i64)
s7i32 = 1023
if uint32(s6i32) > uint32(s7i32) {
s6i32 = 1
} else {
s6i32 = 0
}
if s6i32 != 0 {
// s4i64 = s4i64
} else {
s4i64 = s5i64
}
s5i64 = l12
s6i64 = 13
s5i64 = s5i64 << (uint64(s6i64) & 63)
s6i64 = 939524096
s5i64 = s5i64 + s6i64
s4i64 = s4i64 & s5i64
s5i64 = l7
s6i64 = 2147483648
s5i64 = s5i64 & s6i64
s4i64 = s4i64 | s5i64
s4i32 = int32(s4i64)
s4f32 = math.Float32frombits(uint32(s4i32))
s5i64 = 2147475456
s6i64 = 0
s7i64 = l13
s8i64 = 32767
s7i64 = s7i64 & s8i64
l7 = s7i64
s7i32 = int32(s7i64)
s8i32 = 1023
if uint32(s7i32) > uint32(s8i32) {
s7i32 = 1
} else {
s7i32 = 0
}
if s7i32 != 0 {
// s5i64 = s5i64
} else {
s5i64 = s6i64
}
s6i64 = l7
s7i64 = 13
s6i64 = s6i64 << (uint64(s7i64) & 63)
s7i64 = 939524096
s6i64 = s6i64 + s7i64
s5i64 = s5i64 & s6i64
s6i64 = l8
s7i64 = 2147483648
s6i64 = s6i64 & s7i64
s5i64 = s5i64 | s6i64
s5i32 = int32(s5i64)
s5f32 = math.Float32frombits(uint32(s5i32))
s4f32 = s4f32 + s5f32
s5i64 = 2147475456
s6i64 = 0
s7i64 = l14
s8i64 = 32767
s7i64 = s7i64 & s8i64
l7 = s7i64
s7i32 = int32(s7i64)
s8i32 = 1023
if uint32(s7i32) > uint32(s8i32) {
s7i32 = 1
} else {
s7i32 = 0
}
if s7i32 != 0 {
// s5i64 = s5i64
} else {
s5i64 = s6i64
}
s6i64 = l7
s7i64 = 13
s6i64 = s6i64 << (uint64(s7i64) & 63)
s7i64 = 939524096
s6i64 = s6i64 + s7i64
s5i64 = s5i64 & s6i64
s6i64 = l9
s7i64 = 2147483648
s6i64 = s6i64 & s7i64
s5i64 = s5i64 | s6i64
s5i32 = int32(s5i64)
s5f32 = math.Float32frombits(uint32(s5i32))
s4f32 = s4f32 + s5f32
s5i64 = 2147475456
s6i64 = 0
s7i64 = l15
s8i64 = 32767
s7i64 = s7i64 & s8i64
l7 = s7i64
s7i32 = int32(s7i64)
s8i32 = 1023
if uint32(s7i32) > uint32(s8i32) {
s7i32 = 1
} else {
s7i32 = 0
}
if s7i32 != 0 {
// s5i64 = s5i64
} else {
s5i64 = s6i64
}
s6i64 = l7
s7i64 = 13
s6i64 = s6i64 << (uint64(s7i64) & 63)
s7i64 = 939524096
s6i64 = s6i64 + s7i64
s5i64 = s5i64 & s6i64
s6i64 = l10
s7i64 = 2147483648
s6i64 = s6i64 & s7i64
s5i64 = s5i64 | s6i64
s5i32 = int32(s5i64)
s5f32 = math.Float32frombits(uint32(s5i32))
s4f32 = s4f32 + s5f32
s5f32 = 0.25
s4f32 = s4f32 * s5f32
s4i32 = int32(math.Float32bits(s4f32))
l4 = s4i32
s5i32 = 2147475456
s4i32 = s4i32 & s5i32
s5i32 = 947904511
if uint32(s4i32) > uint32(s5i32) {
s4i32 = 1
} else {
s4i32 = 0
}
if s4i32 != 0 {
// s2i32 = s2i32
} else {
s2i32 = s3i32
}
s3i32 = l4
s4i32 = -134217728
s3i32 = s3i32 - s4i32
s4i32 = 13
s3i32 = int32(uint32(s3i32) >> (uint32(s4i32) & 31))
s2i32 = s2i32 & s3i32
s3i32 = l4
s4i32 = 16
s3i32 = int32(uint32(s3i32) >> (uint32(s4i32) & 31))
s4i32 = 32768
s3i32 = s3i32 & s4i32
s2i32 = s2i32 | s3i32
s2i64 = int64(uint32(s2i32))
s3i64 = 32
s2i64 = s2i64 << (uint64(s3i64) & 63)
s1i64 = s1i64 | s2i64
*(*uint64)(unsafe.Pointer(&ctx.Mem[int(s0i32+0)])) = uint64(s1i64)
s0i32 = l2
s1i32 = 16
s0i32 = s0i32 + s1i32
l2 = s0i32
s0i32 = l1
s1i32 = 16
s0i32 = s0i32 + s1i32
l1 = s0i32
s0i32 = l6
s1i32 = 1
s0i32 = s0i32 + s1i32
l6 = s0i32
s1i32 = l3
if s0i32 != s1i32 {
s0i32 = 1
} else {
s0i32 = 0
}
if s0i32 != 0 {
goto lbl1
}
}
}
| f2029 |
logger_factory.py | #####################################################################################
# MIT License #
# #
# Copyright (C) 2018 Charly Lamothe #
# #
# This file is part of copyright-updater. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
import logging
from logging.handlers import RotatingFileHandler
import os
import errno
class LoggerFactory:
@staticmethod
def create(path, module_name):
# Create logger
| logger = logging.getLogger(module_name)
logger.setLevel(logging.DEBUG)
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
# Create file handler
fh = RotatingFileHandler(path + os.sep + module_name + '.log', maxBytes=1000000, backupCount=5)
fh.setLevel(logging.DEBUG)
# Create formatter
formatter = logging.Formatter('%(asctime)s - %(filename)s:%(lineno)s - %(name)s - %(levelname)s - %(message)s')
# Add formatter to handler
fh.setFormatter(formatter)
# Add fh to logger
logger.addHandler(fh)
return logger |
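# Illustrative usage sketch (an assumption, not part of the original file): the
# factory is typically called once per module; the path and module name below are
# placeholders.
#
#     logger = LoggerFactory.create('logs', 'copyright_updater')
#     logger.info('this message goes to logs/copyright_updater.log')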
|
MnemonicSave.ts | import { Storage } from "@utils/StorageArray";
const CHECK_NEEDED_KEY = "mnemonic-check-needed";
| export class MnemonicSave {
public mnemonic: string;
constructor(private readonly storage: Storage) {
this.mnemonic = "";
}
async isChallengeNeeded() {
return await this.storage.hasItem(CHECK_NEEDED_KEY);
}
async setChallengeNeeded() {
await this.storage.setItem(CHECK_NEEDED_KEY, JSON.stringify(true));
}
async setChallengeNotNeeded() {
await this.storage.removeItem(CHECK_NEEDED_KEY);
}
} | |
lotto.js | /**
* @author Nuno Cunha
*/
'use strict';
const express = require('express');
const Lotto = require('../models/lotto.js');
const router = express.Router();
router.get("/", (req, res) => {
let data = {};
data.draw = new Lotto().draw();
if (req.query.row != null) {
data.player = convertToArrayInt(req.query.row);
data.matches = compare(data.draw, data.player);
} else {
data.player = [];
data.matches = 0;
}
res.render("lotto", data);
});
function convertToArrayInt(queryString) {
let result = queryString.split(',');
for (let i in result) {
result[i] = parseInt(result[i], 10);
}
return result;
}
function | (drawnNumbers, playerNumbers) {
let totalMatches = 0;
if (playerNumbers.length === 7) {
for (let i = 0; i < playerNumbers.length; i++) {
for (let j = 0; j < drawnNumbers.length; j++) {
if (playerNumbers[i] === drawnNumbers[j]) {
totalMatches++;
}
}
}
}
return totalMatches;
}
module.exports = router;
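// Illustrative usage sketch (assumptions: the router is mounted at /lotto and the
// player supplies exactly seven numbers, as required by compare() above):
//
//     GET /lotto?row=1,7,13,21,28,34,39
//
// The query string is parsed by convertToArrayInt() and the match count is
// rendered by the "lotto" view as `matches`.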
| compare |
main.rs | // Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! Alacritty - The GPU Enhanced Terminal
#![cfg_attr(feature = "cargo-clippy", deny(clippy, if_not_else, enum_glob_use, wrong_pub_self_convention))]
#![cfg_attr(feature = "nightly", feature(core_intrinsics))]
#![cfg_attr(all(test, feature = "bench"), feature(test))]
#[macro_use]
extern crate alacritty;
#[macro_use]
extern crate log;
#[cfg(target_os = "macos")]
extern crate dirs;
use std::error::Error;
use std::sync::Arc;
#[cfg(target_os = "macos")]
use std::env;
use alacritty::cli;
use alacritty::config::{self, Config};
use alacritty::display::Display;
use alacritty::event;
use alacritty::event_loop::{self, EventLoop, Msg};
#[cfg(target_os = "macos")]
use alacritty::locale;
use alacritty::logging;
use alacritty::sync::FairMutex;
use alacritty::term::{Term}; | use alacritty::tty::{self, process_should_exit};
use alacritty::util::fmt::Red;
fn main() {
// Load command line options and config
let options = cli::Options::load();
let config = load_config(&options).update_dynamic_title(&options);
// Switch to home directory
#[cfg(target_os = "macos")]
env::set_current_dir(dirs::home_dir().unwrap()).unwrap();
// Set locale
#[cfg(target_os = "macos")]
locale::set_locale_environment();
// Run alacritty
if let Err(err) = run(config, &options) {
die!("Alacritty encountered an unrecoverable error:\n\n\t{}\n", Red(err));
}
info!("Goodbye.");
}
/// Load configuration
///
/// If a configuration file is given as a command line argument we don't
/// generate a default file. If an empty configuration file is given, i.e.
/// /dev/null, we load the compiled-in defaults.
fn load_config(options: &cli::Options) -> Config {
let config_path = options.config_path()
.or_else(Config::installed_config)
.unwrap_or_else(|| {
Config::write_defaults()
.unwrap_or_else(|err| die!("Write defaults config failure: {}", err))
});
Config::load_from(&*config_path).unwrap_or_else(|err| {
eprintln!("Error: {}; Loading default config", err);
Config::default()
})
}
/// Run Alacritty
///
/// Creates a window, the terminal state, pty, I/O event loop, input processor,
/// config change monitor, and runs the main display loop.
fn run(mut config: Config, options: &cli::Options) -> Result<(), Box<Error>> {
// Initialize the logger first as to capture output from other subsystems
logging::initialize(options)?;
info!("Welcome to Alacritty.");
if let Some(config_path) = config.path() {
info!("Configuration loaded from {}", config_path.display());
};
// Create a display.
//
// The display manages a window and can draw the terminal
let mut display = Display::new(&config, options)?;
info!(
"PTY Dimensions: {:?} x {:?}",
display.size().lines(),
display.size().cols()
);
// Create the terminal
//
// This object contains all of the state about what's being displayed. It's
// wrapped in a clonable mutex since both the I/O loop and display need to
// access it.
let terminal = Term::new(&config, display.size().to_owned());
let terminal = Arc::new(FairMutex::new(terminal));
// Find the window ID for setting $WINDOWID
let window_id = display.get_window_id();
// Create the pty
//
// The pty forks a process to run the shell on the slave side of the
// pseudoterminal. A file descriptor for the master side is retained for
// reading/writing to the shell.
let mut pty = tty::new(&config, options, &display.size(), window_id);
// Create the pseudoterminal I/O loop
//
// pty I/O is run on another thread so as not to occupy cycles used by the
// renderer and input processing. Note that access to the terminal state is
// synchronized since the I/O loop updates the state, and the display
// consumes it periodically.
let event_loop = EventLoop::new(
Arc::clone(&terminal),
display.notifier(),
pty.reader(),
options.ref_test,
);
// The event loop channel allows write requests from the event processor
// to be sent to the loop and ultimately written to the pty.
let loop_tx = event_loop.channel();
// Event processor
//
// Need the Rc<RefCell<_>> here since a ref is shared in the resize callback
let mut processor = event::Processor::new(
event_loop::Notifier(event_loop.channel()),
display.resize_channel(),
options,
&config,
options.ref_test,
display.size().to_owned(),
);
// Create a config monitor when config was loaded from path
//
// The monitor watches the config file for changes and reloads it. Pending
// config changes are processed in the main loop.
let config_monitor = match (options.live_config_reload, config.live_config_reload()) {
// Start monitor if CLI flag says yes
(Some(true), _) |
// Or if no CLI flag was passed and the config says yes
(None, true) => config.path()
.map(|path| config::Monitor::new(path, display.notifier())),
// Otherwise, don't start the monitor
_ => None,
};
// Kick off the I/O thread
let io_thread = event_loop.spawn(None);
// Main display loop
loop {
// Process input and window events
let mut terminal = processor.process_events(&terminal, display.window());
// Handle config reloads
if let Some(new_config) = config_monitor
.as_ref()
.and_then(|monitor| monitor.pending_config())
{
config = new_config.update_dynamic_title(&options);
display.update_config(&config);
processor.update_config(&config);
terminal.update_config(&config);
terminal.dirty = true;
}
// Maybe draw the terminal
if terminal.needs_draw() {
// Try to update the position of the input method editor
display.update_ime_position(&terminal);
// Handle pending resize events
//
// The second argument is a list of types that want to be notified
// of display size changes.
display.handle_resize(&mut terminal, &config, &mut [&mut pty, &mut processor]);
// Draw the current state of the terminal
display.draw(terminal, &config, processor.selection.as_ref());
}
// Begin shutdown if the flag was raised.
if process_should_exit() {
break;
}
}
loop_tx.send(Msg::Shutdown).expect("Error sending shutdown to event loop");
// FIXME patch notify library to have a shutdown method
// config_reloader.join().ok();
// Wait for the I/O thread to finish
let _ = io_thread.join();
Ok(())
} | |
functions.rs | #[cfg(windows)]
extern crate winapi;
extern crate libloading;
use libloading::{Symbol, Library};
const LIBPATH: &'static str = "target/libtest_helpers.module";
fn make_helpers() {
static ONCE: ::std::sync::Once = ::std::sync::Once::new();
ONCE.call_once(|| {
let rustc = std::env::var_os("RUSTC").unwrap_or_else(|| { "rustc".into() });
let mut cmd = ::std::process::Command::new(rustc);
cmd
.arg("src/test_helpers.rs")
.arg("-o")
.arg(LIBPATH);
if let Some(target) = std::env::var_os("TARGET") {
cmd.arg("--target").arg(target);
} else {
eprintln!("WARNING: $TARGET NOT SPECIFIED! BUILDING HELPER MODULE FOR NATIVE TARGET.");
}
assert!(cmd
.status()
.expect("could not compile the test helpers!")
.success()
);
});
}
#[test]
fn test_id_u32() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
let f: Symbol<unsafe extern fn(u32) -> u32> = lib.get(b"test_identity_u32\0").unwrap();
assert_eq!(42, f(42));
}
}
#[repr(C)]
#[derive(Clone,Copy,PartialEq,Debug)]
struct S {
a: u64,
b: u32,
c: u16,
d: u8
}
#[test]
fn test_id_struct() |
#[test]
fn test_0_no_0() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
let f: Symbol<unsafe extern fn(S) -> S> = lib.get(b"test_identity_struct\0").unwrap();
let f2: Symbol<unsafe extern fn(S) -> S> = lib.get(b"test_identity_struct").unwrap();
assert_eq!(*f, *f2);
}
}
#[test]
fn wrong_name_fails() {
unsafe {
Library::new("target/this_location_is_definitely_non existent:^~").err().unwrap();
}
}
#[test]
fn missing_symbol_fails() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
lib.get::<*mut ()>(b"test_does_not_exist").err().unwrap();
lib.get::<*mut ()>(b"test_does_not_exist\0").err().unwrap();
}
}
#[test]
fn interior_null_fails() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
lib.get::<*mut ()>(b"test_does\0_not_exist").err().unwrap();
lib.get::<*mut ()>(b"test\0_does_not_exist\0").err().unwrap();
}
}
#[test]
fn test_incompatible_type() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
assert!(match lib.get::<()>(b"test_identity_u32\0") {
Err(libloading::Error::IncompatibleSize) => true,
_ => false,
})
}
}
#[test]
fn test_incompatible_type_named_fn() {
make_helpers();
unsafe fn get<'a, T>(l: &'a Library, _: T) -> Result<Symbol<'a, T>, libloading::Error> {
l.get::<T>(b"test_identity_u32\0")
}
unsafe {
let lib = Library::new(LIBPATH).unwrap();
assert!(match get(&lib, test_incompatible_type_named_fn) {
Err(libloading::Error::IncompatibleSize) => true,
_ => false,
})
}
}
#[test]
fn test_static_u32() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
let var: Symbol<*mut u32> = lib.get(b"TEST_STATIC_U32\0").unwrap();
**var = 42;
let help: Symbol<unsafe extern fn() -> u32> = lib.get(b"test_get_static_u32\0").unwrap();
assert_eq!(42, help());
}
}
#[test]
fn test_static_ptr() {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
let var: Symbol<*mut *mut ()> = lib.get(b"TEST_STATIC_PTR\0").unwrap();
**var = *var as *mut _;
let works: Symbol<unsafe extern fn() -> bool> =
lib.get(b"test_check_static_ptr\0").unwrap();
assert!(works());
}
}
#[test]
// Something about i686-pc-windows-gnu makes dll initialisation code call abort when it is loaded
// and unloaded many times. So far it seems like an issue with mingw, not libloading, so ignoring
// the target. Especially since it is very unlikely to be fixed given the state of its support.
#[cfg(not(all(target_arch="x86", target_os="windows", target_env="gnu")))]
fn manual_close_many_times() {
make_helpers();
let join_handles: Vec<_> = (0..16).map(|_| {
std::thread::spawn(|| unsafe {
for _ in 0..10000 {
let lib = Library::new(LIBPATH).expect("open library");
let _: Symbol<unsafe extern fn(u32) -> u32> =
lib.get(b"test_identity_u32").expect("get fn");
lib.close().expect("close is successful");
}
})
}).collect();
for handle in join_handles {
handle.join().expect("thread should succeed");
}
}
#[cfg(unix)]
#[test]
fn library_this_get() {
use libloading::os::unix::Library;
make_helpers();
// SAFE: functions are never called
unsafe {
let _lib = Library::new(LIBPATH).unwrap();
let this = Library::this();
// Library we loaded in `_lib` (should be RTLD_LOCAL).
assert!(this.get::<unsafe extern "C" fn()>(b"test_identity_u32").is_err());
// Something obscure from libc...
assert!(this.get::<unsafe extern "C" fn()>(b"freopen").is_ok());
}
}
#[cfg(windows)]
#[test]
fn library_this() {
use libloading::os::windows::Library;
make_helpers();
unsafe {
// SAFE: well-known library without initialisers is loaded.
let _lib = Library::new(LIBPATH).unwrap();
let this = Library::this().expect("this library");
// SAFE: functions are never called.
// Library we loaded in `_lib`.
assert!(this.get::<unsafe extern "C" fn()>(b"test_identity_u32").is_err());
// Something "obscure" from kernel32...
assert!(this.get::<unsafe extern "C" fn()>(b"GetLastError").is_err());
}
}
#[cfg(windows)]
#[test]
fn works_getlasterror() {
use winapi::um::errhandlingapi;
use winapi::shared::minwindef::DWORD;
use libloading::os::windows::{Library, Symbol};
unsafe {
let lib = Library::new("kernel32.dll").unwrap();
let gle: Symbol<unsafe extern "system" fn() -> DWORD> = lib.get(b"GetLastError").unwrap();
errhandlingapi::SetLastError(42);
assert_eq!(errhandlingapi::GetLastError(), gle())
}
}
#[cfg(windows)]
#[test]
fn works_getlasterror0() {
use winapi::um::errhandlingapi;
use winapi::shared::minwindef::DWORD;
use libloading::os::windows::{Library, Symbol};
unsafe {
let lib = Library::new("kernel32.dll").unwrap();
let gle: Symbol<unsafe extern "system" fn() -> DWORD> = lib.get(b"GetLastError\0").unwrap();
errhandlingapi::SetLastError(42);
assert_eq!(errhandlingapi::GetLastError(), gle())
}
}
#[cfg(windows)]
#[test]
fn library_open_already_loaded() {
use libloading::os::windows::Library;
// Present on Windows systems and NOT used by any other tests to prevent races.
const LIBPATH: &str = "Msftedit.dll";
// Not loaded yet.
assert!(match Library::open_already_loaded(LIBPATH) {
Err(libloading::Error::GetModuleHandleExW { .. }) => true,
_ => false,
});
unsafe {
let _lib = Library::new(LIBPATH).unwrap();
// Loaded now.
assert!(Library::open_already_loaded(LIBPATH).is_ok());
}
}
| {
make_helpers();
unsafe {
let lib = Library::new(LIBPATH).unwrap();
let f: Symbol<unsafe extern fn(S) -> S> = lib.get(b"test_identity_struct\0").unwrap();
assert_eq!(S { a: 1, b: 2, c: 3, d: 4 }, f(S { a: 1, b: 2, c: 3, d: 4 }));
}
} |
named_test.go | package logger
import (
"bytes"
"io"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
) | tests := []struct {
In string
In2 string
ExpectOutput string
}{
{In: "foobar", ExpectOutput: "[test] foobar"},
{In: "foobar\nbaz", ExpectOutput: "[test] foobar\n[test] baz"},
{In: "foobar\nbaz\n", ExpectOutput: "[test] foobar\n[test] baz\n"},
{In: "foobar\n", ExpectOutput: "[test] foobar\n"},
{In: "foobar", In2: "baz", ExpectOutput: "[test] foobarbaz"},
{In: "foobar\n", In2: "baz", ExpectOutput: "[test] foobar\n[test] baz"},
{In: "foobar\n", In2: "baz\n", ExpectOutput: "[test] foobar\n[test] baz\n"},
}
for _, tt := range tests {
t.Run(tt.In, func(t *testing.T) {
buf := new(bytes.Buffer)
w := NewNamedWriter(buf, "test")
n, err := io.WriteString(w, tt.In)
require.NoError(t, err)
assert.Equal(t, len(tt.In), n)
if tt.In2 != "" {
n, err = io.WriteString(w, tt.In2)
require.NoError(t, err)
assert.Equal(t, len(tt.In2), n)
}
assert.Equal(t, tt.ExpectOutput, buf.String())
})
}
} |
func TestNamedWriter(t *testing.T) { |
main.rs | use std::env;
use std::fs;
use color_eyre::Result;
pub mod crd;
mod err;
mod util;
/// Setup different logging & debugging services
fn setup() -> Result<()> |
fn main() -> Result<()> {
setup()?;
// retrieve a filename path if given
// not using clap as we're only focusing on a single arg...
let path = env::args().nth(1).unwrap_or_else(|| ".".to_owned());
let full_path = format!("{path}/crd.yaml");
let spec = crd::generate_crd().expect("Expect to generate CRD");
fs::write(&full_path, spec)?;
println!("✅ CRD has been generated at the path {full_path}");
Ok(())
}
| {
if std::env::var("RUST_BACKTRACE").is_err() {
std::env::set_var("RUST_BACKTRACE", "full");
}
color_eyre::install()
} |
ui.rs | use trybuild::TestCases;
#[test]
fn ui() { | t.compile_fail("ui/single/*.rs");
t.compile_fail("ui/multi/*.rs");
} | let t = TestCases::new(); |
LoadConfigFileEvalStats_5G.py | # Description
# A sample Python REST API script to:
# - Load a saved configuration file
# - Run traffic
# - Get stats and evaluate for user defined expected stat values.
# - Get test result
#
# - For IxLoad Gateway running in Windows only. Linux Gateway is not supported at this time.
# - This script will set the license model to "Perpetual Mode". This is a 5G requirement.
#
# If the saved config file is located on a remote pc, this script could upload it to the gateway.
# Otherwise, the saved config file must be already in the IxLoad API gateway server.
#
# Requirements
# IxL_RestApi.py
# Air Mosaic installed in the Windows OS where IxLoad is installed
# The Air Mosaic version must be the same version as IxLoad for compatibility
# 5G cell
import os, sys, time, signal, traceback, platform
# Insert the Modules path to env in order to import IxL_RestApi.py
currentDir = os.path.abspath(os.path.dirname(__file__))
# Automatically create the os path to the IxL_RestApi.py module for this script to use
if platform.system() == 'Windows':
sys.path.insert(0, (currentDir.replace('SampleScripts\\LoadSavedConfigFile', 'Modules')))
else:
sys.path.insert(0, (currentDir.replace('SampleScripts/LoadSavedConfigFile', 'Modules')))
from IxL_RestApi import *
# Choices of IxLoad Gateway server OS: linux or windows
serverOs = 'windows'
# Which IxLoad version are you using for your test?
# To view all the installed versions, go on a web browser and enter:
# http://<server ip>:8080/api/v0/applicationTypes
ixLoadVersion = '9.10.115.43' ;# EA
# Do you want to delete the session at the end of the test or if the test failed?
deleteSession = True
forceTakePortOwnership = True
# API-Key: Use your user API-Key if you want added security
apiKey = None
# The saved config file to load
rxfFile = '5G-eLSU_HTTP_UXM_stats.rxf'
if serverOs == 'windows':
apiServerIp = '192.168.129.6'
# Where to store all of the csv result files in Windows
resultsDir = 'c:\\Results'
# Where to upload the config file or where to tell IxLoad the location if you're not uploading it.
rxfFileOnServer = 'C:\\Results\\{}'.format(rxfFile)
# Where to put the downloaded csv results
saveResultsInPath = currentDir
# Do you need to upload your saved config file to the server?
# If not, a saved config must be already in the IxLoad gateway server filesystem.
upLoadFile = True
# On the local host where you are running this script.
# The path to the saved config file. In this example, get it from the current folder.
if platform.system() == 'Linux':
localConfigFileToUpload = '{}/{}'.format(currentDir, rxfFile)
else:
localConfigFileToUpload = '{}\\{}'.format(currentDir, rxfFile)
# The path where you want to download the csv result files to. This is mostly used if using a Linux Gateway server.
# If you're using IxLoad in Windows, SSH must be installed. Otherwise, this variable will be ignored.
scpDestPath = currentDir
# For IxLoad versions prior to 8.50 that don't have the REST API to download results.
# Set to True if you want to save run time stat results to CSV files.
saveStatsToCsvFile = True
apiServerIpPort = 8443 ;# http=8080. https=8443 (https is supported starting 8.50)
licenseServerIp = '192.168.129.6'
# For 5G, the license model must be Perpetual Mode
licenseModel = 'Perpetual Mode'
# To assign ports for testing. Format = (cardId,portId)
# Traffic1@Network1 are activity names.
# To get the Activity names, go to: /ixload/test/activeTest/communityList
communityPortList1 = {
'chassisIp': '192.168.129.15',
'Traffic2@Network1': [(1,1)],
}
communityPortList2 = {
'chassisIp': '192.168.129.15',
'Traffic1@SGi': [(1,2)],
}
# Stat names to display at run time.
# To see how to get the stat names, go to the link below for step-by-step guidance:
# https://www.openixia.com/tutorials?subject=ixLoad/getStatName&page=fromApiBrowserForRestApi.html
#
# What this does:
# Get run time stats and evaluate the stats with an operator and the expected value.
# Due to stats going through ramp up and ramp down, stats will fluctuate.
# Once the stat hits and maintains the expected threshold value, the stat is marked as passed.
#
# If evaluating stats at run time is not what you need, use PollStats() instead shown
# in sample script LoadConfigFile.py
#
# operator options: None, >, <, <=, >=
statsDict = {
'HTTPClient': [{'caption': 'Rx Rate (Kbps)', 'operator': '>', 'expect': 5000000}]
}
#NRstatsDict = {
# 'UESIM-NRLayer1Statistics': [{'caption': "CRC OK TBs - PDSCH", 'operator': '>', 'expect': 1000}]
#}
try:
restObj = Main(apiServerIp=apiServerIp,
apiServerIpPort=apiServerIpPort,
osPlatform=serverOs,
deleteSession=deleteSession,
pollStatusInterval=1,
apiKey=apiKey,
generateRestLogFile=True)
# sessionId is an existing open session that you would like to connect to instead of starting a new session.
restObj.connect(ixLoadVersion, sessionId=None, timeout=120)
restObj.configLicensePreferences(licenseServerIp=licenseServerIp, licenseModel=licenseModel)
restObj.setResultDir(resultsDir, createTimestampFolder=True)
if upLoadFile == True:
|
restObj.loadConfigFile(rxfFileOnServer)
restObj.assignChassisAndPorts([communityPortList1, communityPortList2])
if forceTakePortOwnership:
restObj.enableForceOwnership()
# Optional: Modify the sustain time
#restObj.configTimeline(name='Timeline1', sustainTime=12)
runTestOperationsId = restObj.runTraffic()
restObj.pollStatsAndCheckStatResults(statsDict,
csvFile=saveStatsToCsvFile,
csvFilePrependName=None,
pollStatInterval=2,
exitAfterPollingIteration=None)
testResult = restObj.getTestResults()
restObj.waitForActiveTestToUnconfigure()
restObj.downloadResults(targetPath=saveResultsInPath)
restObj.retrievePortCaptureFileForAssignedPorts(currentDir)
if deleteSession:
restObj.deleteSessionId()
except (IxLoadRestApiException, Exception) as errMsg:
print('\n%s' % traceback.format_exc())
if deleteSession:
restObj.abortActiveTest()
restObj.deleteSessionId()
sys.exit(errMsg)
except KeyboardInterrupt:
print('\nCTRL-C detected.')
if deleteSession:
restObj.abortActiveTest()
restObj.deleteSessionId()
| restObj.uploadFile(localConfigFileToUpload, rxfFileOnServer) |
prospectSet.js | const mongoose = require("mongoose");
| demographic: { type: String, required: true },
source: { type: String, required: true },
addedBy: { type: String, required: true },
dateAdded: {
type: Date,
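// Pass the function reference so Mongoose evaluates the default when each document is created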
default: Date.now, required: true
},
setType: { type: String, required: true },
howMany: { type: Number, required: true },
details: { type: String, required: true },
});
const ProspectSet = mongoose.model("Prospectset", prospectSetSchema);
exports.ProspectSet = ProspectSet; | const prospectSetSchema = new mongoose.Schema({
prospectName: { type: String, required: true }, |
http_test.go | package transport_test
import (
"bytes"
"net/http"
"net/http/httptest"
"testing"
"github.com/Pradnyana28/go-rest-api-boilerplate/pkg/api/password"
"github.com/Pradnyana28/go-rest-api-boilerplate/pkg/api/password/transport"
"github.com/Pradnyana28/go-rest-api-boilerplate/pkg/utl/mock"
"github.com/Pradnyana28/go-rest-api-boilerplate/pkg/utl/mock/mockdb"
"github.com/Pradnyana28/go-rest-api-boilerplate/pkg/utl/model"
"github.com/Pradnyana28/go-rest-api-boilerplate/pkg/utl/server"
"github.com/go-pg/pg/orm"
"github.com/labstack/echo"
"github.com/stretchr/testify/assert"
)
func | (t *testing.T) {
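// Table-driven cases: invalid id, bind failure, mismatched passwords, RBAC rejection, and the success path.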
cases := []struct {
name string
req string
wantStatus int
id string
udb *mockdb.User
rbac *mock.RBAC
sec *mock.Secure
}{
{
name: "NaN",
wantStatus: http.StatusBadRequest,
id: "abc",
},
{
name: "Fail on Bind",
req: `{"new_password":"new","old_password":"my_old_password", "new_password_confirm":"new"}`,
wantStatus: http.StatusBadRequest,
id: "1",
},
{
name: "Different passwords",
req: `{"new_password":"new_password","old_password":"my_old_password", "new_password_confirm":"new_password_cf"}`,
wantStatus: http.StatusBadRequest,
id: "1",
},
{
name: "Fail on RBAC",
req: `{"new_password":"newpassw","old_password":"oldpassw", "new_password_confirm":"newpassw"}`,
rbac: &mock.RBAC{
EnforceUserFn: func(c echo.Context, id int) error {
return echo.ErrForbidden
},
},
id: "1",
wantStatus: http.StatusForbidden,
},
{
name: "Success",
req: `{"new_password":"newpassw","old_password":"oldpassw", "new_password_confirm":"newpassw"}`,
rbac: &mock.RBAC{
EnforceUserFn: func(c echo.Context, id int) error {
return nil
},
},
id: "1",
udb: &mockdb.User{
ViewFn: func(db orm.DB, id int) (*rest.User, error) {
return &rest.User{
Password: "oldPassword",
}, nil
},
UpdateFn: func(db orm.DB, usr *rest.User) error {
return nil
},
},
sec: &mock.Secure{
HashMatchesPasswordFn: func(string, string) bool {
return true
},
PasswordFn: func(string, ...string) bool {
return true
},
HashFn: func(string) string {
return "hashedPassword"
},
},
wantStatus: http.StatusOK,
},
}
client := &http.Client{}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
r := server.New()
rg := r.Group("")
transport.NewHTTP(password.New(nil, tt.udb, tt.rbac, tt.sec), rg)
ts := httptest.NewServer(r)
defer ts.Close()
path := ts.URL + "/password/" + tt.id
req, err := http.NewRequest("PATCH", path, bytes.NewBufferString(tt.req))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Content-Type", "application/json")
res, err := client.Do(req)
if err != nil {
t.Fatal(err)
}
defer res.Body.Close()
assert.Equal(t, tt.wantStatus, res.StatusCode)
})
}
}
| TestChangePassword |
keys.rs | use ecmult::ECMULT_CONTEXT;
use ecmult::ECMULT_GEN_CONTEXT;
use secp256k1::scalar::Scalar;
use secp256k1::error::Error;
use std::ops::{Add, Neg, Sub, Mul};
use super::rand::Rng;
use secp256k1::group::{Jacobian, Affine};
use secp256k1::field::Field;
use hex;
use std::fmt::Display;
use std::fmt;
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
/// Public key on a secp256k1 curve.
pub struct PublicKey(pub(crate) Affine);
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
/// Secret key (256-bit) on a secp256k1 curve.
pub struct SecretKey(pub(crate) Scalar);
impl PublicKey {
/// Create a public key from a private key by performing P = k.G
pub fn | (seckey: &SecretKey) -> PublicKey {
let mut pj = Jacobian::default();
ECMULT_GEN_CONTEXT.ecmult_gen(&mut pj, &seckey.0);
let mut p = Affine::default();
p.set_gej(&pj);
PublicKey(p)
}
/// Generate a public key from hex
/// The expected format is a tag (0x04, 0x06 or 0x07) for uncompressed keys,
/// 0x02, 0x03 for compressed keys
/// followed by 32 or 64 bytes, depending on whether the key is compressed or not
pub fn from_hex(h: &str) -> Result<PublicKey, Error> {
let data = hex::decode(h).or(Err(Error::InvalidHex))?;
match data.len() {
33 => PublicKey::parse_compressed(array_ref!(data, 0, 33)),
65 => PublicKey::parse(array_ref!(data, 0, 65)),
_ => Err(Error::InvalidPublicKey),
}
}
/// Return the hexadecimal representation of the public key
pub fn to_hex(&self, compressed: bool) -> String {
if compressed {
return hex::encode(self.serialize_compressed().to_vec());
}
hex::encode(self.serialize().to_vec())
}
/// Create a public key from a compressed public key. Remember that Public keys are just points on the elliptic
/// curve, so you can derive the full point by supplying the x-coordinate and the parity. By convention, compressed
/// public keys hold the parity in the first byte and the x-coordinate in the next 32 bytes.
pub fn parse_compressed(p: &[u8; 33]) -> Result<PublicKey, Error> {
if !(p[0] == 0x02 || p[0] == 0x03) {
return Err(Error::InvalidPublicKey);
}
let mut x = Field::default();
if !x.set_b32(array_ref!(p, 1, 32)) {
return Err(Error::InvalidPublicKey);
}
let mut elem = Affine::default();
if !elem.set_xo_var(&x, p[0] == 0x03) {
return Err(Error::InvalidPublicKey);
}
if elem.is_infinity() {
return Err(Error::InvalidPublicKey);
}
if elem.is_valid_var() {
elem.x.normalize();
elem.y.normalize();
return Ok(PublicKey(elem));
} else {
return Err(Error::InvalidPublicKey);
}
}
/// Create a PublicKey from the 65-byte binary representation of a public key. The first byte is a prefix (must be 4, 6,
/// or 7). The next 32 bytes represent the x-coordinate, and the last 32 bytes represent the y-coordinate.
pub fn parse(p: &[u8; 65]) -> Result<PublicKey, Error> {
use secp256k1::util::{TAG_PUBKEY_HYBRID_EVEN, TAG_PUBKEY_HYBRID_ODD};
if !(p[0] == 0x04 || p[0] == 0x06 || p[0] == 0x07) {
return Err(Error::InvalidPublicKey);
}
let mut x = Field::default();
let mut y = Field::default();
if !x.set_b32(array_ref!(p, 1, 32)) {
return Err(Error::InvalidPublicKey);
}
if !y.set_b32(array_ref!(p, 33, 32)) {
return Err(Error::InvalidPublicKey);
}
let mut elem = Affine::default();
elem.set_xy(&x, &y);
if (p[0] == TAG_PUBKEY_HYBRID_EVEN || p[0] == TAG_PUBKEY_HYBRID_ODD)
&& (y.is_odd() != (p[0] == TAG_PUBKEY_HYBRID_ODD))
{
return Err(Error::InvalidPublicKey);
}
if elem.is_infinity() {
return Err(Error::InvalidPublicKey);
}
if elem.is_valid_var() {
return Ok(PublicKey(elem));
} else {
return Err(Error::InvalidPublicKey);
}
}
/// Return the 65-byte serialization of the public key. The first byte is always 0x04 to represent an uncompressed
/// public key.
pub fn serialize(&self) -> [u8; 65] {
use secp256k1::util::TAG_PUBKEY_UNCOMPRESSED;
debug_assert!(!self.0.is_infinity());
let mut ret = [0u8; 65];
let mut elem = self.0.clone();
elem.x.normalize_var();
elem.y.normalize_var();
elem.x.fill_b32(array_mut_ref!(ret, 1, 32));
elem.y.fill_b32(array_mut_ref!(ret, 33, 32));
ret[0] = TAG_PUBKEY_UNCOMPRESSED;
ret
}
/// Return the 33-byte serialization of the compressed public key.
pub fn serialize_compressed(&self) -> [u8; 33] {
use secp256k1::util::{TAG_PUBKEY_EVEN, TAG_PUBKEY_ODD};
debug_assert!(!self.0.is_infinity());
let mut ret = [0u8; 33];
let mut elem = self.0.clone();
elem.x.normalize_var();
elem.y.normalize_var();
elem.x.fill_b32(array_mut_ref!(ret, 1, 32));
ret[0] = if elem.y.is_odd() {
TAG_PUBKEY_ODD
} else {
TAG_PUBKEY_EVEN
};
ret
}
}
impl Display for PublicKey {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "{}", self.to_hex(true))
}
}
impl Into<Affine> for PublicKey {
fn into(self) -> Affine {
self.0
}
}
impl Add for PublicKey {
type Output = PublicKey;
fn add(self, rhs: PublicKey) -> <Self as Add<PublicKey>>::Output {
let mut j1 = Jacobian::default();
j1.set_ge(&self.0);
let j2 = j1.add_ge(&rhs.0);
let mut ret = Affine::default();
ret.set_gej(&j2);
PublicKey(ret)
}
}
impl Sub for PublicKey {
type Output = PublicKey;
fn sub(self, rhs: PublicKey) -> PublicKey {
let ret = rhs.0.neg();
self + PublicKey(ret)
}
}
impl SecretKey {
/// Read a 32-byte array into a Secret key
pub fn parse(p: &[u8; 32]) -> Result<SecretKey, Error> {
let mut elem = Scalar::default();
if !elem.set_b32(p) && !elem.is_zero() {
Ok(SecretKey(elem))
} else {
Err(Error::InvalidSecretKey)
}
}
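/// Create a secret key from its 64-character hexadecimal representation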
pub fn from_hex(h: &str) -> Result<SecretKey, Error> {
let data = hex::decode(h).or(Err(Error::InvalidHex))?;
match data.len() {
32 => SecretKey::parse(array_ref!(data, 0, 32)),
_ => Err(Error::InvalidSecretKey),
}
}
/// Return the hexadecimal representation of the secret key
pub fn to_hex(&self) -> String {
hex::encode(self.serialize().to_vec())
}
/// Create a new random secret key
/// # Examples
/// ```
/// extern crate rand;
/// extern crate libsecp256k1_rs as secp256k1;
/// use rand::thread_rng;
/// use secp256k1::SecretKey;
///
/// let k1 = SecretKey::random(&mut thread_rng());
/// ```
pub fn random<R: Rng>(rng: &mut R) -> SecretKey {
loop {
let mut ret = [0u8; 32];
rng.fill_bytes(&mut ret);
match Self::parse(&ret) {
Ok(key) => return key,
Err(_) => (),
}
}
}
/// Represent a SecretKey as a 32-byte array
pub fn serialize(&self) -> [u8; 32] {
self.0.b32()
}
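/// Return the multiplicative inverse of the secret key scalar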
pub fn inv(&self) -> SecretKey {
SecretKey(self.0.inv())
}
}
impl Display for SecretKey {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str(&self.to_hex())
}
}
impl Into<Scalar> for SecretKey {
fn into(self) -> Scalar {
self.0
}
}
impl Add for SecretKey {
type Output = SecretKey;
fn add(self, rhs: SecretKey) -> <Self as Add<SecretKey>>::Output {
SecretKey(self.0 + rhs.0)
}
}
impl Sub for SecretKey {
type Output = SecretKey;
fn sub(self, rhs: SecretKey) -> SecretKey {
SecretKey(self.0 + rhs.0.neg())
}
}
impl Mul<SecretKey> for SecretKey {
type Output = SecretKey;
fn mul(self, rhs: SecretKey) -> SecretKey {
SecretKey(self.0 * rhs.0)
}
}
impl Mul<PublicKey> for SecretKey {
type Output = PublicKey;
fn mul(self, rhs: PublicKey) -> PublicKey {
let mut pj = Jacobian::default();
ECMULT_CONTEXT.ecmult_const(&mut pj, &rhs.0, &self.0);
let mut p = Affine::default();
p.set_gej(&pj);
PublicKey(p)
}
}
impl Neg for SecretKey {
type Output = SecretKey;
fn neg(self) -> <Self as Neg>::Output {
SecretKey(-self.0)
}
}
#[cfg(test)]
mod tests {
use secp256k1::rand::thread_rng;
use {Error, PublicKey, SecretKey};
use secp256k1::Scalar;
#[test]
fn test_from_hex() {
let k = SecretKey::from_hex("ebb2c082fd7727890a28ac82f6bdf97bad8de9f5d7c9028692de1a255cad3e0f").unwrap();
assert_eq!(k.serialize(), [0xebu8, 0xb2u8, 0xc0u8, 0x82u8, 0xfdu8, 0x77u8, 0x27u8, 0x89u8,
0x0au8, 0x28u8, 0xacu8, 0x82u8, 0xf6u8, 0xbdu8, 0xf9u8, 0x7bu8,
0xadu8, 0x8du8, 0xe9u8, 0xf5u8, 0xd7u8, 0xc9u8, 0x02u8, 0x86u8,
0x92u8, 0xdeu8, 0x1au8, 0x25u8, 0x5cu8, 0xadu8, 0x3eu8, 0x0fu8] );
}
#[test]
fn secret_to_hex() {
let k = SecretKey::from_hex("ebb2c082fd7727890a28ac82f6bdf97bad8de9f5d7c9028692de1a255cad3e0f").unwrap();
assert_eq!(&k.to_hex(), "ebb2c082fd7727890a28ac82f6bdf97bad8de9f5d7c9028692de1a255cad3e0f");
}
#[test]
fn create_secret() {
let _ = SecretKey::random(&mut thread_rng());
}
#[test]
fn inverse_secret() {
let k = SecretKey::random(&mut thread_rng());
let one = small(1);
assert_eq!(k * k.inv(), one);
}
#[test]
fn negate_twice() {
let k = SecretKey::random(&mut thread_rng());
let k2 = -k;
assert_ne!(k, k2);
assert_eq!(k, -k2);
}
#[test]
fn add_scalar_is_commutative() {
let k1 = SecretKey::random(&mut thread_rng());
let k2 = SecretKey::random(&mut thread_rng());
assert_eq!(k1 + k2, k2 + k1);
}
#[test]
fn add_scalar() {
let one = small(1);
let two = small(2);
let three = small(3);
assert_eq!(one + two, three);
assert_ne!(one, two);
}
#[test]
fn mul_scalar_is_commutative() {
let k1 = SecretKey::random(&mut thread_rng());
let k2 = SecretKey::random(&mut thread_rng());
assert_eq!(k1 * k2, k2 * k1);
}
#[test]
fn mul_scalar() {
let one = small(1);
let two = small(2);
assert_eq!(one * two, two);
}
#[test]
fn scalar_subtraction() {
let k1 = SecretKey::random(&mut thread_rng());
let k2 = SecretKey::random(&mut thread_rng());
let z: Scalar = (k1 - k1).into();
assert!(z.is_zero());
assert_eq!(k1 + k2 - k2, k1);
}
fn small(val: u8) -> SecretKey {
SecretKey::parse(&[
0u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, val,
]).unwrap()
}
#[test]
fn formatting() {
let key = PublicKey::from_hex("0241cc121c419921942add6db6482fb36243faf83317c866d2a28d8c6d7089f7ba").unwrap();
assert_eq!(&key.to_hex(true), "0241cc121c419921942add6db6482fb36243faf83317c866d2a28d8c6d7089f7ba");
let key = PublicKey::from_hex("04fe53c78e36b86aae8082484a4007b706d5678cabb92d178fc95020d4d8dc41ef44cfbb8dfa7a593c7910a5b6f94d079061a7766cbeed73e24ee4f654f1e51904").unwrap();
assert_eq!(&key.to_hex(false), "04fe53c78e36b86aae8082484a4007b706d5678cabb92d178fc95020d4d8dc41ef44cfbb8dfa7a593c7910a5b6f94d079061a7766cbeed73e24ee4f654f1e51904");
}
#[test]
fn valid_keys() {
let key = PublicKey::from_hex("0241cc121c419921942add6db6482fb36243faf83317c866d2a28d8c6d7089f7ba");
assert!(key.is_ok());
let key = PublicKey::from_hex("02e6642fd69bd211f93f7f1f36ca51a26a5290eb2dd1b0d8279a87bb0d480c8443");
assert!(key.is_ok());
let key = PublicKey::from_hex("0384526253c27c7aef56c7b71a5cd25bebb66dddda437826defc5b2568bde81f07");
assert!(key.is_ok());
let key = PublicKey::from_hex("");
assert_eq!(key.err().unwrap(), Error::InvalidPublicKey);
let key = PublicKey::from_hex("0abcdefgh");
assert_eq!(key.err().unwrap(), Error::InvalidHex);
let key = PublicKey::from_hex("9384526253c27c7aef56c7b71a5cd25bebb66dddda437826defc5b2568bde81f07");
assert_eq!(key.err().unwrap(), Error::InvalidPublicKey);
let key = PublicKey::from_hex("04fe53c78e36b86aae8082484a4007b706d5678cabb92d178fc95020d4d8dc41ef44cfbb8dfa7a593c7910a5b6f94d079061a7766cbeed73e24ee4f654f1e51904");
assert!(key.is_ok());
}
#[test]
fn serlialize_deserialize() {
let key = PublicKey::from_hex("0384526253c27c7aef56c7b71a5cd25bebb66dddda437826defc5b2568bde81f07").unwrap();
let des = key.serialize();
let key2 = PublicKey::parse(&des).unwrap();
assert_eq!(key, key2);
let key = PublicKey::from_hex("0241cc121c419921942add6db6482fb36243faf83317c866d2a28d8c6d7089f7ba").unwrap();
let des = key.serialize_compressed();
let key2 = PublicKey::parse_compressed(&des).unwrap();
assert_eq!(key, key2);
let key = PublicKey::from_hex("04fe53c78e36b86aae8082484a4007b706d5678cabb92d178fc95020d4d8dc41ef44cfbb8dfa7a593c7910a5b6f94d079061a7766cbeed73e24ee4f654f1e51904").unwrap();
let des = key.serialize();
let key2 = PublicKey::parse(&des).unwrap();
assert_eq!(key, key2);
}
#[test]
fn add_public_keys() {
let p1 =
PublicKey::from_hex("0241cc121c419921942add6db6482fb36243faf83317c866d2a28d8c6d7089f7ba").unwrap();
let p2 =
PublicKey::from_hex("02e6642fd69bd211f93f7f1f36ca51a26a5290eb2dd1b0d8279a87bb0d480c8443").unwrap();
let exp_sum =
PublicKey::from_hex("0384526253c27c7aef56c7b71a5cd25bebb66dddda437826defc5b2568bde81f07").unwrap();
let sum = p1 + p2;
assert_eq!(p2 + p1, sum);
assert_eq!(sum, exp_sum);
}
#[test]
fn scalar_multiplication_is_addition() {
let p1 = PublicKey::from_hex("04fe53c78e36b86aae8082484a4007b706d5678cabb92d178fc95020d4d8dc41ef44cfbb8dfa7a593c7910a5b6f94d079061a7766cbeed73e24ee4f654f1e51904").unwrap();
let k = small(3);
assert_eq!(p1 + p1 + p1, k * p1);
}
}
| from_secret_key |
parsers.ts | import commander from 'commander'
import validator from 'validator' | throw new commander.InvalidOptionArgumentError('Not a url')
}
return value
} |
export const endpointParser = (value: string) => {
if (!validator.isURL(value)) { |
breadcrums.js | import Vue from 'vue'
const SET_BREADCRUMS = 'SET_BREADCRUMS'
const PUSH_BREADCRUMS = 'PUSH_BREADCRUMS'
const SET_CHIPS = 'SET_CHIPS'
/** breadcrums item struct
* {
* disabled: true/false
* text: ''
* path: ''
* }
* related chip struct
* {
* text: ''
* path: ''
* }
**/
// state
const state = {
breadcrums: [],
relatedChips: []
}
// getters
const getters = {
breadCrums: state => {
return state.breadcrums
},
relatedChips: state => {
return state.relatedChips
}
}
// mutations
const mutations = {
[SET_BREADCRUMS] (state, obj) {
state.breadcrums = obj
},
[PUSH_BREADCRUMS] (state, obj) {
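// Update the crumb in place if one with the same path already exists, otherwise append it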
let idx = state.breadcrums.findIndex(o => o.path === obj.path)
console.log(idx)
if (idx === -1) {
state.breadcrums.push(obj)
} else {
Vue.set(state.breadcrums, idx, obj)
}
},
[SET_CHIPS] (state, obj) {
state.relatedChips = obj
}
}
// actions
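// Build an action that simply commits the given mutation type with whatever payload it receives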
function | (type) {
return ({ commit }, ...args) => commit(type, ...args)
}
const actions = {
setBreadcrumbs: makeAction(SET_BREADCRUMS),
pushBreadcrumbs: makeAction(PUSH_BREADCRUMS),
setRelatedChips: makeAction(SET_CHIPS)
}
export default {
state,
getters,
mutations,
actions
}
| makeAction |
test_polydata_utils.py | # -*- coding: utf-8 -*-
import pytest
import vtk
import numpy as np
import sksurgeryvtk.utils.polydata_utils as pdu
import sksurgeryvtk.models.vtk_surface_model as vbs
def test_overlapping_bounds():
radius_0=10.0
radius_1=7.0
centre_1=5.0
radius_2=4.0
centre_2=15.0
radius_3=4.0
centre_3=0.0
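# sphere_1 overlaps sphere_0, sphere_2 lies clear of it, and sphere_3 sits entirely inside it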
sphere_0 = vtk.vtkSphereSource()
sphere_0.SetRadius(radius_0)
sphere_0.SetPhiResolution(12)
sphere_0.SetThetaResolution(12)
sphere_0.SetCenter(0.0, 0.0, 0.0)
sphere_0.Update()
vtk_model_0 = sphere_0.GetOutput()
sphere_1 = vtk.vtkSphereSource()
sphere_1.SetRadius(radius_1)
sphere_1.SetPhiResolution(12)
sphere_1.SetThetaResolution(21)
sphere_1.SetCenter(centre_1, 0.0, 0.0)
sphere_1.Update()
vtk_model_1 = sphere_1.GetOutput()
sphere_2 = vtk.vtkSphereSource()
sphere_2.SetRadius(radius_2)
sphere_2.SetPhiResolution(12)
sphere_2.SetThetaResolution(21)
sphere_2.SetCenter(centre_2, 0.0, 0.0)
sphere_2.Update()
vtk_model_2 = sphere_2.GetOutput()
sphere_3 = vtk.vtkSphereSource()
sphere_3.SetRadius(radius_3)
sphere_3.SetPhiResolution(12)
sphere_3.SetThetaResolution(21)
sphere_3.SetCenter(centre_3, 0.0, 0.0)
sphere_3.Update()
vtk_model_3 = sphere_3.GetOutput()
assert (pdu.check_overlapping_bounds( vtk_model_0, vtk_model_1))
assert (pdu.check_overlapping_bounds( vtk_model_1, vtk_model_0))
assert (not pdu.check_overlapping_bounds( vtk_model_0, vtk_model_2))
assert (not pdu.check_overlapping_bounds( vtk_model_2, vtk_model_0))
assert (pdu.check_overlapping_bounds( vtk_model_0, vtk_model_3))
assert (pdu.check_overlapping_bounds( vtk_model_3, vtk_model_0))
def test_dice_overlap():
radius_0=10.0
radius_1=7.0
centre_1=5.0
sphere_0 = vtk.vtkSphereSource()
sphere_0.SetRadius(radius_0)
sphere_0.SetPhiResolution(60)
sphere_0.SetThetaResolution(60)
sphere_0.SetCenter(0.0, 0.0, 0.0)
sphere_0.Update()
vtk_model_0 = sphere_0.GetOutput()
sphere_1 = vtk.vtkSphereSource()
sphere_1.SetRadius(radius_1)
sphere_1.SetPhiResolution(60)
sphere_1.SetThetaResolution(60)
sphere_1.SetCenter(centre_1, 0.0, 0.0)
sphere_1.Update()
vtk_model_1 = sphere_1.GetOutput()
dice, volume_0, volume_1, volume_01 = pdu.two_polydata_dice(vtk_model_0, vtk_model_1)
np.testing.assert_approx_equal(volume_0, 4.0 * np.pi * radius_0**3.0 / 3.0, significant=2)
np.testing.assert_approx_equal(volume_1, 4.0 * np.pi * radius_1**3.0 / 3.0, significant=2)
#from http://mathworld.wolfram.com/Sphere-SphereIntersection.html
cap_height_0 = ( radius_1 - radius_0 + centre_1) * ( radius_1 + radius_0 - centre_1) / (2 * centre_1)
cap_height_1 = ( radius_0 - radius_1 + centre_1) * ( radius_0 + radius_1 - centre_1) / (2 * centre_1)
cap_vol_0 = np.pi * cap_height_0**2 * ( 3 * radius_0 - cap_height_0) / 3
cap_vol_1 = np.pi * cap_height_1**2 * ( 3 * radius_1 - cap_height_1) / 3
analytic = cap_vol_0 + cap_vol_1
np.testing.assert_approx_equal(volume_01, analytic, significant=2)
np.testing.assert_approx_equal(dice, 2*volume_01 / ( volume_0 + volume_1) , significant=10)
def | ():
radius_0=5.5
radius_1=4.3
centre_1=12.0
sphere_0 = vtk.vtkSphereSource()
sphere_0.SetRadius(radius_0)
sphere_0.SetPhiResolution(60)
sphere_0.SetThetaResolution(60)
sphere_0.SetCenter(0.0, 0.0, 0.0)
sphere_0.Update()
vtk_model_0 = sphere_0.GetOutput()
sphere_1 = vtk.vtkSphereSource()
sphere_1.SetRadius(radius_1)
sphere_1.SetPhiResolution(60)
sphere_1.SetThetaResolution(60)
sphere_1.SetCenter(centre_1, 0.0, 0.0)
sphere_1.Update()
vtk_model_1 = sphere_1.GetOutput()
dice, volume_0, volume_1, volume_01 = pdu.two_polydata_dice(vtk_model_0, vtk_model_1)
np.testing.assert_approx_equal(volume_0, 4.0 * np.pi * radius_0**3.0 / 3.0, significant=2)
np.testing.assert_approx_equal(volume_1, 4.0 * np.pi * radius_1**3.0 / 3.0, significant=2)
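# The sphere centres are 12.0 apart, more than the sum of the radii (9.8), so the expected intersection volume is zero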
analytic = 0.0
np.testing.assert_approx_equal(volume_01, analytic, significant=2)
np.testing.assert_approx_equal(dice, 2*volume_01 / ( volume_0 + volume_1) , significant=10)
| test_dice_no_overlap |
markdown.py | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2021 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
import html
import re
from typing import Optional
import pyrogram
from . import utils
from .html import HTML
BOLD_DELIM = "**"
ITALIC_DELIM = "__"
UNDERLINE_DELIM = "--"
STRIKE_DELIM = "~~"
CODE_DELIM = "`"
PRE_DELIM = "```"
MARKDOWN_RE = re.compile(r"({d})|\[(.+?)\]\((.+?)\)".format(
d="|".join(
["".join(i) for i in [
[rf"\{j}" for j in i]
for i in [
PRE_DELIM,
CODE_DELIM,
STRIKE_DELIM,
UNDERLINE_DELIM,
ITALIC_DELIM,
BOLD_DELIM
]
]] | OPENING_TAG = "<{}>"
CLOSING_TAG = "</{}>"
URL_MARKUP = '<a href="{}">{}</a>'
FIXED_WIDTH_DELIMS = [CODE_DELIM, PRE_DELIM]
class Markdown:
def __init__(self, client: Optional["pyrogram.Client"]):
self.html = HTML(client)
async def parse(self, text: str, strict: bool = False):
if strict:
text = html.escape(text)
delims = set()
is_fixed_width = False
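# Walk the text once: inline URLs become anchor tags and each Markdown delimiter toggles the matching
# HTML open/close tag; the converted text is then handed to the HTML parser below.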
for i, match in enumerate(re.finditer(MARKDOWN_RE, text)):
start, _ = match.span()
delim, text_url, url = match.groups()
full = match.group(0)
if delim in FIXED_WIDTH_DELIMS:
is_fixed_width = not is_fixed_width
if is_fixed_width and delim not in FIXED_WIDTH_DELIMS:
continue
if text_url:
text = utils.replace_once(text, full, URL_MARKUP.format(url, text_url), start)
continue
if delim == BOLD_DELIM:
tag = "b"
elif delim == ITALIC_DELIM:
tag = "i"
elif delim == UNDERLINE_DELIM:
tag = "u"
elif delim == STRIKE_DELIM:
tag = "s"
elif delim == CODE_DELIM:
tag = "code"
elif delim == PRE_DELIM:
tag = "pre"
else:
continue
if delim not in delims:
delims.add(delim)
tag = OPENING_TAG.format(tag)
else:
delims.remove(delim)
tag = CLOSING_TAG.format(tag)
text = utils.replace_once(text, delim, tag, start)
return await self.html.parse(text)
@staticmethod
def unparse(text: str, entities: list):
text = utils.add_surrogates(text)
entities_offsets = []
for entity in entities:
entity_type = entity.type
start = entity.offset
end = start + entity.length
if entity_type == "bold":
start_tag = end_tag = BOLD_DELIM
elif entity_type == "italic":
start_tag = end_tag = ITALIC_DELIM
elif entity_type == "underline":
start_tag = end_tag = UNDERLINE_DELIM
elif entity_type == "strikethrough":
start_tag = end_tag = STRIKE_DELIM
elif entity_type == "code":
start_tag = end_tag = CODE_DELIM
elif entity_type in ("pre", "blockquote"):
start_tag = end_tag = PRE_DELIM
elif entity_type == "text_link":
url = entity.url
start_tag = "["
end_tag = f"]({url})"
elif entity_type == "text_mention":
user = entity.user
start_tag = "["
end_tag = f"](tg://user?id={user.id})"
else:
continue
entities_offsets.append((start_tag, start,))
entities_offsets.append((end_tag, end,))
# sorting by offset (desc)
entities_offsets.sort(key=lambda x: -x[1])
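# Inserting tags from the highest offset down keeps the lower offsets valid while the text grows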
for entity, offset in entities_offsets:
text = text[:offset] + entity + text[offset:]
return utils.remove_surrogates(text) | )))
|
untrusted_annotator_comm_factory.js | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import {PostMessageAPIClient} from 'chrome-untrusted://projector/js/post_message_api_client.m.js';
import {RequestHandler} from 'chrome-untrusted://projector/js/post_message_api_request_handler.m.js';
const TARGET_URL = 'chrome://projector/';
// A client that sends messages to the chrome://projector embedder.
export class TrustedAnnotatorClient extends PostMessageAPIClient {
/**
* @param {!Window} parentWindow The embedder window from which requests
* come.
*/
constructor(parentWindow) {
super(TARGET_URL, parentWindow);
// TODO(b/196245932) Register the onUndoRedoAvailabilityChanged as callback
// to the ink library wrapper.
}
/**
* Notifies the native ui that undo/redo has become available.
* @param {boolean} undoAvailable
* @param {boolean} redoAvailable
* @return {Promise}
*/
onUndoRedoAvailabilityChanged(undoAvailable, redoAvailable) {
return this.callApiFn(
'onUndoRedoAvailabilityChanged', [undoAvailable, redoAvailable]);
}
}
/**
* Class that implements the RequestHandler inside the Projector untrusted
* scheme for Annotator.
*/
export class UntrustedAnnotatorRequestHandler extends RequestHandler {
/**
* @param {!Window} parentWindow The embedder window from which requests
* come.
*/
constructor(parentWindow) {
super(null, TARGET_URL, TARGET_URL);
this.targetWindow_ = parentWindow;
this.registerMethod('setTool', (tool) => {
// TODO(b/196245932) Call into the Ink library to set tool.
return true;
});
this.registerMethod('undo', () => {
// TODO(b/196245932) Call into the Ink wrapper to undo.
return true;
});
this.registerMethod('redo', () => {
// TODO(b/196245932) Call into Ink wrapper to redo.
return true;
});
this.registerMethod('clear', () => {
// TODO(b/196245932) call into Ink wrapper to clear.
return true;
});
}
/** @override */
targetWindow() {
return this.targetWindow_;
}
}
/**
* This is a class that is used to setup the duplex communication channels
* between this origin, chrome-untrusted://projector/* and the embedder content.
*/
export class | {
/**
* Creates the instances of PostMessageAPIClient and Requesthandler.
*/
static maybeCreateInstances() {
if (AnnotatorUntrustedCommFactory.client_ ||
AnnotatorUntrustedCommFactory.requestHandler_) {
return;
}
AnnotatorUntrustedCommFactory.client_ =
new TrustedAnnotatorClient(window.parent);
AnnotatorUntrustedCommFactory.requestHandler_ =
new UntrustedAnnotatorRequestHandler(window.parent);
}
/**
* In order to use this class, please do the following (e.g. To notify when
* undo-redo becomes available):
* AnnotatorUntrustedCommFactory.
* getPostMessageAPIClient().
* onUndoRedoAvailabilityChanged(true, true);
* @return {!TrustedAnnotatorClient}
*/
static getPostMessageAPIClient() {
// AnnotatorUntrustedCommFactory.client_ should be available. However to be
// on the cautious side create an instance here if getPostMessageAPIClient
// is triggered before the page finishes loading.
AnnotatorUntrustedCommFactory.maybeCreateInstances();
return AnnotatorUntrustedCommFactory.client_;
}
}
document.addEventListener('DOMContentLoaded', () => {
// Create instances of the singletons(PostMessageAPIClient and
// RequestHandler) when the document has finished loading.
AnnotatorUntrustedCommFactory.maybeCreateInstances();
});
| AnnotatorUntrustedCommFactory |
upgrade.rs | use std::collections::BTreeMap;
use num_rational::Ratio;
use dimension_engine_test_support::{
ExecuteRequestBuilder, InMemoryWasmTestBuilder, UpgradeRequestBuilder, DEFAULT_ACCOUNT_ADDR,
DEFAULT_MAX_ASSOCIATED_KEYS, DEFAULT_RUN_GENESIS_REQUEST, DEFAULT_UNBONDING_DELAY,
DEFAULT_WASM_CONFIG,
};
use dimension_execution_engine::{
core::engine_state::{
EngineConfig, DEFAULT_MAX_QUERY_DEPTH, DEFAULT_MAX_RUNTIME_CALL_STACK_HEIGHT,
},
shared::{
host_function_costs::HostFunctionCosts,
opcode_costs::{
OpcodeCosts, DEFAULT_ADD_COST, DEFAULT_BIT_COST, DEFAULT_CONST_COST,
DEFAULT_CONTROL_FLOW_COST, DEFAULT_CONVERSION_COST, DEFAULT_CURRENT_MEMORY_COST,
DEFAULT_DIV_COST, DEFAULT_GLOBAL_COST, DEFAULT_GROW_MEMORY_COST,
DEFAULT_INTEGER_COMPARISON_COST, DEFAULT_LOAD_COST, DEFAULT_LOCAL_COST,
DEFAULT_MUL_COST, DEFAULT_NOP_COST, DEFAULT_REGULAR_COST, DEFAULT_STORE_COST,
DEFAULT_UNREACHABLE_COST,
},
storage_costs::StorageCosts,
system_config::{
auction_costs::AuctionCosts, handle_payment_costs::HandlePaymentCosts,
mint_costs::MintCosts, standard_payment_costs::StandardPaymentCosts, SystemConfig,
DEFAULT_WASMLESS_TRANSFER_COST,
},
wasm_config::{WasmConfig, DEFAULT_MAX_STACK_HEIGHT, DEFAULT_WASM_MAX_MEMORY},
},
};
use dimension_types::{
account::{AccountHash, ACCOUNT_HASH_LENGTH},
runtime_args,
system::{
auction::{
AUCTION_DELAY_KEY, LOCKED_FUNDS_PERIOD_KEY, UNBONDING_DELAY_KEY, VALIDATOR_SLOTS_KEY,
},
mint::ROUND_SEIGNIORAGE_RATE_KEY,
},
CLValue, EraId, ProtocolVersion, RuntimeArgs, StoredValue, U256, U512,
};
const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::V1_0_0;
const DEFAULT_ACTIVATION_POINT: EraId = EraId::new(1);
const ARG_ACCOUNT: &str = "account";
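/// Build a WasmConfig with every opcode cost bumped by one and the max stack height doubled, so an applied upgrade is observable.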
fn get_upgraded_wasm_config() -> WasmConfig {
let opcode_cost = OpcodeCosts {
bit: DEFAULT_BIT_COST + 1,
add: DEFAULT_ADD_COST + 1,
mul: DEFAULT_MUL_COST + 1,
div: DEFAULT_DIV_COST + 1,
load: DEFAULT_LOAD_COST + 1,
store: DEFAULT_STORE_COST + 1,
op_const: DEFAULT_CONST_COST + 1,
local: DEFAULT_LOCAL_COST + 1,
global: DEFAULT_GLOBAL_COST + 1,
control_flow: DEFAULT_CONTROL_FLOW_COST + 1,
integer_comparison: DEFAULT_INTEGER_COMPARISON_COST + 1,
conversion: DEFAULT_CONVERSION_COST + 1,
unreachable: DEFAULT_UNREACHABLE_COST + 1,
nop: DEFAULT_NOP_COST + 1,
current_memory: DEFAULT_CURRENT_MEMORY_COST + 1,
grow_memory: DEFAULT_GROW_MEMORY_COST + 1,
regular: DEFAULT_REGULAR_COST + 1,
};
let storage_costs = StorageCosts::default();
let host_function_costs = HostFunctionCosts::default();
WasmConfig::new(
DEFAULT_WASM_MAX_MEMORY,
DEFAULT_MAX_STACK_HEIGHT * 2,
opcode_cost,
storage_costs,
host_function_costs,
)
}
#[ignore]
#[test]
fn should_upgrade_only_protocol_version() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value(); | let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let upgraded_engine_config = builder.get_engine_state().config();
assert_eq!(
*DEFAULT_WASM_CONFIG,
*upgraded_engine_config.wasm_config(),
"upgraded costs should equal original costs"
);
}
#[ignore]
#[test]
fn should_allow_only_wasm_costs_patch_version() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 2);
let new_wasm_config = get_upgraded_wasm_config();
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
let engine_config = EngineConfig::new(
DEFAULT_MAX_QUERY_DEPTH,
DEFAULT_MAX_ASSOCIATED_KEYS,
DEFAULT_MAX_RUNTIME_CALL_STACK_HEIGHT,
new_wasm_config,
SystemConfig::default(),
);
builder
.upgrade_with_upgrade_request(engine_config, &mut upgrade_request)
.expect_upgrade_success();
let upgraded_engine_config = builder.get_engine_state().config();
assert_eq!(
new_wasm_config,
*upgraded_engine_config.wasm_config(),
"upgraded costs should equal new costs"
);
}
#[ignore]
#[test]
fn should_allow_only_wasm_costs_minor_version() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor + 1, sem_ver.patch);
let new_wasm_config = get_upgraded_wasm_config();
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
let engine_config = EngineConfig::new(
DEFAULT_MAX_QUERY_DEPTH,
DEFAULT_MAX_ASSOCIATED_KEYS,
DEFAULT_MAX_RUNTIME_CALL_STACK_HEIGHT,
new_wasm_config,
SystemConfig::default(),
);
builder
.upgrade_with_upgrade_request(engine_config, &mut upgrade_request)
.expect_upgrade_success();
let upgraded_engine_config = builder.get_engine_state().config();
assert_eq!(
new_wasm_config,
*upgraded_engine_config.wasm_config(),
"upgraded costs should equal new costs"
);
}
#[ignore]
#[test]
fn should_not_downgrade() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let new_protocol_version = ProtocolVersion::from_parts(2, 0, 0);
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let upgraded_engine_config = builder.get_engine_state().config();
assert_eq!(
*DEFAULT_WASM_CONFIG,
*upgraded_engine_config.wasm_config(),
"upgraded costs should equal original costs"
);
let mut downgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(new_protocol_version)
.with_new_protocol_version(PROTOCOL_VERSION)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut downgrade_request);
let maybe_upgrade_result = builder.get_upgrade_result(1).expect("should have response");
assert!(
maybe_upgrade_result.is_err(),
"expected failure got {:?}",
maybe_upgrade_result
);
}
#[ignore]
#[test]
fn should_not_skip_major_versions() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let invalid_version =
ProtocolVersion::from_parts(sem_ver.major + 2, sem_ver.minor, sem_ver.patch);
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(invalid_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request);
let maybe_upgrade_result = builder.get_upgrade_result(0).expect("should have response");
assert!(maybe_upgrade_result.is_err(), "expected failure");
}
#[ignore]
#[test]
fn should_allow_skip_minor_versions() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
// can skip minor versions as long as they are higher than current version
let valid_new_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor + 2, sem_ver.patch);
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(valid_new_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request);
let maybe_upgrade_result = builder.get_upgrade_result(0).expect("should have response");
assert!(maybe_upgrade_result.is_ok(), "expected success");
}
#[ignore]
#[test]
fn should_upgrade_only_validator_slots() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
let validator_slot_key = builder
.get_contract(builder.get_auction_contract_hash())
.expect("auction should exist")
.named_keys()[VALIDATOR_SLOTS_KEY];
let before_validator_slots: u32 = builder
.query(None, validator_slot_key, &[])
.expect("should have validator slots")
.as_cl_value()
.expect("should be CLValue")
.clone()
.into_t()
.expect("should be u32");
let new_validator_slots = before_validator_slots + 1;
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.with_new_validator_slots(new_validator_slots)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let after_validator_slots: u32 = builder
.query(None, validator_slot_key, &[])
.expect("should have validator slots")
.as_cl_value()
.expect("should be CLValue")
.clone()
.into_t()
.expect("should be u32");
assert_eq!(
new_validator_slots, after_validator_slots,
"should have upgraded validator slots to expected value"
)
}
#[ignore]
#[test]
fn should_upgrade_only_auction_delay() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
let auction_delay_key = builder
.get_contract(builder.get_auction_contract_hash())
.expect("auction should exist")
.named_keys()[AUCTION_DELAY_KEY];
let before_auction_delay: u64 = builder
.query(None, auction_delay_key, &[])
.expect("should have auction delay")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
let new_auction_delay = before_auction_delay + 1;
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.with_new_auction_delay(new_auction_delay)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let after_auction_delay: u64 = builder
.query(None, auction_delay_key, &[])
.expect("should have auction delay")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
assert_eq!(
new_auction_delay, after_auction_delay,
"should hae upgrade version auction delay"
)
}
#[ignore]
#[test]
fn should_upgrade_only_locked_funds_period() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
let locked_funds_period_key = builder
.get_contract(builder.get_auction_contract_hash())
.expect("auction should exist")
.named_keys()[LOCKED_FUNDS_PERIOD_KEY];
let before_locked_funds_period_millis: u64 = builder
.query(None, locked_funds_period_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
let new_locked_funds_period_millis = before_locked_funds_period_millis + 1;
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.with_new_locked_funds_period_millis(new_locked_funds_period_millis)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let after_locked_funds_period_millis: u64 = builder
.query(None, locked_funds_period_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
assert_eq!(
new_locked_funds_period_millis, after_locked_funds_period_millis,
"Should have upgraded locked funds period"
)
}
#[ignore]
#[test]
fn should_upgrade_only_round_seigniorage_rate() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
let round_seigniorage_rate_key = builder
.get_contract(builder.get_mint_contract_hash())
.expect("auction should exist")
.named_keys()[ROUND_SEIGNIORAGE_RATE_KEY];
let before_round_seigniorage_rate: Ratio<U512> = builder
.query(None, round_seigniorage_rate_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
let new_round_seigniorage_rate = Ratio::new(1, 1_000_000_000);
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.with_new_round_seigniorage_rate(new_round_seigniorage_rate)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let after_round_seigniorage_rate: Ratio<U512> = builder
.query(None, round_seigniorage_rate_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
assert_ne!(before_round_seigniorage_rate, after_round_seigniorage_rate);
let expected_round_seigniorage_rate = Ratio::new(
U512::from(*new_round_seigniorage_rate.numer()),
U512::from(*new_round_seigniorage_rate.denom()),
);
assert_eq!(
expected_round_seigniorage_rate, after_round_seigniorage_rate,
"Should have upgraded locked funds period"
);
}
#[ignore]
#[test]
fn should_upgrade_only_unbonding_delay() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
let unbonding_delay_key = builder
.get_contract(builder.get_auction_contract_hash())
.expect("auction should exist")
.named_keys()[UNBONDING_DELAY_KEY];
let before_unbonding_delay: u64 = builder
.query(None, unbonding_delay_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
let new_unbonding_delay = DEFAULT_UNBONDING_DELAY + 5;
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.with_new_unbonding_delay(new_unbonding_delay)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let after_unbonding_delay: u64 = builder
.query(None, unbonding_delay_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
assert_ne!(before_unbonding_delay, new_unbonding_delay);
assert_eq!(
new_unbonding_delay, after_unbonding_delay,
"Should have upgraded locked funds period"
);
}
#[ignore]
#[test]
fn should_apply_global_state_upgrade() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
// We'll try writing directly to this key.
let unbonding_delay_key = builder
.get_contract(builder.get_auction_contract_hash())
.expect("auction should exist")
.named_keys()[UNBONDING_DELAY_KEY];
let before_unbonding_delay: u64 = builder
.query(None, unbonding_delay_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
let new_unbonding_delay = DEFAULT_UNBONDING_DELAY + 5;
let mut update_map = BTreeMap::new();
update_map.insert(
unbonding_delay_key,
StoredValue::from(CLValue::from_t(new_unbonding_delay).expect("should create a CLValue")),
);
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.with_global_state_update(update_map)
.build()
};
builder
.upgrade_with_upgrade_request(*builder.get_engine_state().config(), &mut upgrade_request)
.expect_upgrade_success();
let after_unbonding_delay: u64 = builder
.query(None, unbonding_delay_key, &[])
.expect("should have locked funds period")
.as_cl_value()
.expect("should be a CLValue")
.clone()
.into_t()
.expect("should be u64");
assert_ne!(before_unbonding_delay, new_unbonding_delay);
assert_eq!(
new_unbonding_delay, after_unbonding_delay,
"Should have modified locked funds period"
);
}
#[ignore]
#[test]
fn should_increase_max_associated_keys_after_upgrade() {
let mut builder = InMemoryWasmTestBuilder::default();
builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);
let sem_ver = PROTOCOL_VERSION.value();
let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
let new_system_config = SystemConfig::new(
DEFAULT_WASMLESS_TRANSFER_COST,
AuctionCosts::default(),
MintCosts::default(),
HandlePaymentCosts::default(),
StandardPaymentCosts::default(),
);
let new_engine_config = EngineConfig::new(
DEFAULT_MAX_QUERY_DEPTH,
DEFAULT_MAX_ASSOCIATED_KEYS + 1,
DEFAULT_MAX_RUNTIME_CALL_STACK_HEIGHT,
*DEFAULT_WASM_CONFIG,
new_system_config,
);
let mut upgrade_request = {
UpgradeRequestBuilder::new()
.with_current_protocol_version(PROTOCOL_VERSION)
.with_new_protocol_version(new_protocol_version)
.with_activation_point(DEFAULT_ACTIVATION_POINT)
.build()
};
builder
.upgrade_with_upgrade_request(new_engine_config, &mut upgrade_request)
.expect_upgrade_success();
for n in (0..DEFAULT_MAX_ASSOCIATED_KEYS).map(U256::from) {
let account_hash = {
let mut addr = [0; ACCOUNT_HASH_LENGTH];
n.to_big_endian(&mut addr);
AccountHash::new(addr)
};
let add_request = ExecuteRequestBuilder::standard(
*DEFAULT_ACCOUNT_ADDR,
"add_update_associated_key.wasm",
runtime_args! {
ARG_ACCOUNT => account_hash,
},
)
.with_protocol_version(new_protocol_version)
.build();
builder.exec(add_request).expect_success().commit();
}
let account = builder
.get_account(*DEFAULT_ACCOUNT_ADDR)
.expect("should get account");
assert!(account.associated_keys().len() > DEFAULT_MAX_ASSOCIATED_KEYS as usize);
assert_eq!(
account.associated_keys().len(),
new_engine_config.max_associated_keys() as usize
);
} | let new_protocol_version =
ProtocolVersion::from_parts(sem_ver.major, sem_ver.minor, sem_ver.patch + 1);
|
sorting.py | #!python
from binarytree import *
def is_sorted(items):
"""Return a boolean indicating whether given items are in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Check that all adjacent items are in order, return early if not
def bubble_sort(items):
"""Sort given items by swapping adjacent items that are out of order, and
repeating until all items are in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Repeat until all items are in sorted order
# TODO: Swap adjacent items that are out of order
def selection_sort(items):
"""Sort given items by finding minimum item, swapping it with first
unsorted item, and repeating until all items are in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Repeat until all items are in sorted order
# TODO: Find minimum item in unsorted items
# TODO: Swap it with first unsorted item
def insertion_sort(items):
"""Sort given items by taking first unsorted item, inserting it in sorted
order in front of items, and repeating until all items are in order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Repeat until all items are in sorted order
# TODO: Take first unsorted item
# TODO: Insert it in sorted order in front of items
def merge(items1, items2):
"""Merge given lists of items, each assumed to already be in sorted order,
and return a new list containing all items in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Repeat until one list is empty
# TODO: Find minimum item in both lists and append it to new list
# TODO: Append remaining items in non-empty list to new list
def split_sort_merge(items):
"""Sort given items by splitting list into two approximately equal halves,
sorting each with an iterative sorting algorithm, and merging results into
a list in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Split items list into approximately equal halves
# TODO: Sort each half using any other sorting algorithm
# TODO: Merge sorted halves into one list in sorted order
def merge_sort(items):
|
def partition(items, low, high):
"""Return index `p` after in-place partitioning given items in range
`[low...high]` by choosing a pivot (TODO: document your method here) from
that range, moving pivot into index `p`, items less than pivot into range
`[low...p-1]`, and items greater than pivot into range `[p+1...high]`.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Choose a pivot any way and document your method in docstring above
# TODO: Loop through all items in range [low...high]
# TODO: Move items less than pivot into front of range [low...p-1]
# TODO: Move items greater than pivot into back of range [p+1...high]
# TODO: Move pivot item into final position [p] and return index p
def quick_sort(items, low=None, high=None):
"""Sort given items in place by partitioning items in range `[low...high]`
around a pivot item and recursively sorting each remaining sublist range.
TODO: Best case running time: ??? Why and under what conditions?
TODO: Worst case running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Check if high and low range bounds have default values (not given)
# TODO: Check if list or range is so small it's already sorted (base case)
# TODO: Partition items in-place around a pivot and get index of pivot
# TODO: Sort each sublist range by recursively calling quick sort
def counting_sort(numbers):
"""Sort given numbers (integers) by counting occurrences of each number,
then looping over counts and copying that many numbers into output list.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Find range of given numbers (minimum and maximum integer values)
# TODO: Create list of counts with a slot for each number in input range
# TODO: Loop over given numbers and increment each number's count
# TODO: Loop over counts and append that many numbers into output list
# FIXME: Improve this to mutate input instead of creating new output list
def bucket_sort(numbers, num_buckets=10):
"""Sort given numbers by distributing into buckets representing subranges,
sorting each bucket, and combining contents of all buckets in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Find range of given numbers (minimum and maximum integer values)
# TODO: Create list of buckets to store numbers in subranges of input range
# TODO: Loop over given numbers and place each item in appropriate bucket
# TODO: Sort each bucket using any sorting algorithm (recursive or another)
# TODO: Loop over buckets and append each bucket's numbers into output list
# FIXME: Improve this to mutate input instead of creating new output list
def random_ints(count=20, min=1, max=50):
"""Return a list of `count` integers sampled uniformly at random from
given range [`min`...`max`] with replacement (duplicates are allowed)."""
import random
return [random.randint(min, max) for _ in range(count)]
def test_sorting(sort=bubble_sort, num_items=20, max_value=50):
"""Test sorting algorithms with a small list of random items."""
# Create a list of items randomly sampled from range [1...max_value]
items = random_ints(num_items, 1, max_value)
print('Initial items: {!r}'.format(items))
print('Sorted order? {!r}'.format(is_sorted(items)))
# Change this sort variable to the sorting algorithm you want to test
# sort = bubble_sort
print('Sorting items with {}(items)'.format(sort.__name__))
sort(items)
print('Sorted items: {!r}'.format(items))
print('Sorted order? {!r}'.format(is_sorted(items)))
def main():
"""Read command-line arguments and test sorting algorithms."""
import sys
args = sys.argv[1:] # Ignore script file name
if len(args) == 0:
script = sys.argv[0] # Get script file name
print('Usage: {} sort num max'.format(script))
print('Test sorting algorithm `sort` with a list of `num` integers')
print(' randomly sampled from the range [1...`max`] (inclusive)')
print('\nExample: {} bubble_sort 10 20'.format(script))
print('Initial items: [3, 15, 4, 7, 20, 6, 18, 11, 9, 7]')
print('Sorting items with bubble_sort(items)')
print('Sorted items: [3, 4, 6, 7, 7, 9, 11, 15, 18, 20]')
return
# Get sort function by name
if len(args) >= 1:
sort_name = args[0]
# Terrible hack abusing globals
if sort_name in globals():
sort_function = globals()[sort_name]
else:
# Don't explode, just warn user and show list of sorting functions
print('Sorting function {!r} does not exist'.format(sort_name))
print('Available sorting functions:')
for name in globals():
if name.find('sort') >= 0:
print(' {}'.format(name))
return
# Get num_items and max_value, but don't explode if input is not an integer
try:
num_items = int(args[1]) if len(args) >= 2 else 20
max_value = int(args[2]) if len(args) >= 3 else 50
# print('Num items: {}, max value: {}'.format(num_items, max_value))
except ValueError:
print('Integer required for `num` and `max` command-line arguments')
return
# Test sort function
test_sorting(sort_function, num_items, max_value)
if __name__ == '__main__':
main()
| """Sort given items by splitting list into two approximately equal halves,
sorting each recursively, and merging results into a list in sorted order.
TODO: Running time: ??? Why and under what conditions?
TODO: Memory usage: ??? Why and under what conditions?"""
# TODO: Check if list is so small it's already sorted (base case)
# TODO: Split items list into approximately equal halves
# TODO: Sort each half by recursively calling merge sort
# TODO: Merge sorted halves into one list in sorted order |
zz_groupversion_info.go | /*
Copyright 2021 The Crossplane Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by terrajet. DO NOT EDIT.
// +kubebuilder:object:generate=true
// +groupName=thermal.intersight.jet.crossplane.io
// +versionName=v1alpha1
package v1alpha1
import (
"k8s.io/apimachinery/pkg/runtime/schema"
"sigs.k8s.io/controller-runtime/pkg/scheme"
)
// Package type metadata.
const (
CRDGroup = "thermal.intersight.jet.crossplane.io"
CRDVersion = "v1alpha1"
)
var (
// CRDGroupVersion is the API Group Version used to register the objects
CRDGroupVersion = schema.GroupVersion{Group: CRDGroup, Version: CRDVersion}
| // AddToScheme adds the types in this group-version to the given scheme.
AddToScheme = SchemeBuilder.AddToScheme
) | // SchemeBuilder is used to add go types to the GroupVersionKind scheme
SchemeBuilder = &scheme.Builder{GroupVersion: CRDGroupVersion}
|
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod apps {
use crate::models::*;
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<App, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.IoTCentral/iotApps/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: App =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value) | let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
app: &App,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.IoTCentral/iotApps/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(app).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: App = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: App = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(App),
Created201(App),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
app_patch: &AppPatch,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.IoTCentral/iotApps/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(app_patch).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: App =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(App),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.IoTCentral/iotApps/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<AppListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.IoTCentral/iotApps",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AppListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_subscription {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<AppListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.IoTCentral/iotApps",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AppListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn check_name_availability(
operation_config: &crate::OperationConfig,
subscription_id: &str,
operation_inputs: &OperationInputs,
) -> std::result::Result<AppAvailabilityInfo, check_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.IoTCentral/checkNameAvailability",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_name_availability::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(operation_inputs).map_err(check_name_availability::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(check_name_availability::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_name_availability::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AppAvailabilityInfo = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Err(check_name_availability::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod check_name_availability {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn check_subdomain_availability(
operation_config: &crate::OperationConfig,
subscription_id: &str,
operation_inputs: &OperationInputs,
) -> std::result::Result<AppAvailabilityInfo, check_subdomain_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.IoTCentral/checkSubdomainAvailability",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(check_subdomain_availability::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_subdomain_availability::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(operation_inputs).map_err(check_subdomain_availability::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(check_subdomain_availability::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_subdomain_availability::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AppAvailabilityInfo = serde_json::from_slice(rsp_body)
.map_err(|source| check_subdomain_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| check_subdomain_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Err(check_subdomain_availability::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod check_subdomain_availability {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_templates(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<AppTemplatesResult, list_templates::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.IoTCentral/appTemplates",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_templates::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_templates::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_templates::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_templates::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AppTemplatesResult =
serde_json::from_slice(rsp_body).map_err(|source| list_templates::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list_templates::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_templates::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_templates {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod operations {
use crate::models::*;
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.IoTCentral/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
} | }
status_code => {
let rsp_body = rsp.body(); |
index.js | import React from 'react';
import PropTypes from 'prop-types';
import { FormLayout, SelectMimicry } from '@vkontakte/vkui';
import get from 'lodash/get';
// Configs
import { COUNTRY_LIST } from '../../configs/country';
const SelectCountry = ({ setActiveView, countryViewName, selectedCountry }) => {
/**
* @returns {String} | const countryName = get(country, 'name', '');
return countryName;
}
const country = getCountryName();
return (
<FormLayout>
<SelectMimicry
top="Выберите страну"
placeholder="Не выбрана"
onClick={() => setActiveView(countryViewName)}
>
{country}
</SelectMimicry>
</FormLayout>
);
};
SelectCountry.propTypes = {
setActiveView: PropTypes.func.isRequired,
countryViewName: PropTypes.string.isRequired,
selectedCountry: PropTypes.string.isRequired,
};
export default SelectCountry; | */
const getCountryName = () => {
const country = COUNTRY_LIST.find(item => item.key === selectedCountry); |
query.py | import operator
import re
from typing import Any, Dict, List, Tuple, Union
import frappe
from frappe import _
from frappe.query_builder import Criterion, Field, Order, Table
def like(key: Field, value: str) -> frappe.qb:
"""Wrapper method for `LIKE`
Args:
key (str): field
value (str): criterion
Returns:
frappe.qb: frappe.qb object with `LIKE`
"""
return key.like(value)
def func_in(key: Field, value: Union[List, Tuple]) -> frappe.qb:
"""Wrapper method for `IN`
Args:
key (str): field
value (Union[List, Tuple]): criterion
Returns:
frappe.qb: frappe.qb object with `IN`
"""
return key.isin(value)
def not_like(key: Field, value: str) -> frappe.qb:
"""Wrapper method for `NOT LIKE`
Args:
key (str): field
value (str): criterion
Returns:
frappe.qb: `frappe.qb object with `NOT LIKE`
"""
return key.not_like(value)
def func_not_in(key: Field, value: Union[List, Tuple]):
"""Wrapper method for `NOT IN`
Args:
key (str): field
value (Union[List, Tuple]): criterion
Returns:
frappe.qb: frappe.qb object with `NOT IN`
"""
return key.notin(value)
def func_regex(key: Field, value: str) -> frappe.qb:
"""Wrapper method for `REGEX`
Args:
key (str): field
value (str): criterion
Returns:
frappe.qb: `frappe.qb object with `REGEX`
"""
return key.regex(value)
def func_between(key: Field, value: Union[List, Tuple]) -> frappe.qb:
"""Wrapper method for `BETWEEN`
Args:
key (str): field
value (Union[List, Tuple]): criterion
Returns:
frappe.qb: frappe.qb object with `BETWEEN`
"""
return key[slice(*value)]
def make_function(key: Any, value: Union[int, str]):
"""returns fucntion query
Args:
key (Any): field
value (Union[int, str]): criterion
Returns:
frappe.qb: frappe.qb object
"""
return OPERATOR_MAP[value[0]](key, value[1])
def change_orderby(order: str):
"""Convert orderby to standart Order object
Args:
order (str): Field, order
Returns:
tuple: field, order
"""
order = order.split()
try:
if order[1].lower() == "asc":
return order[0], Order.asc
except IndexError:
pass
return order[0], Order.desc
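# Illustrative behaviour (assumed field names), based on the parsing above:
#   change_orderby("modified asc")   # -> ("modified", Order.asc)
#   change_orderby("creation desc")  # -> ("creation", Order.desc)
#   change_orderby("name")           # -> ("name", Order.desc)  (default order)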
OPERATOR_MAP = {
"+": operator.add,
"=": operator.eq,
"-": operator.sub,
"!=": operator.ne,
"<": operator.lt,
">": operator.gt,
"<=": operator.le,
"=<": operator.le,
">=": operator.ge,
"=>": operator.ge,
"in": func_in,
"not in": func_not_in,
"like": like,
"not like": not_like,
"regex": func_regex,
"between": func_between,
}
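# Illustrative usage (assumed field name and values): make_function looks up the
# operator keyword in OPERATOR_MAP and applies it to the field, so
#   make_function(Field("status"), ("in", ("Open", "Pending")))
# is equivalent to
#   OPERATOR_MAP["in"](Field("status"), ("Open", "Pending"))
# i.e. Field("status").isin(("Open", "Pending")).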
class Query:
tables: dict = {}
def get_condition(self, table: Union[str, Table], **kwargs) -> frappe.qb:
"""Get initial table object
Args:
table (str): DocType
Returns:
frappe.qb: DocType with initial condition
"""
table_object = self.get_table(table)
if kwargs.get("update"):
return frappe.qb.update(table_object)
if kwargs.get("into"):
return frappe.qb.into(table_object)
return frappe.qb.from_(table_object)
def get_table(self, table_name: Union[str, Table]) -> Table:
if isinstance(table_name, Table):
return table_name
table_name = table_name.strip('"').strip("'")
if table_name not in self.tables:
self.tables[table_name] = frappe.qb.DocType(table_name)
return self.tables[table_name]
def criterion_query(self, table: str, criterion: Criterion, **kwargs) -> frappe.qb:
"""Generate filters from Criterion objects
Args:
table (str): DocType
criterion (Criterion): Filters
Returns:
frappe.qb: condition object
"""
condition = self.add_conditions(self.get_condition(table, **kwargs), **kwargs)
return condition.where(criterion)
def add_conditions(self, conditions: frappe.qb, **kwargs):
"""Adding additional conditions
Args:
conditions (frappe.qb): built conditions
Returns:
conditions (frappe.qb): frappe.qb object
"""
if kwargs.get("orderby"):
orderby = kwargs.get("orderby")
if isinstance(orderby, str) and len(orderby.split()) > 1:
for ordby in orderby.split(","):
if ordby := ordby.strip():
orderby, order = change_orderby(ordby)
conditions = conditions.orderby(orderby, order=order)
else:
conditions = conditions.orderby(orderby, order=kwargs.get("order") or Order.desc)
if kwargs.get("limit"):
conditions = conditions.limit(kwargs.get("limit"))
if kwargs.get("distinct"):
conditions = conditions.distinct()
if kwargs.get("for_update"):
conditions = conditions.for_update()
return conditions
def misc_query(self, table: str, filters: Union[List, Tuple] = None, **kwargs):
|
def dict_query(
self, table: str, filters: Dict[str, Union[str, int]] = None, **kwargs
) -> frappe.qb:
"""Build conditions using the given dictionary filters
Args:
table (str): DocType
filters (Dict[str, Union[str, int]], optional): Filters. Defaults to None.
Returns:
frappe.qb: conditions object
"""
conditions = self.get_condition(table, **kwargs)
if not filters:
conditions = self.add_conditions(conditions, **kwargs)
return conditions
for key in filters:
value = filters.get(key)
_operator = OPERATOR_MAP["="]
if not isinstance(key, str):
conditions = conditions.where(make_function(key, value))
continue
if isinstance(value, (list, tuple)):
if isinstance(value[1], (list, tuple)) or value[0] in list(OPERATOR_MAP.keys())[-4:]:
_operator = OPERATOR_MAP[value[0]]
conditions = conditions.where(_operator(Field(key), value[1]))
else:
_operator = OPERATOR_MAP[value[0]]
conditions = conditions.where(_operator(Field(key), value[1]))
else:
if value is not None:
conditions = conditions.where(_operator(Field(key), value))
else:
_table = conditions._from[0]
field = getattr(_table, key)
conditions = conditions.where(field.isnull())
return self.add_conditions(conditions, **kwargs)
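# Illustrative filters (assumed DocType and field values): dict_query accepts
# plain equality filters as well as (operator, value) tuples, e.g.
#   Query().get_sql("User", fields=["name"], filters={"enabled": 1})
#   Query().get_sql("User", fields=["name"], filters={"name": ("like", "a%")})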
def build_conditions(
self, table: str, filters: Union[Dict[str, Union[str, int]], str, int] = None, **kwargs
) -> frappe.qb:
"""Build conditions for sql query
Args:
filters (Union[Dict[str, Union[str, int]], str, int]): conditions in Dict
table (str): DocType
Returns:
frappe.qb: frappe.qb conditions object
"""
if isinstance(filters, int) or isinstance(filters, str):
filters = {"name": str(filters)}
if isinstance(filters, Criterion):
criterion = self.criterion_query(table, filters, **kwargs)
elif isinstance(filters, (list, tuple)):
criterion = self.misc_query(table, filters, **kwargs)
else:
criterion = self.dict_query(filters=filters, table=table, **kwargs)
return criterion
def get_sql(
self,
table: str,
fields: Union[List, Tuple],
filters: Union[Dict[str, Union[str, int]], str, int, List[Union[List, str, int]]] = None,
**kwargs,
):
# Clean up state before each query
self.tables = {}
criterion = self.build_conditions(table, filters, **kwargs)
if len(self.tables) > 1:
primary_table = self.tables[table]
del self.tables[table]
for table_object in self.tables.values():
criterion = criterion.left_join(table_object).on(table_object.parent == primary_table.name)
if isinstance(fields, (list, tuple)):
query = criterion.select(*kwargs.get("field_objects", fields))
elif isinstance(fields, Criterion):
query = criterion.select(fields)
else:
query = criterion.select(fields)
return query
class Permission:
@classmethod
def check_permissions(cls, query, **kwargs):
if not isinstance(query, str):
query = query.get_sql()
doctype = cls.get_tables_from_query(query)
if isinstance(doctype, str):
doctype = [doctype]
for dt in doctype:
dt = re.sub("^tab", "", dt)
if not frappe.has_permission(
dt,
"select",
user=kwargs.get("user"),
parent_doctype=kwargs.get("parent_doctype"),
) and not frappe.has_permission(
dt,
"read",
user=kwargs.get("user"),
parent_doctype=kwargs.get("parent_doctype"),
):
frappe.throw(_("Insufficient Permission for {0}").format(frappe.bold(dt)))
@staticmethod
def get_tables_from_query(query: str):
return [table for table in re.findall(r"\w+", query) if table.startswith("tab")]
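# Illustrative behaviour (assumed query text): only identifiers starting with
# "tab" are treated as table names, e.g.
#   Permission.get_tables_from_query("select `name` from `tabUser`, `tabRole`")
#   # -> ['tabUser', 'tabRole']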
| """Build conditions using the given Lists or Tuple filters
Args:
table (str): DocType
filters (Union[List, Tuple], optional): Filters. Defaults to None.
"""
conditions = self.get_condition(table, **kwargs)
if not filters:
return conditions
if isinstance(filters, list):
for f in filters:
if not isinstance(f, (list, tuple)):
_operator = OPERATOR_MAP[filters[1]]
if not isinstance(filters[0], str):
conditions = make_function(filters[0], filters[2])
break
conditions = conditions.where(_operator(Field(filters[0]), filters[2]))
break
else:
_operator = OPERATOR_MAP[f[-2]]
if len(f) == 4:
table_object = self.get_table(f[0])
_field = table_object[f[1]]
else:
_field = Field(f[0])
conditions = conditions.where(_operator(_field, f[-1]))
return self.add_conditions(conditions, **kwargs) |
instrumenter.it.spec.ts | import { promises as fsPromises } from 'fs';
import { testInjector } from '@stryker-mutator/test-helpers';
import { File } from '@stryker-mutator/api/core';
import { expect } from 'chai';
import chaiJestSnapshot from 'chai-jest-snapshot';
import { Instrumenter } from '../../src';
import { createInstrumenterOptions } from '../helpers/factories';
import { resolveTestResource } from '../helpers/resolve-test-resource';
describe('instrumenter integration', () => {
let sut: Instrumenter;
beforeEach(() => {
sut = testInjector.injector.injectClass(Instrumenter);
});
it('should be able to instrument html', async () => {
await arrangeAndActAssert('html-sample.html');
});
it('should be able to instrument a simple js file', async () => {
await arrangeAndActAssert('js-sample.js');
});
it('should be able to instrument a simple ts file', async () => {
await arrangeAndActAssert('ts-sample.ts');
});
it('should be able to instrument an angular component', async () => {
await arrangeAndActAssert('app.component.ts');
});
it('should be able to instrument a lit-html file', async () => {
await arrangeAndActAssert('lit-html-sample.ts');
});
it('should be able to instrument a vue sample', async () => {
await arrangeAndActAssert('vue-sample.vue');
});
it('should be able to instrument super calls', async () => {
await arrangeAndActAssert('super-call.ts');
}); | await arrangeAndActAssert('ignore.js', createInstrumenterOptions({ excludedMutations: ['ArithmeticOperator'] }));
});
it('should be able to instrument switch case statements (using the switchCaseMutantPlacer)', async () => {
await arrangeAndActAssert('switch-case.js');
});
it('should be able to instrument string literals in different places', async () => {
await arrangeAndActAssert('string-mutations.ts');
});
describe('type declarations', () => {
it('should not produce mutants for TS type definitions', async () => {
await arrangeAndActAssert('type-definitions.ts');
});
it('should not produce mutants for flow-types', async () => {
await arrangeAndActAssert('flow-typed.js', createInstrumenterOptions({ plugins: ['flow'] }));
});
it('should not produce mutants for a TS declaration file', async () => {
await arrangeAndActAssert('ts-declarations.ts');
});
});
async function arrangeAndActAssert(fileName: string, options = createInstrumenterOptions()) {
const fullFileName = resolveTestResource('instrumenter', fileName);
const file = new File(fullFileName, await fsPromises.readFile(fullFileName));
const result = await sut.instrument([file], options);
expect(result.files).lengthOf(1);
chaiJestSnapshot.setFilename(resolveTestResource('instrumenter', `${fileName}.out.snap`));
expect(result.files[0].textContent).matchSnapshot();
}
}); | it('should be able to instrument js files with a shebang in them', async () => {
await arrangeAndActAssert('shebang.js');
});
it('should not place ignored mutants', async () => { |
bank_account_service_api.py | # coding: utf-8
from __future__ import absolute_import
import six
from postfinancecheckout.api_client import ApiClient
class BankAccountServiceApi:
def __init__(self, configuration):
self.api_client = ApiClient(configuration=configuration)
def count(self, space_id, **kwargs):
"""Count
Counts the number of items in the database as restricted by the given filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.count(space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQueryFilter filter: The filter which restricts the entities which are used to calculate the count.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.count_with_http_info(space_id, **kwargs)
else:
(data) = self.count_with_http_info(space_id, **kwargs)
return data
def | (self, space_id, **kwargs):
"""Count
Counts the number of items in the database as restricted by the given filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.count_with_http_info(space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQueryFilter filter: The filter which restricts the entities which are used to calculate the count.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'filter']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method count" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `count`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'filter' in params:
body_params = params['filter']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json;charset=utf-8'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/bank-account/count', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='int',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read(self, space_id, id, **kwargs):
"""Read
Reads the entity with the given 'id' and returns it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read(space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int id: The ID of the bank account which should be returned. (required)
:return: BankAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_with_http_info(space_id, id, **kwargs)
else:
(data) = self.read_with_http_info(space_id, id, **kwargs)
return data
def read_with_http_info(self, space_id, id, **kwargs):
"""Read
Reads the entity with the given 'id' and returns it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_with_http_info(space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int id: The ID of the bank account which should be returned. (required)
:return: BankAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'id']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `read`")
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `read`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
if 'id' in params:
query_params.append(('id', params['id']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/bank-account/read', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BankAccount',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search(self, space_id, query, **kwargs):
"""Search
Searches for the entities as specified by the given query.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search(space_id, query, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQuery query: The query restricts the bank accounts which are returned by the search. (required)
:return: list[BankAccount]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_with_http_info(space_id, query, **kwargs)
else:
(data) = self.search_with_http_info(space_id, query, **kwargs)
return data
def search_with_http_info(self, space_id, query, **kwargs):
"""Search
Searches for the entities as specified by the given query.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_with_http_info(space_id, query, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQuery query: The query restricts the bank accounts which are returned by the search. (required)
:return: list[BankAccount]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'query']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `search`")
# verify the required parameter 'query' is set
if ('query' not in params or
params['query'] is None):
raise ValueError("Missing the required parameter `query` when calling `search`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json;charset=utf-8'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/bank-account/search', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[BankAccount]',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| count_with_http_info |
hangul.min.js |
!function(){"use strict";var r,d,g,l,t,e,h=["ㄱ","ㄲ","ㄴ","ㄷ","ㄸ","ㄹ","ㅁ","ㅂ","ㅃ","ㅅ","ㅆ","ㅇ","ㅈ","ㅉ","ㅊ","ㅋ","ㅌ","ㅍ","ㅎ"],p=["ㅏ","ㅐ","ㅑ","ㅒ","ㅓ","ㅔ","ㅕ","ㅖ","ㅗ",["ㅗ","ㅏ"],["ㅗ","ㅐ"],["ㅗ","ㅣ"],"ㅛ","ㅜ",["ㅜ","ㅓ"],["ㅜ","ㅔ"],["ㅜ","ㅣ"],"ㅠ","ㅡ",["ㅡ","ㅣ"],"ㅣ"],C=["","ㄱ","ㄲ",["ㄱ","ㅅ"],"ㄴ",["ㄴ","ㅈ"],["ㄴ","ㅎ"],"ㄷ","ㄹ",["ㄹ","ㄱ"],["ㄹ","ㅁ"],["ㄹ","ㅂ"],["ㄹ","ㅅ"],["ㄹ","ㅌ"],["ㄹ","ㅍ"],["ㄹ","ㅎ"],"ㅁ","ㅂ",["ㅂ","ㅅ"],"ㅅ","ㅆ","ㅇ","ㅈ","ㅊ","ㅋ","ㅌ","ㅍ","ㅎ"],A=44032;function n(n){for(var r=n.length,t={0:0},e=0;e<r;e++)n[e]&&(t[n[e].charCodeAt(0)]=e);return t}function o(n){for(var r,t,e=n.length,o={},i=0;i<e;i++)r=n[i][0].charCodeAt(0),t=n[i][1].charCodeAt(0),void 0===o[r]&&(o[r]={}),o[r][t]=n[i][2].charCodeAt(0);return o}function v(n){return void 0!==r[n]}function y(n){return void 0!==d[n]}function m(n){return void 0!==g[n]}function u(n){return void 0!==l[n]}function j(n){return 44032<=n&&n<=55203}function b(n,r){return!(!e[n]||!e[n][r])&&e[n][r]}function S(n,r){return!(!t[n]||!t[n][r])&&t[n][r]}r=n(["ㄱ","ㄲ","ㄳ","ㄴ","ㄵ","ㄶ","ㄷ","ㄸ","ㄹ","ㄺ","ㄻ","ㄼ","ㄽ","ㄾ","ㄿ","ㅀ","ㅁ","ㅂ","ㅃ","ㅄ","ㅅ","ㅆ","ㅇ","ㅈ","ㅉ","ㅊ","ㅋ","ㅌ","ㅍ","ㅎ"]),d=n(["ㄱ","ㄲ","ㄴ","ㄷ","ㄸ","ㄹ","ㅁ","ㅂ","ㅃ","ㅅ","ㅆ","ㅇ","ㅈ","ㅉ","ㅊ","ㅋ","ㅌ","ㅍ","ㅎ"]),g=n(["ㅏ","ㅐ","ㅑ","ㅒ","ㅓ","ㅔ","ㅕ","ㅖ","ㅗ","ㅘ","ㅙ","ㅚ","ㅛ","ㅜ","ㅝ","ㅞ","ㅟ","ㅠ","ㅡ","ㅢ","ㅣ"]),l=n(["","ㄱ","ㄲ","ㄳ","ㄴ","ㄵ","ㄶ","ㄷ","ㄹ","ㄺ","ㄻ","ㄼ","ㄽ","ㄾ","ㄿ","ㅀ","ㅁ","ㅂ","ㅄ","ㅅ","ㅆ","ㅇ","ㅈ","ㅊ","ㅋ","ㅌ","ㅍ","ㅎ"]),t=o([["ㄱ","ㅅ","ㄳ"],["ㄴ","ㅈ","ㄵ"],["ㄴ","ㅎ","ㄶ"],["ㄹ","ㄱ","ㄺ"],["ㄹ","ㅁ","ㄻ"],["ㄹ","ㅂ","ㄼ"],["ㄹ","ㅅ","ㄽ"],["ㄹ","ㅌ","ㄾ"],["ㄹ","ㅍ","ㄿ"],["ㄹ","ㅎ","ㅀ"],["ㅂ","ㅅ","ㅄ"]]),e=o([["ㅗ","ㅏ","ㅘ"],["ㅗ","ㅐ","ㅙ"],["ㅗ","ㅣ","ㅚ"],["ㅜ","ㅓ","ㅝ"],["ㅜ","ㅔ","ㅞ"],["ㅜ","ㅣ","ㅟ"],["ㅡ","ㅣ","ㅢ"]]);function w(n,r){if(null===n)throw new Error("Arguments cannot be null");"object"==typeof n&&(n=n.join(""));for(var t,e,o,i,u,f=[],c=n.length,a=0;a<c;a++){var s=[];j(i=n.charCodeAt(a))?(e=((i-=A)-(o=i%28))/28%21,t=parseInt((i-o)/28/21),s.push(h[t]),"object"==typeof p[e]?s=s.concat(p[e]):s.push(p[e]),0<o&&("object"==typeof C[o]?s=s.concat(C[o]):s.push(C[o]))):v(i)?"string"==typeof(u=y(i)?h[d[i]]:C[l[i]])?s.push(u):s=s.concat(u):m(i)?"string"==typeof(u=p[g[i]])?s.push(u):s=s.concat(u):s.push(n.charAt(a)),r?f.push(s):f=f.concat(s)}return f}function i(n){return"string"!=typeof n?"":(n=w(n)).join("")}function f(c){"string"==typeof c&&(c=w(c));var n,r,a=[],t=c.length,e=0,s=-1,h=!1;function o(n){var r,t,e,o,i=0,u="";if(h=!1,!(n<s+1))for(var f=1;;f++){if(1===f){if(m(r=c[s+f].charCodeAt(0)))return s+f+1<=n&&m(t=c[s+f+1].charCodeAt(0))?a.push(String.fromCharCode(b(r,t))):a.push(c[s+f]),void(s=n);if(!y(r))return a.push(c[s+f]),void(s=n);u=c[s+f]}else if(2===f){if(y(t=c[s+f].charCodeAt(0)))return r=S(r,t),u=String.fromCharCode(r),a.push(u),void(s=n);u=String.fromCharCode(28*(21*d[r]+g[t])+A)}else 3===f?(b(t,e=c[s+f].charCodeAt(0))?t=b(t,e):i=e,u=String.fromCharCode(28*(21*d[r]+g[t])+l[i]+A)):4===f?(i=S(i,o=c[s+f].charCodeAt(0))?S(i,o):o,u=String.fromCharCode(28*(21*d[r]+g[t])+l[i]+A)):5===f&&(i=S(i,o=c[s+f].charCodeAt(0)),u=String.fromCharCode(28*(21*d[r]+g[t])+l[i]+A));if(n<=s+f)return a.push(u),void(s=n)}}for(var i=0;i<t;i++)y(n=c[i].charCodeAt(0))||m(n)||u(n)?(0===e?y(n)?e=1:m(n)&&(e=4):1==e?m(n)?e=2:S(r,n)?e=5:o(i-1):2==e?u(n)?e=3:m(n)?b(r,n)||(o(i-1),e=4):(o(i-1),e=1):3==e?u(n)?!h&&S(r,n)?h=!0:(o(i-1),e=1):y(n)?(o(i-1),e=1):m(n)&&(o(i-2),e=2):4==e?m(n)?b(r,n)?(o(i),e=0):o(i-1):(o(i-1),e=1):5==e&&(e=m(n)?(o(i-2),2):(o(i-1),1)),r=n):(o(i-1),o(i),e=0);return o(i-1),a.join("")}function 
c(n){this.string=n,this.disassembled=w(n).join("")}c.prototype.search=function(n){return w(n).join("").indexOf(this.disassembled)};var a={disassemble:w,d:w,disassembleToString:i,ds:i,assemble:f,a:f,search:function(n,r){var t=w(n).join(""),e=w(r).join("");return t.indexOf(e)},rangeSearch:function(n,r){var t,e=w(n).join(""),o=w(r).join(""),i=w(n,!0),u=new RegExp(o,"gi"),f=[];if(!r.length)return[];for(;t=u.exec(e);)f.push(t.index);return f.map(function(n){return[function(n){for(var r=0,t=0;r<i.length;++r)if(n<(t+=i[r].length))return r}(n),function(n){for(var r=0,t=0;r<i.length;++r)if(t+=i[r].length,n+o.length<=t)return r}(n)]})},Searcher:c,endsWithConsonant:function(n){"object"==typeof n&&(n=n.join(""));var r=n.charCodeAt(n.length-1);if(j(r)){if(0<(r-=A)%28)return!0}else if(v(r))return!0;return!1},endsWith:function(n,r){return w(n).pop()===r},isHangul:function(n){return"string"==typeof n&&(n=n.charCodeAt(0)),j(n)},isComplete:function(n){return"string"==typeof n&&(n=n.charCodeAt(0)),j(n)},isConsonant:function(n){return"string"==typeof n&&(n=n.charCodeAt(0)),v(n)},isVowel:function(n){return"string"==typeof n&&(n=n.charCodeAt(0)),m(n)},isCho:function(n){return"string"==typeof n&&(n=n.charCodeAt(0)),y(n)},isJong:function(n){return"string"==typeof n&&(n=n.charCodeAt(0)),u(n)},isHangulAll:function(n){if("string"!=typeof n)return!1;for(var r=0;r<n.length;r++)if(!j(n.charCodeAt(r)))return!1;return!0},isCompleteAll:function(n){if("string"!=typeof n)return!1;for(var r=0;r<n.length;r++)if(!j(n.charCodeAt(r)))return!1;return!0},isConsonantAll:function(n){if("string"!=typeof n)return!1;for(var r=0;r<n.length;r++)if(!v(n.charCodeAt(r)))return!1;return!0},isVowelAll:function(n){if("string"!=typeof n)return!1;for(var r=0;r<n.length;r++)if(!m(n.charCodeAt(r)))return!1;return!0},isChoAll:function(n){if("string"!=typeof n)return!1;for(var r=0;r<n.length;r++)if(!y(n.charCodeAt(r)))return!1;return!0},isJongAll:function(n){if("string"!=typeof n)return!1;for(var r=0;r<n.length;r++)if(!u(n.charCodeAt(r)))return!1;return!0}};"function"==typeof define&&define.amd?define(function(){return a}):"undefined"!=typeof module?module.exports=a:window.Hangul=a}(); | /*! hangul-js 2019-12-10 */ |
|
type.go | package paymentmethod
import "github.com/dpb587/go-schemaorg"
// // <p>A payment method is a standardized procedure for transferring the monetary
// amount for a purchase. Payment methods are characterized by the legal and
// technical structures used, and by the organization or group carrying out the
// transaction.</p>
//
// <p>Commonly used values:</p>
//
// <ul>
// <li>http://purl.org/goodrelations/v1#ByBankTransferInAdvance</li> | // <li>http://purl.org/goodrelations/v1#DirectDebit</li>
// <li>http://purl.org/goodrelations/v1#GoogleCheckout</li>
// <li>http://purl.org/goodrelations/v1#PayPal</li>
// <li>http://purl.org/goodrelations/v1#PaySwarm</li>
// </ul>
//
var Type = schemaorg.NewDataType("http://schema.org", "PaymentMethod")
func New() *schemaorg.Thing {
return schemaorg.NewThing(Type)
} | // <li>http://purl.org/goodrelations/v1#ByInvoice</li>
// <li>http://purl.org/goodrelations/v1#Cash</li>
// <li>http://purl.org/goodrelations/v1#CheckInAdvance</li>
// <li>http://purl.org/goodrelations/v1#COD</li> |
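// A minimal usage sketch (hypothetical surrounding code; only the exported Type and New
// above are assumed):
//
//   pm := paymentmethod.New() // returns a *schemaorg.Thing for the PaymentMethod data type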
submit.spec.tsx | import * as React from 'react';
import { shallow } from 'enzyme'; | describe('Submit Page', () => {
it('is defined', () => {
const app = shallow(<Page serverState={{}} />);
expect(app).toBeDefined();
});
}); |
import Page from '../submit';
|
func.py | import cv2
import os
import numpy as np
import matplotlib.pyplot as plt
# %%
def _pick(L, ty, path):
L_ = [cv2.imread(os.path.join(path, i)) for i in L if i.split('_')[0]==ty]
    # read the input images
return L_
def _gray(img):
return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
def _Pos(img, idx):
def on_press |
L.append(np.array([int(event.xdata), int(event.ydata)]))
        # record the clicked coordinate point
if len(L)>=2:
plt.close()
            # close the window once two or more points have been clicked
np.save('./npy/loc_' + idx + '.npy', np.array(L))
            # save the recorded coordinate points
fig = plt.figure()
plt.imshow(img, animated= True)
L = []
fig.canvas.mpl_connect('button_press_event', on_press)
    # show the image interactively so the user can click the target points
plt.show()
def _PlotPos(img, idx):
img_c = np.copy(img)
src = np.load('./npy/loc_' + idx + '.npy')
    # load the saved selection coordinates
print('Choose point 1: ({}, {})'.format(src[0, 0], src[0, 1]))
print('Choose point 2: ({}, {})'.format(src[1, 0], src[1, 1]))
cv2.circle(img_c, (src[0, 0], src[0, 1]), 3, (0, 38, 255), -1)
cv2.circle(img_c, (src[1, 0], src[1, 1]), 3, (0, 38, 255), -1)
    # draw the coordinate points
return img_c
# def _flow(pre_img, nxt_img, pt_x, pt_y, param, init_flow=None):
# XL, YL = [0], [0]
# PX, PY = [pt_x], [pt_y]
# flow = init_flow
# ep = 1000
# i=0
# while ep>1e-2:
# if i==0:
# fg = 0
# else:
# fg = cv2.OPTFLOW_USE_INITIAL_FLOW
# flow = cv2.calcOpticalFlowFarneback(pre_img, nxt_img, flow=flow, flags=fg, **param)
# XL.append(flow[pt_y, pt_x, 0])
# YL.append(flow[pt_y, pt_x, 1])
# PX.append(int(pt_x + flow[pt_y, pt_x, 0]))
# PY.append(int(pt_y + flow[pt_y, pt_x, 1]))
# print('iter:{}, ep:{}\nu = {:.4f}, v = {:.4f}'.format(i, ep, XL[i], YL[i]))
# print('x = {:.4f}, y = {:.4f}'.format(PX[i], PY[i]))
# print('======================')
# i+=1
# if i>0:
# ep = np.sum(np.abs(XL[i-1] - XL[i])) + np.sum(np.abs(YL[i-1] - YL[i]))
# return PX, PY
def _LKflow(pre_img, nxt_img, pt_x, pt_y, lk_params):
p0 = np.array([[pt_x, pt_y]]).astype(np.float32)
i = 0
PX, PY = [pt_x], [pt_y]
XL, YL = [], []
ep = 1e3
    # initialize the parameters
while ep>1e-2:
if i==0:
p1, _, _ = cv2.calcOpticalFlowPyrLK(pre_img, nxt_img, p0, None, **lk_params)
else:
p1, _, _ = cv2.calcOpticalFlowPyrLK(pre_img, nxt_img, p0, p1, flags=cv2.OPTFLOW_USE_INITIAL_FLOW, **lk_params)
            # compute the output coordinates of each iteration in a loop
PX.append(p1[0][0])
PY.append(p1[0][1])
XL.append(PX[i] - PX[i+1])
YL.append(PY[i] - PY[i+1])
        # record the output coordinates and the displacement vectors
if i>0:
ep = np.sum(np.abs(XL[i-1] - XL[i])) + np.sum(np.abs(YL[i-1] - YL[i]))
            # difference from the previous iteration's displacement vector;
            # stop the loop when the difference is < 0.01
print('iter:{}, ep:{}\nu = {:.4f}, v = {:.4f}'.format(i, ep, XL[i], YL[i]))
print('x = {:.4f}, y = {:.4f}'.format(PX[i+1], PY[i+1]))
print('======================')
i+=1
return PX, PY
def _plot(img, PX, PY):
PX = np.array(PX).astype(np.int)
PY = np.array(PY).astype(np.int)
for j in range(len(PX)):
if j!=0:
cv2.line(img, (PX[j-1], PY[j-1]), (PX[j], PY[j]), (250, 5, 216), 2)
for k in range(len(PX)):
if k==0:
c = (0, 38, 255)
elif k==len(PX)-1:
c = (182, 255, 0)
else:
c = (255, 0, 0)
cv2.circle(img, (PX[k], PY[k]), 3, c, -1)
        # draw a marker at the coordinates output by each iteration
return img
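# A minimal usage sketch (hypothetical folder, prefix, and point coordinates; only the
# helpers in this module plus the `lk_params` dict defined below are assumed):
#   imgs = _pick(os.listdir('./img'), 'cup', './img')   # images whose names start with 'cup_'
#   pre, nxt = _gray(imgs[0]), _gray(imgs[1])           # convert both frames to grayscale
#   PX, PY = _LKflow(pre, nxt, 120, 80, lk_params)      # iterative pyramidal LK starting at (120, 80)
#   tracked = _plot(imgs[1], PX, PY)                    # draw the per-iteration track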
# param = dict(pyr_scale=0.8,
# levels=25,
# iterations=1,
# winsize=5,
# poly_n=5,
# poly_sigma=1.1)
lk_params = dict(winSize = (15, 15),
maxLevel = 3,
criteria = (cv2.TERM_CRITERIA_COUNT, 1, 0.03)) | (event): |
index.js | import {routerReducer as routing} from 'react-router-redux';
import {combineReducers} from 'redux';
import lessons from './lessons'; | export default combineReducers({routing, lessons}); | |
hive_ds.py | import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
from pyhive import hive
enabled = True
except ImportError, e:
enabled = False
COLUMN_NAME = 0
COLUMN_TYPE = 1
types_map = {
'BIGINT': TYPE_INTEGER,
'TINYINT': TYPE_INTEGER,
'SMALLINT': TYPE_INTEGER,
'INT': TYPE_INTEGER,
'DOUBLE': TYPE_FLOAT,
'DECIMAL': TYPE_FLOAT,
'FLOAT': TYPE_FLOAT,
'REAL': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'TIMESTAMP': TYPE_DATETIME,
'DATE': TYPE_DATETIME,
'CHAR': TYPE_STRING,
'STRING': TYPE_STRING,
'VARCHAR': TYPE_STRING
}
class Hive(BaseSQLQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"database": {
"type": "string"
},
"username": {
"type": "string"
}
},
"required": ["host"]
}
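    # A configuration matching the schema above might look like this (hypothetical values):
    #   {"host": "hive.example.com", "port": 10000, "database": "default", "username": "redash"}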
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "hive"
def __init__(self, configuration):
super(Hive, self).__init__(configuration)
def _get_tables(self, schema):
try:
schemas_query = "show schemas"
tables_query = "show tables in %s"
columns_query = "show columns in %s"
for schema_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['database_name']), self._run_query_internal(schemas_query))):
for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['tab_name']), self._run_query_internal(tables_query % schema_name))):
columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['field']), self._run_query_internal(columns_query % table_name)))
if schema_name != 'default':
table_name = '{}.{}'.format(schema_name, table_name)
schema[table_name] = {'name': table_name, 'columns': columns}
except Exception, e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return schema.values()
def run_query(self, query):
|
register(Hive)
| connection = None
try:
connection = hive.connect(**self.configuration.to_dict())
cursor = connection.cursor()
cursor.execute(query)
column_names = []
columns = []
for column in cursor.description:
column_name = column[COLUMN_NAME]
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column[COLUMN_TYPE], None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
logging.exception(e)
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
if connection:
connection.close()
return json_data, error |
setup.py | import setuptools
with open("README.md") as f:
long_description = f.read()
setuptools.setup(
name='twarc-hashtags',
version='0.0.5',
url='https://github.com/docnow/twarc-hashtags',
author='Ed Summers',
author_email='[email protected]',
py_modules=['twarc_hashtags'],
description='A twarc plugin to extract hashtags from Twitter data',
long_description=long_description,
long_description_content_type="text/markdown",
python_requires='>=3.3',
install_requires=['twarc>=2.1.1'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
entry_points='''
[twarc.plugins] | hashtags=twarc_hashtags:hashtags
'''
) |
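# Once installed, the [twarc.plugins] entry point above exposes the command as a twarc
# subcommand, e.g. (illustrative file names):
#   twarc2 hashtags tweets.jsonl hashtags.csv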
|
Question.style.js | import styled from 'styled-components'; | const QuestionCounter = styled.div`
font-size: 0.9rem;
margin-bottom: 0.5rem;
`;
const QuestionLabel = styled.div`
font-weight: 900;
font-size: 1.5rem;
`;
export { QuestionStyled, QuestionCounter, QuestionLabel }; |
const QuestionStyled = styled.div`
padding: 2rem;
`; |
check_repo.py | # coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import inspect
import os
import re
import warnings
from pathlib import Path
from transformers import is_flax_available, is_tf_available, is_torch_available
from transformers.file_utils import ENV_VARS_TRUE_VALUES
from transformers.models.auto import get_values
# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_repo.py
PATH_TO_TRANSFORMERS = "src/transformers"
PATH_TO_TESTS = "tests"
PATH_TO_DOC = "docs/source"
# Update this list with models that are supposed to be private.
PRIVATE_MODELS = [
"DPRSpanPredictor",
"T5Stack",
"TFDPRSpanPredictor",
]
# Update this list for models that are not tested with a comment explaining the reason it should not be.
# Being in this list is an exception and should **not** be the rule.
IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
# models to ignore for not tested
"BigBirdPegasusEncoder", # Building part of bigger (tested) model.
"BigBirdPegasusDecoder", # Building part of bigger (tested) model.
"BigBirdPegasusDecoderWrapper", # Building part of bigger (tested) model.
"DetrEncoder", # Building part of bigger (tested) model.
"DetrDecoder", # Building part of bigger (tested) model.
"DetrDecoderWrapper", # Building part of bigger (tested) model.
"M2M100Encoder", # Building part of bigger (tested) model.
"M2M100Decoder", # Building part of bigger (tested) model.
"Speech2TextEncoder", # Building part of bigger (tested) model.
"Speech2TextDecoder", # Building part of bigger (tested) model.
"LEDEncoder", # Building part of bigger (tested) model.
"LEDDecoder", # Building part of bigger (tested) model.
"BartDecoderWrapper", # Building part of bigger (tested) model.
"BartEncoder", # Building part of bigger (tested) model.
"BertLMHeadModel", # Needs to be setup as decoder.
"BlenderbotSmallEncoder", # Building part of bigger (tested) model.
"BlenderbotSmallDecoderWrapper", # Building part of bigger (tested) model.
"BlenderbotEncoder", # Building part of bigger (tested) model.
"BlenderbotDecoderWrapper", # Building part of bigger (tested) model.
"MBartEncoder", # Building part of bigger (tested) model.
"MBartDecoderWrapper", # Building part of bigger (tested) model.
"MegatronBertLMHeadModel", # Building part of bigger (tested) model.
"MegatronBertEncoder", # Building part of bigger (tested) model.
"MegatronBertDecoder", # Building part of bigger (tested) model.
"MegatronBertDecoderWrapper", # Building part of bigger (tested) model.
"PegasusEncoder", # Building part of bigger (tested) model.
"PegasusDecoderWrapper", # Building part of bigger (tested) model.
"DPREncoder", # Building part of bigger (tested) model.
"ProphetNetDecoderWrapper", # Building part of bigger (tested) model.
"ReformerForMaskedLM", # Needs to be setup as decoder.
"TFDPREncoder", # Building part of bigger (tested) model.
"TFElectraMainLayer", # Building part of bigger (tested) model (should it be a TFPreTrainedModel ?)
"TFRobertaForMultipleChoice", # TODO: fix
"SeparableConv1D", # Building part of bigger (tested) model.
]
# Update this list with test files that don't have a tester with a `all_model_classes` variable and which don't
# trigger the common tests.
TEST_FILES_WITH_NO_COMMON_TESTS = [
"test_modeling_camembert.py",
"test_modeling_flax_mt5.py",
"test_modeling_mbart.py",
"test_modeling_mt5.py",
"test_modeling_pegasus.py",
"test_modeling_tf_camembert.py",
"test_modeling_tf_mt5.py",
"test_modeling_tf_xlm_roberta.py",
"test_modeling_xlm_prophetnet.py",
"test_modeling_xlm_roberta.py",
]
# Update this list for models that are not in any of the auto MODEL_XXX_MAPPING. Being in this list is an exception and
# should **not** be the rule.
IGNORE_NON_AUTO_CONFIGURED = PRIVATE_MODELS.copy() + [
# models to ignore for model xxx mapping
"BeitForMaskedImageModeling",
"CLIPTextModel",
"CLIPVisionModel",
"FlaxCLIPTextModel",
"FlaxCLIPVisionModel",
"FlaxWav2Vec2ForCTC",
"DetrForSegmentation",
"DPRReader",
"FlaubertForQuestionAnswering",
"GPT2DoubleHeadsModel",
"LukeForEntityClassification",
"LukeForEntityPairClassification",
"LukeForEntitySpanClassification",
"OpenAIGPTDoubleHeadsModel",
"RagModel",
"RagSequenceForGeneration",
"RagTokenForGeneration",
"TFDPRReader",
"TFGPT2DoubleHeadsModel",
"TFOpenAIGPTDoubleHeadsModel",
"TFRagModel",
"TFRagSequenceForGeneration",
"TFRagTokenForGeneration",
"Wav2Vec2ForCTC",
"HubertForCTC",
"XLMForQuestionAnswering",
"XLNetForQuestionAnswering",
"SeparableConv1D",
"VisualBertForRegionToPhraseAlignment",
"VisualBertForVisualReasoning",
"VisualBertForQuestionAnswering",
"VisualBertForMultipleChoice",
"TFWav2Vec2ForCTC",
"TFHubertForCTC",
]
# This is to make sure the transformers module imported is the one in the repo.
spec = importlib.util.spec_from_file_location(
"transformers",
os.path.join(PATH_TO_TRANSFORMERS, "__init__.py"),
submodule_search_locations=[PATH_TO_TRANSFORMERS],
)
transformers = spec.loader.load_module()
# If some modeling modules should be ignored for all checks, they should be added in the nested list
# _ignore_modules of this function.
def get_model_modules():
"""Get the model modules inside the transformers library."""
_ignore_modules = [
"modeling_auto",
"modeling_encoder_decoder",
"modeling_marian",
"modeling_mmbt",
"modeling_outputs",
"modeling_retribert",
"modeling_utils",
"modeling_flax_auto",
"modeling_flax_utils",
"modeling_transfo_xl_utilities",
"modeling_tf_auto",
"modeling_tf_outputs",
"modeling_tf_pytorch_utils",
"modeling_tf_utils",
"modeling_tf_transfo_xl_utilities",
]
modules = []
for model in dir(transformers.models):
# There are some magic dunder attributes in the dir, we ignore them
if not model.startswith("__"):
model_module = getattr(transformers.models, model)
for submodule in dir(model_module):
if submodule.startswith("modeling") and submodule not in _ignore_modules:
modeling_module = getattr(model_module, submodule)
if inspect.ismodule(modeling_module):
modules.append(modeling_module)
return modules
def get_models(module, include_pretrained=False):
"""Get the objects in module that are models."""
models = []
model_classes = (transformers.PreTrainedModel, transformers.TFPreTrainedModel, transformers.FlaxPreTrainedModel)
for attr_name in dir(module):
if not include_pretrained and ("Pretrained" in attr_name or "PreTrained" in attr_name):
continue
attr = getattr(module, attr_name)
if isinstance(attr, type) and issubclass(attr, model_classes) and attr.__module__ == module.__name__:
models.append((attr_name, attr))
return models
def is_a_private_model(model):
"""Returns True if the model should not be in the main init."""
if model in PRIVATE_MODELS:
return True
# Wrapper, Encoder and Decoder are all privates
if model.endswith("Wrapper"):
return True
if model.endswith("Encoder"):
return True
if model.endswith("Decoder"):
return True
return False
def check_models_are_in_init():
"""Checks all models defined in the library are in the main init."""
models_not_in_init = []
dir_transformers = dir(transformers)
for module in get_model_modules():
models_not_in_init += [
model[0] for model in get_models(module, include_pretrained=True) if model[0] not in dir_transformers
]
# Remove private models
models_not_in_init = [model for model in models_not_in_init if not is_a_private_model(model)]
if len(models_not_in_init) > 0:
raise Exception(f"The following models should be in the main init: {','.join(models_not_in_init)}.")
# If some test_modeling files should be ignored when checking models are all tested, they should be added in the
# nested list _ignore_files of this function.
def get_model_test_files():
"""Get the model test files."""
_ignore_files = [
"test_modeling_common",
"test_modeling_encoder_decoder",
"test_modeling_marian",
"test_modeling_tf_common",
]
test_files = []
for filename in os.listdir(PATH_TO_TESTS):
if (
os.path.isfile(f"{PATH_TO_TESTS}/{filename}")
and filename.startswith("test_modeling")
and not os.path.splitext(filename)[0] in _ignore_files
):
test_files.append(filename)
return test_files
# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the tester class
# for the all_model_classes variable.
def find_tested_models(test_file):
|
def check_models_are_tested(module, test_file):
"""Check models defined in module are tested in test_file."""
# XxxPreTrainedModel are not tested
defined_models = get_models(module)
tested_models = find_tested_models(test_file)
if tested_models is None:
if test_file in TEST_FILES_WITH_NO_COMMON_TESTS:
return
return [
f"{test_file} should define `all_model_classes` to apply common tests to the models it tests. "
+ "If this intentional, add the test filename to `TEST_FILES_WITH_NO_COMMON_TESTS` in the file "
+ "`utils/check_repo.py`."
]
failures = []
for model_name, _ in defined_models:
if model_name not in tested_models and model_name not in IGNORE_NON_TESTED:
failures.append(
f"{model_name} is defined in {module.__name__} but is not tested in "
+ f"{os.path.join(PATH_TO_TESTS, test_file)}. Add it to the all_model_classes in that file."
+ "If common tests should not applied to that model, add its name to `IGNORE_NON_TESTED`"
+ "in the file `utils/check_repo.py`."
)
return failures
def check_all_models_are_tested():
"""Check all models are properly tested."""
modules = get_model_modules()
test_files = get_model_test_files()
failures = []
for module in modules:
test_file = f"test_{module.__name__.split('.')[-1]}.py"
if test_file not in test_files:
failures.append(f"{module.__name__} does not have its corresponding test file {test_file}.")
new_failures = check_models_are_tested(module, test_file)
if new_failures is not None:
failures += new_failures
if len(failures) > 0:
raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))
def get_all_auto_configured_models():
"""Return the list of all models in at least one auto class."""
result = set() # To avoid duplicates we concatenate all model classes in a set.
if is_torch_available():
for attr_name in dir(transformers.models.auto.modeling_auto):
if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING_NAMES"):
result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
if is_tf_available():
for attr_name in dir(transformers.models.auto.modeling_tf_auto):
if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
if is_flax_available():
for attr_name in dir(transformers.models.auto.modeling_flax_auto):
if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
return [cls for cls in result]
def ignore_unautoclassed(model_name):
"""Rules to determine if `name` should be in an auto class."""
# Special white list
if model_name in IGNORE_NON_AUTO_CONFIGURED:
return True
# Encoder and Decoder should be ignored
if "Encoder" in model_name or "Decoder" in model_name:
return True
return False
def check_models_are_auto_configured(module, all_auto_models):
"""Check models defined in module are each in an auto class."""
defined_models = get_models(module)
failures = []
for model_name, _ in defined_models:
if model_name not in all_auto_models and not ignore_unautoclassed(model_name):
failures.append(
f"{model_name} is defined in {module.__name__} but is not present in any of the auto mapping. "
"If that is intended behavior, add its name to `IGNORE_NON_AUTO_CONFIGURED` in the file "
"`utils/check_repo.py`."
)
return failures
def check_all_models_are_auto_configured():
"""Check all models are each in an auto class."""
missing_backends = []
if not is_torch_available():
missing_backends.append("PyTorch")
if not is_tf_available():
missing_backends.append("TensorFlow")
if not is_flax_available():
missing_backends.append("Flax")
if len(missing_backends) > 0:
missing = ", ".join(missing_backends)
if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
raise Exception(
"Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
f"Transformers repo, the following are missing: {missing}."
)
else:
warnings.warn(
"Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you "
"didn't make any change in one of those backends modeling files, you should probably execute the "
"command above to be on the safe side."
)
modules = get_model_modules()
all_auto_models = get_all_auto_configured_models()
failures = []
for module in modules:
new_failures = check_models_are_auto_configured(module, all_auto_models)
if new_failures is not None:
failures += new_failures
if len(failures) > 0:
raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))
_re_decorator = re.compile(r"^\s*@(\S+)\s+$")
def check_decorator_order(filename):
"""Check that in the test file `filename` the slow decorator is always last."""
with open(filename, "r", encoding="utf-8", newline="\n") as f:
lines = f.readlines()
decorator_before = None
errors = []
for i, line in enumerate(lines):
search = _re_decorator.search(line)
if search is not None:
decorator_name = search.groups()[0]
if decorator_before is not None and decorator_name.startswith("parameterized"):
errors.append(i)
decorator_before = decorator_name
elif decorator_before is not None:
decorator_before = None
return errors
def check_all_decorator_order():
"""Check that in all test files, the slow decorator is always last."""
errors = []
for fname in os.listdir(PATH_TO_TESTS):
if fname.endswith(".py"):
filename = os.path.join(PATH_TO_TESTS, fname)
new_errors = check_decorator_order(filename)
errors += [f"- {filename}, line {i}" for i in new_errors]
if len(errors) > 0:
msg = "\n".join(errors)
raise ValueError(
f"The parameterized decorator (and its variants) should always be first, but this is not the case in the following files:\n{msg}"
)
def find_all_documented_objects():
"""Parse the content of all doc files to detect which classes and functions it documents"""
documented_obj = []
for doc_file in Path(PATH_TO_DOC).glob("**/*.rst"):
with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
content = f.read()
raw_doc_objs = re.findall(r"(?:autoclass|autofunction):: transformers.(\S+)\s+", content)
documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
return documented_obj
# One good reason for not being documented is to be deprecated. Put in this list deprecated objects.
DEPRECATED_OBJECTS = [
"AutoModelWithLMHead",
"BartPretrainedModel",
"DataCollator",
"DataCollatorForSOP",
"GlueDataset",
"GlueDataTrainingArguments",
"LineByLineTextDataset",
"LineByLineWithRefDataset",
"LineByLineWithSOPTextDataset",
"PretrainedBartModel",
"PretrainedFSMTModel",
"SingleSentenceClassificationProcessor",
"SquadDataTrainingArguments",
"SquadDataset",
"SquadExample",
"SquadFeatures",
"SquadV1Processor",
"SquadV2Processor",
"TFAutoModelWithLMHead",
"TFBartPretrainedModel",
"TextDataset",
"TextDatasetForNextSentencePrediction",
"Wav2Vec2ForMaskedLM",
"Wav2Vec2Tokenizer",
"glue_compute_metrics",
"glue_convert_examples_to_features",
"glue_output_modes",
"glue_processors",
"glue_tasks_num_labels",
"squad_convert_examples_to_features",
"xnli_compute_metrics",
"xnli_output_modes",
"xnli_processors",
"xnli_tasks_num_labels",
]
# Exceptionally, some objects should not be documented after all rules passed.
# ONLY PUT SOMETHING IN THIS LIST AS A LAST RESORT!
UNDOCUMENTED_OBJECTS = [
"AddedToken", # This is a tokenizers class.
"BasicTokenizer", # Internal, should never have been in the main init.
"CharacterTokenizer", # Internal, should never have been in the main init.
"DPRPretrainedReader", # Like an Encoder.
"MecabTokenizer", # Internal, should never have been in the main init.
"ModelCard", # Internal type.
"SqueezeBertModule", # Internal building block (should have been called SqueezeBertLayer)
"TFDPRPretrainedReader", # Like an Encoder.
"TransfoXLCorpus", # Internal type.
"WordpieceTokenizer", # Internal, should never have been in the main init.
"absl", # External module
"add_end_docstrings", # Internal, should never have been in the main init.
"add_start_docstrings", # Internal, should never have been in the main init.
"cached_path", # Internal used for downloading models.
"convert_tf_weight_name_to_pt_weight_name", # Internal used to convert model weights
"logger", # Internal logger
"logging", # External module
"requires_backends", # Internal function
]
# This list should be empty. Objects in it should get their own doc page.
SHOULD_HAVE_THEIR_OWN_PAGE = [
# Benchmarks
"PyTorchBenchmark",
"PyTorchBenchmarkArguments",
"TensorFlowBenchmark",
"TensorFlowBenchmarkArguments",
]
def ignore_undocumented(name):
"""Rules to determine if `name` should be undocumented."""
# NOT DOCUMENTED ON PURPOSE.
# Constants uppercase are not documented.
if name.isupper():
return True
# PreTrainedModels / Encoders / Decoders / Layers / Embeddings / Attention are not documented.
if (
name.endswith("PreTrainedModel")
or name.endswith("Decoder")
or name.endswith("Encoder")
or name.endswith("Layer")
or name.endswith("Embeddings")
or name.endswith("Attention")
):
return True
# Submodules are not documented.
if os.path.isdir(os.path.join(PATH_TO_TRANSFORMERS, name)) or os.path.isfile(
os.path.join(PATH_TO_TRANSFORMERS, f"{name}.py")
):
return True
# All load functions are not documented.
if name.startswith("load_tf") or name.startswith("load_pytorch"):
return True
# is_xxx_available functions are not documented.
if name.startswith("is_") and name.endswith("_available"):
return True
# Deprecated objects are not documented.
if name in DEPRECATED_OBJECTS or name in UNDOCUMENTED_OBJECTS:
return True
# MMBT model does not really work.
if name.startswith("MMBT"):
return True
if name in SHOULD_HAVE_THEIR_OWN_PAGE:
return True
return False
def check_all_objects_are_documented():
"""Check all models are properly documented."""
documented_objs = find_all_documented_objects()
modules = transformers._modules
objects = [c for c in dir(transformers) if c not in modules and not c.startswith("_")]
undocumented_objs = [c for c in objects if c not in documented_objs and not ignore_undocumented(c)]
if len(undocumented_objs) > 0:
raise Exception(
"The following objects are in the public init so should be documented:\n - "
+ "\n - ".join(undocumented_objs)
)
def check_repo_quality():
"""Check all models are properly tested and documented."""
print("Checking all models are public.")
check_models_are_in_init()
print("Checking all models are properly tested.")
check_all_decorator_order()
check_all_models_are_tested()
print("Checking all objects are properly documented.")
check_all_objects_are_documented()
print("Checking all models are in at least one auto class.")
check_all_models_are_auto_configured()
if __name__ == "__main__":
check_repo_quality()
| """Parse the content of test_file to detect what's in all_model_classes"""
# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the class
with open(os.path.join(PATH_TO_TESTS, test_file), "r", encoding="utf-8", newline="\n") as f:
content = f.read()
all_models = re.findall(r"all_model_classes\s+=\s+\(\s*\(([^\)]*)\)", content)
# Check with one less parenthesis as well
all_models += re.findall(r"all_model_classes\s+=\s+\(([^\)]*)\)", content)
if len(all_models) > 0:
model_tested = []
for entry in all_models:
for line in entry.split(","):
name = line.strip()
if len(name) > 0:
model_tested.append(name)
return model_tested |
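# For reference, the kind of declaration the two regexes above are meant to match
# (hypothetical test-file contents):
#   all_model_classes = (BertModel, BertForMaskedLM) if is_torch_available() else ()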
created_node.rs | use crate::events::MountEvent;
use crate::Listener;
use crate::{
dom::Dispatch,
html,
html::attributes::{AttributeValue, SegregatedAttributes, Special},
Attribute, Event,
};
use std::cell::Cell;
use std::collections::HashMap;
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
use web_sys::{
self, Element, EventTarget, HtmlButtonElement, HtmlDetailsElement,
HtmlElement, HtmlFieldSetElement, HtmlInputElement, HtmlLinkElement,
HtmlMenuItemElement, HtmlOptGroupElement, HtmlOptionElement,
HtmlSelectElement, HtmlStyleElement, HtmlTextAreaElement, Node, Text,
};
thread_local!(static NODE_ID_COUNTER: Cell<usize> = Cell::new(1));
/// This is the value of the data-vdom-id.
/// Used to uniquely identify elements that contain closures so that the DomUpdater can
/// look them up by their unique id.
/// When the DomUpdater sees that the element no longer exists it will drop all of its
/// Rc'd Closures for those events.
fn create_unique_identifier() -> usize {
let id = NODE_ID_COUNTER.with(|x| {
let tmp = x.get();
x.set(tmp + 1);
tmp
});
id
}
pub(crate) const DATA_VDOM_ID: &str = "data-vdom-id";
/// Closures that we are holding on to to make sure that they don't get invalidated after a
/// VirtualNode is dropped.
///
/// The usize key is a unique identifier that is associated with the DOM element that this closure is
/// attached to.
///
pub type ActiveClosure =
HashMap<usize, Vec<(&'static str, Closure<dyn FnMut(web_sys::Event)>)>>;
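// Illustration (id value made up): an element with one listener ends up rendered with a
// marker like `<button data-vdom-id="42">`, and this map then holds an entry like
// `42 => vec![("click", closure)]` so the closure stays alive until that entry is dropped.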
/// A node along with all of the closures that were created for that
/// node's events and all of its child nodes' events.
#[derive(Debug)]
pub struct CreatedNode {
/// A `Node` or `Element` that was created from a `Node`
pub node: Node,
pub(crate) closures: ActiveClosure,
}
impl CreatedNode {
    /// create a simple node with no closures attached
pub fn without_closures(node: Node) -> Self {
CreatedNode {
node,
closures: HashMap::with_capacity(0),
}
}
/// create a text node
pub fn create_text_node(txt: &str) -> Text {
crate::document().create_text_node(txt)
}
/// Create and return a `CreatedNode` instance (containing a DOM `Node`
/// together with potentially related closures) for this virtual node.
pub fn create_dom_node<DSP, MSG>(
program: &DSP,
vnode: &crate::Node<MSG>,
focused_node: &mut Option<Node>,
) -> CreatedNode
where
MSG: 'static,
DSP: Clone + Dispatch<MSG> + 'static,
{
match vnode {
crate::Node::Text(txt) => {
let text_node = Self::create_text_node(&txt.text);
CreatedNode::without_closures(text_node.unchecked_into())
}
crate::Node::Comment(comment) => {
let comment_node = crate::document().create_comment(comment);
CreatedNode::without_closures(comment_node.unchecked_into())
}
crate::Node::Element(element_node) => {
Self::create_element_node(program, element_node, focused_node)
}
}
}
/// dispatch the mount event,
    /// call the listener since browsers don't allow asynchronous execution of
/// dispatching custom events (non-native browser events)
fn dispatch_mount_event<DSP, MSG>(
program: &DSP,
velem: &crate::Element<MSG>,
element: &Element,
) where
MSG: 'static,
DSP: Clone + Dispatch<MSG> + 'static,
{
for att in velem.attrs.iter() {
if *att.name() == "mount" {
for val in att.value().iter() {
if let AttributeValue::EventListener(cb) = val {
let msg = cb.emit(Event::from(MountEvent {
target_node: element.clone().unchecked_into(),
}));
program.dispatch(msg);
}
}
}
}
}
    /// Build a DOM element by recursively creating DOM nodes for this element and its
    /// children, its children's children, etc.
fn create_element_node<DSP, MSG>(
program: &DSP,
velem: &crate::Element<MSG>,
focused_node: &mut Option<Node>,
) -> CreatedNode
where
MSG: 'static,
DSP: Clone + Dispatch<MSG> + 'static,
{
let document = crate::document();
let element = if let Some(namespace) = velem.namespace() {
document
.create_element_ns(Some(namespace), velem.tag())
.expect("Unable to create element")
} else {
document
.create_element(velem.tag())
.expect("Unable to create element")
};
Self::dispatch_mount_event(program, velem, &element);
if velem.is_focused() {
*focused_node = Some(element.clone().unchecked_into());
log::trace!("element is focused..{:?}", focused_node);
Self::set_element_focus(&element);
}
let mut closures = ActiveClosure::new();
Self::set_element_attributes(
program,
&mut closures,
&element,
&velem.get_attributes().iter().collect::<Vec<_>>(),
);
for child in velem.get_children().iter() {
if child.is_safe_html() {
let child_text = child.unwrap_text();
// https://developer.mozilla.org/en-US/docs/Web/API/Element/insertAdjacentHTML
element
.insert_adjacent_html("beforeend", &child_text.text)
.expect("must not error");
} else {
let created_child =
Self::create_dom_node(program, child, focused_node);
closures.extend(created_child.closures);
element
.append_child(&created_child.node)
.expect("Unable to append element node");
}
}
let node: Node = element.unchecked_into();
CreatedNode { node, closures }
}
/// set the element attribute
pub fn set_element_attributes<DSP, MSG>(
program: &DSP,
closures: &mut ActiveClosure,
element: &Element,
attrs: &[&Attribute<MSG>],
) where
MSG: 'static,
DSP: Clone + Dispatch<MSG> + 'static,
{
let attrs = mt_dom::merge_attributes_of_same_name(attrs);
for att in attrs {
Self::set_element_attribute(program, closures, element, &att); | }
/// set the element attribute
///
    /// Note: this is called in a loop, so the attributes and the styles will not be set in
    /// the same call, but in subsequent calls to each other. Hence the if-else-if below over
    /// attributes, styles, and function calls.
#[track_caller]
pub fn set_element_attribute<DSP, MSG>(
program: &DSP,
closures: &mut ActiveClosure,
element: &Element,
attr: &Attribute<MSG>,
) where
MSG: 'static,
DSP: Clone + Dispatch<MSG> + 'static,
{
let SegregatedAttributes {
listeners,
plain_values,
styles,
function_calls,
} =
html::attributes::partition_callbacks_from_plain_styles_and_func_calls(
attr,
);
// set simple values
if let Some(merged_plain_values) =
html::attributes::merge_plain_attributes_values(&plain_values)
{
if let Some(namespace) = attr.namespace() {
// Warning NOTE: set_attribute_ns should only be called
// when you meant to use a namespace
// using this with None will error in the browser with:
// NamespaceError: An attempt was made to create or change an object in a way which is incorrect with regard to namespaces
element
.set_attribute_ns(
Some(namespace),
attr.name(),
&merged_plain_values,
)
.unwrap_or_else(|_| {
panic!(
"Error setting an attribute_ns for {:?}",
element
)
});
} else {
match *attr.name() {
"value" => {
Self::set_value(element, &merged_plain_values);
}
"open" => {
let is_open: bool = plain_values
.first()
.map(|v| {
v.get_simple().map(|v| v.as_bool()).flatten()
})
.flatten()
.unwrap_or(false);
Self::set_open(element, is_open);
}
"checked" => {
let is_checked: bool = plain_values
.first()
.map(|av| {
av.get_simple().map(|v| v.as_bool()).flatten()
})
.flatten()
.unwrap_or(false);
Self::set_checked(element, is_checked)
}
"disabled" => {
let is_disabled: bool = plain_values
.first()
.map(|av| {
av.get_simple().map(|v| v.as_bool()).flatten()
})
.flatten()
.unwrap_or(false);
Self::set_disabled(element, is_disabled);
}
_ => {
element
.set_attribute(attr.name(), &merged_plain_values)
.unwrap_or_else(|_| {
panic!(
"Error setting an attribute for {:?}",
element
)
});
}
}
}
} else if let Some(merged_styles) =
html::attributes::merge_styles_attributes_values(&styles)
{
// set the styles
element
.set_attribute(attr.name(), &merged_styles)
.unwrap_or_else(|_| {
panic!("Error setting an attribute_ns for {:?}", element)
});
} else {
            // if the merged attribute is blank or empty once the string is trimmed,
            // remove the attribute
element
.remove_attribute(attr.name())
.expect("must remove attribute");
}
// do function calls such as set_inner_html
if let Some(merged_func_values) =
html::attributes::merge_plain_attributes_values(&function_calls)
{
if *attr.name() == "inner_html" {
element.set_inner_html(&merged_func_values);
}
}
// add listeners using add_event_listener
for listener in listeners {
let unique_id = create_unique_identifier();
            // set the data-vdom-id; this will be read later on
// when it's time to remove this element and its closures and event listeners
element
.set_attribute(DATA_VDOM_ID, &unique_id.to_string())
.expect("Could not set attribute on element");
closures.insert(unique_id, vec![]);
let event_str = attr.name();
let current_elm: &EventTarget =
element.dyn_ref().expect("unable to cast to event targe");
// a custom enter event which triggers the listener
// when the enter key is pressed
if *event_str == "enter" {
let program_clone = program.clone();
let listener_clone = listener.clone();
let key_press_func: Closure<dyn FnMut(web_sys::Event)> =
Closure::wrap(Box::new(move |event: web_sys::Event| {
let ke: &web_sys::KeyboardEvent = event
.dyn_ref()
.expect("should be a keyboard event");
if ke.key() == "Enter" {
let msg = listener_clone.emit(Event::from(event));
program_clone.dispatch(msg);
}
}));
current_elm
.add_event_listener_with_callback(
"keypress",
key_press_func.as_ref().unchecked_ref(),
)
.expect("unable to attach enter event listener");
key_press_func.forget();
} else {
                // This is where all of the UI events are wired up in this part of the code.
                // Every event listener is added to this element.
                // The callback of each listener emits a Msg which is then
                // dispatched to the `program`, which then triggers the update-view cycle.
let callback_wrapped: Closure<dyn FnMut(web_sys::Event)> =
create_closure_wrap(program, listener);
current_elm
.add_event_listener_with_callback(
event_str,
callback_wrapped.as_ref().unchecked_ref(),
)
.expect("Unable to attached event listener");
closures
.get_mut(&unique_id)
.expect("Unable to get closure")
.push((event_str, callback_wrapped));
}
}
}
/// set focus to this element
pub(crate) fn set_element_focus(element: &Element) {
let html_element: &HtmlElement = element.unchecked_ref();
html_element.focus().expect("must focus")
}
    /// explicitly call the `set_checked` function on the html element
    /// since setting the attribute to false will not uncheck it.
///
/// There are only 2 elements where set_checked is applicable:
/// - input
/// - menuitem
fn set_checked(element: &Element, is_checked: bool) {
if let Some(input) = element.dyn_ref::<HtmlInputElement>() {
input.set_checked(is_checked);
} else if let Some(menu_item) = element.dyn_ref::<HtmlMenuItemElement>()
{
menu_item.set_checked(is_checked);
}
}
/// explicitly call set_open for details
/// since setting the attribute `open` to false will not close it.
///
    /// TODO: HtmlDialogElement (but it is not supported on firefox and safari, only works on chrome)
///
/// Applies to:
/// - dialog
/// - details
fn set_open(element: &Element, is_open: bool) {
if let Some(details) = element.dyn_ref::<HtmlDetailsElement>() {
details.set_open(is_open);
}
}
    /// explicitly call `set_disabled`
    /// since setting the attribute `disabled` to false will not enable it.
    ///
    /// These are the 10 elements on which we can call the `set_disabled` function:
/// - input
/// - button
/// - textarea
/// - style
/// - link
/// - select
/// - option
/// - optgroup
/// - fieldset
/// - menuitem
fn set_disabled(element: &Element, is_disabled: bool) {
if let Some(input) = element.dyn_ref::<HtmlInputElement>() {
input.set_disabled(is_disabled);
} else if let Some(btn) = element.dyn_ref::<HtmlButtonElement>() {
btn.set_disabled(is_disabled);
} else if let Some(text_area) = element.dyn_ref::<HtmlTextAreaElement>()
{
text_area.set_disabled(is_disabled);
} else if let Some(style_elem) = element.dyn_ref::<HtmlStyleElement>() {
style_elem.set_disabled(is_disabled);
} else if let Some(link_elem) = element.dyn_ref::<HtmlLinkElement>() {
link_elem.set_disabled(is_disabled);
} else if let Some(select) = element.dyn_ref::<HtmlSelectElement>() {
select.set_disabled(is_disabled);
} else if let Some(option) = element.dyn_ref::<HtmlOptionElement>() {
option.set_disabled(is_disabled);
} else if let Some(opt_group) = element.dyn_ref::<HtmlOptGroupElement>()
{
opt_group.set_disabled(is_disabled);
} else if let Some(field_set) = element.dyn_ref::<HtmlFieldSetElement>()
{
field_set.set_disabled(is_disabled);
} else if let Some(menu_item) = element.dyn_ref::<HtmlMenuItemElement>()
{
menu_item.set_disabled(is_disabled);
}
}
/// we explicitly call the `set_value` function in the html element
///
    /// Note: for most other elements, setting the attribute value will do just fine.
//
// TODO:
// web_sys::Attr::set_value
// web_sys::AudioParam::set_value
// web_sys::DomTokenList::set_value
// web_sys::HtmlButtonElement::set_value
// web_sys::HtmlDataElement::set_value
// web_sys::HtmlLiElement::set_value
// web_sys::HtmlMeterElement::set_value
// web_sys::HtmlOutputElement::set_value
// web_sys::HtmlParamElement::set_value
// web_sys::HtmlProgressElement::set_value
// web_sys::RadioNodeList::set_value
// web_sys::SvgAngle::set_value
// web_sys::SvgLength::set_value
// web_sys::SvgNumber::set_value
fn set_value(element: &Element, value: &str) {
if let Some(input) = element.dyn_ref::<HtmlInputElement>() {
input.set_value(value);
} else if let Some(textarea) = element.dyn_ref::<HtmlTextAreaElement>()
{
textarea.set_value(value);
} else if let Some(select) = element.dyn_ref::<HtmlSelectElement>()
{
select.set_value(value);
} else if let Some(option) = element.dyn_ref::<HtmlOptionElement>()
{
option.set_value(value);
}
}
/// remove element attribute,
    /// takes care of special cases such as checked
pub fn remove_element_attribute<MSG>(
element: &Element,
attr: &Attribute<MSG>,
) -> Result<(), JsValue> {
log::trace!("removing attribute: {}", attr.name());
element.remove_attribute(attr.name())?;
match *attr.name() {
"value" => {
Self::set_value(element, "");
}
"open" => {
Self::set_open(element, false);
}
"checked" => {
Self::set_checked(element, false);
}
"disabled" => {
Self::set_disabled(element, false);
}
_ => (),
}
Ok(())
}
}
/// This wraps into a closure the function that is dispatched when the event is triggered.
pub(crate) fn create_closure_wrap<DSP, MSG>(
program: &DSP,
listener: &Listener<MSG>,
) -> Closure<dyn FnMut(web_sys::Event)>
where
MSG: 'static,
DSP: Clone + Dispatch<MSG> + 'static,
{
let listener_clone = listener.clone();
let program_clone = program.clone();
Closure::wrap(Box::new(move |event: web_sys::Event| {
let msg = listener_clone.emit(Event::from(event));
program_clone.dispatch(msg);
}))
} | } |
mkds_scenes.ts | // Mario Kart DS
import * as Viewer from '../viewer';
import * as CX from '../compression/CX';
import * as NARC from './narc';
import * as NSBMD from './nsbmd';
import * as NSBTA from './nsbta';
import * as NSBTP from './nsbtp';
import * as NSBTX from './nsbtx';
import { fetchData } from '../fetch';
import Progressable from '../Progressable';
import ArrayBufferSlice from '../ArrayBufferSlice';
import { GfxDevice, GfxHostAccessPass, GfxRenderPass } from '../gfx/platform/GfxPlatform';
import { MDL0Renderer, G3DPass } from './render';
import { assert } from '../util';
import { GfxRenderInstViewRenderer } from '../gfx/render/GfxRenderer';
import { BasicRenderTarget, standardFullClearRenderPassDescriptor, depthClearRenderPassDescriptor } from '../gfx/helpers/RenderTargetHelpers';
import { FakeTextureHolder } from '../TextureHolder';
export class CourseRenderer implements Viewer.SceneGfx {
public viewRenderer = new GfxRenderInstViewRenderer();
public renderTarget = new BasicRenderTarget();
public textureHolder: FakeTextureHolder;
constructor(device: GfxDevice, public courseRenderer: MDL0Renderer, public skyboxRenderer: MDL0Renderer | null) {
this.textureHolder = new FakeTextureHolder(this.courseRenderer.viewerTextures);
this.courseRenderer.addToViewRenderer(device, this.viewRenderer);
if (this.skyboxRenderer !== null)
this.skyboxRenderer.addToViewRenderer(device, this.viewRenderer);
}
public prepareToRender(hostAccessPass: GfxHostAccessPass, viewerInput: Viewer.ViewerRenderInput): void {
this.courseRenderer.prepareToRender(hostAccessPass, viewerInput);
if (this.skyboxRenderer !== null)
this.skyboxRenderer.prepareToRender(hostAccessPass, viewerInput);
}
public render(device: GfxDevice, viewerInput: Viewer.ViewerRenderInput): GfxRenderPass {
const hostAccessPass = device.createHostAccessPass();
this.prepareToRender(hostAccessPass, viewerInput);
device.submitPass(hostAccessPass);
this.viewRenderer.prepareToRender(device);
this.renderTarget.setParameters(device, viewerInput.viewportWidth, viewerInput.viewportHeight);
this.viewRenderer.setViewport(viewerInput.viewportWidth, viewerInput.viewportHeight);
// First, render the skybox.
const skyboxPassRenderer = this.renderTarget.createRenderPass(device, standardFullClearRenderPassDescriptor);
this.viewRenderer.executeOnPass(device, skyboxPassRenderer, G3DPass.SKYBOX);
skyboxPassRenderer.endPass(null);
device.submitPass(skyboxPassRenderer);
// Now do main pass.
const mainPassRenderer = this.renderTarget.createRenderPass(device, depthClearRenderPassDescriptor);
this.viewRenderer.executeOnPass(device, mainPassRenderer, G3DPass.MAIN);
return mainPassRenderer;
}
public destroy(device: GfxDevice): void {
this.viewRenderer.destroy(device);
this.renderTarget.destroy(device);
this.courseRenderer.destroy(device);
if (this.skyboxRenderer !== null)
this.skyboxRenderer.destroy(device);
}
}
class MarioKartDSSceneDesc implements Viewer.SceneDesc {
constructor(public id: string, public name: string) {}
private fetchCARC(path: string, abortSignal: AbortSignal): Progressable<NARC.NitroFS> {
return fetchData(path, abortSignal).then((buffer: ArrayBufferSlice) => {
return NARC.parse(CX.decompress(buffer));
});
}
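    // For a given course id the two archives fetched in createScene below resolve to paths
    // like `mkds/Course/cross_course.carc` (course geometry) and `mkds/Course/cross_courseTex.carc`
    // (textures); the available ids are listed in sceneDescs at the bottom of this file.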
public createScene(device: GfxDevice, abortSignal: AbortSignal): Progressable<Viewer.SceneGfx> {
return Progressable.all([
this.fetchCARC(`mkds/Course/${this.id}.carc`, abortSignal),
this.fetchCARC(`mkds/Course/${this.id}Tex.carc`, abortSignal),
]).then(([courseNARC, textureNARC]) => {
const courseBmdFile = courseNARC.files.find((file) => file.path === '/course_model.nsbmd');
const courseBmd = NSBMD.parse(courseBmdFile.buffer);
const courseBtxFile = textureNARC.files.find((file) => file.path === '/course_model.nsbtx');
const courseBtx = courseBtxFile !== undefined ? NSBTX.parse(courseBtxFile.buffer) : null;
assert(courseBmd.models.length === 1);
const courseRenderer = new MDL0Renderer(device, courseBmd.models[0], courseBtx !== null ? courseBtx.tex0 : courseBmd.tex0);
let skyboxRenderer: MDL0Renderer | null = null;
const skyboxBmdFile = courseNARC.files.find((file) => file.path === '/course_model_V.nsbmd');
if (skyboxBmdFile !== undefined) {
const skyboxBmd = skyboxBmdFile !== undefined ? NSBMD.parse(skyboxBmdFile.buffer) : null;
const skyboxBtxFile = textureNARC.files.find((file) => file.path === '/course_model_V.nsbtx');
const skyboxBtx = skyboxBtxFile !== undefined ? NSBTX.parse(skyboxBtxFile.buffer) : null;
assert(skyboxBmd.models.length === 1);
skyboxRenderer = new MDL0Renderer(device, skyboxBmd.models[0], skyboxBtx !== null ? skyboxBtx.tex0 : skyboxBmd.tex0);
skyboxRenderer.modelMatrix[13] -= 1500;
skyboxRenderer.isSkybox = true;
}
const c = new CourseRenderer(device, courseRenderer, skyboxRenderer);
const courseBtaFile = courseNARC.files.find((file) => file.path === '/course_model.nsbta');
const courseBta = courseBtaFile !== undefined ? NSBTA.parse(courseBtaFile.buffer) : null;
if (courseBta !== null)
courseRenderer.bindSRT0(courseBta.srt0); | const courseBtpFile = courseNARC.files.find((file) => file.path === '/course_model.nsbtp');
const courseBtp = courseBtpFile !== undefined ? NSBTP.parse(courseBtpFile.buffer) : null;
if (courseBtp !== null) {
assert(courseBtp.pat0.length === 1);
courseRenderer.bindPAT0(device, courseBtp.pat0[0]);
}
if (skyboxRenderer !== null) {
const skyboxBtaFile = courseNARC.files.find((file) => file.path === '/course_model_V.nsbta');
const skyboxBta = skyboxBtaFile !== undefined ? NSBTA.parse(skyboxBtaFile.buffer) : null;
if (skyboxBta !== null)
skyboxRenderer.bindSRT0(skyboxBta.srt0);
}
return c;
});
}
}
const id = 'mkds';
const name = 'Mario Kart DS';
const sceneDescs = [
"Mushroom Cup",
new MarioKartDSSceneDesc("cross_course", "Figure-8 Circuit"),
new MarioKartDSSceneDesc("bank_course", "Yoshi Falls"),
new MarioKartDSSceneDesc("beach_course", "Cheep Cheep Beach"),
new MarioKartDSSceneDesc("mansion_course", "Luigi's Mansion"),
"Flower Cup",
new MarioKartDSSceneDesc("desert_course", "Desert Hills"),
new MarioKartDSSceneDesc("town_course", "Delfino Square"),
new MarioKartDSSceneDesc("pinball_course", "Waluigi Pinball"),
new MarioKartDSSceneDesc("ridge_course", "Shroom Ridge"),
"Star Cup",
new MarioKartDSSceneDesc("snow_course", "DK Pass"),
new MarioKartDSSceneDesc("clock_course", "Tick Tock Clock"),
new MarioKartDSSceneDesc("mario_course", "Mario Circuit"),
new MarioKartDSSceneDesc("airship_course", "Airship Fortress"),
"Special Cup",
new MarioKartDSSceneDesc("stadium_course", "Wario's Stadium"),
new MarioKartDSSceneDesc("garden_course", "Peach Gardens"),
new MarioKartDSSceneDesc("koopa_course", "Bowser's Castle"),
new MarioKartDSSceneDesc("rainbow_course", "Rainbow Road"),
"Shell Cup",
new MarioKartDSSceneDesc("old_mario_sfc", "SNES Mario Circuit 1"),
new MarioKartDSSceneDesc("old_momo_64", "N64 Moo Moo Farm"),
new MarioKartDSSceneDesc("old_peach_agb", "GBA Peach Cup"),
new MarioKartDSSceneDesc("old_luigi_gc", "GCN Luigi Circuit"),
"Banana Cup",
new MarioKartDSSceneDesc("old_donut_sfc", "SNES Donut Plains 1"),
new MarioKartDSSceneDesc("old_frappe_64", "N64 Frappe Snowland"),
new MarioKartDSSceneDesc("old_koopa_agb", "GBA Bowser Castle 2"),
new MarioKartDSSceneDesc("old_baby_gc", "GCN Baby Park"),
"Leaf Cup",
new MarioKartDSSceneDesc("old_noko_sfc", "SNES Koopa Beach 2"),
new MarioKartDSSceneDesc("old_choco_64", "N64 Choco Mountain"),
new MarioKartDSSceneDesc("old_luigi_agb", "GBA Luigi Circuit"),
new MarioKartDSSceneDesc("old_mario_gc", "GCN Mushroom Bridge"),
"Lightning Cup",
new MarioKartDSSceneDesc("old_choco_sfc", "SNES Choco Island 2"),
new MarioKartDSSceneDesc("old_hyudoro_64", "N64 Banshee Boardwalk"),
new MarioKartDSSceneDesc("old_sky_agb", "GBA Sky Garden"),
new MarioKartDSSceneDesc("old_yoshi_gc", "GCN Yoshi Circuit"),
"Mission Stages",
new MarioKartDSSceneDesc("mini_stage1", "mini_stage1"),
new MarioKartDSSceneDesc("mini_stage2", "mini_stage2"),
new MarioKartDSSceneDesc("mini_stage3", "mini_stage3"),
new MarioKartDSSceneDesc("mini_stage4", "mini_stage4"),
new MarioKartDSSceneDesc("MR_stage1", "MR_stage1"),
new MarioKartDSSceneDesc("MR_stage2", "MR_stage2"),
new MarioKartDSSceneDesc("MR_stage3", "MR_stage3"),
new MarioKartDSSceneDesc("MR_stage4", "MR_stage4"),
"Unused Test Courses",
new MarioKartDSSceneDesc("dokan_course", "dokan_course"),
new MarioKartDSSceneDesc("wario_course", "wario_course"),
new MarioKartDSSceneDesc("donkey_course", "donkey_course"),
new MarioKartDSSceneDesc("luigi_course", "luigi_course"),
new MarioKartDSSceneDesc("test1_course", "test1_course"),
new MarioKartDSSceneDesc("test_circle", "test_circle"),
new MarioKartDSSceneDesc("mini_block_course", "mini_block_course"),
new MarioKartDSSceneDesc("mini_block_64", "mini_block_64"),
new MarioKartDSSceneDesc("mini_dokan_gc", "mini_dokan_gc"),
new MarioKartDSSceneDesc("nokonoko_course", "nokonoko_course"),
];
export const sceneGroup: Viewer.SceneGroup = { id, name, sceneDescs }; | |
unified_quantization_handler.py | # Copyright (C) 2021, 2020 GreenWaves Technologies, SAS
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging
from copy import deepcopy
from utils.subclasses import get_all_subclasses
from quantization.qtype_constraint import ConstraintBase, MatchAll
LOG = logging.getLogger('nntool.' + __name__)
def min_or_max_none(v):
return v['min'] is None or v['max'] is None
def dimstr(dims):
return ", ".join("".join(dimname for dimname in dim) for dim in dims)
class QuantizionHandler(ConstraintBase):
""" This class is base quantization handler class.
All quantization handlers MUST put decorator @params_type to register corresponding params handled.
"""
SCHEME = None
PARAMS_TYPE = None
DOCUMENTATION = ''
OPTIONS = {}
CAN_DEQUANTIZE = False
NEEDS_STATS = True
FUSION_HANDLER = False # Indicates that the subgraph inside a fusion should be traversed
PRIORITY = 1
@classmethod
def description(cls):
return f'{cls.__name__}({cls.SCHEME}, {cls.constraints_repr()})'
@classmethod
def check_cls(cls):
if not cls.PARAMS_TYPE:
LOG.warning(
"%s doesn't have PARAMS_TYPE. "
"Please use Handler.params_type decorator to register PARAMS_TYPE.",
cls.__name__)
if not cls.SCHEME:
LOG.warning(
"%s doesn't have SCHEME. "
"Please use Handler.params_type decorator to register SCHEME.",
cls.__name__)
@classmethod
def check_valid_ranges(cls, params, stats, idx=None, dirs=None):
if stats is None:
raise ValueError(
f'no valid range information for node {params.name}. Graph cannot be quantized.')
if dirs is None:
dirs = ('in', 'out')
elif isinstance(dirs, str):
dirs = (dirs,)
for direction in dirs:
if not f'range_{direction}' in stats:
raise ValueError(
f'no valid range information for node {params.name} range_{direction}. Graph cannot be quantized.')
range_info = stats[f'range_{direction}']
if idx is not None:
if len(range_info) <= idx or range_info[idx] is None or min_or_max_none(range_info[idx]):
raise ValueError(
f'no valid range information for node {params.name} range_{direction}[{idx}]. Graph cannot be quantized.')
else:
for i, item in enumerate(range_info):
if item is None or min_or_max_none(item):
raise ValueError(
f'no valid range information for node {params.name} range_{direction}[{i}]. Graph cannot be quantized.')
@classmethod
def check_order(cls, params, in_order, out_order):
if params.ker_in_order and params.ker_in_order != in_order:
LOG.warning('indicating change of %s input from %s to %s order - rerun adjust command',
params.name,
dimstr(params.ker_in_order),
dimstr(in_order))
params.ker_in_order = in_order
if params.ker_out_order and params.ker_out_order != out_order:
LOG.warning('indicating change of %s output from %s to %s order - rerun adjust command',
params.name,
dimstr(params.ker_out_order),
dimstr(out_order))
params.ker_out_order = out_order
@classmethod
def get_in_qs_from_stats(cls, params, stats, in_qs, **kwargs):
return cls._get_in_qs_from_stats(params, stats, in_qs, **kwargs)
@classmethod
def _get_in_qs_from_stats(cls, params, stats, in_qs, **kwargs):
raise NotImplementedError("no _get_in_qs_from_stats method implemented")
@classmethod
def _quantize(cls, params, in_qs, stats, **kwargs):
raise NotImplementedError("no _quantize method implemented")
@classmethod
def get_options(cls, params, opts):
popts = cls.get_default_options()
popts.update(opts.get(params, {}))
return popts
@classmethod
def quantize(cls, params, in_qs, stats, **kwargs):
return cls._quantize(params, in_qs, stats, **kwargs)
@classmethod
def _dequantize(cls, params, qrec):
raise ValueError("this handler cannot dequantize")
@classmethod
def dequantize(cls, params, qrec):
cls._dequantize(params, qrec)
@classmethod
def get_default_options(cls):
return {name: opt['default'] for name, opt in cls.OPTIONS.items()}
@staticmethod
def needs_stats(yesno):
return QuantizionHandler.property_register("NEEDS_STATS", yesno)
@staticmethod
def params_type(*args):
|
@staticmethod
def documentation(doc):
return QuantizionHandler.property_register("DOCUMENTATION", doc)
@staticmethod
def priority(num):
return QuantizionHandler.property_register("PRIORITY", num)
@staticmethod
def scheme(doc):
return QuantizionHandler.property_register("SCHEME", doc)
@staticmethod
def fusion_handler(deco_cls):
setattr(deco_cls, "FUSION_HANDLER", True)
return deco_cls
@staticmethod
def options(*args):
def deco(cls):
# copy the closest base class options so that we create
# a new class variable on this class
setattr(cls, "OPTIONS", deepcopy(getattr(cls, "OPTIONS")))
# Now add / override options
cls_opts = getattr(cls, "OPTIONS")
cls_opts.update({opt['name']: opt for opt in args})
cls_opts_hash = object.__hash__(cls_opts)
# since object classes can be initialized in an arbitrary order
# copy to all subclasses that have already set options
for subcls in get_all_subclasses(cls):
sub_cls_opts = getattr(subcls, "OPTIONS")
if object.__hash__(sub_cls_opts) != cls_opts_hash:
sub_cls_opts.update({opt['name']: opt for opt in args})
return cls
return deco
@staticmethod
def can_dequantize(yes_no):
return QuantizionHandler.property_register("CAN_DEQUANTIZE", yes_no)
@staticmethod
def property_register(name, value):
def deco(cls):
setattr(cls, name, value)
return cls
return deco
@classmethod
def get_prefered_input_dtypes(cls, params, **kwargs):
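# Build one preferred dtype per input from the declared dtype constraints;
# a MatchAll constraint applies to its own input and every input after it.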
input_constraints = cls.INPUT_CONSTRAINTS
current_constraint = None
if input_constraints:
constraints = []
input_constraint = None
for idx, _ in enumerate(params.in_dims):
if current_constraint is None:
if len(input_constraints) > idx:
input_constraint = input_constraints[idx]
if isinstance(input_constraint, MatchAll):
current_constraint = input_constraint = input_constraint.constraint
else:
input_constraint = None
else:
input_constraint = current_constraint
if input_constraint and 'dtype' in input_constraint:
dtype_constraint = input_constraint['dtype']
if isinstance(dtype_constraint, set):
constraints.append(next(dtype_constraint.__iter__()))
else:
constraints.append(dtype_constraint)
else:
constraints.append(None)
else:
constraints = [None] * len(params.in_dims)
return constraints
#pylint: disable=invalid-name
params_type = QuantizionHandler.params_type
documentation = QuantizionHandler.documentation
options = QuantizionHandler.options
option_constraint = QuantizionHandler.option_constraint
can_dequantize = QuantizionHandler.can_dequantize
scheme = QuantizionHandler.scheme
in_qs_constraint = QuantizionHandler.in_qs_constraint
out_qs_constraint = QuantizionHandler.out_qs_constraint
fusion_handler = QuantizionHandler.fusion_handler
priority = QuantizionHandler.priority
needs_stats = QuantizionHandler.needs_stats
| return QuantizionHandler.property_register("PARAMS_TYPE", args) |
status.rs | use crate::filesystem::{self, File, get_file_path_without_migration_path};
use crate::Configuration;
use crate::EngineName;
use crate::engines::{get_sql_engine, EngineError};
use crate::commands::interactive::{merge_migrations_and_files, InteractiveMigration, InteractionType};
use crate::helpers::{limit_number, limit_per_date};
use console::Style;
use std::error::Error;
/// Show the status.
///
/// # Arguments
///
/// * `root` - The root folder where all migrations are.
/// * `migrations` - The files & migrations.
fn show_status(root: &str, migrations: &mut Vec<InteractiveMigration>) {
let installed = Style::new().green();
let notinstalled = Style::new().red();
let installed_with_warning = Style::new().yellow();
let inactive = Style::new().dim();
let yellow = Style::new().yellow();
println!("");
println!("Installed | migration number | name");
println!("----------+------------------+----------------------------");
for index in 0..migrations.len() {
if let Some(migration) = migrations.get(index) {
let mut content = String::new();
if migration.current_type == InteractionType::UP {
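// Compare the stored migration hash with the hash of the current "up" file:
// equal hashes mean the installed migration is unchanged, otherwise mark it as changed.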
let m_hash = migration.migration_hash.as_ref();
let f_hash = migration.file_up_hash.as_ref();
if m_hash.is_some() && f_hash.is_some() && Some(m_hash) == Some(f_hash) {
content.push_str(&format!(" {} ", installed.apply_to("yes")));
} else {
content.push_str(&format!(" {} ", installed_with_warning.apply_to("changed")));
}
} else {
content.push_str(&format!(" {} ", notinstalled.apply_to("no")));
}
| if migration.file_up.is_some() {
let f = migration.file_up.as_ref().unwrap();
let file_name = get_file_path_without_migration_path(root, &f.origin.display().to_string());
content.push_str(&format!("{} {}{}{}", f.name, inactive.apply_to("("), inactive.apply_to(file_name), inactive.apply_to(")")));
} else if migration.migration_origin.is_some() {
content.push_str(&format!("{} {}was: {}{}", yellow.apply_to("missing file"),
inactive.apply_to("("), inactive.apply_to(migration.migration_origin.as_ref().unwrap()),
inactive.apply_to(")")
));
}
println!("{}", &content.replace("\"", ""));
}
}
println!("");
}
/// Do the status mode.
///
/// # Arguments
///
/// * `configuration` - The system configuration.
/// * `files` - The files.
fn process_status_sql(configuration: &Configuration, files: &mut Vec<File>) -> Result<(), Box<dyn Error>> {
match get_sql_engine(&configuration.engine, configuration) {
Ok(mut db) => {
match db.create_migration_table() {
Ok(_) => {
match db.get_migrations_with_hashes(&configuration.migration_type) {
Ok(mut existing) => {
if configuration.interactive_days > 0 {
existing.retain(|(migration, _, _)| limit_per_date(migration, configuration.interactive_days));
files.retain(|file| limit_per_date(&file.number.to_string(), configuration.interactive_days));
}
let mut to_show = merge_migrations_and_files(&existing, files);
show_status(&configuration.path, &mut to_show);
Ok(())
},
Err(e) => {
crit!("Error getting migrations: {:?}", e);
Err(Box::new(EngineError {}))
}
}
},
Err(e) => {
crit!("Error creating migration table: {:?}", e);
Err(Box::new(EngineError {}))
}
}
},
Err(e) => {
crit!("Error getting engine: {:?}", e);
Err(Box::new(EngineError {}))
}
}
}
/// Dump the status of the database.
///
/// # Arguments
///
/// * `configuration` - The configuration to use
pub fn process(configuration: &Configuration) -> bool {
let mut files = filesystem::migrations(&configuration.path, None);
files.sort_by(|f1, f2| f1.partial_cmp(f2).unwrap());
match configuration.engine {
EngineName::POSTGRESQL | EngineName::SQLITE | EngineName::MYSQL => {
match process_status_sql(configuration, &mut files) {
Err(_e) => false,
_ => true
}
}
}
} | content.push_str("| ");
content.push_str(&limit_number(&migration.number));
content.push_str(" | ");
|
database_config.js | import { config } from 'dotenv';
config(); | password: process.env.DB_PASSWORD || '',
database: process.env.DB_DATABASE,
host: process.env.DB_HOST,
dialect: 'postgres'
};
export const test = {
username: process.env.TEST_DB_USERNAME,
password: process.env.TEST_DB_PASSWORD || '',
database: process.env.TEST_DB_DATABASE,
host: process.env.TEST_DB_HOST,
dialect: 'postgres'
};
export const production = {
use_env_variable: 'DATABASE_URL',
dialect: 'postgres',
dialectOptions: {
ssl: true
},
logging: false
}; |
export const development = {
username: process.env.DB_USERNAME, |
wg.service.ts | import Cognito from './cognito.service';
import { Amplify, API } from 'aws-amplify';
export default class | {
public cognitoSvc: Cognito;
constructor() {
this.cognitoSvc = new Cognito();
Amplify.configure({
API: {
endpoints: [
{
name: 'WGLambda',
endpoint: 'https://lambda.us-west-2.amazonaws.com/2015-03-31/functions/prod-getDashboardView/invocations',
service: 'lambda',
region: 'us-west-2',
},
],
},
});
}
public async signInUser(username: string, password: string): Promise<any> {
return this.cognitoSvc.signInUser(username, password);
}
public async getDashboardInfo(): Promise<any> {
const apiName = 'WGLambda';
const path = '/';
const myInit = {
body: {
'userId':this.cognitoSvc.cognitoUser && this.cognitoSvc.cognitoUser.getUsername(),
},
};
return await API.post(apiName, path, myInit);
}
} | WaterguruService |
context.py | import copy
import tabulator
import requests
from .config import Config
from ..config.log import logger
from ..config.consts import CONFIG_SKIP_ROWS, CONFIG_TAXONOMY_ID, CONFIG_FORMAT, CONFIG_ALLOW_INSECURE_TLS
from ..taxonomies import TaxonomyRegistry, Taxonomy
_workbook_cache = {}
def trimmer(extended_rows):
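# Truncate each row to the header width and drop rows shorter than the header.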
for row_number, headers, row in extended_rows:
if headers:
row = row[:len(headers)]
if len(row) < len(headers):
continue
yield (row_number, headers, row)
class Context():
| def __init__(self, config: Config, taxonomies: TaxonomyRegistry):
self.config = config
self.taxonomies: TaxonomyRegistry = taxonomies
self._stream = None
self.enricher_dir = None
def _structure_params(self):
skip_rows = self.config.get(CONFIG_SKIP_ROWS) if CONFIG_SKIP_ROWS in self.config else None
fmt = self.config.get(CONFIG_FORMAT)
return dict(
headers=skip_rows + 1 if skip_rows is not None else None,
ignore_blank_headers=fmt in ('csv', 'xlsx', 'xls'),
post_parse=[trimmer]
)
def reset_stream(self):
self._stream = None
def http_session(self):
http_session = requests.Session()
http_session.headers.update(tabulator.config.HTTP_HEADERS)
if self.config.get(CONFIG_ALLOW_INSECURE_TLS):
http_session.verify = False
return http_session
@property
def stream(self):
if self._stream is None:
source = copy.deepcopy(self.config._unflatten().get('source', {}))
structure = self._structure_params()
try:
path = source.pop('path')
if not path:
return None
logger.info('Opening stream %s', path)
if 'workbook_cache' in source:
source['workbook_cache'] = _workbook_cache
self._stream = tabulator.Stream(path, **source, **structure, http_session=self.http_session()).open()
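# Reading each key back from the config below presumably marks it as consumed (assumption).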
for k in source.keys():
self.config.get('source.' + k)
for k in structure.keys():
self.config.get('structure.' + k)
except Exception:
logger.exception('Failed to open URL, source=%r, structure=%r', source, structure)
raise
return self._stream
@property
def taxonomy(self) -> Taxonomy:
if CONFIG_TAXONOMY_ID in self.config:
return self.taxonomies.get(self.config[CONFIG_TAXONOMY_ID]) |
|
importer.ts | import * as http from 'http';
import * as https from 'https';
import * as url from 'url';
import * as util from 'util';
import * as smogon from 'smogon';
import {Streams} from '../../lib';
import {Dex, toID} from '../../sim/dex';
import {TeamValidator} from '../../sim/team-validator';
Dex.includeModData();
type DeepPartial<T> = {
[P in keyof T]?: T[P] extends (infer I)[]
? (DeepPartial<I>)[]
: DeepPartial<T[P]>;
};
interface PokemonSets {
[speciesid: string]: {
[name: string]: DeepPartial<PokemonSet>,
};
}
interface IncomingMessage extends NodeJS.ReadableStream {
statusCode: number;
headers: {location?: string};
}
// eg. 'gen1.json'
interface GenerationData {
[formatid: string]: FormatData;
}
// eg. 'gen7balancedhackmons.json'
interface FormatData {
[source: string]: PokemonSets;
}
type GenerationNum = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8;
// The tiers we support, ie. ones that we have data sources for.
export const TIERS = new Set([
'ubers', 'oubl', 'ou', 'uu', 'ru', 'nu', 'pu', 'zu', 'lc', 'cap', 'nationaldex',
'doublesou', 'battlespotsingles', 'battlespotdoubles', 'battlestadiumsingles',
'vgc2016', 'vgc2017', 'vgc2018', 'vgc2019ultraseries', 'vgc2020', '1v1',
'anythinggoes', 'nationaldexag', 'balancedhackmons', 'letsgoou', 'monotype',
]);
const FORMATS = new Map<ID, {gen: GenerationNum, format: Format}>();
const VALIDATORS = new Map<ID, TeamValidator>();
for (let gen = 1; gen <= 8; gen++) {
for (const tier of TIERS) {
const format = Dex.getFormat(`gen${gen}${tier}`);
if (format.exists) {
FORMATS.set(format.id, {gen: gen as GenerationNum, format});
VALIDATORS.set(format.id, new TeamValidator(format));
}
}
}
export async function importAll() {
const index = await request(smogon.Statistics.URL);
const imports = [];
for (let gen = 1; gen <= 8; gen++) {
imports.push(importGen(gen as GenerationNum, index));
}
return Promise.all(imports);
}
async function importGen(gen: GenerationNum, index: string) {
const data: GenerationData = {};
const smogonSetsByFormat: {[formatid: string]: PokemonSets} = {};
const thirdPartySetsByFormat: {[source: string]: {[formatid: string]: PokemonSets}} = {};
const numByFormat: {[formatid: string]: number} = {};
const imports = [];
const dex = Dex.forFormat(`gen${gen}ou`);
for (const id in dex.data.Pokedex) {
if (!eligible(dex, id as ID)) continue;
const species = dex.getSpecies(id);
if (species.battleOnly) continue; // Smogon collapses these into their out-of-battle species
imports.push(importSmogonSets(dex.getSpecies(id).name, gen, smogonSetsByFormat, numByFormat));
}
await Promise.all(imports);
for (const {format, gen: g} of FORMATS.values()) {
if (g !== gen) continue;
if (smogonSetsByFormat[format.id] && Object.keys(smogonSetsByFormat[format.id]).length) {
data[format.id] = {};
data[format.id]['dex'] = smogonSetsByFormat[format.id];
report(format, numByFormat[format.id], 'dex');
}
for (const source in thirdPartySetsByFormat) {
if (thirdPartySetsByFormat[source][format.id] && Object.keys(thirdPartySetsByFormat[source][format.id]).length) {
data[format.id] = data[format.id] || {};
data[format.id][source] = thirdPartySetsByFormat[source][format.id];
}
}
const stats = await getStatisticsURL(index, format);
if (!stats) continue;
try {
const statistics = smogon.Statistics.process(await request(stats.url));
const sets = importUsageBasedSets(gen, format, statistics, stats.count);
if (Object.keys(sets).length) {
data[format.id] = data[format.id] || {};
data[format.id]['stats'] = sets;
}
data[format.id] = data[format.id] || {};
} catch (err) {
error(`${stats.url} = ${err}`);
}
}
return data;
}
function eligible(dex: ModdedDex, id: ID) {
const gen = toGen(dex, id);
if (!gen || gen > dex.gen) return false;
const species = dex.getSpecies(id);
if (['Mega', 'Primal', 'Ultra'].some(f => species.forme.startsWith(f))) return true;
// Species with formes distinct enough to merit inclusion
const unique = ['darmanitan', 'meloetta', 'greninja', 'zygarde'];
// Too similar to their base forme/species to matter
const similar = ['pichu', 'pikachu', 'genesect', 'basculin', 'magearna', 'keldeo', 'vivillon'];
if (species.battleOnly && !unique.some(f => id.startsWith(f))) return false;
// Most of these don't have analyses
const capNFE = species.isNonstandard === 'CAP' && species.nfe;
return !id.endsWith('totem') && !capNFE && !similar.some(f => id.startsWith(f) && id !== f);
}
// TODO: Fix dex data such that CAP mons have a correct gen set
function toGen(dex: ModdedDex, name: string): GenerationNum | undefined {
const pokemon = dex.getSpecies(name);
if (pokemon.isNonstandard === 'LGPE') return 7;
if (!pokemon.exists || (pokemon.isNonstandard && pokemon.isNonstandard !== 'CAP')) return undefined;
const n = pokemon.num;
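// Positive numbers are National Dex cutoffs per generation; the negative ranges
// appear to cover CAP Pokémon grouped by generation (assumption).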
if (n > 810) return 8;
if (n > 721 || (n <= -23 && n >= -28) || (n <= -120 && n >= -126)) return 7;
if (n > 649 || (n <= -8 && n >= -22) || (n <= -106 && n >= -110)) return 6;
if (n > 493 || (n <= -12 && n >= -17) || (n <= -111 && n >= -115)) return 5;
if (n > 386 || (n <= -1 && n >= -11) || (n <= -101 && n >= -104) || (n <= -116 && n >= -119)) return 4;
if (n > 251) return 3;
if (n > 151) return 2;
if (n > 0) return 1;
}
async function importSmogonSets(
pokemon: string,
gen: GenerationNum,
setsByFormat: {[format: string]: PokemonSets},
numByFormat: {[format: string]: number}
) {
const analysesByFormat = await getAnalysesByFormat(pokemon, gen);
if (!analysesByFormat) return;
for (const [format, analyses] of analysesByFormat.entries()) {
const dex = Dex.forFormat(format);
let setsForPokemon = setsByFormat[format.id];
if (!setsForPokemon) {
setsForPokemon = {};
setsByFormat[format.id] = setsForPokemon;
}
let baseSpecies = dex.getSpecies(pokemon);
if (baseSpecies.baseSpecies !== baseSpecies.name) baseSpecies = dex.getSpecies(baseSpecies.baseSpecies);
const battleOnlyFormes: Species[] = [];
if (baseSpecies.otherFormes) {
for (const forme of baseSpecies.otherFormes) {
const formeSpecies = dex.getSpecies(forme);
if (formeSpecies.battleOnly && eligible(dex, toID(formeSpecies))) {
battleOnlyFormes.push(formeSpecies);
}
}
}
for (const analysis of analyses) {
for (const moveset of analysis.movesets) {
const set = movesetToPokemonSet(dex, format, pokemon, moveset);
const name = cleanName(moveset.name);
addSmogonSet(dex, format, pokemon, name, set, setsForPokemon, numByFormat);
for (const battleOnlyForme of battleOnlyFormes) {
// Note: this is just a shallow copy which is fine because we're just modifying the ability
const s = {...set};
if (!format.id.includes('balancedhackmons')) s.ability = battleOnlyForme.abilities[0];
if (typeof battleOnlyForme.battleOnly !== 'string') {
if (!battleOnlyForme.battleOnly!.includes(pokemon)) continue;
const species = dex.getSpecies(pokemon);
const disambiguated = `${name} - ${species.baseForme || species.forme}`;
addSmogonSet(dex, format, battleOnlyForme.name, disambiguated, s, setsForPokemon, numByFormat, pokemon);
} else if (battleOnlyForme.battleOnly === pokemon) {
addSmogonSet(dex, format, battleOnlyForme.name, name, s, setsForPokemon, numByFormat);
}
}
}
}
}
}
function addSmogonSet(
dex: ModdedDex,
format: Format,
pokemon: string,
name: string,
set: DeepPartial<PokemonSet>,
setsForPokemon: PokemonSets,
numByFormat: {[format: string]: number},
outOfBattleSpeciesName?: string
) {
if (validSet('dex', dex, format, pokemon, name, set, outOfBattleSpeciesName)) {
setsForPokemon[pokemon] = setsForPokemon[pokemon] || {};
setsForPokemon[pokemon][name] = set;
numByFormat[format.id] = (numByFormat[format.id] || 0) + 1;
}
}
function cleanName(name: string) {
return name.replace(/"/g, `'`);
}
function movesetToPokemonSet(dex: ModdedDex, format: Format, pokemon: string, set: smogon.Moveset) {
const level = getLevel(format, set.level);
return {
level: level === 100 ? undefined : level,
moves: set.moveslots.map(ms => ms[0]).map(s => s.type ? `${s.move} ${s.type}` : s.move),
ability: fixedAbility(dex, pokemon, set.abilities[0]),
item: set.items[0] === 'No Item' ? undefined : set.items[0],
nature: set.natures[0],
ivs: toStatsTable(set.ivconfigs[0], 31),
evs: toStatsTable(set.evconfigs[0]),
};
}
function toStatsTable(stats?: StatsTable, elide = 0) {
if (!stats) return undefined;
const s: Partial<StatsTable> = {};
let stat: keyof StatsTable;
for (stat in stats) {
const val = stats[stat];
if (val !== elide) s[stat] = val;
}
return s;
}
function fixedAbility(dex: ModdedDex, pokemon: string, ability?: string) {
if (dex.gen <= 2) return undefined;
const species = dex.getSpecies(pokemon);
if (ability && !['Mega', 'Primal', 'Ultra'].some(f => species.forme.startsWith(f))) return ability;
return species.abilities[0];
}
function validSet(
source: string,
dex: ModdedDex,
format: Format,
pokemon: string,
name: string,
set: DeepPartial<PokemonSet>,
outOfBattleSpeciesName?: string
) {
if (skip(dex, format, pokemon, set)) return false;
const pset = toPokemonSet(dex, format, pokemon, set, outOfBattleSpeciesName);
let invalid = VALIDATORS.get(format.id)!.validateSet(pset, {});
if (!invalid) return true;
// Correct invalidations where set is required to be shiny due to an event
if (invalid.length === 1 && invalid[0].includes('must be shiny')) {
set.shiny = true;
pset.shiny = true;
invalid = VALIDATORS.get(format.id)!.validateSet(pset, {});
if (!invalid) return true;
}
// Allow Gen 4 Arceus sets because they're occasionally useful for tournaments
if (format.id === 'gen4ubers' && invalid.includes(`${pokemon} is banned.`)) return true;
const title = `${format.name}: ${pokemon} (${name})'`;
const details = `${JSON.stringify(set)} = ${invalid.join(', ')}`;
// console.error(`${color(source, 94)} Invalid set ${color(title, 91)}: ${color(details, 90)}`);
console.error(color(`${source} Invalid set ${title}: ${details}`, 90));
return false;
}
function skip(dex: ModdedDex, format: Format, pokemon: string, set: DeepPartial<PokemonSet>) {
const {gen} = FORMATS.get(format.id)!;
const hasMove = (m: string) => set.moves && set.moves.includes(m);
const bh = format.id.includes('balancedhackmons');
if (pokemon === 'Groudon-Primal' && set.item !== 'Red Orb') return true;
if (pokemon === 'Kyogre-Primal' && set.item !== 'Blue Orb' && !(bh && gen === 7)) return true;
if (bh) return false; // Everything else is legal or will get stripped by the team validator anyway
if (dex.getSpecies(pokemon).forme.startsWith('Mega')) {
if (pokemon === 'Rayquaza-Mega') {
return format.id.includes('ubers') || !hasMove('Dragon Ascent');
} else {
return dex.getItem(set.item).megaStone !== pokemon;
}
}
if (pokemon === 'Necrozma-Ultra' && set.item !== 'Ultranecrozium Z') return true;
if (pokemon === 'Greninja-Ash' && set.ability !== 'Battle Bond') return true;
if (pokemon === 'Zygarde-Complete' && set.ability !== 'Power Construct') return true;
if (pokemon === 'Darmanitan-Zen' && set.ability !== 'Zen Mode') return true;
if (pokemon === 'Meloetta-Pirouette' && !hasMove('Relic Song')) return true;
return false;
}
function toPokemonSet(
dex: ModdedDex,
format: Format,
pokemon: string,
set: DeepPartial<PokemonSet>,
outOfBattleSpeciesName?: string
): PokemonSet {
// To simplify things, during validation we mutate the input set to correct for HP mismatches
const hp = set.moves && set.moves.find(m => m.startsWith('Hidden Power'));
let fill = dex.gen === 2 ? 30 : 31;
if (hp) {
const type = hp.slice(13);
if (type && dex.getHiddenPower(fillStats(set.ivs, fill)).type !== type) {
if (!set.ivs || (dex.gen >= 7 && (!set.level || set.level === 100))) {
set.hpType = type;
fill = 31;
} else if (dex.gen === 2) {
const dvs = {...dex.getType(type).HPdvs};
let stat: StatName;
for (stat in dvs) {
dvs[stat]! *= 2;
}
set.ivs = {...dvs, ...set.ivs};
set.ivs.hp = expectedHP(set.ivs);
} else {
set.ivs = {...dex.getType(type).HPivs, ...set.ivs};
}
}
}
const copy = {species: pokemon, ...set} as PokemonSet;
copy.ivs = fillStats(set.ivs, fill);
// The validator expects us to have at least 1 EV set to prove it is intentional
if (!set.evs && dex.gen >= 3 && format.id !== 'gen7letsgoou') set.evs = {spe: 1};
copy.evs = fillStats(set.evs, dex.gen <= 2 ? 252 : 0);
// The validator wants an ability even when Gen < 3
copy.ability = copy.ability || 'None';
const species = dex.getSpecies(pokemon);
if (species.battleOnly && !format.id.includes('balancedhackmons')) {
if (outOfBattleSpeciesName) {
copy.species = outOfBattleSpeciesName;
} else if (typeof species.battleOnly === 'string') {
copy.species = species.battleOnly;
} else {
throw new Error(`Unable to disambiguate out of battle species for ${species.name} in ${format.id}`);
}
copy.ability = dex.getSpecies(copy.species).abilities[0];
}
return copy;
}
function expectedHP(ivs: Partial<StatsTable>) {
ivs = fillStats(ivs, 31);
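// Gen 1/2 DVs are IVs halved; the HP DV is built from the low bit of each other DV
// (Atk -> 8, Def -> 4, Spe -> 2, Spc -> 1), and the HP IV is twice that DV.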
const atkDV = Math.floor(ivs.atk! / 2);
const defDV = Math.floor(ivs.def! / 2);
const speDV = Math.floor(ivs.spe! / 2);
const spcDV = Math.floor(ivs.spa! / 2);
return 2 * ((atkDV % 2) * 8 + (defDV % 2) * 4 + (speDV % 2) * 2 + (spcDV % 2));
}
function fillStats(stats?: Partial<StatsTable>, fill = 0) {
return TeamValidator.fillStats(stats || null, fill);
}
const SMOGON = {
uber: 'ubers',
doubles: 'doublesou',
lgpeou: 'letsgoou',
ag: 'anythinggoes',
bh: 'balancedhackmons',
vgc16: 'vgc2016',
vgc17: 'vgc2017',
vgc18: 'vgc2018',
vgc19: 'vgc2019ultraseries',
} as unknown as {[id: string]: ID};
const getAnalysis = retrying(async (u: string) => {
try {
return smogon.Analyses.process(await request(u));
} catch (err) {
// Don't retry HTTP errors that the underlying request helper has already retried
if (err.message.startsWith('HTTP')) {
return Promise.reject(err);
} else {
return Promise.reject(new RetryableError(err.message));
}
}
}, 3, 50);
async function getAnalysesByFormat(pokemon: string, gen: GenerationNum) {
const u = smogon.Analyses.url(pokemon === 'Meowstic' ? 'Meowstic-M' : pokemon, gen);
try {
const analysesByTier = await getAnalysis(u);
if (!analysesByTier) {
error(`Unable to process analysis for ${pokemon} in generation ${gen}`);
return undefined;
}
const analysesByFormat = new Map<Format, smogon.Analysis[]>();
for (const [tier, analyses] of analysesByTier.entries()) {
const t = toID(tier);
const f = FORMATS.get(`gen${gen}${SMOGON[t] || t}` as ID);
if (f) analysesByFormat.set(f.format, analyses);
}
return analysesByFormat;
} catch (err) {
error(`Unable to process analysis for ${pokemon} in generation ${gen}`);
return undefined;
}
}
function getLevel(format: Format, level = 0) {
if (format.forcedLevel) return format.forcedLevel;
const maxLevel = format.maxLevel || 100;
const maxForcedLevel = format.maxForcedLevel || maxLevel;
if (!level) level = format.defaultLevel || maxLevel;
return level > maxForcedLevel ? maxForcedLevel : level;
}
export async function getStatisticsURL(
index: string,
format: Format
): Promise<{url: string, count: number} | undefined> {
const current = index.includes(format.id);
const latest = await smogon.Statistics.latestDate(format.id, !current);
if (!latest) return undefined;
return {url: smogon.Statistics.url(latest.date, format.id, current || 1500), count: latest.count};
}
// TODO: Use bigram matrix, bucketed spreads and generative validation logic for more realistic sets
function importUsageBasedSets(gen: GenerationNum, format: Format, statistics: smogon.UsageStatistics, count: number) {
const sets: PokemonSets = {};
const dex = Dex.forFormat(format);
const threshold = getUsageThreshold(format, count);
let num = 0;
for (const pokemon in statistics.data) {
const stats = statistics.data[pokemon];
if (eligible(dex, toID(pokemon)) && stats.usage >= threshold) {
const set: DeepPartial<PokemonSet> = {
level: getLevel(format),
moves: (top(stats.Moves, 4) as string[]).map(m => dex.getMove(m).name).filter(m => m),
};
if (gen >= 2 && format.id !== 'gen7letsgoou') {
const id = top(stats.Items) as string;
set.item = dex.getItem(id).name;
if (set.item === 'nothing') set.item = undefined;
}
if (gen >= 3) {
const id = top(stats.Abilities) as string;
set.ability = fixedAbility(dex, pokemon, dex.getAbility(id).name);
const {nature, evs} = fromSpread(top(stats.Spreads) as string);
set.nature = nature;
if (format.id !== 'gen7letsgoou') {
if (!evs || !Object.keys(evs).length) continue;
set.evs = evs;
}
}
const name = 'Showdown Usage';
if (validSet('stats', dex, format, pokemon, name, set)) {
sets[pokemon] = {};
sets[pokemon][name] = set;
num++;
}
}
}
report(format, num, 'stats');
return sets;
}
function getUsageThreshold(format: Format, count: number) {
// For old metagames with extremely low total battle counts we adjust the thresholds
if (count < 100) return Infinity;
if (count < 400) return 0.05;
// These formats are deemed to have playerbases of lower quality than normal
return /uber|anythinggoes|doublesou/.test(format.id) ? 0.03 : 0.01;
}
const STATS: StatName[] = ['hp', 'atk', 'def', 'spa', 'spd', 'spe'];
function fromSpread(spread: string) {
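// Usage-stats spreads look like "Adamant:0/252/4/0/0/252": a nature followed by
// EVs in hp/atk/def/spa/spd/spe order.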
const [nature, revs] = spread.split(':');
const evs: Partial<StatsTable> = {};
for (const [i, rev] of revs.split('/').entries()) {
const ev = Number(rev);
if (ev) evs[STATS[i]] = ev;
}
return {nature, evs};
}
function top(weighted: {[key: string]: number}, n = 1): string | string[] | undefined {
if (n === 0) return undefined;
// Optimize the more common case with a linear algorithm instead of log-linear
if (n === 1) {
let max;
for (const key in weighted) {
if (!max || weighted[max] < weighted[key]) max = key;
}
return max;
}
return Object.entries(weighted)
.sort((a, b) => b[1] - a[1])
.slice(0, n)
.map(x => x[0]);
}
class RetryableError extends Error {
constructor(message?: string) {
super(message);
// restore prototype chain
Object.setPrototypeOf(this, new.target.prototype);
}
}
// We throttle to 20 QPS by only issuing one request every 50ms at most. This
// is importantly different from using the more obvious 20 and 1000ms here,
// as it results in more spaced out requests which won't cause as many getaddrinfo
// ENOTFOUND errors (nodejs/node-v0.x-archive#5488). Similarly, the evenly spaced
// requests make us significantly less likely to encounter ECONNRESET errors
// on macOS (though these are still pretty frequent, Linux is recommended for running
// this tool). Retry up to 5 times with a 20ms backoff increment.
const request = retrying(throttling(fetch, 1, 50), 5, 20);
export function fetch(u: string) {
const client = u.startsWith('http:') ? http : https;
return new Promise<string>((resolve, reject) => {
// @ts-ignore Typescript bug - thinks the second argument should be RequestOptions, not a callback
const req = client.get(u, (res: IncomingMessage) => {
if (res.statusCode !== 200) {
if (res.statusCode >= 500 && res.statusCode < 600) {
return reject(new RetryableError(`HTTP ${res.statusCode}`));
} else if (res.statusCode >= 300 && res.statusCode <= 400 && res.headers.location) {
resolve(fetch(url.resolve(u, res.headers.location)));
} else {
return reject(new Error(`HTTP ${res.statusCode}`));
}
}
Streams.readAll(res).then(resolve, reject);
});
req.on('error', reject);
req.end();
});
}
function retrying<I, O>(fn: (args: I) => Promise<O>, retries: number, wait: number): (args: I) => Promise<O> {
const retry = async (args: I, attempt = 0): Promise<O> => {
try {
return await fn(args);
} catch (err) {
if (err instanceof RetryableError) {
attempt++;
if (attempt > retries) return Promise.reject(err);
const timeout = Math.round(attempt * wait * (1 + Math.random() / 2));
warn(`Retrying ${args} in ${timeout}ms (${attempt}):`, err);
return new Promise(resolve => {
setTimeout(() => {
resolve(retry(args, attempt++));
}, timeout);
});
} else {
return Promise.reject(err);
}
}
};
return retry;
}
function throttling<I, O>(fn: (args: I) => Promise<O>, limit: number, interval: number): (args: I) => Promise<O> {
const queue = new Map();
let currentTick = 0;
let activeCount = 0;
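// Calls that arrive within `interval` of the current tick share it; once `limit`
// calls are scheduled for a tick, the tick is pushed forward so later calls wait.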
const throttled = (args: I) => {
let timeout: NodeJS.Timeout;
return new Promise<O>((resolve, reject) => {
const execute = () => {
resolve(fn(args));
queue.delete(timeout);
};
const now = Date.now();
if (now - currentTick > interval) {
activeCount = 1;
currentTick = now;
} else if (activeCount < limit) {
activeCount++;
} else {
currentTick += interval;
activeCount = 1;
}
timeout = setTimeout(execute, currentTick - now);
queue.set(timeout, reject);
});
};
return throttled;
}
function color(s: any, code: number) {
return util.format(`\x1b[${code}m%s\x1b[0m`, s);
}
function report(format: Format, num: number, source: string) {
console.info(`${format.name}: ${color(num, 33)} ${color(`(${source})`, 90)}`);
}
function warn(s: string, err: Error) {
console.warn(`${color(s, 33)} ${color(err.message, 90)}`);
}
function | (s: string) {
console.error(color(s, 91));
}
| error |
mod.rs | use bytes::{Bytes, BytesMut};
use bytesstr::BytesStr;
use std::fmt;
pub mod candidate;
pub mod direction;
pub mod fmtp;
pub mod ice;
pub mod rtcp;
pub mod rtpmap;
/// `name:[value]` pair which contains an unparsed/unknown attribute
#[derive(Debug, Clone)]
pub struct UnknownAttribute {
/// Attribute name, the part before the optional `:`
pub name: BytesStr,
/// if the optional `:` is present the part parsed after is stored inside `value`
pub value: Option<BytesStr>,
}
impl UnknownAttribute {
pub fn parse(src: &Bytes, line: &str) -> Self {
match line.split_once(':') {
None => Self {
name: BytesStr::from_parse(src, line),
value: None,
},
Some((name, value)) => Self {
name: BytesStr::from_parse(src, name),
value: Some(BytesStr::from_parse(src, value)),
},
}
}
pub fn | (&self, buf: &mut BytesMut) {
buf.extend_from_slice(self.name.as_ref());
if let Some(value) = &self.value {
buf.extend_from_slice(b":");
buf.extend_from_slice(value.as_ref());
}
}
}
impl fmt::Display for UnknownAttribute {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.name.as_str())?;
if let Some(value) = &self.value {
write!(f, ":{}", value)?;
}
Ok(())
}
}
| print |
generator.ts | import * as path from 'path';
import * as fse from 'fs-extra';
import { formatPath } from '@builder/app-helpers';
import { getPageDir, getPageStorePath } from './utils/getPath';
import checkPageIndexFileExists from './utils/checkPageIndexFileExists';
export interface IRenderPageParams {
pageName: string;
pageNameDir: string;
pageStoreFile: string;
}
export default class | {
private srcPath: string
private tempPath: string
private applyMethod: Function
private disableResetPageState: boolean
constructor({
srcPath,
tempPath,
applyMethod,
disableResetPageState,
}: {
srcPath: string;
tempPath: string;
applyMethod: Function;
disableResetPageState: boolean;
}) {
this.srcPath = srcPath;
this.tempPath = tempPath;
this.applyMethod = applyMethod;
this.disableResetPageState = disableResetPageState;
}
public render(rerender = false) {
if (!rerender) {
// avoid rerendering files
// generate .ice/store/index.ts
this.renderAppStore();
// generate .ice/store/types.ts
this.renderAppStoreTypes();
}
const pagesName: string[] = this.applyMethod('getPages', this.srcPath);
pagesName.forEach((pageName: string) => {
const pageNameDir = getPageDir(this.srcPath, pageName);
const pageStoreFile = formatPath(getPageStorePath(this.srcPath, pageName));
const params = { pageName, pageNameDir, pageStoreFile };
// generate .ice/pages/${pageName}/index.tsx
this.renderPageComponent(params);
// generate .ice/pages/${pageName}/Layout.tsx
this.renderPageLayout(params);
});
}
private renderAppStore() {
const sourceFilename = 'plugins/store/index';
const appStoreTemplatePath = path.join(__dirname, './template/appStore.ts.ejs');
const targetPath = path.join(this.tempPath, `${sourceFilename}.ts`);
this.applyMethod('addRenderFile', appStoreTemplatePath, targetPath);
}
private renderAppStoreTypes() {
const sourceFilename = 'plugins/store/types';
const typesTemplatePath = path.join(__dirname, './template/types.ts.ejs');
const targetPath = path.join(this.tempPath, `${sourceFilename}.ts`);
this.applyMethod('addRenderFile', typesTemplatePath, targetPath);
this.applyMethod('addTypesExport', { source: '../plugins/store/types' });
this.applyMethod('addImportDeclaration', {
importSource: '$$ice/plugins/store/types',
exportMembers: ['IRootDispatch', 'IRootState', 'IStore', 'IStoreModels', 'IStoreDispatch', 'IStoreRootState'],
});
}
private renderPageComponent({ pageName, pageNameDir, pageStoreFile }: IRenderPageParams) {
const pageComponentTemplatePath = path.join(__dirname, './template/pageComponent.tsx.ejs');
const pageComponentTargetPath = path.join(this.tempPath, 'pages', pageName, 'index.tsx');
const pageComponentSourcePath = formatPath(path.join(pageNameDir, 'index'));
const pageComponentName = 'PageComponent';
const pageStoreExtname = path.extname(pageStoreFile);
const pageComponentRenderData = {
pageComponentImport: `import ${pageComponentName} from '${pageComponentSourcePath}'`,
pageComponentExport: pageComponentName,
storeFileExists: !!pageStoreFile,
pageStoreImport: pageStoreFile ? `import store from '${pageStoreFile.replace(pageStoreExtname, '')}'` : '',
disableResetPageState: this.disableResetPageState,
};
checkPageIndexFileExists(pageNameDir);
this.applyMethod('addRenderFile', pageComponentTemplatePath, pageComponentTargetPath, pageComponentRenderData);
}
private renderPageLayout({ pageName, pageNameDir, pageStoreFile }: IRenderPageParams) {
const pageComponentTemplatePath = path.join(__dirname, './template/pageComponent.tsx.ejs');
const pageComponentTargetPath = path.join(this.tempPath, 'pages', pageName, 'Layout.tsx');
const pageComponentSourcePath = formatPath(`${pageNameDir}/Layout`);
if (!fse.pathExistsSync(pageComponentSourcePath)) {
return;
}
const pageLayoutName = `${pageName}Layout`;
const pageStoreExtname = path.extname(pageStoreFile);
const pageLayoutRenderData = {
pageComponentImport: `import ${pageLayoutName} from '${pageComponentSourcePath}'`,
pageComponentExport: pageLayoutName,
storeFileExists: !!pageStoreFile,
pageStoreImport: pageStoreFile ? `import store from '${pageStoreFile.replace(pageStoreExtname, '')}'` : '',
disableResetPageState: this.disableResetPageState,
};
checkPageIndexFileExists(pageNameDir);
this.applyMethod('addRenderFile', pageComponentTemplatePath, pageComponentTargetPath, pageLayoutRenderData);
}
}
| Generator |
querier.go | package faucet
import (
"os"
"github.com/deep2chain/sscq/types"
sdk "github.com/deep2chain/sscq/types"
"github.com/ethereum/go-ethereum/common"
log "github.com/sirupsen/logrus"
)
func init() {
// junying-todo,2020-01-17
lvl, ok := os.LookupEnv("LOG_LEVEL")
// LOG_LEVEL not set, let's default to info
if !ok {
lvl = "info" //trace/debug/info/warn/error/parse/fatal/panic | ll, err := log.ParseLevel(lvl)
if err != nil {
ll = log.FatalLevel //TraceLevel/DebugLevel/InfoLevel/WarnLevel/ErrorLevel/ParseLevel/FatalLevel/PanicLevel
}
// set global log level
log.SetLevel(ll)
log.SetFormatter(&log.TextFormatter{}) //&log.JSONFormatter{})
}
// Query endpoints supported by the core querier
const (
QueryContract = "contract"
//
ZeroAddress = "0000000000000000000000000000000000000000"
//
TxGasLimit = 100000
)
//
type MsgTest struct {
From sdk.AccAddress
}
func NewMsgTest(addr sdk.AccAddress) MsgTest {
return MsgTest{
From: addr,
}
}
func (msg MsgTest) FromAddress() common.Address {
return types.ToEthAddress(msg.From)
}
func isZeroByte(data []byte) bool {
for index := 0; index < len(data); index++ {
if data[index] != 0 {
return false
}
}
return true
} | }
// parse the level string; this is a built-in feature of logrus
sfclusterreplicator_controller.go | /*
Copyright 2019 The Service Fabrik Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sfclusterreplicator
import (
"context"
resourcev1alpha1 "github.com/cloudfoundry-incubator/service-fabrik-broker/interoperator/api/resource/v1alpha1"
"github.com/cloudfoundry-incubator/service-fabrik-broker/interoperator/controllers/multiclusterdeploy/watchmanager"
"github.com/cloudfoundry-incubator/service-fabrik-broker/interoperator/pkg/cluster/registry"
"github.com/cloudfoundry-incubator/service-fabrik-broker/interoperator/pkg/constants"
"github.com/cloudfoundry-incubator/service-fabrik-broker/interoperator/pkg/watches"
"github.com/go-logr/logr"
"github.com/prometheus/client_golang/prometheus"
apiErrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"k8s.io/client-go/util/retry"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/handler"
"sigs.k8s.io/controller-runtime/pkg/metrics"
"sigs.k8s.io/controller-runtime/pkg/source"
)
var (
allocatableMetric = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "allocatable",
Namespace: "interoperator",
Subsystem: "cluster",
Help: "Allocatable resources partitioned by cluster and resource type",
},
[]string{
// Which cluster?
"cluster",
// Type of the resource
"type",
},
)
instancesMetric = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "service_instances",
Namespace: "interoperator",
Subsystem: "cluster",
Help: "Number of service instances partitioned by cluster",
},
[]string{
// Which cluster?
"cluster",
},
)
)
// Indirection so the function can be mocked in tests
var getWatchChannel = watchmanager.GetWatchChannel
// SFClusterReplicator replicates SFCluster
type SFClusterReplicator struct {
client.Client
Log logr.Logger
Scheme *runtime.Scheme
clusterRegistry registry.ClusterRegistry
}
// Reconcile reads the state of the cluster for an SFCluster object on the master and sister clusters
// and replicates it.
func (r *SFClusterReplicator) Reconcile(req ctrl.Request) (ctrl.Result, error) {
ctx := context.Background()
log := r.Log.WithValues("sfcluster", req.NamespacedName)
cluster := &resourcev1alpha1.SFCluster{}
replica := &resourcev1alpha1.SFCluster{}
err := r.Get(ctx, req.NamespacedName, cluster)
if err != nil {
if apiErrors.IsNotFound(err) {
// Object not found, return.
return ctrl.Result{}, nil
}
// Error reading the object - requeue the request.
return ctrl.Result{}, err
}
clusterID := cluster.GetName()
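// Allocatable = total capacity (falling back to current capacity when total is unset)
// minus the resources already requested; exported per cluster and resource type.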
allocatable := cluster.Status.TotalCapacity.DeepCopy()
if allocatable == nil || len(allocatable) == 0 {
allocatable = cluster.Status.CurrentCapacity.DeepCopy()
}
resourcev1alpha1.ResourceListSub(allocatable, cluster.Status.Requests)
for key, quantity := range allocatable {
allocatableMetric.WithLabelValues(clusterID, key.String()).Set(float64(quantity.Value()))
}
instancesMetric.WithLabelValues(clusterID).Set(float64(cluster.Status.ServiceInstanceCount))
if clusterID == constants.OwnClusterID {
// Target cluster is mastercluster itself
// Replication not needed
return ctrl.Result{}, nil
}
targetClient, err := r.clusterRegistry.GetClient(clusterID)
if err != nil {
return ctrl.Result{}, err
}
err = targetClient.Get(ctx, req.NamespacedName, replica)
if err != nil {
if apiErrors.IsNotFound(err) {
// Object not found, return.
log.Info("sfcluster not found in sister. ignoring")
return ctrl.Result{}, nil
}
// Error reading the object - requeue the request.
return ctrl.Result{}, err
}
err = r.reconcileSpec(targetClient, cluster, replica)
if err != nil {
return ctrl.Result{}, err
}
err = r.reconcileStatus(targetClient, cluster, replica)
if err != nil {
return ctrl.Result{}, err
}
return ctrl.Result{}, nil
}
func (r *SFClusterReplicator) reconcileSpec(targetClient client.Client, cluster, replica *resourcev1alpha1.SFCluster) error {
ctx := context.Background()
log := r.Log.WithValues("clusterID", cluster.GetName())
err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
updateRequired := false | updateRequired = true
replica.Spec.SchedulingLimitPercentage = cluster.Spec.SchedulingLimitPercentage
}
if !resourcev1alpha1.ResourceListEqual(cluster.Spec.TotalCapacity, replica.Spec.TotalCapacity) {
updateRequired = true
replica.Spec.TotalCapacity = cluster.Spec.TotalCapacity.DeepCopy()
}
if cluster.Spec.SecretRef != replica.Spec.SecretRef {
updateRequired = true
replica.Spec.SecretRef = cluster.Spec.SecretRef
}
if updateRequired {
err := targetClient.Update(ctx, replica)
if err != nil {
if apiErrors.IsConflict(err) {
namespacedName := types.NamespacedName{
Name: cluster.GetName(),
Namespace: cluster.GetNamespace(),
}
_ = targetClient.Get(ctx, namespacedName, replica)
}
return err
}
log.Info("updated cluster spec on sister cluster")
}
return nil
})
if err != nil {
log.Error(err, "failed to update cluster spec on sister cluster")
return err
}
return nil
}
func (r *SFClusterReplicator) reconcileStatus(targetClient client.Client, cluster, replica *resourcev1alpha1.SFCluster) error {
ctx := context.Background()
log := r.Log.WithValues("clusterID", cluster.GetName())
err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
updateRequired := false
if !resourcev1alpha1.ResourceListEqual(cluster.Status.CurrentCapacity, replica.Status.CurrentCapacity) {
updateRequired = true
cluster.Status.CurrentCapacity = replica.Status.CurrentCapacity.DeepCopy()
}
if !resourcev1alpha1.ResourceListEqual(cluster.Status.TotalCapacity, replica.Status.TotalCapacity) {
updateRequired = true
cluster.Status.TotalCapacity = replica.Status.TotalCapacity.DeepCopy()
}
if !resourcev1alpha1.ResourceListEqual(cluster.Status.Requests, replica.Status.Requests) {
updateRequired = true
cluster.Status.Requests = replica.Status.Requests.DeepCopy()
}
if updateRequired {
err := r.Status().Update(ctx, cluster)
if err != nil {
if apiErrors.IsConflict(err) {
namespacedName := types.NamespacedName{
Name: cluster.GetName(),
Namespace: cluster.GetNamespace(),
}
_ = r.Get(ctx, namespacedName, cluster)
}
return err
}
log.Info("updated cluster status from sister cluster")
}
return nil
})
if err != nil {
log.Error(err, "failed to update cluster status from sister cluster")
return err
}
return nil
}
// SetupWithManager registers the MCD SFCluster replicator with manager
// and setups the watches.
func (r *SFClusterReplicator) SetupWithManager(mgr ctrl.Manager) error {
if r.Log == nil {
r.Log = ctrl.Log.WithName("mcd").WithName("replicator").WithName("cluster")
}
if r.clusterRegistry == nil {
clusterRegistry, err := registry.New(mgr.GetConfig(), mgr.GetScheme(), mgr.GetRESTMapper())
if err != nil {
return err
}
r.clusterRegistry = clusterRegistry
}
// Watch for changes to SFCluster in sister clusters
watchEvents, err := getWatchChannel("sfclusters")
if err != nil {
return err
}
metrics.Registry.MustRegister(allocatableMetric, instancesMetric)
builder := ctrl.NewControllerManagedBy(mgr).
Named("mcd_replicator_cluster").
For(&resourcev1alpha1.SFCluster{}).
Watches(&source.Channel{Source: watchEvents}, &handler.EnqueueRequestForObject{}).
WithEventFilter(watches.NamespaceFilter())
return builder.Complete(r)
} | if cluster.Spec.SchedulingLimitPercentage != replica.Spec.SchedulingLimitPercentage { |
day20_2021.py | # Day20 - 2021 Advent of code
# source: https://adventofcode.com/2021/day/20
import os
import numpy as np
def | ():
os.system('clear')
print('< .... AoC 2021 Day 20, part 1 .... >')
print()
return
# convert from pixels to binary "...#...#." -> "000100010"
def pix_2_binary(the_9_pixels):
i = 0
binaryX = ""
pix = the_9_pixels
#check all fields in list. "." -> 0 and "#" -> 1
while i < 9:
if pix[i] == ".": binaryX += "0"
else: binaryX += "1"
i += 1
return binaryX
# find all adjacent cells and add content of all cells to a string
def find_adjacent_pixels(grid, element):
#the 9 pixels - around and including the pos - visualised here with a zero:
# # . . # .
# #[. . .].
# #[# 0 .]#
# .[. # .].
# . . # # #
the_9_pixels = ""
maxRow, maxCol = grid.shape
maxRow -= 1 #starting from zero
maxCol -= 1 #starting from zero
elementProcessed = False
# check all 4 corners - use dots for all positions "outside the grid"
if element == [0,0]:
the_9_pixels = the_9_pixels + "...." + grid[0,0] + grid[0, 1] + "." + grid[1, 0] + grid[1, 1]
elementProcessed = True
if element == [0,maxCol]:
the_9_pixels = the_9_pixels + "..." + grid[0, maxCol-1] + grid[0,maxCol] + "." + grid[1, maxCol-1] + grid[1, maxCol] + "."
elementProcessed = True
if element == [maxRow,maxCol]:
the_9_pixels = the_9_pixels + grid[maxRow-1, maxCol-1] + grid[maxRow-1, maxCol] + "." + grid[maxRow, maxCol-1] + grid[maxRow,maxCol] + "...."
elementProcessed = True
if element == [maxRow,0]:
the_9_pixels = the_9_pixels + "." + grid[maxRow-1, 0] + grid[maxRow-1, 1] + "." + grid[maxRow, 0] + grid[maxRow, 1] + "..."
elementProcessed = True
#check if first column or last column
if elementProcessed != True:
if element[1] == 0: #first column
row = element[0]
the_9_pixels = the_9_pixels + "."+grid[row-1, 0]+grid[row-1, 1] + "."+grid[row, 0]+grid[row, 1] + "."+grid[row+1, 0]+grid[row+1, 1]
elementProcessed = True
elif element[1] == maxCol: #last column
row = element[0]
the_9_pixels = the_9_pixels + grid[row-1, maxCol-1]+grid[row-1, maxCol]+"."+ grid[row, maxCol-1]+grid[row, maxCol]+"." + grid[row+1, maxCol-1]+grid[row+1, maxCol]+"."
elementProcessed = True
#check if first row or last row
if elementProcessed != True:
if element[0] == 0: #first row
col = element[1]
the_9_pixels = the_9_pixels + "..."+grid[0, col-1]+grid[0, col]+grid[0, col+1]+grid[1, col-1]+grid[1, col]+grid[1, col+1]
elementProcessed = True
elif element[0] == maxRow: #last row
col = element[1]
the_9_pixels = the_9_pixels + grid[maxRow-1, col-1]+grid[maxRow-1, col]+grid[maxRow-1, col+1]+grid[maxRow, col-1]+grid[maxRow, col]+grid[maxRow, col+1] + "..."
elementProcessed = True
#the position is in the middle if still false
if elementProcessed != True:
row = element[0]
col = element[1]
the_9_pixels = the_9_pixels + grid[row-1, col-1]+grid[row-1, col]+grid[row-1, col+1]+grid[row, col-1]+grid[row, col]+grid[row, col+1] + grid[row+1, col-1]+grid[row+1, col]+grid[row+1, col+1]
return the_9_pixels
# do a new step
def step_away(input_image, image_algo, valueX):
maxRow, maxCol = input_image.shape
#start with an empty output image
data_list = ["."] * (maxRow * maxCol)
output_image = np.array(data_list, dtype="str").reshape(maxRow, maxCol)
row = 0
while row < maxRow:
#REPEAT for all pixels i input picture
# for each dot in input image - find the 9 pixels around a dot -> e.g. [...#...#.]
col = 0
while col < maxCol:
cell = [row, col]
the_9_pixels = find_adjacent_pixels(input_image, cell)
# transform from pixel to binary -> e.g. [000100010]
binaryX = pix_2_binary(the_9_pixels)
# convert from binary to decimal -> e.g. 34
numX = int(binaryX, 2)
# find the symbol in that specific pos in image algo -> image_algo[34] -> "#"
outSymbol = image_algo[numX]
# add that symbol to the OUTPUT PICTURE in same position as in input image
output_image[row, col] = outSymbol
# if pixel is lit in output picture (i.e. "#") -> , valueX += 1
if outSymbol == "#": valueX += 1
#next column
col += 1
#next row
row += 1
return valueX, output_image
# process the data with all indicated steps
def process_the_data(input_image, image_algo):
#number of steps
noSteps = 2
i = 0
#list_of_10 = []
while i < noSteps:
#valueX holds the number of lit pixels
valueX = 0
#do a step and see what changes are done to the input image
#the resulting image from the setp is the new input image for next cycle
valueX, input_image = step_away(input_image, image_algo, valueX)
i += 1
#print(input_image, "\n")
return valueX
#add some empty columns
def add_cols(data_list, cols):
n = 0
while n < cols:
data_list.append(".")
n += 1
return data_list
#add some empty rows
def add_rows(data_list, rows, cols):
n = 0
while n < rows*cols:
data_list.append(".")
n += 1
return data_list
def get_the_data():
#read the test puzzle input
theData = open('day20_test_puzzle_input.txt', 'r')
#read the puzzle input
#theData = open('day20_puzzle_input.txt', 'r')
#move data into a list - read a line and remove lineshift
data_list = []
rows = 0 # in image
firstRow = True
emptyRows = 5
#process each row in the data
for element in theData:
elementTrimmed = element.strip()
if firstRow:
image_algo = elementTrimmed
firstRow = False
elif elementTrimmed != "":
#add X empty rows and X empty cols before or after data; to simulate the infinity outwards of the grid
if rows == 0:
data_list = add_rows(data_list, emptyRows, len(elementTrimmed)+emptyRows*2) # *2 -> at the beginning and end of the line
rows += 1 #one more row in input image
#add each single char to a list - reformat to numpy array later
i = 0
while i < len(elementTrimmed):
#add 5 empty cols before the data; to simulate the infinity outwards of the grid
if i == 0:
data_list = add_cols(data_list, emptyRows)
data_list.append(elementTrimmed[i])
i += 1
#add 5 empty cols after the data; to simulate the infinity outwards of the grid
if i == len(elementTrimmed):
data_list = add_cols(data_list, emptyRows)
#add 5 empty rows and 5 empty cols before or after data; to simulate the infinity outwards of the grid
data_list = add_rows(data_list, emptyRows, len(elementTrimmed)+emptyRows*2)
maxCols = len(elementTrimmed) + emptyRows*2
maxRows = rows + emptyRows*2
#create a numpy array
input_image = np.array(data_list, dtype="str").reshape(maxRows, maxCols)
return input_image, image_algo
def start_the_engine():
#get the data and read them into a list
input_image, image_algo = get_the_data()
#process the data and return the answer
valueX = process_the_data(input_image, image_algo)
# Next, you need to
print('\nHow many pixels are lit in the resulting image ->', valueX, '\n')
return
#let's start
if __name__ == '__main__':
clear_console()
start_the_engine() | clear_console |
general.actions.js | import { SAVE_USER_DATA_GENERAL } from 'containers/SignIn/signin.constants';
| export const saveUserData = payload => ({ type: SAVE_USER_DATA_GENERAL, payload });
export const CLEARSTORE = () => ({ type: 'CLEARSTORE' }); |
|
tc_br_models_runner.py | import pandas as pd
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from modules.utils import plots
from modules.utils import firefox_dataset_p2 as fd
from modules.utils import tokenizers as tok
from modules.utils import aux_functions
from modules.models.lda import LDA
from modules.models.lsi import LSI
from modules.models.bm25 import BM_25
from modules.models.wordvec import WordVec_BasedModel
from modules.models.zeror import ZeroR_Model
from modules.models.vsm import VSM
import modules.models.model_hyperps as mh
class TC_BR_Models_Hyperp:
@staticmethod
def get_lsi_model_hyperp():
return {
mh.LSI_Model_Hyperp.SVD_MODEL_N_COMPONENTS.value: 20,
mh.LSI_Model_Hyperp.VECTORIZER_NGRAM_RANGE.value: (1,1),
mh.LSI_Model_Hyperp.VECTORIZER_MAX_FEATURES.value: 400,
mh.LSI_Model_Hyperp.VECTORIZER.value : TfidfVectorizer(stop_words='english', use_idf=True, smooth_idf=True),
mh.LSI_Model_Hyperp.VECTORIZER_TOKENIZER.value : tok.WordNetBased_LemmaTokenizer()
}
@staticmethod
def get_lda_model_hyperp():
return {
mh.LDA_Model_Hyperp.LDA_MODEL_N_COMPONENTS.value: 20,
mh.LDA_Model_Hyperp.LDA_MODEL_RANDOM_STATE.value : 2,
mh.LDA_Model_Hyperp.VECTORIZER_NGRAM_RANGE.value: (1,1),
mh.LDA_Model_Hyperp.VECTORIZER_MAX_FEATURES.value: 200,
mh.LDA_Model_Hyperp.VECTORIZER.value : TfidfVectorizer(stop_words='english', use_idf=True, smooth_idf=True),
mh.LDA_Model_Hyperp.VECTORIZER_TOKENIZER.value : tok.PorterStemmerBased_Tokenizer()
}
@staticmethod
def get_bm25_model_hyperp():
return {
mh.BM25_Model_Hyperp.TOKENIZER.value : tok.PorterStemmerBased_Tokenizer()
}
@staticmethod
def get_w2v_model_hyperp():
return {
mh.WordVec_Model_Hyperp.TOKENIZER.value : tok.PorterStemmerBased_Tokenizer(),
mh.WordVec_Model_Hyperp.WORD_EMBEDDING.value : 'CC_BASED',
mh.WordVec_Model_Hyperp.GEN_NAME.value : 'wordvector'
}
@staticmethod
def get_cust_w2v_model_hyperp():
return {
mh.WordVec_Model_Hyperp.TOKENIZER.value : tok.PorterStemmerBased_Tokenizer(),
mh.WordVec_Model_Hyperp.WORD_EMBEDDING.value : 'CUSTOMIZED',
mh.WordVec_Model_Hyperp.GEN_NAME.value : 'cust_wordvector'
}
@staticmethod
def get_vsm_model_hyperp():
return {
mh.VSM_Model_Hyperp.VECTORIZER_NGRAM_RANGE.value: (1,1),
mh.VSM_Model_Hyperp.VECTORIZER_MAX_FEATURES.value: 400,
mh.VSM_Model_Hyperp.VECTORIZER.value : TfidfVectorizer(stop_words='english', use_idf=True, smooth_idf=True),
mh.VSM_Model_Hyperp.VECTORIZER_TOKENIZER.value : tok.WordNetBased_LemmaTokenizer()
}
class TC_BR_Runner:
def __init__(self, testcases=pd.DataFrame(), bugreports=pd.DataFrame()):
self.test_cases_df = None
self.bug_reports_df = None
self.corpus = None
self.query = None
self.test_cases_names = None
self.bug_reports_names = None
self.set_basic_params(testcases, bugreports)
def set_basic_params(self, testcases, bugreports):
if testcases.empty:
self.test_cases_df = fd.Datasets.read_testcases_df()
else:
self.test_cases_df = testcases
if bugreports.empty:
self.bug_reports_df = fd.Datasets.read_selected_bugreports_df()
else:
self.bug_reports_df = bugreports
self.corpus = self.test_cases_df.tc_desc
self.query = self.bug_reports_df.br_desc
self.test_cases_names = self.test_cases_df.TC_Number
self.bug_reports_names = self.bug_reports_df.Bug_Number
def run_lsi_model(self, lsi_hyperp=None):
print("Running LSI Model ------")
if lsi_hyperp == None:
lsi_hyperp = TC_BR_Models_Hyperp.get_lsi_model_hyperp()
lsi_model = LSI(**lsi_hyperp)
lsi_model.set_name('LSI_Model_TC_BR')
lsi_model.recover_links(self.corpus, self.query, self.test_cases_names, self.bug_reports_names)
return lsi_model
def run_lda_model(self, lda_hyperp=None):
|
def run_bm25_model(self, bm25_hyperp=None):
print("Running BM25 Model -----")
if bm25_hyperp == None:
bm25_hyperp = TC_BR_Models_Hyperp.get_bm25_model_hyperp()
bm25_model = BM_25(**bm25_hyperp)
bm25_model.set_name('BM25_Model_TC_BR')
bm25_model.recover_links(self.corpus, self.query, self.test_cases_names, self.bug_reports_names)
return bm25_model
def run_word2vec_model(self, wv_hyperp=None):
print("Running W2V Model ------")
if wv_hyperp == None:
wv_hyperp = TC_BR_Models_Hyperp.get_w2v_model_hyperp()
wv_model = WordVec_BasedModel(**wv_hyperp)
wv_model.set_name('WordVec_Model_TC_BR')
wv_model.recover_links(self.corpus, self.query, self.test_cases_names, self.bug_reports_names)
return wv_model
def run_cust_word2vec_model(self, wv_hyperp=None):
print("Running Customized W2V model -----")
if wv_hyperp == None:
wv_hyperp = TC_BR_Models_Hyperp.get_cust_w2v_model_hyperp()
wv_model = WordVec_BasedModel(**wv_hyperp)
wv_model.set_name('Customized_WordVec_Model_TC_BR')
wv_model.recover_links(self.corpus, self.query, self.test_cases_names, self.bug_reports_names)
return wv_model
def run_zeror_model(self, zeror_hyperp=None):
print("Running ZeroR model -----")
oracle = fd.Tc_BR_Oracles.read_oracle_expert_volunteers_intersec_df()
zeror_model = ZeroR_Model(oracle)
zeror_model.set_name('ZeroR_Model_TC_BR')
zeror_model.recover_links()
return zeror_model
def run_vsm_model(self, vsm_hyperp=None):
print('Running VSM model -----')
if vsm_hyperp == None:
vsm_hyperp = TC_BR_Models_Hyperp.get_vsm_model_hyperp()
vsm_model = VSM(**vsm_hyperp)
vsm_model.set_name('VSM_Model_TC_BR')
vsm_model.recover_links(self.corpus, self.query, self.test_cases_names, self.bug_reports_names)
return vsm_model | print("Running LDA Model -----")
if lda_hyperp == None:
lda_hyperp = TC_BR_Models_Hyperp.get_lda_model_hyperp()
lda_model = LDA(**lda_hyperp)
lda_model.set_name('LDA_Model_TC_BR')
lda_model.recover_links(self.corpus, self.query, self.test_cases_names, self.bug_reports_names)
return lda_model |
store_redis.go | /*
Copyright 2015 All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package store
import (
"net/url"
"time"
"github.com/XiaYinchang/nginx-ingress-keycloak-auth/pkg/common"
redis "gopkg.in/redis.v4"
)
type redisStore struct {
client *redis.Client
}
// newRedisStore creates a new redis store
func | (location *url.URL) (common.Storage, error) {
// step: get any password
password := ""
if location.User != nil {
password, _ = location.User.Password()
}
// step: parse the url notation
client := redis.NewClient(&redis.Options{
Addr: location.Host,
DB: 0,
Password: password,
})
return redisStore{
client: client,
}, nil
}
// Set adds a token to the store
func (r redisStore) Set(key, value string) error {
if err := r.client.Set(key, value, time.Duration(0)); err.Err() != nil {
return err.Err()
}
return nil
}
// Get retrieves a token from the store
func (r redisStore) Get(key string) (string, error) {
result := r.client.Get(key)
if result.Err() != nil {
return "", result.Err()
}
return result.Val(), nil
}
// Delete removes the key
func (r redisStore) Delete(key string) error {
return r.client.Del(key).Err()
}
// Close closes any open resources
func (r redisStore) Close() error {
if r.client != nil {
return r.client.Close()
}
return nil
}
| newRedisStore |
Player.js | class Player {
constructor(x, y, radius, color) {
this.x = x;
this.y = y;
this.radius = radius; | export default Player; | this.color = color;
}
};
|
logger.py | import logging
from mmcv.utils import get_logger
def | (log_file=None, log_level=logging.INFO):
"""Get root logger.
Args:
log_file (str, optional): File path of log. Defaults to None.
log_level (int, optional): The level of logger.
Defaults to logging.INFO.
Returns:
:obj:`logging.Logger`: The obtained logger
"""
logger = get_logger(name='mmcap', log_file=log_file, log_level=log_level)
return logger
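# Illustrative usage (assumed, not part of the original module):
#   logger = get_root_logger(log_file='work_dirs/run.log', log_level=logging.DEBUG)
#   logger.info('training started')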
| get_root_logger |
addresses.js | 'use strict';
var bitcore = require('bitcore-lib-crown');
var async = require('async');
var TxController = require('./transactions');
var Common = require('./common');
function AddressController(node) {
this.node = node;
this.txController = new TxController(node);
this.common = new Common({log: this.node.log});
}
AddressController.prototype.show = function(req, res) {
var self = this;
var options = {
noTxList: parseInt(req.query.noTxList)
};
if (req.query.from && req.query.to) {
options.from = parseInt(req.query.from);
options.to = parseInt(req.query.to);
}
this.getAddressSummary(req.addr, options, function(err, data) {
if(err) {
return self.common.handleErrors(err, res);
}
res.jsonp(data);
});
};
AddressController.prototype.balance = function(req, res) {
this.addressSummarySubQuery(req, res, 'balanceSat');
};
AddressController.prototype.totalReceived = function(req, res) {
this.addressSummarySubQuery(req, res, 'totalReceivedSat');
};
AddressController.prototype.totalSent = function(req, res) {
this.addressSummarySubQuery(req, res, 'totalSentSat');
};
AddressController.prototype.unconfirmedBalance = function(req, res) {
this.addressSummarySubQuery(req, res, 'unconfirmedBalanceSat');
};
AddressController.prototype.addressSummarySubQuery = function(req, res, param) {
var self = this;
this.getAddressSummary(req.addr, {}, function(err, data) {
if(err) {
return self.common.handleErrors(err, res);
}
res.jsonp(data[param]);
});
};
AddressController.prototype.getAddressSummary = function(address, options, callback) {
this.node.getAddressSummary(address, options, function(err, summary) {
if(err) {
return callback(err);
}
var transformed = {
addrStr: address,
balance: summary.balance / 1e8,
balanceSat: summary.balance,
totalReceived: summary.totalReceived / 1e8,
totalReceivedSat: summary.totalReceived,
totalSent: summary.totalSpent / 1e8,
totalSentSat: summary.totalSpent,
unconfirmedBalance: summary.unconfirmedBalance / 1e8,
unconfirmedBalanceSat: summary.unconfirmedBalance,
unconfirmedTxApperances: summary.unconfirmedAppearances, // misspelling - ew
txApperances: summary.appearances, // yuck
transactions: summary.txids
};
callback(null, transformed);
});
};
AddressController.prototype.checkAddr = function(req, res, next) {
req.addr = req.params.addr;
this.check(req, res, next, [req.addr]);
};
AddressController.prototype.checkAddrs = function(req, res, next) {
if(req.body.addrs) {
req.addrs = req.body.addrs.split(',');
} else {
req.addrs = req.params.addrs.split(',');
}
this.check(req, res, next, req.addrs);
};
AddressController.prototype.check = function(req, res, next, addresses) {
var self = this;
if(!addresses.length || !addresses[0]) {
return self.common.handleErrors({
message: 'Must include address',
code: 1
}, res);
}
for(var i = 0; i < addresses.length; i++) {
try {
var a = new bitcore.Address(addresses[i]);
} catch(e) {
return self.common.handleErrors({
message: 'Invalid address: ' + e.message,
code: 1
}, res);
}
}
next();
};
AddressController.prototype.utxo = function(req, res) {
var self = this;
this.node.getAddressUnspentOutputs(req.addr, {}, function(err, utxos) {
if(err) {
return self.common.handleErrors(err, res);
} else if (!utxos.length) {
return res.jsonp([]); | }
res.jsonp(utxos.map(self.transformUtxo.bind(self)));
});
};
AddressController.prototype.multiutxo = function(req, res) {
var self = this;
this.node.getAddressUnspentOutputs(req.addrs, true, function(err, utxos) {
if(err && err.code === -5) {
return res.jsonp([]);
} else if(err) {
return self.common.handleErrors(err, res);
}
res.jsonp(utxos.map(self.transformUtxo.bind(self)));
});
};
AddressController.prototype.transformUtxo = function(utxoArg) {
var utxo = {
address: utxoArg.address,
txid: utxoArg.txid,
vout: utxoArg.outputIndex,
scriptPubKey: utxoArg.script,
amount: utxoArg.satoshis / 1e8,
satoshis: utxoArg.satoshis
};
if (utxoArg.height && utxoArg.height > 0) {
utxo.height = utxoArg.height;
utxo.confirmations = this.node.services.bitcoind.height - utxoArg.height + 1;
} else {
utxo.confirmations = 0;
}
if (utxoArg.timestamp) {
utxo.ts = utxoArg.timestamp;
}
return utxo;
};
AddressController.prototype._getTransformOptions = function(req) {
return {
noAsm: parseInt(req.query.noAsm) ? true : false,
noScriptSig: parseInt(req.query.noScriptSig) ? true : false,
noSpent: parseInt(req.query.noSpent) ? true : false
};
};
AddressController.prototype.multitxs = function(req, res, next) {
var self = this;
var options = {
from: parseInt(req.query.from) || parseInt(req.body.from) || 0
};
options.to = parseInt(req.query.to) || parseInt(req.body.to) || parseInt(options.from) + 10;
self.node.getAddressHistory(req.addrs, options, function(err, result) {
if(err) {
return self.common.handleErrors(err, res);
}
var transformOptions = self._getTransformOptions(req);
self.transformAddressHistoryForMultiTxs(result.items, transformOptions, function(err, items) {
if (err) {
return self.common.handleErrors(err, res);
}
res.jsonp({
totalItems: result.totalCount,
from: options.from,
to: Math.min(options.to, result.totalCount),
items: items
});
});
});
};
AddressController.prototype.transformAddressHistoryForMultiTxs = function(txinfos, options, callback) {
var self = this;
var items = txinfos.map(function(txinfo) {
return txinfo.tx;
}).filter(function(value, index, self) {
return self.indexOf(value) === index;
});
async.map(
items,
function(item, next) {
self.txController.transformTransaction(item, options, next);
},
callback
);
};
module.exports = AddressController; | |
solve.py | # Angr script written by other people
import angr
import claripy
FLAG_LEN = 29
STDIN_FD = 0
# base_addr = 0x100000 # To match addresses to Ghidra
base_addr = 0
proj = angr.Project("./attachments/hotel_key_puzzle", main_opts={'base_addr': base_addr})
flag_chars = [claripy.BVS('sun{%d}' % i, 8) for i in range(FLAG_LEN)]
flag = claripy.Concat( *flag_chars + [claripy.BVV(b'\n')]) # Add \n for scanf() to accept the input
state = proj.factory.full_init_state(
args=['./attachments/hotel_key_puzzle'],
add_options=angr.options.unicorn,
stdin=flag,
)
# Add constraints that all characters are printable
for k in flag_chars:
state.solver.add(k >= ord('!'))
state.solver.add(k <= ord('~'))
simgr = proj.factory.simulation_manager(state) | simgr.explore(find=find_addr, avoid=avoid_addr)
if (len(simgr.found) > 0):
for found in simgr.found:
print(found.posix.dumps(STDIN_FD).decode('utf-8').strip()) | find_addr = 0x22ba # SUCCESS
avoid_addr = 0x22c8 # FAILURE |
release.js | /**
Publish a release version
1. TODO: check that the working branch is clean
2. TODO: ask for the target branch, switch to it, and pull the latest code
3. Ask for the version and confirm
4. Bump the version and add a tag
5. Push to the remote
*/
const execa = require('execa')
const semver = require('semver')
const inquirer = require('inquirer')
const minimist = require('minimist')
const cliOptions = minimist(process.argv)
const release = async () => {
// Check that the working branch is clean
// Ask for the target branch, switch to it, and pull the latest code
// Ask for the version and confirm `${process.cwd()}/package.json`
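// A minimal sketch for the two TODOs above (illustrative only; assumes the execa and inquirer imports already used in this script):
// const { stdout } = await execa('git', ['status', '--porcelain'])
// if (stdout.trim()) throw new Error('Working tree is not clean; commit or stash changes first')
// const { branch } = await inquirer.prompt([{ name: 'branch', message: 'Release from branch:', type: 'input', default: 'master' }])
// await execa('git', ['checkout', branch], { stdio: 'inherit' })
// await execa('git', ['pull'], { stdio: 'inherit' })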
const curVersion = require('../package.json').version;
console.log(`Current version: ${curVersion}`)
const bumps = ['patch', 'minor', 'major', 'prerelease']
const versions = {}
bumps.forEach(b => { versions[b] = semver.inc(curVersion, b) })
const bumpChoices = bumps.map(b => ({ name: `${b} (${versions[b]})`, value: b }))
const { bump, customVersion } = await inquirer.prompt([
{
name: 'bump',
message: 'Select release type:',
type: 'list',
choices: [
...bumpChoices,
{ name: 'custom', value: 'custom' }
]
},
{
name: 'customVersion',
message: 'Input version:',
type: 'input',
when: answers => answers.bump === 'custom'
}
])
const version = customVersion || versions[bump]
const { yes } = await inquirer.prompt([{
name: 'yes',
message: `Confirm releasing ${version}?`,
type: 'confirm'
}])
if (yes) {
try {
// update tag
await execa('npm', ['version', version], { stdio: 'inherit' })
// push remote
await execa('git', ['push'], { stdio: 'inherit' })
await execa('git', ['push', 'origin', `refs/tags/v${version}`], { stdio: 'inherit' })
} catch (e) {
}
}
// const releaseType = semver.diff(curVersion, version)
}
release().catch(err => {
console.error(err) | process.exit(1)
}) |
|
exceptions.py | """
exceptions
Created by: Martin Sicho
On: 7/23/20, 10:08 AM
"""
import json
import traceback
class GenUIException(Exception):
| def __init__(self, original, *args, **kwargs):
super().__init__(*args)
self.original = original
def getData(self):
return ''
def __repr__(self):
return self.asJSON()
def asJSON(self):
return json.dumps({
"original" : str(type(self.original)) if self.original else '',
"current" : str(type(self)),
"reprOrig" : repr(self.original) if self.original else '',
"tracebacks" : {
"original" : traceback.extract_tb(self.original.__traceback__).format() if self.original else '',
"current" : traceback.extract_tb(self.__traceback__).format()
},
"messages" : {
"original" : [x for x in self.original.args] if self.original else [],
"current" : [x for x in self.args]
},
"data" : self.getData()
}) |
|
CustomButton.js | import React from "react";
import { Button } from "antd";
const buttonStyles = {
red: {
backgroundColor: "#ffa39e"
},
green: { | };
export default ({ theme, icon, label, action }) => {
const butTheme = buttonStyles[theme] ? buttonStyles[theme] : null;
const butIcon = icon ? icon : null;
return (
<Button style={butTheme} icon={butIcon} onClick={action}>
{label}
</Button>
);
}; | backgroundColor: "#eaff8f"
} |
settings.py | TELEGRAM_TOKEN = os.environ['TELEGRAM_TOKEN']
DATABASE = {
'HOST': os.getenv('DB_PORT_3306_TCP_ADDR', 'localhost'),
'USER': os.getenv('DB_MYSQL_USER', 'root'),
'PASSWORD': os.getenv('DB_MYSQL_PASSWORD', ''),
'NAME': 'aurora',
} | import os
|
|
PlayVideo.go | package main
import (
"time"
castv2 "github.com/AndreasAbdi/go-castv2"
)
// A simple example of how to discover devices, play media on them, and quit the running application
func | () {
devices := make(chan *castv2.Device, 100)
castv2.FindDevices(time.Second*5, devices)
for device := range devices {
device.PlayMedia("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4", "video/mp4")
device.QuitApplication(time.Second * 5)
}
}
| main |
sentry_client_impl.rs | use super::{message_decoder, messages::*, sentry_address::SentryAddress, sentry_client::*};
use async_trait::async_trait;
use ethereum_interfaces::{sentry as grpc_sentry, types as grpc_types};
use futures_core::Stream;
use std::pin::Pin;
use tokio_stream::StreamExt;
use tracing::*;
#[derive(Debug)]
pub struct SentryClientImpl {
client: grpc_sentry::sentry_client::SentryClient<tonic::transport::channel::Channel>,
}
impl SentryClientImpl {
pub async fn new(addr: SentryAddress) -> anyhow::Result<Self> {
info!("SentryClient connecting to {}...", addr.addr);
let client = grpc_sentry::sentry_client::SentryClient::connect(addr.addr).await?;
Ok(SentryClientImpl { client })
}
}
#[async_trait]
impl SentryClient for SentryClientImpl {
async fn set_status(&mut self, status: Status) -> anyhow::Result<()> {
let forks_block_numbers = status
.chain_fork_config
.fork_block_numbers()
.iter()
.map(|num| num.0)
.collect();
let fork_data = grpc_sentry::Forks {
genesis: Some(status.chain_fork_config.genesis_block_hash().into()),
forks: forks_block_numbers,
};
let status_data = grpc_sentry::StatusData {
network_id: status.chain_fork_config.network_id().0,
total_difficulty: Some(grpc_types::H256::from(status.total_difficulty)),
best_hash: Some(grpc_types::H256::from(status.best_hash)),
fork_data: Some(fork_data),
max_block: status.max_block.0,
};
let request = tonic::Request::new(status_data);
let response = self.client.set_status(request).await?;
let reply: grpc_sentry::SetStatusReply = response.into_inner();
debug!("SentryClient set_status replied with: {:?}", reply);
return Ok(());
}
async fn penalize_peer(&mut self, peer_id: PeerId) -> anyhow::Result<()> {
let penalize_peer_request = grpc_sentry::PenalizePeerRequest {
peer_id: Some(grpc_types::H512::from(peer_id)),
penalty: grpc_sentry::PenaltyKind::Kick as i32,
};
let request = tonic::Request::new(penalize_peer_request);
self.client.penalize_peer(request).await?;
Ok(())
}
async fn send_message(
&mut self,
message: Message,
peer_filter: PeerFilter,
) -> anyhow::Result<u32> {
let message_id = message.eth_id();
let message_data = grpc_sentry::OutboundMessageData {
id: grpc_sentry::MessageId::from(message_id) as i32,
data: rlp::encode(&message).into(),
};
let response = match peer_filter {
PeerFilter::MinBlock(min_block) => {
let request = grpc_sentry::SendMessageByMinBlockRequest {
data: Some(message_data),
min_block,
};
self.client
.send_message_by_min_block(tonic::Request::new(request))
.await?
}
PeerFilter::PeerId(peer_id) => {
let request = grpc_sentry::SendMessageByIdRequest {
data: Some(message_data),
peer_id: Some(grpc_types::H512::from(peer_id)),
};
self.client
.send_message_by_id(tonic::Request::new(request))
.await?
}
PeerFilter::Random(max_peers) => {
let request = grpc_sentry::SendMessageToRandomPeersRequest {
data: Some(message_data),
max_peers,
};
self.client
.send_message_to_random_peers(tonic::Request::new(request))
.await?
}
PeerFilter::All => {
let request = message_data;
self.client
.send_message_to_all(tonic::Request::new(request))
.await?
}
};
let sent_peers: grpc_sentry::SentPeers = response.into_inner();
debug!(
"SentryClient send_message sent {:?} to: {:?}",
message.eth_id(),
sent_peers
);
let sent_peers_count = sent_peers.peers.len() as u32;
return Ok(sent_peers_count);
}
async fn receive_messages(
&mut self,
filter_ids: &[EthMessageId],
) -> anyhow::Result<MessageFromPeerStream> {
let grpc_ids = filter_ids
.iter()
.map(|id| grpc_sentry::MessageId::from(*id) as i32)
.collect::<Vec<_>>();
let ids_request = grpc_sentry::MessagesRequest { ids: grpc_ids };
let request = tonic::Request::new(ids_request);
let response = self.client.messages(request).await?;
let tonic_stream: tonic::codec::Streaming<grpc_sentry::InboundMessage> =
response.into_inner();
let tonic_stream = tonic_stream_fuse_on_error(tonic_stream);
debug!("SentryClient receive_messages subscribed to incoming messages");
let stream = tonic_stream.map(|result: Result<grpc_sentry::InboundMessage, tonic::Status>| -> anyhow::Result<MessageFromPeer> {
match result {
Ok(inbound_message) => {
let grpc_message_id = grpc_sentry::MessageId::from_i32(inbound_message.id)
.ok_or_else(|| anyhow::format_err!("SentryClient receive_messages stream got an invalid MessageId {}", inbound_message.id))?;
let message_id = EthMessageId::try_from(grpc_message_id)?;
let grpc_peer_id: Option<grpc_types::H512> = inbound_message.peer_id;
let peer_id: Option<PeerId> = grpc_peer_id.map(ethereum_types::H512::from);
let message_bytes: bytes::Bytes = inbound_message.data;
let message = message_decoder::decode_rlp_message(message_id, message_bytes.as_ref())?;
let message_from_peer = MessageFromPeer {
message,
from_peer_id: peer_id,
};
debug!("SentryClient receive_messages received a message {:?} from {:?}",
message_from_peer.message.eth_id(),
message_from_peer.from_peer_id);
Ok(message_from_peer)
},
Err(status) => {
if status.message().ends_with("broken pipe") {
Err(anyhow::Error::new(std::io::Error::new(std::io::ErrorKind::BrokenPipe, status)))
} else {
Err(anyhow::Error::new(status))
}
}
}
});
Ok(Box::pin(stream))
}
}
fn tonic_stream_fuse_on_error<T: 'static + Send>(
mut tonic_stream: tonic::codec::Streaming<T>,
) -> Pin<Box<dyn Stream<Item = Result<T, tonic::Status>> + Send>> {
let stream = async_stream::stream! {
while let Some(result) = tonic_stream.next().await {
match result {
Ok(item) => {
yield Ok(item);
},
Err(status) => {
yield Err(status);
break;
},
}
}
};
Box::pin(stream)
}
impl From<EthMessageId> for grpc_sentry::MessageId {
fn | (id: EthMessageId) -> Self {
match id {
EthMessageId::Status => grpc_sentry::MessageId::Status66,
EthMessageId::NewBlockHashes => grpc_sentry::MessageId::NewBlockHashes66,
EthMessageId::Transactions => grpc_sentry::MessageId::Transactions66,
EthMessageId::GetBlockHeaders => grpc_sentry::MessageId::GetBlockHeaders66,
EthMessageId::BlockHeaders => grpc_sentry::MessageId::BlockHeaders66,
EthMessageId::GetBlockBodies => grpc_sentry::MessageId::GetBlockBodies66,
EthMessageId::BlockBodies => grpc_sentry::MessageId::BlockBodies66,
EthMessageId::NewBlock => grpc_sentry::MessageId::NewBlock66,
EthMessageId::NewPooledTransactionHashes => {
grpc_sentry::MessageId::NewPooledTransactionHashes66
}
EthMessageId::GetPooledTransactions => grpc_sentry::MessageId::GetPooledTransactions66,
EthMessageId::PooledTransactions => grpc_sentry::MessageId::PooledTransactions66,
EthMessageId::GetNodeData => grpc_sentry::MessageId::GetNodeData66,
EthMessageId::NodeData => grpc_sentry::MessageId::NodeData66,
EthMessageId::GetReceipts => grpc_sentry::MessageId::GetReceipts66,
EthMessageId::Receipts => grpc_sentry::MessageId::Receipts66,
}
}
}
impl TryFrom<grpc_sentry::MessageId> for EthMessageId {
type Error = anyhow::Error;
fn try_from(id: grpc_sentry::MessageId) -> anyhow::Result<Self> {
match id {
grpc_sentry::MessageId::Status66 => Ok(EthMessageId::Status),
grpc_sentry::MessageId::NewBlockHashes66 => Ok(EthMessageId::NewBlockHashes),
grpc_sentry::MessageId::Transactions66 => Ok(EthMessageId::Transactions),
grpc_sentry::MessageId::GetBlockHeaders66 => Ok(EthMessageId::GetBlockHeaders),
grpc_sentry::MessageId::BlockHeaders66 => Ok(EthMessageId::BlockHeaders),
grpc_sentry::MessageId::GetBlockBodies66 => Ok(EthMessageId::GetBlockBodies),
grpc_sentry::MessageId::BlockBodies66 => Ok(EthMessageId::BlockBodies),
grpc_sentry::MessageId::NewBlock66 => Ok(EthMessageId::NewBlock),
grpc_sentry::MessageId::NewPooledTransactionHashes66 => {
Ok(EthMessageId::NewPooledTransactionHashes)
}
grpc_sentry::MessageId::GetPooledTransactions66 => {
Ok(EthMessageId::GetPooledTransactions)
}
grpc_sentry::MessageId::PooledTransactions66 => Ok(EthMessageId::PooledTransactions),
grpc_sentry::MessageId::GetNodeData66 => Ok(EthMessageId::GetNodeData),
grpc_sentry::MessageId::NodeData66 => Ok(EthMessageId::NodeData),
grpc_sentry::MessageId::GetReceipts66 => Ok(EthMessageId::GetReceipts),
grpc_sentry::MessageId::Receipts66 => Ok(EthMessageId::Receipts),
_ => Err(anyhow::format_err!("unsupported MessageId '{:?}'", id)),
}
}
}
| from |
configs.go | package model
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"time"
"github.com/futurehomeno/edge-mill-adapter/utils"
"github.com/futurehomeno/fimpgo"
log "github.com/sirupsen/logrus"
)
const ServiceName = "mill"
type Configs struct {
path string
InstanceAddress string `json:"instance_address"`
MqttServerURI string `json:"mqtt_server_uri"`
MqttUsername string `json:"mqtt_server_username"`
MqttPassword string `json:"mqtt_server_password"`
MqttClientIdPrefix string `json:"mqtt_client_id_prefix"`
LogFile string `json:"log_file"`
LogLevel string `json:"log_level"`
LogFormat string `json:"log_format"`
WorkDir string `json:"-"`
ConfiguredAt string `json:"configured_at"`
ConfiguredBy string `json:"configured_by"`
Param1 bool `json:"param_1"`
Param2 string `json:"param_2"`
PollTimeMin string `json:"poll_time_min"`
Username string `json:"username"` // this should be moved
Password string `json:"password"` // this should be moved
Auth struct {
AuthorizationCode string `json:"authorization_code"` // this should be moved
AccessToken string `json:"access_token"` // this should be moved
RefreshToken string `json:"refresh_token"` // this should be moved
ExpireTime int64 `json:"expireTime"` // this should be moved
RefreshExpireTime int64 `json:"refresh_expireTime"` // this should be moved
}
ConnectionState string `json:"connection_state"`
Errors string `json:"errors"`
HubToken string `json:"token"`
UID string `json:"uid"`
}
func | (workDir string) *Configs {
conf := &Configs{WorkDir: workDir}
conf.path = filepath.Join(workDir, "data", "config.json")
if !utils.FileExists(conf.path) {
log.Info("Config file doesn't exist.Loading default config")
defaultConfigFile := filepath.Join(workDir, "defaults", "config.json")
err := utils.CopyFile(defaultConfigFile, conf.path)
if err != nil {
fmt.Print(err)
panic("Can't copy config file.")
}
}
return conf
}
func (cf *Configs) LoadFromFile() error {
configFileBody, err := ioutil.ReadFile(cf.path)
if err != nil {
return err
}
err = json.Unmarshal(configFileBody, cf)
if err != nil {
return err
}
return nil
}
func (cf *Configs) SaveToFile() error {
cf.ConfiguredBy = "auto"
cf.ConfiguredAt = time.Now().Format(time.RFC3339)
bpayload, err := json.Marshal(cf)
if err != nil {
return err
}
err = ioutil.WriteFile(cf.path, bpayload, 0664)
if err != nil {
return err
}
return err
}
func (cf *Configs) GetDataDir() string {
return filepath.Join(cf.WorkDir, "data")
}
func (cf *Configs) GetDefaultDir() string {
return filepath.Join(cf.WorkDir, "defaults")
}
func (cf *Configs) LoadDefaults() error {
configFile := filepath.Join(cf.WorkDir, "data", "config.json")
os.Remove(configFile)
log.Info("Config file doesn't exist.Loading default config")
defaultConfigFile := filepath.Join(cf.WorkDir, "defaults", "config.json")
return utils.CopyFile(defaultConfigFile, configFile)
}
func (cf *Configs) IsConfigured() bool {
if cf.Auth.AccessToken != "" {
return true
} else {
return false
}
}
func (cf *Configs) IsAuthenticated() bool {
if cf.Auth.AuthorizationCode != "" {
return true
} else {
return false
}
}
type ConfigReport struct {
OpStatus string `json:"op_status"`
AppState AppStates `json:"app_state"`
}
func (cf *Configs) GetHubToken(oldMsg *fimpgo.Message) (*fimpgo.Address, *fimpgo.FimpMessage, error) {
// mqt := fimpgo.MqttTransport{}
err := oldMsg.Payload.GetObjectValue(&cf)
if err != nil {
log.Error("Could not get object value")
return nil, nil, err
}
if cf.Username != "" && cf.Password != "" {
// Get hub token
val := map[string]interface{}{
"site_id": "",
"hub_id": "",
"auth_system": "heimdall",
}
msg := fimpgo.NewMessage("cmd.hub_auth.get_jwt", "auth-api", fimpgo.VTypeStrMap, val, nil, nil, nil)
msg.Source = "clbridge"
newadr, err := fimpgo.NewAddressFromString("pt:j1/mt:cmd/rt:cloud/rn:auth-api/ad:1")
if err != nil {
log.Debug("Could not send hub token request")
return nil, nil, err
}
return newadr, msg, nil
}
return nil, nil, err
}
| NewConfigs |
mv.rs | use castle::Castle;
use piece::*;
use square;
use square::Square;
use std::fmt;
// super-compressed move representation
// 0-5: from
// 6-11: to
// 12-15: flags
/*
FLAGS:
0001 1: double pawn push
0100 4: capture
0101 5: ep capture
1XXX : promotion
ALT LAYOUT
lower, upper
00 01 -> double pawn push
01 00 -> capture
01 01 -> ep capture
10 XX -> promotion (XX is piece to promote to)
11 XX -> promo-capture (XX is piece to promote to)
00 1X -> castle (X is castle type)
*/
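// Illustrative encodings of the layout above, derived from the constructors
// further down in this file (an assumption for illustration, not upstream documentation):
//   new_push(B2, B3)                     -> lower = B2,               upper = B3
//   new_capture_promotion(B7, B8, QUEEN) -> lower = B7 | 0b1100_0000, upper = B8 | (QUEEN << 6)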
const CASTLE_FLAG: u8 = 128;
const CAPTURE_FLAG: u8 = 64;
const PROMOTION_FLAG: u8 = 128;
const EP_CAPTURE_FLAG: u8 = 64;
pub const NULL_MOVE: Move = Move { upper: 0, lower: 0 };
/// Represents a move in a chess position. Uses a compact 16-bit representation
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Move {
lower: u8, // holds from square and castle and pawn flags
upper: u8, // holds to square and promotion and capture flags
}
impl Move {
#[inline]
pub fn from(&self) -> Square {
Square::new((self.lower & 63) as square::Internal)
}
#[inline]
pub fn to(&self) -> Square {
Square::new((self.upper & 63) as square::Internal)
}
#[inline]
pub fn promote_to(&self) -> Kind {
debug_assert!(!self.is_castle());
debug_assert!(self.is_promotion());
Kind(((self.upper as usize) & (!63)) >> 6)
}
/// Returns the absolute distance moved. Eg for a push from square 8 to square 24: |24 - 8| = 16
#[inline]
pub fn distance(&self) -> i32 {
debug_assert!(!self.is_castle());
(self.from().to_i32() - self.to().to_i32()).abs()
}
#[inline]
pub fn is_castle(&self) -> bool {
((self.upper & CASTLE_FLAG) != 0) && ((self.lower & (!63)) == 0)
}
#[inline]
pub fn is_capture(&self) -> bool {
(self.lower & CAPTURE_FLAG) != 0
}
#[inline]
pub fn is_ep_capture(&self) -> bool {
((self.lower & (!63)) == CAPTURE_FLAG) && ((self.upper & (!63)) == EP_CAPTURE_FLAG)
}
#[inline]
pub fn is_promotion(&self) -> bool {
(self.lower & PROMOTION_FLAG) != 0
}
#[inline]
pub fn castle(&self) -> Castle {
debug_assert!(self.is_castle());
Castle::new(((self.upper & 64) >> 6) as usize)
}
pub fn to_string(&self) -> String {
if self.is_castle() {
return self.castle().pgn_string().to_string();
}
let mut s = String::new();
s += self.from().to_str();
if self.is_capture() {
s.push('x');
}
s += self.to().to_str();
if self.is_promotion() {
s.push('=');
s.push(self.promote_to().to_char());
}
if self.is_ep_capture() {
s += "e.p."
}
s
}
pub fn new_move(from: Square, to: Square, is_capture: bool) -> Move {
Move {
lower: from.to_u8() | if is_capture { CAPTURE_FLAG } else { 0 },
upper: to.to_u8(),
}
}
#[inline]
pub const fn new_push(from: Square, to: Square) -> Move {
Move {
lower: from.to_u8(),
upper: to.to_u8(),
}
}
#[inline]
pub const fn new_capture(from: Square, to: Square) -> Move {
Move {
lower: from.to_u8() | CAPTURE_FLAG,
upper: to.to_u8(),
}
}
#[inline]
pub const fn new_castle(castle: Castle) -> Move {
Move {
lower: 0,
upper: CASTLE_FLAG | (castle.to_u8() << 6),
}
}
#[inline]
pub const fn new_promotion(from: Square, to: Square, promote_to: Kind) -> Move {
Move {
lower: from.to_u8() | PROMOTION_FLAG,
upper: to.to_u8() | (promote_to.to_u8() << 6),
}
}
#[inline]
pub const fn new_capture_promotion(from: Square, to: Square, promote_to: Kind) -> Move {
Move {
lower: from.to_u8() | PROMOTION_FLAG | CAPTURE_FLAG,
upper: to.to_u8() | (promote_to.to_u8() << 6),
}
}
#[inline]
pub const fn new_ep_capture(from: Square, to: Square) -> Move {
Move {
lower: from.to_u8() | CAPTURE_FLAG,
upper: to.to_u8() | EP_CAPTURE_FLAG,
}
}
}
impl fmt::Display for Move {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.to_string())
}
}
impl fmt::Debug for Move {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.to_string())
}
}
#[cfg(test)]
mod test {
use super::*;
use castle::*;
use square::*;
use std::mem;
#[test]
fn test_packed() {
assert_eq!(2, mem::size_of::<Move>());
}
#[test]
fn push() {
let mv = Move::new_push(B2, B3);
assert_eq!(mv.from(), B2);
assert_eq!(mv.to(), B3);
assert_eq!(mv.is_capture(), false);
assert_eq!(mv.is_castle(), false);
assert_eq!(mv.is_promotion(), false);
assert_eq!(mv.is_ep_capture(), false);
}
#[test]
fn capture() {
let mv = Move::new_capture(B2, B5);
assert_eq!(mv.from(), B2);
assert_eq!(mv.to(), B5);
assert_eq!(mv.is_capture(), true);
assert_eq!(mv.is_castle(), false);
assert_eq!(mv.is_promotion(), false);
assert_eq!(mv.is_ep_capture(), false);
}
#[test]
fn promotion() {
let mv = Move::new_promotion(B7, B8, KNIGHT);
assert_eq!(mv.from(), B7);
assert_eq!(mv.to(), B8);
assert_eq!(mv.promote_to(), KNIGHT);
assert_eq!(mv.is_capture(), false);
assert_eq!(mv.is_castle(), false);
assert_eq!(mv.is_promotion(), true);
assert_eq!(mv.is_ep_capture(), false);
}
#[test]
fn capture_promotion() |
#[test]
fn castle_queen_side() {
let mv = Move::new_castle(QUEEN_SIDE);
assert_eq!(mv.is_castle(), true);
assert_eq!(mv.castle(), QUEEN_SIDE);
assert_eq!(mv.is_capture(), false);
assert_eq!(mv.is_promotion(), false);
assert_eq!(mv.is_ep_capture(), false);
}
#[test]
fn castle_king_side() {
let mv = Move::new_castle(KING_SIDE);
assert_eq!(mv.is_castle(), true);
assert_eq!(mv.castle(), KING_SIDE);
assert_eq!(mv.is_capture(), false);
assert_eq!(mv.is_promotion(), false);
assert_eq!(mv.is_ep_capture(), false);
}
#[test]
fn new_ep_capture() {
let mv = Move::new_ep_capture(D4, C3);
assert_eq!(mv.from(), D4);
assert_eq!(mv.to(), C3);
assert_eq!(mv.is_capture(), true);
assert_eq!(mv.is_castle(), false);
assert_eq!(mv.is_promotion(), false);
assert_eq!(mv.is_ep_capture(), true);
}
#[test]
fn to_string() {
assert_eq!(Move::new_castle(KING_SIDE).to_string(), "O-O");
assert_eq!(Move::new_castle(QUEEN_SIDE).to_string(), "O-O-O");
assert_eq!(Move::new_push(B2, B3).to_string(), "b2b3");
assert_eq!(Move::new_push(B2, D5).to_string(), "b2d5");
assert_eq!(Move::new_capture(B2, B5).to_string(), "b2xb5");
assert_eq!(Move::new_capture(B2, B3).to_string(), "b2xb3");
assert_eq!(Move::new_promotion(B7, B8, QUEEN).to_string(), "b7b8=Q");
assert_eq!(
Move::new_capture_promotion(C7, B8, QUEEN).to_string(),
"c7xb8=Q"
);
assert_eq!(Move::new_ep_capture(D4, C3).to_string(), "d4xc3e.p.");
}
}
| {
let mv = Move::new_capture_promotion(B7, B8, QUEEN);
assert_eq!(mv.from(), B7);
assert_eq!(mv.to(), B8);
assert_eq!(mv.promote_to(), QUEEN);
assert_eq!(mv.is_capture(), true);
assert_eq!(mv.is_castle(), false);
assert_eq!(mv.is_promotion(), true);
assert_eq!(mv.is_ep_capture(), false);
} |