hexsha (stringlengths 40-40) | size (int64 4-1.05M) | content (stringlengths 4-1.05M) | avg_line_length (float64 1.33-100) | max_line_length (int64 1-1k) | alphanum_fraction (float64 0.25-1)
---|---|---|---|---|---|
0865844cc4f735adc442ac53f14240e31a10d357 | 6,617 | use algebra::curves::{
bls12_377::Bls12_377, edwards_bls12::EdwardsProjective as EdwardsBls,
edwards_sw6::EdwardsProjective as EdwardsSW, sw6::SW6,
};
use algebra::fields::bls12_377::{fr::Fr as Bls12_377Fr, fq::Fq as Bls12_377Fq};
use crypto_primitives::{
commitment::{blake2s::Blake2sCommitment, injective_map::PedersenCommCompressor},
crh::{
injective_map::{PedersenCRHCompressor, TECompressor},
pedersen::PedersenWindow,
},
nizk::Gm17,
merkle_tree::MerkleTreeConfig,
prf::blake2s::Blake2s,
};
use crypto_primitives::{
commitment::{
blake2s::constraints::Blake2sCommitmentGadget,
injective_map::constraints::PedersenCommitmentCompressorGadget,
},
crh::injective_map::constraints::{PedersenCRHCompressorGadget, TECompressorGadget},
prf::blake2s::constraints::Blake2sGadget,
nizk::gm17::constraints::Gm17VerifierGadget,
};
use r1cs_std::{
groups::curves::twisted_edwards::{
edwards_bls12::EdwardsBlsGadget, edwards_sw6::EdwardsSWGadget,
},
pairing::bls12_377::PairingGadget,
};
use crate::dpc::plain_dpc::{
core_checks_circuit::*, predicate::DPCPredicate, predicate_circuit::*, proof_check_circuit::*,
transaction::DPCTransaction, LocalData as DPCLocalData, PlainDPCComponents, DPC,
};
use crate::ledger::*;
pub const NUM_INPUT_RECORDS: usize = 2;
pub const NUM_OUTPUT_RECORDS: usize = 2;
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SnNonceWindow;
// `WINDOW_SIZE * NUM_WINDOWS` = NUM_INPUT_RECORDS * 2 * 256 + 8 + 256 bits
const SN_NONCE_SIZE_BITS: usize = NUM_INPUT_RECORDS * 2 * 256 + 8 + 256;
impl PedersenWindow for SnNonceWindow {
const WINDOW_SIZE: usize = SN_NONCE_SIZE_BITS / 8;
const NUM_WINDOWS: usize = 8;
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct PredVkHashWindow;
impl PedersenWindow for PredVkHashWindow {
const WINDOW_SIZE: usize = 248;
const NUM_WINDOWS: usize = 38;
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct LocalDataWindow;
impl PedersenWindow for LocalDataWindow {
const WINDOW_SIZE: usize = 248;
const NUM_WINDOWS: usize = 30;
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct TwoToOneWindow;
// `WINDOW_SIZE * NUM_WINDOWS` = 2 * 256 bits
impl PedersenWindow for TwoToOneWindow {
const WINDOW_SIZE: usize = 128;
const NUM_WINDOWS: usize = 4;
}
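// Illustrative sanity check, not part of the original instantiation: the window
// constants above are meant to satisfy the `WINDOW_SIZE * NUM_WINDOWS` = input
// bit-length relationship spelled out in the comments, so we assert it directly.
#[cfg(test)]
mod window_size_checks {
    use super::*;

    #[test]
    fn window_sizes_cover_expected_bit_lengths() {
        // SnNonceWindow hashes NUM_INPUT_RECORDS * 2 * 256 + 8 + 256 bits.
        assert_eq!(
            SnNonceWindow::WINDOW_SIZE * SnNonceWindow::NUM_WINDOWS,
            SN_NONCE_SIZE_BITS
        );
        // TwoToOneWindow hashes two 256-bit digests.
        assert_eq!(
            TwoToOneWindow::WINDOW_SIZE * TwoToOneWindow::NUM_WINDOWS,
            2 * 256
        );
    }
}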
pub struct CommitmentMerkleTreeConfig;
impl MerkleTreeConfig for CommitmentMerkleTreeConfig {
const HEIGHT: usize = 32;
type H = MerkleTreeCRH;
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct RecordWindow;
impl PedersenWindow for RecordWindow {
const WINDOW_SIZE: usize = 225;
const NUM_WINDOWS: usize = 8;
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct AddressWindow;
impl PedersenWindow for AddressWindow {
const WINDOW_SIZE: usize = 128;
const NUM_WINDOWS: usize = 4;
}
pub struct Components;
impl PlainDPCComponents for Components {
const NUM_INPUT_RECORDS: usize = NUM_INPUT_RECORDS;
const NUM_OUTPUT_RECORDS: usize = NUM_OUTPUT_RECORDS;
type CoreCheckF = CoreCheckF;
type ProofCheckF = ProofCheckF;
type MerkleTreeConfig = CommitmentMerkleTreeConfig;
type MerkleTree_HGadget = MerkleTreeCRHGadget;
type AddrC = AddressComm;
type RecC = RecordComm;
type AddrCGadget = AddressCommGadget;
type RecCGadget = RecordCommGadget;
type SnNonceH = SnNonceCRH;
type SnNonceHGadget = SnNonceCRHGadget;
type MainNIZK = CoreCheckNIZK;
type ProofCheckNIZK = ProofCheckNIZK;
type P = PRF;
type PGadget = PRFGadget;
type PredicateNIZK = PredicateNIZK<Self>;
type PredicateNIZKGadget = PredicateNIZKGadget;
type PredVkH = PredVkCRH;
type PredVkHGadget = PredVkCRHGadget;
type PredVkComm = PredicateComm;
type PredVkCommGadget = PredicateCommGadget;
type LocalDataComm = LocalDataComm;
type LocalDataCommGadget = LocalDataCommGadget;
}
// Native primitives
pub type EdwardsCompressor = TECompressor;
pub type CoreCheckPairing = Bls12_377;
pub type ProofCheckPairing = SW6;
pub type CoreCheckF = Bls12_377Fr;
pub type ProofCheckF = Bls12_377Fq;
pub type AddressComm = PedersenCommCompressor<EdwardsBls, EdwardsCompressor, AddressWindow>;
pub type RecordComm = PedersenCommCompressor<EdwardsBls, EdwardsCompressor, RecordWindow>;
pub type PredicateComm = Blake2sCommitment;
pub type LocalDataComm = PedersenCommCompressor<EdwardsBls, EdwardsCompressor, LocalDataWindow>;
pub type MerkleTreeCRH = PedersenCRHCompressor<EdwardsBls, EdwardsCompressor, TwoToOneWindow>;
pub type SnNonceCRH = PedersenCRHCompressor<EdwardsBls, EdwardsCompressor, SnNonceWindow>;
pub type PredVkCRH = PedersenCRHCompressor<EdwardsSW, EdwardsCompressor, PredVkHashWindow>;
pub type Predicate = DPCPredicate<Components>;
pub type CoreCheckNIZK =
Gm17<CoreCheckPairing, CoreChecksCircuit<Components>, CoreChecksVerifierInput<Components>>;
pub type ProofCheckNIZK =
Gm17<ProofCheckPairing, ProofCheckCircuit<Components>, ProofCheckVerifierInput<Components>>;
pub type PredicateNIZK<C> = Gm17<CoreCheckPairing, EmptyPredicateCircuit<C>, PredicateLocalData<C>>;
pub type PRF = Blake2s;
// Gadgets
pub type EdwardsCompressorGadget = TECompressorGadget;
pub type RecordCommGadget = PedersenCommitmentCompressorGadget<
EdwardsBls,
EdwardsCompressor,
CoreCheckF,
EdwardsBlsGadget,
EdwardsCompressorGadget,
>;
pub type AddressCommGadget = PedersenCommitmentCompressorGadget<
EdwardsBls,
EdwardsCompressor,
CoreCheckF,
EdwardsBlsGadget,
EdwardsCompressorGadget,
>;
pub type PredicateCommGadget = Blake2sCommitmentGadget;
pub type LocalDataCommGadget = PedersenCommitmentCompressorGadget<
EdwardsBls,
EdwardsCompressor,
CoreCheckF,
EdwardsBlsGadget,
EdwardsCompressorGadget,
>;
pub type SnNonceCRHGadget = PedersenCRHCompressorGadget<
EdwardsBls,
EdwardsCompressor,
CoreCheckF,
EdwardsBlsGadget,
EdwardsCompressorGadget,
>;
pub type MerkleTreeCRHGadget = PedersenCRHCompressorGadget<
EdwardsBls,
EdwardsCompressor,
CoreCheckF,
EdwardsBlsGadget,
EdwardsCompressorGadget,
>;
pub type PredVkCRHGadget = PedersenCRHCompressorGadget<
EdwardsSW,
EdwardsCompressor,
ProofCheckF,
EdwardsSWGadget,
EdwardsCompressorGadget,
>;
pub type PRFGadget = Blake2sGadget;
pub type PredicateNIZKGadget = Gm17VerifierGadget<CoreCheckPairing, ProofCheckF, PairingGadget>;
//
pub type MerkleTreeIdealLedger = IdealLedger<Tx, CommitmentMerkleTreeConfig>;
pub type Tx = DPCTransaction<Components>;
pub type InstantiatedDPC = DPC<Components>;
pub type LocalData = DPCLocalData<Components>;
| 30.634259 | 100 | 0.762581 |
875394fac01d80ba0f29c34d8c954194f88b0184 | 705 | // Copyright 2018 Catallaxy
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate cryptoaudit_core as core;
pub mod config;
pub mod types;
pub use types::{ConfigMembers, GlobalConfig};
| 33.571429 | 75 | 0.751773 |
0eecd3884308e7efb7fd571decf34cf146e53463 | 4,878 | use schema::{Snowflake, SnowflakeExt};
use sdk::models::*;
use crate::{
ctrl::{auth::Authorization, perm::get_cached_room_permissions, Error, SearchMode},
web::gateway::Event,
ServerState,
};
use sdk::models::gateway::message::ServerMsg;
pub async fn trigger_typing(
state: ServerState,
auth: Authorization,
room_id: Snowflake,
) -> Result<(), Error> {
let permissions = get_cached_room_permissions(&state, auth.user_id, room_id).await?;
if !permissions.contains(RoomPermissions::SEND_MESSAGES) {
return Err(Error::NotFound);
}
let db = state.db.read.get().await?;
let row = db
.query_opt_cached_typed(
|| {
use schema::*;
use thorn::*;
tables! {
struct AggRoom {
PartyId: Rooms::PartyId,
}
struct AggRoles {
RoleIds: SNOWFLAKE_ARRAY,
}
}
let user_id_var = Var::at(Users::Id, 1);
let room_id_var = Var::at(Rooms::Id, 2);
Query::with()
.with(
AggRoom::as_query(
Query::select()
.expr(Rooms::PartyId.alias_to(AggRoom::PartyId))
.from_table::<Rooms>()
.and_where(Rooms::Id.equals(room_id_var)),
)
.exclude(),
)
.select()
.col(AggRoom::PartyId)
.col(PartyMember::Nickname)
.cols(&[Users::Username, Users::Discriminator, Users::Flags])
.col(AggRoles::RoleIds)
.from(
Users::left_join(
PartyMember::inner_join_table::<AggRoom>()
.on(PartyMember::PartyId.equals(AggRoom::PartyId)),
)
.on(PartyMember::UserId.equals(Users::Id))
.left_join(Lateral(AggRoles::as_query(
Query::select()
.expr(Builtin::array_agg(RoleMembers::RoleId).alias_to(AggRoles::RoleIds))
.from(
RoleMembers::inner_join_table::<Roles>().on(Roles::Id
.equals(RoleMembers::RoleId)
.and(Roles::PartyId.equals(AggRoom::PartyId))),
)
.and_where(RoleMembers::UserId.equals(Users::Id)),
)))
.on(true.lit()),
)
.and_where(Users::Id.equals(user_id_var))
},
&[&auth.user_id, &room_id],
)
.await?;
let row = match row {
None => return Ok(()),
Some(row) => row,
};
let party_id: Option<Snowflake> = row.try_get(0)?;
let user = User {
id: auth.user_id,
username: row.try_get(2)?,
discriminator: row.try_get(3)?,
flags: UserFlags::from_bits_truncate(row.try_get(4)?).publicize(),
email: None,
preferences: None,
status: None,
bio: None,
avatar: None,
};
match party_id {
Some(party_id) => {
let member = PartyMember {
nick: row.try_get(1)?,
user: Some(user),
roles: row.try_get(5)?,
presence: None,
flags: None,
};
let event = ServerMsg::new_typing_start(Box::new(events::TypingStart {
room: room_id,
user: auth.user_id,
party: Some(party_id),
member: Some(member),
}));
state
.gateway
.broadcast_event(Event::new(event, Some(room_id))?, party_id)
.await;
}
None => todo!("Typing in non-party rooms"),
}
Ok(())
}
/*
use thorn::*;
fn query() -> impl AnyQuery {
use schema::*;
let user_id_var = Var::at(Users::Id, 1);
let room_id_var = Var::at(Rooms::Id, 2);
Query::insert()
.into::<EventLog>()
.cols(&[EventLog::Code, EventLog::Id, EventLog::PartyId, EventLog::RoomId])
.query(
Query::select()
.from(Rooms::left_join_table::<Party>().on(Party::Id.equals(Rooms::PartyId)))
.expr(EventCode::TypingStarted)
.expr(user_id_var)
.expr(Party::Id)
.expr(Rooms::Id)
.and_where(Rooms::Id.equals(room_id_var))
.as_value(),
)
}
*/
| 31.882353 | 106 | 0.447109 |
bfa628752f2fff449e734f372c67b2f1104669f6 | 2,697 | pub mod anoncreds;
pub mod blob_storage;
pub mod cache;
pub mod crypto;
pub mod did;
pub mod ledger;
pub mod logger;
pub mod metrics;
pub mod non_secrets;
pub mod payments;
pub mod payments_v2;
pub mod pairwise;
pub mod pool;
pub mod vdr;
pub mod wallet;
#[cfg(feature = "cheqd")]
pub mod cheqd_ledger;
#[cfg(feature = "cheqd")]
pub mod cheqd_keys;
#[cfg(feature = "cheqd")]
pub mod cheqd_pool;
use indy_api_types::{errors::prelude::*, validation::Validatable, ErrorCode};
use indy_utils::ctypes;
use libc::c_char;
use crate::domain::IndyConfig;
/// Set libindy runtime configuration. Can be optionally called to change current params.
///
/// #Params
/// config: {
/// "crypto_thread_pool_size": Optional<int> - size of thread pool for the most expensive crypto operations. (4 by default)
/// "collect_backtrace": Optional<bool> - whether errors backtrace should be collected.
/// Capturing of backtrace can affect library performance.
/// NOTE: must be set before invocation of any other API functions.
/// }
///
/// #Errors
/// Common*
#[no_mangle]
pub extern "C" fn indy_set_runtime_config(config: *const c_char) -> ErrorCode {
debug!("indy_set_runtime_config > config {:?}", config);
check_useful_validatable_json!(config, ErrorCode::CommonInvalidParam1, IndyConfig);
crate::Locator::instance()
.config_controller
.set_runtime_config(config);
let res = ErrorCode::Success;
debug!("indy_set_runtime_config < {:?}", res);
res
}
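// Caller-side sketch (added for illustration, not part of the original file):
// the config is a plain JSON object passed as a NUL-terminated C string. Field
// names follow the documentation above; the concrete values are arbitrary.
//
//     use std::ffi::CString;
//
//     let config = CString::new(
//         r#"{"crypto_thread_pool_size": 8, "collect_backtrace": true}"#,
//     )
//     .unwrap();
//     // `config.as_ptr()` stays valid for the duration of the call.
//     let code = indy_set_runtime_config(config.as_ptr());
//     assert_eq!(code, ErrorCode::Success);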
/// Get details for last occurred error.
///
/// This function should be called in two places to handle both cases of error occurrence:
/// 1) synchronous - in the same application thread
/// 2) asynchronous - inside of function callback
///
/// NOTE: The error is stored until the next one occurs in the same execution thread or until the asynchronous callback finishes.
/// The returned pointer has the same lifetime.
///
/// #Params
/// * `error_json_p` - Reference that will contain error details (if any error has occurred before)
/// in the format:
/// {
/// "backtrace": Optional<str> - error backtrace.
/// Collecting of backtrace can be enabled by:
/// 1) setting environment variable `RUST_BACKTRACE=1`
/// 2) calling `indy_set_runtime_config` API function with `collect_backtrace: true`
/// "message": str - human-readable error description
/// }
///
#[no_mangle]
pub extern "C" fn indy_get_current_error(error_json_p: *mut *const c_char) {
debug!("indy_get_current_error > error_json_p {:?}", error_json_p);
let error = get_current_error_c_json();
unsafe { *error_json_p = error };
debug!("indy_get_current_error <");
}
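// Caller-side sketch (added for illustration, not part of the original file):
// read the out-pointer and copy the JSON immediately, since the documentation
// above notes the pointer is only valid until the next error in this thread or
// the end of the callback.
//
//     use std::ffi::CStr;
//     use std::ptr;
//
//     let mut error_json: *const c_char = ptr::null();
//     indy_get_current_error(&mut error_json);
//     if !error_json.is_null() {
//         // Copy into an owned String before making any further libindy calls.
//         let details = unsafe { CStr::from_ptr(error_json) }
//             .to_string_lossy()
//             .into_owned();
//         eprintln!("last libindy error: {}", details);
//     }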
| 31.729412 | 127 | 0.697442 |
64fe48b69c40605ec0adc58538ed7f6d776d56e9 | 2,025 |
//! Autogenerated weights for `indices`
//!
//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev
//! DATE: 2022-01-18, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]`
//! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("composable-dev"), DB CACHE: 128
// Executed Command:
// ./target/release/composable
// benchmark
// --chain=composable-dev
// --execution=wasm
// --wasm-execution=compiled
// --pallet=indices
// --extrinsic=*
// --steps=50
// --repeat=20
// --raw
// --output=runtime/composable/src/weights
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(unused_parens)]
#![allow(unused_imports)]
use frame_support::{traits::Get, weights::Weight};
use sp_std::marker::PhantomData;
/// Weight functions for `indices`.
pub struct WeightInfo<T>(PhantomData<T>);
impl<T: frame_system::Config> indices::WeightInfo for WeightInfo<T> {
// Storage: Indices Accounts (r:1 w:1)
fn claim() -> Weight {
(37_831_000 as Weight)
.saturating_add(T::DbWeight::get().reads(1 as Weight))
.saturating_add(T::DbWeight::get().writes(1 as Weight))
}
// Storage: Indices Accounts (r:1 w:1)
// Storage: System Account (r:1 w:1)
fn transfer() -> Weight {
(45_343_000 as Weight)
.saturating_add(T::DbWeight::get().reads(2 as Weight))
.saturating_add(T::DbWeight::get().writes(2 as Weight))
}
// Storage: Indices Accounts (r:1 w:1)
fn free() -> Weight {
(38_017_000 as Weight)
.saturating_add(T::DbWeight::get().reads(1 as Weight))
.saturating_add(T::DbWeight::get().writes(1 as Weight))
}
// Storage: Indices Accounts (r:1 w:1)
// Storage: System Account (r:1 w:1)
fn force_transfer() -> Weight {
(39_059_000 as Weight)
.saturating_add(T::DbWeight::get().reads(2 as Weight))
.saturating_add(T::DbWeight::get().writes(2 as Weight))
}
// Storage: Indices Accounts (r:1 w:1)
fn freeze() -> Weight {
(43_452_000 as Weight)
.saturating_add(T::DbWeight::get().reads(1 as Weight))
.saturating_add(T::DbWeight::get().writes(1 as Weight))
}
}
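// Worked example (illustrative, not autogenerated): the effective weight of a
// call is the measured base weight plus the runtime's configured per-database
// read/write weights. With purely hypothetical DB costs of 25_000_000 per read
// and 100_000_000 per write, `transfer()` would charge
//     45_343_000 + 2 * 25_000_000 + 2 * 100_000_000 = 295_343_000
// weight units; the real figures depend on the runtime's `DbWeight` constant.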
| 31.640625 | 97 | 0.677037 |
fed37b7231eb405c3491a7df2b3cae1d76370474 | 1,725 | pub fn compute() {
let mut stretch = stretch::Stretch::new();
let node0 = stretch
.new_node(
stretch::style::Style {
size: stretch::geometry::Size {
height: stretch::style::Dimension::Points(10f32),
..Default::default()
},
position: stretch::geometry::Rect {
top: stretch::style::Dimension::Points(15f32),
..Default::default()
},
..Default::default()
},
&[],
)
.unwrap();
let node1 = stretch
.new_node(
stretch::style::Style {
size: stretch::geometry::Size {
height: stretch::style::Dimension::Points(10f32),
..Default::default()
},
position: stretch::geometry::Rect {
top: stretch::style::Dimension::Points(15f32),
..Default::default()
},
..Default::default()
},
&[],
)
.unwrap();
let node = stretch
.new_node(
stretch::style::Style {
flex_direction: stretch::style::FlexDirection::Column,
size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(100f32),
height: stretch::style::Dimension::Points(100f32),
..Default::default()
},
..Default::default()
},
&[node0, node1],
)
.unwrap();
stretch.compute_layout(node, stretch::geometry::Size::undefined()).unwrap();
}
| 33.823529 | 80 | 0.435362 |
e25c4bbdfabd217285ec53d8b66fce0969fd33d5 | 12,957 | use std::collections::HashMap;
use std::mem;
use codemap::{Span, Spanned};
use peekmore::PeekMoreIterator;
use crate::common::Identifier;
use crate::error::SassResult;
use crate::scope::Scope;
use crate::selector::Selector;
use crate::utils::{
devour_whitespace, devour_whitespace_or_comment, eat_ident, eat_ident_no_interpolation,
read_until_closing_paren, read_until_closing_quote, read_until_closing_square_brace,
};
use crate::value::Value;
use crate::Token;
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct FuncArgs(pub Vec<FuncArg>);
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct FuncArg {
pub name: Identifier,
pub default: Option<Vec<Token>>,
pub is_variadic: bool,
}
impl FuncArgs {
pub const fn new() -> Self {
FuncArgs(Vec::new())
}
}
#[derive(Debug, Clone)]
pub(crate) struct CallArgs(HashMap<CallArg, Vec<Token>>, Span);
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
enum CallArg {
Named(Identifier),
Positional(usize),
}
impl CallArg {
pub fn position(&self) -> Result<usize, String> {
match self {
Self::Named(ref name) => Err(name.to_string()),
Self::Positional(p) => Ok(*p),
}
}
pub fn decrement(self) -> CallArg {
match self {
Self::Named(..) => self,
Self::Positional(p) => Self::Positional(p - 1),
}
}
}
impl CallArgs {
pub fn new(span: Span) -> Self {
CallArgs(HashMap::new(), span)
}
pub fn to_css_string(
self,
scope: &Scope,
super_selector: &Selector,
) -> SassResult<Spanned<String>> {
let mut string = String::with_capacity(2 + self.len() * 10);
string.push('(');
let mut span = self.1;
if self.is_empty() {
return Ok(Spanned {
node: "()".to_string(),
span,
});
}
let args = match self.get_variadic(scope, super_selector) {
Ok(v) => v,
Err(..) => {
return Err(("Plain CSS functions don't support keyword arguments.", span).into())
}
};
string.push_str(
&args
.iter()
.map(|a| {
span = span.merge(a.span);
Ok(a.node.to_css_string(a.span)?.into())
})
.collect::<SassResult<Vec<String>>>()?
.join(", "),
);
string.push(')');
Ok(Spanned { node: string, span })
}
/// Get argument by name
///
/// Removes the argument
pub fn get_named<T: Into<Identifier>>(
&mut self,
val: T,
scope: &Scope,
super_selector: &Selector,
) -> Option<SassResult<Spanned<Value>>> {
match self.0.remove(&CallArg::Named(val.into())) {
Some(v) => {
let span_before = v[0].pos;
Some(Value::from_vec(v, scope, super_selector, span_before))
}
None => None,
}
}
/// Get a positional argument by 0-indexed position
///
/// Removes the argument
pub fn get_positional(
&mut self,
val: usize,
scope: &Scope,
super_selector: &Selector,
) -> Option<SassResult<Spanned<Value>>> {
match self.0.remove(&CallArg::Positional(val)) {
Some(v) => {
let span_before = v[0].pos;
Some(Value::from_vec(v, scope, super_selector, span_before))
}
None => None,
}
}
pub fn get<T: Into<Identifier>>(
&mut self,
position: usize,
name: T,
scope: &Scope,
super_selector: &Selector,
) -> Option<SassResult<Spanned<Value>>> {
match self.get_named(name, scope, super_selector) {
Some(v) => Some(v),
None => self.get_positional(position, scope, super_selector),
}
}
pub fn get_variadic(
self,
scope: &Scope,
super_selector: &Selector,
) -> SassResult<Vec<Spanned<Value>>> {
let mut vals = Vec::new();
let mut args = match self
.0
.into_iter()
.map(|(a, v)| Ok((a.position()?, v)))
.collect::<Result<Vec<(usize, Vec<Token>)>, String>>()
{
Ok(v) => v,
Err(e) => return Err((format!("No argument named ${}.", e), self.1).into()),
};
args.sort_by(|(a1, _), (a2, _)| a1.cmp(a2));
for arg in args {
let span_before = arg.1[0].pos;
vals.push(Value::from_vec(arg.1, scope, super_selector, span_before)?);
}
Ok(vals)
}
/// Decrement all positional arguments by 1
///
/// This is used by builtin function `call` to pass
/// positional arguments to the other function
pub fn decrement(self) -> Self {
CallArgs(
self.0
.into_iter()
.map(|(k, v)| (k.decrement(), v))
.collect(),
self.1,
)
}
pub const fn span(&self) -> Span {
self.1
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn max_args(&self, max: usize) -> SassResult<()> {
let len = self.len();
if len > max {
let mut err = String::with_capacity(50);
err.push_str(&format!("Only {} argument", max));
if max != 1 {
err.push('s');
}
err.push_str(" allowed, but ");
err.push_str(&len.to_string());
err.push(' ');
if len == 1 {
err.push_str("was passed.")
} else {
err.push_str("were passed.")
}
return Err((err, self.span()).into());
}
Ok(())
}
}
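// Small illustrative test (not present in the original module) pinning down the
// positional-shift behaviour that the builtin `call` relies on: named arguments
// are untouched, positional ones move down by one. The `"width"` name is an
// arbitrary example.
#[cfg(test)]
mod call_arg_tests {
    use super::*;

    #[test]
    fn decrement_shifts_positional_arguments_only() {
        assert_eq!(CallArg::Positional(3).decrement(), CallArg::Positional(2));
        let name: Identifier = "width".into();
        let named = CallArg::Named(name);
        assert_eq!(named.clone().decrement(), named);
    }
}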
pub(crate) fn eat_func_args<I: Iterator<Item = Token>>(
toks: &mut PeekMoreIterator<I>,
scope: &Scope,
super_selector: &Selector,
) -> SassResult<FuncArgs> {
let mut args: Vec<FuncArg> = Vec::new();
let mut close_paren_span: Span = toks.peek().unwrap().pos();
devour_whitespace(toks);
while let Some(Token { kind, pos }) = toks.next() {
let name = match kind {
'$' => eat_ident(toks, scope, super_selector, pos)?,
')' => {
close_paren_span = pos;
break;
}
_ => return Err(("expected \")\".", pos).into()),
};
let mut default: Vec<Token> = Vec::new();
let mut is_variadic = false;
devour_whitespace(toks);
let (kind, span) = match toks.next() {
Some(Token { kind, pos }) => (kind, pos),
_ => todo!("unexpected eof"),
};
match kind {
':' => {
devour_whitespace(toks);
while let Some(tok) = toks.peek() {
match &tok.kind {
',' => {
toks.next();
args.push(FuncArg {
name: name.node.into(),
default: Some(default),
is_variadic,
});
break;
}
')' => {
args.push(FuncArg {
name: name.node.into(),
default: Some(default),
is_variadic,
});
close_paren_span = tok.pos();
break;
}
'(' => {
default.push(toks.next().unwrap());
default.extend(read_until_closing_paren(toks)?);
}
_ => default.push(toks.next().unwrap()),
}
}
}
'.' => {
let next = toks.next().ok_or(("expected \".\".", span))?;
if next.kind != '.' {
return Err(("expected \".\".", next.pos()).into());
}
let next = toks.next().ok_or(("expected \".\".", next.pos()))?;
if next.kind != '.' {
return Err(("expected \".\".", next.pos()).into());
}
devour_whitespace(toks);
let next = toks.next().ok_or(("expected \")\".", next.pos()))?;
if next.kind != ')' {
return Err(("expected \")\".", next.pos()).into());
}
is_variadic = true;
args.push(FuncArg {
name: name.node.into(),
default: Some(default),
is_variadic,
});
break;
}
')' => {
close_paren_span = span;
args.push(FuncArg {
name: name.node.into(),
default: if default.is_empty() {
None
} else {
Some(default)
},
is_variadic,
});
break;
}
',' => args.push(FuncArg {
name: name.node.into(),
default: None,
is_variadic,
}),
_ => {}
}
devour_whitespace(toks);
}
devour_whitespace(toks);
match toks.next() {
Some(v) if v.kind == '{' => {}
Some(..) | None => return Err(("expected \"{\".", close_paren_span).into()),
};
Ok(FuncArgs(args))
}
pub(crate) fn eat_call_args<I: Iterator<Item = Token>>(
toks: &mut PeekMoreIterator<I>,
span_before: Span,
) -> SassResult<CallArgs> {
let mut args: HashMap<CallArg, Vec<Token>> = HashMap::new();
devour_whitespace_or_comment(toks)?;
let mut name = String::new();
let mut val: Vec<Token> = Vec::new();
let mut span = toks.peek().ok_or(("expected \")\".", span_before))?.pos();
loop {
match toks.peek() {
Some(Token { kind: '$', .. }) => {
let Token { pos, .. } = toks.next().unwrap();
let v = eat_ident_no_interpolation(toks, false, pos)?;
let whitespace = devour_whitespace_or_comment(toks)?;
if let Some(Token { kind: ':', .. }) = toks.peek() {
toks.next();
name = v.node;
} else {
val.push(Token::new(pos, '$'));
let mut current_pos = 0;
val.extend(v.chars().map(|x| {
let len = x.len_utf8() as u64;
let tok = Token::new(v.span.subspan(current_pos, current_pos + len), x);
current_pos += len;
tok
}));
if whitespace {
val.push(Token::new(pos, ' '));
}
name.clear();
}
}
Some(Token { kind: ')', .. }) => {
toks.next();
return Ok(CallArgs(args, span));
}
Some(..) | None => name.clear(),
}
devour_whitespace_or_comment(toks)?;
while let Some(tok) = toks.next() {
match tok.kind {
')' => {
args.insert(
if name.is_empty() {
CallArg::Positional(args.len())
} else {
CallArg::Named(name.into())
},
val,
);
span = span.merge(tok.pos());
return Ok(CallArgs(args, span));
}
',' => break,
'[' => {
val.push(tok);
val.extend(read_until_closing_square_brace(toks)?);
}
'(' => {
val.push(tok);
val.extend(read_until_closing_paren(toks)?);
}
'"' | '\'' => {
val.push(tok);
val.extend(read_until_closing_quote(toks, tok.kind)?);
}
_ => val.push(tok),
}
}
args.insert(
if name.is_empty() {
CallArg::Positional(args.len())
} else {
CallArg::Named(name.as_str().into())
},
mem::take(&mut val),
);
devour_whitespace(toks);
if toks.peek().is_none() {
return Ok(CallArgs(args, span));
}
}
}
| 30.923628 | 97 | 0.43135 |
d662619ae118888456dfd29d691fba54b4fc425d | 5,828 | /// Represents an unsigned 12-bit integer.
#[derive(Default, Clone, Copy)]
#[allow(non_camel_case_types)]
pub struct u12(u16);
impl u12 {
/// The largest value of the u12 integer.
pub const MAX: Self = Self(0xFFF);
/// The smallest value of the u12 integer.
pub const MIN: Self = Self(0x000);
/// The number of bits in the u12 integer.
pub const BITS: u32 = 12;
}
impl u12 {
/// Takes a 3-value array representing the nibbles in big-endian format
/// and attempts to convert the nibbles to a corresponding u12. All nibbles
/// are masked and will be truncated to a maximum value of 0x0f.
pub fn from_be_nibbles(src: [u8; 3]) -> u12 {
let masked_src = [src[0] & 0x0f, src[1] & 0x0f, src[2] & 0x0f];
u12::from(masked_src)
}
}
impl u12 {
/// Instantiates a new u12.
pub fn new(value: u16) -> Self {
if value > Self::MAX.0 {
panic!("this value will overflow")
} else {
Self(value)
}
}
pub fn wrapping_add(self, rhs: Self) -> Self {
let (lhs, rhs) = (self.0, rhs.0);
u12::new(lhs.wrapping_add(rhs).mask())
}
pub fn wrapping_sub(self, rhs: Self) -> Self {
let (lhs, rhs) = (self.0, rhs.0);
u12::new(lhs.wrapping_sub(rhs).mask())
}
}
impl core::fmt::Debug for u12 {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> core::fmt::Result {
<u16 as core::fmt::Debug>::fmt(&self.0, f)
}
}
impl core::fmt::Display for u12 {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> core::fmt::Result {
<u16 as core::fmt::Display>::fmt(&self.0, f)
}
}
impl core::fmt::UpperHex for u12 {
fn fmt(&self, f: &mut std::fmt::Formatter) -> core::fmt::Result {
<u16 as core::fmt::UpperHex>::fmt(&self.0, f)
}
}
impl core::fmt::LowerHex for u12 {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
<u16 as core::fmt::LowerHex>::fmt(&self.0, f)
}
}
impl core::fmt::Octal for u12 {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
<u16 as core::fmt::Octal>::fmt(&self.0, f)
}
}
impl core::fmt::Binary for u12 {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
<u16 as core::fmt::Binary>::fmt(&self.0, f)
}
}
impl core::cmp::PartialEq for u12 {
fn eq(&self, other: &Self) -> bool {
self.0.mask() == other.0.mask()
}
}
impl Eq for u12 {}
impl core::cmp::PartialOrd for u12 {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
self.0.mask().partial_cmp(&other.0.mask())
}
}
impl core::cmp::Ord for u12 {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.0.mask().cmp(&other.0.mask())
}
}
impl core::ops::Add for u12 {
type Output = Self;
fn add(self, other: Self) -> Self::Output {
let (lhs, rhs) = (self.0, other.0);
let sum = lhs + rhs;
if sum > Self::MAX.0 {
panic!("this arithmetic operation will overflow")
} else {
Self::new(sum)
}
}
}
impl core::ops::Sub for u12 {
type Output = Self;
fn sub(self, other: Self) -> Self::Output {
let (lhs, rhs) = (self.0, other.0);
Self(lhs - rhs).mask()
}
}
impl core::ops::BitAnd for u12 {
type Output = Self;
fn bitand(self, other: Self) -> Self::Output {
let (lhs, rhs) = (self.0, other.0);
Self(lhs & rhs).mask()
}
}
impl core::ops::BitOr for u12 {
type Output = Self;
fn bitor(self, other: Self) -> Self::Output {
let (lhs, rhs) = (self.0, other.0);
Self(lhs | rhs).mask()
}
}
impl core::ops::Shl for u12 {
type Output = Self;
fn shl(self, other: Self) -> Self {
let (lhs, rhs) = (self.0, other.0);
Self(lhs << rhs).mask()
}
}
impl core::ops::Shr for u12 {
type Output = Self;
fn shr(self, other: Self) -> Self {
let (lhs, rhs) = (self.0, other.0);
Self(lhs >> rhs).mask()
}
}
macro_rules! impl_from_u12_for_uX {
($($t:ty,)*) => {
$(
impl From<u12> for $t {
fn from(src: u12) -> Self {
src.0 as Self
}
}
)*
};
}
impl_from_u12_for_uX!(u16, u32, u64, u128,);
impl From<[u8; 3]> for u12 {
fn from(src: [u8; 3]) -> Self {
let msb = src[0] & 0x0f;
let lsb = ((src[1] & 0x0f) << 4) | (src[2] & 0x0f);
let val = u16::from_be_bytes([msb, lsb]);
u12::new(val)
}
}
trait MaskU12 {
fn mask(self) -> Self;
}
impl MaskU12 for u12 {
fn mask(self) -> u12 {
u12::new(self.0.mask())
}
}
impl MaskU12 for u16 {
fn mask(self) -> u16 {
self & u12::MAX.0
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_add_as_expected() {
assert_eq!(u12::new(12), u12::new(5) + u12::new(7))
}
#[test]
#[should_panic]
#[allow(unused_must_use)]
#[allow(clippy::unnecessary_operation)]
fn should_panic_on_overflowing_add() {
// use in-range operands so the panic comes from the overflowing add, not from `new`
u12::new(0xFFF) + u12::new(1);
}
#[test]
fn should_wrap_correctly_with_overflowing_wrapping_add() {
// non-overflowing add.
assert_eq!(u12::new(12), (u12::new(5).wrapping_add(u12::new(7))));
// overflowing add.
assert_eq!(u12::new(1), (u12::new(0xFFF).wrapping_add(u12::new(2))))
}
#[test]
fn should_wrap_correctly_with_overflowing_wrapping_sub() {
// non-overflowing sub.
assert_eq!(u12::new(2), (u12::new(7).wrapping_sub(u12::new(5))));
// overflowing sub.
assert_eq!(u12::new(0xFFF), (u12::new(0).wrapping_sub(u12::new(1))))
}
#[test]
fn should_subtract_as_expected() {
assert_eq!(u12::new(2), u12::new(7) - u12::new(5))
}
}
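// Additional illustrative tests (not in the original suite) showing the
// big-endian nibble packing documented on `from_be_nibbles`: the three nibbles
// become the high, middle and low 4 bits, and the upper half of each source
// byte is masked away.
#[cfg(test)]
mod nibble_tests {
    use super::*;

    #[test]
    fn should_pack_nibbles_big_endian_and_mask_high_bits() {
        assert_eq!(u12::new(0xABC), u12::from_be_nibbles([0x0A, 0x0B, 0x0C]));
        // High nibbles of each byte are ignored.
        assert_eq!(u12::new(0xABC), u12::from_be_nibbles([0xFA, 0xFB, 0xFC]));
    }
}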
| 23.885246 | 78 | 0.542896 |
11b146a28080fadeed37663fbac17cebd63435a2 | 2,482 | /*
* This file is part of CycloneDX Rust Cargo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
* Copyright (c) OWASP Foundation. All Rights Reserved.
*/
use std::{convert::TryFrom, io};
use cargo::core::Package;
use serde::Serialize;
use thiserror::Error;
use xml_writer::XmlWriter;
use crate::traits::ToXml;
#[derive(Serialize)]
#[serde(rename_all = "lowercase")]
pub struct License {
pub expression: String,
}
impl<'a> TryFrom<&'a Package> for License {
type Error = LicenseError;
fn try_from(pkg: &'a Package) -> Result<Self, Self::Error> {
let expression = pkg
.manifest()
.metadata()
.license
.as_ref()
.ok_or(LicenseError::NoLicenseProvidedError)?
.to_string();
Ok(Self { expression })
}
}
#[derive(Debug, Error)]
pub enum LicenseError {
#[error("No license was found in the package manifest")]
NoLicenseProvidedError,
}
impl<'a> TryFrom<&'a cargo_metadata::Package> for License {
type Error = LicenseError;
fn try_from(pkg: &'a cargo_metadata::Package) -> Result<Self, Self::Error> {
let expression = pkg
.license
.as_ref()
.ok_or(LicenseError::NoLicenseProvidedError)?
.to_string();
Ok(Self { expression })
}
}
impl ToXml for License {
fn to_xml<W: io::Write>(&self, xml: &mut XmlWriter<W>) -> io::Result<()> {
xml.begin_elem("license")?;
xml.begin_elem("expression")?;
xml.text(self.expression.trim())?;
xml.end_elem()?;
xml.end_elem()
}
}
impl ToXml for Vec<License> {
fn to_xml<W: io::Write>(&self, xml: &mut XmlWriter<W>) -> io::Result<()> {
if !self.is_empty() {
xml.begin_elem("licenses")?;
for license in self {
license.to_xml(xml)?;
}
xml.end_elem()?;
}
Ok(())
}
}
| 26.688172 | 80 | 0.612812 |
e29224264eeb82f108940e09a5057669c8129407 | 3,260 | #![deny(rust_2018_idioms)]
#![allow(
clippy::too_many_arguments,
clippy::type_complexity,
clippy::upper_case_acronyms, // see https://github.com/rust-lang/rust-clippy/issues/6974
clippy::vec_init_then_push, // uses two different styles of initialization
)]
#![recursion_limit = "1024"]
pub use crate::config::*;
use crate::currentprocess::*;
pub use crate::errors::*;
pub use crate::notifications::*;
use crate::toolchain::*;
pub(crate) use crate::utils::toml_utils;
use anyhow::{anyhow, Result};
#[macro_use]
extern crate rs_tracing;
// A list of all binaries which Rustup will proxy.
pub static TOOLS: &[&str] = &[
"rustc",
"rustdoc",
"cargo",
"rust-lldb",
"rust-gdb",
"rust-gdbgui",
"rls",
"cargo-clippy",
"clippy-driver",
"cargo-miri",
];
// Tools which are commonly installed by Cargo as well as rustup. We take a bit
// more care with these to ensure we don't overwrite the user's previous
// installation.
pub static DUP_TOOLS: &[&str] = &["rustfmt", "cargo-fmt"];
// If the given name is one of the tools we proxy.
pub fn is_proxyable_tools(tool: &str) -> Result<()> {
if TOOLS
.iter()
.chain(DUP_TOOLS.iter())
.any(|&name| name == tool)
{
Ok(())
} else {
Err(anyhow!(format!(
"unknown proxy name: '{}'; valid proxy names are {}",
tool,
TOOLS
.iter()
.chain(DUP_TOOLS.iter())
.map(|s| format!("'{}'", s))
.collect::<Vec<_>>()
.join(", ")
)))
}
}
fn component_for_bin(binary: &str) -> Option<&'static str> {
use std::env::consts::EXE_SUFFIX;
let binary_prefix = match binary.find(EXE_SUFFIX) {
_ if EXE_SUFFIX.is_empty() => binary,
Some(i) => &binary[..i],
None => binary,
};
match binary_prefix {
"rustc" | "rustdoc" => Some("rustc"),
"cargo" => Some("cargo"),
"rust-lldb" | "rust-gdb" | "rust-gdbgui" => Some("rustc"), // These are not always available
"rls" => Some("rls"),
"cargo-clippy" => Some("clippy"),
"clippy-driver" => Some("clippy"),
"cargo-miri" => Some("miri"),
"rustfmt" | "cargo-fmt" => Some("rustfmt"),
_ => None,
}
}
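// Illustrative test (not in the original file) pinning down the mapping above;
// the `EXE_SUFFIX` stripping makes these results the same on Unix (empty
// suffix) and Windows (where a trailing ".exe" is cut off before the lookup).
#[cfg(test)]
mod component_for_bin_tests {
    use super::component_for_bin;

    #[test]
    fn maps_proxies_to_their_components() {
        assert_eq!(component_for_bin("cargo-clippy"), Some("clippy"));
        assert_eq!(component_for_bin("rustdoc"), Some("rustc"));
        assert_eq!(component_for_bin("not-a-proxy"), None);
    }
}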
#[macro_use]
pub mod cli;
mod command;
mod config;
pub mod currentprocess;
mod diskio;
pub mod dist;
pub mod env_var;
pub mod errors;
mod fallback_settings;
mod install;
pub mod notifications;
mod settings;
pub mod test;
mod toolchain;
pub mod utils;
#[cfg(test)]
mod tests {
use crate::{is_proxyable_tools, DUP_TOOLS, TOOLS};
#[test]
fn test_is_proxyable_tools() {
for tool in TOOLS {
assert!(is_proxyable_tools(tool).is_ok());
}
for tool in DUP_TOOLS {
assert!(is_proxyable_tools(tool).is_ok());
}
let message = &"unknown proxy name: 'unknown-tool'; valid proxy names are 'rustc', \
'rustdoc', 'cargo', 'rust-lldb', 'rust-gdb', 'rust-gdbgui', 'rls', 'cargo-clippy', \
'clippy-driver', 'cargo-miri', 'rustfmt', 'cargo-fmt'";
assert!(is_proxyable_tools("unknown-tool")
.unwrap_err()
.to_string()
.eq(message));
}
}
| 26.721311 | 100 | 0.577607 |
1d3afe392d62f3123878e6ad9394708fb9636663 | 12,134 | // This file is part of Substrate.
// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! BABE consensus data provider
use super::ConsensusDataProvider;
use crate::Error;
use codec::Encode;
use sc_client_api::{AuxStore, UsageProvider};
use sc_consensus_babe::{
authorship, find_pre_digest, BabeIntermediate, CompatibleDigestItem, Config, Epoch,
INTERMEDIATE_KEY,
};
use sc_consensus_epochs::{
descendent_query, EpochHeader, SharedEpochChanges, ViableEpochDescriptor,
};
use sp_keystore::SyncCryptoStorePtr;
use std::{
borrow::Cow,
sync::{atomic, Arc},
time::SystemTime,
};
use sc_consensus::{BlockImportParams, ForkChoiceStrategy, Verifier};
use sp_api::{ProvideRuntimeApi, TransactionFor};
use sp_blockchain::{HeaderBackend, HeaderMetadata};
use sp_consensus::CacheKeyId;
use sp_consensus_babe::{
digests::{NextEpochDescriptor, PreDigest, SecondaryPlainPreDigest},
inherents::BabeInherentData,
AuthorityId, BabeApi, BabeAuthorityWeight, ConsensusLog, BABE_ENGINE_ID,
};
use sp_consensus_slots::Slot;
use sp_inherents::{InherentData, InherentDataProvider, InherentIdentifier};
use sp_runtime::{
generic::{BlockId, Digest},
traits::{Block as BlockT, DigestFor, DigestItemFor, Header, Zero},
};
use sp_timestamp::{InherentType, TimestampInherentData, INHERENT_IDENTIFIER};
/// Provides BABE-compatible predigests and BlockImportParams.
/// Intended for use with BABE runtimes.
pub struct BabeConsensusDataProvider<B: BlockT, C> {
/// shared reference to keystore
keystore: SyncCryptoStorePtr,
/// Shared reference to the client.
client: Arc<C>,
/// Shared epoch changes
epoch_changes: SharedEpochChanges<B, Epoch>,
/// BABE config, gotten from the runtime.
config: Config,
/// Authorities to be used for this babe chain.
authorities: Vec<(AuthorityId, BabeAuthorityWeight)>,
}
/// Verifier to be used for babe chains
pub struct BabeVerifier<B: BlockT, C> {
/// Shared epoch changes
epoch_changes: SharedEpochChanges<B, Epoch>,
/// Shared reference to the client.
client: Arc<C>,
}
impl<B: BlockT, C> BabeVerifier<B, C> {
/// Create a new verifier.
pub fn new(epoch_changes: SharedEpochChanges<B, Epoch>, client: Arc<C>) -> BabeVerifier<B, C> {
BabeVerifier { epoch_changes, client }
}
}
/// The verifier for the manual seal engine; instantly finalizes.
#[async_trait::async_trait]
impl<B, C> Verifier<B> for BabeVerifier<B, C>
where
B: BlockT,
C: HeaderBackend<B> + HeaderMetadata<B, Error = sp_blockchain::Error>,
{
async fn verify(
&mut self,
mut import_params: BlockImportParams<B, ()>,
) -> Result<(BlockImportParams<B, ()>, Option<Vec<(CacheKeyId, Vec<u8>)>>), String> {
import_params.finalized = false;
import_params.fork_choice = Some(ForkChoiceStrategy::LongestChain);
let pre_digest = find_pre_digest::<B>(&import_params.header)?;
let parent_hash = import_params.header.parent_hash();
let parent = self
.client
.header(BlockId::Hash(*parent_hash))
.ok()
.flatten()
.ok_or_else(|| format!("header for block {} not found", parent_hash))?;
let epoch_changes = self.epoch_changes.shared_data();
let epoch_descriptor = epoch_changes
.epoch_descriptor_for_child_of(
descendent_query(&*self.client),
&parent.hash(),
parent.number().clone(),
pre_digest.slot(),
)
.map_err(|e| format!("failed to fetch epoch_descriptor: {}", e))?
.ok_or_else(|| format!("{:?}", sp_consensus::Error::InvalidAuthoritiesSet))?;
// drop the lock
drop(epoch_changes);
import_params.intermediates.insert(
Cow::from(INTERMEDIATE_KEY),
Box::new(BabeIntermediate::<B> { epoch_descriptor }) as Box<_>,
);
Ok((import_params, None))
}
}
impl<B, C> BabeConsensusDataProvider<B, C>
where
B: BlockT,
C: AuxStore
+ HeaderBackend<B>
+ ProvideRuntimeApi<B>
+ HeaderMetadata<B, Error = sp_blockchain::Error>
+ UsageProvider<B>,
C::Api: BabeApi<B>,
{
pub fn new(
client: Arc<C>,
keystore: SyncCryptoStorePtr,
epoch_changes: SharedEpochChanges<B, Epoch>,
authorities: Vec<(AuthorityId, BabeAuthorityWeight)>,
) -> Result<Self, Error> {
if authorities.is_empty() {
return Err(Error::StringError("Cannot supply empty authority set!".into()))
}
let config = Config::get_or_compute(&*client)?;
Ok(Self { config, client, keystore, epoch_changes, authorities })
}
fn epoch(&self, parent: &B::Header, slot: Slot) -> Result<Epoch, Error> {
let epoch_changes = self.epoch_changes.shared_data();
let epoch_descriptor = epoch_changes
.epoch_descriptor_for_child_of(
descendent_query(&*self.client),
&parent.hash(),
parent.number().clone(),
slot,
)
.map_err(|e| Error::StringError(format!("failed to fetch epoch_descriptor: {}", e)))?
.ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?;
let epoch = epoch_changes
.viable_epoch(&epoch_descriptor, |slot| Epoch::genesis(&self.config, slot))
.ok_or_else(|| {
log::info!(target: "babe", "create_digest: no viable_epoch :(");
sp_consensus::Error::InvalidAuthoritiesSet
})?;
Ok(epoch.as_ref().clone())
}
}
impl<B, C> ConsensusDataProvider<B> for BabeConsensusDataProvider<B, C>
where
B: BlockT,
C: AuxStore
+ HeaderBackend<B>
+ HeaderMetadata<B, Error = sp_blockchain::Error>
+ UsageProvider<B>
+ ProvideRuntimeApi<B>,
C::Api: BabeApi<B>,
{
type Transaction = TransactionFor<C, B>;
fn create_digest(
&self,
parent: &B::Header,
inherents: &InherentData,
) -> Result<DigestFor<B>, Error> {
let slot = inherents
.babe_inherent_data()?
.ok_or_else(|| Error::StringError("No babe inherent data".into()))?;
let epoch = self.epoch(parent, slot)?;
// this is a dev node environment, we should always be able to claim a slot.
let logs = if let Some((predigest, _)) =
authorship::claim_slot(slot, &epoch, &self.keystore)
{
vec![<DigestItemFor<B> as CompatibleDigestItem>::babe_pre_digest(predigest)]
} else {
// well we couldn't claim a slot because this is an existing chain and we're not in the
// authorities. we need to tell BabeBlockImport that the epoch has changed, and we put
// ourselves in the authorities.
let predigest =
PreDigest::SecondaryPlain(SecondaryPlainPreDigest { slot, authority_index: 0_u32 });
let mut epoch_changes = self.epoch_changes.shared_data();
let epoch_descriptor = epoch_changes
.epoch_descriptor_for_child_of(
descendent_query(&*self.client),
&parent.hash(),
parent.number().clone(),
slot,
)
.map_err(|e| {
Error::StringError(format!("failed to fetch epoch_descriptor: {}", e))
})?
.ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?;
match epoch_descriptor {
ViableEpochDescriptor::Signaled(identifier, _epoch_header) => {
let epoch_mut = epoch_changes
.epoch_mut(&identifier)
.ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?;
// mutate the current epoch
epoch_mut.authorities = self.authorities.clone();
let next_epoch = ConsensusLog::NextEpochData(NextEpochDescriptor {
authorities: self.authorities.clone(),
// copy the old randomness
randomness: epoch_mut.randomness.clone(),
});
vec![
DigestItemFor::<B>::PreRuntime(BABE_ENGINE_ID, predigest.encode()),
DigestItemFor::<B>::Consensus(BABE_ENGINE_ID, next_epoch.encode()),
]
},
ViableEpochDescriptor::UnimportedGenesis(_) => {
// since this is the genesis, secondary predigest works for now.
vec![DigestItemFor::<B>::PreRuntime(BABE_ENGINE_ID, predigest.encode())]
},
}
};
Ok(Digest { logs })
}
fn append_block_import(
&self,
parent: &B::Header,
params: &mut BlockImportParams<B, Self::Transaction>,
inherents: &InherentData,
) -> Result<(), Error> {
let slot = inherents
.babe_inherent_data()?
.ok_or_else(|| Error::StringError("No babe inherent data".into()))?;
let epoch_changes = self.epoch_changes.shared_data();
let mut epoch_descriptor = epoch_changes
.epoch_descriptor_for_child_of(
descendent_query(&*self.client),
&parent.hash(),
parent.number().clone(),
slot,
)
.map_err(|e| Error::StringError(format!("failed to fetch epoch_descriptor: {}", e)))?
.ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?;
// drop the lock
drop(epoch_changes);
// a quick check to see if we're in the authorities
let epoch = self.epoch(parent, slot)?;
let (authority, _) = self.authorities.first().expect("authorities is non-empty; qed");
let has_authority = epoch.authorities.iter().find(|(id, _)| *id == *authority).is_some();
if !has_authority {
log::info!(target: "manual-seal", "authority not found");
let timestamp = inherents
.timestamp_inherent_data()?
.ok_or_else(|| Error::StringError("No timestamp inherent data".into()))?;
let slot = *timestamp / self.config.slot_duration;
// manually hard code epoch descriptor
epoch_descriptor = match epoch_descriptor {
ViableEpochDescriptor::Signaled(identifier, _header) =>
ViableEpochDescriptor::Signaled(
identifier,
EpochHeader {
start_slot: slot.into(),
end_slot: (slot * self.config.epoch_length).into(),
},
),
_ => unreachable!(
"we're not in the authorities, so this isn't the genesis epoch; qed"
),
};
}
params.intermediates.insert(
Cow::from(INTERMEDIATE_KEY),
Box::new(BabeIntermediate::<B> { epoch_descriptor }) as Box<_>,
);
Ok(())
}
}
/// Provide the duration since the Unix epoch, in milliseconds, for the timestamp inherent.
/// Mocks the timestamp inherent to always produce the timestamp for the next babe slot.
pub struct SlotTimestampProvider {
time: atomic::AtomicU64,
slot_duration: u64,
}
impl SlotTimestampProvider {
/// Create a new mocked time stamp provider.
pub fn new<B, C>(client: Arc<C>) -> Result<Self, Error>
where
B: BlockT,
C: AuxStore + HeaderBackend<B> + ProvideRuntimeApi<B> + UsageProvider<B>,
C::Api: BabeApi<B>,
{
let slot_duration = Config::get_or_compute(&*client)?.slot_duration;
let info = client.info();
// looks like this isn't the first block, rehydrate the fake time.
// otherwise we'd be producing blocks for older slots.
let time = if info.best_number != Zero::zero() {
let header = client.header(BlockId::Hash(info.best_hash))?.unwrap();
let slot = find_pre_digest::<B>(&header).unwrap().slot();
// add the slot duration so there's no collision of slots
(*slot * slot_duration) + slot_duration
} else {
// this is the first block, use the correct time.
let now = SystemTime::now();
now.duration_since(SystemTime::UNIX_EPOCH)
.map_err(|err| Error::StringError(format!("{}", err)))?
.as_millis() as u64
};
Ok(Self { time: atomic::AtomicU64::new(time), slot_duration })
}
/// Get the current slot number
pub fn slot(&self) -> u64 {
self.time.load(atomic::Ordering::SeqCst) / self.slot_duration
}
}
#[async_trait::async_trait]
impl InherentDataProvider for SlotTimestampProvider {
fn provide_inherent_data(
&self,
inherent_data: &mut InherentData,
) -> Result<(), sp_inherents::Error> {
// we update the time here.
let duration: InherentType =
self.time.fetch_add(self.slot_duration, atomic::Ordering::SeqCst).into();
inherent_data.put_data(INHERENT_IDENTIFIER, &duration)?;
Ok(())
}
async fn try_handle_error(
&self,
_: &InherentIdentifier,
_: &[u8],
) -> Option<Result<(), sp_inherents::Error>> {
None
}
}
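// Illustrative test (not in the original file): with the fields above, the slot
// is simply `time / slot_duration`, and every call to `provide_inherent_data`
// advances the mocked clock by one slot. The millisecond values are arbitrary.
#[cfg(test)]
mod slot_timestamp_tests {
    use super::*;

    #[test]
    fn slot_advances_by_one_per_inherent() {
        let provider = SlotTimestampProvider {
            time: atomic::AtomicU64::new(6_000),
            slot_duration: 3_000,
        };
        assert_eq!(provider.slot(), 2);
        let mut inherent_data = InherentData::new();
        provider.provide_inherent_data(&mut inherent_data).unwrap();
        // 6_000 + 3_000 = 9_000 ms => slot 3.
        assert_eq!(provider.slot(), 3);
    }
}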
| 31.847769 | 96 | 0.705373 |
21dd528b767ff5db36fbb8b4e87e2e27259920ba | 751 | use std::{cell::RefCell, rc::Rc, thread};
#[derive(Debug, Default, Clone)]
struct Evil {
data: Rc<RefCell<usize>>,
}
// Force-implement Send for Evil; this throws the whole Rc machinery into disarray (its reference counting is not thread-safe)
unsafe impl Send for Evil {}
fn main() {
let v = Evil::default();
let v1 = v.clone();
let v2 = v.clone();
let t1 = thread::spawn(move || {
let v3 = v;
let mut data = v3.data.borrow_mut();
*data += 1;
println!("v3: {:?}", data);
});
let t2 = thread::spawn(move || {
let v4 = v1;
let mut data = v4.data.borrow_mut();
*data += 1;
println!("v4: {:?}", data);
});
t2.join().unwrap();
t1.join().unwrap();
let mut data = v2.data.borrow_mut();
*data += 1;
println!("v2: {:?}", data);
}
| 19.763158 | 44 | 0.496671 |
2f7dd7f7469c2454385c165690a55047875ae96a | 1,063 | use crate::{
data_io::ChainTracker,
party::{AuthoritySubtaskCommon, Task},
};
use kumandra_bft::SpawnHandle;
use futures::channel::oneshot;
use log::debug;
use sc_client_api::HeaderBackend;
use sp_consensus::SelectChain;
use sp_runtime::traits::Block;
/// Runs the latest block refresher within a single session.
pub fn task<B, SC, C>(
subtask_common: AuthoritySubtaskCommon,
chain_tracker: ChainTracker<B, SC, C>,
) -> Task
where
B: Block,
C: HeaderBackend<B> + 'static,
SC: SelectChain<B> + 'static,
{
let AuthoritySubtaskCommon {
spawn_handle,
session_id,
} = subtask_common;
let (stop, exit) = oneshot::channel();
let task = async move {
debug!(target: "kumandra-party", "Running the chain refresh task for {:?}", session_id);
chain_tracker.run(exit).await;
debug!(target: "kumandra-party", "Chain refresh task stopped for {:?}", session_id);
};
let handle = spawn_handle.spawn_essential("kumandra/consensus_session_refresher", task);
Task::new(handle, stop)
}
| 29.527778 | 96 | 0.678269 |
260c51966b17d5985a48fa436ad832c023608125 | 27,958 | use crate::aliases::TMat4;
use na::RealField;
//pub fn frustum<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//pub fn frustum_lh<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_lr_no<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_lh_zo<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_no<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_rh<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_rh_no<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_rh_zo<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn frustum_zo<N: RealField>(left: N, right: N, bottom: N, top: N, near: N, far: N) -> TMat4<N> {
// unimplemented!()
//}
//pub fn infinite_perspective<N: RealField>(fovy: N, aspect: N, near: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn infinite_perspective_lh<N: RealField>(fovy: N, aspect: N, near: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn infinite_ortho<N: RealField>(left: N, right: N, bottom: N, top: N) -> TMat4<N> {
// unimplemented!()
//}
/// Creates a matrix for a right hand orthographic-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho<N: RealField>(left: N, right: N, bottom: N, top: N, znear: N, zfar: N) -> TMat4<N> {
ortho_rh_no(left, right, bottom, top, znear, zfar)
}
/// Creates a left hand matrix for an orthographic-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_lh<N: RealField>(left: N, right: N, bottom: N, top: N, znear: N, zfar: N) -> TMat4<N> {
ortho_lh_no(left, right, bottom, top, znear, zfar)
}
/// Creates a left hand matrix for an orthographic-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_lh_no<N: RealField>(
left: N,
right: N,
bottom: N,
top: N,
znear: N,
zfar: N,
) -> TMat4<N> {
let two: N = crate::convert(2.0);
let mut mat: TMat4<N> = TMat4::<N>::identity();
mat[(0, 0)] = two / (right - left);
mat[(0, 3)] = -(right + left) / (right - left);
mat[(1, 1)] = two / (top - bottom);
mat[(1, 3)] = -(top + bottom) / (top - bottom);
mat[(2, 2)] = two / (zfar - znear);
mat[(2, 3)] = -(zfar + znear) / (zfar - znear);
mat
}
/// Creates a matrix for a left hand orthographic-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_lh_zo<N: RealField>(
left: N,
right: N,
bottom: N,
top: N,
znear: N,
zfar: N,
) -> TMat4<N> {
let one: N = N::one();
let two: N = crate::convert(2.0);
let mut mat: TMat4<N> = TMat4::<N>::identity();
mat[(0, 0)] = two / (right - left);
mat[(0, 3)] = -(right + left) / (right - left);
mat[(1, 1)] = two / (top - bottom);
mat[(1, 3)] = -(top + bottom) / (top - bottom);
mat[(2, 2)] = one / (zfar - znear);
mat[(2, 3)] = -znear / (zfar - znear);
mat
}
/// Creates a matrix for a right hand orthographic-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_no<N: RealField>(left: N, right: N, bottom: N, top: N, znear: N, zfar: N) -> TMat4<N> {
ortho_rh_no(left, right, bottom, top, znear, zfar)
}
/// Creates a matrix for a right hand orthographic-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_rh<N: RealField>(left: N, right: N, bottom: N, top: N, znear: N, zfar: N) -> TMat4<N> {
ortho_rh_no(left, right, bottom, top, znear, zfar)
}
/// Creates a matrix for a right hand orthographic-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_rh_no<N: RealField>(
left: N,
right: N,
bottom: N,
top: N,
znear: N,
zfar: N,
) -> TMat4<N> {
let two: N = crate::convert(2.0);
let mut mat: TMat4<N> = TMat4::<N>::identity();
mat[(0, 0)] = two / (right - left);
mat[(0, 3)] = -(right + left) / (right - left);
mat[(1, 1)] = two / (top - bottom);
mat[(1, 3)] = -(top + bottom) / (top - bottom);
mat[(2, 2)] = -two / (zfar - znear);
mat[(2, 3)] = -(zfar + znear) / (zfar - znear);
mat
}
/// Creates a right hand matrix for an orthographic-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_rh_zo<N: RealField>(
left: N,
right: N,
bottom: N,
top: N,
znear: N,
zfar: N,
) -> TMat4<N> {
let one: N = N::one();
let two: N = crate::convert(2.0);
let mut mat: TMat4<N> = TMat4::<N>::identity();
mat[(0, 0)] = two / (right - left);
mat[(0, 3)] = -(right + left) / (right - left);
mat[(1, 1)] = two / (top - bottom);
mat[(1, 3)] = -(top + bottom) / (top - bottom);
mat[(2, 2)] = -one / (zfar - znear);
mat[(2, 3)] = -znear / (zfar - znear);
mat
}
/// Creates a right hand matrix for an orthographic-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `left` - Coordinate for left bound of matrix
/// * `right` - Coordinate for right bound of matrix
/// * `bottom` - Coordinate for bottom bound of matrix
/// * `top` - Coordinate for top bound of matrix
/// * `znear` - Distance from the viewer to the near clipping plane
/// * `zfar` - Distance from the viewer to the far clipping plane
///
pub fn ortho_zo<N: RealField>(left: N, right: N, bottom: N, top: N, znear: N, zfar: N) -> TMat4<N> {
ortho_rh_zo(left, right, bottom, top, znear, zfar)
}
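// Quick numerical check (illustrative test, not part of the original file),
// assuming the crate's glm-style `vec4` constructor is available: under the
// default right-handed, -1..1 convention the upper-right corner of the near
// plane lands on the NDC corner (1, 1, -1).
#[cfg(test)]
mod ortho_tests {
    use super::ortho;

    #[test]
    fn maps_near_plane_corner_to_ndc_corner() {
        let m = ortho(-2.0f32, 2.0, -1.0, 1.0, 0.1, 100.0);
        // Right-handed: the near plane sits at z = -znear in eye space.
        let clip = m * crate::vec4(2.0f32, 1.0, -0.1, 1.0);
        assert!((clip.x - 1.0).abs() < 1.0e-5);
        assert!((clip.y - 1.0).abs() < 1.0e-5);
        assert!((clip.z + 1.0).abs() < 1.0e-5);
        assert!((clip.w - 1.0).abs() < 1.0e-5);
    }
}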
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov<N: RealField>(fov: N, width: N, height: N, near: N, far: N) -> TMat4<N> {
perspective_fov_rh_no(fov, width, height, near, far)
}
/// Creates a matrix for a left hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_lh<N: RealField>(fov: N, width: N, height: N, near: N, far: N) -> TMat4<N> {
perspective_fov_lh_no(fov, width, height, near, far)
}
/// Creates a matrix for a left hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_lh_no<N: RealField>(
fov: N,
width: N,
height: N,
near: N,
far: N,
) -> TMat4<N> {
assert!(width > N::zero(), "The width must be greater than zero");
assert!(height > N::zero(), "The height must be greater than zero.");
assert!(fov > N::zero(), "The fov must be greater than zero");
let mut mat = TMat4::zeros();
let rad = fov;
let h = (rad * crate::convert(0.5)).cos() / (rad * crate::convert(0.5)).sin();
let w = h * height / width;
mat[(0, 0)] = w;
mat[(1, 1)] = h;
mat[(2, 2)] = (far + near) / (far - near);
mat[(2, 3)] = -(far * near * crate::convert(2.0)) / (far - near);
mat[(3, 2)] = N::one();
mat
}
/// Creates a matrix for a left hand perspective-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_lh_zo<N: RealField>(
fov: N,
width: N,
height: N,
near: N,
far: N,
) -> TMat4<N> {
assert!(width > N::zero(), "The width must be greater than zero");
assert!(height > N::zero(), "The height must be greater than zero.");
assert!(fov > N::zero(), "The fov must be greater than zero");
let mut mat = TMat4::zeros();
let rad = fov;
let h = (rad * crate::convert(0.5)).cos() / (rad * crate::convert(0.5)).sin();
let w = h * height / width;
mat[(0, 0)] = w;
mat[(1, 1)] = h;
mat[(2, 2)] = far / (far - near);
mat[(2, 3)] = -(far * near) / (far - near);
mat[(3, 2)] = N::one();
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_no<N: RealField>(fov: N, width: N, height: N, near: N, far: N) -> TMat4<N> {
perspective_fov_rh_no(fov, width, height, near, far)
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_rh<N: RealField>(fov: N, width: N, height: N, near: N, far: N) -> TMat4<N> {
perspective_fov_rh_no(fov, width, height, near, far)
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_rh_no<N: RealField>(
fov: N,
width: N,
height: N,
near: N,
far: N,
) -> TMat4<N> {
assert!(width > N::zero(), "The width must be greater than zero");
assert!(height > N::zero(), "The height must be greater than zero.");
assert!(fov > N::zero(), "The fov must be greater than zero");
let mut mat = TMat4::zeros();
let rad = fov;
let h = (rad * crate::convert(0.5)).cos() / (rad * crate::convert(0.5)).sin();
let w = h * height / width;
mat[(0, 0)] = w;
mat[(1, 1)] = h;
mat[(2, 2)] = -(far + near) / (far - near);
mat[(2, 3)] = -(far * near * crate::convert(2.0)) / (far - near);
mat[(3, 2)] = -N::one();
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_rh_zo<N: RealField>(
fov: N,
width: N,
height: N,
near: N,
far: N,
) -> TMat4<N> {
assert!(width > N::zero(), "The width must be greater than zero");
assert!(height > N::zero(), "The height must be greater than zero.");
assert!(fov > N::zero(), "The fov must be greater than zero");
let mut mat = TMat4::zeros();
let rad = fov;
let h = (rad * crate::convert(0.5)).cos() / (rad * crate::convert(0.5)).sin();
let w = h * height / width;
mat[(0, 0)] = w;
mat[(1, 1)] = h;
mat[(2, 2)] = far / (near - far);
mat[(2, 3)] = -(far * near) / (far - near);
mat[(3, 2)] = -N::one();
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `fov` - Field of view, in radians
/// * `width` - Width of the viewport
/// * `height` - Height of the viewport
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
pub fn perspective_fov_zo<N: RealField>(fov: N, width: N, height: N, near: N, far: N) -> TMat4<N> {
perspective_fov_rh_zo(fov, width, height, near, far)
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
// TODO: Breaking change - revert back to proper glm conventions?
//
    // Prior to the changes that made this function's behaviour configurable, it was simply
    // a wrapper around Perspective3::new(). The argument order for that function is different
    // from the glm convention, but reordering the arguments would have generated needlessly
    // suboptimal code, so they were arranged so that the function could call straight
    // through.
    //
    // Now that the call to Perspective3::new() is no longer made, the functions could have their
    // arguments reordered to the glm convention. Unfortunately that is a breaking change and
    // can't be cleanly integrated into the existing library version without breaking other
    // people's code. Reordering to match glm isn't a huge deal, but if it is done it will have
    // to be part of a major, API-breaking update.
//
perspective_rh_no(aspect, fovy, near, far)
}
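// Illustrative usage (added example, not from the original source); the names below are
// arbitrary. It shows the glm-rs argument order `(aspect, fovy, near, far)` described in
// the note above:
//
//     let aspect = 16.0_f32 / 9.0;
//     let fovy = core::f32::consts::FRAC_PI_4; // 45 degrees, in radians
//     let proj = perspective(aspect, fovy, 0.1, 100.0);
//     // `proj` maps view space into OpenGL-style clip space with z in [-1, 1].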
/// Creates a matrix for a left hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_lh<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
perspective_lh_no(aspect, fovy, near, far)
}
/// Creates a matrix for a left hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_lh_no<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
assert!(
!relative_eq!(far - near, N::zero()),
"The near-plane and far-plane must not be superimposed."
);
assert!(
!relative_eq!(aspect, N::zero()),
"The apsect ratio must not be zero."
);
let one = N::one();
let two: N = crate::convert(2.0);
let mut mat: TMat4<N> = TMat4::zeros();
let tan_half_fovy = (fovy / two).tan();
mat[(0, 0)] = one / (aspect * tan_half_fovy);
mat[(1, 1)] = one / tan_half_fovy;
mat[(2, 2)] = (far + near) / (far - near);
mat[(2, 3)] = -(two * far * near) / (far - near);
mat[(3, 2)] = one;
mat
}
/// Creates a matrix for a left hand perspective-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_lh_zo<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
assert!(
!relative_eq!(far - near, N::zero()),
"The near-plane and far-plane must not be superimposed."
);
assert!(
!relative_eq!(aspect, N::zero()),
"The apsect ratio must not be zero."
);
let one = N::one();
let two: N = crate::convert(2.0);
let mut mat: TMat4<N> = TMat4::zeros();
let tan_half_fovy = (fovy / two).tan();
mat[(0, 0)] = one / (aspect * tan_half_fovy);
mat[(1, 1)] = one / tan_half_fovy;
mat[(2, 2)] = far / (far - near);
mat[(2, 3)] = -(far * near) / (far - near);
mat[(3, 2)] = one;
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_no<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
perspective_rh_no(aspect, fovy, near, far)
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_rh<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
perspective_rh_no(aspect, fovy, near, far)
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of -1 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_rh_no<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
assert!(
!relative_eq!(far - near, N::zero()),
"The near-plane and far-plane must not be superimposed."
);
assert!(
!relative_eq!(aspect, N::zero()),
"The apsect ratio must not be zero."
);
let negone = -N::one();
let one = N::one();
let two: N = crate::convert(2.0);
let mut mat = TMat4::zeros();
let tan_half_fovy = (fovy / two).tan();
mat[(0, 0)] = one / (aspect * tan_half_fovy);
mat[(1, 1)] = one / tan_half_fovy;
mat[(2, 2)] = -(far + near) / (far - near);
mat[(2, 3)] = -(two * far * near) / (far - near);
mat[(3, 2)] = negone;
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_rh_zo<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
assert!(
!relative_eq!(far - near, N::zero()),
"The near-plane and far-plane must not be superimposed."
);
assert!(
!relative_eq!(aspect, N::zero()),
"The apsect ratio must not be zero."
);
let negone = -N::one();
let one = N::one();
let two = crate::convert(2.0);
let mut mat = TMat4::zeros();
let tan_half_fovy = (fovy / two).tan();
mat[(0, 0)] = one / (aspect * tan_half_fovy);
mat[(1, 1)] = one / tan_half_fovy;
mat[(2, 2)] = far / (near - far);
mat[(2, 3)] = -(far * near) / (far - near);
mat[(3, 2)] = negone;
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a depth range of 0 to 1
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn perspective_zo<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
perspective_rh_zo(aspect, fovy, near, far)
}
/// Builds an infinite right-handed perspective projection matrix with a [-1,1] depth range.
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane.
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
pub fn infinite_perspective_rh_no<N: RealField>(aspect: N, fovy: N, near: N) -> TMat4<N> {
let f = N::one() / (fovy * na::convert(0.5)).tan();
let mut mat = TMat4::zeros();
mat[(0, 0)] = f / aspect;
mat[(1, 1)] = f;
mat[(2, 2)] = -N::one();
mat[(2, 3)] = -near * na::convert(2.0);
mat[(3, 2)] = -N::one();
mat
}
/// Builds an infinite right-handed perspective projection matrix with a [0,1] depth range.
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane.
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
///
// https://discourse.nphysics.org/t/reversed-z-and-infinite-zfar-in-projections/341/2
pub fn infinite_perspective_rh_zo<N: RealField>(aspect: N, fovy: N, near: N) -> TMat4<N> {
let f = N::one() / (fovy * na::convert(0.5)).tan();
let mut mat = TMat4::zeros();
mat[(0, 0)] = f / aspect;
mat[(1, 1)] = f;
mat[(2, 2)] = -N::one();
mat[(2, 3)] = -near;
mat[(3, 2)] = -N::one();
mat
}
/// Creates a matrix for a right hand perspective-view frustum with a reversed depth range of 0 to 1.
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane
/// * `far` - Distance from the viewer to the far clipping plane
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
// NOTE: The variants `_no` of reversed perspective are not useful.
pub fn reversed_perspective_rh_zo<N: RealField>(aspect: N, fovy: N, near: N, far: N) -> TMat4<N> {
let one = N::one();
let two = crate::convert(2.0);
let mut mat = TMat4::zeros();
let tan_half_fovy = (fovy / two).tan();
mat[(0, 0)] = one / (aspect * tan_half_fovy);
mat[(1, 1)] = one / tan_half_fovy;
mat[(2, 2)] = -far / (near - far) - one;
mat[(2, 3)] = (far * near) / (far - near);
mat[(3, 2)] = -one;
mat
}
/// Build an infinite perspective projection matrix with a reversed [0, 1] depth range.
///
/// # Parameters
///
/// * `aspect` - Ratio of viewport width to height (width/height)
/// * `fovy` - Field of view, in radians
/// * `near` - Distance from the viewer to the near clipping plane.
///
/// # Important note
/// The `aspect` and `fovy` arguments are interchanged compared to the original GLM API.
// Credit: https://discourse.nphysics.org/t/reversed-z-and-infinite-zfar-in-projections/341/2
// NOTE: The variants `_no` of reversed perspective are not useful.
pub fn reversed_infinite_perspective_rh_zo<N: RealField>(aspect: N, fovy: N, near: N) -> TMat4<N> {
let f = N::one() / (fovy * na::convert(0.5)).tan();
let mut mat = TMat4::zeros();
mat[(0, 0)] = f / aspect;
mat[(1, 1)] = f;
mat[(2, 3)] = near;
mat[(3, 2)] = -N::one();
mat
}
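// Added note (not in the original source): with the reversed-depth variants above, the near
// plane maps to depth 1.0 and the far plane (or infinity) to 0.0, which distributes
// floating-point depth precision more evenly. A typical caller pairs this with a
// greater-than depth test and clears depth to 0.0, e.g. (illustrative values):
//
//     let proj = reversed_perspective_rh_zo(16.0_f32 / 9.0, core::f32::consts::FRAC_PI_4, 0.1, 100.0);
//     // configure the graphics API with compare = Greater and clear_depth = 0.0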
//pub fn tweaked_infinite_perspective<N: RealField>(fovy: N, aspect: N, near: N) -> TMat4<N> {
// unimplemented!()
//}
//
//pub fn tweaked_infinite_perspective_ep<N: RealField>(fovy: N, aspect: N, near: N, ep: N) -> TMat4<N> {
// unimplemented!()
//}
| 34.388684 | 105 | 0.611453 |
895707d1ea3ee23406c02c618cce94ebfbaeed06 | 5,424 | use std::{future::Future, pin::Pin};
use sqlx::{
mysql::{MySqlArguments, MySqlConnectOptions, MySqlQueryResult, MySqlRow},
MySql, MySqlPool,
};
sea_query::sea_query_driver_mysql!();
use sea_query_driver_mysql::bind_query;
use crate::{
debug_print, error::*, executor::*, ConnectOptions, DatabaseConnection, DatabaseTransaction,
QueryStream, Statement, TransactionError,
};
use super::sqlx_common::*;
#[derive(Debug)]
pub struct SqlxMySqlConnector;
#[derive(Debug, Clone)]
pub struct SqlxMySqlPoolConnection {
pool: MySqlPool,
}
impl SqlxMySqlConnector {
pub fn accepts(string: &str) -> bool {
string.starts_with("mysql://") && string.parse::<MySqlConnectOptions>().is_ok()
}
pub async fn connect(options: ConnectOptions) -> Result<DatabaseConnection, DbErr> {
let mut opt = options
.url
.parse::<MySqlConnectOptions>()
.map_err(|e| DbErr::Conn(e.to_string()))?;
if !options.sqlx_logging {
use sqlx::ConnectOptions;
opt.disable_statement_logging();
}
if let Ok(pool) = options.pool_options().connect_with(opt).await {
Ok(DatabaseConnection::SqlxMySqlPoolConnection(
SqlxMySqlPoolConnection { pool },
))
} else {
Err(DbErr::Conn("Failed to connect.".to_owned()))
}
}
}
impl SqlxMySqlConnector {
pub fn from_sqlx_mysql_pool(pool: MySqlPool) -> DatabaseConnection {
DatabaseConnection::SqlxMySqlPoolConnection(SqlxMySqlPoolConnection { pool })
}
}
impl SqlxMySqlPoolConnection {
pub async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
} else {
Err(DbErr::Exec(
"Failed to acquire connection from pool.".to_owned(),
))
}
}
pub async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
))
}
}
pub async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
))
}
}
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt);
if let Ok(conn) = self.pool.acquire().await {
Ok(QueryStream::from((conn, stmt)))
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
))
}
}
pub async fn begin(&self) -> Result<DatabaseTransaction, DbErr> {
if let Ok(conn) = self.pool.acquire().await {
DatabaseTransaction::new_mysql(conn).await
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
))
}
}
pub async fn transaction<F, T, E>(&self, callback: F) -> Result<T, TransactionError<E>>
where
F: for<'b> FnOnce(
&'b DatabaseTransaction,
) -> Pin<Box<dyn Future<Output = Result<T, E>> + Send + 'b>>
+ Send,
T: Send,
E: std::error::Error + Send,
{
if let Ok(conn) = self.pool.acquire().await {
let transaction = DatabaseTransaction::new_mysql(conn)
.await
                .map_err(TransactionError::Connection)?;
transaction.run(callback).await
} else {
Err(TransactionError::Connection(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
)))
}
}
}
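// Caller-side sketch (illustrative, not part of this file): `transaction` expects a closure
// returning a pinned, boxed future that borrows the transaction. `pool_conn` and `stmt` are
// assumed to exist in the caller's scope:
//
//     pool_conn
//         .transaction(|txn| {
//             Box::pin(async move {
//                 txn.execute(stmt).await?;
//                 Ok::<(), DbErr>(())
//             })
//         })
//         .await?;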
impl From<MySqlRow> for QueryResult {
fn from(row: MySqlRow) -> QueryResult {
QueryResult {
row: QueryResultRow::SqlxMySql(row),
}
}
}
impl From<MySqlQueryResult> for ExecResult {
fn from(result: MySqlQueryResult) -> ExecResult {
ExecResult {
result: ExecResultHolder::SqlxMySql(result),
}
}
}
pub(crate) fn sqlx_query(stmt: &Statement) -> sqlx::query::Query<'_, MySql, MySqlArguments> {
let mut query = sqlx::query(&stmt.sql);
if let Some(values) = &stmt.values {
query = bind_query(query, values);
}
query
}
| 30.994286 | 96 | 0.556785 |
0ebfae69cc80f5a3a099c505fc9c183464071f8f | 17,563 | #[doc = "Register `ADC12MCTL2` reader"]
pub struct R(crate::R<ADC12MCTL2_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<ADC12MCTL2_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<ADC12MCTL2_SPEC>> for R {
fn from(reader: crate::R<ADC12MCTL2_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `ADC12MCTL2` writer"]
pub struct W(crate::W<ADC12MCTL2_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<ADC12MCTL2_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<ADC12MCTL2_SPEC>> for W {
fn from(writer: crate::W<ADC12MCTL2_SPEC>) -> Self {
W(writer)
}
}
#[doc = "ADC12 Input Channel Select Bit 0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum ADC12INCH_A {
#[doc = "0: ADC12 Input Channel 0"]
ADC12INCH_0 = 0,
#[doc = "1: ADC12 Input Channel 1"]
ADC12INCH_1 = 1,
#[doc = "2: ADC12 Input Channel 2"]
ADC12INCH_2 = 2,
#[doc = "3: ADC12 Input Channel 3"]
ADC12INCH_3 = 3,
#[doc = "4: ADC12 Input Channel 4"]
ADC12INCH_4 = 4,
#[doc = "5: ADC12 Input Channel 5"]
ADC12INCH_5 = 5,
#[doc = "6: ADC12 Input Channel 6"]
ADC12INCH_6 = 6,
#[doc = "7: ADC12 Input Channel 7"]
ADC12INCH_7 = 7,
#[doc = "8: ADC12 Input Channel 8"]
ADC12INCH_8 = 8,
#[doc = "9: ADC12 Input Channel 9"]
ADC12INCH_9 = 9,
#[doc = "10: ADC12 Input Channel 10"]
ADC12INCH_10 = 10,
#[doc = "11: ADC12 Input Channel 11"]
ADC12INCH_11 = 11,
#[doc = "12: ADC12 Input Channel 12"]
ADC12INCH_12 = 12,
#[doc = "13: ADC12 Input Channel 13"]
ADC12INCH_13 = 13,
#[doc = "14: ADC12 Input Channel 14"]
ADC12INCH_14 = 14,
#[doc = "15: ADC12 Input Channel 15"]
ADC12INCH_15 = 15,
}
impl From<ADC12INCH_A> for u8 {
#[inline(always)]
fn from(variant: ADC12INCH_A) -> Self {
variant as _
}
}
#[doc = "Field `ADC12INCH` reader - ADC12 Input Channel Select Bit 0"]
pub struct ADC12INCH_R(crate::FieldReader<u8, ADC12INCH_A>);
impl ADC12INCH_R {
pub(crate) fn new(bits: u8) -> Self {
ADC12INCH_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ADC12INCH_A {
match self.bits {
0 => ADC12INCH_A::ADC12INCH_0,
1 => ADC12INCH_A::ADC12INCH_1,
2 => ADC12INCH_A::ADC12INCH_2,
3 => ADC12INCH_A::ADC12INCH_3,
4 => ADC12INCH_A::ADC12INCH_4,
5 => ADC12INCH_A::ADC12INCH_5,
6 => ADC12INCH_A::ADC12INCH_6,
7 => ADC12INCH_A::ADC12INCH_7,
8 => ADC12INCH_A::ADC12INCH_8,
9 => ADC12INCH_A::ADC12INCH_9,
10 => ADC12INCH_A::ADC12INCH_10,
11 => ADC12INCH_A::ADC12INCH_11,
12 => ADC12INCH_A::ADC12INCH_12,
13 => ADC12INCH_A::ADC12INCH_13,
14 => ADC12INCH_A::ADC12INCH_14,
15 => ADC12INCH_A::ADC12INCH_15,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ADC12INCH_0`"]
#[inline(always)]
pub fn is_adc12inch_0(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_0
}
#[doc = "Checks if the value of the field is `ADC12INCH_1`"]
#[inline(always)]
pub fn is_adc12inch_1(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_1
}
#[doc = "Checks if the value of the field is `ADC12INCH_2`"]
#[inline(always)]
pub fn is_adc12inch_2(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_2
}
#[doc = "Checks if the value of the field is `ADC12INCH_3`"]
#[inline(always)]
pub fn is_adc12inch_3(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_3
}
#[doc = "Checks if the value of the field is `ADC12INCH_4`"]
#[inline(always)]
pub fn is_adc12inch_4(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_4
}
#[doc = "Checks if the value of the field is `ADC12INCH_5`"]
#[inline(always)]
pub fn is_adc12inch_5(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_5
}
#[doc = "Checks if the value of the field is `ADC12INCH_6`"]
#[inline(always)]
pub fn is_adc12inch_6(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_6
}
#[doc = "Checks if the value of the field is `ADC12INCH_7`"]
#[inline(always)]
pub fn is_adc12inch_7(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_7
}
#[doc = "Checks if the value of the field is `ADC12INCH_8`"]
#[inline(always)]
pub fn is_adc12inch_8(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_8
}
#[doc = "Checks if the value of the field is `ADC12INCH_9`"]
#[inline(always)]
pub fn is_adc12inch_9(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_9
}
#[doc = "Checks if the value of the field is `ADC12INCH_10`"]
#[inline(always)]
pub fn is_adc12inch_10(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_10
}
#[doc = "Checks if the value of the field is `ADC12INCH_11`"]
#[inline(always)]
pub fn is_adc12inch_11(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_11
}
#[doc = "Checks if the value of the field is `ADC12INCH_12`"]
#[inline(always)]
pub fn is_adc12inch_12(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_12
}
#[doc = "Checks if the value of the field is `ADC12INCH_13`"]
#[inline(always)]
pub fn is_adc12inch_13(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_13
}
#[doc = "Checks if the value of the field is `ADC12INCH_14`"]
#[inline(always)]
pub fn is_adc12inch_14(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_14
}
#[doc = "Checks if the value of the field is `ADC12INCH_15`"]
#[inline(always)]
pub fn is_adc12inch_15(&self) -> bool {
**self == ADC12INCH_A::ADC12INCH_15
}
}
impl core::ops::Deref for ADC12INCH_R {
type Target = crate::FieldReader<u8, ADC12INCH_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ADC12INCH` writer - ADC12 Input Channel Select Bit 0"]
pub struct ADC12INCH_W<'a> {
w: &'a mut W,
}
impl<'a> ADC12INCH_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ADC12INCH_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "ADC12 Input Channel 0"]
#[inline(always)]
pub fn adc12inch_0(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_0)
}
#[doc = "ADC12 Input Channel 1"]
#[inline(always)]
pub fn adc12inch_1(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_1)
}
#[doc = "ADC12 Input Channel 2"]
#[inline(always)]
pub fn adc12inch_2(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_2)
}
#[doc = "ADC12 Input Channel 3"]
#[inline(always)]
pub fn adc12inch_3(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_3)
}
#[doc = "ADC12 Input Channel 4"]
#[inline(always)]
pub fn adc12inch_4(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_4)
}
#[doc = "ADC12 Input Channel 5"]
#[inline(always)]
pub fn adc12inch_5(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_5)
}
#[doc = "ADC12 Input Channel 6"]
#[inline(always)]
pub fn adc12inch_6(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_6)
}
#[doc = "ADC12 Input Channel 7"]
#[inline(always)]
pub fn adc12inch_7(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_7)
}
#[doc = "ADC12 Input Channel 8"]
#[inline(always)]
pub fn adc12inch_8(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_8)
}
#[doc = "ADC12 Input Channel 9"]
#[inline(always)]
pub fn adc12inch_9(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_9)
}
#[doc = "ADC12 Input Channel 10"]
#[inline(always)]
pub fn adc12inch_10(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_10)
}
#[doc = "ADC12 Input Channel 11"]
#[inline(always)]
pub fn adc12inch_11(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_11)
}
#[doc = "ADC12 Input Channel 12"]
#[inline(always)]
pub fn adc12inch_12(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_12)
}
#[doc = "ADC12 Input Channel 13"]
#[inline(always)]
pub fn adc12inch_13(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_13)
}
#[doc = "ADC12 Input Channel 14"]
#[inline(always)]
pub fn adc12inch_14(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_14)
}
#[doc = "ADC12 Input Channel 15"]
#[inline(always)]
pub fn adc12inch_15(self) -> &'a mut W {
self.variant(ADC12INCH_A::ADC12INCH_15)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u8 & 0x0f);
self.w
}
}
#[doc = "ADC12 Select Reference Bit 0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum ADC12SREF_A {
#[doc = "0: ADC12 Select Reference 0"]
ADC12SREF_0 = 0,
#[doc = "1: ADC12 Select Reference 1"]
ADC12SREF_1 = 1,
#[doc = "2: ADC12 Select Reference 2"]
ADC12SREF_2 = 2,
#[doc = "3: ADC12 Select Reference 3"]
ADC12SREF_3 = 3,
#[doc = "4: ADC12 Select Reference 4"]
ADC12SREF_4 = 4,
#[doc = "5: ADC12 Select Reference 5"]
ADC12SREF_5 = 5,
#[doc = "6: ADC12 Select Reference 6"]
ADC12SREF_6 = 6,
#[doc = "7: ADC12 Select Reference 7"]
ADC12SREF_7 = 7,
}
impl From<ADC12SREF_A> for u8 {
#[inline(always)]
fn from(variant: ADC12SREF_A) -> Self {
variant as _
}
}
#[doc = "Field `ADC12SREF` reader - ADC12 Select Reference Bit 0"]
pub struct ADC12SREF_R(crate::FieldReader<u8, ADC12SREF_A>);
impl ADC12SREF_R {
pub(crate) fn new(bits: u8) -> Self {
ADC12SREF_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ADC12SREF_A {
match self.bits {
0 => ADC12SREF_A::ADC12SREF_0,
1 => ADC12SREF_A::ADC12SREF_1,
2 => ADC12SREF_A::ADC12SREF_2,
3 => ADC12SREF_A::ADC12SREF_3,
4 => ADC12SREF_A::ADC12SREF_4,
5 => ADC12SREF_A::ADC12SREF_5,
6 => ADC12SREF_A::ADC12SREF_6,
7 => ADC12SREF_A::ADC12SREF_7,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ADC12SREF_0`"]
#[inline(always)]
pub fn is_adc12sref_0(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_0
}
#[doc = "Checks if the value of the field is `ADC12SREF_1`"]
#[inline(always)]
pub fn is_adc12sref_1(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_1
}
#[doc = "Checks if the value of the field is `ADC12SREF_2`"]
#[inline(always)]
pub fn is_adc12sref_2(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_2
}
#[doc = "Checks if the value of the field is `ADC12SREF_3`"]
#[inline(always)]
pub fn is_adc12sref_3(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_3
}
#[doc = "Checks if the value of the field is `ADC12SREF_4`"]
#[inline(always)]
pub fn is_adc12sref_4(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_4
}
#[doc = "Checks if the value of the field is `ADC12SREF_5`"]
#[inline(always)]
pub fn is_adc12sref_5(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_5
}
#[doc = "Checks if the value of the field is `ADC12SREF_6`"]
#[inline(always)]
pub fn is_adc12sref_6(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_6
}
#[doc = "Checks if the value of the field is `ADC12SREF_7`"]
#[inline(always)]
pub fn is_adc12sref_7(&self) -> bool {
**self == ADC12SREF_A::ADC12SREF_7
}
}
impl core::ops::Deref for ADC12SREF_R {
type Target = crate::FieldReader<u8, ADC12SREF_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ADC12SREF` writer - ADC12 Select Reference Bit 0"]
pub struct ADC12SREF_W<'a> {
w: &'a mut W,
}
impl<'a> ADC12SREF_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ADC12SREF_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "ADC12 Select Reference 0"]
#[inline(always)]
pub fn adc12sref_0(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_0)
}
#[doc = "ADC12 Select Reference 1"]
#[inline(always)]
pub fn adc12sref_1(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_1)
}
#[doc = "ADC12 Select Reference 2"]
#[inline(always)]
pub fn adc12sref_2(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_2)
}
#[doc = "ADC12 Select Reference 3"]
#[inline(always)]
pub fn adc12sref_3(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_3)
}
#[doc = "ADC12 Select Reference 4"]
#[inline(always)]
pub fn adc12sref_4(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_4)
}
#[doc = "ADC12 Select Reference 5"]
#[inline(always)]
pub fn adc12sref_5(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_5)
}
#[doc = "ADC12 Select Reference 6"]
#[inline(always)]
pub fn adc12sref_6(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_6)
}
#[doc = "ADC12 Select Reference 7"]
#[inline(always)]
pub fn adc12sref_7(self) -> &'a mut W {
self.variant(ADC12SREF_A::ADC12SREF_7)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 4)) | ((value as u8 & 0x07) << 4);
self.w
}
}
#[doc = "Field `ADC12EOS` reader - ADC12 End of Sequence"]
pub struct ADC12EOS_R(crate::FieldReader<bool, bool>);
impl ADC12EOS_R {
pub(crate) fn new(bits: bool) -> Self {
ADC12EOS_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ADC12EOS_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ADC12EOS` writer - ADC12 End of Sequence"]
pub struct ADC12EOS_W<'a> {
w: &'a mut W,
}
impl<'a> ADC12EOS_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u8 & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - ADC12 Input Channel Select Bit 0"]
#[inline(always)]
pub fn adc12inch(&self) -> ADC12INCH_R {
ADC12INCH_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bits 4:6 - ADC12 Select Reference Bit 0"]
#[inline(always)]
pub fn adc12sref(&self) -> ADC12SREF_R {
ADC12SREF_R::new(((self.bits >> 4) & 0x07) as u8)
}
#[doc = "Bit 7 - ADC12 End of Sequence"]
#[inline(always)]
pub fn adc12eos(&self) -> ADC12EOS_R {
ADC12EOS_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:3 - ADC12 Input Channel Select Bit 0"]
#[inline(always)]
pub fn adc12inch(&mut self) -> ADC12INCH_W {
ADC12INCH_W { w: self }
}
#[doc = "Bits 4:6 - ADC12 Select Reference Bit 0"]
#[inline(always)]
pub fn adc12sref(&mut self) -> ADC12SREF_W {
ADC12SREF_W { w: self }
}
#[doc = "Bit 7 - ADC12 End of Sequence"]
#[inline(always)]
pub fn adc12eos(&mut self) -> ADC12EOS_W {
ADC12EOS_W { w: self }
}
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "ADC12 Memory Control 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [adc12mctl2](index.html) module"]
pub struct ADC12MCTL2_SPEC;
impl crate::RegisterSpec for ADC12MCTL2_SPEC {
type Ux = u8;
}
#[doc = "`read()` method returns [adc12mctl2::R](R) reader structure"]
impl crate::Readable for ADC12MCTL2_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [adc12mctl2::W](W) writer structure"]
impl crate::Writable for ADC12MCTL2_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets ADC12MCTL2 to value 0"]
impl crate::Resettable for ADC12MCTL2_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
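// Illustrative use of the generated register API (added example, not part of the svd2rust
// output); `periph` stands for whatever handle the device crate exposes for this peripheral:
//
//     periph.adc12mctl2.modify(|_r, w| {
//         w.adc12inch().adc12inch_3().adc12sref().adc12sref_1().adc12eos().set_bit()
//     });
//     let channel = periph.adc12mctl2.read().adc12inch().bits();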
| 32.524074 | 413 | 0.589421 |
7919ed78e9c784ce85f5a86b0b981d2832431e41 | 1,782 | use aries_model::bounds::Lit;
use aries_model::Model;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use rand::prelude::SliceRandom;
use rand::rngs::StdRng;
use rand::SeedableRng;
#[inline]
fn entailment(xs: &[Lit], ys: &[Lit]) -> u64 {
let mut count = 0;
for &x in xs {
for &y in ys {
if x.entails(y) {
count += 1
}
}
}
count
}
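// Added note (not in the original benchmark): `x.entails(y)` should hold exactly when `x` is
// at least as strong a bound as `y` on the same variable, e.g. a literal encoding `v <= 3`
// entails one encoding `v <= 5`:
//
//     debug_assert!(Lit::leq(v, 3).entails(Lit::leq(v, 5))); // `v` is some integer variable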
pub fn criterion_benchmark(c: &mut Criterion) {
let mut rng = StdRng::seed_from_u64(2398248538438434234);
let mut model = Model::new();
let mut bounds = Vec::new();
for _ in 0..50 {
let var = model.new_ivar(0, 100, "");
for v in -20..20 {
bounds.push(Lit::leq(var, v));
bounds.push(Lit::geq(var, v));
}
}
bounds.shuffle(&mut rng);
c.bench_function("bounds-entail-many-vars", |b| {
b.iter(|| entailment(black_box(&bounds), black_box(&bounds)))
});
let mut bounds = Vec::new();
for _ in 0..5 {
let var = model.new_ivar(0, 100, "");
for v in -20..20 {
bounds.push(Lit::leq(var, v));
bounds.push(Lit::geq(var, v));
}
}
bounds.shuffle(&mut rng);
c.bench_function("bounds-entail-few-vars", |b| {
b.iter(|| entailment(black_box(&bounds), black_box(&bounds)))
});
let mut bounds = Vec::new();
let var = model.new_ivar(0, 100, "");
for v in -40..40 {
bounds.push(Lit::leq(var, v));
bounds.push(Lit::geq(var, v));
}
bounds.shuffle(&mut rng);
c.bench_function("bounds-entail-one-var", |b| {
b.iter(|| entailment(black_box(&bounds), black_box(&bounds)))
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| 24.410959 | 71 | 0.5578 |
d9b389461979777fa7fb66e8202f2c82f90bc4be | 2,733 | use hardware::i2c::connect;
use hardware::i2c::mpu6050::raw_data::{AccelFullScale, GyroFullScale};
use hardware::i2c::mpu6050::{ADDRESS_LOW, MPU6050};
use hardware::i2c::register_io::I2cWithAddr;
use linux_embedded_hal::Delay;
use num_traits::FromPrimitive;
use std::io::{stdout, Result as IOResult};
use std::result::Result;
use crossterm::*;
use ctrlc;
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
struct Args {
#[structopt(long, default_value = "0", parse(try_from_str = parse_accel))]
accel: AccelFullScale,
#[structopt(long, default_value = "0", parse(try_from_str = parse_gyro))]
gyro: GyroFullScale,
}
fn parse_accel(src: &str) -> Result<AccelFullScale, String> {
from_u8(src, |v| {
AccelFullScale::from_u8(v)
.ok_or_else(|| format!("Accel Full Scale Selector is out of range [0-3]: {}", v))
})
}
fn parse_gyro(src: &str) -> Result<GyroFullScale, String> {
from_u8(src, |v| {
GyroFullScale::from_u8(v)
.ok_or_else(|| format!("Gyro Full Scale Selector is out of range [0-3]: {}", v))
})
}
fn main() {
let args = Args::from_args();
run_loop(args.accel, args.gyro).unwrap();
}
fn run_loop(accel_fs: AccelFullScale, gyro_fs: GyroFullScale) -> IOResult<()> {
let dev = connect(1)?;
let mut mpu = MPU6050::new(I2cWithAddr::new(dev, ADDRESS_LOW)).unwrap();
mpu.normal_setup(&mut Delay).unwrap();
mpu.set_gyro_full_scale(gyro_fs).unwrap();
mpu.set_accel_full_scale(accel_fs).unwrap();
ctrlc::set_handler(|| {
execute!(
stdout(),
terminal::Clear(terminal::ClearType::All),
cursor::Show,
cursor::MoveTo(0, 0),
)
.unwrap();
std::process::exit(0);
})
.unwrap();
execute!(
stdout(),
cursor::Hide,
terminal::Clear(terminal::ClearType::All),
)?;
loop {
let info = mpu.get_infos().unwrap();
let accel_info = info.accel.scale::<f64>(accel_fs);
let gyro_info = info.gyro.scale::<f64>(gyro_fs);
execute!(
stdout(),
style::Print(format!("{:?}\n", info.accel)),
style::Print(format!("{:?}\n", info.gyro)),
cursor::MoveDown(1),
style::Print(format!("{:?}\n", accel_info)),
style::Print(format!("{:?}\n", gyro_info)),
cursor::MoveUp(5),
)?;
std::thread::sleep(std::time::Duration::from_millis(500));
}
}
// ----------------------------------------------------------------
fn from_u8<T, F>(src: &str, f: F) -> Result<T, String>
where
F: Fn(u8) -> Result<T, String>,
{
u8::from_str_radix(src, 10)
.map_err(|e| e.to_string())
.and_then(f)
}
| 28.46875 | 93 | 0.575558 |
082e8321e18a6e879abd50d7e87ffe8437a8cfb4 | 759 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_variables)]
use foo::{x, y as fooy};
use Maybe::{Yes as MaybeYes};
pub enum Maybe { Yes, No }
mod foo {
use super::Maybe::{self as MaybeFoo};
pub fn x(a: MaybeFoo) {}
pub fn y(a: i32) { println!("{}", a); }
}
pub fn main() { x(MaybeYes); fooy(10); }
| 31.625 | 68 | 0.682477 |
0ae53377f7048bb560c10b05063afdebb20524f6 | 4,918 | use super::session::{Error as SessionError, Session};
use crate::app::App;
use crate::item::{Action, Field, Item};
use crate::provider::Provider;
use crate::rofi::RofiWindow;
use anyhow::Result;
use keyring::Keyring;
use serde::Deserialize;
use std::collections::HashMap;
#[serde(rename_all = "camelCase")]
#[derive(Deserialize, Debug)]
pub struct Config {
cache: bool,
}
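// Illustrative provider configuration (assumption, not from the original source): this struct
// is deserialized from the JSON value passed to `Bitwarden::new` below, e.g. `{"cache": true}`:
//
//     let config: Config = serde_json::from_value(serde_json::json!({ "cache": true })).unwrap();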
pub struct Bitwarden {
id: String,
session: Option<Session>,
config: Config,
}
impl Bitwarden {
pub fn new(app: &App, id: &str, config: serde_json::Value) -> Box<dyn Provider> {
let config: Config = serde_json::from_value(config).unwrap();
Box::new(Self {
config,
id: id.to_owned(),
session: None,
})
}
fn get_session(&mut self) -> Result<&Session> {
if self.session.is_some() {
Ok(self.session.as_ref().unwrap())
} else {
self.open_session()
}
}
fn open_session(&mut self) -> Result<&Session> {
let keyring = Keyring::new("bitwarden_rofi", "BW_SESSION");
let session = match keyring.get_password() {
Ok(key) => {
let session = Session::open(&key);
match session.is_unlocked() {
Ok(true) => Some(session),
Ok(false) => {
eprintln!("bitwarden: Session key is not valid");
None
}
Err(err) => match err.downcast_ref::<SessionError>() {
Some(SessionError::FailedToDecrypt) => {
eprintln!("bitwarden: Failed to decrypt");
None
}
_ => return Err(err),
},
}
}
Err(_) => None,
};
session
.map(Ok)
.unwrap_or_else(|| {
let password = RofiWindow::new("Enter master password")
.add_args(vec!["-dmenu"])
.password(true)
.lines(0)
.show(vec![])?
.entry()?;
let session = Session::unlock(&password)?;
keyring.set_password(&session.key).unwrap_or_else(|err| {
eprintln!("bitwarden: Failed to put session key in keyring: {}", err);
});
Ok(session)
})
.map(move |session| {
self.session = Some(session);
self.session.as_ref().unwrap()
})
}
fn lock(&mut self) -> Result<()> {
// TODO: this can open a session if it wasn't...
self.get_session()?.lock()?;
let keyring = Keyring::new("bitwarden_rofi", "BW_SESSION");
keyring.delete_password().unwrap_or_else(|err| {
eprintln!("Deleting entry from keyring failed: {}", err);
});
Ok(())
}
fn sync(&mut self) -> Result<()> {
self.get_session()?.sync()
}
}
impl Provider for Bitwarden {
fn list_items(&mut self) -> Result<Vec<Item>> {
let mut folders = HashMap::new();
let session = self.get_session()?;
for f in session.list_folders()?.into_iter() {
folders.insert(f.id.clone(), f);
}
let mut items: Vec<Item> = vec![];
for i in session.list_items()?.into_iter() {
let mut path = match folders.get(&i.folder_id) {
None => vec![],
_ if i.folder_id.is_none() => vec![],
Some(folder) => folder.name.split("/").map(|s| s.to_string()).collect(),
};
path.push(i.name);
let title = path.join("/");
let mut fields = vec![];
if let Some(login) = i.login {
if login.username.is_some() {
fields.push(Field::Username);
}
if login.password.is_some() {
fields.push(Field::Password);
}
if login.totp.is_some() {
fields.push(Field::Totp);
}
}
let item = Item {
id: i.id,
title,
fields,
};
items.push(item);
}
Ok(items)
}
fn read_field(&mut self, item: &Item, field: &Field) -> Result<String> {
let field_name = match field {
Field::Username => "username",
Field::Password => "password",
Field::Totp => "totp",
Field::Other(name) => &name,
};
let session = self.get_session()?;
session.read_field(&item.id, field_name)
}
fn list_actions(&mut self) -> Result<Vec<Action>> {
Ok(vec![])
}
fn do_action(&mut self, action: &Action) {}
}
| 29.27381 | 90 | 0.471126 |
62e90d7a6e22b0f105b51812c5a42e59aad350b4 | 1,250 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(associated_consts)]
struct Foo;
enum Bar {
Var1,
Var2,
}
// Use inherent and trait impls to test UFCS syntax.
impl Foo {
const MYBAR: Bar = Bar::Var2;
}
trait HasBar {
const THEBAR: Bar;
}
impl HasBar for Foo {
const THEBAR: Bar = Bar::Var1;
}
fn main() {
// Inherent impl
assert!(match Bar::Var2 {
Foo::MYBAR => true,
_ => false,
});
assert!(match Bar::Var2 {
<Foo>::MYBAR => true,
_ => false,
});
// Trait impl
assert!(match Bar::Var1 {
Foo::THEBAR => true,
_ => false,
});
assert!(match Bar::Var1 {
<Foo>::THEBAR => true,
_ => false,
});
assert!(match Bar::Var1 {
<Foo as HasBar>::THEBAR => true,
_ => false,
});
}
| 21.929825 | 68 | 0.5944 |
14b420421e7eff8930091331ede893c0b46948fd | 2,451 | use syntax::{
ast::{self, AttrsOwner},
AstNode, AstToken,
};
use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists};
// Assist: toggle_ignore
//
// Adds `#[ignore]` attribute to the test.
//
// ```
// <|>#[test]
// fn arithmetics() {
// assert_eq!(2 + 2, 5);
// }
// ```
// ->
// ```
// #[test]
// #[ignore]
// fn arithmetics() {
// assert_eq!(2 + 2, 5);
// }
// ```
pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let attr: ast::Attr = ctx.find_node_at_offset()?;
let func = attr.syntax().parent().and_then(ast::Fn::cast)?;
let attr = test_related_attribute(&func)?;
match has_ignore_attribute(&func) {
None => acc.add(
AssistId("toggle_ignore", AssistKind::None),
"Ignore this test",
attr.syntax().text_range(),
            |builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"),
),
Some(ignore_attr) => acc.add(
AssistId("toggle_ignore", AssistKind::None),
"Re-enable this test",
ignore_attr.syntax().text_range(),
|builder| {
builder.delete(ignore_attr.syntax().text_range());
let whitespace = ignore_attr
.syntax()
.next_sibling_or_token()
.and_then(|x| x.into_token())
.and_then(ast::Whitespace::cast);
if let Some(whitespace) = whitespace {
builder.delete(whitespace.syntax().text_range());
}
},
),
}
}
fn has_ignore_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
fn_def.attrs().find(|attr| attr.path().map(|it| it.syntax().text() == "ignore") == Some(true))
}
#[cfg(test)]
mod tests {
use crate::tests::check_assist;
use super::*;
#[test]
fn test_base_case() {
check_assist(
toggle_ignore,
r#"
#[test<|>]
fn test() {}
"#,
r#"
#[test]
#[ignore]
fn test() {}
"#,
)
}
#[test]
fn test_unignore() {
check_assist(
toggle_ignore,
r#"
#[test<|>]
#[ignore]
fn test() {}
"#,
r#"
#[test]
fn test() {}
"#,
)
}
}
| 24.757576 | 98 | 0.4745 |
e4fa112ad717ec0c961da1bc4299661acc56d3f4 | 1,245 | #[doc = "Reader of register IOPAD_SR_0"]
pub type R = crate::R<u32, super::IOPAD_SR_0>;
#[doc = "Writer for register IOPAD_SR_0"]
pub type W = crate::W<u32, super::IOPAD_SR_0>;
#[doc = "Register IOPAD_SR_0 `reset()`'s with value 0xffff_ffff"]
impl crate::ResetValue for super::IOPAD_SR_0 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0xffff_ffff
}
}
#[doc = "Reader of field `slew_rate`"]
pub type SLEW_RATE_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `slew_rate`"]
pub struct SLEW_RATE_W<'a> {
w: &'a mut W,
}
impl<'a> SLEW_RATE_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:31 - Selects the slew rate of test chip I/O PA31-PA0"]
#[inline(always)]
pub fn slew_rate(&self) -> SLEW_RATE_R {
SLEW_RATE_R::new((self.bits & 0xffff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:31 - Selects the slew rate of test chip I/O PA31-PA0"]
#[inline(always)]
pub fn slew_rate(&mut self) -> SLEW_RATE_W {
SLEW_RATE_W { w: self }
}
}
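// Illustrative use of the generated accessors (added example, not part of the svd2rust
// output); `periph` is a placeholder for the device crate's peripheral handle:
//
//     periph.iopad_sr_0.write(|w| unsafe { w.slew_rate().bits(0xffff_0000) });
//     let current = periph.iopad_sr_0.read().slew_rate().bits();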
| 30.365854 | 84 | 0.610442 |
dbc8e59a408c9f46e814e250cc8cc63bcd980ede | 8,904 | // This file is generated by rust-protobuf 2.2.0. Do not edit
// @generated
// https://github.com/Manishearth/rust-clippy/issues/702
use protobuf::Message as Message_imported_for_functions;
use protobuf::ProtobufEnum as ProtobufEnum_imported_for_functions;
#[derive(PartialEq,Clone,Default)]
pub struct VersionDef {
// message fields
pub producer: i32,
pub min_consumer: i32,
pub bad_consumers: ::std::vec::Vec<i32>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl VersionDef {
pub fn new() -> VersionDef {
::std::default::Default::default()
}
// int32 producer = 1;
pub fn clear_producer(&mut self) {
self.producer = 0;
}
// Param is passed by value, moved
pub fn set_producer(&mut self, v: i32) {
self.producer = v;
}
pub fn get_producer(&self) -> i32 {
self.producer
}
// int32 min_consumer = 2;
pub fn clear_min_consumer(&mut self) {
self.min_consumer = 0;
}
// Param is passed by value, moved
pub fn set_min_consumer(&mut self, v: i32) {
self.min_consumer = v;
}
pub fn get_min_consumer(&self) -> i32 {
self.min_consumer
}
// repeated int32 bad_consumers = 3;
pub fn clear_bad_consumers(&mut self) {
self.bad_consumers.clear();
}
// Param is passed by value, moved
pub fn set_bad_consumers(&mut self, v: ::std::vec::Vec<i32>) {
self.bad_consumers = v;
}
// Mutable pointer to the field.
pub fn mut_bad_consumers(&mut self) -> &mut ::std::vec::Vec<i32> {
&mut self.bad_consumers
}
// Take field
pub fn take_bad_consumers(&mut self) -> ::std::vec::Vec<i32> {
::std::mem::replace(&mut self.bad_consumers, ::std::vec::Vec::new())
}
pub fn get_bad_consumers(&self) -> &[i32] {
&self.bad_consumers
}
}
impl ::protobuf::Message for VersionDef {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int32()?;
self.producer = tmp;
},
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int32()?;
self.min_consumer = tmp;
},
3 => {
::protobuf::rt::read_repeated_int32_into(wire_type, is, &mut self.bad_consumers)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.producer != 0 {
my_size += ::protobuf::rt::value_size(1, self.producer, ::protobuf::wire_format::WireTypeVarint);
}
if self.min_consumer != 0 {
my_size += ::protobuf::rt::value_size(2, self.min_consumer, ::protobuf::wire_format::WireTypeVarint);
}
for value in &self.bad_consumers {
my_size += ::protobuf::rt::value_size(3, *value, ::protobuf::wire_format::WireTypeVarint);
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {
if self.producer != 0 {
os.write_int32(1, self.producer)?;
}
if self.min_consumer != 0 {
os.write_int32(2, self.min_consumer)?;
}
for v in &self.bad_consumers {
os.write_int32(3, *v)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> VersionDef {
VersionDef::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,
};
unsafe {
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"producer",
|m: &VersionDef| { &m.producer },
|m: &mut VersionDef| { &mut m.producer },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"min_consumer",
|m: &VersionDef| { &m.min_consumer },
|m: &mut VersionDef| { &mut m.min_consumer },
));
fields.push(::protobuf::reflect::accessor::make_vec_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"bad_consumers",
|m: &VersionDef| { &m.bad_consumers },
|m: &mut VersionDef| { &mut m.bad_consumers },
));
::protobuf::reflect::MessageDescriptor::new::<VersionDef>(
"VersionDef",
fields,
file_descriptor_proto()
)
})
}
}
fn default_instance() -> &'static VersionDef {
static mut instance: ::protobuf::lazy::Lazy<VersionDef> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const VersionDef,
};
unsafe {
instance.get(VersionDef::new)
}
}
}
impl ::protobuf::Clear for VersionDef {
fn clear(&mut self) {
self.clear_producer();
self.clear_min_consumer();
self.clear_bad_consumers();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for VersionDef {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for VersionDef {
fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
::protobuf::reflect::ProtobufValueRef::Message(self)
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n(tensorflow/core/framework/versions.proto\x12\ntensorflow\"p\n\nVersio\
nDef\x12\x1a\n\x08producer\x18\x01\x20\x01(\x05R\x08producer\x12!\n\x0cm\
in_consumer\x18\x02\x20\x01(\x05R\x0bminConsumer\x12#\n\rbad_consumers\
\x18\x03\x20\x03(\x05R\x0cbadConsumersBn\n\x18org.tensorflow.frameworkB\
\x0eVersionsProtosP\x01Z=github.com/tensorflow/tensorflow/tensorflow/go/\
core/framework\xf8\x01\x01b\x06proto3\
";
static mut file_descriptor_proto_lazy: ::protobuf::lazy::Lazy<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::descriptor::FileDescriptorProto,
};
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
unsafe {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
}
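// Illustrative only (not part of the generated code above): a minimal sketch of building
// a VersionDef and serializing it through rust-protobuf's ::protobuf::Message trait.
// The helper name and the field values are made up for the example; fields are assigned
// directly, which works here because the sketch lives in the same module as VersionDef.
#[allow(dead_code)]
fn example_version_def_bytes() -> Vec<u8> {
    use ::protobuf::Message;
    let mut version = VersionDef::new();
    version.producer = 27;
    version.min_consumer = 12;
    version.bad_consumers.push(20);
    // write_to_bytes is supplied by the Message trait implemented for VersionDef above.
    version.write_to_bytes().expect("VersionDef serialization should not fail")
}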
| 33.223881 | 133 | 0.572327 |
18b79a113540cc6245a81bee2ae9db491de77dd2 | 4,679 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use anyhow::format_err;
use fidl_fuchsia_lowpan_test::{
MacAddressFilterItem, MacAddressFilterMode, MacAddressFilterSettings,
};
use serde::{Deserialize, Serialize};
/// Supported Wpan commands.
pub enum WpanMethod {
GetIsCommissioned,
GetMacAddressFilterSettings,
GetNcpChannel,
GetNcpMacAddress,
GetNcpRssi,
GetNcpState,
GetNetworkName,
GetPanId,
GetPartitionId,
GetThreadRloc16,
GetThreadRouterId,
GetWeaveNodeId,
InitializeProxies,
ReplaceMacAddressFilterSettings,
}
impl std::str::FromStr for WpanMethod {
type Err = anyhow::Error;
fn from_str(method: &str) -> Result<Self, Self::Err> {
match method {
"GetIsCommissioned" => Ok(WpanMethod::GetIsCommissioned),
"GetMacAddressFilterSettings" => Ok(WpanMethod::GetMacAddressFilterSettings),
"GetNcpChannel" => Ok(WpanMethod::GetNcpChannel),
"GetNcpMacAddress" => Ok(WpanMethod::GetNcpMacAddress),
"GetNcpRssi" => Ok(WpanMethod::GetNcpRssi),
"GetNcpState" => Ok(WpanMethod::GetNcpState),
"GetNetworkName" => Ok(WpanMethod::GetNetworkName),
"GetPanId" => Ok(WpanMethod::GetPanId),
"GetPartitionId" => Ok(WpanMethod::GetPartitionId),
"GetThreadRloc16" => Ok(WpanMethod::GetThreadRloc16),
"GetThreadRouterId" => Ok(WpanMethod::GetThreadRouterId),
"GetWeaveNodeId" => Ok(WpanMethod::GetWeaveNodeId),
"InitializeProxies" => Ok(WpanMethod::InitializeProxies),
"ReplaceMacAddressFilterSettings" => Ok(WpanMethod::ReplaceMacAddressFilterSettings),
            _ => Err(format_err!("invalid Wpan FIDL method: {}", method)),
}
}
}
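// Illustrative only (not part of the original facade code): a small test sketch exercising
// the FromStr impl above. The module and test names here are made up for the example.
#[cfg(test)]
mod wpan_method_parse_tests {
    use super::*;
    use std::str::FromStr;

    #[test]
    fn parses_known_and_rejects_unknown_methods() {
        // Strings listed in from_str parse successfully...
        assert!(WpanMethod::from_str("GetNcpState").is_ok());
        assert!(WpanMethod::from_str("ReplaceMacAddressFilterSettings").is_ok());
        // ...while anything else is reported as an invalid WPAN FIDL method.
        assert!(WpanMethod::from_str("NotAWpanMethod").is_err());
    }
}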
/// Serializable LoWPAN connectivity states reported by the WPAN facade.
#[derive(Serialize)]
pub enum ConnectivityState {
Inactive,
Ready,
Offline,
Attaching,
Attached,
Isolated,
Commissioning,
}
/// Serde-friendly mirror of `MacAddressFilterItem` used for JSON (de)serialization.
#[derive(Serialize, Deserialize)]
pub struct MacAddressFilterItemDto {
pub mac_address: Option<Vec<u8>>,
pub rssi: Option<i8>,
}
/// Serde-friendly mirror of `MacAddressFilterSettings`.
#[derive(Serialize, Deserialize)]
pub struct MacAddressFilterSettingsDto {
pub items: Option<Vec<MacAddressFilterItemDto>>,
pub mode: Option<MacAddressFilterModeDto>,
}
/// Serde-friendly mirror of `MacAddressFilterMode`.
#[derive(Serialize, Deserialize)]
pub enum MacAddressFilterModeDto {
Disabled = 0,
Allow = 1,
Deny = 2,
}
impl Into<MacAddressFilterItemDto> for MacAddressFilterItem {
fn into(self) -> MacAddressFilterItemDto {
MacAddressFilterItemDto { mac_address: self.mac_address, rssi: self.rssi }
}
}
impl Into<MacAddressFilterItem> for MacAddressFilterItemDto {
fn into(self) -> MacAddressFilterItem {
MacAddressFilterItem {
mac_address: self.mac_address,
rssi: self.rssi,
..MacAddressFilterItem::EMPTY
}
}
}
impl Into<MacAddressFilterSettings> for MacAddressFilterSettingsDto {
fn into(self) -> MacAddressFilterSettings {
MacAddressFilterSettings {
mode: match self.mode {
Some(mode) => Some(mode.into()),
None => None,
},
items: match self.items {
Some(items) => Some(items.into_iter().map(|x| x.into()).collect()),
None => None,
},
..MacAddressFilterSettings::EMPTY
}
}
}
impl Into<MacAddressFilterSettingsDto> for MacAddressFilterSettings {
fn into(self) -> MacAddressFilterSettingsDto {
MacAddressFilterSettingsDto {
mode: match self.mode {
Some(mode) => Some(mode.into()),
None => None,
},
items: match self.items {
Some(items) => Some(items.into_iter().map(|x| x.into()).collect()),
None => None,
},
}
}
}
impl Into<MacAddressFilterModeDto> for MacAddressFilterMode {
fn into(self) -> MacAddressFilterModeDto {
match self {
MacAddressFilterMode::Disabled => MacAddressFilterModeDto::Disabled,
MacAddressFilterMode::Allow => MacAddressFilterModeDto::Allow,
MacAddressFilterMode::Deny => MacAddressFilterModeDto::Deny,
}
}
}
impl Into<MacAddressFilterMode> for MacAddressFilterModeDto {
fn into(self) -> MacAddressFilterMode {
match self {
MacAddressFilterModeDto::Disabled => MacAddressFilterMode::Disabled,
MacAddressFilterModeDto::Allow => MacAddressFilterMode::Allow,
MacAddressFilterModeDto::Deny => MacAddressFilterMode::Deny,
}
}
}
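// Illustrative only: a hedged sketch of how the serde-facing DTO types above convert into
// the FIDL MacAddressFilterSettings via their Into impls. The helper name, MAC address,
// and RSSI value are made up for the example.
#[allow(dead_code)]
fn example_filter_settings() -> MacAddressFilterSettings {
    let dto = MacAddressFilterSettingsDto {
        mode: Some(MacAddressFilterModeDto::Allow),
        items: Some(vec![MacAddressFilterItemDto {
            mac_address: Some(vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77]),
            rssi: Some(-40),
        }]),
    };
    // The Into impl maps each DTO field onto the FIDL table, leaving the rest as EMPTY.
    dto.into()
}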
| 32.047945 | 97 | 0.642659 |
ed7681741df40356c9cfa33e00941e8461aa80ac | 95,717 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use cairo;
use gdk;
use gio;
use glib;
use glib::object::Cast;
use glib::object::IsA;
use glib::object::ObjectExt;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::GString;
use glib::StaticType;
use glib::Value;
use glib_sys;
use gobject_sys;
use graphene;
use gsk;
use gtk_sys;
use libc;
use pango;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem;
use std::mem::transmute;
use Accessible;
use Align;
use Allocation;
use Buildable;
use ConstraintTarget;
use DirectionType;
use EventController;
use LayoutManager;
use Native;
use Orientation;
use Overflow;
use PickFlags;
use Requisition;
use Root;
use Settings;
use SizeRequestMode;
use Snapshot;
use StateFlags;
use StyleContext;
use TextDirection;
use Tooltip;
glib_wrapper! {
pub struct Widget(Object<gtk_sys::GtkWidget, gtk_sys::GtkWidgetClass>) @implements Accessible, Buildable, ConstraintTarget;
match fn {
get_type => || gtk_sys::gtk_widget_get_type(),
}
}
impl Widget {
pub fn get_default_direction() -> TextDirection {
assert_initialized_main_thread!();
unsafe { from_glib(gtk_sys::gtk_widget_get_default_direction()) }
}
pub fn set_default_direction(dir: TextDirection) {
assert_initialized_main_thread!();
unsafe {
gtk_sys::gtk_widget_set_default_direction(dir.to_glib());
}
}
}
pub const NONE_WIDGET: Option<&Widget> = None;
pub trait WidgetExt: 'static {
fn action_set_enabled(&self, action_name: &str, enabled: bool);
fn activate(&self) -> bool;
//fn activate_action(&self, name: &str, format_string: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> bool;
fn activate_action_variant(&self, name: &str, args: Option<&glib::Variant>) -> bool;
fn activate_default(&self);
fn add_controller<P: IsA<EventController>>(&self, controller: &P);
fn add_css_class(&self, css_class: &str);
fn add_mnemonic_label<P: IsA<Widget>>(&self, label: &P);
fn allocate(&self, width: i32, height: i32, baseline: i32, transform: Option<&gsk::Transform>);
fn child_focus(&self, direction: DirectionType) -> bool;
fn compute_bounds<P: IsA<Widget>>(&self, target: &P) -> Option<graphene::Rect>;
fn compute_expand(&self, orientation: Orientation) -> bool;
fn compute_point<P: IsA<Widget>>(
&self,
target: &P,
point: &graphene::Point,
) -> Option<graphene::Point>;
fn compute_transform<P: IsA<Widget>>(&self, target: &P) -> Option<graphene::Matrix>;
fn contains(&self, x: f64, y: f64) -> bool;
fn create_pango_context(&self) -> Option<pango::Context>;
fn create_pango_layout(&self, text: Option<&str>) -> Option<pango::Layout>;
fn drag_check_threshold(
&self,
start_x: i32,
start_y: i32,
current_x: i32,
current_y: i32,
) -> bool;
fn error_bell(&self);
fn get_allocated_baseline(&self) -> i32;
fn get_allocated_height(&self) -> i32;
fn get_allocated_width(&self) -> i32;
fn get_allocation(&self) -> Allocation;
fn get_ancestor(&self, widget_type: glib::types::Type) -> Option<Widget>;
fn get_can_focus(&self) -> bool;
fn get_can_target(&self) -> bool;
fn get_child_visible(&self) -> bool;
fn get_clipboard(&self) -> gdk::Clipboard;
fn get_css_classes(&self) -> Vec<GString>;
fn get_css_name(&self) -> Option<GString>;
fn get_cursor(&self) -> Option<gdk::Cursor>;
fn get_direction(&self) -> TextDirection;
fn get_display(&self) -> Option<gdk::Display>;
fn get_first_child(&self) -> Option<Widget>;
fn get_focus_child(&self) -> Option<Widget>;
fn get_focus_on_click(&self) -> bool;
fn get_focusable(&self) -> bool;
fn get_font_map(&self) -> Option<pango::FontMap>;
fn get_font_options(&self) -> Option<cairo::FontOptions>;
fn get_frame_clock(&self) -> Option<gdk::FrameClock>;
fn get_halign(&self) -> Align;
fn get_has_tooltip(&self) -> bool;
fn get_height(&self) -> i32;
fn get_hexpand(&self) -> bool;
fn get_hexpand_set(&self) -> bool;
fn get_last_child(&self) -> Option<Widget>;
fn get_layout_manager(&self) -> Option<LayoutManager>;
fn get_mapped(&self) -> bool;
fn get_margin_bottom(&self) -> i32;
fn get_margin_end(&self) -> i32;
fn get_margin_start(&self) -> i32;
fn get_margin_top(&self) -> i32;
fn get_name(&self) -> Option<GString>;
fn get_native(&self) -> Option<Native>;
fn get_next_sibling(&self) -> Option<Widget>;
fn get_opacity(&self) -> f64;
fn get_overflow(&self) -> Overflow;
fn get_pango_context(&self) -> Option<pango::Context>;
fn get_parent(&self) -> Option<Widget>;
fn get_preferred_size(&self) -> (Requisition, Requisition);
fn get_prev_sibling(&self) -> Option<Widget>;
fn get_primary_clipboard(&self) -> Option<gdk::Clipboard>;
fn get_realized(&self) -> bool;
fn get_receives_default(&self) -> bool;
fn get_request_mode(&self) -> SizeRequestMode;
fn get_root(&self) -> Option<Root>;
fn get_scale_factor(&self) -> i32;
fn get_sensitive(&self) -> bool;
fn get_settings(&self) -> Option<Settings>;
fn get_size(&self, orientation: Orientation) -> i32;
fn get_size_request(&self) -> (i32, i32);
fn get_state_flags(&self) -> StateFlags;
fn get_style_context(&self) -> StyleContext;
fn get_template_child(
&self,
widget_type: glib::types::Type,
name: &str,
) -> Option<glib::Object>;
fn get_tooltip_markup(&self) -> Option<GString>;
fn get_tooltip_text(&self) -> Option<GString>;
fn get_valign(&self) -> Align;
fn get_vexpand(&self) -> bool;
fn get_vexpand_set(&self) -> bool;
fn get_visible(&self) -> bool;
fn get_width(&self) -> i32;
fn grab_focus(&self) -> bool;
fn has_css_class(&self, css_class: &str) -> bool;
fn has_default(&self) -> bool;
fn has_focus(&self) -> bool;
fn has_visible_focus(&self) -> bool;
fn hide(&self);
fn in_destruction(&self) -> bool;
fn init_template(&self);
fn insert_action_group<P: IsA<gio::ActionGroup>>(&self, name: &str, group: Option<&P>);
fn insert_after<P: IsA<Widget>, Q: IsA<Widget>>(
&self,
parent: &P,
previous_sibling: Option<&Q>,
);
fn insert_before<P: IsA<Widget>, Q: IsA<Widget>>(&self, parent: &P, next_sibling: Option<&Q>);
fn is_ancestor<P: IsA<Widget>>(&self, ancestor: &P) -> bool;
fn is_drawable(&self) -> bool;
fn is_focus(&self) -> bool;
fn is_sensitive(&self) -> bool;
fn is_visible(&self) -> bool;
fn keynav_failed(&self, direction: DirectionType) -> bool;
fn list_mnemonic_labels(&self) -> Vec<Widget>;
fn map(&self);
fn measure(&self, orientation: Orientation, for_size: i32) -> (i32, i32, i32, i32);
fn mnemonic_activate(&self, group_cycling: bool) -> bool;
fn observe_children(&self) -> Option<gio::ListModel>;
fn observe_controllers(&self) -> Option<gio::ListModel>;
fn pick(&self, x: f64, y: f64, flags: PickFlags) -> Option<Widget>;
fn queue_allocate(&self);
fn queue_draw(&self);
fn queue_resize(&self);
fn realize(&self);
fn remove_controller<P: IsA<EventController>>(&self, controller: &P);
fn remove_css_class(&self, css_class: &str);
fn remove_mnemonic_label<P: IsA<Widget>>(&self, label: &P);
fn set_can_focus(&self, can_focus: bool);
fn set_can_target(&self, can_target: bool);
fn set_child_visible(&self, child_visible: bool);
fn set_css_classes(&self, classes: &[&str]);
fn set_cursor(&self, cursor: Option<&gdk::Cursor>);
fn set_cursor_from_name(&self, name: Option<&str>);
fn set_direction(&self, dir: TextDirection);
fn set_focus_child<P: IsA<Widget>>(&self, child: Option<&P>);
fn set_focus_on_click(&self, focus_on_click: bool);
fn set_focusable(&self, focusable: bool);
fn set_font_map<P: IsA<pango::FontMap>>(&self, font_map: Option<&P>);
fn set_font_options(&self, options: Option<&cairo::FontOptions>);
fn set_halign(&self, align: Align);
fn set_has_tooltip(&self, has_tooltip: bool);
fn set_hexpand(&self, expand: bool);
fn set_hexpand_set(&self, set: bool);
fn set_layout_manager<P: IsA<LayoutManager>>(&self, layout_manager: Option<&P>);
fn set_margin_bottom(&self, margin: i32);
fn set_margin_end(&self, margin: i32);
fn set_margin_start(&self, margin: i32);
fn set_margin_top(&self, margin: i32);
fn set_name(&self, name: &str);
fn set_opacity(&self, opacity: f64);
fn set_overflow(&self, overflow: Overflow);
fn set_parent<P: IsA<Widget>>(&self, parent: &P);
fn set_receives_default(&self, receives_default: bool);
fn set_sensitive(&self, sensitive: bool);
fn set_size_request(&self, width: i32, height: i32);
fn set_state_flags(&self, flags: StateFlags, clear: bool);
fn set_tooltip_markup(&self, markup: Option<&str>);
fn set_tooltip_text(&self, text: Option<&str>);
fn set_valign(&self, align: Align);
fn set_vexpand(&self, expand: bool);
fn set_vexpand_set(&self, set: bool);
fn set_visible(&self, visible: bool);
fn should_layout(&self) -> bool;
fn show(&self);
fn size_allocate(&self, allocation: &Allocation, baseline: i32);
fn snapshot_child<P: IsA<Widget>>(&self, child: &P, snapshot: &Snapshot);
fn translate_coordinates<P: IsA<Widget>>(
&self,
dest_widget: &P,
src_x: f64,
src_y: f64,
) -> Option<(f64, f64)>;
fn trigger_tooltip_query(&self);
fn unmap(&self);
fn unparent(&self);
fn unrealize(&self);
fn unset_state_flags(&self, flags: StateFlags);
fn get_property_has_default(&self) -> bool;
fn get_property_has_focus(&self) -> bool;
fn get_property_height_request(&self) -> i32;
fn set_property_height_request(&self, height_request: i32);
fn get_property_width_request(&self) -> i32;
fn set_property_width_request(&self, width_request: i32);
fn connect_destroy<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_direction_changed<F: Fn(&Self, TextDirection) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_hide<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_keynav_failed<F: Fn(&Self, DirectionType) -> glib::signal::Inhibit + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_map<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_mnemonic_activate<F: Fn(&Self, bool) -> glib::signal::Inhibit + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_move_focus<F: Fn(&Self, DirectionType) + 'static>(&self, f: F) -> SignalHandlerId;
fn emit_move_focus(&self, direction: DirectionType);
fn connect_query_tooltip<F: Fn(&Self, i32, i32, bool, &Tooltip) -> bool + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_realize<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_show<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_state_flags_changed<F: Fn(&Self, StateFlags) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_unmap<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_unrealize<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_can_focus_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_can_target_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_css_classes_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_cursor_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_focus_on_click_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_property_focusable_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_halign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_has_default_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_has_focus_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_has_tooltip_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_height_request_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_property_hexpand_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_hexpand_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_layout_manager_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_property_margin_bottom_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_property_margin_end_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_margin_start_notify<F: Fn(&Self) + 'static>(&self, f: F)
-> SignalHandlerId;
fn connect_property_margin_top_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_opacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_overflow_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_parent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_receives_default_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_property_root_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_scale_factor_notify<F: Fn(&Self) + 'static>(&self, f: F)
-> SignalHandlerId;
fn connect_property_sensitive_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_tooltip_markup_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
fn connect_property_tooltip_text_notify<F: Fn(&Self) + 'static>(&self, f: F)
-> SignalHandlerId;
fn connect_property_valign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_vexpand_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_vexpand_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_visible_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_width_request_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId;
}
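// Illustrative only; not part of the generated bindings (the header above says DO NOT EDIT).
// A hedged sketch of how application code might call a few WidgetExt methods declared in
// the trait above. The helper name and the concrete values are made up, and it assumes a
// running GTK application with a widget already constructed.
#[allow(dead_code)]
fn widget_ext_usage_sketch<W: IsA<Widget>>(widget: &W) {
    // Alignment and margins use the plain setters from WidgetExt.
    widget.set_halign(Align::Center);
    widget.set_valign(Align::Center);
    widget.set_margin_top(8);
    widget.set_margin_bottom(8);
    // Tooltip text takes an Option<&str>; passing None would clear it again.
    widget.set_tooltip_text(Some("example tooltip"));
    // Signal connectors return a SignalHandlerId that can be kept to disconnect later.
    let _show_handler: SignalHandlerId = widget.connect_show(|_widget| {
        // React to the widget being shown.
    });
}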
impl<O: IsA<Widget>> WidgetExt for O {
fn action_set_enabled(&self, action_name: &str, enabled: bool) {
unsafe {
gtk_sys::gtk_widget_action_set_enabled(
self.as_ref().to_glib_none().0,
action_name.to_glib_none().0,
enabled.to_glib(),
);
}
}
fn activate(&self) -> bool {
unsafe { from_glib(gtk_sys::gtk_widget_activate(self.as_ref().to_glib_none().0)) }
}
//fn activate_action(&self, name: &str, format_string: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> bool {
// unsafe { TODO: call gtk_sys:gtk_widget_activate_action() }
//}
fn activate_action_variant(&self, name: &str, args: Option<&glib::Variant>) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_activate_action_variant(
self.as_ref().to_glib_none().0,
name.to_glib_none().0,
args.to_glib_none().0,
))
}
}
fn activate_default(&self) {
unsafe {
gtk_sys::gtk_widget_activate_default(self.as_ref().to_glib_none().0);
}
}
fn add_controller<P: IsA<EventController>>(&self, controller: &P) {
unsafe {
gtk_sys::gtk_widget_add_controller(
self.as_ref().to_glib_none().0,
controller.as_ref().to_glib_full(),
);
}
}
fn add_css_class(&self, css_class: &str) {
unsafe {
gtk_sys::gtk_widget_add_css_class(
self.as_ref().to_glib_none().0,
css_class.to_glib_none().0,
);
}
}
fn add_mnemonic_label<P: IsA<Widget>>(&self, label: &P) {
unsafe {
gtk_sys::gtk_widget_add_mnemonic_label(
self.as_ref().to_glib_none().0,
label.as_ref().to_glib_none().0,
);
}
}
fn allocate(&self, width: i32, height: i32, baseline: i32, transform: Option<&gsk::Transform>) {
unsafe {
gtk_sys::gtk_widget_allocate(
self.as_ref().to_glib_none().0,
width,
height,
baseline,
transform.to_glib_full(),
);
}
}
fn child_focus(&self, direction: DirectionType) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_child_focus(
self.as_ref().to_glib_none().0,
direction.to_glib(),
))
}
}
fn compute_bounds<P: IsA<Widget>>(&self, target: &P) -> Option<graphene::Rect> {
unsafe {
let mut out_bounds = graphene::Rect::uninitialized();
let ret = from_glib(gtk_sys::gtk_widget_compute_bounds(
self.as_ref().to_glib_none().0,
target.as_ref().to_glib_none().0,
out_bounds.to_glib_none_mut().0,
));
if ret {
Some(out_bounds)
} else {
None
}
}
}
fn compute_expand(&self, orientation: Orientation) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_compute_expand(
self.as_ref().to_glib_none().0,
orientation.to_glib(),
))
}
}
fn compute_point<P: IsA<Widget>>(
&self,
target: &P,
point: &graphene::Point,
) -> Option<graphene::Point> {
unsafe {
let mut out_point = graphene::Point::uninitialized();
let ret = from_glib(gtk_sys::gtk_widget_compute_point(
self.as_ref().to_glib_none().0,
target.as_ref().to_glib_none().0,
point.to_glib_none().0,
out_point.to_glib_none_mut().0,
));
if ret {
Some(out_point)
} else {
None
}
}
}
fn compute_transform<P: IsA<Widget>>(&self, target: &P) -> Option<graphene::Matrix> {
unsafe {
let mut out_transform = graphene::Matrix::uninitialized();
let ret = from_glib(gtk_sys::gtk_widget_compute_transform(
self.as_ref().to_glib_none().0,
target.as_ref().to_glib_none().0,
out_transform.to_glib_none_mut().0,
));
if ret {
Some(out_transform)
} else {
None
}
}
}
fn contains(&self, x: f64, y: f64) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_contains(
self.as_ref().to_glib_none().0,
x,
y,
))
}
}
fn create_pango_context(&self) -> Option<pango::Context> {
unsafe {
from_glib_full(gtk_sys::gtk_widget_create_pango_context(
self.as_ref().to_glib_none().0,
))
}
}
fn create_pango_layout(&self, text: Option<&str>) -> Option<pango::Layout> {
unsafe {
from_glib_full(gtk_sys::gtk_widget_create_pango_layout(
self.as_ref().to_glib_none().0,
text.to_glib_none().0,
))
}
}
fn drag_check_threshold(
&self,
start_x: i32,
start_y: i32,
current_x: i32,
current_y: i32,
) -> bool {
unsafe {
from_glib(gtk_sys::gtk_drag_check_threshold(
self.as_ref().to_glib_none().0,
start_x,
start_y,
current_x,
current_y,
))
}
}
fn error_bell(&self) {
unsafe {
gtk_sys::gtk_widget_error_bell(self.as_ref().to_glib_none().0);
}
}
fn get_allocated_baseline(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_allocated_baseline(self.as_ref().to_glib_none().0) }
}
fn get_allocated_height(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_allocated_height(self.as_ref().to_glib_none().0) }
}
fn get_allocated_width(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_allocated_width(self.as_ref().to_glib_none().0) }
}
fn get_allocation(&self) -> Allocation {
unsafe {
let mut allocation = Allocation::uninitialized();
gtk_sys::gtk_widget_get_allocation(
self.as_ref().to_glib_none().0,
allocation.to_glib_none_mut().0,
);
allocation
}
}
fn get_ancestor(&self, widget_type: glib::types::Type) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_ancestor(
self.as_ref().to_glib_none().0,
widget_type.to_glib(),
))
}
}
fn get_can_focus(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_can_focus(
self.as_ref().to_glib_none().0,
))
}
}
fn get_can_target(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_can_target(
self.as_ref().to_glib_none().0,
))
}
}
fn get_child_visible(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_child_visible(
self.as_ref().to_glib_none().0,
))
}
}
fn get_clipboard(&self) -> gdk::Clipboard {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_clipboard(
self.as_ref().to_glib_none().0,
))
}
}
fn get_css_classes(&self) -> Vec<GString> {
unsafe {
FromGlibPtrContainer::from_glib_full(gtk_sys::gtk_widget_get_css_classes(
self.as_ref().to_glib_none().0,
))
}
}
fn get_css_name(&self) -> Option<GString> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_css_name(
self.as_ref().to_glib_none().0,
))
}
}
fn get_cursor(&self) -> Option<gdk::Cursor> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_cursor(
self.as_ref().to_glib_none().0,
))
}
}
fn get_direction(&self) -> TextDirection {
unsafe {
from_glib(gtk_sys::gtk_widget_get_direction(
self.as_ref().to_glib_none().0,
))
}
}
fn get_display(&self) -> Option<gdk::Display> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_display(
self.as_ref().to_glib_none().0,
))
}
}
fn get_first_child(&self) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_first_child(
self.as_ref().to_glib_none().0,
))
}
}
fn get_focus_child(&self) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_focus_child(
self.as_ref().to_glib_none().0,
))
}
}
fn get_focus_on_click(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_focus_on_click(
self.as_ref().to_glib_none().0,
))
}
}
fn get_focusable(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_focusable(
self.as_ref().to_glib_none().0,
))
}
}
fn get_font_map(&self) -> Option<pango::FontMap> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_font_map(
self.as_ref().to_glib_none().0,
))
}
}
fn get_font_options(&self) -> Option<cairo::FontOptions> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_font_options(
self.as_ref().to_glib_none().0,
))
}
}
fn get_frame_clock(&self) -> Option<gdk::FrameClock> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_frame_clock(
self.as_ref().to_glib_none().0,
))
}
}
fn get_halign(&self) -> Align {
unsafe {
from_glib(gtk_sys::gtk_widget_get_halign(
self.as_ref().to_glib_none().0,
))
}
}
fn get_has_tooltip(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_has_tooltip(
self.as_ref().to_glib_none().0,
))
}
}
fn get_height(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_height(self.as_ref().to_glib_none().0) }
}
fn get_hexpand(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_hexpand(
self.as_ref().to_glib_none().0,
))
}
}
fn get_hexpand_set(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_hexpand_set(
self.as_ref().to_glib_none().0,
))
}
}
fn get_last_child(&self) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_last_child(
self.as_ref().to_glib_none().0,
))
}
}
fn get_layout_manager(&self) -> Option<LayoutManager> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_layout_manager(
self.as_ref().to_glib_none().0,
))
}
}
fn get_mapped(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_mapped(
self.as_ref().to_glib_none().0,
))
}
}
fn get_margin_bottom(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_margin_bottom(self.as_ref().to_glib_none().0) }
}
fn get_margin_end(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_margin_end(self.as_ref().to_glib_none().0) }
}
fn get_margin_start(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_margin_start(self.as_ref().to_glib_none().0) }
}
fn get_margin_top(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_margin_top(self.as_ref().to_glib_none().0) }
}
fn get_name(&self) -> Option<GString> {
unsafe { from_glib_none(gtk_sys::gtk_widget_get_name(self.as_ref().to_glib_none().0)) }
}
fn get_native(&self) -> Option<Native> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_native(
self.as_ref().to_glib_none().0,
))
}
}
fn get_next_sibling(&self) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_next_sibling(
self.as_ref().to_glib_none().0,
))
}
}
fn get_opacity(&self) -> f64 {
unsafe { gtk_sys::gtk_widget_get_opacity(self.as_ref().to_glib_none().0) }
}
fn get_overflow(&self) -> Overflow {
unsafe {
from_glib(gtk_sys::gtk_widget_get_overflow(
self.as_ref().to_glib_none().0,
))
}
}
fn get_pango_context(&self) -> Option<pango::Context> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_pango_context(
self.as_ref().to_glib_none().0,
))
}
}
fn get_parent(&self) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_parent(
self.as_ref().to_glib_none().0,
))
}
}
fn get_preferred_size(&self) -> (Requisition, Requisition) {
unsafe {
let mut minimum_size = Requisition::uninitialized();
let mut natural_size = Requisition::uninitialized();
gtk_sys::gtk_widget_get_preferred_size(
self.as_ref().to_glib_none().0,
minimum_size.to_glib_none_mut().0,
natural_size.to_glib_none_mut().0,
);
(minimum_size, natural_size)
}
}
fn get_prev_sibling(&self) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_prev_sibling(
self.as_ref().to_glib_none().0,
))
}
}
fn get_primary_clipboard(&self) -> Option<gdk::Clipboard> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_primary_clipboard(
self.as_ref().to_glib_none().0,
))
}
}
fn get_realized(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_realized(
self.as_ref().to_glib_none().0,
))
}
}
fn get_receives_default(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_receives_default(
self.as_ref().to_glib_none().0,
))
}
}
fn get_request_mode(&self) -> SizeRequestMode {
unsafe {
from_glib(gtk_sys::gtk_widget_get_request_mode(
self.as_ref().to_glib_none().0,
))
}
}
fn get_root(&self) -> Option<Root> {
unsafe { from_glib_none(gtk_sys::gtk_widget_get_root(self.as_ref().to_glib_none().0)) }
}
fn get_scale_factor(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_scale_factor(self.as_ref().to_glib_none().0) }
}
fn get_sensitive(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_sensitive(
self.as_ref().to_glib_none().0,
))
}
}
fn get_settings(&self) -> Option<Settings> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_settings(
self.as_ref().to_glib_none().0,
))
}
}
fn get_size(&self, orientation: Orientation) -> i32 {
unsafe {
gtk_sys::gtk_widget_get_size(self.as_ref().to_glib_none().0, orientation.to_glib())
}
}
fn get_size_request(&self) -> (i32, i32) {
unsafe {
let mut width = mem::MaybeUninit::uninit();
let mut height = mem::MaybeUninit::uninit();
gtk_sys::gtk_widget_get_size_request(
self.as_ref().to_glib_none().0,
width.as_mut_ptr(),
height.as_mut_ptr(),
);
let width = width.assume_init();
let height = height.assume_init();
(width, height)
}
}
fn get_state_flags(&self) -> StateFlags {
unsafe {
from_glib(gtk_sys::gtk_widget_get_state_flags(
self.as_ref().to_glib_none().0,
))
}
}
fn get_style_context(&self) -> StyleContext {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_style_context(
self.as_ref().to_glib_none().0,
))
}
}
fn get_template_child(
&self,
widget_type: glib::types::Type,
name: &str,
) -> Option<glib::Object> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_template_child(
self.as_ref().to_glib_none().0,
widget_type.to_glib(),
name.to_glib_none().0,
))
}
}
fn get_tooltip_markup(&self) -> Option<GString> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_tooltip_markup(
self.as_ref().to_glib_none().0,
))
}
}
fn get_tooltip_text(&self) -> Option<GString> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_get_tooltip_text(
self.as_ref().to_glib_none().0,
))
}
}
fn get_valign(&self) -> Align {
unsafe {
from_glib(gtk_sys::gtk_widget_get_valign(
self.as_ref().to_glib_none().0,
))
}
}
fn get_vexpand(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_vexpand(
self.as_ref().to_glib_none().0,
))
}
}
fn get_vexpand_set(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_vexpand_set(
self.as_ref().to_glib_none().0,
))
}
}
fn get_visible(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_get_visible(
self.as_ref().to_glib_none().0,
))
}
}
fn get_width(&self) -> i32 {
unsafe { gtk_sys::gtk_widget_get_width(self.as_ref().to_glib_none().0) }
}
fn grab_focus(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_grab_focus(
self.as_ref().to_glib_none().0,
))
}
}
fn has_css_class(&self, css_class: &str) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_has_css_class(
self.as_ref().to_glib_none().0,
css_class.to_glib_none().0,
))
}
}
fn has_default(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_has_default(
self.as_ref().to_glib_none().0,
))
}
}
fn has_focus(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_has_focus(
self.as_ref().to_glib_none().0,
))
}
}
fn has_visible_focus(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_has_visible_focus(
self.as_ref().to_glib_none().0,
))
}
}
fn hide(&self) {
unsafe {
gtk_sys::gtk_widget_hide(self.as_ref().to_glib_none().0);
}
}
fn in_destruction(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_in_destruction(
self.as_ref().to_glib_none().0,
))
}
}
fn init_template(&self) {
unsafe {
gtk_sys::gtk_widget_init_template(self.as_ref().to_glib_none().0);
}
}
fn insert_action_group<P: IsA<gio::ActionGroup>>(&self, name: &str, group: Option<&P>) {
unsafe {
gtk_sys::gtk_widget_insert_action_group(
self.as_ref().to_glib_none().0,
name.to_glib_none().0,
group.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn insert_after<P: IsA<Widget>, Q: IsA<Widget>>(
&self,
parent: &P,
previous_sibling: Option<&Q>,
) {
unsafe {
gtk_sys::gtk_widget_insert_after(
self.as_ref().to_glib_none().0,
parent.as_ref().to_glib_none().0,
previous_sibling.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn insert_before<P: IsA<Widget>, Q: IsA<Widget>>(&self, parent: &P, next_sibling: Option<&Q>) {
unsafe {
gtk_sys::gtk_widget_insert_before(
self.as_ref().to_glib_none().0,
parent.as_ref().to_glib_none().0,
next_sibling.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn is_ancestor<P: IsA<Widget>>(&self, ancestor: &P) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_is_ancestor(
self.as_ref().to_glib_none().0,
ancestor.as_ref().to_glib_none().0,
))
}
}
fn is_drawable(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_is_drawable(
self.as_ref().to_glib_none().0,
))
}
}
fn is_focus(&self) -> bool {
unsafe { from_glib(gtk_sys::gtk_widget_is_focus(self.as_ref().to_glib_none().0)) }
}
fn is_sensitive(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_is_sensitive(
self.as_ref().to_glib_none().0,
))
}
}
fn is_visible(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_is_visible(
self.as_ref().to_glib_none().0,
))
}
}
fn keynav_failed(&self, direction: DirectionType) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_keynav_failed(
self.as_ref().to_glib_none().0,
direction.to_glib(),
))
}
}
fn list_mnemonic_labels(&self) -> Vec<Widget> {
unsafe {
FromGlibPtrContainer::from_glib_container(gtk_sys::gtk_widget_list_mnemonic_labels(
self.as_ref().to_glib_none().0,
))
}
}
fn map(&self) {
unsafe {
gtk_sys::gtk_widget_map(self.as_ref().to_glib_none().0);
}
}
fn measure(&self, orientation: Orientation, for_size: i32) -> (i32, i32, i32, i32) {
unsafe {
let mut minimum = mem::MaybeUninit::uninit();
let mut natural = mem::MaybeUninit::uninit();
let mut minimum_baseline = mem::MaybeUninit::uninit();
let mut natural_baseline = mem::MaybeUninit::uninit();
gtk_sys::gtk_widget_measure(
self.as_ref().to_glib_none().0,
orientation.to_glib(),
for_size,
minimum.as_mut_ptr(),
natural.as_mut_ptr(),
minimum_baseline.as_mut_ptr(),
natural_baseline.as_mut_ptr(),
);
let minimum = minimum.assume_init();
let natural = natural.assume_init();
let minimum_baseline = minimum_baseline.assume_init();
let natural_baseline = natural_baseline.assume_init();
(minimum, natural, minimum_baseline, natural_baseline)
}
}
fn mnemonic_activate(&self, group_cycling: bool) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_mnemonic_activate(
self.as_ref().to_glib_none().0,
group_cycling.to_glib(),
))
}
}
fn observe_children(&self) -> Option<gio::ListModel> {
unsafe {
from_glib_full(gtk_sys::gtk_widget_observe_children(
self.as_ref().to_glib_none().0,
))
}
}
fn observe_controllers(&self) -> Option<gio::ListModel> {
unsafe {
from_glib_full(gtk_sys::gtk_widget_observe_controllers(
self.as_ref().to_glib_none().0,
))
}
}
fn pick(&self, x: f64, y: f64, flags: PickFlags) -> Option<Widget> {
unsafe {
from_glib_none(gtk_sys::gtk_widget_pick(
self.as_ref().to_glib_none().0,
x,
y,
flags.to_glib(),
))
}
}
fn queue_allocate(&self) {
unsafe {
gtk_sys::gtk_widget_queue_allocate(self.as_ref().to_glib_none().0);
}
}
fn queue_draw(&self) {
unsafe {
gtk_sys::gtk_widget_queue_draw(self.as_ref().to_glib_none().0);
}
}
fn queue_resize(&self) {
unsafe {
gtk_sys::gtk_widget_queue_resize(self.as_ref().to_glib_none().0);
}
}
fn realize(&self) {
unsafe {
gtk_sys::gtk_widget_realize(self.as_ref().to_glib_none().0);
}
}
fn remove_controller<P: IsA<EventController>>(&self, controller: &P) {
unsafe {
gtk_sys::gtk_widget_remove_controller(
self.as_ref().to_glib_none().0,
controller.as_ref().to_glib_none().0,
);
}
}
fn remove_css_class(&self, css_class: &str) {
unsafe {
gtk_sys::gtk_widget_remove_css_class(
self.as_ref().to_glib_none().0,
css_class.to_glib_none().0,
);
}
}
fn remove_mnemonic_label<P: IsA<Widget>>(&self, label: &P) {
unsafe {
gtk_sys::gtk_widget_remove_mnemonic_label(
self.as_ref().to_glib_none().0,
label.as_ref().to_glib_none().0,
);
}
}
fn set_can_focus(&self, can_focus: bool) {
unsafe {
gtk_sys::gtk_widget_set_can_focus(self.as_ref().to_glib_none().0, can_focus.to_glib());
}
}
fn set_can_target(&self, can_target: bool) {
unsafe {
gtk_sys::gtk_widget_set_can_target(
self.as_ref().to_glib_none().0,
can_target.to_glib(),
);
}
}
fn set_child_visible(&self, child_visible: bool) {
unsafe {
gtk_sys::gtk_widget_set_child_visible(
self.as_ref().to_glib_none().0,
child_visible.to_glib(),
);
}
}
fn set_css_classes(&self, classes: &[&str]) {
unsafe {
gtk_sys::gtk_widget_set_css_classes(
self.as_ref().to_glib_none().0,
classes.to_glib_none().0,
);
}
}
fn set_cursor(&self, cursor: Option<&gdk::Cursor>) {
unsafe {
gtk_sys::gtk_widget_set_cursor(self.as_ref().to_glib_none().0, cursor.to_glib_none().0);
}
}
fn set_cursor_from_name(&self, name: Option<&str>) {
unsafe {
gtk_sys::gtk_widget_set_cursor_from_name(
self.as_ref().to_glib_none().0,
name.to_glib_none().0,
);
}
}
fn set_direction(&self, dir: TextDirection) {
unsafe {
gtk_sys::gtk_widget_set_direction(self.as_ref().to_glib_none().0, dir.to_glib());
}
}
fn set_focus_child<P: IsA<Widget>>(&self, child: Option<&P>) {
unsafe {
gtk_sys::gtk_widget_set_focus_child(
self.as_ref().to_glib_none().0,
child.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn set_focus_on_click(&self, focus_on_click: bool) {
unsafe {
gtk_sys::gtk_widget_set_focus_on_click(
self.as_ref().to_glib_none().0,
focus_on_click.to_glib(),
);
}
}
fn set_focusable(&self, focusable: bool) {
unsafe {
gtk_sys::gtk_widget_set_focusable(self.as_ref().to_glib_none().0, focusable.to_glib());
}
}
fn set_font_map<P: IsA<pango::FontMap>>(&self, font_map: Option<&P>) {
unsafe {
gtk_sys::gtk_widget_set_font_map(
self.as_ref().to_glib_none().0,
font_map.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
fn set_font_options(&self, options: Option<&cairo::FontOptions>) {
unsafe {
gtk_sys::gtk_widget_set_font_options(
self.as_ref().to_glib_none().0,
options.to_glib_none().0,
);
}
}
fn set_halign(&self, align: Align) {
unsafe {
gtk_sys::gtk_widget_set_halign(self.as_ref().to_glib_none().0, align.to_glib());
}
}
fn set_has_tooltip(&self, has_tooltip: bool) {
unsafe {
gtk_sys::gtk_widget_set_has_tooltip(
self.as_ref().to_glib_none().0,
has_tooltip.to_glib(),
);
}
}
fn set_hexpand(&self, expand: bool) {
unsafe {
gtk_sys::gtk_widget_set_hexpand(self.as_ref().to_glib_none().0, expand.to_glib());
}
}
fn set_hexpand_set(&self, set: bool) {
unsafe {
gtk_sys::gtk_widget_set_hexpand_set(self.as_ref().to_glib_none().0, set.to_glib());
}
}
fn set_layout_manager<P: IsA<LayoutManager>>(&self, layout_manager: Option<&P>) {
unsafe {
gtk_sys::gtk_widget_set_layout_manager(
self.as_ref().to_glib_none().0,
layout_manager.map(|p| p.as_ref()).to_glib_full(),
);
}
}
fn set_margin_bottom(&self, margin: i32) {
unsafe {
gtk_sys::gtk_widget_set_margin_bottom(self.as_ref().to_glib_none().0, margin);
}
}
fn set_margin_end(&self, margin: i32) {
unsafe {
gtk_sys::gtk_widget_set_margin_end(self.as_ref().to_glib_none().0, margin);
}
}
fn set_margin_start(&self, margin: i32) {
unsafe {
gtk_sys::gtk_widget_set_margin_start(self.as_ref().to_glib_none().0, margin);
}
}
fn set_margin_top(&self, margin: i32) {
unsafe {
gtk_sys::gtk_widget_set_margin_top(self.as_ref().to_glib_none().0, margin);
}
}
fn set_name(&self, name: &str) {
unsafe {
gtk_sys::gtk_widget_set_name(self.as_ref().to_glib_none().0, name.to_glib_none().0);
}
}
fn set_opacity(&self, opacity: f64) {
unsafe {
gtk_sys::gtk_widget_set_opacity(self.as_ref().to_glib_none().0, opacity);
}
}
fn set_overflow(&self, overflow: Overflow) {
unsafe {
gtk_sys::gtk_widget_set_overflow(self.as_ref().to_glib_none().0, overflow.to_glib());
}
}
fn set_parent<P: IsA<Widget>>(&self, parent: &P) {
unsafe {
gtk_sys::gtk_widget_set_parent(
self.as_ref().to_glib_none().0,
parent.as_ref().to_glib_none().0,
);
}
}
fn set_receives_default(&self, receives_default: bool) {
unsafe {
gtk_sys::gtk_widget_set_receives_default(
self.as_ref().to_glib_none().0,
receives_default.to_glib(),
);
}
}
fn set_sensitive(&self, sensitive: bool) {
unsafe {
gtk_sys::gtk_widget_set_sensitive(self.as_ref().to_glib_none().0, sensitive.to_glib());
}
}
fn set_size_request(&self, width: i32, height: i32) {
unsafe {
gtk_sys::gtk_widget_set_size_request(self.as_ref().to_glib_none().0, width, height);
}
}
fn set_state_flags(&self, flags: StateFlags, clear: bool) {
unsafe {
gtk_sys::gtk_widget_set_state_flags(
self.as_ref().to_glib_none().0,
flags.to_glib(),
clear.to_glib(),
);
}
}
fn set_tooltip_markup(&self, markup: Option<&str>) {
unsafe {
gtk_sys::gtk_widget_set_tooltip_markup(
self.as_ref().to_glib_none().0,
markup.to_glib_none().0,
);
}
}
fn set_tooltip_text(&self, text: Option<&str>) {
unsafe {
gtk_sys::gtk_widget_set_tooltip_text(
self.as_ref().to_glib_none().0,
text.to_glib_none().0,
);
}
}
fn set_valign(&self, align: Align) {
unsafe {
gtk_sys::gtk_widget_set_valign(self.as_ref().to_glib_none().0, align.to_glib());
}
}
fn set_vexpand(&self, expand: bool) {
unsafe {
gtk_sys::gtk_widget_set_vexpand(self.as_ref().to_glib_none().0, expand.to_glib());
}
}
fn set_vexpand_set(&self, set: bool) {
unsafe {
gtk_sys::gtk_widget_set_vexpand_set(self.as_ref().to_glib_none().0, set.to_glib());
}
}
fn set_visible(&self, visible: bool) {
unsafe {
gtk_sys::gtk_widget_set_visible(self.as_ref().to_glib_none().0, visible.to_glib());
}
}
fn should_layout(&self) -> bool {
unsafe {
from_glib(gtk_sys::gtk_widget_should_layout(
self.as_ref().to_glib_none().0,
))
}
}
fn show(&self) {
unsafe {
gtk_sys::gtk_widget_show(self.as_ref().to_glib_none().0);
}
}
fn size_allocate(&self, allocation: &Allocation, baseline: i32) {
unsafe {
gtk_sys::gtk_widget_size_allocate(
self.as_ref().to_glib_none().0,
allocation.to_glib_none().0,
baseline,
);
}
}
fn snapshot_child<P: IsA<Widget>>(&self, child: &P, snapshot: &Snapshot) {
unsafe {
gtk_sys::gtk_widget_snapshot_child(
self.as_ref().to_glib_none().0,
child.as_ref().to_glib_none().0,
snapshot.to_glib_none().0,
);
}
}
fn translate_coordinates<P: IsA<Widget>>(
&self,
dest_widget: &P,
src_x: f64,
src_y: f64,
) -> Option<(f64, f64)> {
unsafe {
let mut dest_x = mem::MaybeUninit::uninit();
let mut dest_y = mem::MaybeUninit::uninit();
let ret = from_glib(gtk_sys::gtk_widget_translate_coordinates(
self.as_ref().to_glib_none().0,
dest_widget.as_ref().to_glib_none().0,
src_x,
src_y,
dest_x.as_mut_ptr(),
dest_y.as_mut_ptr(),
));
let dest_x = dest_x.assume_init();
let dest_y = dest_y.assume_init();
if ret {
Some((dest_x, dest_y))
} else {
None
}
}
}
fn trigger_tooltip_query(&self) {
unsafe {
gtk_sys::gtk_widget_trigger_tooltip_query(self.as_ref().to_glib_none().0);
}
}
fn unmap(&self) {
unsafe {
gtk_sys::gtk_widget_unmap(self.as_ref().to_glib_none().0);
}
}
fn unparent(&self) {
unsafe {
gtk_sys::gtk_widget_unparent(self.as_ref().to_glib_none().0);
}
}
fn unrealize(&self) {
unsafe {
gtk_sys::gtk_widget_unrealize(self.as_ref().to_glib_none().0);
}
}
fn unset_state_flags(&self, flags: StateFlags) {
unsafe {
gtk_sys::gtk_widget_unset_state_flags(self.as_ref().to_glib_none().0, flags.to_glib());
}
}
fn get_property_has_default(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_sys::g_object_get_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"has-default\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `has-default` getter")
.unwrap()
}
}
fn get_property_has_focus(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_sys::g_object_get_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"has-focus\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `has-focus` getter")
.unwrap()
}
}
fn get_property_height_request(&self) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
gobject_sys::g_object_get_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"height-request\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `height-request` getter")
.unwrap()
}
}
fn set_property_height_request(&self, height_request: i32) {
unsafe {
gobject_sys::g_object_set_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"height-request\0".as_ptr() as *const _,
Value::from(&height_request).to_glib_none().0,
);
}
}
fn get_property_width_request(&self) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
gobject_sys::g_object_get_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"width-request\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `width-request` getter")
.unwrap()
}
}
fn set_property_width_request(&self, width_request: i32) {
unsafe {
gobject_sys::g_object_set_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"width-request\0".as_ptr() as *const _,
Value::from(&width_request).to_glib_none().0,
);
}
}
fn connect_destroy<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn destroy_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"destroy\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
destroy_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_direction_changed<F: Fn(&Self, TextDirection) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn direction_changed_trampoline<P, F: Fn(&P, TextDirection) + 'static>(
this: *mut gtk_sys::GtkWidget,
previous_direction: gtk_sys::GtkTextDirection,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(
&Widget::from_glib_borrow(this).unsafe_cast_ref(),
from_glib(previous_direction),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"direction-changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
direction_changed_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_hide<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn hide_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"hide\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
hide_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_keynav_failed<F: Fn(&Self, DirectionType) -> glib::signal::Inhibit + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn keynav_failed_trampoline<
P,
F: Fn(&P, DirectionType) -> glib::signal::Inhibit + 'static,
>(
this: *mut gtk_sys::GtkWidget,
direction: gtk_sys::GtkDirectionType,
f: glib_sys::gpointer,
) -> glib_sys::gboolean
where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(
&Widget::from_glib_borrow(this).unsafe_cast_ref(),
from_glib(direction),
)
.to_glib()
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"keynav-failed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
keynav_failed_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_map<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn map_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"map\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
map_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_mnemonic_activate<F: Fn(&Self, bool) -> glib::signal::Inhibit + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn mnemonic_activate_trampoline<
P,
F: Fn(&P, bool) -> glib::signal::Inhibit + 'static,
>(
this: *mut gtk_sys::GtkWidget,
group_cycling: glib_sys::gboolean,
f: glib_sys::gpointer,
) -> glib_sys::gboolean
where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(
&Widget::from_glib_borrow(this).unsafe_cast_ref(),
from_glib(group_cycling),
)
.to_glib()
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"mnemonic-activate\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
mnemonic_activate_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_move_focus<F: Fn(&Self, DirectionType) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn move_focus_trampoline<P, F: Fn(&P, DirectionType) + 'static>(
this: *mut gtk_sys::GtkWidget,
direction: gtk_sys::GtkDirectionType,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(
&Widget::from_glib_borrow(this).unsafe_cast_ref(),
from_glib(direction),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"move-focus\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
move_focus_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn emit_move_focus(&self, direction: DirectionType) {
let _ = unsafe {
glib::Object::from_glib_borrow(self.as_ptr() as *mut gobject_sys::GObject)
.emit("move-focus", &[&direction])
.unwrap()
};
}
fn connect_query_tooltip<F: Fn(&Self, i32, i32, bool, &Tooltip) -> bool + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn query_tooltip_trampoline<
P,
F: Fn(&P, i32, i32, bool, &Tooltip) -> bool + 'static,
>(
this: *mut gtk_sys::GtkWidget,
x: libc::c_int,
y: libc::c_int,
keyboard_mode: glib_sys::gboolean,
tooltip: *mut gtk_sys::GtkTooltip,
f: glib_sys::gpointer,
) -> glib_sys::gboolean
where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(
&Widget::from_glib_borrow(this).unsafe_cast_ref(),
x,
y,
from_glib(keyboard_mode),
&from_glib_borrow(tooltip),
)
.to_glib()
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"query-tooltip\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
query_tooltip_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_realize<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn realize_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"realize\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
realize_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_show<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn show_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"show\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
show_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_state_flags_changed<F: Fn(&Self, StateFlags) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn state_flags_changed_trampoline<P, F: Fn(&P, StateFlags) + 'static>(
this: *mut gtk_sys::GtkWidget,
flags: gtk_sys::GtkStateFlags,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(
&Widget::from_glib_borrow(this).unsafe_cast_ref(),
from_glib(flags),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"state-flags-changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
state_flags_changed_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_unmap<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn unmap_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"unmap\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
unmap_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_unrealize<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn unrealize_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"unrealize\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
unrealize_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_can_focus_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_can_focus_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::can-focus\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_can_focus_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_can_target_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_can_target_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::can-target\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_can_target_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_css_classes_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_css_classes_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::css-classes\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_css_classes_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_cursor_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_cursor_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::cursor\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_cursor_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_focus_on_click_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_focus_on_click_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::focus-on-click\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_focus_on_click_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_focusable_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_focusable_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::focusable\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_focusable_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_halign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_halign_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::halign\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_halign_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_has_default_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_has_default_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::has-default\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_has_default_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_has_focus_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_has_focus_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::has-focus\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_has_focus_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_has_tooltip_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_has_tooltip_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::has-tooltip\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_has_tooltip_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_height_request_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_height_request_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::height-request\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_height_request_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_hexpand_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_hexpand_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::hexpand\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_hexpand_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_hexpand_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_hexpand_set_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::hexpand-set\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_hexpand_set_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_layout_manager_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_layout_manager_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::layout-manager\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_layout_manager_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_margin_bottom_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_margin_bottom_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::margin-bottom\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_margin_bottom_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_margin_end_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_margin_end_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::margin-end\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_margin_end_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_margin_start_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_margin_start_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::margin-start\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_margin_start_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_margin_top_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_margin_top_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::margin-top\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_margin_top_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_name_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::name\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_name_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_opacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_opacity_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::opacity\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_opacity_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_overflow_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_overflow_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::overflow\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_overflow_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_parent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_parent_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::parent\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_parent_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_receives_default_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_receives_default_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::receives-default\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_receives_default_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_root_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_root_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::root\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_root_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_scale_factor_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_scale_factor_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::scale-factor\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_scale_factor_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_sensitive_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_sensitive_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::sensitive\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_sensitive_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_tooltip_markup_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_tooltip_markup_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::tooltip-markup\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_tooltip_markup_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_tooltip_text_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_tooltip_text_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::tooltip-text\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_tooltip_text_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_valign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_valign_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::valign\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_valign_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_vexpand_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_vexpand_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::vexpand\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_vexpand_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_vexpand_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_vexpand_set_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::vexpand-set\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_vexpand_set_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_visible_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_visible_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::visible\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_visible_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_width_request_notify<F: Fn(&Self) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_width_request_trampoline<P, F: Fn(&P) + 'static>(
this: *mut gtk_sys::GtkWidget,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<Widget>,
{
let f: &F = &*(f as *const F);
f(&Widget::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::width-request\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_width_request_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for Widget {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Widget")
}
}
| 30.816806 | 137 | 0.517985 |
725ecf1cfbfe74d96f6870a8df3e0497892ac51e | 37,495 | use core::convert::TryFrom;
use core::fmt;
use core::iter::FusedIterator;
use scolapasta_string_escape::{is_ascii_char_with_escape, InvalidUtf8ByteSequence};
use crate::ident::IdentifierType;
/// An iterator that yields a debug representation of a `Symbol` and its byte
/// contents as a sequence of `char`s.
///
/// This struct is created by the [`inspect`] method on [`Symbol`]. See its
/// documentation for more.
///
/// To format a `Symbol` directly into a writer, see [`format_into`] or
/// [`write_into`].
///
/// # Examples
///
/// To inspect an empty bytestring:
///
/// ```
/// # extern crate alloc;
/// # use alloc::string::String;
/// # use spinoso_symbol::Inspect;
/// let inspect = Inspect::default();
/// let debug = inspect.collect::<String>();
/// assert_eq!(debug, r#":"""#);
/// ```
///
/// To inspect a well-formed UTF-8 bytestring:
///
/// ```
/// # extern crate alloc;
/// # use alloc::string::String;
/// # use spinoso_symbol::Inspect;
/// let inspect = Inspect::from("spinoso");
/// let debug = inspect.collect::<String>();
/// assert_eq!(debug, ":spinoso");
/// ```
///
/// To inspect a bytestring with invalid UTF-8 bytes:
///
/// ```
/// # extern crate alloc;
/// # use alloc::string::String;
/// # use spinoso_symbol::Inspect;
/// let inspect = Inspect::from(&b"invalid-\xFF-utf8"[..]);
/// let debug = inspect.collect::<String>();
/// assert_eq!(debug, r#":"invalid-\xFF-utf8""#);
/// ```
///
/// [`inspect`]: crate::Symbol::inspect
/// [`Symbol`]: crate::Symbol
/// [`format_into`]: Self::format_into
/// [`write_into`]: Self::write_into
#[derive(Default, Debug, Clone)]
#[must_use = "this `Inspect` is an `Iterator`, which should be consumed if constructed"]
#[cfg_attr(docsrs, doc(cfg(feature = "inspect")))]
pub struct Inspect<'a>(State<'a>);
impl<'a> From<&'a str> for Inspect<'a> {
#[inline]
fn from(value: &'a str) -> Self {
Self::from(value.as_bytes())
}
}
impl<'a> From<&'a [u8]> for Inspect<'a> {
#[inline]
fn from(value: &'a [u8]) -> Self {
match value {
value if value.is_empty() => Self::default(),
value if IdentifierType::try_from(value).is_ok() => Self(State::ident(value)),
value => Self(State::quoted(value)),
}
}
}
impl<'a> Iterator for Inspect<'a> {
type Item = char;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
impl<'a> DoubleEndedIterator for Inspect<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.0.next_back()
}
}
impl<'a> FusedIterator for Inspect<'a> {}
impl<'a> Inspect<'a> {
/// Write an `Inspect` iterator into the given destination using the debug
/// representation of the interned byteslice associated with the symbol in
/// the underlying interner.
///
/// This formatter writes content like `:spinoso` and `:"invalid-\xFF-utf8"`.
/// To see example output of the underlying iterator, see the `Inspect`
/// documentation.
///
/// To write binary output, use [`write_into`], which requires the **std**
/// feature to be activated.
///
/// # Errors
///
/// If the given writer returns an error as it is being written to, that
/// error is returned.
///
/// # Examples
///
/// ```
/// # use core::fmt::Write;
/// # use spinoso_symbol::Inspect;
/// let mut buf = String::new();
/// let iter = Inspect::from("spinoso");
/// iter.format_into(&mut buf);
/// assert_eq!(buf, ":spinoso");
///
/// let mut buf = String::new();
/// let iter = Inspect::from(&b"\xFF"[..]);
/// iter.format_into(&mut buf);
/// assert_eq!(buf, r#":"\xFF""#);
/// ```
///
/// [`write_into`]: Self::write_into
#[inline]
pub fn format_into<W>(self, mut dest: W) -> fmt::Result
where
W: fmt::Write,
{
for ch in self {
dest.write_char(ch)?;
}
Ok(())
}
/// Write an `Inspect` iterator into the given destination using the debug
/// representation of the interned byteslice associated with the symbol in
/// the underlying interner.
///
/// This formatter writes content like `:spinoso` and `:"invalid-\xFF-utf8"`.
/// To see example output of the underlying iterator, see the `Inspect`
/// documentation.
///
/// To write to a [formatter], use [`format_into`].
///
/// # Errors
///
/// If the given writer returns an error as it is being written to, that
/// error is returned.
///
/// # Examples
///
/// ```
/// # use std::io::Write;
/// # use spinoso_symbol::Inspect;
/// let mut buf = Vec::new();
/// let iter = Inspect::from("spinoso");
/// iter.write_into(&mut buf);
/// assert_eq!(buf, &b":spinoso"[..]);
///
/// let mut buf = Vec::new();
/// let iter = Inspect::from(&b"\xFF"[..]);
/// iter.write_into(&mut buf);
/// assert_eq!(buf, &[b':', b'"', b'\\', b'x', b'F', b'F', b'"']);
/// ```
///
/// [formatter]: fmt::Write
/// [`format_into`]: Self::format_into
#[inline]
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn write_into<W>(self, mut dest: W) -> std::io::Result<()>
where
W: std::io::Write,
{
let mut buf = [0; 4];
for ch in self {
let utf8 = ch.encode_utf8(&mut buf);
dest.write_all(utf8.as_bytes())?;
}
Ok(())
}
}
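/// Hand-rolled bit flags for the inspect iterator: they record whether the
/// symbol is a bare identifier (no quoting needed) and which wrapper
/// characters (the leading `:` and the surrounding `"` pair) are still
/// pending emission.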
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct Flags {
bits: u8,
}
impl Flags {
// Bit flags
const IS_IDENT: Self = Self { bits: 0b1000_0000 };
const EMIT_LEADING_COLON: Self = Self { bits: 0b0000_1000 };
const EMIT_LEADING_QUOTE: Self = Self { bits: 0b0000_0001 };
const EMIT_TRAILING_QUOTE: Self = Self { bits: 0b0000_0010 };
// Initial states
const IDENT: Self = Self {
bits: Self::IS_IDENT.bits | Self::EMIT_LEADING_COLON.bits,
};
const QUOTED: Self = Self {
bits: Self::EMIT_LEADING_COLON.bits | Self::EMIT_LEADING_QUOTE.bits | Self::EMIT_TRAILING_QUOTE.bits,
};
#[inline]
fn emit_leading_colon(&mut self) -> Option<char> {
if (self.bits & Self::EMIT_LEADING_COLON.bits) == Self::EMIT_LEADING_COLON.bits {
self.bits &= !Self::EMIT_LEADING_COLON.bits;
Some(':')
} else {
None
}
}
#[inline]
fn emit_leading_quote(&mut self) -> Option<char> {
if (self.bits & Self::EMIT_LEADING_QUOTE.bits) == Self::EMIT_LEADING_QUOTE.bits {
self.bits &= !Self::EMIT_LEADING_QUOTE.bits;
Some('"')
} else {
None
}
}
#[inline]
fn emit_trailing_quote(&mut self) -> Option<char> {
if (self.bits & Self::EMIT_TRAILING_QUOTE.bits) == Self::EMIT_TRAILING_QUOTE.bits {
self.bits &= !Self::EMIT_TRAILING_QUOTE.bits;
Some('"')
} else {
None
}
}
#[inline]
const fn is_ident(self) -> bool {
(self.bits & Self::IS_IDENT.bits) == Self::IS_IDENT.bits
}
}
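/// Iterator state shared by the forward and reverse directions: `flags` tracks
/// the `:` and `"` wrappers, while `forward_byte_literal` and
/// `reverse_byte_literal` buffer any multi-character escape sequence currently
/// being emitted from the front or the back of `bytes`.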
#[derive(Debug, Clone)]
#[must_use = "this `State` is an `Iterator`, which should be consumed if constructed"]
struct State<'a> {
flags: Flags,
forward_byte_literal: InvalidUtf8ByteSequence,
bytes: &'a [u8],
reverse_byte_literal: InvalidUtf8ByteSequence,
}
impl<'a> State<'a> {
/// Construct a `State` that will not quote byte contents after the initial
/// `:`.
///
/// This constructor produces inspect contents like `:fred`.
#[inline]
fn ident(bytes: &'a [u8]) -> Self {
Self {
flags: Flags::IDENT,
forward_byte_literal: InvalidUtf8ByteSequence::new(),
bytes,
reverse_byte_literal: InvalidUtf8ByteSequence::new(),
}
}
/// Construct a `State` that will quote byte contents after the initial `:`.
///
    /// This constructor produces inspect contents like `:"Spinoso Symbol"`.
#[inline]
fn quoted(bytes: &'a [u8]) -> Self {
Self {
flags: Flags::QUOTED,
forward_byte_literal: InvalidUtf8ByteSequence::new(),
bytes,
reverse_byte_literal: InvalidUtf8ByteSequence::new(),
}
}
}
impl<'a> Default for State<'a> {
/// Construct a `State` that will render debug output for the empty slice.
///
/// This constructor produces inspect contents like `:""`.
#[inline]
fn default() -> Self {
Self::quoted(b"")
}
}
impl<'a> Iterator for State<'a> {
type Item = char;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if let Some(ch) = self.flags.emit_leading_colon() {
return Some(ch);
}
if let Some(ch) = self.flags.emit_leading_quote() {
return Some(ch);
}
if let Some(ch) = self.forward_byte_literal.next() {
return Some(ch);
}
let (ch, size) = bstr::decode_utf8(self.bytes);
match ch {
Some('"' | '\\') if self.flags.is_ident() => {
self.bytes = &self.bytes[size..];
return ch;
}
Some(ch) if is_ascii_char_with_escape(ch) => {
let (ascii_byte, remainder) = self.bytes.split_at(size);
// This conversion is safe to unwrap due to the documented
// behavior of `bstr::decode_utf8` and `InvalidUtf8ByteSequence`
// which indicate that `size` is always in the range of 0..=3.
//
// While not an invalid byte, we rely on the documented
// behavior of `InvalidUtf8ByteSequence` to always escape
// any bytes given to it.
self.forward_byte_literal = InvalidUtf8ByteSequence::try_from(ascii_byte).unwrap();
self.bytes = remainder;
return self.forward_byte_literal.next();
}
Some(ch) => {
self.bytes = &self.bytes[size..];
return Some(ch);
}
None if size == 0 => {}
None => {
let (invalid_utf8_bytes, remainder) = self.bytes.split_at(size);
// This conversion is safe to unwrap due to the documented
// behavior of `bstr::decode_utf8` and `InvalidUtf8ByteSequence`
// which indicate that `size` is always in the range of 0..=3.
self.forward_byte_literal = InvalidUtf8ByteSequence::try_from(invalid_utf8_bytes).unwrap();
self.bytes = remainder;
return self.forward_byte_literal.next();
}
};
if let Some(ch) = self.reverse_byte_literal.next() {
return Some(ch);
}
if let Some(ch) = self.flags.emit_trailing_quote() {
return Some(ch);
}
None
}
}
impl<'a> DoubleEndedIterator for State<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
if let Some(ch) = self.flags.emit_trailing_quote() {
return Some(ch);
}
if let Some(ch) = self.reverse_byte_literal.next_back() {
return Some(ch);
}
let (ch, size) = bstr::decode_last_utf8(self.bytes);
match ch {
Some('"' | '\\') if self.flags.is_ident() => {
self.bytes = &self.bytes[..self.bytes.len() - size];
return ch;
}
Some(ch) if is_ascii_char_with_escape(ch) => {
let (remainder, ascii_byte) = self.bytes.split_at(self.bytes.len() - size);
// This conversion is safe to unwrap due to the documented
// behavior of `bstr::decode_utf8` and `InvalidUtf8ByteSequence`
// which indicate that `size` is always in the range of 0..=3.
//
// While not an invalid byte, we rely on the documented
// behavior of `InvalidUtf8ByteSequence` to always escape
// any bytes given to it.
self.reverse_byte_literal = InvalidUtf8ByteSequence::try_from(ascii_byte).unwrap();
self.bytes = remainder;
return self.reverse_byte_literal.next_back();
}
Some(ch) => {
self.bytes = &self.bytes[..self.bytes.len() - size];
return Some(ch);
}
None if size == 0 => {}
None => {
let (remainder, invalid_utf8_bytes) = self.bytes.split_at(self.bytes.len() - size);
// This conversion is safe to unwrap due to the documented
// behavior of `bstr::decode_utf8` and `InvalidUtf8ByteSequence`
// which indicate that `size` is always in the range of 0..=3.
self.reverse_byte_literal = InvalidUtf8ByteSequence::try_from(invalid_utf8_bytes).unwrap();
self.bytes = remainder;
return self.reverse_byte_literal.next_back();
}
};
if let Some(ch) = self.forward_byte_literal.next_back() {
return Some(ch);
}
if let Some(ch) = self.flags.emit_leading_quote() {
return Some(ch);
}
if let Some(ch) = self.flags.emit_leading_colon() {
return Some(ch);
}
None
}
}
impl<'a> FusedIterator for State<'a> {}
#[cfg(test)]
mod tests {
use std::string::String;
use super::Inspect;
#[test]
fn empty() {
let inspect = Inspect::from("");
let debug = inspect.collect::<String>();
assert_eq!(debug, r#":"""#);
}
#[test]
fn empty_backwards() {
let mut inspect = Inspect::from("");
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some(':'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
let mut inspect = Inspect::from("");
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
let mut inspect = Inspect::from("");
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
let mut inspect = Inspect::from("");
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
}
#[test]
fn fred() {
let inspect = Inspect::from("fred");
let debug = inspect.collect::<String>();
assert_eq!(debug, ":fred");
}
#[test]
fn fred_backwards() {
let mut inspect = Inspect::from("fred");
assert_eq!(inspect.next_back(), Some('d'));
assert_eq!(inspect.next_back(), Some('e'));
assert_eq!(inspect.next_back(), Some('r'));
assert_eq!(inspect.next_back(), Some('f'));
assert_eq!(inspect.next_back(), Some(':'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
}
#[test]
fn invalid_utf8_byte() {
assert_eq!(Inspect::from(&b"\xFF"[..]).collect::<String>(), r#":"\xFF""#);
}
#[test]
fn invalid_utf8() {
let inspect = Inspect::from(&b"invalid-\xFF-utf8"[..]);
let debug = inspect.collect::<String>();
assert_eq!(debug, r#":"invalid-\xFF-utf8""#);
}
#[test]
fn invalid_utf8_backwards() {
let mut inspect = Inspect::from(&b"invalid-\xFF-utf8"[..]);
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('8'));
assert_eq!(inspect.next_back(), Some('f'));
assert_eq!(inspect.next_back(), Some('t'));
assert_eq!(inspect.next_back(), Some('u'));
assert_eq!(inspect.next_back(), Some('-'));
assert_eq!(inspect.next_back(), Some('F'));
assert_eq!(inspect.next_back(), Some('F'));
assert_eq!(inspect.next_back(), Some('x'));
assert_eq!(inspect.next_back(), Some('\\'));
assert_eq!(inspect.next_back(), Some('-'));
assert_eq!(inspect.next_back(), Some('d'));
assert_eq!(inspect.next_back(), Some('i'));
assert_eq!(inspect.next_back(), Some('l'));
assert_eq!(inspect.next_back(), Some('a'));
assert_eq!(inspect.next_back(), Some('v'));
assert_eq!(inspect.next_back(), Some('n'));
assert_eq!(inspect.next_back(), Some('i'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some(':'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
}
#[test]
fn quoted() {
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('a'));
assert_eq!(inspect.next(), Some('\\'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('b'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(Inspect::from(r#"a"b"#).collect::<String>(), r#":"a\"b""#);
}
#[test]
fn quote_backwards() {
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('\\'));
assert_eq!(inspect.next_back(), Some('a'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some(':'));
assert_eq!(inspect.next_back(), None);
}
#[test]
fn quote_double_ended() {
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('a'));
assert_eq!(inspect.next(), Some('\\'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next(), None);
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('a'));
assert_eq!(inspect.next(), Some('\\'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), None);
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('a'));
assert_eq!(inspect.next(), Some('\\'));
assert_eq!(inspect.next(), None);
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('a'));
assert_eq!(inspect.next(), Some('\\'));
assert_eq!(inspect.next_back(), None);
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next_back(), Some('\\'));
let mut inspect = Inspect::from(r#"a"b"#);
assert_eq!(inspect.next(), Some(':'));
assert_eq!(inspect.next(), Some('"'));
assert_eq!(inspect.next(), Some('a'));
assert_eq!(inspect.next(), Some('\\'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next(), Some('"'));
}
#[test]
fn emoji() {
assert_eq!(Inspect::from("💎").collect::<String>(), ":💎");
assert_eq!(Inspect::from("$💎").collect::<String>(), ":$💎");
assert_eq!(Inspect::from("@💎").collect::<String>(), ":@💎");
assert_eq!(Inspect::from("@@💎").collect::<String>(), ":@@💎");
}
#[test]
fn unicode_replacement_char() {
assert_eq!(Inspect::from("�").collect::<String>(), ":�");
assert_eq!(Inspect::from("$�").collect::<String>(), ":$�");
assert_eq!(Inspect::from("@�").collect::<String>(), ":@�");
assert_eq!(Inspect::from("@@�").collect::<String>(), ":@@�");
assert_eq!(Inspect::from("abc�").collect::<String>(), ":abc�");
assert_eq!(Inspect::from("$abc�").collect::<String>(), ":$abc�");
assert_eq!(Inspect::from("@abc�").collect::<String>(), ":@abc�");
assert_eq!(Inspect::from("@@abc�").collect::<String>(), ":@@abc�");
}
#[test]
fn escape_slash() {
assert_eq!(Inspect::from("\\").collect::<String>(), r#":"\\""#);
assert_eq!(Inspect::from("foo\\bar").collect::<String>(), r#":"foo\\bar""#);
}
#[test]
fn escape_slash_backwards() {
let mut inspect = Inspect::from("a\\b");
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some('b'));
assert_eq!(inspect.next_back(), Some('\\'));
assert_eq!(inspect.next_back(), Some('\\'));
assert_eq!(inspect.next_back(), Some('a'));
assert_eq!(inspect.next_back(), Some('"'));
assert_eq!(inspect.next_back(), Some(':'));
assert_eq!(inspect.next_back(), None);
assert_eq!(inspect.next(), None);
}
#[test]
fn nul() {
assert_eq!(Inspect::from("\0").collect::<String>(), r#":"\x00""#);
}
#[test]
fn del() {
assert_eq!(Inspect::from("\x7F").collect::<String>(), r#":"\x7F""#);
}
#[test]
fn ascii_control() {
assert_eq!(Inspect::from("\0").collect::<String>(), r#":"\x00""#);
assert_eq!(Inspect::from("\x01").collect::<String>(), r#":"\x01""#);
assert_eq!(Inspect::from("\x02").collect::<String>(), r#":"\x02""#);
assert_eq!(Inspect::from("\x03").collect::<String>(), r#":"\x03""#);
assert_eq!(Inspect::from("\x04").collect::<String>(), r#":"\x04""#);
assert_eq!(Inspect::from("\x05").collect::<String>(), r#":"\x05""#);
assert_eq!(Inspect::from("\x06").collect::<String>(), r#":"\x06""#);
assert_eq!(Inspect::from("\x07").collect::<String>(), r#":"\a""#);
assert_eq!(Inspect::from("\x08").collect::<String>(), r#":"\b""#);
assert_eq!(Inspect::from("\x09").collect::<String>(), r#":"\t""#);
assert_eq!(Inspect::from("\x0A").collect::<String>(), r#":"\n""#);
assert_eq!(Inspect::from("\x0B").collect::<String>(), r#":"\v""#);
assert_eq!(Inspect::from("\x0C").collect::<String>(), r#":"\f""#);
assert_eq!(Inspect::from("\x0D").collect::<String>(), r#":"\r""#);
assert_eq!(Inspect::from("\x0E").collect::<String>(), r#":"\x0E""#);
assert_eq!(Inspect::from("\x0F").collect::<String>(), r#":"\x0F""#);
assert_eq!(Inspect::from("\x10").collect::<String>(), r#":"\x10""#);
assert_eq!(Inspect::from("\x11").collect::<String>(), r#":"\x11""#);
assert_eq!(Inspect::from("\x12").collect::<String>(), r#":"\x12""#);
assert_eq!(Inspect::from("\x13").collect::<String>(), r#":"\x13""#);
assert_eq!(Inspect::from("\x14").collect::<String>(), r#":"\x14""#);
assert_eq!(Inspect::from("\x15").collect::<String>(), r#":"\x15""#);
assert_eq!(Inspect::from("\x16").collect::<String>(), r#":"\x16""#);
assert_eq!(Inspect::from("\x17").collect::<String>(), r#":"\x17""#);
assert_eq!(Inspect::from("\x18").collect::<String>(), r#":"\x18""#);
assert_eq!(Inspect::from("\x19").collect::<String>(), r#":"\x19""#);
assert_eq!(Inspect::from("\x1A").collect::<String>(), r#":"\x1A""#);
assert_eq!(Inspect::from("\x1B").collect::<String>(), r#":"\e""#);
assert_eq!(Inspect::from("\x1C").collect::<String>(), r#":"\x1C""#);
assert_eq!(Inspect::from("\x1D").collect::<String>(), r#":"\x1D""#);
assert_eq!(Inspect::from("\x1E").collect::<String>(), r#":"\x1E""#);
assert_eq!(Inspect::from("\x1F").collect::<String>(), r#":"\x1F""#);
assert_eq!(Inspect::from("\x20").collect::<String>(), r#":" ""#);
}
#[test]
fn special_escapes() {
// double quote
assert_eq!(Inspect::from("\x22").collect::<String>(), r#":"\"""#);
assert_eq!(Inspect::from("\"").collect::<String>(), r#":"\"""#);
// backslash
assert_eq!(Inspect::from("\x5C").collect::<String>(), r#":"\\""#);
assert_eq!(Inspect::from("\\").collect::<String>(), r#":"\\""#);
}
#[test]
fn invalid_utf8_special_global() {
assert_eq!(Inspect::from(&b"$-\xFF"[..]).collect::<String>(), r#":"$-\xFF""#);
}
#[test]
fn replacement_char_special_global() {
assert_eq!(Inspect::from("$-�").collect::<String>(), ":$-�");
assert_eq!(Inspect::from("$-�a").collect::<String>(), r#":"$-�a""#);
assert_eq!(Inspect::from("$-��").collect::<String>(), r#":"$-��""#);
}
}
#[cfg(test)]
mod specs {
use std::string::String;
use super::{Flags, Inspect};
#[test]
fn flags_ident() {
let mut flags = Flags::IDENT;
assert!(flags.is_ident());
assert_eq!(flags.emit_leading_colon(), Some(':'));
assert!(flags.is_ident());
assert_eq!(flags.emit_leading_colon(), None);
assert!(flags.is_ident());
assert_eq!(flags.emit_leading_quote(), None);
assert!(flags.is_ident());
assert_eq!(flags.emit_trailing_quote(), None);
assert!(flags.is_ident());
}
#[test]
fn flags_quoted() {
let mut flags = Flags::QUOTED;
assert!(!flags.is_ident());
assert_eq!(flags.emit_leading_colon(), Some(':'));
assert!(!flags.is_ident());
assert_eq!(flags.emit_leading_colon(), None);
assert!(!flags.is_ident());
assert_eq!(flags.emit_leading_quote(), Some('"'));
assert!(!flags.is_ident());
assert_eq!(flags.emit_leading_quote(), None);
assert!(!flags.is_ident());
assert_eq!(flags.emit_trailing_quote(), Some('"'));
assert!(!flags.is_ident());
assert_eq!(flags.emit_trailing_quote(), None);
assert!(!flags.is_ident());
}
// From spec/core/symbol/inspect_spec.rb:
//
// ```ruby
// symbols = {
// fred: ":fred",
// :fred? => ":fred?",
// :fred! => ":fred!",
// :$ruby => ":$ruby",
// :@ruby => ":@ruby",
// :@@ruby => ":@@ruby",
// :"$ruby!" => ":\"$ruby!\"",
// :"$ruby?" => ":\"$ruby?\"",
// :"@ruby!" => ":\"@ruby!\"",
// :"@ruby?" => ":\"@ruby?\"",
// :"@@ruby!" => ":\"@@ruby!\"",
// :"@@ruby?" => ":\"@@ruby?\"",
//
// :$-w => ":$-w",
// :"$-ww" => ":\"$-ww\"",
// :"$+" => ":$+",
// :"$~" => ":$~",
// :"$:" => ":$:",
// :"$?" => ":$?",
// :"$<" => ":$<",
// :"$_" => ":$_",
// :"$/" => ":$/",
// :"$'" => ":$'",
// :"$\"" => ":$\"",
// :"$$" => ":$$",
// :"$." => ":$.",
// :"$," => ":$,",
// :"$`" => ":$`",
// :"$!" => ":$!",
// :"$;" => ":$;",
// :"$\\" => ":$\\",
// :"$=" => ":$=",
// :"$*" => ":$*",
// :"$>" => ":$>",
// :"$&" => ":$&",
// :"$@" => ":$@",
// :"$1234" => ":$1234",
//
// :-@ => ":-@",
// :+@ => ":+@",
// :% => ":%",
// :& => ":&",
// :* => ":*",
// :** => ":**",
// :"/" => ":/", # lhs quoted for emacs happiness
// :< => ":<",
// :<= => ":<=",
// :<=> => ":<=>",
// :== => ":==",
// :=== => ":===",
// :=~ => ":=~",
// :> => ":>",
// :>= => ":>=",
// :>> => ":>>",
// :[] => ":[]",
// :[]= => ":[]=",
// :"\<\<" => ":\<\<",
// :^ => ":^",
// :"`" => ":`", # for emacs, and justice!
// :~ => ":~",
// :| => ":|",
//
// :"!" => [":\"!\"", ":!" ],
// :"!=" => [":\"!=\"", ":!="],
// :"!~" => [":\"!~\"", ":!~"],
// :"\$" => ":\"$\"", # for justice!
// :"&&" => ":\"&&\"",
// :"'" => ":\"\'\"",
// :"," => ":\",\"",
// :"." => ":\".\"",
// :".." => ":\"..\"",
// :"..." => ":\"...\"",
// :":" => ":\":\"",
// :"::" => ":\"::\"",
// :";" => ":\";\"",
// :"=" => ":\"=\"",
// :"=>" => ":\"=>\"",
// :"\?" => ":\"?\"", # rawr!
// :"@" => ":\"@\"",
// :"||" => ":\"||\"",
// :"|||" => ":\"|||\"",
// :"++" => ":\"++\"",
//
// :"\"" => ":\"\\\"\"",
// :"\"\"" => ":\"\\\"\\\"\"",
//
// :"9" => ":\"9\"",
// :"foo bar" => ":\"foo bar\"",
// :"*foo" => ":\"*foo\"",
// :"foo " => ":\"foo \"",
// :" foo" => ":\" foo\"",
// :" " => ":\" \"",
// }
// ```
#[test]
fn specs() {
// idents
assert_eq!(Inspect::from("fred").collect::<String>(), ":fred");
assert_eq!(Inspect::from("fred?").collect::<String>(), ":fred?");
assert_eq!(Inspect::from("fred!").collect::<String>(), ":fred!");
assert_eq!(Inspect::from("$ruby").collect::<String>(), ":$ruby");
assert_eq!(Inspect::from("@ruby").collect::<String>(), ":@ruby");
assert_eq!(Inspect::from("@@ruby").collect::<String>(), ":@@ruby");
// idents can't end in bang or question
assert_eq!(Inspect::from("$ruby!").collect::<String>(), r#":"$ruby!""#);
assert_eq!(Inspect::from("$ruby?").collect::<String>(), r#":"$ruby?""#);
assert_eq!(Inspect::from("@ruby!").collect::<String>(), r#":"@ruby!""#);
assert_eq!(Inspect::from("@ruby?").collect::<String>(), r#":"@ruby?""#);
assert_eq!(Inspect::from("@@ruby!").collect::<String>(), r#":"@@ruby!""#);
assert_eq!(Inspect::from("@@ruby?").collect::<String>(), r#":"@@ruby?""#);
// globals
assert_eq!(Inspect::from("$-w").collect::<String>(), ":$-w");
assert_eq!(Inspect::from("$-ww").collect::<String>(), r#":"$-ww""#);
assert_eq!(Inspect::from("$+").collect::<String>(), ":$+");
assert_eq!(Inspect::from("$~").collect::<String>(), ":$~");
assert_eq!(Inspect::from("$:").collect::<String>(), ":$:");
assert_eq!(Inspect::from("$?").collect::<String>(), ":$?");
assert_eq!(Inspect::from("$<").collect::<String>(), ":$<");
assert_eq!(Inspect::from("$_").collect::<String>(), ":$_");
assert_eq!(Inspect::from("$/").collect::<String>(), ":$/");
assert_eq!(Inspect::from("$\"").collect::<String>(), ":$\"");
assert_eq!(Inspect::from("$$").collect::<String>(), ":$$");
assert_eq!(Inspect::from("$.").collect::<String>(), ":$.");
assert_eq!(Inspect::from("$,").collect::<String>(), ":$,");
assert_eq!(Inspect::from("$`").collect::<String>(), ":$`");
assert_eq!(Inspect::from("$!").collect::<String>(), ":$!");
assert_eq!(Inspect::from("$;").collect::<String>(), ":$;");
assert_eq!(Inspect::from("$\\").collect::<String>(), ":$\\");
assert_eq!(Inspect::from("$=").collect::<String>(), ":$=");
assert_eq!(Inspect::from("$*").collect::<String>(), ":$*");
assert_eq!(Inspect::from("$>").collect::<String>(), ":$>");
assert_eq!(Inspect::from("$&").collect::<String>(), ":$&");
assert_eq!(Inspect::from("$@").collect::<String>(), ":$@");
assert_eq!(Inspect::from("$1234").collect::<String>(), ":$1234");
// symbolic methods
assert_eq!(Inspect::from("-@").collect::<String>(), ":-@");
assert_eq!(Inspect::from("+@").collect::<String>(), ":+@");
assert_eq!(Inspect::from("%").collect::<String>(), ":%");
assert_eq!(Inspect::from("&").collect::<String>(), ":&");
assert_eq!(Inspect::from("*").collect::<String>(), ":*");
assert_eq!(Inspect::from("**").collect::<String>(), ":**");
assert_eq!(Inspect::from("/").collect::<String>(), ":/");
assert_eq!(Inspect::from("<").collect::<String>(), ":<");
assert_eq!(Inspect::from("<=").collect::<String>(), ":<=");
assert_eq!(Inspect::from("<=>").collect::<String>(), ":<=>");
assert_eq!(Inspect::from("==").collect::<String>(), ":==");
assert_eq!(Inspect::from("===").collect::<String>(), ":===");
assert_eq!(Inspect::from("=~").collect::<String>(), ":=~");
assert_eq!(Inspect::from(">").collect::<String>(), ":>");
assert_eq!(Inspect::from(">=").collect::<String>(), ":>=");
assert_eq!(Inspect::from(">>").collect::<String>(), ":>>");
assert_eq!(Inspect::from("[]").collect::<String>(), ":[]");
assert_eq!(Inspect::from("[]=").collect::<String>(), ":[]=");
assert_eq!(Inspect::from("<<").collect::<String>(), ":<<");
assert_eq!(Inspect::from("^").collect::<String>(), ":^");
assert_eq!(Inspect::from("`").collect::<String>(), ":`");
assert_eq!(Inspect::from("~").collect::<String>(), ":~");
assert_eq!(Inspect::from("|").collect::<String>(), ":|");
// non-symbol symbolics
assert_eq!(Inspect::from("!").collect::<String>(), ":!");
assert_eq!(Inspect::from("!=").collect::<String>(), ":!=");
assert_eq!(Inspect::from("!~").collect::<String>(), ":!~");
assert_eq!(Inspect::from("$").collect::<String>(), r#":"$""#);
assert_eq!(Inspect::from("&&").collect::<String>(), r#":"&&""#);
assert_eq!(Inspect::from("'").collect::<String>(), r#":"'""#);
assert_eq!(Inspect::from(",").collect::<String>(), r#":",""#);
assert_eq!(Inspect::from(".").collect::<String>(), r#":".""#);
assert_eq!(Inspect::from("..").collect::<String>(), r#":"..""#);
assert_eq!(Inspect::from("...").collect::<String>(), r#":"...""#);
assert_eq!(Inspect::from(":").collect::<String>(), r#":":""#);
assert_eq!(Inspect::from("::").collect::<String>(), r#":"::""#);
assert_eq!(Inspect::from(";").collect::<String>(), r#":";""#);
assert_eq!(Inspect::from("=").collect::<String>(), r#":"=""#);
assert_eq!(Inspect::from("=>").collect::<String>(), r#":"=>""#);
assert_eq!(Inspect::from("?").collect::<String>(), r#":"?""#);
assert_eq!(Inspect::from("@").collect::<String>(), r#":"@""#);
assert_eq!(Inspect::from("||").collect::<String>(), r#":"||""#);
assert_eq!(Inspect::from("|||").collect::<String>(), r#":"|||""#);
assert_eq!(Inspect::from("++").collect::<String>(), r#":"++""#);
// quotes
assert_eq!(Inspect::from(r#"""#).collect::<String>(), r#":"\"""#);
assert_eq!(Inspect::from(r#""""#).collect::<String>(), r#":"\"\"""#);
assert_eq!(Inspect::from("9").collect::<String>(), r#":"9""#);
assert_eq!(Inspect::from("foo bar").collect::<String>(), r#":"foo bar""#);
assert_eq!(Inspect::from("*foo").collect::<String>(), r#":"*foo""#);
assert_eq!(Inspect::from("foo ").collect::<String>(), r#":"foo ""#);
assert_eq!(Inspect::from(" foo").collect::<String>(), r#":" foo""#);
assert_eq!(Inspect::from(" ").collect::<String>(), r#":" ""#);
}
}
/// Tests generated from symbols loaded at MRI interpreter boot.
///
/// # Generation
///
/// ```shell
/// cat <<EOF | ruby --disable-gems --disable-did_you_mean
/// def boot_identifier_symbols
/// syms = Symbol.all_symbols.map(&:inspect)
/// # remove symbols that must be debug wrapped in quotes
/// syms = syms.reject { |s| s[0..1] == ':"' }
///
/// fixture = syms.map { |s| "r##\"#{s}\"##" }
/// puts fixture.join(",\n")
/// end
///
/// boot_identifier_symbols
/// EOF
/// ```
#[cfg(test)]
mod functionals {
use std::string::String;
use super::Inspect;
use crate::fixtures::{IDENTS, IDENT_INSPECTS};
#[test]
fn mri_symbol_idents() {
let pairs = IDENTS.iter().copied().zip(IDENT_INSPECTS.iter().copied());
for (sym, expected) in pairs {
let inspect = Inspect::from(sym).collect::<String>();
assert_eq!(
inspect, expected,
"Expected '{}', to be the result of '{}'.inspect; got '{}'",
expected, sym, inspect,
);
}
}
}
| 37.950405 | 109 | 0.497906 |
cc41992994ec81434ee270fec2c1917c1385f1d5 | 3,949 | use std::fs::File;
use std::io::{self, Stdin};
use std::mem;
use std::os::unix::io::AsRawFd;
use std::os::unix::io::RawFd;
// re-export
pub use termios::{
// c_iflag
BRKINT,
ICRNL,
IGNBRK,
IGNCR,
IGNPAR,
INLCR,
INPCK,
ISTRIP,
IXANY,
IXOFF,
IXON,
PARMRK,
};
pub use termios::{
// c_cflag
CLOCAL,
CREAD,
CS5,
CS6,
CS7,
CS8,
CSIZE,
CSTOPB,
HUPCL,
PARENB,
PARODD,
};
pub use termios::{
// c_lflag
ECHO,
ECHOE,
ECHOK,
ECHONL,
ICANON,
IEXTEN,
ISIG,
NOFLSH,
TOSTOP,
};
pub use termios::{
// c_oflag
OCRNL,
ONLCR,
ONLRET,
ONOCR,
OPOST,
};
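/// Builder that stages changes to the `termios` attributes of a file
/// descriptor and applies them with `set()`, which returns a
/// `TermAttrRestorer` holding the original attributes so they can be put back
/// later.
///
/// A minimal usage sketch (assuming `fd` is a valid terminal descriptor):
///
/// ```ignore
/// // clear ICANON and ECHO for unbuffered, unechoed input
/// let restorer = TermAttrSetter::new(fd).lflag(0, ICANON | ECHO).set();
/// // ... interact with the terminal ...
/// restorer.restore(fd);
/// ```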
pub struct TermAttrSetter {
fd: RawFd,
default: termios::Termios,
custom: termios::Termios,
}
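/// Holds the terminal attributes captured before `TermAttrSetter::set()` was
/// called so they can be reinstated with `restore()`.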
pub struct TermAttrRestorer {
default: termios::Termios,
}
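/// Indices into the `c_cc` control-character array, for use with
/// `TermAttrSetter::cc()`.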
pub enum CcSymbol {
VEof = termios::VEOF as isize,
VEol = termios::VEOL as isize,
VErase = termios::VERASE as isize,
VIntr = termios::VINTR as isize,
VKill = termios::VKILL as isize,
VMin = termios::VMIN as isize,
VQuit = termios::VQUIT as isize,
VStart = termios::VSTART as isize,
VStop = termios::VSTOP as isize,
VSusp = termios::VSUSP as isize,
VTime = termios::VTIME as isize,
}
impl TermAttrSetter {
pub fn new(fd: RawFd) -> TermAttrSetter {
let stat = termios::Termios::from_fd(fd).unwrap_or_else(|_| panic!("invalid fd {:?}", fd));
Self {
fd,
default: stat,
custom: stat,
}
}
pub fn iflag(
&mut self,
set_flags: termios::tcflag_t,
clear_flags: termios::tcflag_t,
) -> &mut Self {
self.custom.c_iflag |= set_flags;
self.custom.c_iflag &= !clear_flags;
self
}
pub fn oflag(
&mut self,
set_flags: termios::tcflag_t,
clear_flags: termios::tcflag_t,
) -> &mut Self {
self.custom.c_oflag |= set_flags;
self.custom.c_oflag &= !clear_flags;
self
}
pub fn cflag(
&mut self,
set_flags: termios::tcflag_t,
clear_flags: termios::tcflag_t,
) -> &mut Self {
self.custom.c_cflag |= set_flags;
self.custom.c_cflag &= !clear_flags;
self
}
pub fn lflag(
&mut self,
set_flags: termios::tcflag_t,
clear_flags: termios::tcflag_t,
) -> &mut Self {
self.custom.c_lflag |= set_flags;
self.custom.c_lflag &= !clear_flags;
self
}
pub fn cc(&mut self, sym: CcSymbol, value: u8) -> &mut Self {
self.custom.c_cc[sym as usize] = value;
self
}
pub fn set(&self) -> TermAttrRestorer {
termios::tcsetattr(self.fd, termios::TCSANOW, &self.custom).unwrap();
TermAttrRestorer {
default: self.default,
}
}
}
impl TermAttrRestorer {
pub fn restore(&self, fd: RawFd) {
termios::tcsetattr(fd, termios::TCSANOW, &self.default).unwrap();
}
}
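/// Toggles non-blocking mode on a stream; implemented for `Stdin` via the
/// `FIONBIO` ioctl.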
pub trait Block {
fn nonblocking(&self);
fn blocking(&self);
}
impl Block for Stdin {
fn nonblocking(&self) {
unsafe {
let mut nonblocking = 1_u64;
libc::ioctl(0, libc::FIONBIO, &mut nonblocking);
}
}
fn blocking(&self) {
unsafe {
let mut nonblocking = 0_u64;
libc::ioctl(0, libc::FIONBIO, &mut nonblocking);
}
}
}
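/// Queries the size of the controlling terminal (`/dev/tty`) with the
/// `TIOCGWINSZ` ioctl and returns it as `(columns, rows)`.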
pub fn dev_tty_size() -> io::Result<(u16, u16)> {
#[repr(C)]
struct WinSize {
row: libc::c_ushort,
col: libc::c_ushort,
_xpixel: libc::c_ushort,
_ypixel: libc::c_ushort,
}
let ftty = File::open("/dev/tty").unwrap();
let mut size: WinSize = unsafe { mem::zeroed() };
if unsafe { libc::ioctl(ftty.as_raw_fd(), libc::TIOCGWINSZ, &mut size as *mut _) } == 0 {
Ok((size.col, size.row))
} else {
Err(io::Error::last_os_error())
}
}
| 20.89418 | 99 | 0.553305 |
0344d6a46eeb58ffd54af4db766b59ec7c13785f | 3,858 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-lexer-test FIXME #15679
// Microbenchmarks for various functions in std and extra
#![feature(unboxed_closures, rand, old_io, old_path, std_misc, collections)]
use std::old_io::*;
use std::old_path::{Path, GenericPath};
use std::iter::repeat;
use std::mem::swap;
use std::env;
use std::rand::Rng;
use std::rand;
use std::str;
use std::time::Duration;
use std::vec;
fn main() {
let argv: Vec<String> = env::args().collect();
macro_rules! bench {
($id:ident) =>
(maybe_run_test(&argv,
stringify!($id).to_string(),
$id))
}
bench!(shift_push);
bench!(read_line);
bench!(vec_plus);
bench!(vec_append);
bench!(vec_push_all);
bench!(is_utf8_ascii);
bench!(is_utf8_multibyte);
}
fn maybe_run_test<F>(argv: &[String], name: String, test: F) where F: FnOnce() {
let mut run_test = false;
if env::var_os("RUST_BENCH").is_some() {
run_test = true
} else if argv.len() > 0 {
run_test = argv.iter().any(|x| x == &"all".to_string()) || argv.iter().any(|x| x == &name)
}
if !run_test {
return
}
let dur = Duration::span(test);
println!("{}:\t\t{} ms", name, dur.num_milliseconds());
}
fn shift_push() {
let mut v1 = repeat(1).take(30000).collect::<Vec<_>>();
let mut v2 = Vec::new();
while v1.len() > 0 {
v2.push(v1.remove(0));
}
}
fn read_line() {
use std::old_io::BufferedReader;
let mut path = Path::new(env!("CFG_SRC_DIR"));
path.push("src/test/bench/shootout-k-nucleotide.data");
for _ in 0..3 {
let mut reader = BufferedReader::new(File::open(&path).unwrap());
for _line in reader.lines() {
}
}
}
fn vec_plus() {
let mut r = rand::thread_rng();
let mut v = Vec::new();
let mut i = 0;
while i < 1500 {
let rv = repeat(i).take(r.gen_range(0, i + 1)).collect::<Vec<_>>();
if r.gen() {
v.extend(rv.into_iter());
} else {
let mut rv = rv.clone();
rv.push_all(&v);
v = rv;
}
i += 1;
}
}
fn vec_append() {
let mut r = rand::thread_rng();
let mut v = Vec::new();
let mut i = 0;
while i < 1500 {
let rv = repeat(i).take(r.gen_range(0, i + 1)).collect::<Vec<_>>();
if r.gen() {
let mut t = v.clone();
t.push_all(&rv);
v = t;
}
else {
let mut t = rv.clone();
t.push_all(&v);
v = t;
}
i += 1;
}
}
fn vec_push_all() {
let mut r = rand::thread_rng();
let mut v = Vec::new();
for i in 0..1500 {
let mut rv = repeat(i).take(r.gen_range(0, i + 1)).collect::<Vec<_>>();
if r.gen() {
v.push_all(&rv);
}
else {
swap(&mut v, &mut rv);
v.push_all(&rv);
}
}
}
fn is_utf8_ascii() {
let mut v : Vec<u8> = Vec::new();
for _ in 0..20000 {
v.push('b' as u8);
if str::from_utf8(&v).is_err() {
panic!("from_utf8 panicked");
}
}
}
fn is_utf8_multibyte() {
let s = "b¢€𤭢";
let mut v : Vec<u8> = Vec::new();
for _ in 0..5000 {
v.push_all(s.as_bytes());
if str::from_utf8(&v).is_err() {
panic!("from_utf8 panicked");
}
}
}
| 23.962733 | 98 | 0.529549 |
146276da7931e91687f67bf06c5b99c4906038e2 | 1,182 | mod binary_search;
mod binary_search_recursive;
mod exponential_search;
mod fibonacci_search;
mod interpolation_search;
mod jump_search;
mod kth_smallest;
mod kth_smallest_heap;
mod linear_search;
mod quick_select;
mod ternary_search;
mod ternary_search_min_max;
mod ternary_search_min_max_recursive;
mod ternary_search_recursive;
pub use self::binary_search::binary_search;
pub use self::binary_search_recursive::binary_search_rec;
pub use self::exponential_search::exponential_search;
pub use self::fibonacci_search::fibonacci_search;
pub use self::interpolation_search::interpolation_search;
pub use self::jump_search::jump_search;
pub use self::kth_smallest::kth_smallest;
pub use self::kth_smallest_heap::kth_smallest_heap;
pub use self::linear_search::linear_search;
pub use self::quick_select::quick_select;
pub use self::ternary_search::ternary_search;
pub use self::ternary_search_min_max::ternary_search_max;
pub use self::ternary_search_min_max::ternary_search_min;
pub use self::ternary_search_min_max_recursive::ternary_search_max_rec;
pub use self::ternary_search_min_max_recursive::ternary_search_min_rec;
pub use self::ternary_search_recursive::ternary_search_rec;
| 36.9375 | 71 | 0.85533 |
76577ea06f8ed24b7d65aabc24aaec81a8a822be | 13,576 | #![doc(html_root_url = "https://docs.rs/handlebars/3.0.0")]
#![deny(warnings)]
//! # Handlebars
//!
//! [Handlebars](http://handlebarsjs.com/) is a modern and extensible templating solution originally created in the JavaScript world. It's used by many popular frameworks like [Ember.js](http://emberjs.com) and Chaplin. It's also ported to some other platforms such as [Java](https://github.com/jknack/handlebars.java).
//!
//! And this is the Rust implementation of Handlebars, designed for general-purpose text generation.
//!
//! ## Quick Start
//!
//! ```
//! use std::collections::BTreeMap;
//! use handlebars::Handlebars;
//!
//! fn main() {
//! // create the handlebars registry
//! let mut handlebars = Handlebars::new();
//!
//! // register the template. The template string will be verified and compiled.
//! let source = "hello {{world}}";
//! assert!(handlebars.register_template_string("t1", source).is_ok());
//!
//! // Prepare some data.
//! //
//! // The data type should implements `serde::Serialize`
//! let mut data = BTreeMap::new();
//! data.insert("world".to_string(), "世界!".to_string());
//! assert_eq!(handlebars.render("t1", &data).unwrap(), "hello 世界!");
//! }
//! ```
//!
//! In this example, we created a template registry and registered a template named `t1`.
//! Then we rendered a `BTreeMap` with an entry under the key `world`, and the result is just
//! what we expected.
//!
//! I recommend you walk through handlebars.js' [intro page](http://handlebarsjs.com)
//! if you are not quite familiar with the template language itself.
//!
//! ## Features
//!
//! Handlebars is a real-world templating system that you can use to build
//! your application without pain.
//!
//! ### Isolation of Rust and HTML
//!
//! This library doesn't attempt to use macro magic to allow you to
//! write your template within your Rust code. I admit that it's fun to do
//! that, but in my opinion it doesn't fit real-world use cases.
//!
//! ### Limited but essential control structure built-in
//!
//! Only the essential control directives `if` and `each` are built in. This
//! prevents you from putting too much application logic into your template.
//!
//! ### Extensible helper system
//!
//! You can write your own helper with Rust! It can be a block helper or an
//! inline helper. Put your logic into the helper and don't repeat
//! yourself.
//!
//! The built-in helpers like `if` and `each` were written with these
//! helper APIs and the APIs are fully available to developers.
//!
//! ### Template inheritance
//!
//! Every time I look into a templating system, I will investigate its
//! support for [template inheritance][t].
//!
//! [t]: https://docs.djangoproject.com/en/1.9/ref/templates/language/#template-inheritance
//!
//! Template inclusion is not enough. In most cases you will need a page
//! skeleton as the parent (header, footer, etc.), and to embed your page into
//! this parent.
//!
//! You can find a real example for template inheritance in
//! `examples/partials.rs`, and templates used by this file.
//!
//! ### Strict mode
//!
//! Handlebars, a language designed to work with JavaScript, has no
//! strict restriction on accessing non-existent fields or indexes. It
//! generates an empty string in such cases. However, in Rust we sometimes
//! want to be a bit more strict.
//!
//! By enabling `strict_mode` on handlebars:
//!
//! ```
//! # use handlebars::Handlebars;
//! # let mut handlebars = Handlebars::new();
//! handlebars.set_strict_mode(true);
//! ```
//!
//! You will get a `RenderError` when accessing fields that do not exist.
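//!
//! For example (a minimal sketch; the field names are invented for illustration):
//!
//! ```
//! # use handlebars::Handlebars;
//! let mut handlebars = Handlebars::new();
//! handlebars.set_strict_mode(true);
//!
//! let data = serde_json::json!({"title": "ok"});
//! // `title` exists in the data, `missing` does not.
//! assert!(handlebars.render_template("{{title}}", &data).is_ok());
//! assert!(handlebars.render_template("{{missing}}", &data).is_err());
//! ```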
//!
//! ## Limitations
//!
//! ### Compatibility with original JavaScript version
//!
//! This implementation is **not fully compatible** with the original javascript version.
//!
//! First of all, the mustache block syntax is not supported. I suggest you use `#if` and `#each`
//! for the same functionality.
//!
//! There are some other minor features missing:
//!
//! * Chained else [#12](https://github.com/sunng87/handlebars-rust/issues/12)
//!
//! Feel free to fire an issue on [github](https://github.com/sunng87/handlebars-rust/issues) if
//! you find missing features.
//!
//! ### Types
//!
//! As a statically typed language, Rust makes using handlebars a little verbose.
//! The Handlebars templating language is designed around the JSON data model. In Rust,
//! we convert the user's structs, vectors or maps into Serde-JSON's `Value` type
//! in order to use them in a template. You have to make sure your data implements the
//! `Serialize` trait from the [Serde](https://serde.rs) project.
//!
//! ## Usage
//!
//! ### Template Creation and Registration
//!
//! Templates are created from String and registered to `Handlebars` with a name.
//!
//! ```
//! # extern crate handlebars;
//!
//! use handlebars::Handlebars;
//!
//! # fn main() {
//! let mut handlebars = Handlebars::new();
//! let source = "hello {{world}}";
//!
//! assert!(handlebars.register_template_string("t1", source).is_ok())
//! # }
//! ```
//!
//! On registration, the template is parsed, compiled and cached in the registry, so further
//! usage benefits from the one-time work. Also, features like include and inheritance
//! that involve template references require you to register those templates first with
//! a name so the registry can find them.
//!
//! If your template is small or you are just experimenting, you can use the `render_template`
//! API without registration.
//!
//! ```
//! # use std::error::Error;
//! use handlebars::Handlebars;
//! use std::collections::BTreeMap;
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! let mut handlebars = Handlebars::new();
//! let source = "hello {{world}}";
//!
//! let mut data = BTreeMap::new();
//! data.insert("world".to_string(), "世界!".to_string());
//! assert_eq!(handlebars.render_template(source, &data)?, "hello 世界!".to_owned());
//! # Ok(())
//! # }
//! ```
//!
//! ### Rendering Something
//!
//! Since handlebars is originally based on the JavaScript type system, it supports dynamic features like duck-typing and truthy/falsey values. But for a static language like Rust, this is a little difficult. As a solution, we use the `serde_json::value::Value` type internally for data rendering.
//!
//! That means, if you want to render something, you have to ensure the data type implements the `serde::Serialize` trait. Most Rust built-in types already implement it. Use `#[derive(Serialize)]` on your types to generate a default implementation.
//!
//! You can use the default `render` function to render a template into a `String`. From 0.9, there's `renderw` to render text into anything that implements `std::io::Write`.
//!
//! ```
//! # use std::error::Error;
//! # #[macro_use]
//! # extern crate serde_derive;
//! # extern crate handlebars;
//!
//! use handlebars::Handlebars;
//!
//! #[derive(Serialize)]
//! struct Person {
//! name: String,
//! age: i16,
//! }
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! let source = "Hello, {{name}}";
//!
//! let mut handlebars = Handlebars::new();
//! assert!(handlebars.register_template_string("hello", source).is_ok());
//!
//!
//! let data = Person {
//! name: "Ning Sun".to_string(),
//! age: 27
//! };
//! assert_eq!(handlebars.render("hello", &data)?, "Hello, Ning Sun".to_owned());
//! # Ok(())
//! # }
//! #
//! ```
//!
//! Or if you don't need the template to be cached or referenced by other ones, you can
//! simply render it without registering.
//!
//! ```
//! # use std::error::Error;
//! # #[macro_use]
//! # extern crate serde_derive;
//! # extern crate handlebars;
//! use handlebars::Handlebars;
//! # #[derive(Serialize)]
//! # struct Person {
//! # name: String,
//! # age: i16,
//! # }
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! let source = "Hello, {{name}}";
//!
//! let mut handlebars = Handlebars::new();
//!
//! let data = Person {
//! name: "Ning Sun".to_string(),
//! age: 27
//! };
//! assert_eq!(handlebars.render_template("Hello, {{name}}", &data)?,
//! "Hello, Ning Sun".to_owned());
//! # Ok(())
//! # }
//! ```
//!
//! #### Escaping
//!
//! As per the handlebars spec, output using `{{expression}}` is escaped by default (to be precise, the characters `&"<>` are replaced by their respective html / xml entities). However, since the use cases of a Rust template engine are probably a bit more diverse than those of a JavaScript one, this implementation allows the user to supply a custom escape function to be used instead. For more information see the `EscapeFn` type and `Handlebars::register_escape_fn()` method.
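//!
//! For instance, a minimal sketch that simply turns escaping off (a custom
//! `Fn(&str) -> String` closure can be registered the same way):
//!
//! ```
//! use handlebars::{no_escape, Handlebars};
//!
//! let mut handlebars = Handlebars::new();
//! // Leave `{{expression}}` output untouched instead of HTML-escaping it.
//! handlebars.register_escape_fn(no_escape);
//! ```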
//!
//! ### Custom Helper
//!
//! Handlebars is nothing without helpers. You can also create your own helpers with Rust. Helpers in handlebars-rust are custom structs implementing the `HelperDef` trait, concretely, the `call` function. For your convenience, most stateless helpers can be implemented as bare functions.
//!
//! ```
//! use std::io::Write;
//! # use std::error::Error;
//! use handlebars::{Handlebars, HelperDef, RenderContext, Helper, Context, JsonRender, HelperResult, Output, RenderError};
//!
//! // implement by a structure impls HelperDef
//! #[derive(Clone, Copy)]
//! struct SimpleHelper;
//!
//! impl HelperDef for SimpleHelper {
//! fn call<'reg: 'rc, 'rc>(&self, h: &Helper, _: &Handlebars, _: &Context, rc: &mut RenderContext, out: &mut dyn Output) -> HelperResult {
//! let param = h.param(0).unwrap();
//!
//! out.write("1st helper: ")?;
//! out.write(param.value().render().as_ref())?;
//! Ok(())
//! }
//! }
//!
//! // implement via bare function
//! fn another_simple_helper (h: &Helper, _: &Handlebars, _: &Context, rc: &mut RenderContext, out: &mut dyn Output) -> HelperResult {
//! let param = h.param(0).unwrap();
//!
//! out.write("2nd helper: ")?;
//! out.write(param.value().render().as_ref())?;
//! Ok(())
//! }
//!
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! let mut handlebars = Handlebars::new();
//! handlebars.register_helper("simple-helper", Box::new(SimpleHelper));
//! handlebars.register_helper("another-simple-helper", Box::new(another_simple_helper));
//! // via closure
//! handlebars.register_helper("closure-helper",
//! Box::new(|h: &Helper, r: &Handlebars, _: &Context, rc: &mut RenderContext, out: &mut dyn Output| -> HelperResult {
//! let param = h.param(0).ok_or(RenderError::new("param not found"))?;
//!
//! out.write("3rd helper: ")?;
//! out.write(param.value().render().as_ref())?;
//! Ok(())
//! }));
//!
//! let tpl = "{{simple-helper 1}}\n{{another-simple-helper 2}}\n{{closure-helper 3}}";
//! assert_eq!(handlebars.render_template(tpl, &())?,
//! "1st helper: 1\n2nd helper: 2\n3rd helper: 3".to_owned());
//! # Ok(())
//! # }
//!
//! ```
//! Data available to helpers can be found in [Helper](struct.Helper.html). And there are more
//! examples on the [HelperDef](trait.HelperDef.html) page.
//!
//! You can learn more about helpers by looking into source code of built-in helpers.
//!
//! #### Built-in Helpers
//!
//! * `{{{{raw}}}} ... {{{{/raw}}}}` escape handlebars expression within the block
//! * `{{#if ...}} ... {{else}} ... {{/if}}` if-else block
//! * `{{#unless ...}} ... {{else}} .. {{/unless}}` if-not-else block
//! * `{{#each ...}} ... {{/each}}` iterates over an array or object. Handlebars-rust doesn't support the mustache iteration syntax, so use this instead.
//! * `{{#with ...}} ... {{/with}}` changes the current context. Similar to `{{#each}}`, it is used to replace the corresponding mustache syntax.
//! * `{{lookup ... ...}}` get value from array by `@index` or `@key`
//! * `{{> ...}}` include template with name
//! * `{{log ...}}` logs a value with the Rust logger, default level: INFO. Currently you cannot change the level.
//! * Boolean helpers that can be used in `if` as subexpression, for example `{{#if (gt 2 1)}} ...`:
//! * `eq`
//! * `ne`
//! * `gt`
//! * `gte`
//! * `lt`
//! * `lte`
//! * `and`
//! * `or`
//! * `not`
//!
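//! As a quick illustration (the template and data below are invented for this example),
//! the block helpers and the boolean subexpressions compose like this:
//!
//! ```
//! # use std::error::Error;
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use handlebars::Handlebars;
//!
//! let handlebars = Handlebars::new();
//! // Render each item only when the `limit` field is greater than 1.
//! let tpl = "{{#if (gt limit 1)}}{{#each items}}{{this}} {{/each}}{{/if}}";
//! let data = serde_json::json!({"limit": 2, "items": ["a", "b"]});
//! assert_eq!(handlebars.render_template(tpl, &data)?, "a b ");
//! # Ok(())
//! # }
//! ```
//!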
//! ### Template inheritance
//!
//! Handlebars.js' partial system is fully supported in this implementation.
//! Check [example](https://github.com/sunng87/handlebars-rust/blob/master/examples/partials.rs#L49) for details.
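//!
//! A minimal sketch of a partial include (the template names here are made up):
//!
//! ```
//! # use std::error::Error;
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use handlebars::Handlebars;
//!
//! let mut handlebars = Handlebars::new();
//! // `{{> body}}` includes the registered template named "body".
//! handlebars.register_template_string("base", "header|{{> body}}|footer")?;
//! handlebars.register_template_string("body", "hello")?;
//! assert_eq!(handlebars.render("base", &())?, "header|hello|footer");
//! # Ok(())
//! # }
//! ```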
//!
//!
#![allow(dead_code)]
#![recursion_limit = "200"]
#[cfg(not(feature = "no_logging"))]
#[macro_use]
extern crate log;
#[cfg(test)]
#[macro_use]
extern crate maplit;
extern crate pest;
#[macro_use]
extern crate pest_derive;
#[macro_use]
extern crate quick_error;
#[cfg(test)]
#[macro_use]
extern crate serde_derive;
#[cfg(test)]
extern crate tempfile;
extern crate serde;
#[allow(unused_imports)]
#[macro_use]
extern crate serde_json;
#[cfg(feature = "dir_source")]
extern crate walkdir;
pub use self::block::{BlockContext, BlockParams};
pub use self::context::Context;
pub use self::decorators::DecoratorDef;
pub use self::error::{RenderError, TemplateError, TemplateFileError, TemplateRenderError};
pub use self::helpers::{HelperDef, HelperResult};
pub use self::json::path::Path;
pub use self::json::value::{to_json, JsonRender, PathAndJson, ScopedJson};
pub use self::output::Output;
pub use self::registry::{html_escape, no_escape, EscapeFn, Registry as Handlebars};
pub use self::render::{Decorator, Evaluable, Helper, RenderContext, Renderable};
pub use self::template::Template;
#[doc(hidden)]
pub use self::serde_json::Value as JsonValue;
#[macro_use]
mod macros;
mod block;
mod context;
mod decorators;
mod error;
mod grammar;
mod helpers;
mod json;
mod output;
mod partial;
mod registry;
mod render;
mod support;
pub mod template;
mod util;
| 35.632546 | 478 | 0.652254 |
56bf5564cc7194d217f6a768e4b7df96234a0f42 | 1,433 | use criterion::{black_box, criterion_group, criterion_main, Criterion};
use pushgen::{SliceGenerator, GeneratorExt};
use itertools::Itertools;
fn run_iterator(data: &Vec<Vec<i32>>) {
let mut result = 0i32;
data.iter()
.flatten()
.dedup()
.filter(|x| *x % 2 == 0)
.map(|x| x * 3)
.for_each(|x| result = result.wrapping_add(x));
black_box(result);
}
fn run_generator(data: &Vec<Vec<i32>>) {
let mut result = 0i32;
SliceGenerator::new(data.as_slice())
.flatten(|x| SliceGenerator::new(x.as_slice()))
.dedup()
.filter(|x| *x % 2 == 0)
.map(|x| x * 3)
.for_each(|x| result = result.wrapping_add(x));
black_box(result);
}
pub fn make_data() -> Vec<Vec<i32>> {
let mut data = Vec::new();
data.reserve(100_000);
for x in 0..100_000/4 {
data.push(x);
data.push(x);
data.push(x);
data.push(x);
}
let mut retval = Vec::new();
for _x in 0..10 {
retval.push(data.clone());
}
retval
}
pub fn benchmarks(c: &mut Criterion) {
let data = make_data();
c.bench_function("iterator_flatten_dedup_filter_map", |b| {
b.iter(|| run_iterator(black_box(&data)))
});
c.bench_function("generator_flatten_dedup_filter_map", |b| {
b.iter(|| run_generator(black_box(&data)))
});
}
criterion_group!(benches, benchmarks);
criterion_main!(benches);
| 25.589286 | 71 | 0.585485 |
4ad0198aeb60d9ece7124e3bc11f3bd5a9dcd43b | 56 | fn main() {
println!("cargo:rustc-cfg=has_i128");
}
| 14 | 41 | 0.607143 |
f472f116781926fcdf571a28e00a6813898355da | 705 | /*
* Rust BareBones OS
* - By John Hodge (Mutabah/thePowersGang)
*
* macros.rs
* - Macros used by the kernel
*
* This code has been put into the public domain, there are no restrictions on
* its use, and the author takes no liability.
*/
/// A very primitive logging macro
///
/// Obtains a logger instance (locking the log channel) with the current module name,
/// then passes the standard `format!` arguments to it.
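///
/// A usage sketch (the message and value are just examples):
///
/// ```ignore
/// log!("initialised module, flags = {:#x}", 0x3usize);
/// ```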
macro_rules! log{
( $($arg:tt)* ) => ({
// Import the Writer trait (required by write!)
use core::fmt::Write;
let mut writer = crate::logging::Writer::get(module_path!());
let _ = write!(&mut writer, $($arg)*);
let _ = writer.write_str("\n");
})
}
| 28.2 | 92 | 0.656738 |
145a4f316be89b27e17c6a6656d0ef91991002e6 | 107 | //! Diagnostic reporting support for the codespan crate.
pub mod diagnostic;
pub mod files;
pub mod term;
| 17.833333 | 56 | 0.766355 |
9c292c998d70cfd81205b80d6477acb83040f388 | 3,698 | #![cfg_attr(not(feature = "std"), no_std)]
#![feature(try_trait)]
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::FromPrimitive;
#[repr(u32)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, FromPrimitive, ToPrimitive)]
pub enum StatusCode {
Success = 0,
NotFound = 1,
PermissionDenied = 2,
ConnectionRefused = 3,
ConnectionReset = 4,
ConnectionAborted = 5,
NotConnected = 6,
AddrInUse = 7,
AddrNotAvailable = 8,
BrokenPipe = 9,
AlreadyExists = 10,
WouldBlock = 11,
InvalidInput = 12,
InvalidData = 13,
TimedOut = 14,
WriteZero = 15,
Interrupted = 16,
Other = 17,
UnexpectedEof = 18,
Range = 19,
Segfault = 20,
TooManyResources = 21,
BadMemory = 22,
FrameUnderflow = 23,
InvalidConversion = 24,
InvalidTable = 25,
InvalidMemory = 26,
InvalidFrame = 27,
OutOfBoundGlobal = 28,
OutOfBoundLocal = 29,
OutOfBoundMemory = 30,
OutOfMemory = 31,
StackUnderflow = 32,
StackOverflow = 33,
Unreachable = 34,
InvalidIntegerDivision = 35,
InvalidIndirectSignature = 36,
CPUExhaustion = 37,
}
macro_rules! conv {
($ty:ident, $call:ident) => {
impl From<$ty> for StatusCode {
fn from(value: $ty) -> Self {
FromPrimitive::$call(value).unwrap_or(StatusCode::Other)
}
}
impl Into<$ty> for StatusCode {
fn into(self) -> $ty {
self as $ty
}
}
};
}
conv!(u8, from_u8);
conv!(u16, from_u16);
conv!(u32, from_u32);
conv!(u64, from_u64);
conv!(usize, from_usize);
impl Into<::core::result::Result<(), StatusCode>> for StatusCode {
fn into(self) -> ::core::result::Result<(), StatusCode> {
match self {
StatusCode::Success => Ok(()),
_ => Err(self),
}
}
}
impl core::ops::Try for StatusCode {
type Error = Self;
type Ok = Self;
fn into_result(self) -> Result<Self::Ok, Self::Error> {
match self {
StatusCode::Success => Ok(self),
_ => Err(self),
}
}
fn from_error(v: Self::Error) -> Self {
v
}
fn from_ok(v: Self::Error) -> Self {
v
}
}
macro_rules! io_error {
($($name:ident,)*) => {
#[cfg(feature = "std")]
impl From<std::io::ErrorKind> for StatusCode {
fn from(kind: std::io::ErrorKind) -> Self {
match kind {
$(std::io::ErrorKind::$name => StatusCode::$name,)*
_ => StatusCode::Other,
}
}
}
#[cfg(feature = "std")]
impl From<std::io::Error> for StatusCode {
fn from(error: std::io::Error) -> Self {
error.kind().into()
}
}
#[cfg(feature = "std")]
impl Into<std::io::ErrorKind> for StatusCode {
fn into(self) -> std::io::ErrorKind {
match self {
$(StatusCode::$name => std::io::ErrorKind::$name,)*
_ => std::io::ErrorKind::Other,
}
}
}
#[cfg(feature = "std")]
impl Into<std::io::Error> for StatusCode {
fn into(self) -> std::io::Error {
std::io::Error::new(self.into(), "")
}
}
}
}
io_error!(
NotFound,
PermissionDenied,
ConnectionRefused,
ConnectionReset,
ConnectionAborted,
NotConnected,
AddrInUse,
AddrNotAvailable,
BrokenPipe,
AlreadyExists,
WouldBlock,
InvalidInput,
InvalidData,
TimedOut,
WriteZero,
Interrupted,
UnexpectedEof,
);
| 23.405063 | 78 | 0.52623 |
872487eae4c0115f2e58a3cbad1f6727a64b1d1d | 722 | extern crate protoc;
use std::path::PathBuf;
fn main() {
let exe_suffix = if cfg!(windows) {
".exe"
} else if cfg!(unix) {
""
} else {
panic!("unknown OS")
};
let protoc_gen_rust = PathBuf::from(format!("../../target/debug/protoc-gen-rust{}", exe_suffix));
let protoc_gen_rust = protoc_gen_rust.canonicalize().expect("canonicalize");
assert!(protoc_gen_rust.is_file(), "{:?}", protoc_gen_rust);
protoc::run(protoc::Args {
lang: "rust",
out_dir: "src",
plugin: Some(&format!("protoc-gen-rust={}", protoc_gen_rust.as_os_str().to_str().unwrap())),
input: &["src/data.proto"],
..Default::default()
}).expect("protoc");
}
| 26.740741 | 101 | 0.580332 |
d6f7510ff719b5b2bb0c78c6ca5c0814a479185d | 12,657 | ////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2018, the Perspective Authors.
//
// This file is part of the Perspective library, distributed under the terms
// of the Apache License 2.0. The full license can be found in the LICENSE
// file.
use crate::components::*;
use crate::utils::*;
use derivative::Derivative;
use std::cell::{Cell, RefCell};
use std::rc::Rc;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use wasm_bindgen_futures::spawn_local;
use web_sys::*;
use yew::prelude::*;
type BlurHandlerType = Rc<RefCell<Option<Closure<dyn FnMut(FocusEvent)>>>>;
/// A `ModalElement` wraps the parameterized yew `Component` in a Custom
/// Element. Via the `open()` and `close()` methods, a `ModalElement` can be
/// positioned next to any existing on-page elements, accounting for viewport,
/// scroll position, etc.
///
/// `#[derive(Clone)]` generates the trait bound `T: Clone`, which is not
/// required because `Scope<T>` implements `Clone` without this bound; thus
/// `Clone` is instead derived via the `derivative` crate's
/// [custom bounds](https://mcarton.github.io/rust-derivative/latest/Debug.html#custom-bound)
/// support.
#[derive(Derivative)]
#[derivative(Clone(bound = ""))]
pub struct ModalElement<T>
where
T: Component,
T::Properties: ModalLink<T>,
{
root: Rc<RefCell<Option<AppHandle<Modal<T>>>>>,
custom_element: HtmlElement,
target: Rc<RefCell<Option<HtmlElement>>>,
blurhandler: BlurHandlerType,
own_focus: bool,
resize_sub: Rc<RefCell<Option<Subscription>>>,
anchor: Rc<Cell<ModalAnchor>>,
}
/// Anchor point enum; each variant is named `<ModalCorner><TargetCorner>`.
#[derive(Clone, Copy)]
enum ModalAnchor {
BottomRightTopLeft,
BottomRightBottomLeft,
BottomRightTopRight,
BottomLeftTopLeft,
TopRightTopLeft,
TopRightBottomRight,
TopLeftBottomLeft,
}
impl Default for ModalAnchor {
fn default() -> ModalAnchor {
ModalAnchor::TopLeftBottomLeft
}
}
impl ModalAnchor {
const fn is_rev_vert(&self) -> bool {
matches!(
self,
ModalAnchor::BottomLeftTopLeft
| ModalAnchor::BottomRightBottomLeft
| ModalAnchor::BottomRightTopLeft
| ModalAnchor::BottomRightTopRight
)
}
}
/// Given the bounds of the target element as previously computed, as well as the
/// browser's viewport and the bounds of the already-connected
/// `<perspective-style-menu>` element itself, determine new (top, left)
/// coordinates that keep the element on-screen.
fn calc_relative_position(
elem: &HtmlElement,
_top: i32,
left: i32,
height: i32,
width: i32,
) -> ModalAnchor {
let window = web_sys::window().unwrap();
let rect = elem.get_bounding_client_rect();
let inner_width = window.inner_width().unwrap().as_f64().unwrap() as i32;
let inner_height = window.inner_height().unwrap().as_f64().unwrap() as i32;
let rect_top = rect.top() as i32;
let rect_height = rect.height() as i32;
let rect_width = rect.width() as i32;
let rect_left = rect.left() as i32;
let elem_over_y = inner_height < rect_top + rect_height;
let elem_over_x = inner_width < rect_left + rect_width;
let target_over_x = inner_width < rect_left + width;
let target_over_y = inner_height < rect_top + height;
// modal/target
match (elem_over_y, elem_over_x, target_over_x, target_over_y) {
(true, _, true, true) => ModalAnchor::BottomRightTopLeft,
(true, _, true, false) => ModalAnchor::BottomRightBottomLeft,
(true, true, false, _) => {
if left + width - rect_width > 0 {
ModalAnchor::BottomRightTopRight
} else {
ModalAnchor::BottomLeftTopLeft
}
}
(true, false, false, _) => ModalAnchor::BottomLeftTopLeft,
(false, true, true, _) => ModalAnchor::TopRightTopLeft,
(false, true, false, _) => {
if left + width - rect_width > 0 {
ModalAnchor::TopRightBottomRight
} else {
ModalAnchor::TopLeftBottomLeft
}
}
_ => ModalAnchor::TopLeftBottomLeft,
}
}
impl<T> ModalElement<T>
where
T: Component,
T::Properties: ModalLink<T>,
{
pub fn new(
custom_element: web_sys::HtmlElement,
props: T::Properties,
own_focus: bool,
) -> ModalElement<T> {
custom_element.set_attribute("tabindex", "0").unwrap();
let init = web_sys::ShadowRootInit::new(web_sys::ShadowRootMode::Open);
let shadow_root = custom_element
.attach_shadow(&init)
.unwrap()
.unchecked_into::<web_sys::Element>();
let cprops = ModalProps {
child: Some(html_nested! {
<T ..props />
}),
};
let root = Rc::new(RefCell::new(Some(
yew::Renderer::with_root_and_props(shadow_root, cprops).render(),
)));
let blurhandler = Rc::new(RefCell::new(None));
ModalElement {
root,
custom_element,
target: Rc::new(RefCell::new(None)),
own_focus,
blurhandler,
resize_sub: Rc::new(RefCell::new(None)),
anchor: Default::default(),
}
}
fn calc_anchor_position(&self, target: &HtmlElement) -> (i32, i32) {
let height = target.offset_height() as i32;
let width = target.offset_width() as i32;
let elem = target.clone().unchecked_into::<HtmlElement>();
let rect = elem.get_bounding_client_rect();
let top = rect.top() as i32;
let left = rect.left() as i32;
let self_rect = self.custom_element.get_bounding_client_rect();
let rect_height = self_rect.height() as i32;
let rect_width = self_rect.width() as i32;
match self.anchor.get() {
ModalAnchor::BottomRightTopLeft => (top - rect_height, left - rect_width + 1),
ModalAnchor::BottomRightBottomLeft => {
(top - rect_height + height, left - rect_width + 1)
}
ModalAnchor::BottomRightTopRight => (top - rect_height + 1, left + width - rect_width),
ModalAnchor::BottomLeftTopLeft => (top - rect_height + 1, left),
ModalAnchor::TopRightTopLeft => (top, left - rect_width + 1),
ModalAnchor::TopRightBottomRight => (top + height - 1, left + width - rect_width),
ModalAnchor::TopLeftBottomLeft => ((top + height - 1), left),
}
}
async fn open_within_viewport(&self, target: HtmlElement) -> Result<(), JsValue> {
let height = target.offset_height() as i32;
let width = target.offset_width() as i32;
let elem = target.clone().unchecked_into::<HtmlElement>();
let rect = elem.get_bounding_client_rect();
let top = rect.top() as i32;
let left = rect.left() as i32;
*self.target.borrow_mut() = Some(target.clone());
// Default, top left/bottom left
let msg = ModalMsg::SetPos {
top: (top + height - 1) as i32,
left: left as i32,
visible: false,
rev_vert: false,
};
self.root.borrow().as_ref().unwrap().send_message(msg);
let window = web_sys::window().unwrap();
window
.document()
.unwrap()
.body()
.unwrap()
.append_child(&self.custom_element)?;
await_animation_frame().await?;
// Check if the modal has been positioned off-screen and re-locate if necessary
self.anchor.set(calc_relative_position(
&self.custom_element,
top,
left,
height,
width,
));
let (top, left) = self.calc_anchor_position(&target);
let msg = ModalMsg::SetPos {
top,
left,
visible: true,
rev_vert: self.anchor.get().is_rev_vert(),
};
self.root.borrow().as_ref().unwrap().send_message(msg);
if self.own_focus {
let mut this = Some(self.clone());
*self.blurhandler.borrow_mut() = Some(
(move |_| this.take().and_then(|x| x.hide().ok()).unwrap_or(())).into_closure_mut(),
);
self.custom_element.add_event_listener_with_callback(
"blur",
self.blurhandler
.borrow()
.as_ref()
.unwrap()
.as_ref()
.unchecked_ref(),
)?;
self.custom_element.focus()
} else {
Ok(())
}
}
pub fn send_message(&self, msg: T::Message) {
self.root
.borrow()
.as_ref()
.unwrap()
.send_message(ModalMsg::SubMsg(msg))
}
pub fn send_message_batch(&self, msgs: Vec<T::Message>) {
self.root
.borrow()
.as_ref()
.unwrap()
.send_message_batch(msgs.into_iter().map(ModalMsg::SubMsg).collect())
}
    /// Open this modal by attaching directly to `document.body`, absolutely
    /// positioned relative to an already-connected `target` element.
///
/// Because the Custom Element has a `blur` handler, we must invoke this
/// before attempting to re-parent the element.
pub fn open(&self, target: web_sys::HtmlElement, resize_pubsub: Option<&PubSub<()>>) {
if let Some(resize) = resize_pubsub {
let this = self.clone();
let target = target.clone();
let anchor = self.anchor.clone();
*self.resize_sub.borrow_mut() = Some(resize.add_listener(move |()| {
let (top, left) = this.calc_anchor_position(&target);
let msg = ModalMsg::SetPos {
top,
left,
visible: true,
rev_vert: anchor.get().is_rev_vert(),
};
this.root.borrow().as_ref().unwrap().send_message(msg);
}));
};
if !self.is_open() {
self.custom_element.blur().unwrap();
let this = self.clone();
spawn_local(async move {
await_animation_frame().await.unwrap();
target.class_list().add_1("modal-target").unwrap();
let theme = get_theme(&target);
this.open_within_viewport(target).await.unwrap();
if let Some(theme) = theme {
this.custom_element.set_attribute("theme", &theme).unwrap();
}
});
}
}
pub fn is_open(&self) -> bool {
self.custom_element.is_connected()
}
/// Remove from document.
pub fn hide(&self) -> Result<(), JsValue> {
if self.is_open() {
if self.own_focus {
self.custom_element.remove_event_listener_with_callback(
"blur",
self.blurhandler
.borrow()
.as_ref()
.unwrap()
.as_ref()
.unchecked_ref(),
)?;
*self.blurhandler.borrow_mut() = None;
}
web_sys::window()
.unwrap()
.document()
.unwrap()
.body()
.unwrap()
.remove_child(&self.custom_element)?;
let target = self.target.borrow_mut().take().unwrap();
let event = web_sys::CustomEvent::new("-perspective-close-expression")?;
target.class_list().remove_1("modal-target").unwrap();
if get_theme(&target).is_some() {
self.custom_element.remove_attribute("theme")?;
}
target.dispatch_event(&event)?;
}
Ok(())
}
/// Remove from document and cleanup.
pub fn destroy(self) -> Result<(), JsValue> {
self.hide()?;
self.root.borrow_mut().take().unwrap().destroy();
Ok(())
}
}
fn get_theme(elem: &HtmlElement) -> Option<String> {
let styles = window().unwrap().get_computed_style(elem).unwrap().unwrap();
styles
.get_property_value("--theme-name")
.ok()
.and_then(|x| {
let trimmed = x.trim();
if !trimmed.is_empty() {
Some(trimmed[1..trimmed.len() - 1].to_owned())
} else {
None
}
})
}
| 33.220472 | 100 | 0.556925 |
cc465b86bdc748fb95c3d09cb225866e8a905f1e | 12,158 | use crate::{
pipeline::{
IndexFormat, PipelineCompiler, PipelineDescriptor, PipelineLayout, PipelineSpecialization,
},
renderer::{
AssetRenderResourceBindings, BindGroup, BindGroupId, BufferId, RenderResource,
RenderResourceBinding, RenderResourceBindings, RenderResourceContext, SharedBuffers,
},
shader::Shader,
};
use bevy_asset::{Asset, Assets, Handle};
use bevy_ecs::{
component::Component,
reflect::ReflectComponent,
system::{Query, Res, ResMut, SystemParam},
};
use bevy_reflect::Reflect;
use std::{marker::PhantomData, ops::Range, sync::Arc};
use thiserror::Error;
/// A queued command for the renderer
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum RenderCommand {
SetPipeline {
pipeline: Handle<PipelineDescriptor>,
},
SetVertexBuffer {
slot: u32,
buffer: BufferId,
offset: u64,
},
SetIndexBuffer {
buffer: BufferId,
offset: u64,
index_format: IndexFormat,
},
SetBindGroup {
index: u32,
bind_group: BindGroupId,
dynamic_uniform_indices: Option<Arc<[u32]>>,
},
DrawIndexed {
indices: Range<u32>,
base_vertex: i32,
instances: Range<u32>,
},
Draw {
vertices: Range<u32>,
instances: Range<u32>,
},
}
#[derive(Component, Debug, Clone, Reflect)]
#[reflect(Component)]
pub struct Visible {
pub is_visible: bool,
// TODO: consider moving this to materials
pub is_transparent: bool,
}
impl Default for Visible {
fn default() -> Self {
Visible {
is_visible: true,
is_transparent: false,
}
}
}
/// A component that indicates that an entity is outside the view frustum.
/// Any entity with this component will be ignored during rendering.
///
/// # Note
/// This does not handle multiple "views" properly as it is a "global" filter.
/// This will be resolved in the future. For now, disable frustum culling if you
/// need to support multiple views (ex: set the `SpriteSettings::frustum_culling_enabled` resource).
#[derive(Component, Debug, Default, Clone, Reflect)]
#[reflect(Component)]
#[component(storage = "SparseSet")]
pub struct OutsideFrustum;
/// A component that indicates how to draw an entity.
#[derive(Component, Debug, Clone, Reflect)]
#[reflect(Component)]
pub struct Draw {
#[reflect(ignore)]
pub render_commands: Vec<RenderCommand>,
}
impl Default for Draw {
fn default() -> Self {
Self {
render_commands: Default::default(),
}
}
}
impl Draw {
pub fn clear_render_commands(&mut self) {
self.render_commands.clear();
}
pub fn set_pipeline(&mut self, pipeline: &Handle<PipelineDescriptor>) {
self.render_command(RenderCommand::SetPipeline {
pipeline: pipeline.clone_weak(),
});
}
pub fn set_vertex_buffer(&mut self, slot: u32, buffer: BufferId, offset: u64) {
self.render_command(RenderCommand::SetVertexBuffer {
slot,
buffer,
offset,
});
}
pub fn set_index_buffer(&mut self, buffer: BufferId, offset: u64, index_format: IndexFormat) {
self.render_command(RenderCommand::SetIndexBuffer {
buffer,
offset,
index_format,
});
}
pub fn set_bind_group(&mut self, index: u32, bind_group: &BindGroup) {
self.render_command(RenderCommand::SetBindGroup {
index,
bind_group: bind_group.id,
dynamic_uniform_indices: bind_group.dynamic_uniform_indices.clone(),
});
}
pub fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
self.render_command(RenderCommand::DrawIndexed {
base_vertex,
indices,
instances,
});
}
pub fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
self.render_command(RenderCommand::Draw {
vertices,
instances,
});
}
#[inline]
pub fn render_command(&mut self, render_command: RenderCommand) {
self.render_commands.push(render_command);
}
}
#[derive(Debug, Error)]
pub enum DrawError {
#[error("pipeline does not exist")]
NonExistentPipeline,
#[error("no pipeline set")]
NoPipelineSet,
#[error("pipeline has no layout")]
PipelineHasNoLayout,
#[error("failed to get a buffer for the given `RenderResource`")]
BufferAllocationFailure,
#[error("the given asset does not have any render resources")]
MissingAssetRenderResources,
}
#[derive(SystemParam)]
pub struct DrawContext<'w, 's> {
pub pipelines: ResMut<'w, Assets<PipelineDescriptor>>,
pub shaders: ResMut<'w, Assets<Shader>>,
pub asset_render_resource_bindings: ResMut<'w, AssetRenderResourceBindings>,
pub pipeline_compiler: ResMut<'w, PipelineCompiler>,
pub render_resource_context: Res<'w, Box<dyn RenderResourceContext>>,
pub shared_buffers: ResMut<'w, SharedBuffers>,
#[system_param(ignore)]
pub current_pipeline: Option<Handle<PipelineDescriptor>>,
#[system_param(ignore)]
marker: PhantomData<&'s usize>,
}
impl<'w, 's> DrawContext<'w, 's> {
pub fn get_uniform_buffer<T: RenderResource>(
&mut self,
render_resource: &T,
) -> Result<RenderResourceBinding, DrawError> {
self.shared_buffers
.get_uniform_buffer(&**self.render_resource_context, render_resource)
.ok_or(DrawError::BufferAllocationFailure)
}
pub fn set_pipeline(
&mut self,
draw: &mut Draw,
pipeline_handle: &Handle<PipelineDescriptor>,
specialization: &PipelineSpecialization,
) -> Result<(), DrawError> {
let specialized_pipeline = if let Some(specialized_pipeline) = self
.pipeline_compiler
.get_specialized_pipeline(pipeline_handle, specialization)
{
specialized_pipeline
} else {
self.pipeline_compiler.compile_pipeline(
&**self.render_resource_context,
&mut self.pipelines,
&mut self.shaders,
pipeline_handle,
specialization,
)
};
draw.set_pipeline(&specialized_pipeline);
self.current_pipeline = Some(specialized_pipeline.clone_weak());
Ok(())
}
pub fn get_pipeline_descriptor(&self) -> Result<&PipelineDescriptor, DrawError> {
self.current_pipeline
.as_ref()
.and_then(|handle| self.pipelines.get(handle))
.ok_or(DrawError::NoPipelineSet)
}
pub fn get_pipeline_layout(&self) -> Result<&PipelineLayout, DrawError> {
self.get_pipeline_descriptor().and_then(|descriptor| {
descriptor
.get_layout()
.ok_or(DrawError::PipelineHasNoLayout)
})
}
pub fn set_asset_bind_groups<T: Asset>(
&mut self,
draw: &mut Draw,
asset_handle: &Handle<T>,
) -> Result<(), DrawError> {
if let Some(asset_bindings) = self
.asset_render_resource_bindings
.get_mut_untyped(&asset_handle.clone_weak_untyped())
{
Self::set_bind_groups_from_bindings_internal(
&self.current_pipeline,
&self.pipelines,
&**self.render_resource_context,
None,
draw,
&mut [asset_bindings],
)
} else {
Err(DrawError::MissingAssetRenderResources)
}
}
pub fn set_bind_groups_from_bindings(
&mut self,
draw: &mut Draw,
render_resource_bindings: &mut [&mut RenderResourceBindings],
) -> Result<(), DrawError> {
Self::set_bind_groups_from_bindings_internal(
&self.current_pipeline,
&self.pipelines,
&**self.render_resource_context,
Some(&mut self.asset_render_resource_bindings),
draw,
render_resource_bindings,
)
}
fn set_bind_groups_from_bindings_internal(
current_pipeline: &Option<Handle<PipelineDescriptor>>,
pipelines: &Assets<PipelineDescriptor>,
render_resource_context: &dyn RenderResourceContext,
mut asset_render_resource_bindings: Option<&mut AssetRenderResourceBindings>,
draw: &mut Draw,
render_resource_bindings: &mut [&mut RenderResourceBindings],
) -> Result<(), DrawError> {
let pipeline = current_pipeline.as_ref().ok_or(DrawError::NoPipelineSet)?;
let pipeline_descriptor = pipelines
.get(pipeline)
.ok_or(DrawError::NonExistentPipeline)?;
let layout = pipeline_descriptor
.get_layout()
.ok_or(DrawError::PipelineHasNoLayout)?;
'bind_group_descriptors: for bind_group_descriptor in layout.bind_groups.iter() {
for bindings in render_resource_bindings.iter_mut() {
if let Some(bind_group) =
bindings.update_bind_group(bind_group_descriptor, render_resource_context)
{
draw.set_bind_group(bind_group_descriptor.index, bind_group);
continue 'bind_group_descriptors;
}
}
// if none of the given RenderResourceBindings have the current bind group, try their
// assets
let asset_render_resource_bindings =
if let Some(value) = asset_render_resource_bindings.as_mut() {
value
} else {
continue 'bind_group_descriptors;
};
for bindings in render_resource_bindings.iter_mut() {
for (asset_handle, _) in bindings.iter_assets() {
let asset_bindings = if let Some(asset_bindings) =
asset_render_resource_bindings.get_mut_untyped(asset_handle)
{
asset_bindings
} else {
continue;
};
if let Some(bind_group) = asset_bindings
.update_bind_group(bind_group_descriptor, render_resource_context)
{
draw.set_bind_group(bind_group_descriptor.index, bind_group);
continue 'bind_group_descriptors;
}
}
}
}
Ok(())
}
pub fn create_bind_group_resource(
&self,
index: u32,
bind_group: &BindGroup,
) -> Result<(), DrawError> {
let pipeline = self
.current_pipeline
.as_ref()
.ok_or(DrawError::NoPipelineSet)?;
let pipeline_descriptor = self
.pipelines
.get(pipeline)
.ok_or(DrawError::NonExistentPipeline)?;
let layout = pipeline_descriptor
.get_layout()
.ok_or(DrawError::PipelineHasNoLayout)?;
let bind_group_descriptor = &layout.bind_groups[index as usize];
self.render_resource_context
.create_bind_group(bind_group_descriptor.id, bind_group);
Ok(())
}
pub fn set_vertex_buffers_from_bindings(
&self,
draw: &mut Draw,
render_resource_bindings: &[&RenderResourceBindings],
) -> Result<(), DrawError> {
for bindings in render_resource_bindings.iter() {
if let Some((index_buffer, index_format)) = bindings.index_buffer {
draw.set_index_buffer(index_buffer, 0, index_format);
}
if let Some(main_vertex_buffer) = bindings.vertex_attribute_buffer {
draw.set_vertex_buffer(0, main_vertex_buffer, 0);
}
}
Ok(())
}
}
pub trait Drawable {
fn draw(&mut self, draw: &mut Draw, context: &mut DrawContext) -> Result<(), DrawError>;
}
pub fn clear_draw_system(mut query: Query<&mut Draw>) {
for mut draw in query.iter_mut() {
draw.clear_render_commands();
}
}
| 32.508021 | 100 | 0.60709 |
8f99a78ae4cfe0ae147bc69ee6db631cf011a2bd | 14,018 | // -*- mode: rust; -*-
//
// This file is part of curve25519-dalek.
// Copyright (c) 2016-2021 isis lovecruft
// Copyright (c) 2016-2019 Henry de Valence
// See LICENSE for licensing information.
//
// Authors:
// - isis agora lovecruft <[email protected]>
// - Henry de Valence <[email protected]>
#![no_std]
#![cfg_attr(feature = "nightly", feature(test))]
#![cfg_attr(feature = "nightly", feature(doc_cfg))]
#![cfg_attr(feature = "simd_backend", feature(stdsimd))]
// Refuse to compile if documentation is missing.
#![deny(missing_docs)]
#![doc(html_logo_url = "https://doc.dalek.rs/assets/dalek-logo-clear.png")]
#![doc(html_root_url = "https://docs.rs/curve25519-dalek/3.2.1")]
//! # curve25519-dalek [](https://crates.io/crates/curve25519-dalek) [](https://doc.dalek.rs) [](https://travis-ci.org/dalek-cryptography/curve25519-dalek)
//!
//! <img
//! width="33%"
//! align="right"
//! src="https://doc.dalek.rs/assets/dalek-logo-clear.png"/>
//!
//! **A pure-Rust implementation of group operations on Ristretto and Curve25519.**
//!
//! `curve25519-dalek` is a library providing group operations on the Edwards and
//! Montgomery forms of Curve25519, and on the prime-order Ristretto group.
//!
//! `curve25519-dalek` is not intended to provide implementations of any particular
//! crypto protocol. Rather, implementations of those protocols (such as
//! [`x25519-dalek`][x25519-dalek] and [`ed25519-dalek`][ed25519-dalek]) should use
//! `curve25519-dalek` as a library.
//!
//! `curve25519-dalek` is intended to provide a clean and safe _mid-level_ API for use
//! implementing a wide range of ECC-based crypto protocols, such as key agreement,
//! signatures, anonymous credentials, rangeproofs, and zero-knowledge proof
//! systems.
//!
//! In particular, `curve25519-dalek` implements Ristretto, which constructs a
//! prime-order group from a non-prime-order Edwards curve. This provides the
//! speed and safety benefits of Edwards curve arithmetic, without the pitfalls of
//! cofactor-related abstraction mismatches.
//!
//! # Documentation
//!
//! The semver-stable, public-facing `curve25519-dalek` API is documented
//! [here][docs-external]. In addition, the unstable internal implementation
//! details are documented [here][docs-internal].
//!
//! The `curve25519-dalek` documentation requires a custom HTML header to include
//! KaTeX for math support. Unfortunately `cargo doc` does not currently support
//! this, but docs can be built using
//! ```sh
//! make doc
//! make doc-internal
//! ```
//!
//! # Use
//!
//! To import `curve25519-dalek`, add the following to the dependencies section of
//! your project's `Cargo.toml`:
//! ```toml
//! curve25519-dalek = "3"
//! ```
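//!
//! As a small illustration of the mid-level API (this snippet is not part of any
//! protocol, it just checks a group identity):
//!
//! ```
//! use curve25519_dalek::constants::RISTRETTO_BASEPOINT_POINT;
//! use curve25519_dalek::scalar::Scalar;
//!
//! // Adding the basepoint to itself agrees with multiplying it by the scalar 2.
//! let b = RISTRETTO_BASEPOINT_POINT;
//! assert_eq!(b + b, Scalar::from(2u64) * b);
//! ```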
//!
//! The sole breaking change in the `3.x` series was an update to the `digest`
//! version, and in terms of non-breaking changes it includes:
//!
//! * support for using `alloc` instead of `std` on stable Rust,
//! * the Elligator2 encoding for Edwards points,
//! * a fix to use `packed_simd2`,
//! * various documentation fixes and improvements,
//! * support for configurably-sized, precomputed lookup tables for basepoint scalar
//! multiplication,
//! * two new formally-verified field arithmetic backends which use the Fiat Crypto
//! Rust code, which is generated from proofs of functional correctness checked by
//! the Coq theorem proving system, and
//! * support for explicitly calling the `zeroize` traits for all point types.
//!
//! The `2.x` series has API almost entirely unchanged from the `1.x` series,
//! except that:
//!
//! * an error in the data modeling for the (optional) `serde` feature was
//! corrected, so that when the `2.x`-series `serde` implementation is used
//! with `serde-bincode`, the derived serialization matches the usual X/Ed25519
//! formats;
//! * the `rand` version was updated.
//!
//! See `CHANGELOG.md` for more details.
//!
//! # Backends and Features
//!
//! The `nightly` feature enables features available only when using a Rust nightly
//! compiler. In particular, it is required for rendering documentation and for
//! the SIMD backends.
//!
//! Curve arithmetic is implemented using one of the following backends:
//!
//! * a `u32` backend using serial formulas and `u64` products;
//! * a `u64` backend using serial formulas and `u128` products;
//! * an `avx2` backend using [parallel formulas][parallel_doc] and `avx2` instructions (sets speed records);
//! * an `ifma` backend using [parallel formulas][parallel_doc] and `ifma` instructions (sets speed records);
//!
//! By default the `u64` backend is selected. To select a specific backend, use:
//! ```sh
//! cargo build --no-default-features --features "std u32_backend"
//! cargo build --no-default-features --features "std u64_backend"
//! # Requires nightly, RUSTFLAGS="-C target_feature=+avx2" to use avx2
//! cargo build --no-default-features --features "std simd_backend"
//! # Requires nightly, RUSTFLAGS="-C target_feature=+avx512ifma" to use ifma
//! cargo build --no-default-features --features "std simd_backend"
//! ```
//! Crates using `curve25519-dalek` can either select a backend on behalf of their
//! users, or expose feature flags that control the `curve25519-dalek` backend.
//!
//! The `std` feature is enabled by default, but it can be disabled for no-`std`
//! builds using `--no-default-features`. Note that this requires explicitly
//! selecting an arithmetic backend using one of the `_backend` features.
//! If no backend is selected, compilation will fail.
//!
//! # Safety
//!
//! The `curve25519-dalek` types are designed to make illegal states
//! unrepresentable. For example, any instance of an `EdwardsPoint` is
//! guaranteed to hold a point on the Edwards curve, and any instance of a
//! `RistrettoPoint` is guaranteed to hold a valid point in the Ristretto
//! group.
//!
//! All operations are implemented using constant-time logic (no
//! secret-dependent branches, no secret-dependent memory accesses),
//! unless specifically marked as being variable-time code.
//! We believe that our constant-time logic is lowered to constant-time
//! assembly, at least on `x86_64` targets.
//!
//! As an additional guard against possible future compiler optimizations,
//! the `subtle` crate places an optimization barrier before every
//! conditional move or assignment. More details can be found in [the
//! documentation for the `subtle` crate][subtle_doc].
//!
//! Some functionality (e.g., multiscalar multiplication or batch
//! inversion) requires heap allocation for temporary buffers. All
//! heap-allocated buffers of potentially secret data are explicitly
//! zeroed before release.
//!
//! However, we do not attempt to zero stack data, for two reasons.
//! First, it's not possible to do so correctly: we don't have control
//! over stack allocations, so there's no way to know how much data to
//! wipe. Second, because `curve25519-dalek` provides a mid-level API,
//! the correct place to start zeroing stack data is likely not at the
//! entrypoints of `curve25519-dalek` functions, but at the entrypoints of
//! functions in other crates.
//!
//! The implementation is memory-safe, and contains no significant
//! `unsafe` code. The SIMD backend uses `unsafe` internally to call SIMD
//! intrinsics. These are marked `unsafe` only because invoking them on an
//! inappropriate CPU would cause `SIGILL`, but the entire backend is only
//! compiled with appropriate `target_feature`s, so this cannot occur.
//!
//! # Performance
//!
//! Benchmarks are run using [`criterion.rs`][criterion]:
//!
//! ```sh
//! cargo bench --no-default-features --features "std u32_backend"
//! cargo bench --no-default-features --features "std u64_backend"
//! # Uses avx2 or ifma only if compiled for an appropriate target.
//! export RUSTFLAGS="-C target_cpu=native"
//! cargo bench --no-default-features --features "std simd_backend"
//! ```
//!
//! Performance is a secondary goal behind correctness, safety, and
//! clarity, but we aim to be competitive with other implementations.
//!
//! # FFI
//!
//! Unfortunately, we have no plans to add FFI to `curve25519-dalek` directly. The
//! reason is that we use Rust features to provide an API that maintains safety
//! invariants, which are not possible to maintain across an FFI boundary. For
//! instance, as described in the _Safety_ section above, invalid points are
//! impossible to construct, and this would not be the case if we exposed point
//! operations over FFI.
//!
//! However, `curve25519-dalek` is designed as a *mid-level* API, aimed at
//! implementing other, higher-level primitives. Instead of providing FFI at the
//! mid-level, our suggestion is to implement the higher-level primitive (a
//! signature, PAKE, ZKP, etc) in Rust, using `curve25519-dalek` as a dependency,
//! and have that crate provide a minimal, byte-buffer-oriented FFI specific to
//! that primitive.
//!
//! # Contributing
//!
//! Please see [CONTRIBUTING.md][contributing].
//!
//! Patches and pull requests should be made against the `develop`
//! branch, **not** `main`.
//!
//! # About
//!
//! **SPOILER ALERT:** *The Twelfth Doctor's first encounter with the Daleks is in
//! his second full episode, "Into the Dalek". A beleaguered ship of the "Combined
//! Galactic Resistance" has discovered a broken Dalek that has turned "good",
//! desiring to kill all other Daleks. The Doctor, Clara and a team of soldiers
//! are miniaturized and enter the Dalek, which the Doctor names Rusty. They
//! repair the damage, but accidentally restore it to its original nature, causing
//! it to go on the rampage and alert the Dalek fleet to the whereabouts of the
//! rebel ship. However, the Doctor manages to return Rusty to its previous state
//! by linking his mind with the Dalek's: Rusty shares the Doctor's view of the
//! universe's beauty, but also his deep hatred of the Daleks. Rusty destroys the
//! other Daleks and departs the ship, determined to track down and bring an end
//! to the Dalek race.*
//!
//! `curve25519-dalek` is authored by Isis Agora Lovecruft and Henry de Valence.
//!
//! Portions of this library were originally a port of [Adam Langley's
//! Golang ed25519 library](https://github.com/agl/ed25519), which was in
//! turn a port of the reference `ref10` implementation. Most of this code,
//! including the 32-bit field arithmetic, has since been rewritten.
//!
//! The fast `u32` and `u64` scalar arithmetic was implemented by Andrew Moon, and
//! the addition chain for scalar inversion was provided by Brian Smith. The
//! optimised batch inversion was contributed by Sean Bowe and Daira Hopwood.
//!
//! The `no_std` and `zeroize` support was contributed by Tony Arcieri.
//!
//! The formally verified backends, `fiat_u32_backend` and `fiat_u64_backend`, which
//! integrate with the Rust generated by the
//! [Fiat Crypto project](https://github.com/mit-plv/fiat-crypto) were contributed
//! by François Garillot.
//!
//! Thanks also to Ashley Hauck, Lucas Salibian, Manish Goregaokar, Jack Grigg,
//! Pratyush Mishra, Michael Rosenberg, and countless others for their
//! contributions.
//!
//! [ed25519-dalek]: https://github.com/dalek-cryptography/ed25519-dalek
//! [x25519-dalek]: https://github.com/dalek-cryptography/x25519-dalek
//! [contributing]: https://github.com/dalek-cryptography/curve25519-dalek/blob/master/CONTRIBUTING.md
//! [docs-external]: https://doc.dalek.rs/curve25519_dalek/
//! [docs-internal]: https://doc-internal.dalek.rs/curve25519_dalek/
//! [criterion]: https://github.com/japaric/criterion.rs
//! [parallel_doc]: https://doc-internal.dalek.rs/curve25519_dalek/backend/vector/avx2/index.html
//! [subtle_doc]: https://doc.dalek.rs/subtle/
//------------------------------------------------------------------------
// External dependencies:
//------------------------------------------------------------------------
#[cfg(all(feature = "alloc", not(feature = "std")))]
#[macro_use]
extern crate alloc;
#[cfg(feature = "std")]
#[macro_use]
extern crate std;
#[cfg(all(feature = "nightly", feature = "packed_simd"))]
extern crate packed_simd;
extern crate byteorder;
pub extern crate digest;
extern crate rand_core;
extern crate zeroize;
#[cfg(any(feature = "fiat_u64_backend", feature = "fiat_u32_backend"))]
extern crate fiat_crypto;
// Used for traits related to constant-time code.
extern crate subtle;
#[cfg(all(test, feature = "serde"))]
extern crate bincode;
#[cfg(feature = "serde")]
extern crate serde;
// Internal macros. Must come first!
#[macro_use]
pub(crate) mod macros;
//------------------------------------------------------------------------
// curve25519-dalek public modules
//------------------------------------------------------------------------
// Scalar arithmetic mod l = 2^252 + ..., the order of the Ristretto group
pub mod scalar;
// Point operations on the Montgomery form of Curve25519
pub mod montgomery;
// Point operations on the Edwards form of Curve25519
pub mod edwards;
// Group operations on the Ristretto group
pub mod ristretto;
// Useful constants, like the Ed25519 basepoint
pub mod constants;
// External (and internal) traits.
pub mod traits;
//------------------------------------------------------------------------
// curve25519-dalek internal modules
//------------------------------------------------------------------------
// Finite field arithmetic mod p = 2^255 - 19
pub(crate) mod field;
// Arithmetic backends (using u32, u64, etc) live here
pub(crate) mod backend;
// Crate-local prelude (for alloc-dependent features like `Vec`)
pub(crate) mod prelude;
// Generic code for window lookups
pub(crate) mod window;
| 43.669782 | 475 | 0.702811 |
e4f2648ca196a3759f29d701702cadf84e2d346e | 3,064 | extern crate csv;
extern crate lz4;
#[macro_use]
extern crate serde_derive;
use std::error::Error;
use std::io::{BufRead, BufReader};
use std::fs::File;
use lz4::{Decoder};
#[derive(Debug,Deserialize)]
struct Record {
quizzes: String,
solutions: String,
}
pub struct Board {
pub brd: [u16; 81]
}
pub fn parse() -> Result<Box<Vec<Board>>, Box<dyn Error>> {
let mut boards: Vec<Board> = vec![];
//let mut num = 0;
let input_file = File::open("C:/Users/SEJOBAC8/projs/MastersOfCode/sudoku/sudoku.lz4")?;
let decoder = Decoder::new(input_file)?;
let mut f = BufReader::new(decoder);
let mut line = String::new();
while f.read_line(&mut line).unwrap() > 0 {
if line.len() < 81 {
line.clear();
continue;
}
//print!("{}\r\n", line);
let mut it = line.bytes();
let mut board = Board {
brd: [0; 81]
};
for x in 0..81 {
let v = it.next().unwrap() - 48;
if v > 0 {
board.brd[x] = 1 << (v-1);
}
}
boards.push(board);
line.clear();
break;
}
Ok(Box::new(boards))
}
pub fn solve(board: Board) -> Result<u32, Box<dyn Error>> {
//Create constraints for every cell
// bit 0 - 8 means 1...9 is still possible
let mut constraints: [u16; 81] = [0; 81];
let mut counts: [u32; 81] = [0; 81];
for row in 0..9 {
let mut mask: u16 = 0;
let addr = row * 9;
for col in 0..9 {
//print!("{} {} {}\n", row, col, addr);
mask |= board.brd[addr+col];
}
for col in 0..9 {
constraints[addr+col] = mask;
}
//print!("{:08b}\n", mask);
}
for col in 0..9 {
let mut mask: u16 = 0;
for row in 0..9 {
mask |= board.brd[col + row*9];
}
for row in 0..9 {
constraints[col + row*9] |= mask;
constraints[col + row*9] &= !board.brd[col+row*9];
//Population count here
counts[col + row*9] = constraints[col + row*9].count_ones();
}
}
let mut max = 0;
let mut pos = 0;
for a in 0..81 {
if max < counts[a] {
max = counts[a];
pos = a;
}
}
// if max == 9 {
print!("{}:{}|{}\n", pos, max, constraints[pos]);
for row in 0..9 {
for col in 0..9 {
print!("{:09b}|{:04x}|{} ", constraints[row*9+col], board.brd[row*9+col], counts[row*9+col]);
}
print!("\n");
}
print!("\n");
// }
return Result::Ok(0);
}
fn main() {
let r = parse();
let f = match r {
Ok(result) => result,
Err(err) => {
panic!("There was a problem! {:?}", err)
},
};
for board in *f {
let _ = match solve(board) {
Ok(result) => result,
Err(err) => {
panic!("There was a problem {:?}", err)
}
};
break;
}
} | 22.696296 | 110 | 0.456593 |
c1739e47eab5f04673ce5db26bdb28b0ac6fcd1a | 2,977 | use ra_syntax::{
algo::visit::{visitor_ctx, VisitorCtx},
ast,
AstNode,
};
use rustc_hash::FxHashMap;
use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem};
/// Complete repeated parametes, both name and type. For example, if all
/// functions in a file have a `spam: &mut Spam` parameter, a completion with
/// `spam: &mut Spam` insert text/label and `spam` lookup string will be
/// suggested.
pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_param {
return;
}
let mut params = FxHashMap::default();
for node in ctx.leaf.ancestors() {
let _ = visitor_ctx(&mut params)
.visit::<ast::SourceFile, _>(process)
.visit::<ast::ItemList, _>(process)
.accept(node);
}
params
.into_iter()
.filter_map(|(label, (count, param))| {
let lookup = param.pat()?.syntax().text().to_string();
if count < 2 {
None
} else {
Some((label, lookup))
}
})
.for_each(|(label, lookup)| {
CompletionItem::new(CompletionKind::Magic, label)
.lookup_by(lookup)
.add_to(acc)
});
fn process<'a, N: ast::FnDefOwner>(
node: &'a N,
params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
) {
node.functions()
.filter_map(|it| it.param_list())
.flat_map(|it| it.params())
.for_each(|param| {
let text = param.syntax().text().to_string();
params.entry(text).or_insert((0, param)).0 += 1;
})
}
}
#[cfg(test)]
mod tests {
use crate::completion::*;
fn check_magic_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Magic);
}
#[test]
fn test_param_completion_last_param() {
check_magic_completion(
r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>) {}
",
r#"file_id "file_id: FileId""#,
);
}
#[test]
fn test_param_completion_nth_param() {
check_magic_completion(
r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>, x: i32) {}
",
r#"file_id "file_id: FileId""#,
);
}
#[test]
fn test_param_completion_trait_param() {
check_magic_completion(
r"
pub(crate) trait SourceRoot {
pub fn contains(&self, file_id: FileId) -> bool;
pub fn module_map(&self) -> &ModuleMap;
pub fn lines(&self, file_id: FileId) -> &LineIndex;
pub fn syntax(&self, file<|>)
}
",
r#"file_id "file_id: FileId""#,
);
}
}
| 28.902913 | 88 | 0.521666 |
f75e35133fe9ce815a20199f10347615642a0a89 | 14,506 | // Copyright 2018, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//
// TODO: Implement custom subtyping here for things like GParamSpecInt to get
// default/min/max values and similar
use gobject_sys;
use libc;
use translate::*;
use ParamFlags;
use Value;
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ParamSpec(Shared<gobject_sys::GParamSpec>);
match fn {
ref => |ptr| gobject_sys::g_param_spec_ref_sink(ptr),
unref => |ptr| gobject_sys::g_param_spec_unref(ptr),
get_type => || gobject_sys::G_TYPE_PARAM,
}
}
unsafe impl Send for ParamSpec {}
unsafe impl Sync for ParamSpec {}
impl ParamSpec {
pub fn get_value_type(&self) -> ::Type {
unsafe { from_glib((*self.to_glib_none().0).value_type) }
}
pub fn get_owner_type(&self) -> ::Type {
unsafe { from_glib((*self.to_glib_none().0).owner_type) }
}
pub fn get_flags(&self) -> ParamFlags {
unsafe { from_glib((*self.to_glib_none().0).flags) }
}
pub fn get_blurb(&self) -> String {
unsafe { from_glib_none(gobject_sys::g_param_spec_get_blurb(self.to_glib_none().0)) }
}
pub fn get_default_value(&self) -> Option<Value> {
unsafe {
from_glib_none(gobject_sys::g_param_spec_get_default_value(
self.to_glib_none().0,
))
}
}
pub fn get_name(&self) -> String {
unsafe { from_glib_none(gobject_sys::g_param_spec_get_name(self.to_glib_none().0)) }
}
#[cfg(any(feature = "v2_46", feature = "dox"))]
pub fn get_name_quark(&self) -> ::Quark {
unsafe {
from_glib(gobject_sys::g_param_spec_get_name_quark(
self.to_glib_none().0,
))
}
}
pub fn get_nick(&self) -> String {
unsafe { from_glib_none(gobject_sys::g_param_spec_get_nick(self.to_glib_none().0)) }
}
//pub fn get_qdata(&self, quark: /*Ignored*/glib::Quark) -> /*Unimplemented*/Option<Fundamental: Pointer> {
// unsafe { TODO: call gobject_sys::g_param_spec_get_qdata() }
//}
pub fn get_redirect_target(&self) -> Option<ParamSpec> {
unsafe {
from_glib_none(gobject_sys::g_param_spec_get_redirect_target(
self.to_glib_none().0,
))
}
}
//pub fn set_qdata(&self, quark: /*Ignored*/glib::Quark, data: Option</*Unimplemented*/Fundamental: Pointer>) {
// unsafe { TODO: call gobject_sys::g_param_spec_set_qdata() }
//}
//pub fn set_qdata_full(&self, quark: /*Ignored*/glib::Quark, data: Option</*Unimplemented*/Fundamental: Pointer>, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify) {
// unsafe { TODO: call gobject_sys::g_param_spec_set_qdata_full() }
//}
//pub fn steal_qdata(&self, quark: /*Ignored*/glib::Quark) -> /*Unimplemented*/Option<Fundamental: Pointer> {
// unsafe { TODO: call gobject_sys::g_param_spec_steal_qdata() }
//}
pub fn boolean(
name: &str,
nick: &str,
blurb: &str,
default_value: bool,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_boolean(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
default_value.to_glib(),
flags.to_glib(),
))
}
}
pub fn boxed(
name: &str,
nick: &str,
blurb: &str,
boxed_type: ::Type,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_boxed(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
boxed_type.to_glib(),
flags.to_glib(),
))
}
}
pub fn char(
name: &str,
nick: &str,
blurb: &str,
minimum: i8,
maximum: i8,
default_value: i8,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_char(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn double(
name: &str,
nick: &str,
blurb: &str,
minimum: f64,
maximum: f64,
default_value: f64,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_double(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn enum_(
name: &str,
nick: &str,
blurb: &str,
enum_type: ::Type,
default_value: i32,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_enum(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
enum_type.to_glib(),
default_value,
flags.to_glib(),
))
}
}
pub fn flags(
name: &str,
nick: &str,
blurb: &str,
flags_type: ::Type,
default_value: u32,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_flags(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
flags_type.to_glib(),
default_value,
flags.to_glib(),
))
}
}
pub fn float(
name: &str,
nick: &str,
blurb: &str,
minimum: f32,
maximum: f32,
default_value: f32,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_float(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn gtype(
name: &str,
nick: &str,
blurb: &str,
is_a_type: ::Type,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_gtype(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
is_a_type.to_glib(),
flags.to_glib(),
))
}
}
pub fn int(
name: &str,
nick: &str,
blurb: &str,
minimum: i32,
maximum: i32,
default_value: i32,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_int(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn int64(
name: &str,
nick: &str,
blurb: &str,
minimum: i64,
maximum: i64,
default_value: i64,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_int64(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn long(
name: &str,
nick: &str,
blurb: &str,
minimum: libc::c_long,
maximum: libc::c_long,
default_value: libc::c_long,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_long(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn object(
name: &str,
nick: &str,
blurb: &str,
object_type: ::Type,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_object(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
object_type.to_glib(),
flags.to_glib(),
))
}
}
pub fn override_(name: &str, overridden: &ParamSpec) -> ParamSpec {
unsafe {
from_glib_none(gobject_sys::g_param_spec_override(
name.to_glib_none().0,
overridden.to_glib_none().0,
))
}
}
pub fn param(
name: &str,
nick: &str,
blurb: &str,
param_type: ::Type,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_param(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
param_type.to_glib(),
flags.to_glib(),
))
}
}
pub fn pointer(name: &str, nick: &str, blurb: &str, flags: ParamFlags) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_pointer(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
flags.to_glib(),
))
}
}
pub fn string(
name: &str,
nick: &str,
blurb: &str,
default_value: Option<&str>,
flags: ParamFlags,
) -> ParamSpec {
let default_value = default_value.to_glib_none();
unsafe {
from_glib_full(gobject_sys::g_param_spec_string(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
default_value.0,
flags.to_glib(),
))
}
}
pub fn uchar(
name: &str,
nick: &str,
blurb: &str,
minimum: u8,
maximum: u8,
default_value: u8,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_uchar(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn uint(
name: &str,
nick: &str,
blurb: &str,
minimum: u32,
maximum: u32,
default_value: u32,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_uint(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn uint64(
name: &str,
nick: &str,
blurb: &str,
minimum: u64,
maximum: u64,
default_value: u64,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_uint64(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn ulong(
name: &str,
nick: &str,
blurb: &str,
minimum: libc::c_ulong,
maximum: libc::c_ulong,
default_value: libc::c_ulong,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_ulong(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
minimum,
maximum,
default_value,
flags.to_glib(),
))
}
}
pub fn unichar(
name: &str,
nick: &str,
blurb: &str,
default_value: char,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_full(gobject_sys::g_param_spec_unichar(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
default_value.to_glib(),
flags.to_glib(),
))
}
}
pub fn value_array(
name: &str,
nick: &str,
blurb: &str,
element_spec: &ParamSpec,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_none(gobject_sys::g_param_spec_value_array(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
element_spec.to_glib_none().0,
flags.to_glib(),
))
}
}
pub fn variant(
name: &str,
nick: &str,
blurb: &str,
type_: &::VariantTy,
default_value: Option<&::Variant>,
flags: ParamFlags,
) -> ParamSpec {
unsafe {
from_glib_none(gobject_sys::g_param_spec_variant(
name.to_glib_none().0,
nick.to_glib_none().0,
blurb.to_glib_none().0,
type_.to_glib_none().0,
default_value.to_glib_none().0,
flags.to_glib(),
))
}
}
}
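// Illustrative sketch (added commentary, not part of the original bindings):
// constructing a typical integer property spec. `ParamFlags::READWRITE` is
// assumed to be exposed by this crate's `ParamFlags` bitflags.
#[cfg(test)]
mod param_spec_example {
    use super::ParamSpec;
    use ParamFlags;

    #[test]
    fn build_int_spec() {
        let pspec = ParamSpec::int(
            "count",              // property name
            "Count",              // nick
            "An example counter", // blurb
            0,                    // minimum
            100,                  // maximum
            0,                    // default value
            ParamFlags::READWRITE,
        );
        assert_eq!(pspec.get_name(), "count");
    }
}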
| 26.962825 | 183 | 0.488143 |
4ab090b684722ccb9a0b412a787ff203bfe03bdf | 1,430 | // Copyright 2017 The UNIC Project Developers.
//
// See the COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::path::Path;
use source::idna::idna_mapping_table::IDNA_MAPPING;
use source::idna::readme::UNICODE_VERSION;
use writer::common::emit_unicode_version;
use writer::utils::tables::ToRangeCharTable;
use writer::utils::write;
pub fn generate(dir: &Path) {
emit_unicode_version(dir, &UNICODE_VERSION);
emit_idna_mapping(dir);
}
pub fn emit_idna_mapping(dir: &Path) {
let contents: String = IDNA_MAPPING.map.to_range_char_table(|entry, f| {
write!(f, "{}", entry.status)?;
if matches!(
entry.status,
"Mapped" | "Deviation" | "DisallowedStd3Mapped"
) {
// TODO(NIGHTLY_RUST): Use str::escape_unicode()
write!(f, "(\"")?;
if let Some(ref s) = entry.mapping {
for ch in s.chars() {
write!(f, "{}", ch.escape_unicode())?;
}
}
write!(f, "\")")?;
}
Ok(())
});
write(dir, "idna_mapping.rsv", &contents);
}
| 31.086957 | 76 | 0.613287 |
23308198bd763e111c4e50a4e96b0f32c72b83e0 | 5,547 | /// ffi.rs: re-exports trampoline symbols.
///
/// NB: Rust (as of today's nightly) doesn't export symbols from .c/.S files,
/// and it doesn't seem to have visibility controls such as
/// __attribute__((visibility("hidden"))), so there's no good way to work
/// around this; see Rust issue #36342 for more details.
/// As a result, we re-export all the needed C/ASM symbols to make sure our
/// cdylib is built correctly.
use core::ffi::c_void;
use reverie_common::consts;
use reverie_common::local_state::*;
use syscalls::*;
static SYSCALL_UNTRACED: u64 = 0x7000_0000;
static SYSCALL_TRACED: u64 = 0x7000_0004;
extern "C" {
fn _raw_syscall(
syscallno: i32,
arg0: i64,
arg1: i64,
arg2: i64,
arg3: i64,
arg4: i64,
arg5: i64,
syscall_insn: *mut c_void,
sp1: i64,
sp2: i64,
) -> i64;
fn _syscall_hook_trampoline();
fn _syscall_hook_trampoline_48_3d_01_f0_ff_ff();
fn _syscall_hook_trampoline_48_3d_00_f0_ff_ff();
fn _syscall_hook_trampoline_48_8b_3c_24();
fn _syscall_hook_trampoline_5a_5e_c3();
fn _syscall_hook_trampoline_89_c2_f7_da();
fn _syscall_hook_trampoline_90_90_90();
fn _syscall_hook_trampoline_ba_01_00_00_00();
fn _syscall_hook_trampoline_89_c1_31_d2();
fn _syscall_hook_trampoline_89_d0_87_07();
fn _syscall_hook_trampoline_c3_nop();
fn _syscall_hook_trampoline_85_c0_0f_94_c2();
fn _remote_syscall_helper();
fn _remote_funccall_helper();
fn captured_syscall(
_p: &mut ProcessState,
no: i32,
a0: i64,
a1: i64,
a2: i64,
a3: i64,
a4: i64,
a5: i64,
) -> i64;
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline() {
_syscall_hook_trampoline()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_48_3d_01_f0_ff_ff() {
_syscall_hook_trampoline_48_3d_01_f0_ff_ff()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_48_3d_00_f0_ff_ff() {
_syscall_hook_trampoline_48_3d_00_f0_ff_ff()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_48_8b_3c_24() {
_syscall_hook_trampoline_48_8b_3c_24()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_5a_5e_c3() {
_syscall_hook_trampoline_5a_5e_c3()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_89_c2_f7_da() {
_syscall_hook_trampoline_89_c2_f7_da()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_90_90_90() {
_syscall_hook_trampoline_90_90_90()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_ba_01_00_00_00() {
_syscall_hook_trampoline_ba_01_00_00_00()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_89_c1_31_d2() {
_syscall_hook_trampoline_89_c1_31_d2()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_89_d0_87_07() {
_syscall_hook_trampoline_89_d0_87_07()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_c3_nop() {
_syscall_hook_trampoline_c3_nop()
}
#[no_mangle]
unsafe extern "C" fn syscall_hook_trampoline_85_c0_0f_94_c2() {
_syscall_hook_trampoline_85_c0_0f_94_c2()
}
#[no_mangle]
unsafe extern "C" fn traced_syscall(
syscallno: i32,
arg0: i64,
arg1: i64,
arg2: i64,
arg3: i64,
arg4: i64,
arg5: i64,
) -> i64 {
_raw_syscall(
syscallno,
arg0,
arg1,
arg2,
arg3,
arg4,
arg5,
SYSCALL_TRACED as *mut _,
0,
0,
)
}
#[no_mangle]
unsafe extern "C" fn untraced_syscall(
syscallno: i32,
arg0: i64,
arg1: i64,
arg2: i64,
arg3: i64,
arg4: i64,
arg5: i64,
) -> i64 {
_raw_syscall(
syscallno,
arg0,
arg1,
arg2,
arg3,
arg4,
arg5,
SYSCALL_UNTRACED as *mut _,
0,
0,
)
}
#[no_mangle]
unsafe extern "C" fn remote_syscall_helper_do_not_call_me() {
_remote_syscall_helper();
}
#[repr(C)]
struct syscall_info {
no: u64,
args: [u64; 6],
}
#[no_mangle]
unsafe extern "C" fn syscall_hook(info: *const syscall_info) -> i64 {
if let Some(cell) = &PSTATE {
let mut pstate = cell.get().as_mut().unwrap();
let sc = info.as_ref().unwrap();
let _no = SyscallNo::from(sc.no as i32);
let _tid = syscall!(SYS_gettid).unwrap() as i32;
let res = captured_syscall(
&mut pstate,
sc.no as i32,
sc.args[0] as i64,
sc.args[1] as i64,
sc.args[2] as i64,
sc.args[3] as i64,
sc.args[4] as i64,
sc.args[5] as i64,
);
return res;
}
return -38; // ENOSYS
}
#[link_section = ".init_array"]
#[used]
static EARLY_TRAMPOLINE_INIT: extern "C" fn() = {
extern "C" fn trampoline_ctor() {
let syscall_hook_ptr =
consts::REVERIE_LOCAL_SYSCALL_HOOK_ADDR as *mut u64;
unsafe {
core::ptr::write(syscall_hook_ptr, syscall_hook as u64);
}
let ready = consts::REVERIE_LOCAL_SYSCALL_TRAMPOLINE as *mut u64;
unsafe {
core::ptr::write(ready, 1);
}
let syscall_helper_ptr =
consts::REVERIE_LOCAL_SYSCALL_HELPER as *mut u64;
unsafe {
core::ptr::write(syscall_helper_ptr, _remote_syscall_helper as u64);
}
let rpc_helper_ptr = consts::REVERIE_LOCAL_RPC_HELPER as *mut u64;
unsafe {
core::ptr::write(rpc_helper_ptr, _remote_funccall_helper as u64);
}
};
trampoline_ctor
};
| 24.986486 | 80 | 0.643952 |
87173c17007b204024dfa8b3418cc1db30e857fd | 7,872 | //! Tock kernel for the Nordic Semiconductor nRF52840 development kit (DK).
//!
//! It is based on nRF52840 SoC (Cortex M4 core with a BLE transceiver) with
//! many exported I/O and peripherals.
#![no_std]
#![no_main]
#![deny(missing_docs)]
#[allow(unused_imports)]
use kernel::{debug, debug_gpio, debug_verbose, static_init};
use nrf52dk_base::{SpiMX25R6435FPins, SpiPins, UartPins};
// The nRF52840DK LEDs (see back of board)
const LED1_PIN: usize = 13;
const LED2_PIN: usize = 14;
const LED3_PIN: usize = 15;
const LED4_PIN: usize = 16;
// The nRF52840DK buttons (see back of board)
const BUTTON1_PIN: usize = 11;
const BUTTON2_PIN: usize = 12;
const BUTTON3_PIN: usize = 24;
const BUTTON4_PIN: usize = 25;
const BUTTON_RST_PIN: usize = 18;
const UART_RTS: usize = 5;
const UART_TXD: usize = 6;
const UART_CTS: usize = 7;
const UART_RXD: usize = 8;
const SPI_MOSI: usize = 20;
const SPI_MISO: usize = 21;
const SPI_CLK: usize = 19;
const SPI_MX25R6435F_CHIP_SELECT: usize = 17;
const SPI_MX25R6435F_WRITE_PROTECT_PIN: usize = 22;
const SPI_MX25R6435F_HOLD_PIN: usize = 23;
/// UART Writer
pub mod io;
// State for loading and holding applications.
// How should the kernel respond when a process faults.
const FAULT_RESPONSE: kernel::procs::FaultResponse = kernel::procs::FaultResponse::Panic;
// Number of concurrent processes this platform supports.
const NUM_PROCS: usize = 8;
#[link_section = ".app_memory"]
static mut APP_MEMORY: [u8; 245760] = [0; 245760];
static mut PROCESSES: [Option<&'static dyn kernel::procs::ProcessType>; NUM_PROCS] =
[None, None, None, None, None, None, None, None];
/// Dummy buffer that causes the linker to reserve enough space for the stack.
#[no_mangle]
#[link_section = ".stack_buffer"]
pub static mut STACK_MEMORY: [u8; 0x1000] = [0; 0x1000];
/// Entry point in the vector table called on hard reset.
#[no_mangle]
pub unsafe fn reset_handler() {
// Loads relocations and clears BSS
nrf52::init();
// GPIOs
let gpio_pins = static_init!(
[&'static dyn kernel::hil::gpio::InterruptValuePin; 13],
[
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[3])
)
.finalize(), // Bottom right header on DK board
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[4])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[28])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[29])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[30])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[10])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[9])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[8])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[7])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[6])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[5])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[1])
)
.finalize(),
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[0])
)
.finalize(),
]
);
// LEDs
let led_pins = static_init!(
[(
&'static dyn kernel::hil::gpio::Pin,
capsules::led::ActivationMode
); 4],
[
(
&nrf5x::gpio::PORT[LED1_PIN],
capsules::led::ActivationMode::ActiveLow
),
(
&nrf5x::gpio::PORT[LED2_PIN],
capsules::led::ActivationMode::ActiveLow
),
(
&nrf5x::gpio::PORT[LED3_PIN],
capsules::led::ActivationMode::ActiveLow
),
(
&nrf5x::gpio::PORT[LED4_PIN],
capsules::led::ActivationMode::ActiveLow
),
]
);
let button_pins = static_init!(
[(
&'static dyn kernel::hil::gpio::InterruptValuePin,
capsules::button::GpioMode
); 4],
[
(
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[BUTTON1_PIN])
)
.finalize(),
capsules::button::GpioMode::LowWhenPressed
), // 13
(
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[BUTTON2_PIN])
)
.finalize(),
capsules::button::GpioMode::LowWhenPressed
), // 14
(
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[BUTTON3_PIN])
)
.finalize(),
capsules::button::GpioMode::LowWhenPressed
), // 15
(
static_init!(
kernel::hil::gpio::InterruptValueWrapper,
kernel::hil::gpio::InterruptValueWrapper::new(&nrf5x::gpio::PORT[BUTTON4_PIN])
)
.finalize(),
capsules::button::GpioMode::LowWhenPressed
), // 16
]
);
for &(btn, _) in button_pins.iter() {
btn.set_floating_state(kernel::hil::gpio::FloatingState::PullUp);
}
let board_kernel = static_init!(kernel::Kernel, kernel::Kernel::new(&PROCESSES));
nrf52dk_base::setup_board(
board_kernel,
BUTTON_RST_PIN,
gpio_pins,
LED1_PIN,
LED2_PIN,
LED3_PIN,
led_pins,
&UartPins::new(UART_RTS, UART_TXD, UART_CTS, UART_RXD),
&SpiPins::new(SPI_MOSI, SPI_MISO, SPI_CLK),
&Some(SpiMX25R6435FPins::new(
SPI_MX25R6435F_CHIP_SELECT,
SPI_MX25R6435F_WRITE_PROTECT_PIN,
SPI_MX25R6435F_HOLD_PIN,
)),
button_pins,
true,
&mut APP_MEMORY,
&mut PROCESSES,
FAULT_RESPONSE,
);
}
| 33.497872 | 98 | 0.551829 |
019cb302e64ed3a408ab154aa1271fa5a74e8af2 | 1,835 | use rtm::{File, Message};
use timestamp::Timestamp;
/// Pins an item to a channel.
///
/// Wraps https://api.slack.com/methods/pins.add
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum Pinnable {
/// File to pin or unpin
File(::FileId),
/// Timestamp of the message to pin or unpin
Timestamp(::Timestamp),
}
#[derive(Clone, Debug, Serialize, new)]
pub struct AddRequest {
/// Channel to pin the item in.
pub channel: ::ConversationId,
#[serde(flatten)]
pub item: Pinnable,
}
/// Lists items pinned to a channel.
///
/// Wraps https://api.slack.com/methods/pins.list
#[derive(Clone, Debug, Serialize, new)]
pub struct ListRequest {
/// Channel to get pinned items for.
pub channel: ::ConversationId,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ListResponse {
pub items: Option<Vec<ListResponseItem>>,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "type")]
#[serde(deny_unknown_fields)]
pub enum ListResponseItem {
Message(ListResponseItemMessage),
File(ListResponseItemFile),
}
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ListResponseItemFile {
pub created: Option<Timestamp>,
pub created_by: Option<::UserId>,
pub file: File,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ListResponseItemMessage {
pub channel: ::ConversationId,
pub created: Option<Timestamp>,
pub created_by: Option<::UserId>,
pub message: Message,
}
/// Un-pins an item from a channel.
///
/// Wraps https://api.slack.com/methods/pins.remove
#[derive(Clone, Debug, Serialize, new)]
pub struct RemoveRequest {
/// Channel where the item is pinned to.
pub channel: ::ConversationId,
#[serde(flatten)]
pub item: Pinnable,
}
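// Added commentary (not in the original file): the `#[derive(new)]` attributes
// above generate positional constructors, so requests are typically built as,
// e.g., `AddRequest::new(channel, Pinnable::Timestamp(ts))` and
// `RemoveRequest::new(channel, Pinnable::Timestamp(ts))`, where `channel` and
// `ts` come from earlier API responses.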
| 23.831169 | 51 | 0.687738 |
e2bc63f5ee5bc2d4ea6b379fb236476c25856b93 | 9,082 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
format_module_id,
test_reporter::{FailureReason, TestFailure, TestResults, TestStatistics},
};
use anyhow::Result;
use colored::*;
use move_binary_format::file_format::CompiledModule;
use move_core_types::{
gas_schedule::{CostTable, GasAlgebra, GasCost, GasUnits},
identifier::IdentStr,
value::serialize_values,
vm_status::StatusCode,
};
use move_lang::unit_test::{ExpectedFailure, ModuleTestPlan, TestPlan};
use move_vm_runtime::{logging::NoContextLog, move_vm::MoveVM};
use move_vm_test_utils::InMemoryStorage;
use move_vm_types::gas_schedule::{zero_cost_schedule, GasStatus};
use rayon::prelude::*;
use std::{io::Write, marker::Send, sync::Mutex};
/// Test state common to all tests
#[derive(Debug)]
pub struct SharedTestingConfig {
execution_bound: u64,
cost_table: CostTable,
starting_storage_state: InMemoryStorage,
}
#[derive(Debug)]
pub struct TestRunner {
num_threads: usize,
testing_config: SharedTestingConfig,
tests: TestPlan,
}
/// A gas schedule where every instruction has a cost of "1". This is used to bound execution of a
/// test to a certain number of ticks.
fn unit_cost_table() -> CostTable {
let mut cost_schedule = zero_cost_schedule();
cost_schedule.instruction_table.iter_mut().for_each(|cost| {
*cost = GasCost::new(1, 1);
});
cost_schedule.native_table.iter_mut().for_each(|cost| {
*cost = GasCost::new(1, 1);
});
cost_schedule
}
/// Setup storage state with the set of modules that will be needed for all tests
fn setup_test_storage<'a>(
modules: impl Iterator<Item = &'a CompiledModule>,
) -> Result<InMemoryStorage> {
let mut storage = InMemoryStorage::new();
for module in modules {
let module_id = module.self_id();
let mut module_bytes = Vec::new();
module.serialize(&mut module_bytes)?;
storage.publish_or_overwrite_module(module_id, module_bytes);
}
Ok(storage)
}
impl TestRunner {
pub fn new(execution_bound: u64, num_threads: usize, tests: TestPlan) -> Result<Self> {
let modules = tests.module_info.values().map(|info| &info.0);
let starting_storage_state = setup_test_storage(modules)?;
Ok(Self {
testing_config: SharedTestingConfig {
starting_storage_state,
execution_bound,
cost_table: unit_cost_table(),
},
num_threads,
tests,
})
}
pub fn run<W: Write + Send>(self, writer: &Mutex<W>) -> Result<TestResults> {
rayon::ThreadPoolBuilder::new()
.num_threads(self.num_threads)
.build()
.unwrap()
.install(|| {
let final_statistics = self
.tests
.module_tests
.par_iter()
.map(|(_, test_plan)| self.testing_config.exec_module_tests(test_plan, writer))
.reduce(TestStatistics::new, |acc, stats| acc.combine(stats));
Ok(TestResults::new(final_statistics, self.tests))
})
}
pub fn filter(&mut self, test_name_slice: &str) {
for (module_id, module_test) in self.tests.module_tests.iter_mut() {
if module_id.name().as_str().contains(test_name_slice) {
continue;
} else {
let tests = std::mem::take(&mut module_test.tests);
module_test.tests = tests
.into_iter()
.filter(|(test_name, _)| test_name.as_str().contains(test_name_slice))
.collect();
}
}
}
}
impl SharedTestingConfig {
fn exec_module_tests<W: Write>(
&self,
test_plan: &ModuleTestPlan,
writer: &Mutex<W>,
) -> TestStatistics {
let mut stats = TestStatistics::new();
let pass = |fn_name: &str| {
writeln!(
writer.lock().unwrap(),
"[ {} ] {}::{}",
"PASS".bold().bright_green(),
format_module_id(&test_plan.module_id),
fn_name
)
.unwrap()
};
let fail = |fn_name: &str| {
writeln!(
writer.lock().unwrap(),
"[ {} ] {}::{}",
"FAIL".bold().bright_red(),
format_module_id(&test_plan.module_id),
fn_name,
)
.unwrap()
};
let timeout = |fn_name: &str| {
writeln!(
writer.lock().unwrap(),
"[ {} ] {}::{}",
"TIMEOUT".bold().bright_yellow(),
format_module_id(&test_plan.module_id),
fn_name,
)
.unwrap();
};
for (function_name, test_info) in &test_plan.tests {
let move_vm = MoveVM::new();
let mut session = move_vm.new_session(&self.starting_storage_state);
let log_context = NoContextLog::new();
match session.execute_function(
&test_plan.module_id,
&IdentStr::new(function_name).unwrap(),
vec![], // no ty args, at least for now
serialize_values(test_info.arguments.iter()),
&mut GasStatus::new(&self.cost_table, GasUnits::new(self.execution_bound)),
&log_context,
) {
Err(err) => match (test_info.expected_failure.as_ref(), err.sub_status()) {
// Ran out of ticks, report a test timeout and log a test failure
_ if err.major_status() == StatusCode::OUT_OF_GAS => {
timeout(function_name);
stats.test_failure(
TestFailure::new(FailureReason::timeout(), function_name, Some(err)),
&test_plan,
)
}
// Expected the test to not abort, but it aborted with `code`
(None, Some(code)) => {
fail(function_name);
stats.test_failure(
TestFailure::new(
FailureReason::aborted(code),
function_name,
Some(err),
),
&test_plan,
)
}
// Expected the test to abort with a specific `code`, and it did abort with
// that abort code
(Some(ExpectedFailure::ExpectedWithCode(code)), Some(other_code))
if err.major_status() == StatusCode::ABORTED && *code == other_code =>
{
pass(function_name);
stats.test_success();
}
// Expected the test to abort with a specific `code` but it aborted with a
// different `other_code`
(Some(ExpectedFailure::ExpectedWithCode(code)), Some(other_code)) => {
fail(function_name);
stats.test_failure(
TestFailure::new(
FailureReason::wrong_abort(*code, other_code),
function_name,
Some(err),
),
&test_plan,
)
}
// Expected the test to abort and it aborted, but we don't need to check the code
(Some(ExpectedFailure::Expected), Some(_)) => {
pass(function_name);
stats.test_success();
}
// Unexpected return status from the VM, signal that we hit an unknown error.
(_, None) => {
fail(function_name);
stats.test_failure(
TestFailure::new(FailureReason::unknown(), function_name, Some(err)),
&test_plan,
)
}
},
Ok(_) => {
// Expected the test to fail, but it executed
if test_info.expected_failure.is_some() {
fail(function_name);
stats.test_failure(
TestFailure::new(FailureReason::no_abort(), function_name, None),
&test_plan,
)
} else {
// Expected the test to execute fully and it did
pass(function_name);
stats.test_success();
}
}
}
}
stats
}
}
| 37.684647 | 101 | 0.498679 |
d69b01f2e5c37a46b81db56d32a6d4c43b68f672 | 5,154 | #[doc = "Register `CNSTAT_CMB0` reader"]
pub struct R(crate::R<CNSTAT_CMB0_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<CNSTAT_CMB0_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<CNSTAT_CMB0_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<CNSTAT_CMB0_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `CNSTAT_CMB0` writer"]
pub struct W(crate::W<CNSTAT_CMB0_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<CNSTAT_CMB0_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<CNSTAT_CMB0_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<CNSTAT_CMB0_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `DLC` reader - Data Length Code"]
pub struct DLC_R(crate::FieldReader<u8, u8>);
impl DLC_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
DLC_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for DLC_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DLC` writer - Data Length Code"]
pub struct DLC_W<'a> {
w: &'a mut W,
}
impl<'a> DLC_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 12)) | ((value as u32 & 0x0f) << 12);
self.w
}
}
#[doc = "Field `PRI` reader - Transmit Priority Code"]
pub struct PRI_R(crate::FieldReader<u8, u8>);
impl PRI_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
PRI_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for PRI_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PRI` writer - Transmit Priority Code"]
pub struct PRI_W<'a> {
w: &'a mut W,
}
impl<'a> PRI_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 4)) | ((value as u32 & 0x0f) << 4);
self.w
}
}
#[doc = "Field `ST` reader - Buffer Status"]
pub struct ST_R(crate::FieldReader<u8, u8>);
impl ST_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ST_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ST` writer - Buffer Status"]
pub struct ST_W<'a> {
w: &'a mut W,
}
impl<'a> ST_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u32 & 0x0f);
self.w
}
}
impl R {
#[doc = "Bits 12:15 - Data Length Code"]
#[inline(always)]
pub fn dlc(&self) -> DLC_R {
DLC_R::new(((self.bits >> 12) & 0x0f) as u8)
}
#[doc = "Bits 4:7 - Transmit Priority Code"]
#[inline(always)]
pub fn pri(&self) -> PRI_R {
PRI_R::new(((self.bits >> 4) & 0x0f) as u8)
}
#[doc = "Bits 0:3 - Buffer Status"]
#[inline(always)]
pub fn st(&self) -> ST_R {
ST_R::new((self.bits & 0x0f) as u8)
}
}
impl W {
#[doc = "Bits 12:15 - Data Length Code"]
#[inline(always)]
pub fn dlc(&mut self) -> DLC_W {
DLC_W { w: self }
}
#[doc = "Bits 4:7 - Transmit Priority Code"]
#[inline(always)]
pub fn pri(&mut self) -> PRI_W {
PRI_W { w: self }
}
#[doc = "Bits 0:3 - Buffer Status"]
#[inline(always)]
pub fn st(&mut self) -> ST_W {
ST_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Buffer Status / Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cnstat_cmb0](index.html) module"]
pub struct CNSTAT_CMB0_SPEC;
impl crate::RegisterSpec for CNSTAT_CMB0_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [cnstat_cmb0::R](R) reader structure"]
impl crate::Readable for CNSTAT_CMB0_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [cnstat_cmb0::W](W) writer structure"]
impl crate::Writable for CNSTAT_CMB0_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets CNSTAT_CMB0 to value 0"]
impl crate::Resettable for CNSTAT_CMB0_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 28.955056 | 424 | 0.578386 |
79150f340cad84f4feb9fef1db883720aca0c6a6 | 697 | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for issue #46557
#![feature(nll)]
#![allow(dead_code)]
fn gimme_static_mut() -> &'static mut u32 {
let ref mut x = 1234543; //~ ERROR borrowed value does not live long enough [E0597]
x
}
fn main() {}
| 31.681818 | 87 | 0.713056 |
292fa9c68e544d275b631d3d00184bb65e84fccd | 2,525 | /*
use array_init::array_init;
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use num_bigint::BigInt;
use ordered_float::*;
use rand::Rng;
use rgeometry::algorithms::convex_hull;
use rgeometry::data::*;
use std::convert::*;
pub fn gen_arr<R, const N: usize>(rng: &mut R) -> [Point<i32, 2>; N]
where
R: Rng + ?Sized,
{
let mut arr = [Point::new([0, 0]); N];
for pt in arr.iter_mut() {
*pt = rng.gen();
}
arr
}
pub fn gen_arr_f64<R, const N: usize>(rng: &mut R) -> [Point<OrderedFloat<f64>, 2>; N]
where
R: Rng + ?Sized,
{
let mut arr = [Point::new([OrderedFloat(0.), OrderedFloat(0.)]); N];
for pt in arr.iter_mut() {
*pt = rng.gen();
}
arr
}
pub fn criterion_benchmark(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let p1: [Point<i32, 2>; 10] = gen_arr(&mut rng);
let p2: [Point<i32, 2>; 100] = gen_arr(&mut rng);
let p3: [Point<i32, 2>; 1000] = gen_arr(&mut rng);
let p4: [Point<OrderedFloat<f64>, 2>; 10000] = gen_arr_f64(&mut rng);
let p5: [Point<i32, 2>; 100000] = gen_arr(&mut rng);
// c.bench_function("convex_hull(1e1)", |b| {
// b.iter(|| convex_hull(Vec::from(p1)))
// });
// c.bench_function("convex_hull(1e2)", |b| {
// b.iter(|| convex_hull(Vec::from(p2)))
// });
// c.bench_function("convex_hull(1e3)", |b| {
// b.iter(|| convex_hull(Vec::from(p3)))
// });
c.bench_function("convex_hull(1e4)", |b| {
b.iter(|| convex_hull(Vec::from(p4)))
});
c.bench_function("convex_hull(1e4)", |b| {
b.iter(|| {
let mut vec = Vec::from(p4);
vec.sort_unstable();
vec.dedup();
vec
})
});
c.bench_function("convex_hull(1e4)", |b| b.iter(|| Vec::from(p4)));
// c.bench_function("convex_hull(1e5)", |b| {
// b.iter(|| convex_hull(Vec::from(p5)))
// });
// let p: [Point<BigInt, 2>; 10_000] = array_init(|_i| {
// let tmp: Point<u64, 2> = rng.gen();
// tmp.cast(BigInt::from)
// });
// for &n in &[1_000, 10_000] {
// c.bench_function(&format!("convex_hull({})", n), |b| {
// b.iter_batched(
// || Vec::from(&p[0..n]),
// |inp| convex_hull(inp),
// BatchSize::LargeInput,
// )
// });
// }
}
// 33 => 5
// 664 => 3.9
// 9965 => 3.813
// 132877 => 5.411
// 1660964 => 5.237
// 10*ln 10 * x = 0.165us
// 100*ln 100 * x = 2.6us
// 1000*ln 1000 * x = 38us
// 10000*ln 10000 * x = 719us
// 100000*ln 100000 * x = 8.7ms
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
*/
fn main() {}
| 26.861702 | 86 | 0.567129 |
cc94f77165ca36d642cf46d8b77faf8da987f580 | 3,849 | // This file is part of the SORA network and Polkaswap app.
// Copyright (c) 2020, 2021, Polka Biome Ltd. All rights reserved.
// SPDX-License-Identifier: BSD-4-Clause
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
// Redistributions of source code must retain the above copyright notice, this list
// of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice, this
// list of conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
//
// All advertising materials mentioning features or use of this software must display
// the following acknowledgement: This product includes software developed by Polka Biome
// Ltd., SORA, and Polkaswap.
//
// Neither the name of the Polka Biome Ltd. nor the names of its contributors may be used
// to endorse or promote products derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY Polka Biome Ltd. AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Polka Biome Ltd. BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use codec::Codec;
use common::InvokeRPCError;
use jsonrpc_core::{Error as RpcError, ErrorCode, Result};
use jsonrpc_derive::rpc;
use sp_api::ProvideRuntimeApi;
use sp_blockchain::HeaderBackend;
use sp_runtime::generic::BlockId;
use sp_runtime::traits::{Block as BlockT, MaybeDisplay, MaybeFromStr};
use std::sync::Arc;
// Runtime API imports.
pub use rewards_runtime_api::{BalanceInfo, RewardsAPI as RewardsRuntimeAPI};
#[rpc]
pub trait RewardsAPI<BlockHash, EthereumAddress, VecBalanceInfo> {
#[rpc(name = "rewards_claimables")]
fn claimables(
&self,
eth_address: EthereumAddress,
at: Option<BlockHash>,
) -> Result<VecBalanceInfo>;
}
pub struct RewardsClient<C, B> {
client: Arc<C>,
_marker: std::marker::PhantomData<B>,
}
impl<C, B> RewardsClient<C, B> {
/// Construct a new `RewardsClient` from the given client handle.
pub fn new(client: Arc<C>) -> Self {
Self {
client,
_marker: Default::default(),
}
}
}
impl<C, Block, EthereumAddress, Balance>
RewardsAPI<<Block as BlockT>::Hash, EthereumAddress, Vec<BalanceInfo<Balance>>>
for RewardsClient<C, Block>
where
Block: BlockT,
C: Send + Sync + 'static,
C: ProvideRuntimeApi<Block> + HeaderBackend<Block>,
C::Api: RewardsRuntimeAPI<Block, EthereumAddress, Balance>,
EthereumAddress: Codec,
Balance: Codec + MaybeFromStr + MaybeDisplay,
{
fn claimables(
&self,
eth_address: EthereumAddress,
at: Option<<Block as BlockT>::Hash>,
) -> Result<Vec<BalanceInfo<Balance>>> {
let api = self.client.runtime_api();
let at = BlockId::hash(at.unwrap_or(
// If the block hash is not supplied assume the best block.
self.client.info().best_hash,
));
api.claimables(&at, eth_address).map_err(|e| RpcError {
code: ErrorCode::ServerError(InvokeRPCError::RuntimeError.into()),
message: "Unable to get claimables.".into(),
data: Some(format!("{:?}", e).into()),
})
}
}
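// Added commentary (not in the original file): a node typically exposes this
// RPC by wrapping the client in the generated delegate, e.g.
// `io.extend_with(RewardsAPI::to_delegate(RewardsClient::new(client)))`;
// when `at` is `None`, the call is evaluated against the best block.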
| 38.878788 | 103 | 0.708496 |
48392c443244d84743ddc8f100592bed91976132 | 9,061 | use std::{
ffi::{c_void, CString},
os::raw::c_char,
};
use crate::Result;
/// Common trait of all OpenCV related types, helps with generic handling of FFI marshalling
///
/// This trait is somewhat unnecessarily complex because of the need to handle String: we need to be able to
/// pass &str as an argument to functions that expect String and do the necessary conversion through CString.
#[doc(hidden)]
pub trait OpenCVType<'a>: Sized {
/// Type when passed as argument to function, e.g. &str for String, for most other types it's Self
#[doc(hidden)]
type Arg: OpenCVTypeArg<'a>;
/// Return type when this type is returned over the FFI boundary from the C++ function, Self for simple
/// types, *mut c_void for complex ones
#[doc(hidden)]
type ExternReceive;
/// Container to help marshall type over FFI boundary, e.g. CString for String or &str, for most other
/// types it's Self
#[doc(hidden)]
type ExternContainer: OpenCVTypeExternContainer;
/// Convert Self into an external container with a possible error result; it shouldn't panic
#[doc(hidden)]
#[inline]
fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> { Ok(self.opencv_into_extern_container_nofail()) }
/// Convert Self into an external container in the nofail context; this can panic
#[doc(hidden)]
fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer;
/// Construct the new Self from the data received from C++ function
#[doc(hidden)]
unsafe fn opencv_from_extern(s: Self::ExternReceive) -> Self;
}
#[doc(hidden)]
pub trait OpenCVTypeArg<'a>: Sized {
/// Container to help marshall type over FFI boundary, e.g. CString for String or &str, for most other
/// types it's Self
#[doc(hidden)]
type ExternContainer: OpenCVTypeExternContainer;
/// Convert Self into an external container with a possible error result; it shouldn't panic
#[doc(hidden)]
#[inline]
fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> { Ok(self.opencv_into_extern_container_nofail()) }
/// Convert Self into an external container in the nofail context; this can panic
#[doc(hidden)]
fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer;
}
/// Common trait for the type that is used to help marshall OpenCV related type over the FFI boundary
#[doc(hidden)]
pub trait OpenCVTypeExternContainer {
/// Type when constant Self is sent to C++ function, usually it's Self for simple types or *const c_void
/// for complex ones
#[doc(hidden)]
type ExternSend;
/// Type when mutable Self is sent to C++ function, usually it's Self for simple types or *mut c_void for
/// complex ones
#[doc(hidden)]
type ExternSendMut;
#[doc(hidden)]
fn opencv_as_extern(&self) -> Self::ExternSend;
#[doc(hidden)]
fn opencv_as_extern_mut(&mut self) -> Self::ExternSendMut;
#[doc(hidden)]
fn opencv_into_extern(self) -> Self::ExternSendMut;
}
#[macro_export]
macro_rules! opencv_type_copy {
($($type: ty),+ $(,)?) => {
$(
impl $crate::traits::OpenCVType<'_> for $type {
type Arg = Self;
type ExternReceive = Self;
type ExternContainer = Self;
#[inline] fn opencv_into_extern_container(self) -> $crate::Result<Self> { Ok(self) }
#[inline] fn opencv_into_extern_container_nofail(self) -> Self { self }
#[inline] unsafe fn opencv_from_extern(s: Self) -> Self { s }
}
impl $crate::traits::OpenCVTypeArg<'_> for $type {
type ExternContainer = Self;
#[inline] fn opencv_into_extern_container(self) -> $crate::Result<Self> { Ok(self) }
#[inline] fn opencv_into_extern_container_nofail(self) -> Self { self }
}
impl $crate::traits::OpenCVTypeExternContainer for $type {
type ExternSend = Self;
type ExternSendMut = Self;
#[inline] fn opencv_as_extern(&self) -> Self { *self }
#[inline] fn opencv_as_extern_mut(&mut self) -> Self { *self }
#[inline] fn opencv_into_extern(self) -> Self { self }
}
)+
};
}
#[macro_export]
macro_rules! opencv_type_enum {
($type: ty) => {
$crate::opencv_type_copy! { $type }
};
}
#[macro_export]
macro_rules! opencv_type_simple {
($type: ty) => {
impl $crate::traits::OpenCVType<'_> for $type {
type Arg = Self;
type ExternReceive = Self;
type ExternContainer = Self;
#[inline] fn opencv_into_extern_container(self) -> $crate::Result<Self> { Ok(self) }
#[inline] fn opencv_into_extern_container_nofail(self) -> Self { self }
#[inline] unsafe fn opencv_from_extern(s: Self) -> Self { s }
}
impl $crate::traits::OpenCVTypeArg<'_> for $type {
type ExternContainer = Self;
#[inline] fn opencv_into_extern_container(self) -> $crate::Result<Self> { Ok(self) }
#[inline] fn opencv_into_extern_container_nofail(self) -> Self { self }
}
impl $crate::traits::OpenCVTypeExternContainer for $type {
type ExternSend = *const Self;
type ExternSendMut = *mut Self;
#[inline] fn opencv_as_extern(&self) -> Self::ExternSend { self }
#[inline] fn opencv_as_extern_mut(&mut self) -> Self::ExternSendMut { self }
#[inline] fn opencv_into_extern(self) -> Self::ExternSendMut { &mut *std::mem::ManuallyDrop::new(self) as _ }
}
};
}
#[macro_export]
macro_rules! opencv_type_simple_generic {
($type: ident<$trait: ident>) => {
impl<T: $trait> $crate::traits::OpenCVType<'_> for $type<T> {
type Arg = Self;
type ExternReceive = Self;
type ExternContainer = Self;
#[inline] fn opencv_into_extern_container(self) -> $crate::Result<Self> { Ok(self) }
#[inline] fn opencv_into_extern_container_nofail(self) -> Self { self }
#[inline] unsafe fn opencv_from_extern(s: Self) -> Self { s }
}
impl<T: $trait> $crate::traits::OpenCVTypeArg<'_> for $type<T> {
type ExternContainer = Self;
#[inline] fn opencv_into_extern_container(self) -> $crate::Result<Self> { Ok(self) }
#[inline] fn opencv_into_extern_container_nofail(self) -> Self { self }
}
impl<T: $trait> $crate::traits::OpenCVTypeExternContainer for $type<T> {
type ExternSend = *const Self;
type ExternSendMut = *mut Self;
#[inline] fn opencv_as_extern(&self) -> Self::ExternSend { self }
#[inline] fn opencv_as_extern_mut(&mut self) -> Self::ExternSendMut { self }
#[inline] fn opencv_into_extern(self) -> Self::ExternSendMut { &mut *std::mem::ManuallyDrop::new(self) as _ }
}
};
}
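// Added commentary (not in the original file): `opencv_type_copy!` is meant for
// `Copy` primitives that cross the FFI boundary by value, `opencv_type_simple!`
// for plain-old-data structs passed by pointer, and
// `opencv_type_simple_generic!` for the same pointer-based pattern on generic
// wrapper types bounded by a marker trait.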
pub fn cstring_new_nofail(bytes: impl Into<Vec<u8>>) -> CString {
match CString::new(bytes) {
Ok(s) => {
s
}
Err(e) => {
let nul_pos = e.nul_position();
let mut bytes = e.into_vec();
bytes.drain(nul_pos..);
unsafe { CString::from_vec_unchecked(bytes) }
}
}
}
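// Illustrative sketch (added, not part of the original file): `cstring_new_nofail`
// never fails; when the input contains an interior NUL byte it truncates at the
// first NUL instead of returning an error.
#[cfg(test)]
mod cstring_nofail_example {
    use super::cstring_new_nofail;

    #[test]
    fn truncates_at_first_interior_nul() {
        assert_eq!(cstring_new_nofail("abc").to_bytes(), &b"abc"[..]);
        assert_eq!(cstring_new_nofail("ab\0cd").to_bytes(), &b"ab"[..]);
    }
}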
impl<'a> OpenCVType<'a> for String {
type Arg = &'a str;
type ExternReceive = *mut c_void;
type ExternContainer = CString;
#[inline]
fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> {
CString::new(self).map_err(|e| e.into())
}
#[inline]
fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer {
cstring_new_nofail(self)
}
#[inline]
unsafe fn opencv_from_extern(s: Self::ExternReceive) -> Self {
crate::templ::receive_string(s as *mut String)
}
}
impl OpenCVTypeArg<'_> for &str {
type ExternContainer = CString;
#[inline]
fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> {
CString::new(self).map_err(|e| e.into())
}
#[inline]
fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer {
cstring_new_nofail(self)
}
}
impl OpenCVTypeExternContainer for CString {
type ExternSend = *const c_char;
type ExternSendMut = *mut c_char;
#[inline]
fn opencv_as_extern(&self) -> Self::ExternSend {
self.as_ptr()
}
#[inline]
fn opencv_as_extern_mut(&mut self) -> Self::ExternSendMut {
self.as_ptr() as _ // fixme: use as_mut_ptr() when it's stabilized
}
#[inline]
fn opencv_into_extern(self) -> Self::ExternSendMut {
self.into_raw()
}
}
impl<'a> OpenCVType<'a> for Vec<u8> {
type Arg = Self;
type ExternReceive = *mut c_void;
type ExternContainer = Self;
#[inline]
fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> {
Ok(self)
}
#[inline]
fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer {
self
}
#[inline]
unsafe fn opencv_from_extern(s: Self::ExternReceive) -> Self {
crate::templ::receive_byte_string(s as *mut Vec<u8>)
}
}
impl OpenCVTypeArg<'_> for Vec<u8> {
type ExternContainer = Self;
#[inline]
fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer {
self.to_vec()
}
}
impl OpenCVTypeExternContainer for Vec<u8> {
type ExternSend = *const u8;
type ExternSendMut = *mut u8;
#[inline]
fn opencv_as_extern(&self) -> Self::ExternSend {
self.as_ptr()
}
#[inline]
fn opencv_as_extern_mut(&mut self) -> Self::ExternSendMut {
self.as_mut_ptr()
}
#[inline]
fn opencv_into_extern(self) -> Self::ExternSendMut {
unimplemented!("This is intentionally left unimplemented as there seems to be no need for it and it's difficult to implement it without leakage")
}
}
opencv_type_copy! {
(),
bool,
i8, u8,
i16, u16,
i32, u32,
i64, u64,
f32, f64,
isize, usize,
*const c_void, *mut c_void,
}
| 29.514658 | 147 | 0.698378 |
72e93fcfc65fa4c34e88ebc8c185704c22b8968a | 7,901 | // To use example, press any key in serial terminal
// Packet will send and "Transmit Done!" will print when radio is done sending packet
#![cfg_attr(not(test), no_std)]
#![no_main]
extern crate nb;
extern crate panic_halt;
use hal::{
exti::{
self,
Exti,
ExtiLine as _,
},
gpio::*, prelude::*, rcc, serial::{self, Serial1Ext}, syscfg,
};
use hal::rng::Rng;
use stm32l0xx_hal as hal;
use longfi_device;
use longfi_device::{ClientEvent, Config, LongFi, Radio, RfEvent};
use catena_4610;
use core::fmt::Write;
static mut PRESHARED_KEY: [u8; 16] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
#[rtic::app(device = stm32l0xx_hal::pac, peripherals = true)]
const APP: () = {
struct Resources {
int: Exti,
radio_irq: catena_4610::RadioIRQ,
debug_uart: serial::Tx<catena_4610::DebugUsart>,
uart_rx: serial::Rx<catena_4610::DebugUsart>,
#[init([0;512])]
buffer: [u8; 512],
#[init(0)]
count: u8,
longfi: LongFi,
}
#[init(spawn = [send_ping], resources = [buffer])]
fn init(ctx: init::Context) -> init::LateResources {
static mut BINDINGS: Option<catena_4610::LongFiBindings> = None;
let device = ctx.device;
let mut rcc = device.RCC.freeze(rcc::Config::hsi16());
let mut syscfg = syscfg::SYSCFG::new(device.SYSCFG, &mut rcc);
let gpioa = device.GPIOA.split(&mut rcc);
let gpiob = device.GPIOB.split(&mut rcc);
let gpioc = device.GPIOC.split(&mut rcc);
let (tx_pin, rx_pin, serial_peripheral) = (gpioa.pa9, gpioa.pa10, device.USART1);
let mut serial = serial_peripheral
.usart(tx_pin, rx_pin, serial::Config::default(), &mut rcc)
.unwrap();
// listen for incoming bytes which will trigger transmits
serial.listen(serial::Event::Rxne);
let (mut tx, rx) = serial.split();
write!(tx, "LongFi Device Test\r\n").unwrap();
let mut exti = Exti::new(device.EXTI);
let hsi48 = rcc.enable_hsi48(&mut syscfg, device.CRS);
let rng = Rng::new(device.RNG, &mut rcc, hsi48);
let radio_irq = catena_4610::initialize_radio_irq(
gpiob.pb4.into_floating_input(),
&mut syscfg,
&mut exti,
);
*BINDINGS = Some(catena_4610::LongFiBindings::new(
device.SPI1,
&mut rcc,
rng,
gpiob.pb3,
gpioa.pa6,
gpioa.pa7,
gpioa.pa15.into_floating_input(),
gpioc.pc0.into_floating_input(),
gpioa.pa1.into_floating_input(),
gpioc.pc2.into_floating_input(),
gpioc.pc1.into_floating_input(),
Some(gpioa.pa8.into_push_pull_output()),
));
let rf_config = Config {
oui: 1,
device_id: 3,
auth_mode: longfi_device::AuthMode::PresharedKey128,
};
let mut longfi_radio;
if let Some(bindings) = BINDINGS {
longfi_radio = unsafe {
LongFi::new(
Radio::sx1276(),
&mut bindings.bindings,
rf_config,
&PRESHARED_KEY,
)
.unwrap()
};
} else {
panic!("No bindings exist");
}
longfi_radio.set_buffer(ctx.resources.buffer);
write!(tx, "Going to main loop\r\n").unwrap();
// Return the initialised resources.
init::LateResources {
int: exti,
radio_irq,
debug_uart: tx,
uart_rx: rx,
longfi: longfi_radio,
}
}
#[task(capacity = 4, priority = 2, resources = [debug_uart, buffer, longfi])]
fn radio_event(ctx: radio_event::Context, event: RfEvent) {
let longfi_radio = ctx.resources.longfi;
let client_event = longfi_radio.handle_event(event);
match client_event {
ClientEvent::ClientEvent_TxDone => {
write!(ctx.resources.debug_uart, "Transmit Done!\r\n").unwrap();
}
ClientEvent::ClientEvent_Rx => {
// get receive buffer
let rx_packet = longfi_radio.get_rx();
write!(ctx.resources.debug_uart, "Received packet\r\n").unwrap();
write!(
ctx.resources.debug_uart,
" Length = {}\r\n",
rx_packet.len
)
.unwrap();
write!(
ctx.resources.debug_uart,
" Rssi = {}\r\n",
rx_packet.rssi
)
.unwrap();
write!(
ctx.resources.debug_uart,
" Snr = {}\r\n",
rx_packet.snr
)
.unwrap();
unsafe {
for i in 0..rx_packet.len {
write!(
ctx.resources.debug_uart,
"{:X} ",
*rx_packet.buf.offset(i as isize)
)
.unwrap();
}
write!(ctx.resources.debug_uart, "\r\n").unwrap();
}
// give buffer back to library
longfi_radio.set_buffer(ctx.resources.buffer);
}
ClientEvent::ClientEvent_None => {}
}
}
#[task(capacity = 4, priority = 2, resources = [debug_uart, count, longfi])]
fn send_ping(ctx: send_ping::Context) {
write!(ctx.resources.debug_uart, "Sending Ping\r\n").unwrap();
let packet: [u8; 72] = [
0xDE,
0xAD,
0xBE,
0xEF,
*ctx.resources.count,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xa1,
0xa2,
0xa3,
0xa4,
0xDE,
0xAD,
0xBE,
0xEF,
*ctx.resources.count,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xa1,
0xa2,
0xa3,
0xa4,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xDE,
0xAD,
0xBE,
0xEF,
0xa1,
0xa2,
0xa3,
0xa4,
0xBE,
0xEF,
0xa1,
0xa2,
0xa3,
0xa4,
];
*ctx.resources.count += 1;
ctx.resources.longfi.send(&packet);
}
#[task(binds = USART1, priority=1, resources = [uart_rx], spawn = [send_ping])]
fn USART1(ctx: USART1::Context) {
let rx = ctx.resources.uart_rx;
rx.read().unwrap();
ctx.spawn.send_ping().unwrap();
}
#[task(binds = EXTI4_15, priority = 1, resources = [radio_irq, int], spawn = [radio_event])]
fn EXTI4_15(ctx: EXTI4_15::Context) {
let line = exti::GpioLine::from_raw_line(
ctx.resources.radio_irq.pin_number(),
);
Exti::unpend(line.unwrap());
ctx.spawn.radio_event(RfEvent::DIO0).unwrap();
}
// Interrupt handlers used to dispatch software tasks
extern "C" {
fn USART4_USART5();
}
};
| 28.523466 | 96 | 0.470447 |
641a3be8a8a11d6053ab5555e1d2dd4de820bed7 | 1,207 | use super::button::dom::render_button;
use super::state::*;
use dominator::{html, Dom, DomBuilder};
use std::rc::Rc;
use web_sys::HtmlElement;
pub fn render_settings(state: Rc<ModuleSettings>) -> Dom {
_render_mixin(
state,
None::<fn(DomBuilder<HtmlElement>) -> DomBuilder<HtmlElement>>,
)
}
pub fn render_button_mixin<F>(state: Rc<ModuleSettings>, mixin: F) -> Dom
where
F: FnOnce(DomBuilder<HtmlElement>) -> DomBuilder<HtmlElement>,
{
_render_mixin(state, Some(mixin))
}
fn _render_mixin<F>(state: Rc<ModuleSettings>, _mixin: Option<F>) -> Dom
where
F: FnOnce(DomBuilder<HtmlElement>) -> DomBuilder<HtmlElement>,
{
html!("module-settings-container", {
.children(
state.lines.iter().map(|(line_kind, buttons)| {
html!("module-settings-line", {
.property("kind", line_kind.as_str_id())
.children(buttons.iter().map(|button| {
match button {
Some(button) => render_button(button.clone()),
None => html!("span"),
}
}))
})
})
)
})
}
| 29.439024 | 74 | 0.547639 |
f5f7af807f713257eee4698812ab963126e5dddd | 73 | use martian_derive::martian_filetype;
martian_filetype! {}
fn main() {} | 14.6 | 37 | 0.753425 |
efd917d83dd0f8b6431bae9d293e52705b963f61 | 2,250 | use std::{ error::Error, fmt::Debug };
/// A type representing a number that can be used as a generic type argument
pub trait TypeNum: Debug + Default {
/// The value represented by this type
const VALUE: usize;
/// The value represented by this type
fn value() -> usize {
Self::VALUE
}
}
/// Creates a new type number representing `$value` with `$name` as identifier
#[macro_export]
macro_rules! type_num {
($value:expr => $name:ident) => (
/// A type representing a number that can be used as a generic type argument
#[derive(Debug, Default)]
pub struct $name;
impl $crate::type_math::TypeNum for $name {
const VALUE: usize = $value;
}
);
($($value:expr => $name:ident),+) => ( $(type_num!($value => $name);)+ )
}
type_num! {
0 => _0,
1 => _1, 2 => _2, 4 => _4, 8 => _8,
12 => _12, 16 => _16, 24 => _24, 32 => _32,
48 => _48, 64 => _64, 96 => _96, 128 => _128,
256 => _256, 384 => _384, 512 => _512, 1024 => _1024,
2048 => _2048, 4096 => _4096, 8192 => _8192, 16384 => _16384,
32768 => _32768, 65536 => _65536
}
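// A minimal sketch of using one of these markers as a generic argument (the
// `Buffer` type below is illustrative, not part of this module):
//
//     struct Buffer<N: TypeNum>(core::marker::PhantomData<N>);
//     impl<N: TypeNum> Buffer<N> {
//         fn capacity(&self) -> usize { N::value() }
//     }
//     assert_eq!(Buffer::<_64>(Default::default()).capacity(), 64);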
/// A type representing an operator that can be used as a generic type argument
pub trait Operator: Debug + Default {
/// Performs the operation represented by this type between `a` and `b`
fn r#do(a: usize, b: usize) -> Result<usize, Box<Error + 'static>>;
}
/// An operator representing an addition
#[derive(Debug, Default)]
pub struct Add;
impl Operator for Add {
fn r#do(a: usize, b: usize) -> Result<usize, Box<Error + 'static>> {
Ok(a.checked_add(b).ok_or("Integer overflow")?)
}
}
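// Sketch of dispatching an operator generically (the `apply` helper below is
// illustrative, not part of this module):
//
//     fn apply<O: Operator>(a: usize, b: usize) -> Result<usize, Box<Error + 'static>> {
//         O::r#do(a, b)
//     }
//     assert_eq!(apply::<Add>(2, 3).unwrap(), 5);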
/// An operator representing a subtraction
#[derive(Debug, Default)]
pub struct Sub;
impl Operator for Sub {
fn r#do(a: usize, b: usize) -> Result<usize, Box<Error + 'static>> {
Ok(a.checked_sub(b).ok_or("Integer underflow")?)
}
}
/// An operator representing a multiplication
#[derive(Debug, Default)]
pub struct Mul;
impl Operator for Mul {
fn r#do(a: usize, b: usize) -> Result<usize, Box<Error + 'static>> {
Ok(a.checked_mul(b).ok_or("Integer overflow")?)
}
}
/// An operator representing a division
#[derive(Debug, Default)]
pub struct Div;
impl Operator for Div {
fn r#do(a: usize, b: usize) -> Result<usize, Box<Error + 'static>> {
Ok(a.checked_div(b).ok_or("Division by zero")?)
}
} | 28.846154 | 79 | 0.649778 |
9b111629b41924a0de39136ee00cfd3a8c30cc7f | 6,180 | #[doc = "Reader of register DCHPRI7"]
pub type R = crate::R<u8, super::DCHPRI7>;
#[doc = "Writer for register DCHPRI7"]
pub type W = crate::W<u8, super::DCHPRI7>;
#[doc = "Register DCHPRI7 `reset()`'s with value 0x07"]
impl crate::ResetValue for super::DCHPRI7 {
type Type = u8;
#[inline(always)]
fn reset_value() -> Self::Type {
0x07
}
}
#[doc = "Reader of field `CHPRI`"]
pub type CHPRI_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CHPRI`"]
pub struct CHPRI_W<'a> {
w: &'a mut W,
}
impl<'a> CHPRI_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | ((value as u8) & 0x0f);
self.w
}
}
#[doc = "Disable Preempt Ability. This field resets to 0.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DPA_A {
#[doc = "0: Channel n can suspend a lower priority channel."]
_0 = 0,
#[doc = "1: Channel n cannot suspend any channel, regardless of channel priority."]
_1 = 1,
}
impl From<DPA_A> for bool {
#[inline(always)]
fn from(variant: DPA_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `DPA`"]
pub type DPA_R = crate::R<bool, DPA_A>;
impl DPA_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DPA_A {
match self.bits {
false => DPA_A::_0,
true => DPA_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == DPA_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == DPA_A::_1
}
}
#[doc = "Write proxy for field `DPA`"]
pub struct DPA_W<'a> {
w: &'a mut W,
}
impl<'a> DPA_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: DPA_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Channel n can suspend a lower priority channel."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(DPA_A::_0)
}
#[doc = "Channel n cannot suspend any channel, regardless of channel priority."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(DPA_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u8) & 0x01) << 6);
self.w
}
}
#[doc = "Enable Channel Preemption. This field resets to 0.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ECP_A {
#[doc = "0: Channel n cannot be suspended by a higher priority channel's service request."]
_0 = 0,
#[doc = "1: Channel n can be temporarily suspended by the service request of a higher priority channel."]
_1 = 1,
}
impl From<ECP_A> for bool {
#[inline(always)]
fn from(variant: ECP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `ECP`"]
pub type ECP_R = crate::R<bool, ECP_A>;
impl ECP_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ECP_A {
match self.bits {
false => ECP_A::_0,
true => ECP_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == ECP_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == ECP_A::_1
}
}
#[doc = "Write proxy for field `ECP`"]
pub struct ECP_W<'a> {
w: &'a mut W,
}
impl<'a> ECP_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ECP_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Channel n cannot be suspended by a higher priority channel's service request."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(ECP_A::_0)
}
#[doc = "Channel n can be temporarily suspended by the service request of a higher priority channel."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(ECP_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u8) & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - Channel n Arbitration Priority"]
#[inline(always)]
pub fn chpri(&self) -> CHPRI_R {
CHPRI_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bit 6 - Disable Preempt Ability. This field resets to 0."]
#[inline(always)]
pub fn dpa(&self) -> DPA_R {
DPA_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Enable Channel Preemption. This field resets to 0."]
#[inline(always)]
pub fn ecp(&self) -> ECP_R {
ECP_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:3 - Channel n Arbitration Priority"]
#[inline(always)]
pub fn chpri(&mut self) -> CHPRI_W {
CHPRI_W { w: self }
}
#[doc = "Bit 6 - Disable Preempt Ability. This field resets to 0."]
#[inline(always)]
pub fn dpa(&mut self) -> DPA_W {
DPA_W { w: self }
}
#[doc = "Bit 7 - Enable Channel Preemption. This field resets to 0."]
#[inline(always)]
pub fn ecp(&mut self) -> ECP_W {
ECP_W { w: self }
}
}
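// Usage sketch: this reader/writer pair is normally driven through the owning
// peripheral's register accessor (the `dma` handle below is an assumption; only
// the field API defined above is known from this file):
//
//     // dma.dchpri7.modify(|_, w| unsafe { w.chpri().bits(3) }.ecp()._1().dpa()._0());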
| 29.2891 | 109 | 0.550647 |
d6cb7db9091f052e2e9f9c7710171b7c1ffece9c | 2,743 | mod expr;
use std::collections::HashMap;
use clap::Clap;
use crate::expr::Expr;
#[derive(Debug, Clap)]
struct Opts {
#[clap(short, long, default_value = "6")]
dice: u32,
#[clap(short, long, default_value = "20")]
goal: u32,
}
#[derive(Debug, Eq, PartialEq, Hash)]
struct State {
original_pos: u32,
pos: u32,
dice: Option<u32>,
}
impl State {
pub const fn new(original_pos: u32) -> State {
State {
original_pos,
pos: original_pos,
dice: None,
}
}
pub const fn dice(&self, dice: u32) -> State {
State {
original_pos: self.original_pos,
pos: self.original_pos + dice,
dice: Some(dice),
}
}
pub fn go_success(&self) -> State {
State {
original_pos: self.original_pos,
pos: self.pos + self.dice.unwrap(),
dice: self.dice,
}
}
pub const fn stop(&self) -> State {
State {
original_pos: self.pos,
pos: self.pos,
dice: None,
}
}
}
#[derive(Debug)]
struct Solver {
opts: Opts,
mem: HashMap<u32, f64>,
}
impl Solver {
pub fn new(opts: Opts) -> Solver {
let n = (opts.goal + 1) * (opts.goal + 1) * opts.dice;
Solver {
opts,
mem: HashMap::with_capacity(n as usize),
}
}
pub fn solve(&mut self, n: u32) -> f64 {
self.dfs_a(State::new(n))
}
const fn dice_n(&self) -> f64 {
self.opts.dice as f64
}
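    // Value of a position with no die in hand: averages `dfs_b` over every
    // possible first roll, resolves the resulting self-referential linear
    // expression with `Expr::bisect`, and memoizes the result per position.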
fn dfs_a(&mut self, state: State) -> f64 {
assert!(state.dice.is_none());
if state.pos >= self.opts.goal {
return 0.0;
}
if let Some(result) = self.mem.get(&state.pos) {
return result.clone();
}
let mut s = Expr::linear(0.0, 1.0);
for dice in 1..=self.opts.dice {
s = s + self.dfs_b(state.dice(dice)) / self.dice_n();
}
let r = s.bisect();
self.mem.insert(state.pos, r.clone());
r
}
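    // Value of a position with a committed die: either keep going (success with
    // probability 1/dice, otherwise a fixed failure term) or bank the current
    // position via `dfs_a`, taking whichever linear expression is smaller.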
fn dfs_b(&mut self, state: State) -> Expr {
assert!(state.dice.is_some());
if state.pos >= self.opts.goal {
return Expr::linear(0.0, 0.0);
}
let stop = self.dfs_a(state.stop());
let go_success = self.dfs_b(state.go_success());
let go_fail = Expr::linear(1.0, 1.0);
let go = go_success / self.dice_n() + go_fail * ((self.dice_n() - 1.0) / self.dice_n());
go.min(Expr::linear(0.0, stop))
}
}
fn main() {
let opts: Opts = Opts::parse();
let goal = opts.goal;
let mut solver = Solver::new(opts);
for i in 0..goal {
println!("{} {}", i, solver.solve(i));
}
}
| 23.05042 | 96 | 0.504922 |
161bfaab48aded5312667e89da11f1c9fe9e729b | 194 | // Checks that `const fn` with const params can be used.
// run-pass
const fn const_u32_identity<const X: u32>() -> u32 {
X
}
fn main() {
assert_eq!(const_u32_identity::<18>(), 18);
}
| 17.636364 | 56 | 0.634021 |
ac0645cd403a1ee68f2e7b00aa4270f1824c952c | 945 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// pretty-expanded FIXME #23616
use std::slice;
pub struct PhfMapEntries<'a, T: 'a> {
iter: slice::Iter<'a, (&'static str, T)>,
}
impl<'a, T> Iterator for PhfMapEntries<'a, T> {
type Item = (&'static str, &'a T);
fn next(&mut self) -> Option<(&'static str, &'a T)> {
self.iter.by_ref().map(|&(key, ref value)| (key, value)).next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
fn main() {}
| 28.636364 | 71 | 0.649735 |
f5eca79849860f36633e10a16d2c00487f3ff50e | 5,060 | use crate::features::imgui::{ExtractedImGuiData, ImGuiRenderFeature, ImGuiUniformBufferObject};
use crate::render_contexts::{RenderJobExtractContext, RenderJobWriteContext, RenderJobPrepareContext};
use renderer::nodes::{
FramePacket, RenderView, PrepareJob, RenderFeatureIndex, RenderFeature, ExtractJob,
};
use crate::features::imgui::prepare::ImguiPrepareJobImpl;
use renderer::vulkan::VkDeviceContext;
use renderer::assets::resources::{PipelineSwapchainInfo, DescriptorSetAllocatorRef};
use atelier_assets::loader::handle::Handle;
use minimum_sdl2::imgui::Sdl2ImguiManager;
use ash::vk::Extent2D;
use renderer::assets::{ImageViewResource, ResourceArc};
use renderer::assets::MaterialAsset;
// This is almost copy-pasted from glam. I wanted to avoid pulling in the entire library for a
// single function
pub fn orthographic_rh_gl(
left: f32,
right: f32,
bottom: f32,
top: f32,
near: f32,
far: f32,
) -> [[f32; 4]; 4] {
let a = 2.0 / (right - left);
let b = 2.0 / (top - bottom);
let c = -2.0 / (far - near);
let tx = -(right + left) / (right - left);
let ty = -(top + bottom) / (top - bottom);
let tz = -(far + near) / (far - near);
[
[a, 0.0, 0.0, 0.0],
[0.0, b, 0.0, 0.0],
[0.0, 0.0, c, 0.0],
[tx, ty, tz, 1.0],
]
}
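// A small added sanity check (not part of the original feature code): with x and y
// spanning [0, 2] and near/far of -1/1, the matrix above should have unit scale on
// x and y and a translation of -1 on both axes.
#[cfg(test)]
mod orthographic_rh_gl_tests {
    use super::orthographic_rh_gl;

    #[test]
    fn maps_unit_box_as_expected() {
        let m = orthographic_rh_gl(0.0, 2.0, 0.0, 2.0, -1.0, 1.0);
        assert_eq!(m[0][0], 1.0); // x scale: 2 / (right - left)
        assert_eq!(m[1][1], 1.0); // y scale: 2 / (top - bottom)
        assert_eq!(m[2][2], -1.0); // z scale: -2 / (far - near)
        assert_eq!(m[3][0], -1.0); // x translation
        assert_eq!(m[3][1], -1.0); // y translation
        assert_eq!(m[3][2], 0.0); // z translation: -(far + near) / (far - near)
    }
}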
pub struct ImGuiExtractJobImpl {
device_context: VkDeviceContext,
descriptor_set_allocator: DescriptorSetAllocatorRef,
pipeline_info: PipelineSwapchainInfo,
extents: Extent2D,
imgui_material: Handle<MaterialAsset>,
font_atlas: ResourceArc<ImageViewResource>,
}
impl ImGuiExtractJobImpl {
pub fn new(
device_context: VkDeviceContext,
descriptor_set_allocator: DescriptorSetAllocatorRef,
pipeline_info: PipelineSwapchainInfo,
extents: Extent2D,
imgui_material: &Handle<MaterialAsset>,
font_atlas: ResourceArc<ImageViewResource>,
) -> Self {
ImGuiExtractJobImpl {
device_context,
descriptor_set_allocator,
pipeline_info,
extents,
imgui_material: imgui_material.clone(),
font_atlas,
}
}
}
impl ExtractJob<RenderJobExtractContext, RenderJobPrepareContext, RenderJobWriteContext>
for ImGuiExtractJobImpl
{
fn extract(
mut self: Box<Self>,
extract_context: &RenderJobExtractContext,
_frame_packet: &FramePacket,
_views: &[&RenderView],
) -> Box<dyn PrepareJob<RenderJobPrepareContext, RenderJobWriteContext>> {
let imgui_draw_data = extract_context
.resources
.get::<Sdl2ImguiManager>()
.unwrap()
.copy_draw_data();
let framebuffer_scale = match &imgui_draw_data {
Some(data) => data.framebuffer_scale,
None => [1.0, 1.0],
};
let view_proj = orthographic_rh_gl(
0.0,
self.extents.width as f32 / framebuffer_scale[0],
0.0,
self.extents.height as f32 / framebuffer_scale[1],
-100.0,
100.0,
);
let ubo = ImGuiUniformBufferObject { view_proj };
let dyn_resource_allocator = extract_context
.resource_manager
.create_dyn_resource_allocator_set();
let per_pass_layout =
extract_context
.resource_manager
.get_descriptor_set_info(&self.imgui_material, 0, 0);
let mut per_pass_descriptor_set = self
.descriptor_set_allocator
.create_dyn_descriptor_set_uninitialized(&per_pass_layout.descriptor_set_layout)
.unwrap();
per_pass_descriptor_set.set_buffer_data(0, &ubo);
per_pass_descriptor_set
.flush(&mut self.descriptor_set_allocator)
.unwrap();
let per_image_layout =
extract_context
.resource_manager
.get_descriptor_set_info(&self.imgui_material, 0, 1);
let mut per_image_descriptor_set = self
.descriptor_set_allocator
.create_dyn_descriptor_set_uninitialized(&per_image_layout.descriptor_set_layout)
.unwrap();
per_image_descriptor_set.set_image(0, self.font_atlas);
per_image_descriptor_set
.flush(&mut self.descriptor_set_allocator)
.unwrap();
let per_pass_descriptor_set = per_pass_descriptor_set.descriptor_set().clone();
let per_image_descriptor_sets = vec![per_image_descriptor_set.descriptor_set().clone()];
Box::new(ImguiPrepareJobImpl::new(
self.device_context,
self.pipeline_info,
dyn_resource_allocator,
per_pass_descriptor_set,
per_image_descriptor_sets,
ExtractedImGuiData { imgui_draw_data },
))
}
fn feature_debug_name(&self) -> &'static str {
ImGuiRenderFeature::feature_debug_name()
}
fn feature_index(&self) -> RenderFeatureIndex {
ImGuiRenderFeature::feature_index()
}
}
| 33.509934 | 102 | 0.641502 |
f4b121e33ae2df65bf4381689865e25fad03febf | 6,672 | // Copyright 2018 The Epic Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Rust Bitcoin Library
// Written in 2014 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! Macros to support Rust BIP-32 code (though could conceivably be used for other things)
#[macro_export]
macro_rules! impl_array_newtype {
($thing:ident, $ty:ty, $len:expr) => {
impl $thing {
#[inline]
/// Converts the object to a raw pointer
pub fn as_ptr(&self) -> *const $ty {
let &$thing(ref dat) = self;
dat.as_ptr()
}
#[inline]
/// Converts the object to a mutable raw pointer
pub fn as_mut_ptr(&mut self) -> *mut $ty {
let &mut $thing(ref mut dat) = self;
dat.as_mut_ptr()
}
#[inline]
/// Returns the length of the object as an array
pub fn len(&self) -> usize {
$len
}
#[inline]
/// Returns whether the object, as an array, is empty. Always false.
pub fn is_empty(&self) -> bool {
false
}
#[inline]
/// Returns the underlying bytes.
pub fn as_bytes(&self) -> &[$ty; $len] {
&self.0
}
#[inline]
/// Returns the underlying bytes.
pub fn to_bytes(&self) -> [$ty; $len] {
self.0.clone()
}
#[inline]
/// Returns the underlying bytes.
pub fn into_bytes(self) -> [$ty; $len] {
self.0
}
}
impl<'a> From<&'a [$ty]> for $thing {
fn from(data: &'a [$ty]) -> $thing {
assert_eq!(data.len(), $len);
let mut ret = [0; $len];
ret.copy_from_slice(&data[..]);
$thing(ret)
}
}
impl ::std::ops::Index<usize> for $thing {
type Output = $ty;
#[inline]
fn index(&self, index: usize) -> &$ty {
let &$thing(ref dat) = self;
&dat[index]
}
}
impl_index_newtype!($thing, $ty);
impl PartialEq for $thing {
#[inline]
fn eq(&self, other: &$thing) -> bool {
&self[..] == &other[..]
}
}
impl Eq for $thing {}
impl PartialOrd for $thing {
#[inline]
fn partial_cmp(&self, other: &$thing) -> Option<::std::cmp::Ordering> {
Some(self.cmp(&other))
}
}
impl Ord for $thing {
#[inline]
fn cmp(&self, other: &$thing) -> ::std::cmp::Ordering {
// manually implement comparison to get little-endian ordering
// (we need this for our numeric types; non-numeric ones shouldn't
// be ordered anyway except to put them in BTrees or whatever, and
                // they don't care how we order as long as we're consistent).
for i in 0..$len {
if self[$len - 1 - i] < other[$len - 1 - i] {
return ::std::cmp::Ordering::Less;
}
if self[$len - 1 - i] > other[$len - 1 - i] {
return ::std::cmp::Ordering::Greater;
}
}
::std::cmp::Ordering::Equal
}
}
#[cfg_attr(feature = "clippy", allow(expl_impl_clone_on_copy))] // we don't define the `struct`, we have to explicitly impl
impl Clone for $thing {
#[inline]
fn clone(&self) -> $thing {
$thing::from(&self[..])
}
}
impl Copy for $thing {}
impl ::std::hash::Hash for $thing {
#[inline]
fn hash<H>(&self, state: &mut H)
where
H: ::std::hash::Hasher,
{
(&self[..]).hash(state);
}
fn hash_slice<H>(data: &[$thing], state: &mut H)
where
H: ::std::hash::Hasher,
{
for d in data.iter() {
(&d[..]).hash(state);
}
}
}
};
}
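// Usage sketch (the `Fingerprint` type is illustrative, not defined in this crate):
//
//     pub struct Fingerprint([u8; 4]);
//     impl_array_newtype!(Fingerprint, u8, 4);
//     impl_array_newtype_show!(Fingerprint);
//     impl_array_newtype_encodable!(Fingerprint, u8, 4);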
#[macro_export]
macro_rules! impl_array_newtype_encodable {
($thing:ident, $ty:ty, $len:expr) => {
#[cfg(feature = "serde")]
impl<'de> $crate::serde::Deserialize<'de> for $thing {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: $crate::serde::Deserializer<'de>,
{
use $crate::std::fmt::{self, Formatter};
struct Visitor;
impl<'de> $crate::serde::de::Visitor<'de> for Visitor {
type Value = $thing;
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
formatter.write_str("a fixed size array")
}
#[inline]
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: $crate::serde::de::SeqAccess<'de>,
{
let mut ret: [$ty; $len] = [0; $len];
for item in ret.iter_mut() {
*item = match seq.next_element()? {
Some(c) => c,
None => {
return Err($crate::serde::de::Error::custom("end of stream"))
}
};
}
Ok($thing(ret))
}
}
deserializer.deserialize_seq(Visitor)
}
}
#[cfg(feature = "serde")]
impl $crate::serde::Serialize for $thing {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: $crate::serde::Serializer,
{
let &$thing(ref dat) = self;
(&dat[..]).serialize(serializer)
}
}
};
}
#[macro_export]
macro_rules! impl_array_newtype_show {
($thing:ident) => {
impl ::std::fmt::Debug for $thing {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, concat!(stringify!($thing), "({:?})"), &self[..])
}
}
};
}
#[macro_export]
macro_rules! impl_index_newtype {
($thing:ident, $ty:ty) => {
impl ::std::ops::Index<::std::ops::Range<usize>> for $thing {
type Output = [$ty];
#[inline]
fn index(&self, index: ::std::ops::Range<usize>) -> &[$ty] {
&self.0[index]
}
}
impl ::std::ops::Index<::std::ops::RangeTo<usize>> for $thing {
type Output = [$ty];
#[inline]
fn index(&self, index: ::std::ops::RangeTo<usize>) -> &[$ty] {
&self.0[index]
}
}
impl ::std::ops::Index<::std::ops::RangeFrom<usize>> for $thing {
type Output = [$ty];
#[inline]
fn index(&self, index: ::std::ops::RangeFrom<usize>) -> &[$ty] {
&self.0[index]
}
}
impl ::std::ops::Index<::std::ops::RangeFull> for $thing {
type Output = [$ty];
#[inline]
fn index(&self, _: ::std::ops::RangeFull) -> &[$ty] {
&self.0[..]
}
}
};
}
| 24.529412 | 125 | 0.586481 |
fe7e8d027b7ffd62cab44765810a484b439d98da | 50,798 | // Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub mod dirent;
pub mod fs;
pub mod hostfileop;
pub mod hostinodeop;
pub mod ioctl;
pub mod socket_iovec;
pub mod tty;
pub mod util;
//pub mod control;
use alloc::collections::btree_map::BTreeMap;
use alloc::string::String;
use alloc::string::ToString;
use core::any::Any;
use self::hostinodeop::*;
use super::super::super::auth::*;
use super::dirent::*;
use super::inode::*;
use super::mount::*;
pub struct SuperOperations {
pub mountSourceOperations: SimpleMountSourceOperations,
pub root: String,
pub inodeMapping: BTreeMap<u64, String>,
pub mounter: FileOwner,
pub dontTranslateOwnership: bool,
}
impl DirentOperations for SuperOperations {
fn Revalidate(&self, _name: &str, _parent: &Inode, _child: &Inode) -> bool {
return self.mountSourceOperations.revalidate;
}
fn Keep(&self, _dirent: &Dirent) -> bool {
//error!("SuperOperations keep ...");
//todo: check
//return false;
return true;
}
fn CacheReadDir(&self) -> bool {
return self.mountSourceOperations.cacheReaddir;
}
}
impl MountSourceOperations for SuperOperations {
fn as_any(&self) -> &Any {
return self;
}
fn Destroy(&mut self) {}
fn ResetInodeMappings(&mut self) {
self.inodeMapping.clear();
}
fn SaveInodeMapping(&mut self, inode: &Inode, path: &str) {
let sattr = inode
.lock()
.InodeOp
.as_any()
.downcast_ref::<HostInodeOp>()
.expect("ReadDirAll: not HostInodeOp")
.StableAttr();
self.inodeMapping.insert(sattr.InodeId, path.to_string());
}
}
#[cfg(test1)]
mod tests {
// Note this useful idiom: importing names from outer (for mod tests) scope.
use crate::qlib::mutex::*;
use alloc::sync::Arc;
use std::fs::*;
use std::str;
use tempfile::Builder;
use self::util::*;
use super::*;
//use super::super::mount::*;
//use super::super::attr::*;
//use super::super::inode::*;
use super::super::super::libcDef::*;
use super::super::super::syscalls::sys_file;
use super::super::super::syscalls::sys_read;
use super::super::super::syscalls::sys_stat;
use super::super::super::syscalls::sys_write;
use super::super::super::task::*;
use super::super::super::util::*;
use super::super::super::Common::*;
use super::super::dentry::*;
use super::super::filesystems::*;
use super::super::flags::*;
use super::super::path::*;
use super::super::ramfs::tree::*;
fn newTestMountNamespace() -> Result<(MountNs, String)> {
//let p = Builder::new().prefix("root").tempdir().unwrap();
//let rootStr = p.path().to_str().unwrap().to_string();
let root = "/tmp/root";
remove_dir_all(root).ok();
create_dir(root).unwrap();
let rootStr = root.to_string();
let fd = OpenAt(-100, &rootStr);
if fd < 0 {
return Err(Error::SysError(-fd));
}
let ms = MountSource::NewHostMountSource(
&rootStr,
&ROOT_OWNER,
&WhitelistFileSystem::New(),
&MountSourceFlags::default(),
false,
);
let root = Inode::NewHostInode(&Arc::new(QMutex::new(ms)), fd)?;
let mm = MountNs::New(&root);
return Ok((Arc::new(QMutex::new(mm)), rootStr));
}
// createTestDirs populates the root with some test files and directories.
// /a/a1.txt
// /a/a2.txt
// /b/b1.txt
// /b/c/c1.txt
// /symlinks/normal.txt
// /symlinks/to_normal.txt -> /symlinks/normal.txt
// /symlinks/recursive -> /symlinks
fn createTestDirs(m: &MountNs, task: &Task) -> Result<()> {
let r = m.lock().Root().clone();
r.CreateDirectory(
task,
&r,
&"a".to_string(),
&FilePermissions::FromMode(FileMode(0o777)),
)?;
let a = r.Walk(task, &r, &"a".to_string())?;
let _a1 = a.Create(
task,
&r,
&"a1.txt".to_string(),
&FileFlags {
Read: true,
Write: true,
..Default::default()
},
&FilePermissions::FromMode(FileMode(0o666)),
)?;
let _a2 = a.Create(
task,
&r,
&"a2.txt".to_string(),
&FileFlags {
Read: true,
Write: true,
..Default::default()
},
&FilePermissions::FromMode(FileMode(0o666)),
)?;
r.CreateDirectory(
task,
&r,
&"b".to_string(),
&FilePermissions::FromMode(FileMode(0o0777)),
)?;
let b = r.Walk(task, &r, &"b".to_string())?;
let _b1 = b.Create(
task,
&r,
&"b1.txt".to_string(),
&FileFlags {
Read: true,
Write: true,
..Default::default()
},
&FilePermissions::FromMode(FileMode(0o666)),
)?;
b.CreateDirectory(
task,
&r,
&"c".to_string(),
&FilePermissions::FromMode(FileMode(0o0777)),
)?;
let c = b.Walk(task, &r, &"c".to_string())?;
let _c1 = c.Create(
task,
&r,
&"c1.txt".to_string(),
&FileFlags {
Read: true,
Write: true,
..Default::default()
},
&FilePermissions::FromMode(FileMode(0o666)),
)?;
r.CreateDirectory(
task,
&r,
&"symlinks".to_string(),
&FilePermissions::FromMode(FileMode(0o0777)),
)?;
let symlinks = r.Walk(task, &r, &"symlinks".to_string())?;
let _normal = symlinks.Create(
task,
&r,
&"normal.txt".to_string(),
&FileFlags {
Read: true,
Write: true,
..Default::default()
},
&FilePermissions::FromMode(FileMode(0o666)),
)?;
symlinks.CreateLink(
task,
&r,
&"/symlinks/normal.txt".to_string(),
&"to_normal.txt".to_string(),
)?;
symlinks.CreateLink(task, &r, &"/symlinks".to_string(), &"recursive".to_string())?;
return Ok(());
}
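    // Recursively walks `base` under the mount namespace and collects the full
    // path of every entry found, descending into directories and skipping the
    // "." and ".." entries.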
fn allPaths(task: &Task, m: &MountNs, base: &str) -> Result<Vec<String>> {
let mut paths: Vec<String> = Vec::new();
let root = m.lock().Root().clone();
let mut maxTravelsals = 1;
let d = m
.lock()
.FindLink(&task, &root, None, &base, &mut maxTravelsals)?;
let inode = d.lock().Inode.clone();
let sattr = inode.StableAttr();
if sattr.IsDir() {
let dir = inode.GetFile(
&d,
&FileFlags {
Read: true,
..Default::default()
},
)?;
let iter = dir.lock().FileOp.clone();
let mut serializer = CollectEntriesSerilizer::New();
let mut dirCtx = DirCtx::New(&mut serializer);
DirentReadDir(task, &d, &(*iter.borrow()), &root, &mut dirCtx, 0)?;
for (name, _) in &serializer.Entries {
if name.as_str() == "." || name.as_str() == ".." {
continue;
}
let fullName = Join(&base, name);
let mut subpaths = allPaths(&task, m, &fullName)?;
paths.append(&mut subpaths);
paths.push(fullName);
}
}
return Ok(paths);
}
pub struct TestCase {
pub desc: &'static str,
pub paths: Vec<&'static str>,
pub want: Vec<&'static str>,
}
fn ToStrs(arr: &Vec<&'static str>) -> Vec<String> {
let mut res = Vec::new();
for s in arr {
res.push(s.to_string())
}
return res;
}
/*#[test]
fn TestWhitelist() {
let testCase = TestCase {
desc: "root",
paths: vec!["/"],
want: vec!["/a", "/a/a1.txt", "/a/a2.txt", "/b", "/b/b1.txt", "/b/c", "/b/c/c1.txt", "/symlinks", "/symlinks/normal.txt", "/symlinks/to_normal.txt", "/symlinks/recursive"]
};
let (m, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = m.lock().root.clone();
createTestDirs(&m).unwrap();
InstallWhitelist(&task, &m, &ToStrs(&testCase.paths)).unwrap();
let got = allPaths(&task, &m, &"/".to_string()).unwrap();
println!("got count is {}", got.len());
for s in &got {
println!("get folder {}", s);
}
assert!(got == ToStrs(&testCase.want));
//assert!(1 == 0);
}*/
#[test]
fn TestRootPath() {
let rootPath = Builder::new().prefix("root").tempdir().unwrap();
//let rootPath = rootDir.path().to_str().to_string();
let whitelisted = rootPath.path().join("white");
let _ = File::create(&whitelisted).unwrap();
let blacklisted = rootPath.path().join("black");
let _ = File::create(&blacklisted).unwrap();
let mut hostFS = WhitelistFileSystem::New();
let data = format!(
"{}={},{}={}",
ROOT_PATH_KEY,
rootPath.path().to_str().unwrap(),
WHITELIST_KEY,
whitelisted.as_path().to_str().unwrap()
);
let mut task = Task::default();
let inode = hostFS
.Mount(&task, &"".to_owned(), &MountSourceFlags::default(), &data)
.unwrap();
let mm = Arc::new(QMutex::new(MountNs::New(&inode)));
hostFS.InstallWhitelist(&task, &mm).unwrap();
let rootDir = mm.lock().Root();
println!(
"after install withlist: children count is {}",
&rootDir.lock().Children.len()
);
println!("get rootdir");
task.root = rootDir.clone();
let inode = rootDir.lock().Inode.clone();
println!("the rootdir's frozen is {}", rootDir.lock().frozen);
let f = inode.GetFile(&rootDir, &FileFlags::default()).unwrap();
let mut c = CollectEntriesSerilizer::New();
f.lock().ReadDir(&task, &mut c).unwrap();
let got = c.Order();
println!("start print......, got couunt is {}", got.len());
for g in &got {
println!("val is {}", g);
}
let want = vec![".", "..", "white"];
assert!(got == want);
}
// createTestDirs populates the root with some test files and directories.
// /a/a1.txt
// /a/a2.txt
// /b/b1.txt
// /b/c/c1.txt
// /symlinks/normal.txt
// /symlinks/to_normal.txt -> /symlinks/normal.txt
// /symlinks/recursive -> /symlinks
#[test]
fn TestReadPath() {
let subdirs = vec![
("/a".to_string(), true),
("/b/c".to_string(), true),
("/symlinks".to_string(), true),
("/a/a1.txt".to_string(), false),
("/b/b1.txt".to_string(), false),
("/symlinks/normal.txt".to_string(), false),
("/symlinks/to_normal.txt".to_string(), false),
("/symlinks/recursive".to_string(), false),
("/symlinks/recursive/normal.txt".to_string(), false),
(
"/symlinks/recursive/recursive/normal.txt".to_string(),
false,
),
];
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
for p in &subdirs {
let mut maxTraversals = 2;
let dirent = mm
.lock()
.FindLink(&task, &task.root, None, &p.0, &mut maxTraversals)
.unwrap();
assert!(dirent.lock().Inode.StableAttr().IsDir() == p.1)
}
let mut maxTraversals = 2;
let mp = mm
.lock()
.FindLink(
&task,
&task.root,
None,
&"/symlinks".to_string(),
&mut maxTraversals,
)
.unwrap();
let memdirs = vec![
"/tmp".to_string(),
"/tmp/a/b".to_string(),
"/tmp/a/c/d".to_string(),
"/tmp/c".to_string(),
"/proc".to_string(),
"/dev/a/b".to_string(),
];
let mount = MountSource::NewPseudoMountSource();
let tree = MakeDirectoryTree(&task, &Arc::new(QMutex::new(mount)), &memdirs).unwrap();
mm.lock().Mount(&mp, &tree).unwrap();
let expectdir = vec![
("/symlinks/tmp".to_string(), true),
("/symlinks/tmp/a/b".to_string(), true),
("/symlinks/tmp/a/c/d".to_string(), true),
("/symlinks/tmp/c".to_string(), true),
("/symlinks/proc".to_string(), true),
("/symlinks/dev/a/b".to_string(), true),
];
println!("test...................");
for p in &expectdir {
let mut maxTraversals = 2;
let dirent = mm
.lock()
.FindLink(&task, &task.root, None, &p.0, &mut maxTraversals)
.unwrap();
assert!(dirent.lock().Inode.StableAttr().IsDir() == p.1)
}
}
// createTestDirs populates the root with some test files and directories.
// /a/a1.txt
// /a/a2.txt
// /b/b1.txt
// /b/c/c1.txt
// /symlinks/normal.txt
// /symlinks/to_normal.txt -> /symlinks/normal.txt
// /symlinks/recursive -> /symlinks
#[test]
fn TestSysFileOpenAtWriteRead1() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestSysFileOpenAtWriteRead2() {
//openat with dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
let filename = CString::New(&"a1.txt".to_string());
let fd1 = sys_file::openAt(&task, fd0, filename.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd2 = sys_file::openAt(&task, fd0, filename.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd2 == 1);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestSysFileCreateAt1() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
let filename = CString::New(&"/a/a.txt".to_string());
let fd1 = sys_file::createAt(
&task,
ATType::AT_FDCWD,
filename.Ptr(),
Flags::O_RDWR as u32,
FileMode(0o777),
)
.unwrap();
assert!(fd1 == 1);
println!("finish createAT........*************");
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd2 = sys_file::openAt(&task, fd0, filename.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd2 == 1);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestSysFileCreateAt2() {
//openat with dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
println!("start create, the fd0 is {}---------------------", fd0);
let filename = CString::New(&"xxx.txt".to_string());
let fd1 = sys_file::createAt(
&task,
fd0,
filename.Ptr(),
Flags::O_RDWR as u32,
FileMode(0o777),
)
.unwrap();
assert!(fd1 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
sys_file::close(&task, fd1).unwrap();
println!("start open, the fd0 is {}------------------------", fd0);
let fd2 = sys_file::openAt(&task, fd0, filename.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd2 == 1);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestGetCwd1() {
//openat with dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
task.workdir = task.Root();
let mut arr: [u8; 128] = [0; 128];
let oldAddr = &mut arr[0] as *mut _ as u64;
let addr = sys_file::getcwd(&task, oldAddr, arr.len()).unwrap();
let str = str::from_utf8(&arr[..(addr - oldAddr - 1) as usize]).unwrap();
println!("the str is {}, len is {}", str, addr - oldAddr);
assert!(str == "/");
createTestDirs(&mm, &task).unwrap();
let str = "/b/c".to_string();
let cstr = CString::New(&str);
sys_file::chdir(&mut task, cstr.Ptr()).unwrap();
let addr = sys_file::getcwd(&task, oldAddr, arr.len()).unwrap();
let str = str::from_utf8(&arr[..(addr - oldAddr - 1) as usize]).unwrap();
assert!(str == "/b/c");
}
#[test]
fn TestGetCwd2() {
//openat with dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
task.workdir = task.Root();
let mut arr: [u8; 128] = [0; 128];
let oldAddr = &mut arr[0] as *mut _ as u64;
let addr = sys_file::getcwd(&task, oldAddr, arr.len()).unwrap();
let str = str::from_utf8(&arr[..(addr - oldAddr - 1) as usize]).unwrap();
println!("the str is {}, len is {}", str, addr - oldAddr);
assert!(str == "/");
createTestDirs(&mm, &task).unwrap();
let str = "/b/c".to_string();
let cstr = CString::New(&str);
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
sys_file::fchdir(&mut task, fd0).unwrap();
let addr = sys_file::getcwd(&task, oldAddr, arr.len()).unwrap();
let str = str::from_utf8(&arr[..(addr - oldAddr - 1) as usize]).unwrap();
assert!(str == "/b/c");
}
    //need to enable task.Creds().lock().HasCapability(Capability::CAP_SYS_CHROOT) before enabling the test
//#[test]
fn TestSysChroot() {
//openat with dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
task.workdir = task.Root();
createTestDirs(&mm, &task).unwrap();
let str = "/b".to_string();
let cstr = CString::New(&str);
println!("**************start to chroot");
sys_file::chroot(&mut task, cstr.Ptr()).unwrap();
println!("**************after chroot");
let cstr = CString::New(&"/c".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
let filename = CString::New(&"c1.txt".to_string());
let fd1 = sys_file::openAt(&task, fd0, filename.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd2 = sys_file::openAt(&task, fd0, filename.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd2 == 1);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestSysDup1() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd3 = sys_file::Dup(&mut task, fd2).unwrap();
println!("fd3 = {}", fd3);
assert!(fd3 == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd3, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
sys_file::close(&task, fd3).unwrap();
}
#[test]
fn TestSysDup2() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd3 = sys_file::Dup2(&mut task, fd2, 10).unwrap();
assert!(fd3 == 10);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd3, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
sys_file::close(&task, fd3).unwrap();
}
#[test]
fn TestSysDup3() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd3 = sys_file::Dup3(&mut task, fd2, 10, Flags::O_CLOEXEC as u32).unwrap();
assert!(fd3 == 10);
let flag = sys_file::Fcntl(&mut task, fd3, Cmd::F_GETFD, 0).unwrap() as u32;
println!("flag is {:b}", flag);
assert!(flag == LibcConst::FD_CLOEXEC as u32);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd3, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
sys_file::close(&task, fd3).unwrap();
}
#[test]
fn TestFcntl1() {
//Cmd::F_DUPFD_CLOEXEC/F_GETFD/F_SETFD
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let fd3 = sys_file::Fcntl(&mut task, fd2, Cmd::F_DUPFD_CLOEXEC, 0).unwrap() as i32;
assert!(fd3 == 0);
let flag = sys_file::Fcntl(&mut task, fd3, Cmd::F_GETFD, 0).unwrap() as u32;
assert!(flag == LibcConst::FD_CLOEXEC as u32);
let res = sys_file::Fcntl(&mut task, fd3, Cmd::F_SETFD, 0).unwrap() as u32;
assert!(res == 0);
let flag = sys_file::Fcntl(&mut task, fd3, Cmd::F_GETFD, 0).unwrap() as u32;
assert!(flag == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd3, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
sys_file::close(&task, fd3).unwrap();
}
#[test]
fn TestFcntl2() {
//Cmd::F_DUPFD_CLOEXEC/F_GETFD/F_SETFD
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let flag = sys_file::Fcntl(&mut task, fd2, Cmd::F_GETFL, 0).unwrap() as i32;
assert!(flag & Flags::O_NONBLOCK != Flags::O_NONBLOCK);
let res =
sys_file::Fcntl(&mut task, fd2, Cmd::F_SETFL, Flags::O_NONBLOCK as u64).unwrap() as i32;
assert!(res == 0);
let flag = sys_file::Fcntl(&mut task, fd2, Cmd::F_GETFL, 0).unwrap() as i32;
assert!(flag & Flags::O_NONBLOCK == Flags::O_NONBLOCK);
}
#[test]
fn TestMkdir1() {
//TestMkdir
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/new".to_string());
let res = sys_file::Mkdir(&task, cstr.Ptr(), 0o777).unwrap();
assert!(res == 0);
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd0 == 0);
let filename = CString::New(&"/a/new/a.txt".to_string());
let fd1 = sys_file::createAt(
&task,
ATType::AT_FDCWD,
filename.Ptr(),
Flags::O_RDWR as u32,
FileMode(0o777),
)
.unwrap();
assert!(fd1 == 1);
let res = sys_file::Unlinkat(&task, ATType::AT_FDCWD, filename.Ptr()).unwrap();
assert!(res == 0);
let res = sys_file::Rmdir(&task, cstr.Ptr()).unwrap();
assert!(res == 0);
}
#[test]
fn TestMkdir2() {
//TestMkdirat
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd0 == 0);
let cstr = CString::New(&"new".to_string());
let res = sys_file::Mkdirat(&task, fd0, cstr.Ptr(), 0o777).unwrap();
assert!(res == 0);
let fd1 = sys_file::openAt(&task, fd0, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 1);
let filename = CString::New(&"a.txt".to_string());
let fd2 = sys_file::createAt(
&task,
fd1,
filename.Ptr(),
Flags::O_RDWR as u32,
FileMode(0o777),
)
.unwrap();
assert!(fd2 == 2);
let res = sys_file::Unlinkat(&task, fd1, filename.Ptr()).unwrap();
assert!(res == 0);
let cstr = CString::New(&"/a/new".to_string());
let res = sys_file::Rmdir(&task, cstr.Ptr()).unwrap();
assert!(res == 0);
}
// createTestDirs populates the root with some test files and directories.
// /a/a1.txt
// /a/a2.txt
// /b/b1.txt
// /b/c/c1.txt
// /symlinks/normal.txt
// /symlinks/to_normal.txt -> /symlinks/normal.txt
// /symlinks/recursive -> /symlinks
#[test]
fn TestSymlink_filelink() {
//TestMkdirat
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let newPath = CString::New(&"/b/link.txt".to_string());
let res = sys_file::Symlink(&task, newPath.Ptr(), cstr.Ptr()).unwrap();
assert!(res == 0);
let fd2 = sys_file::openAt(
&task,
ATType::AT_FDCWD,
newPath.Ptr(),
Flags::O_RDONLY as u32,
)
.unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestSymlink_folderlink() {
//TestMkdirat
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let oldfolder = CString::New(&"/a".to_string());
let newfolder = CString::New(&"/d".to_string());
let res = sys_file::Symlink(&task, newfolder.Ptr(), oldfolder.Ptr()).unwrap();
assert!(res == 0);
let newPath = CString::New(&"/d/a1.txt".to_string());
let fd2 = sys_file::openAt(
&task,
ATType::AT_FDCWD,
newPath.Ptr(),
Flags::O_RDONLY as u32,
)
.unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestSymlink_linkat() {
//TestMkdirat
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/b".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd0 == 0);
let cstr = CString::New(&"/a/a1.txt".to_string());
let newPath = CString::New(&"link.txt".to_string());
let res = sys_file::Symlinkat(&task, newPath.Ptr(), fd0, cstr.Ptr()).unwrap();
assert!(res == 0);
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let cstr = CString::New(&"/b/link.txt".to_string());
let mut buf: [u8; 1024] = [0; 1024];
let size =
sys_file::ReadLink(&task, cstr.Ptr(), &mut buf[0] as *mut _ as u64, 1024).unwrap();
assert!(size > 0);
assert!(str::from_utf8(&buf[..size as usize]).unwrap() == "/a/a1.txt");
let mut buf: [u8; 1024] = [0; 1024];
let size = sys_file::ReadLinkAt(&task, fd0, cstr.Ptr(), &mut buf[0] as *mut _ as u64, 1024)
.unwrap();
assert!(size > 0);
assert!(str::from_utf8(&buf[..size as usize]).unwrap() == "/a/a1.txt");
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 1);
assert!(fd2 == 2);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn Testlink_link() {
//TestMkdirat
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let _fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let newPath = CString::New(&"/b/link.txt".to_string());
let res = sys_file::Link(&task, cstr.Ptr(), newPath.Ptr());
assert!(res == Err(Error::SysError(SysErr::EPERM)));
}
#[test]
fn TestTruncate1() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let data = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, data.Ptr(), data.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let size = 6;
let res = sys_file::Truncate(&task, cstr.Ptr(), size).unwrap();
assert!(res == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == size as i64);
assert!(data.data[..size as usize] == buf[0..size as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestTruncate2() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
let fd2 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
assert!(fd1 == 0);
assert!(fd2 == 1);
let str = "1234567890".to_string();
let data = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, data.Ptr(), data.Len() as i64).unwrap();
println!("after write");
let size = 6;
let res = sys_file::Ftruncate(&task, fd1, size).unwrap();
assert!(res == 0);
sys_file::close(&task, fd1).unwrap();
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == size as i64);
assert!(data.data[..size as usize] == buf[0..size as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestUmask() {
let mut task = Task::default();
let mask = sys_file::Umask(&mut task, 123).unwrap();
assert!(mask == 0);
let mask = sys_file::Umask(&mut task, 456).unwrap();
assert!(mask == 123);
}
#[test]
fn TestChown() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let stat = LibcStat::default();
let res = sys_stat::Stat(&task, cstr.Ptr(), &stat as *const _ as u64).unwrap();
assert!(res == 0);
println!("the gid is {}", stat.st_gid);
assert!(stat.st_uid == 0);
assert!(stat.st_gid == 65534);
let res = sys_file::Chown(&task, cstr.Ptr(), 123, 456);
assert!(res == Err(Error::SysError(SysErr::EPERM)));
}
#[test]
fn TestUTime() {
//openat without dirfd
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let stat = LibcStat::default();
let res = sys_stat::Stat(&task, cstr.Ptr(), &stat as *const _ as u64).unwrap();
assert!(res == 0);
println!("the atime is {}, mtime is {}", stat.st_atime, stat.st_mtime);
let _utime = Utime {
Actime: stat.st_atime + 100,
Modtime: stat.st_mtime + 100,
};
//let res = sys_file::Utime(&task, cstr.Ptr(), &utime as * const _ as u64).unwrap();
        //todo: fix this. It currently returns -1.
let _res = sys_file::Utime(&task, cstr.Ptr(), 0).unwrap();
//assert!(res == 0);*/
}
#[test]
fn TestRename1() {
//rename to another file in same folder
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 0);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let oldname = CString::New(&"/a/a1.txt".to_string());
let newname = CString::New(&"/a/b1.txt".to_string());
let res = sys_file::Rename(&task, oldname.Ptr(), newname.Ptr()).unwrap();
assert!(res == 0);
let fd2 = sys_file::openAt(
&task,
ATType::AT_FDCWD,
newname.Ptr(),
Flags::O_RDONLY as u32,
)
.unwrap();
assert!(fd2 == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestRename2() {
        //replace an existing file
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 0);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let oldname = CString::New(&"/a/a1.txt".to_string());
let newname = CString::New(&"/b/b1.txt".to_string());
let res = sys_file::Rename(&task, oldname.Ptr(), newname.Ptr()).unwrap();
assert!(res == 0);
let fd2 = sys_file::openAt(
&task,
ATType::AT_FDCWD,
newname.Ptr(),
Flags::O_RDONLY as u32,
)
.unwrap();
assert!(fd2 == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestRename3() {
    //rename to a file in a different folder
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 0);
let str = "1234567890".to_string();
let cstr = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, cstr.Ptr(), cstr.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
let oldname = CString::New(&"/a/a1.txt".to_string());
let newname = CString::New(&"/d.txt".to_string());
let res = sys_file::Rename(&task, oldname.Ptr(), newname.Ptr()).unwrap();
assert!(res == 0);
let fd2 = sys_file::openAt(
&task,
ATType::AT_FDCWD,
newname.Ptr(),
Flags::O_RDONLY as u32,
)
.unwrap();
assert!(fd2 == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap();
assert!(cnt == cstr.Len() as i64);
assert!(cstr.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
#[test]
fn TestRename4() {
    //renameat to a file in a different folder
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
createTestDirs(&mm, &task).unwrap();
let cstr = CString::New(&"/a/a1.txt".to_string());
let fd1 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDWR as u32).unwrap();
assert!(fd1 == 0);
let str = "1234567890".to_string();
let data = CString::New(&str);
println!("before write");
sys_write::Write(&task, fd1, data.Ptr(), data.Len() as i64).unwrap();
println!("after write");
sys_file::close(&task, fd1).unwrap();
println!("**************************before renameat111");
let cstr = CString::New(&"/a".to_string());
let fd0 =
sys_file::openAt(&task, ATType::AT_FDCWD, cstr.Ptr(), Flags::O_RDONLY as u32).unwrap();
let oldname = CString::New(&"a1.txt".to_string());
let newname = CString::New(&"/d.txt".to_string());
println!("**************************before renameat");
let res =
sys_file::Renameat(&task, fd0, oldname.Ptr(), ATType::AT_FDCWD, newname.Ptr()).unwrap();
println!("***************************after renameat");
assert!(res == 0);
sys_file::close(&task, fd0).unwrap();
let fd2 = sys_file::openAt(
&task,
ATType::AT_FDCWD,
newname.Ptr(),
Flags::O_RDONLY as u32,
)
.unwrap();
assert!(fd2 == 0);
let buf: [u8; 100] = [0; 100];
let cnt = sys_read::Read(&task, fd2, &buf[0] as *const _ as u64, buf.len() as i64).unwrap()
as usize;
println!("cnt is {}, len is {}", cnt, data.Len());
    // The original assert compared against `cstr`, which at this point holds "/a"
    // rather than the written data; compare against `data` instead.
    assert!(cnt == data.Len() as usize);
assert!(data.data[..] == buf[0..cnt as usize]);
sys_file::close(&task, fd2).unwrap();
}
pub fn Dup(oldfd: i32) -> i64 {
return unsafe { libc::dup(oldfd) as i64 };
}
#[test]
fn TestStdIo() {
let (mm, _) = newTestMountNamespace().unwrap();
let mut task = Task::default();
task.root = mm.lock().root.clone();
    //todo: fstat on stdin doesn't work for an unknown reason; fix it
//let stdin = Dup(0) as i32;
let stdout = Dup(1) as i32;
let stderr = Dup(2) as i32;
let stdfds = [stdout, stdout, stderr];
println!("before newstdfds");
task.NewStdFds(&stdfds, true).unwrap();
println!("after newstdfds");
let str = "1234567890".to_string();
let data = CString::New(&str);
let res = sys_write::Write(&task, 1, data.Ptr(), data.Len() as i64).unwrap();
println!("after sys_write::Write, the res is {}", res);
assert!(res == data.Len() as i64);
println!("the end of test...");
}
}
| 32.836458 | 187 | 0.523525 |
3832b655d8b2075e629f6dbbf1a4a3480783b4a6 | 46,632 | use super::{lattices::*, Outcome};
use crate::{dot::*, optimization};
use firm_construction::program_generator::Spans;
use libfirm_rs::{
bindings,
nodes::{
try_as_value_node, Block, NewKind, Node, NodeDebug, NodeTrait, Phi, Proj, ProjKind, Store,
},
types::{Ty, TyTrait},
Entity, Graph, Mode, Tarval, TarvalKind,
};
use priority_queue::PriorityQueue;
use std::{
collections::{HashMap, HashSet},
fmt::Write,
rc::Rc,
};
// == Priority ==
#[derive(PartialEq, Eq, Clone, Copy)]
struct Priority {
topo_order: u32,
priority: u32, // highest priority first
}
impl std::cmp::Ord for Priority {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
(self.priority)
.cmp(&other.priority)
.then_with(|| self.topo_order.cmp(&other.topo_order).reverse())
}
}
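// Illustrative note (added, not in the original source): PriorityQueue pops the
// greatest `Priority` first, so nodes with a higher `priority` field are updated
// first, and ties are broken in favour of the *lower* `topo_order` because that
// comparison is reversed above.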
impl PartialOrd for Priority {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
pub struct ConstantFoldingWithLoadStore {
// lattice per each node
values: HashMap<Node, NodeLattice>,
// global lattice, tracks which stores cannot be removed
required_stores: HashSet<Store>,
// worklist queue
queue: PriorityQueue<Node, Priority>,
    // Tracks additional update dependencies between nodes.
// This is needed for nodes which use `values` information
// from non-predecessors.
deps: HashMap<Node, HashSet<Node>>,
// Map each node to a priority. Nodes with higher priority are updated first.
node_topo_idx: HashMap<Node, Priority>,
// The graph on which the optimization is applied to.
graph: Graph,
// the start block of the graph.
start_block: Node,
// counts how many updates were scheduled.
node_update_count: usize,
// this field is for debugging only.
cur_node: Option<Node>,
created_phis: HashSet<Phi>,
}
impl optimization::Local for ConstantFoldingWithLoadStore {
fn optimize_function(graph: Graph) -> Outcome {
if cfg!(debug_assertions) {
if let Ok(filter) = std::env::var("FILTER_CONSTANT_FOLDING_METHOD") {
// for debugging
if !graph.entity().name_string().contains(&filter) {
return Outcome::Unchanged;
}
}
}
let mut constant_folding = Self::new(graph);
constant_folding.run();
let result = constant_folding.apply();
if cfg!(debug_assertions) {
check_asserts(graph);
}
result
}
}
#[derive(Clone, Copy, Debug)]
enum AssertNodesAre {
Const(bool),
Eq(bool),
}
fn check_asserts(graph: Graph) {
log::debug!("Checking {}", graph.entity().name_string());
/*breakpoint!("Graph", graph, &|node: &Node| default_label(node)
.append(Spans::span_str(*node)));*/
graph.walk(|node| {
let _res = (|| -> Result<(), std::option::NoneError> {
let call = Node::as_call(*node)?;
let method_name = call.method_name()?;
let assert_type = if method_name.contains(&"assertNodesAreConst") {
Some(AssertNodesAre::Const(true))
} else if method_name.contains(&"assertNodesAreNonConst") {
Some(AssertNodesAre::Const(false))
} else if method_name.contains(&"assertNodesAreEq") {
Some(AssertNodesAre::Eq(true))
} else if method_name.contains(&"assertNodesAreNotEq") {
Some(AssertNodesAre::Eq(false))
} else {
None
};
match assert_type? {
AssertNodesAre::Const(assert_const) => {
for arg in call.args().skip(1) {
if Node::is_const(arg) && !assert_const {
panic!(
"Node {:?}{} was asserted to be non-constant, but was constant",
arg,
Spans::span_str(arg),
)
} else if !Node::is_const(arg) && assert_const {
panic!(
"Node {:?}{} was asserted to be constant, but was not",
arg,
Spans::span_str(arg),
)
}
}
}
AssertNodesAre::Eq(assert_eq) => {
let set: HashSet<_> = call.args().skip(1).collect();
if assert_eq && set.len() > 1 {
let mut iter = set.iter();
let (first, second) = (iter.next().unwrap(), iter.next().unwrap());
panic!(
"Node {:?}{} was asserted to equal {:?}{}, but it wasn't",
first,
Spans::span_str(*first),
second,
Spans::span_str(*second),
)
}
if !assert_eq && set.len() != call.args().len() - 1 {
panic!(
"Some nodes were equal in {:?}{}",
call,
Spans::span_str(call),
)
}
}
}
Ok(())
})();
});
}
impl ConstantFoldingWithLoadStore {
fn new(graph: Graph) -> Self {
let mut queue = PriorityQueue::new();
let mut node_topo_idx = HashMap::new();
let mut values = HashMap::new();
graph.assure_outs();
graph.compute_doms();
breakpoint!("Dominator information", graph, &|node: &Node| {
let block = match node {
Node::Block(block) => *block,
_ => node.block(),
};
default_label(node).append(format!("\ndom_depth: {}", block.dom_depth()))
});
let mut topo_order = 0;
graph.walk_topological(|node| {
topo_order += 1;
// blocks and jumps are handled immediately,
// so that they make things alive quicker.
        // they don't depend on anything but their predecessors.
log::debug!("insert {:?}", node);
node_topo_idx.insert(
*node,
Priority {
topo_order,
priority: match node {
Node::Block(_) | Node::Jmp(_) => 1,
_ => 0,
},
},
);
values.insert(*node, NodeLattice::start());
});
let start_block = graph.start().block().into();
queue.push(
start_block,
Priority {
topo_order: 0,
priority: 0,
},
);
Self {
queue,
values,
graph,
node_topo_idx,
start_block,
cur_node: None,
deps: HashMap::new(),
node_update_count: 0,
required_stores: HashSet::new(),
created_phis: HashSet::new(),
}
}
// used for debugging
#[allow(clippy::single_match)]
fn debug_data(&self) -> String {
if let Some(node) = self.cur_node {
if let Some(span) = Spans::lookup_span(node) {
let mut result = String::new();
write!(
&mut result,
"highlight-line:{},{},{},{}",
span.start_position().line_number(),
span.start_position().column() + 1,
span.end_position().line_number(),
span.end_position().column() + 1,
)
.unwrap();
let mem = match node {
Node::Load(load) => load.mem(),
Node::Store(store) => store.mem(),
Node::Call(call) => call.mem(),
Node::Proj(proj, ProjKind::Store_M(_))
| Node::Proj(proj, ProjKind::Load_M(_))
| Node::Proj(proj, ProjKind::Call_M(_)) => proj.pred(),
_ => node,
};
let val = self.lookup(mem);
let val = match val {
NodeLattice::Tuple(_a, b) => &b,
val => val,
};
let mut text = HashMap::new();
match val {
NodeLattice::Heap(heap) => {
for (node, info) in &heap.array_infos {
if let InfoIdx::Node(node) = node {
text.insert(*node, format!("{:?}", info));
}
}
for (node, info) in &heap.object_infos {
if let InfoIdx::Node(node) = node {
text.insert(*node, format!("{:?}", info));
}
}
}
_ => {}
}
for (n, val) in &self.values {
match val {
NodeLattice::Value(val) if !Node::is_const(*n) => {
if let Some(span) = Spans::lookup_span(*n) {
write!(
&mut result,
"\n{}:{}: {:?}{}",
span.start_position().line_number(),
(*n).debug_fmt().short(true),
val,
if let Some(t) = text.get(n) {
" | ".to_owned() + t
} else {
"".to_owned()
}
)
.unwrap();
}
}
_ => {}
}
}
return result;
}
}
"None".to_owned()
}
// used for debugging
#[allow(clippy::single_match)]
fn debug_dump_heap(node: Node, heap: &Heap) -> String {
if let Some(span) = Spans::lookup_span(node) {
let mut result = String::new();
write!(
&mut result,
"highlight-line:{},{},{},{}",
span.start_position().line_number(),
span.start_position().column() + 1,
span.end_position().line_number(),
span.end_position().column() + 1,
)
.unwrap();
let mut text = HashMap::new();
for (node, info) in &heap.array_infos {
if let InfoIdx::Node(node) = node {
text.insert(*node, format!("{:?}", info));
}
}
for (node, info) in &heap.object_infos {
if let InfoIdx::Node(node) = node {
text.insert(*node, format!("{:?}", info));
}
}
for (node, info) in &text {
if let Some(span) = Spans::lookup_span(*node) {
write!(
&mut result,
"\n{}:{}: {}",
span.start_position().line_number(),
(*node).debug_fmt().short(true),
info
)
.unwrap();
}
}
result
} else {
"None".to_owned()
}
}
fn lookup(&self, node: Node) -> &NodeLattice {
&self.values[&node]
}
fn lookup_val(&self, node: Node) -> Option<&NodeValue> {
self.lookup(node).expect_value_or_no_info()
}
fn update(&mut self, node: Node, new: NodeLattice) {
self.values.insert(node, new).unwrap();
}
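    // Descriptive note (added): `run` below is a standard worklist fixpoint
    // iteration: pop the highest-priority node, recompute its lattice value, and
    // re-enqueue its users (plus any registered extra dependencies) whenever that
    // value changed.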
fn run(&mut self) {
log::info!(
"Run constant folding on {}",
self.graph.entity().name_string()
);
self.debug_data();
macro_rules! invalidate {
($node: expr) => {
let prio = *self
.node_topo_idx
.get(&$node)
                .expect(&format!("{:?} must have a topological order", $node));
self.queue.push($node, prio);
};
}
let mut required_stores = HashSet::new();
let mut phi_container = PhiContainer::new();
let mut deps = Vec::new();
while let Some((cur_node, _priority)) = self.queue.pop() {
self.cur_node = Some(cur_node);
self.node_update_count += 1;
let cur_lattice = self.lookup(cur_node);
let updated_lattice = self.update_node(
cur_node,
cur_lattice,
&mut deps,
&mut required_stores,
&mut phi_container,
);
if &updated_lattice != cur_lattice {
if let Some(deps) = self.deps.get(&cur_node) {
for out_node in deps.iter() {
invalidate!(*out_node);
}
}
for out_node in cur_node.out_nodes() {
invalidate!(out_node);
}
self.update(cur_node, updated_lattice);
}
if !deps.is_empty() {
for dep in &deps {
self.deps
.entry(*dep)
.and_modify(|e| {
e.insert(cur_node);
})
.or_insert_with(|| vec![cur_node].into_iter().collect());
}
deps.clear();
}
}
self.required_stores = required_stores;
self.cur_node = None;
self.created_phis = phi_container.phis.values().cloned().collect();
}
fn breakpoint(&self, cur_node: Node) {
breakpoint!("Constant Folding: iteration", self.graph, &|node: &Node| {
let mut label = default_label(node);
if let Some(lattice) = self.values.get(&node) {
label = label.append(format!("\n{:?}", lattice));
}
if node == &cur_node {
label = label
.style(Style::Filled)
.fillcolor(X11Color::Blue)
.fontcolor(X11Color::White);
}
if let Node::Phi(phi) = node {
for (arg, pred) in phi.in_nodes().zip(phi.block().in_nodes()) {
label = label.append(format!(
"\nfrom {} use {:?}{}",
pred.node_id(),
self.lookup(arg),
if self.lookup(pred).reachable() {
""
} else {
" [dead]"
}
));
}
}
if self.queue.get(&node).is_some() {
label = label.style(Style::Bold);
}
label = label.append(format!(
"\ntopo: {} dom: {}",
self.node_topo_idx[node].topo_order,
node.block().dom_depth()
));
label
});
}
#[allow(clippy::cyclomatic_complexity)]
fn update_node(
&self,
cur_node: Node,
cur_lattice: &'_ NodeLattice,
deps: &mut Vec<Node>,
required_stores: &mut HashSet<Store>,
mut phi_container: &'_ mut PhiContainer,
) -> NodeLattice {
use self::{Node::*, ProjKind::*};
self.breakpoint(cur_node);
let mut mark_stores_as_required = |stores: &HashSet<self::Store>| {
log::debug!(
"Mark stores as required: {:?}",
stores
.iter()
.map(|s| format!("{:?}{}", s, Spans::span_str(*s)))
.collect::<Vec<_>>()
.join(","),
);
required_stores.extend(stores);
};
let reachable = cur_lattice.reachable()
|| if Node::is_block(cur_node) {
cur_node
.in_nodes()
.any(|pred| self.lookup(pred).reachable())
|| cur_node == self.start_block
} else {
self.lookup(cur_node.block().into()).reachable()
};
if !reachable {
// we don't need to update non-reachable nodes
return NodeLattice::NotReachableYet;
}
match cur_node {
// == Load-Store optimizations ==
Return(ret) => match self.lookup(ret.mem()) {
NodeLattice::NotReachableYet => NodeLattice::NotReachableYet,
NodeLattice::Heap(heap) => {
let mut mem = MemoryArea::external();
for res in ret.return_res() {
match self.lookup(res) {
NodeLattice::NotReachableYet => return NodeLattice::NotReachableYet,
NodeLattice::Value(val) => {
mem.join_mut(&heap.mem_reachable_from(val.points_to()));
}
val => panic!("Unexpected val {:?} for arg {:?}", val, res),
}
}
mark_stores_as_required(&heap.last_stores_into(&mem));
NodeLattice::Invalid
}
val => panic!("Unexpected val {:?} for heap", val),
},
Proj(_, Start_M(_)) => NodeLattice::Heap(Rc::new(Heap::start())),
Call(call) => {
let mem_val = self.lookup(call.mem());
match (mem_val, call.new_kind(), call.out_single_result()) {
(NodeLattice::NotReachableYet, _, _) => NodeLattice::NotReachableYet,
(mem_val, Some(_new_kind), None) => {
NodeLattice::tuple(NodeLattice::Invalid, mem_val.clone())
}
(NodeLattice::Heap(heap), Some(new_kind), result_node) => {
let mut heap = (**heap).clone();
let result = if let Some(result_node) = result_node {
let ptr = match new_kind {
NewKind::Object(class_ty) => heap.new_obj(result_node, class_ty),
NewKind::Array { item_ty, .. } => {
heap.new_arr(result_node, item_ty)
}
};
NodeValue::new(ptr.into(), Some(result_node)).into()
} else {
NodeLattice::Invalid
};
NodeLattice::tuple(result, NodeLattice::Heap(Rc::new(heap)))
}
// reset heap if an unknown method is called
// which could modify arbitrary memory.
(NodeLattice::Heap(heap), None, _result_node) => {
let mut used_mem = MemoryArea::empty();
let mut used_nodes = HashSet::new();
for arg in call.args() {
if let Some(val) = self.lookup_val(arg) {
if let Some(ptr) = val.as_pointer() {
used_mem.join_mut(&ptr.target);
used_nodes.insert(arg);
}
} else {
// don't continue if one of the args has no info yet
return NodeLattice::NotReachableYet;
}
}
if format!("{:?}", call).contains("__log") {
log::info!(
"Log call in {}: {:?} heap: <<<<{}>>>>",
Spans::span_str(call),
call.args()
.filter_map(|a| self.lookup_val(a).map(|v| (a, v)))
.map(|(a, val)| format!(
"{:?}{}: {:?}",
a,
Spans::span_str(a),
val
))
.collect::<Vec<_>>()
.join(","),
Self::debug_dump_heap(call.as_node(), heap)
);
NodeLattice::tuple(
NodeLattice::Invalid,
NodeLattice::Heap(heap.clone()),
)
} else {
log::debug!(
"{:?} uses {:?} and {:?} as args",
call,
used_mem,
used_nodes
);
let accessible_mem = heap.mem_reachable_from(used_mem);
mark_stores_as_required(&heap.last_stores_into(&accessible_mem));
let heap = heap.reset_mem(&accessible_mem);
NodeLattice::tuple(
call.single_result_ty()
.map(|ty| {
NodeValue::non_const_val(ty.mode(), accessible_mem).into()
})
.unwrap_or(NodeLattice::Invalid),
NodeLattice::Heap(Rc::new(heap)),
)
}
}
val => panic!("unreachable {:?}", val),
}
}
Proj(_, Call_TResult(node)) => {
// we have to wrap the result in a tuple, as modeT nodes cannot have a pointer
// as value
NodeLattice::tuple(
self.lookup(node.into()).tuple_1().clone(),
NodeLattice::Invalid,
)
}
Proj(_, Call_TResult_Arg(_, _, node)) => self.lookup(node.into()).tuple_1().clone(),
Proj(_, Call_M(node)) => self.lookup(node.into()).tuple_2().clone(),
cur_node @ Store(_) | cur_node @ Load(_) => {
enum TK {
ArrItem(Idx, Ty),
ObjField(Entity),
}
let (raw_ptr_node, mem) = match cur_node {
Store(store) => (store.ptr(), store.mem()),
Load(load) => (load.ptr(), load.mem()),
_ => panic!("unreach"),
};
let (ptr_node, target_kind) = match raw_ptr_node {
Member(member) => (member.ptr(), TK::ObjField(member.entity())),
Sel(sel) => {
// as sel.index() is not a direct predecessor, we need to put it on deps
deps.push(sel.index());
(
sel.ptr(),
TK::ArrItem(
match &self.lookup_val(sel.index()) {
Some(val) if val.is_tarval() => {
let idx_val = val.tarval();
let idx_source = val.source_or_some_ex(sel.index());
if idx_val.is_constant() {
Idx::Const(idx_val.get_long() as usize, idx_source)
} else {
Idx::Dynamic(idx_source)
}
}
Some(_) => panic!("unreach"),
None => return NodeLattice::NotReachableYet,
},
sel.element_ty(),
),
)
}
_ => panic!("unreach"),
};
// as ptr_node is not a direct predecessor, we need to put it on deps
deps.push(ptr_node);
match (self.lookup(mem), self.lookup_val(ptr_node)) {
(NodeLattice::NotReachableYet, _) | (_, None) => NodeLattice::NotReachableYet,
(NodeLattice::Heap(heap), Some(ptr_val)) if ptr_val.is_pointer() => {
let o = ptr_val.source_or_some_ex(ptr_node);
let ptr = ptr_val.as_pointer().unwrap();
if ptr.is_null_or_empty() {
// we would crash on such a `ptr` anyways, so wait for more info.
return NodeLattice::NotReachableYet;
}
let mut heap = (**heap).clone();
match cur_node {
Store(store) => {
let val = self.lookup_val(store.value());
if val.is_none() {
return NodeLattice::NotReachableYet;
}
let val =
ValWithStoreInfo::single_store(val.unwrap().clone(), store);
match target_kind {
TK::ArrItem(idx, ty) => heap.update_cell(o, ptr, idx, &val, ty),
TK::ObjField(entity) => heap.update_field(o, ptr, entity, &val),
}
NodeLattice::Heap(Rc::new(heap))
}
Load(load) => {
let val = match target_kind {
TK::ArrItem(idx, ty) => heap.lookup_cell(o, ptr, idx, ty),
TK::ObjField(entity) => heap.lookup_field(o, ptr, entity),
};
let ValWithStoreInfo { val, stores } = if let Some(val) = val {
val
} else {
return NodeLattice::NotReachableYet;
};
let is_source_usable = if let Some(source_node) =
val.source.usable_source_node(cur_node.block())
{
log::debug!(
"{:?} uses {:?} directly instead of loading it",
load,
source_node
);
true
} else {
false
};
// TODO use is_source_usable here in this check!
if !is_source_usable && val.tarval().is_bad() {
mark_stores_as_required(&stores);
}
let val = match load.out_proj_res() {
Some(res) if !is_source_usable => {
let val = val.into_updated_source_ex(res.into());
let val = ValWithStoreInfo { val, stores };
match target_kind {
TK::ArrItem(idx, ty) => {
heap.enhance_cell(o, ptr, idx, &val, ty)
}
TK::ObjField(entity) => {
heap.enhance_field(o, ptr, entity, &val)
}
}
val.val
}
_ => val,
};
// TODO implement into for heap
NodeLattice::tuple(val.into(), NodeLattice::Heap(Rc::new(heap)))
}
_ => panic!("unreach"),
}
}
(heap, val) => panic!("unreach {:?} {:?}", heap, val),
}
}
Proj(_, Store_M(store)) => self.lookup(store.into()).clone(),
Proj(_, Load_Res(node)) => self.lookup(node.into()).tuple_1().clone(),
Proj(_, Load_M(node)) => self.lookup(node.into()).tuple_2().clone(),
Proj(_, Div_M(node)) => {
deps.push(node.mem());
self.lookup(node.mem()).clone()
}
Proj(_, Mod_M(node)) => {
deps.push(node.mem());
self.lookup(node.mem()).clone()
}
// == Conditionals ==
Cmp(cmp) => {
#[derive(Clone, Copy, Debug)]
enum SimpleRelation {
Equal,
NotEqual,
}
fn as_simple_relation(
relation: bindings::ir_relation::Type,
) -> Option<SimpleRelation> {
match relation {
bindings::ir_relation::Equal => Some(SimpleRelation::Equal),
bindings::ir_relation::LessGreater => Some(SimpleRelation::NotEqual),
_ => None,
}
}
#[derive(Clone, Copy, Debug)]
enum CmpResult {
Bool(bool),
NotReachableYet,
Bad,
Tarval(Tarval),
}
let left_val = self.lookup_val(cmp.left());
let right_val = self.lookup_val(cmp.right());
let result = match (left_val, right_val) {
(None, _) | (_, None) => CmpResult::NotReachableYet,
(Some(val1), Some(val2)) => {
match (
&val1.value,
&val2.value,
as_simple_relation(cmp.relation()),
(&val1.source, &val2.source),
) {
(
_,
_,
Some(simple_rel),
(NodeValueSource::Node(src1), NodeValueSource::Node(src2)),
) if src1 == src2 => {
// we have `node == node` or `node != node`
match simple_rel {
SimpleRelation::Equal => CmpResult::Bool(true),
SimpleRelation::NotEqual => CmpResult::Bool(false),
}
}
(
AbstractValue::Pointer(ptr1),
AbstractValue::Pointer(ptr2),
rel,
_,
) => {
let result = match (rel, ptr1.eq(ptr2)) {
(Some(SimpleRelation::Equal), Some(res)) => {
// e.g. p1 == p2
// with p1 -> {obj1,null} and p2 -> {@obj2}
CmpResult::Bool(res)
}
(Some(SimpleRelation::NotEqual), Some(res)) => {
// e.g. p1 != p2
// with p1 -> {obj1,obj3} and p2 -> {obj2,null}
CmpResult::Bool(!res)
}
_ => CmpResult::Bad,
};
log::info!(
"Result of compare: {:?} {:?} {:?} = {:?} in {}",
ptr1,
rel,
ptr2,
result,
Spans::span_str(cmp),
);
CmpResult::Bad
}
(AbstractValue::Tarval(t1), AbstractValue::Tarval(t2), _, _) => {
if t1.is_bad() || t2.is_bad() {
CmpResult::Tarval(Tarval::bad())
} else {
CmpResult::Tarval(t1.lattice_cmp(cmp.relation(), *t2))
}
}
(v1, v2, _, _) => panic!(
"Cannot compare values with invalid types: {:?}, {:?}",
v1, v2,
),
}
}
};
let tarval = match result {
CmpResult::Tarval(val) => val,
CmpResult::Bool(val) => Tarval::bool_val(val),
CmpResult::Bad => Tarval::bad(),
CmpResult::NotReachableYet => return NodeLattice::NotReachableYet,
};
NodeLattice::from_tarval(tarval, Mode::b())
}
Cond(cond) => self.lookup(cond.selector()).clone(),
Proj(_, Cond_Val(is_true_branch, cond)) => match &self.lookup_val(cond.into()) {
Some(val) if val.tarval().is_bool_val(is_true_branch) || val.tarval().is_bad() => {
NodeLattice::Invalid
}
_ => NodeLattice::NotReachableYet,
},
// == Phi ==
Phi(phi) => {
let mut join_context = if phi.mode().is_mem() && phi.in_nodes().len() == 2 {
JoinContext::PhiWith2Preds {
phi,
phi_container: &mut phi_container,
cur_info_idx: None,
cur_phi_id: None,
}
} else {
JoinContext::None
};
let result =
phi.in_nodes()
.zip(phi.block().in_nodes())
.fold(None, |acc, (pred, block)| {
// only consider reachable blocks for phi inputs
if !self.lookup(block).reachable() {
// we must get informed when that block gets reachable
deps.push(block);
log::debug!(
"{:?} is unreachable, thus {:?} can be ignored",
block,
pred
);
acc.or(Some(NodeLattice::NotReachableYet))
} else {
let pred_lat = self.lookup(pred);
match acc {
None => Some(pred_lat.clone()),
Some(acc) => {
let new_lat = acc.join(pred_lat, &mut join_context);
match new_lat {
NodeLattice::Value(val) => Some(
val.into_updated_source_ex(phi.as_node()).into(),
),
lat => Some(lat),
}
}
}
/*log::debug!(
"for {:?}; pred_val: {:?} -> val: {:?}",
pred,
pred_lat,
new_lat
);*/
}
});
result.unwrap_or(NodeLattice::NotReachableYet)
}
// == Value nodes ==
_ => {
if let Ok(value_node) = try_as_value_node(cur_node) {
let mut tarval_args = vec![];
let mut non_constant = false;
let mut no_info = false;
for arg in value_node.value_nodes() {
let val = self.lookup_val(arg.as_node());
match val {
None => no_info = true,
Some(val) if val.is_tarval() => match &val.value {
AbstractValue::Pointer(..) => panic!("unreach"),
AbstractValue::Tarval(val) => {
if val.is_bad() {
non_constant = true;
} else {
tarval_args.push(*val)
}
}
},
// todo optimize code
Some(_) => panic!(
"Cannot use {:?} from {:?} - pointer are not valid here",
val,
arg.as_node(),
),
}
}
if non_constant {
NodeValue::non_const_node(cur_node).into()
} else if no_info {
return NodeLattice::NotReachableYet;
} else {
let tarval = value_node.compute(tarval_args);
NodeLattice::from_tarval_node(tarval, cur_node)
}
} else {
NodeLattice::Invalid
}
}
}
}
#[allow(clippy::cyclomatic_complexity)]
fn apply(&mut self) -> Outcome {
let mut values = self.values.iter().collect::<Vec<_>>();
values.sort_by_key(|(l, _)| l.node_id());
let mut to_be_marked_as_bad: Vec<Block> = Vec::new();
let mut folded_constants = 0;
let mut optimized_loads = 0;
let mut optimized_conds = 0;
let mut optimized_stores = 0;
let mut optimized_phis = 0;
let mut removed_news = 0;
let mut patch =
|mem_before: Node, mem_after: Option<Proj>, res: Option<Proj>, node: Node| {
if res.is_none() {
if let Some(mem_after) = mem_after {
log::debug!(
"Remove {:?}{} from memory flow",
node,
Spans::span_str(node)
);
Graph::exchange(mem_after, mem_before);
match node {
Node::Store(_n) => optimized_stores += 1,
Node::Call(_n) => removed_news += 1,
Node::Load(_n) => {}
Node::Div(_n) => {}
Node::Mod(_n) => {}
_ => {}
}
}
}
};
for (&node, lattice) in &values {
if Node::is_const(node) {
continue;
}
let (value, source_node) = if let NodeLattice::Value(val) = lattice {
(val.tarval(), val.source.clone())
} else {
continue;
};
if try_as_value_node(node).is_ok() {
let new_node = if value.is_constant() {
let const_node = self.graph.new_const(value);
Spans::copy_span(const_node, node);
const_node.into()
} else {
match source_node {
NodeValueSource::Node(new_node)
if source_node.is_usable_from(node.block()) =>
{
new_node
}
_ => continue,
}
};
if let Node::Phi(phi) = new_node {
if self.created_phis.contains(&phi) {
optimized_phis += 1;
}
}
if new_node == node {
continue;
}
/*if Node::is_add(new_node) {
// libfirm fix
log::warn!("Skip add {:?}", new_node);
continue;
}*/
if let Node::Proj(_, ProjKind::Load_Res(_)) = node {
optimized_loads += 1;
} else {
folded_constants += 1;
}
log::debug!(
"exchange value {:?}{} with {:?}{}",
node,
Spans::span_str(node),
new_node,
Spans::span_str(new_node)
);
match node {
// only replace their proj, not the node itself.
// divs, mods and loads are handled later.
Node::Div(n) => patch(n.mem(), n.out_proj_m(), n.out_proj_res(), n.as_node()),
Node::Mod(n) => patch(n.mem(), n.out_proj_m(), n.out_proj_res(), n.as_node()),
_ => Graph::exchange(node, new_node),
};
} else if let (Node::Cond(cond), TarvalKind::Bool(val)) = (node, value.kind()) {
// delete unnecessary branching
let (always_taken_path, target_block, _target_block_idx) =
cond.out_proj_target_block(val).unwrap();
let (dead_path, nontarget_block, _nontarget_block_idx) =
cond.out_proj_target_block(!val).unwrap();
if nontarget_block.cfg_preds().len() <= 1 {
log::debug!(
"Schedule nontarget_block {:?} and its children to be marked as bad",
nontarget_block
);
to_be_marked_as_bad.push(nontarget_block);
}
let jmp = cond.block().new_jmp();
optimized_conds += 1;
log::debug!(
"Replace {:?}{} with {:?} to {:?}",
always_taken_path,
Spans::span_str(always_taken_path),
jmp,
target_block
);
self.graph.mark_as_bad(dead_path);
self.graph.mark_as_bad(cond);
Graph::exchange(always_taken_path, jmp);
target_block.keep_alive();
}
}
for (&node, _lattice) in &values {
match node {
Node::Store(n) if !self.required_stores.contains(&n) => {
patch(n.mem(), n.out_proj_m(), None, n.as_node())
}
Node::Load(n) => patch(n.mem(), n.out_proj_m(), n.out_proj_res(), n.as_node()),
_ => {}
};
}
for block in &to_be_marked_as_bad {
for child in block.out_nodes() {
log::debug!("Mark block child {:?} as bad", child);
self.graph.mark_as_bad(child);
}
self.graph.mark_as_bad(*block);
}
self.graph.remove_bads();
self.graph.remove_unreachable_code();
self.graph.remove_bads();
self.graph.assure_outs();
for node in self.graph.nodes() {
match node {
Node::Call(call) => {
if call.new_kind().is_some() {
let res = call
.out_proj_t_result()
.and_then(|p| p.out_nodes().next().and_then(Node::as_proj));
patch(call.mem(), call.out_proj_m(), res, call.as_node());
}
}
_ => {}
}
}
log::info!(
"Optimized {:>3} constants, {:>3} loads, {:>3} stores, \
{:>2} news, {:>2} phis and {:>2} conds \
with {:>4} node updates from {:>4} total nodes and {:>2} \
phi creations in graph {}",
folded_constants,
optimized_loads,
optimized_stores,
removed_news,
optimized_phis,
optimized_conds,
self.node_update_count,
self.node_topo_idx.len(),
self.created_phis.len(),
self.graph.entity().name_string(),
);
if folded_constants + optimized_loads + optimized_conds > 0 {
Outcome::Changed
} else {
Outcome::Unchanged
}
}
}
| 39.619371 | 100 | 0.378839 |
1d40a400d072b8de419a619bef7e7d3a524197d6 | 812 | // Copyright 2021 Vladimir Melnikov.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Convenience re-export of common members
//!
//! Like the standard library's prelude, this module simplifies importing of
//! common items. Unlike the standard prelude, the contents of this module must
//! be imported manually:
//!
//! ```
//! use zettabgp::bmp::prelude::*;
//! ```
pub use crate::bmp::*;
pub use crate::bmp::bmputl::*;
pub use crate::bmp::msginit::*;
pub use crate::bmp::msgpeer::*;
pub use crate::bmp::msgrmon::*;
pub use crate::bmp::msgterm::*;
| 32.48 | 79 | 0.699507 |
90e5465ce16f8f04cd7188d3b089c92905fd9403 | 8,769 | //! Implementation of configuration for various sources
//!
//! This module will parse the various `source.*` TOML configuration keys into a
//! structure usable by Cargo itself. Currently this is primarily used to map
//! sources to one another via the `replace-with` key in `.cargo/config`.
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use log::debug;
use url::Url;
use crate::core::{GitReference, PackageId, Source, SourceId};
use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY};
use crate::util::config::ConfigValue;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::{Config, IntoUrl};
#[derive(Clone)]
pub struct SourceConfigMap<'cfg> {
cfgs: HashMap<String, SourceConfig>,
id2name: HashMap<SourceId, String>,
config: &'cfg Config,
}
/// Configuration for a particular source, found in TOML looking like:
///
/// ```toml
/// [source.crates-io]
/// registry = 'https://github.com/rust-lang/crates.io-index'
/// replace-with = 'foo' # optional
/// ```
#[derive(Clone)]
struct SourceConfig {
// id this source corresponds to, inferred from the various defined keys in
// the configuration
id: SourceId,
// Name of the source that this source should be replaced with. This field
// is a tuple of (name, path) where path is where this configuration key was
// defined (the literal `.cargo/config` file).
replace_with: Option<(String, PathBuf)>,
}
impl<'cfg> SourceConfigMap<'cfg> {
pub fn new(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
let mut base = SourceConfigMap::empty(config)?;
if let Some(table) = config.get_table("source")? {
for (key, value) in table.val.iter() {
base.add_config(key, value)?;
}
}
Ok(base)
}
pub fn empty(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
let mut base = SourceConfigMap {
cfgs: HashMap::new(),
id2name: HashMap::new(),
config,
};
base.add(
CRATES_IO_REGISTRY,
SourceConfig {
id: SourceId::crates_io(config)?,
replace_with: None,
},
);
Ok(base)
}
pub fn config(&self) -> &'cfg Config {
self.config
}
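    /// Illustrative replacement chain (hypothetical configuration, added for
    /// clarity):
    ///
    /// ```toml
    /// [source.crates-io]
    /// replace-with = 'vendored'
    ///
    /// [source.vendored]
    /// directory = 'vendor'
    /// ```
    ///
    /// With that config, loading the crates.io `SourceId` follows the
    /// `replace-with` pointer once and returns a `ReplacedSource` backed by the
    /// directory source.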
pub fn load(
&self,
id: SourceId,
yanked_whitelist: &HashSet<PackageId>,
) -> CargoResult<Box<dyn Source + 'cfg>> {
debug!("loading: {}", id);
let mut name = match self.id2name.get(&id) {
Some(name) => name,
None => return Ok(id.load(self.config, yanked_whitelist)?),
};
let mut path = Path::new("/");
let orig_name = name;
let new_id;
loop {
let cfg = match self.cfgs.get(name) {
Some(cfg) => cfg,
None => failure::bail!(
"could not find a configured source with the \
name `{}` when attempting to lookup `{}` \
(configuration in `{}`)",
name,
orig_name,
path.display()
),
};
match cfg.replace_with {
Some((ref s, ref p)) => {
name = s;
path = p;
}
None if id == cfg.id => return Ok(id.load(self.config, yanked_whitelist)?),
None => {
new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string()));
break;
}
}
debug!("following pointer to {}", name);
if name == orig_name {
failure::bail!(
"detected a cycle of `replace-with` sources, the source \
`{}` is eventually replaced with itself \
(configuration in `{}`)",
name,
path.display()
)
}
}
let new_src = new_id.load(
self.config,
&yanked_whitelist
.iter()
.map(|p| p.map_source(id, new_id))
.collect(),
)?;
let old_src = id.load(self.config, yanked_whitelist)?;
if !new_src.supports_checksums() && old_src.supports_checksums() {
failure::bail!(
"\
cannot replace `{orig}` with `{name}`, the source `{orig}` supports \
checksums, but `{name}` does not
a lock file compatible with `{orig}` cannot be generated in this situation
",
orig = orig_name,
name = name
);
}
if old_src.requires_precise() && id.precise().is_none() {
failure::bail!(
"\
the source {orig} requires a lock file to be present first before it can be
used against vendored source code
remove the source replacement configuration, generate a lock file, and then
restore the source replacement configuration to continue the build
",
orig = orig_name
);
}
Ok(Box::new(ReplacedSource::new(id, new_id, new_src)))
}
fn add(&mut self, name: &str, cfg: SourceConfig) {
self.id2name.insert(cfg.id, name.to_string());
self.cfgs.insert(name.to_string(), cfg);
}
fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> {
let (table, _path) = cfg.table(&format!("source.{}", name))?;
let mut srcs = Vec::new();
if let Some(val) = table.get("registry") {
let url = url(val, &format!("source.{}.registry", name))?;
srcs.push(SourceId::for_registry(&url)?);
}
if let Some(val) = table.get("local-registry") {
let (s, path) = val.string(&format!("source.{}.local-registry", name))?;
let mut path = path.to_path_buf();
path.pop();
path.pop();
path.push(s);
srcs.push(SourceId::for_local_registry(&path)?);
}
if let Some(val) = table.get("directory") {
let (s, path) = val.string(&format!("source.{}.directory", name))?;
let mut path = path.to_path_buf();
path.pop();
path.pop();
path.push(s);
srcs.push(SourceId::for_directory(&path)?);
}
if let Some(val) = table.get("git") {
let url = url(val, &format!("source.{}.git", name))?;
let r#try = |s: &str| {
let val = match table.get(s) {
Some(s) => s,
None => return Ok(None),
};
let key = format!("source.{}.{}", name, s);
val.string(&key).map(Some)
};
let reference = match r#try("branch")? {
Some(b) => GitReference::Branch(b.0.to_string()),
None => match r#try("tag")? {
Some(b) => GitReference::Tag(b.0.to_string()),
None => match r#try("rev")? {
Some(b) => GitReference::Rev(b.0.to_string()),
None => GitReference::Branch("master".to_string()),
},
},
};
srcs.push(SourceId::for_git(&url, reference)?);
}
if name == "crates-io" && srcs.is_empty() {
srcs.push(SourceId::crates_io(self.config)?);
}
let mut srcs = srcs.into_iter();
let src = srcs.next().ok_or_else(|| {
failure::format_err!(
"no source URL specified for `source.{}`, need \
either `registry` or `local-registry` defined",
name
)
})?;
if srcs.next().is_some() {
failure::bail!("more than one source URL specified for `source.{}`", name)
}
let mut replace_with = None;
if let Some(val) = table.get("replace-with") {
let (s, path) = val.string(&format!("source.{}.replace-with", name))?;
replace_with = Some((s.to_string(), path.to_path_buf()));
}
self.add(
name,
SourceConfig {
id: src,
replace_with,
},
);
return Ok(());
fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
let (url, path) = cfg.string(key)?;
let url = url.into_url().chain_err(|| {
format!(
"configuration key `{}` specified an invalid \
URL (in {})",
key,
path.display()
)
})?;
Ok(url)
}
}
}
| 34.120623 | 91 | 0.500513 |
8acf9b635fa6c40feb7a2c2cab284f922ab57f89 | 23,520 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Node types of [`SparseMerkleTree`](crate::SparseMerkleTree)
//!
//! This module defines three types of patricia Merkle tree nodes: [`BranchNode`],
//! [`ExtensionNode`] and [`LeafNode`] as building blocks of a 256-bit
//! [`SparseMerkleTree`](crate::SparseMerkleTree). [`BranchNode`] represents a 4-level binary tree
//! to optimize for IOPS: it compresses a tree with 31 nodes into one node with 16 children at the
//! lowest level. [`ExtensionNode`] compresses a partial path without any fork into a single node by
//! storing the partial path inside. [`LeafNode`] stores the full key and the value hash which is
//! used as the key to query binary account blob data from the storage.
#[cfg(test)]
mod node_type_test;
use crate::nibble_path::{skip_common_prefix, NibbleIterator, NibblePath};
use bincode::{deserialize, serialize};
use crypto::{
hash::{
CryptoHash, SparseMerkleInternalHasher, SparseMerkleLeafHasher,
SPARSE_MERKLE_PLACEHOLDER_HASH,
},
HashValue,
};
use failure::{Fail, Result};
use serde::{Deserialize, Serialize};
use std::collections::hash_map::HashMap;
use types::proof::{SparseMerkleInternalNode, SparseMerkleLeafNode};
pub(crate) type Children = HashMap<u8, (HashValue, bool)>;
/// Represents a 4-level subtree with 16 children at the bottom level. Theoretically, this reduces
/// IOPS to query a tree by 4x since we compress 4 levels in a standard Merkle tree into 1 node.
/// Though we choose the same branch node structure as that of a patricia Merkle tree, the root hash
/// computation logic is similar to a 4-level sparse Merkle tree except for some customizations. See
/// the `CryptoHash` trait implementation below for details.
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct BranchNode {
// key: child index from 0 to 15, inclusive.
// value: Child node hash and a boolean whose true value indicates the child is a leaf node.
children: Children,
}
/// Node in a patricia Merkle tree. It compresses a path without any fork with a single
/// node instead of multiple single-child branch nodes.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ExtensionNode {
// The nibble path this extension node encapsulates.
nibble_path: NibblePath,
// Represents the next node down the path.
child: HashValue,
}
/// Represents an account. It has two fields: `key` is the hash of the account address and
/// `value_hash` is the hash of account state blob.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct LeafNode {
// the full key of this node
key: HashValue,
// the hash of the data blob identified by the key
value_hash: HashValue,
}
/// The explicit tag is used as a prefix in the encoded format of nodes to distinguish different
/// node discriminants.
trait Tag {
const TAG: u8;
}
// We leave 0 reserved.
impl Tag for BranchNode {
const TAG: u8 = 1;
}
impl Tag for ExtensionNode {
const TAG: u8 = 2;
}
impl Tag for LeafNode {
const TAG: u8 = 3;
}
/// Computes the hash of branch node according to [`SparseMerkleTree`](crate::SparseMerkleTree)
/// data structure in the logical view. `start` and `nibble_height` determine a subtree whose
/// root hash we want to get. For a branch node with 16 children at the bottom level, we compute
/// the root hash of it as if a full binary Merkle tree with 16 leaves as below:
///
/// ```text
///
/// 4 -> +------ root hash ------+
/// | |
/// 3 -> +---- # ----+ +---- # ----+
/// | | | |
/// 2 -> # # # #
/// / \ / \ / \ / \
/// 1 -> # # # # # # # #
/// / \ / \ / \ / \ / \ / \ / \ / \
/// 0 -> 0 1 2 3 4 5 6 7 8 9 A B C D E F
/// ^
/// height
/// ```
///
/// As illustrated above, at nibble height 0, `0..F` in hex denote 16 children hashes. Each `#`
/// means the hash of its two direct children, which will be used to generate the hash of its
/// parent with the hash of its sibling. Finally, we can get the hash of this branch node.
///
/// However, if a branch node doesn't have all 16 children at height 0 but just a few of
/// them, we have a modified hashing rule on top of what is stated above:
/// 1. From top to bottom, a node will be replaced by a leaf child if the subtree rooted at this
/// node has only one child at height 0 and it is a leaf child.
/// 2. From top to bottom, a node will be replaced by the placeholder node if the subtree rooted at
/// this node doesn't have any child at height 0. For example, if a branch node has 3 leaf nodes
/// at index 0, 3, 8, respectively, and 1 branch/extension node at index C, then the computation
/// graph will be like:
///
/// ```text
///
/// 4 -> +------ root hash ------+
/// | |
/// 3 -> +---- # ----+ +---- # ----+
/// | | | |
/// 2 -> # @ 8 #
/// / \ / \
/// 1 -> 0 3 # @
/// / \
/// 0 -> C @
/// ^
/// height
/// Note: @ denotes placeholder hash.
/// ```
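///
/// Worked note (added for clarity): in the degenerate case where the branch node
/// has exactly one child and that child is a leaf, rule 1 collapses every level,
/// so the hash of the whole branch node is simply that leaf child's hash.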
impl CryptoHash for BranchNode {
// Unused hasher.
type Hasher = SparseMerkleInternalHasher;
fn hash(&self) -> HashValue {
self.merkle_hash(
0, /* start index */
16, /* the number of leaves in the subtree of which we want the hash of root */
self.generate_bitmaps(),
)
}
}
/// Computes the hash of an [`ExtensionNode`]. Similar to [`BranchNode`], we generate
/// the hash by logically expanding it into a sparse Merkle tree. For an extension node with 2
/// nibbles, compute the final hash as follows:
///
/// ```text
///
/// #(final hash)
/// / \
/// # placeholder
/// / \
/// # placeholder
/// / \
/// placeholder #
/// / \
/// # placeholder
/// / \
/// placeholder \
/// / \
/// # placeholder
/// / \
/// # placeholder
/// / \
/// child placeholder
/// ```
///
/// The final hash is generated by iteratively hashing the concatenation of two children of each
/// node following a bottom-up order. It is worth noting that by definition [`ExtensionNode`] is
/// just a path, so each intermediate node must only have one child. When being expanded to a
/// sparse Merkle tree logically, empty nodes should be replaced by the default digest.
impl CryptoHash for ExtensionNode {
// Unused hasher.
type Hasher = SparseMerkleInternalHasher;
fn hash(&self) -> HashValue {
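        // Descriptive note (added): walk the extension path's bits from the
        // bottom of the logical tree upward; at each bit the accumulated hash is
        // combined with a placeholder sibling, with the bit selecting which side
        // the accumulated hash sits on.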
self.nibble_path.bits().rev().fold(self.child, |hash, bit| {
if bit {
SparseMerkleInternalNode::new(*SPARSE_MERKLE_PLACEHOLDER_HASH, hash).hash()
} else {
SparseMerkleInternalNode::new(hash, *SPARSE_MERKLE_PLACEHOLDER_HASH).hash()
}
})
}
}
/// Computes the hash of a [`LeafNode`].
impl CryptoHash for LeafNode {
// Unused hasher.
type Hasher = SparseMerkleLeafHasher;
fn hash(&self) -> HashValue {
SparseMerkleLeafNode::new(self.key, self.value_hash).hash()
}
}
/// The concrete node type of [`SparseMerkleTree`](crate::SparseMerkleTree).
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum Node {
/// A wrapper of [`BranchNode`].
Branch(BranchNode),
/// A wrapper of [`ExtensionNode`].
Extension(ExtensionNode),
/// A wrapper of [`LeafNode`].
Leaf(LeafNode),
}
impl From<BranchNode> for Node {
fn from(node: BranchNode) -> Self {
Node::Branch(node)
}
}
impl From<ExtensionNode> for Node {
fn from(node: ExtensionNode) -> Self {
Node::Extension(node)
}
}
impl From<LeafNode> for Node {
fn from(node: LeafNode) -> Self {
Node::Leaf(node)
}
}
impl BranchNode {
/// Creates a new branch node.
pub fn new(children: HashMap<u8, (HashValue, bool)>) -> Self {
Self { children }
}
/// Sets the `n`-th child to given hash and stores a `bool` indicating whether the child passed
/// in is a leaf node.
pub fn set_child(&mut self, n: u8, child: (HashValue, bool)) {
assert!(n < 16);
self.children.insert(n, child);
}
/// Gets the hash of the `n`-th child.
pub fn child(&self, n: u8) -> Option<HashValue> {
assert!(n < 16);
self.children.get(&n).map(|p| p.0)
}
/// Returns an `Option<bool>` indicating whether the `n`-th child is a leaf node. If the child
/// doesn't exist, returns `None`.
pub fn is_leaf(&self, n: u8) -> Option<bool> {
assert!(n < 16);
self.children.get(&n).map(|p| p.1)
}
/// Return the total number of children
pub fn num_children(&self) -> usize {
self.children.len()
}
/// Generates `child_bitmap` and `leaf_bitmap` as a pair of `u16`: child at index `i` exists if
/// `child_bitmap[i]` is set; child at index `i` is leaf node if `leaf_bitmap[i]` is set.
pub fn generate_bitmaps(&self) -> (u16, u16) {
let mut child_bitmap = 0_u16;
let mut leaf_bitmap = 0_u16;
for (k, v) in self.children.iter() {
child_bitmap |= 1u16 << k;
leaf_bitmap |= (v.1 as u16) << k;
}
// `leaf_bitmap` must be a subset of `child_bitmap`.
assert!(child_bitmap | leaf_bitmap == child_bitmap);
(child_bitmap, leaf_bitmap)
}
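    // Illustrative example (added, hypothetical values): children at indices 0
    // and 3 where only index 3 is a leaf yield
    // child_bitmap == 0b0000_0000_0000_1001 and leaf_bitmap == 0b0000_0000_0000_1000.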
/// Given a range [start, start + width), returns the sub-bitmap of that range.
fn range_bitmaps(start: u8, width: u8, bitmaps: (u16, u16)) -> (u16, u16) {
assert!(start < 16 && start % width == 0);
// A range with `start == 8` and `width == 4` will generate a mask 0b0000111100000000.
let mask = if width == 16 {
0xffff
} else {
assert!(width <= 16 && (width & (width - 1)) == 0);
(1 << width) - 1
} << start;
(bitmaps.0 & mask, bitmaps.1 & mask)
}
fn merkle_hash(
&self,
start: u8,
width: u8,
(child_bitmap, leaf_bitmap): (u16, u16),
) -> HashValue {
        // Restrict the bitmaps to the sub-range [start, start + width).
let (range_child_bitmap, range_leaf_bitmap) =
BranchNode::range_bitmaps(start, width, (child_bitmap, leaf_bitmap));
if range_child_bitmap == 0 {
// No child under this subtree
*SPARSE_MERKLE_PLACEHOLDER_HASH
} else if range_child_bitmap & (range_child_bitmap - 1) == 0
&& (range_leaf_bitmap != 0 || width == 1)
{
// Only 1 leaf child under this subtree or reach the lowest level
let only_child_index = range_child_bitmap.trailing_zeros() as u8;
self.children
.get(&only_child_index)
.unwrap_or_else(|| {
panic!(
"Corrupted branch node: child_bitmap indicates \
                         the existence of a non-existent child at index {}",
only_child_index
)
})
.0
} else {
let left_child = self.merkle_hash(start, width / 2, (child_bitmap, leaf_bitmap));
let right_child =
self.merkle_hash(start + width / 2, width / 2, (child_bitmap, leaf_bitmap));
SparseMerkleInternalNode::new(left_child, right_child).hash()
}
}
/// Gets the child and its corresponding siblings that are necessary to generate the proof for
/// the `n`-th child. If it is an existence proof, the returned child must be the `n`-th
/// child; otherwise, the returned child may be another child. See inline explanation for
/// details. When calling this function with n = 11 (node `b` in the following graph), the
/// range at each level is illustrated as a pair of square brackets:
///
/// ```text
/// 4 [f e d c b a 9 8 7 6 5 4 3 2 1 0] -> root level
/// ---------------------------------------------------------------
/// 3 [f e d c b a 9 8] [7 6 5 4 3 2 1 0] width = 8
/// chs <--┘ shs <--┘
/// 2 [f e d c] [b a 9 8] [7 6 5 4] [3 2 1 0] width = 4
/// lhs <--┘ └--> chs
/// 1 [f e] [d c] [b a] [9 8] [7 6] [5 4] [3 2] [1 0] width = 2
/// chs <--┘ └--> shs
/// 0 [f] [e] [d] [c] [b] [a] [9] [8] [7] [6] [5] [4] [3] [2] [1] [0] width = 1
/// ^ chs <--┘ └--> schs
/// | MSB|<---------------------- uint 16 ---------------------------->|LSB
/// height chs: `child_half_start` shs: `sibling_half_start`
/// ```
pub fn get_child_for_proof_and_siblings(&self, n: u8) -> (Option<HashValue>, Vec<HashValue>) {
let mut siblings = vec![];
assert!(n < 16);
let (child_bitmap, leaf_bitmap) = self.generate_bitmaps();
// Nibble height from 3 to 0.
for h in (0..4).rev() {
// Get the number of children of the branch node that each subtree at this height
// covers.
let width = 1 << h;
// Get the index of the first child belonging to the same subtree whose root, let's say
// `r` is at height `h` that the n-th child belongs to.
// Note: `child_half_start` will be always equal to `n` at height 0.
let child_half_start = (0xff << h) & n;
// Get the index of the first child belonging to the subtree whose root is the sibling
// of `r` at height `h`.
let sibling_half_start = child_half_start ^ (1 << h);
// Compute the root hash of the subtree rooted at the sibling of `r`.
siblings.push(self.merkle_hash(sibling_half_start, width, (child_bitmap, leaf_bitmap)));
let (range_child_bitmap, range_leaf_bitmap) =
BranchNode::range_bitmaps(child_half_start, width, (child_bitmap, leaf_bitmap));
if range_child_bitmap == 0 {
// No child in this range.
return (None, siblings);
} else if range_child_bitmap.count_ones() == 1
&& (range_leaf_bitmap.count_ones() == 1 || width == 1)
{
// Return the only 1 leaf child under this subtree or reach the lowest level
                // Even if this leaf child is not the n-th child, it should be returned instead of
                // `None` because its existence indirectly proves the n-th child doesn't exist.
// Please read proof format for details.
let only_child_index = range_child_bitmap.trailing_zeros() as u8;
return (
Some(
self.children
.get(&only_child_index)
.unwrap_or_else(|| {
panic!(
"Corrupted branch node: child_bitmap indicates \
                                     the existence of a non-existent child at index {}",
only_child_index
)
})
.0,
),
siblings,
);
}
}
unreachable!()
}
}
impl ExtensionNode {
/// Creates a new extension node.
pub fn new(nibble_path: NibblePath, child: HashValue) -> Self {
Self { nibble_path, child }
}
/// Gets the only child.
pub fn child(&self) -> HashValue {
self.child
}
/// Sets the child.
pub fn set_child(&mut self, child_hash: HashValue) {
self.child = child_hash;
}
/// Gets the `encoded_path`.
pub fn nibble_path(&self) -> &NibblePath {
&self.nibble_path
}
/// Gets the siblings from this extension node according to the requested nibble iterator.
/// Also return a boolean indicating whether we can stop traversing and return early.
pub fn get_siblings(&self, nibble_iter: &mut NibbleIterator) -> (Vec<HashValue>, bool) {
let mut extension_nibble_iter = self.nibble_path().nibbles();
let mut siblings = vec![
*SPARSE_MERKLE_PLACEHOLDER_HASH;
skip_common_prefix(&mut extension_nibble_iter, nibble_iter) * 4 /* 1 nibble == 4 bits */
];
// There are two possible cases after matching prefix:
// 1. Not all the nibbles of the extension node match the nibble path of the queried key.
// This means the queried key meets a default node when being matched with the extension
// node nibble path, so we can terminate the search early and return a non-existence proof
// with the proper number of siblings.
if !extension_nibble_iter.is_finished() {
let mut extension_bit_iter = extension_nibble_iter.bits();
let mut request_bit_iter = nibble_iter.bits();
let num_matched_bits =
skip_common_prefix(&mut extension_bit_iter, &mut request_bit_iter);
assert!(num_matched_bits < 4);
// Note: We have to skip 1 bit here to ensure the right result. For example, assume the
// extension node has 2 nibbles (8 bits) and only the first 5 bits are matched. The
// siblings of the queried key should include 5 default hashes followed by `#1`, which
// is the result of iteratively hashing `n` times from bottom up starting with `child`
// where `n` equals the number of bits left after matching minus 1.
//
//```text
//
// #(final hash)
// / \------------------> 1st bit \
// # placeholder \
// / \--------------------> 2nd bit \
// # placeholder } 1st nibble
// / \----------------------> 3rd bit /
// placeholder # /
// / \--------------------> 4th bit /
// # placeholder
// / \----------------------> 5th bit \
// placeholder \ \
// / \--------------------> 6th bit \
// #1 the queried key } 2nd nibble
// / \----------------------> 7th bit /
// # placeholder /
// / \------------------------> 8th bit /
// child placeholder
// ```
extension_bit_iter.next();
siblings.append(&mut vec![*SPARSE_MERKLE_PLACEHOLDER_HASH; num_matched_bits]);
siblings.push(extension_bit_iter.rev().fold(self.child, |hash, bit| {
if bit {
SparseMerkleInternalNode::new(*SPARSE_MERKLE_PLACEHOLDER_HASH, hash).hash()
} else {
SparseMerkleInternalNode::new(hash, *SPARSE_MERKLE_PLACEHOLDER_HASH).hash()
}
}));
(siblings, true /* early termination */)
} else {
// 2. All the nibbles of the extension node match the nibble path of the queried key.
// Then just return the siblings and a `false` telling the callsite to continue to
// traverse the tree.
(siblings, false /* early termination */)
}
}
}
impl LeafNode {
/// Creates a new leaf node.
pub fn new(key: HashValue, value_hash: HashValue) -> Self {
Self { key, value_hash }
}
/// Gets the `key`.
pub fn key(&self) -> HashValue {
self.key
}
/// Gets the associated `value_hash`.
pub fn value_hash(&self) -> HashValue {
self.value_hash
}
/// Sets the associated `value_hash`.
pub fn set_value_hash(&mut self, value_hash: HashValue) {
self.value_hash = value_hash;
}
}
impl Node {
/// Creates the [`Branch`](Node::Branch) variant.
pub fn new_branch(children: HashMap<u8, (HashValue, bool)>) -> Self {
Node::Branch(BranchNode::new(children))
}
/// Creates the [`Extension`](Node::Extension) variant.
pub fn new_extension(nibble_path: NibblePath, child: HashValue) -> Self {
Node::Extension(ExtensionNode::new(nibble_path, child))
}
/// Creates the [`Leaf`](Node::Leaf) variant.
pub fn new_leaf(key: HashValue, value_hash: HashValue) -> Self {
Node::Leaf(LeafNode::new(key, value_hash))
}
/// Serializes to bytes for physical storage.
pub fn encode(&self) -> Result<Vec<u8>> {
let mut out = vec![];
match self {
Node::Branch(branch_node) => {
out.push(BranchNode::TAG);
out.extend(serialize(&branch_node)?);
}
Node::Leaf(leaf_node) => {
out.push(LeafNode::TAG);
out.extend(serialize(leaf_node)?);
}
Node::Extension(extension_node) => {
out.push(ExtensionNode::TAG);
out.extend(serialize(extension_node)?);
}
}
Ok(out)
}
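    // Encoded layout (descriptive note, added): a single tag byte followed by the
    // bincode serialization of the variant, e.g. a leaf is stored as
    // [LeafNode::TAG, bincode(LeafNode { key, value_hash }) ...].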
/// Hashes are used to lookup the node in the database.
pub fn hash(&self) -> HashValue {
match self {
Node::Branch(branch_node) => branch_node.hash(),
Node::Extension(extension_node) => extension_node.hash(),
Node::Leaf(leaf_node) => leaf_node.hash(),
}
}
/// Recovers from serialized bytes in physical storage.
pub fn decode(val: &[u8]) -> Result<Node> {
if val.is_empty() {
Err(NodeDecodeError::EmptyInput)?
}
let node_tag = val[0];
match node_tag {
            BranchNode::TAG => Ok(Node::Branch(deserialize(&val[1..])?)),
            ExtensionNode::TAG => Ok(Node::Extension(deserialize(&val[1..])?)),
            LeafNode::TAG => Ok(Node::Leaf(deserialize(&val[1..])?)),
unknown_tag => Err(NodeDecodeError::UnknownTag { unknown_tag })?,
}
}
}
/// Error thrown when a [`Node`] fails to be deserialized out of a byte sequence stored in physical
/// storage, via [`Node::decode`].
#[derive(Debug, Fail, Eq, PartialEq)]
pub enum NodeDecodeError {
/// Input is empty.
#[fail(display = "Missing tag due to empty input")]
EmptyInput,
/// The first byte of the input is not a known tag representing one of the variants.
#[fail(display = "lead tag byte is unknown: {}", unknown_tag)]
UnknownTag { unknown_tag: u8 },
}
| 40.904348 | 100 | 0.539753 |
28caa6bfc583e09f9422bde56fcbb85ae235ec6e | 3,470 | use std::error::Error;
use std::fs;
use std::rc::Rc;
use task;
type ActionFn = Fn(&str) -> Result<(), Box<Error>>;
/// A rule task that matches against files. Rules are used to generate tasks from file name
/// patterns.
pub struct Rule {
/// The file pattern to match.
pub pattern: String,
/// A list of tasks that must be ran before this task.
dependencies: Vec<String>,
/// Rule action.
action: Option<Rc<ActionFn>>,
}
impl Rule {
pub fn new<S, V, F>(pattern: S, dependencies: V, action: Option<F>) -> Rule
where S: Into<String>,
V: Into<Vec<String>>,
F: Fn(&str) -> Result<(), Box<Error>> + 'static
{
Rule {
pattern: pattern.into(),
dependencies: dependencies.into(),
action: action.map(|a| Rc::new(a) as Rc<ActionFn>),
}
}
/// Checks if a file name matches the rule.
pub fn matches<S: AsRef<str>>(&self, name: S) -> bool {
if let Some(index) = self.pattern.find("%") {
let (prefix, suffix) = self.pattern.split_at(index);
let suffix = &suffix[1..];
name.as_ref().starts_with(prefix) && name.as_ref().ends_with(suffix)
} else {
&self.pattern == name.as_ref()
}
}
/// Creates a task for a given file based on the rule.
pub fn create_task<S: Into<String>>(&self, name: S) -> Option<FileTask> {
let name = name.into();
// First, check if the given filename matches.
if !self.matches(&name) {
return None;
}
// Clone the input files (dependencies).
let mut inputs = self.dependencies.clone();
// If the rule name is a pattern, determine the value of the replacement character "%".
if let Some(index) = self.pattern.find("%") {
let end = index + 1 + name.len() - self.pattern.len();
let replacement = &name[index..end];
// Expand the inputs with the corresponding names that match the output name.
inputs = inputs.into_iter()
.map(|input| input.replace("%", replacement))
.collect();
}
Some(FileTask {
inputs: inputs,
output: name,
action: self.action.clone(),
})
}
}
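// A minimal usage sketch (not from the original source) of the `%` pattern rules above:
// a "%.o" rule that depends on "%.c" turns a request for "foo.o" into a task whose only
// input is "foo.c". Only items defined in this file are used.
#[cfg(test)]
mod rule_pattern_sketch {
    use std::error::Error;
    use super::Rule;
    #[test]
    fn percent_pattern_expands_dependencies() {
        let rule = Rule::new(
            "%.o",
            vec!["%.c".to_string()],
            None::<fn(&str) -> Result<(), Box<Error>>>,
        );
        assert!(rule.matches("foo.o"));
        assert!(!rule.matches("foo.rs"));
        let task = rule.create_task("foo.o").unwrap();
        assert_eq!(task.output, "foo.o");
        assert_eq!(task.inputs, vec!["foo.c".to_string()]);
    }
}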
pub struct FileTask {
pub inputs: Vec<String>,
pub output: String,
action: Option<Rc<ActionFn>>,
}
impl task::Task for FileTask {
fn name<'a>(&'a self) -> &'a str {
&self.output
}
    /// Checks whether the task is satisfied (up to date) by comparing the file modification
    /// times of the input and output files. If any input file is newer than the output file,
    /// the task is dirty and this returns `false`.
fn satisfied(&self) -> bool {
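        // A missing or unreadable output file makes the task unsatisfied; an input whose
        // modification time cannot be read is treated as not newer than the output.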
fs::metadata(&self.output)
.and_then(|m| m.modified())
.map(|time| {
self.inputs
.iter()
.all(|input| {
fs::metadata(input)
.and_then(|m| m.modified())
.map(|t| t <= time)
.unwrap_or(true)
})
})
.unwrap_or(false)
}
fn dependencies(&self) -> &[String] {
&self.inputs
}
fn run(&self) -> Result<(), Box<Error>> {
if let Some(ref action) = self.action {
action(&self.output)
} else {
Ok(())
}
}
}
| 29.159664 | 99 | 0.518732 |
f534f744a20305f7aced7081dadf122daa14c4aa | 689 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(PartialEq)]
struct Bike {
name: String,
}
pub fn main() {
let town_bike = Bike { name: "schwinn".to_string() };
let my_bike = Bike { name: "surly".to_string() };
assert!(town_bike != my_bike);
}
| 31.318182 | 68 | 0.699565 |
9c7b7adaf9e98684d7fb48fdc0e19ce4ba1f8ca5 | 32,228 | #![cfg(feature = "test-bpf")]
mod utils;
use mpl_token_metadata::pda::find_collection_authority_account;
use mpl_token_metadata::state::Collection;
use mpl_token_metadata::state::{UseMethod, Uses};
use mpl_token_metadata::{
error::MetadataError,
state::{Key, MAX_NAME_LENGTH, MAX_SYMBOL_LENGTH, MAX_URI_LENGTH},
utils::puffed_out_string,
};
use num_traits::FromPrimitive;
use solana_program_test::*;
use solana_sdk::{
instruction::InstructionError,
signature::{Keypair, Signer},
transaction::TransactionError,
transport::TransportError,
};
use utils::*;
mod verify_collection {
use mpl_token_metadata::state::{COLLECTION_AUTHORITY_RECORD_SIZE, CollectionAuthorityRecord};
use solana_program::borsh::try_from_slice_unchecked;
use solana_sdk::transaction::Transaction;
use super::*;
#[tokio::test]
async fn success_verify_collection() {
let mut context = program_test().start_with_context().await;
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let puffed_name = puffed_out_string(&name, MAX_NAME_LENGTH);
let puffed_symbol = puffed_out_string(&symbol, MAX_SYMBOL_LENGTH);
let puffed_uri = puffed_out_string(&uri, MAX_URI_LENGTH);
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(metadata.data.name, puffed_name);
assert_eq!(metadata.data.symbol, puffed_symbol);
assert_eq!(metadata.data.uri, puffed_uri);
assert_eq!(metadata.data.seller_fee_basis_points, 10);
assert_eq!(metadata.data.creators, None);
assert_eq!(metadata.uses, uses.to_owned());
assert_eq!(
metadata.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata.collection.unwrap().verified, false);
assert_eq!(metadata.primary_sale_happened, false);
assert_eq!(metadata.is_mutable, false);
assert_eq!(metadata.mint, test_metadata.mint.pubkey());
assert_eq!(metadata.update_authority, context.payer.pubkey());
assert_eq!(metadata.key, Key::MetadataV1);
let kpbytes = &context.payer;
let kp = Keypair::from_bytes(&kpbytes.to_bytes()).unwrap();
test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&kp,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
None,
)
.await
.unwrap();
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, true);
}
#[tokio::test]
async fn fail_wrong_collection_from_authority() {
let mut context = program_test().start_with_context().await;
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let test_collection2 = Metadata::new();
test_collection2
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None
)
.await
.unwrap();
let collection_master_edition_account2 = MasterEditionV2::new(&test_collection2);
collection_master_edition_account2
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let puffed_name = puffed_out_string(&name, MAX_NAME_LENGTH);
let puffed_symbol = puffed_out_string(&symbol, MAX_SYMBOL_LENGTH);
let puffed_uri = puffed_out_string(&uri, MAX_URI_LENGTH);
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(metadata.data.name, puffed_name);
assert_eq!(metadata.data.symbol, puffed_symbol);
assert_eq!(metadata.data.uri, puffed_uri);
assert_eq!(metadata.data.seller_fee_basis_points, 10);
assert_eq!(metadata.data.creators, None);
assert_eq!(metadata.uses, uses.to_owned());
assert_eq!(
metadata.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata.collection.unwrap().verified, false);
assert_eq!(metadata.primary_sale_happened, false);
assert_eq!(metadata.is_mutable, false);
assert_eq!(metadata.mint, test_metadata.mint.pubkey());
assert_eq!(metadata.update_authority, context.payer.pubkey());
assert_eq!(metadata.key, Key::MetadataV1);
let kpbytes = &context.payer;
let kp = Keypair::from_bytes(&kpbytes.to_bytes()).unwrap();
let err = test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&kp,
test_collection2.mint.pubkey(),
collection_master_edition_account2.pubkey,
None,
)
.await
.unwrap_err();
assert_custom_error!(err, MetadataError::CollectionNotFound);
}
#[tokio::test]
async fn fail_no_collection_nft_token_standard() {
let mut context = program_test().start_with_context().await;
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let kpbytes = &context.payer;
let kp = Keypair::from_bytes(&kpbytes.to_bytes()).unwrap();
let err = test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&kp,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
None,
)
.await
.unwrap_err();
assert_custom_error!(err, MetadataError::CollectionMustBeAUniqueMasterEdition);
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, false);
}
#[tokio::test]
async fn fail_non_unique_master_edition() {
let mut context = program_test().start_with_context().await;
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create(&mut context, Some(1))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let kpbytes = &context.payer;
let kp = Keypair::from_bytes(&kpbytes.to_bytes()).unwrap();
let err = test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&kp,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
None,
)
.await
.unwrap_err();
assert_custom_error!(err, MetadataError::CollectionMustBeAUniqueMasterEdition);
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, false);
}
#[tokio::test]
async fn fail_no_master_edition() {
let mut context = program_test().start_with_context().await;
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let kpbytes = &context.payer;
let kp = Keypair::from_bytes(&kpbytes.to_bytes()).unwrap();
let err = test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&kp,
test_collection.mint.pubkey(),
test_collection.pubkey,
None,
)
.await
.unwrap_err();
assert_custom_error!(err, MetadataError::CollectionMustBeAUniqueMasterEdition);
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, false);
}
#[tokio::test]
async fn fail_collection_authority_mismatch() {
let mut context = program_test().start_with_context().await;
let collection_authority = Keypair::new();
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let err = test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&collection_authority,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
None,
)
.await
.unwrap_err();
assert_custom_error!(err, MetadataError::InvalidCollectionUpdateAuthority);
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, false);
}
#[tokio::test]
async fn success() {
let mut context = program_test().start_with_context().await;
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let puffed_name = puffed_out_string(&name, MAX_NAME_LENGTH);
let puffed_symbol = puffed_out_string(&symbol, MAX_SYMBOL_LENGTH);
let puffed_uri = puffed_out_string(&uri, MAX_URI_LENGTH);
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(metadata.data.name, puffed_name);
assert_eq!(metadata.data.symbol, puffed_symbol);
assert_eq!(metadata.data.uri, puffed_uri);
assert_eq!(metadata.data.seller_fee_basis_points, 10);
assert_eq!(metadata.data.creators, None);
assert_eq!(metadata.uses, uses.to_owned());
assert_eq!(
metadata.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata.collection.unwrap().verified, false);
assert_eq!(metadata.primary_sale_happened, false);
assert_eq!(metadata.is_mutable, false);
assert_eq!(metadata.mint, test_metadata.mint.pubkey());
assert_eq!(metadata.update_authority, context.payer.pubkey());
assert_eq!(metadata.key, Key::MetadataV1);
let kpbytes = &context.payer;
let kp = Keypair::from_bytes(&kpbytes.to_bytes()).unwrap();
test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&kp,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
None,
)
.await
.unwrap();
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, true);
}
#[tokio::test]
async fn success_verify_collection_with_authority() {
let mut context = program_test().start_with_context().await;
let new_collection_authority = Keypair::new();
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
None,
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata.collection.unwrap().verified, false);
let (record, _) = find_collection_authority_account(
&test_collection.mint.pubkey(),
&new_collection_authority.pubkey(),
);
let ix = mpl_token_metadata::instruction::approve_collection_authority(
mpl_token_metadata::id(),
record,
new_collection_authority.pubkey(),
context.payer.pubkey(),
context.payer.pubkey(),
test_collection.pubkey,
test_collection.mint.pubkey(),
);
let tx = Transaction::new_signed_with_payer(
&[ix],
Some(&context.payer.pubkey()),
&[&context.payer],
context.last_blockhash,
);
context.banks_client.process_transaction(tx).await.unwrap();
let record_account = get_account(&mut context, &record).await;
let record_data: CollectionAuthorityRecord =
try_from_slice_unchecked(&record_account.data).unwrap();
assert_eq!(record_data.key, Key::CollectionAuthorityRecord);
test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&new_collection_authority,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
Some(record),
)
.await
.unwrap();
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, true);
test_metadata
.unverify_collection(
&mut context,
test_collection.pubkey,
&new_collection_authority,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
Some(record),
)
.await
.unwrap();
let metadata_after_unverify = test_metadata.get_data(&mut context).await;
assert_eq!(metadata_after_unverify.collection.unwrap().verified, false);
}
#[tokio::test]
async fn success_set_and_verify_collection_with_authority() {
let mut context = program_test().start_with_context().await;
let new_collection_authority = Keypair::new();
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
test_metadata
.create_v2(&mut context, name, symbol, uri, None, 10, false, None, None, None)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(metadata.collection.is_none(), true);
let update_authority = context.payer.pubkey().clone();
let (record, _) = find_collection_authority_account(
&test_collection.mint.pubkey(),
&new_collection_authority.pubkey(),
);
let ix = mpl_token_metadata::instruction::approve_collection_authority(
mpl_token_metadata::id(),
record,
new_collection_authority.pubkey(),
update_authority,
context.payer.pubkey(),
test_collection.pubkey,
test_collection.mint.pubkey(),
);
let tx = Transaction::new_signed_with_payer(
&[ix],
Some(&context.payer.pubkey()),
&[&context.payer],
context.last_blockhash,
);
context.banks_client.process_transaction(tx).await.unwrap();
let record_account = get_account(&mut context, &record).await;
let record_data: CollectionAuthorityRecord =
try_from_slice_unchecked(&record_account.data).unwrap();
assert_eq!(record_data.key, Key::CollectionAuthorityRecord);
test_metadata
.set_and_verify_collection(
&mut context,
test_collection.pubkey,
&new_collection_authority,
update_authority,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
Some(record),
)
.await
.unwrap();
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata_after.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata_after.collection.unwrap().verified, true);
test_metadata
.unverify_collection(
&mut context,
test_collection.pubkey,
&new_collection_authority,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
Some(record),
)
.await
.unwrap();
let metadata_after_unverify = test_metadata.get_data(&mut context).await;
assert_eq!(metadata_after_unverify.collection.unwrap().verified, false);
}
#[tokio::test]
async fn fail_verify_collection_with_authority() {
let mut context = program_test().start_with_context().await;
let new_collection_authority = Keypair::new();
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(0))
.await
.unwrap();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let test_metadata = Metadata::new();
let uses = Some(Uses {
total: 1,
remaining: 1,
use_method: UseMethod::Single,
});
test_metadata
.create_v2(
&mut context,
name,
symbol,
uri,
None,
10,
false,
None,
Some(Collection {
key: test_collection.mint.pubkey(),
verified: false,
}),
uses.to_owned(),
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(
metadata.collection.to_owned().unwrap().key,
test_collection.mint.pubkey()
);
assert_eq!(metadata.collection.unwrap().verified, false);
let (record, _) = find_collection_authority_account(
&test_collection.mint.pubkey(),
&new_collection_authority.pubkey(),
);
let ix = mpl_token_metadata::instruction::approve_collection_authority(
mpl_token_metadata::id(),
record,
new_collection_authority.pubkey(),
context.payer.pubkey(),
context.payer.pubkey(),
test_collection.pubkey,
test_collection.mint.pubkey(),
);
let tx = Transaction::new_signed_with_payer(
&[ix],
Some(&context.payer.pubkey()),
&[&context.payer],
context.last_blockhash,
);
context.banks_client.process_transaction(tx).await.unwrap();
let account_before = context.banks_client.get_account(record).await.unwrap().unwrap();
assert_eq!(account_before.data.len(), COLLECTION_AUTHORITY_RECORD_SIZE);
let ixrevoke = mpl_token_metadata::instruction::revoke_collection_authority(
mpl_token_metadata::id(),
record,
new_collection_authority.pubkey(),
context.payer.pubkey(),
test_collection.pubkey,
test_collection.mint.pubkey(),
);
let txrevoke = Transaction::new_signed_with_payer(
&[ixrevoke],
Some(&context.payer.pubkey()),
&[&context.payer],
context.last_blockhash,
);
context
.banks_client
.process_transaction(txrevoke)
.await
.unwrap();
let account_after_none = context.banks_client.get_account(record).await.unwrap().is_none();
assert_eq!(account_after_none, true);
let err = test_metadata
.verify_collection(
&mut context,
test_collection.pubkey,
&new_collection_authority,
test_collection.mint.pubkey(),
collection_master_edition_account.pubkey,
Some(record),
)
.await
.unwrap_err();
assert_custom_error!(err, MetadataError::InvalidCollectionUpdateAuthority);
let metadata_after = test_metadata.get_data(&mut context).await;
assert_eq!(metadata_after.collection.unwrap().verified, false);
}
}
| 32.455186 | 99 | 0.517159 |
87aedf34437b7a123e795448cb4598c955a7b282 | 292 | #[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate piston;
extern crate graphics;
extern crate image;
extern crate opengl_graphics;
extern crate find_folder;
extern crate glutin_window;
pub mod colors;
pub mod consts;
pub mod visual;
pub mod app;
pub mod conf;
| 15.368421 | 29 | 0.794521 |
08337c918b0f5ebae9d50a2208c73320b2298408 | 2,121 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A "shape" is a compact encoding of a type that is used by interpreted glue.
// This substitutes for the runtime tags used by e.g. MLs.
use lib::llvm::llvm;
use lib::llvm::{True, ModuleRef, ValueRef};
use middle::trans::common::*;
use middle::trans;
use core::str;
use core::vec;
pub struct Ctxt {
next_tag_id: u16,
pad: u16,
pad2: u32
}
pub fn mk_global(ccx: @CrateContext,
name: ~str,
llval: ValueRef,
internal: bool)
-> ValueRef {
unsafe {
let llglobal = do str::as_c_str(name) |buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(llval), buf)
};
llvm::LLVMSetInitializer(llglobal, llval);
llvm::LLVMSetGlobalConstant(llglobal, True);
if internal {
::lib::llvm::SetLinkage(llglobal,
::lib::llvm::InternalLinkage);
}
return llglobal;
}
}
pub fn mk_ctxt(llmod: ModuleRef) -> Ctxt {
unsafe {
let llshapetablesty = trans::common::T_named_struct(~"shapes");
let _llshapetables = str::as_c_str(~"shapes", |buf| {
llvm::LLVMAddGlobal(llmod, llshapetablesty, buf)
});
return Ctxt {
next_tag_id: 0u16,
pad: 0u16,
pad2: 0u32
};
}
}
/*
Although these two functions are never called, they are here
for a VERY GOOD REASON. See #3670
*/
pub fn add_u16(dest: &mut ~[u8], val: u16) {
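    // Little-endian: the low byte is appended first, e.g. 0x1234u16 becomes [0x34, 0x12].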
*dest += ~[(val & 0xffu16) as u8, (val >> 8u16) as u8];
}
pub fn add_substr(dest: &mut ~[u8], src: ~[u8]) {
add_u16(&mut *dest, vec::len(src) as u16);
*dest += src;
}
| 27.192308 | 78 | 0.600189 |
fe178c5cbb349ba5cbcd48b72fbcce399a5e5bdc | 8,851 | // Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use std::env;
use hyper::server::conn::AddrIncoming;
use hyper::service;
use hyper::Server;
use hyper::StatusCode;
use hyper::{Body, Response};
use lazy_static::lazy_static;
use ccsr::{Client, DeleteError, GetByIdError, GetBySubjectError, PublishError};
lazy_static! {
pub static ref SCHEMA_REGISTRY_URL: reqwest::Url = match env::var("SCHEMA_REGISTRY_URL") {
Ok(addr) => addr.parse().expect("unable to parse SCHEMA_REGISTRY_URL"),
_ => "http://localhost:8081".parse().unwrap(),
};
}
#[tokio::test]
async fn test_client() -> Result<(), anyhow::Error> {
let client = ccsr::ClientConfig::new(SCHEMA_REGISTRY_URL.clone()).build()?;
let existing_subjects = client.list_subjects().await?;
for s in existing_subjects {
if s.starts_with("ccsr-test-") {
client.delete_subject(&s).await?;
}
}
let schema_v1 = r#"{ "type": "record", "name": "na", "fields": [
{ "name": "a", "type": "long" }
]}"#;
let schema_v2 = r#"{ "type": "record", "name": "na", "fields": [
{ "name": "a", "type": "long" },
{ "name": "b", "type": "long", "default": 0 }
]}"#;
let schema_v2_incompat = r#"{ "type": "record", "name": "na", "fields": [
{ "name": "a", "type": "string" }
]}"#;
assert_eq!(count_schemas(&client, "ccsr-test-").await?, 0);
let schema_v1_id = client.publish_schema("ccsr-test-schema", schema_v1).await?;
assert!(schema_v1_id > 0);
match client
.publish_schema("ccsr-test-schema", schema_v2_incompat)
.await
{
Err(PublishError::IncompatibleSchema) => (),
res => panic!("expected IncompatibleSchema error, got {:?}", res),
}
{
let res = client.get_schema_by_subject("ccsr-test-schema").await?;
assert_eq!(schema_v1_id, res.id);
assert_raw_schemas_eq(schema_v1, &res.raw);
}
let schema_v2_id = client.publish_schema("ccsr-test-schema", schema_v2).await?;
assert!(schema_v2_id > 0);
assert!(schema_v2_id > schema_v1_id);
assert_eq!(
schema_v1_id,
client.publish_schema("ccsr-test-schema", schema_v1).await?
);
{
let res1 = client.get_schema_by_id(schema_v1_id).await?;
let res2 = client.get_schema_by_id(schema_v2_id).await?;
assert_eq!(schema_v1_id, res1.id);
assert_eq!(schema_v2_id, res2.id);
assert_raw_schemas_eq(schema_v1, &res1.raw);
assert_raw_schemas_eq(schema_v2, &res2.raw);
}
{
let res = client.get_schema_by_subject("ccsr-test-schema").await?;
assert_eq!(schema_v2_id, res.id);
assert_raw_schemas_eq(schema_v2, &res.raw);
}
assert_eq!(count_schemas(&client, "ccsr-test-").await?, 1);
client
.publish_schema("ccsr-test-another-schema", "\"int\"")
.await?;
assert_eq!(count_schemas(&client, "ccsr-test-").await?, 2);
Ok(())
}
#[tokio::test]
async fn test_client_errors() -> Result<(), anyhow::Error> {
let client = ccsr::ClientConfig::new(SCHEMA_REGISTRY_URL.clone()).build()?;
// Get-by-id-specific errors.
match client.get_schema_by_id(i32::max_value()).await {
Err(GetByIdError::SchemaNotFound) => (),
res => panic!("expected GetError::SchemaNotFound, got {:?}", res),
}
// Get-by-subject-specific errors.
match client.get_schema_by_subject("ccsr-test-noexist").await {
Err(GetBySubjectError::SubjectNotFound) => (),
res => panic!("expected GetBySubjectError::SubjectNotFound, got {:?}", res),
}
// Publish-specific errors.
match client.publish_schema("ccsr-test-schema", "blah").await {
Err(PublishError::InvalidSchema) => (),
res => panic!("expected PublishError::InvalidSchema, got {:?}", res),
}
// Delete-specific errors.
match client.delete_subject("ccsr-test-noexist").await {
Err(DeleteError::SubjectNotFound) => (),
res => panic!("expected DeleteError::SubjectNotFound, got {:?}", res),
}
Ok(())
}
#[tokio::test]
async fn test_server_errors() -> Result<(), anyhow::Error> {
// When the schema registry gracefully reports an error by including a
// properly-formatted JSON document in the response, the specific error code
// and message should be propagated.
let client_graceful = start_server(
StatusCode::INTERNAL_SERVER_ERROR,
r#"{ "error_code": 50001, "message": "overloaded; try again later" }"#,
)?;
match client_graceful.publish_schema("foo", "bar").await {
Err(PublishError::Server {
code: 50001,
ref message,
}) if message == "overloaded; try again later" => (),
res => panic!("expected PublishError::Server, got {:?}", res),
}
match client_graceful.get_schema_by_id(0).await {
Err(GetByIdError::Server {
code: 50001,
ref message,
}) if message == "overloaded; try again later" => (),
res => panic!("expected GetByIdError::Server, got {:?}", res),
}
match client_graceful.get_schema_by_subject("foo").await {
Err(GetBySubjectError::Server {
code: 50001,
ref message,
}) if message == "overloaded; try again later" => (),
res => panic!("expected GetBySubjectError::Server, got {:?}", res),
}
match client_graceful.delete_subject("foo").await {
Err(DeleteError::Server {
code: 50001,
ref message,
}) if message == "overloaded; try again later" => (),
res => panic!("expected DeleteError::Server, got {:?}", res),
}
// If the schema registry crashes so hard that it spits out an exception
// handler in the response, we should report the HTTP status code and a
// generic message indicating that no further details were available.
let client_crash = start_server(
StatusCode::INTERNAL_SERVER_ERROR,
r#"panic! an exception occured!"#,
)?;
match client_crash.publish_schema("foo", "bar").await {
Err(PublishError::Server {
code: 500,
ref message,
}) if message == "unable to decode error details" => (),
res => panic!("expected PublishError::Server, got {:?}", res),
}
match client_crash.get_schema_by_id(0).await {
Err(GetByIdError::Server {
code: 500,
ref message,
}) if message == "unable to decode error details" => (),
res => panic!("expected GetError::Server, got {:?}", res),
}
match client_crash.get_schema_by_subject("foo").await {
Err(GetBySubjectError::Server {
code: 500,
ref message,
}) if message == "unable to decode error details" => (),
res => panic!("expected GetError::Server, got {:?}", res),
}
match client_crash.delete_subject("foo").await {
Err(DeleteError::Server {
code: 500,
ref message,
}) if message == "unable to decode error details" => (),
res => panic!("expected DeleteError::Server, got {:?}", res),
}
Ok(())
}
fn start_server(status_code: StatusCode, body: &'static str) -> Result<Client, anyhow::Error> {
let addr = {
let incoming = AddrIncoming::bind(&([127, 0, 0, 1], 0).into()).unwrap();
let addr = incoming.local_addr();
let server =
Server::builder(incoming).serve(service::make_service_fn(move |_conn| async move {
Ok::<_, hyper::Error>(service::service_fn(move |_req| async move {
Response::builder()
.status(status_code)
.body(Body::from(body))
}))
}));
tokio::spawn(async {
match server.await {
Ok(()) => (),
Err(err) => eprintln!("server error: {}", err),
}
});
addr
};
let url: reqwest::Url = format!("http://{}", addr).parse().unwrap();
ccsr::ClientConfig::new(url).build()
}
fn assert_raw_schemas_eq(schema1: &str, schema2: &str) {
let schema1: serde_json::Value = serde_json::from_str(schema1).unwrap();
let schema2: serde_json::Value = serde_json::from_str(schema2).unwrap();
assert_eq!(schema1, schema2);
}
async fn count_schemas(client: &Client, subject_prefix: &str) -> Result<usize, anyhow::Error> {
Ok(client
.list_subjects()
.await?
.iter()
.filter(|s| s.starts_with(subject_prefix))
.count())
}
| 33.911877 | 95 | 0.599254 |
1c8fa279b639156e695f87af3f73e9a1add1c122 | 297 | #[doc = "Reader of register CM3NMISEL0"]
pub type R = crate::R<u32, super::CM3NMISEL0>;
#[doc = "Reader of field `EV`"]
pub type EV_R = crate::R<u8, u8>;
impl R {
#[doc = "Bits 0:6 - EV"]
#[inline(always)]
pub fn ev(&self) -> EV_R {
EV_R::new((self.bits & 0x7f) as u8)
}
}
| 24.75 | 46 | 0.558923 |
79d1c205bd2a9f5943dfcf791e2fc03e7ecf7b0f | 4,935 | use crate::assets::shader::ShaderAssetData;
use distill::core::AssetUuid;
use distill::importer::{ImportOp, ImportedAsset, Importer, ImporterValue};
use rafx_api::{RafxShaderPackage, RafxShaderPackageVulkan};
use rafx_framework::{CookedShaderPackage, ShaderModuleHash};
use serde::{Deserialize, Serialize};
use std::io::Read;
use type_uuid::*;
// There may be a better way to do this type coercing
// fn coerce_result_str<T>(result: Result<T, &str>) -> distill::importer::Result<T> {
// let ok = result.map_err(|x| -> Box<dyn std::error::Error + Send> { Box::<dyn std::error::Error + Send + Sync>::from(x) })?;
// Ok(ok)
// }
fn coerce_result_string<T>(result: Result<T, String>) -> distill::importer::Result<T> {
let ok = result.map_err(|x| -> Box<dyn std::error::Error + Send> {
Box::<dyn std::error::Error + Send + Sync>::from(x)
})?;
Ok(ok)
}
#[derive(TypeUuid, Serialize, Deserialize, Default)]
#[uuid = "867bc278-67b5-469c-aeea-1c05da722918"]
pub struct ShaderImporterSpvState(Option<AssetUuid>);
#[derive(TypeUuid)]
#[uuid = "90fdad4b-cec1-4f59-b679-97895711b6e1"]
pub struct ShaderImporterSpv;
impl Importer for ShaderImporterSpv {
fn version_static() -> u32
where
Self: Sized,
{
4
}
fn version(&self) -> u32 {
Self::version_static()
}
type Options = ();
type State = ShaderImporterSpvState;
/// Reads the given bytes and produces assets.
#[profiling::function]
fn import(
&self,
_op: &mut ImportOp,
source: &mut dyn Read,
_options: &Self::Options,
state: &mut Self::State,
) -> distill::importer::Result<ImporterValue> {
let asset_id = state
.0
.unwrap_or_else(|| AssetUuid(*uuid::Uuid::new_v4().as_bytes()));
*state = ShaderImporterSpvState(Some(asset_id));
// Raw compiled shader
let mut spv_bytes = Vec::new();
source.read_to_end(&mut spv_bytes)?;
log::trace!(
"Import shader asset {:?} with {} bytes of code",
asset_id,
spv_bytes.len()
);
        // The hash is used in some places to identify the shader
let shader_package = RafxShaderPackage {
metal: None,
vk: Some(RafxShaderPackageVulkan::SpvBytes(spv_bytes)),
};
let shader_module_hash = ShaderModuleHash::new(&shader_package);
let shader_asset = ShaderAssetData {
shader_module_hash,
shader_package,
reflection_data: None,
};
Ok(ImporterValue {
assets: vec![ImportedAsset {
id: asset_id,
search_tags: vec![],
build_deps: vec![],
load_deps: vec![],
build_pipeline: None,
asset_data: Box::new(shader_asset),
}],
})
}
}
#[derive(TypeUuid, Serialize, Deserialize, Default)]
#[uuid = "d4fb07ce-76e6-497e-ac31-bcaeb43528aa"]
pub struct ShaderImporterCookedState(Option<AssetUuid>);
#[derive(TypeUuid)]
#[uuid = "cab0cf4c-16ff-4dbd-aae7-8705246d85d6"]
pub struct ShaderImporterCooked;
impl Importer for ShaderImporterCooked {
fn version_static() -> u32
where
Self: Sized,
{
4
}
fn version(&self) -> u32 {
Self::version_static()
}
type Options = ();
type State = ShaderImporterCookedState;
/// Reads the given bytes and produces assets.
#[profiling::function]
fn import(
&self,
_op: &mut ImportOp,
source: &mut dyn Read,
_options: &Self::Options,
state: &mut Self::State,
) -> distill::importer::Result<ImporterValue> {
let asset_id = state
.0
.unwrap_or_else(|| AssetUuid(*uuid::Uuid::new_v4().as_bytes()));
*state = ShaderImporterCookedState(Some(asset_id));
        // Bincode-serialized cooked shader package
let mut bytes = Vec::new();
source.read_to_end(&mut bytes)?;
let cooked_shader: CookedShaderPackage = coerce_result_string(
bincode::deserialize::<CookedShaderPackage>(&bytes)
.map_err(|x| format!("Failed to deserialize cooked shader: {:?}", x)),
)?;
log::trace!(
"Import shader asset {:?} with hash {:?}",
asset_id,
cooked_shader.hash,
);
let shader_asset = ShaderAssetData {
shader_module_hash: cooked_shader.hash,
shader_package: cooked_shader.shader_package,
reflection_data: Some(cooked_shader.entry_points),
};
Ok(ImporterValue {
assets: vec![ImportedAsset {
id: asset_id,
search_tags: vec![],
build_deps: vec![],
load_deps: vec![],
build_pipeline: None,
asset_data: Box::new(shader_asset),
}],
})
}
}
| 29.550898 | 130 | 0.5846 |
16116a017948492163275c30aa516af95bbda7a3 | 9,832 | /// Construct a `serde_json::Value` from a JSON literal.
///
/// ```edition2018
/// # use serde_json::json;
/// #
/// let value = json!({
/// "code": 200,
/// "success": true,
/// "payload": {
/// "features": [
/// "serde",
/// "json"
/// ]
/// }
/// });
/// ```
///
/// Variables or expressions can be interpolated into the JSON literal. Any type
/// interpolated into an array element or object value must implement Serde's
/// `Serialize` trait, while any type interpolated into a object key must
/// implement `Into<String>`. If the `Serialize` implementation of the
/// interpolated type decides to fail, or if the interpolated type contains a
/// map with non-string keys, the `json!` macro will panic.
///
/// ```edition2018
/// # use serde_json::json;
/// #
/// let code = 200;
/// let features = vec!["serde", "json"];
///
/// let value = json!({
/// "code": code,
/// "success": code == 200,
/// "payload": {
/// features[0]: features[1]
/// }
/// });
/// ```
///
/// Trailing commas are allowed inside both arrays and objects.
///
/// ```edition2018
/// # use serde_json::json;
/// #
/// let value = json!([
/// "notice",
/// "the",
/// "trailing",
/// "comma -->",
/// ]);
/// ```
#[macro_export(local_inner_macros)]
macro_rules! json {
// Hide distracting implementation details from the generated rustdoc.
($($json:tt)+) => {
json_internal!($($json)+)
};
}
// Rocket relies on this because they export their own `json!` with a different
// doc comment than ours, and various Rust bugs prevent them from calling our
// `json!` from their `json!` so they call `json_internal!` directly. Check with
// @SergioBenitez before making breaking changes to this macro.
//
// Changes are fine as long as `json_internal!` does not call any new helper
// macros and can still be invoked as `json_internal!($($json)+)`.
#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! json_internal {
//////////////////////////////////////////////////////////////////////////
// TT muncher for parsing the inside of an array [...]. Produces a vec![...]
// of the elements.
//
// Must be invoked as: json_internal!(@array [] $($tt)*)
//////////////////////////////////////////////////////////////////////////
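    //
    // For example, `json_internal!(@array [] 1, 2)` munches the first element into
    // `json_internal!(@array [json_internal!(1),] 2)`, then the last element into
    // `json_internal!(@array [json_internal!(1), json_internal!(2)])`, which expands
    // to `json_internal_vec![json_internal!(1), json_internal!(2)]`.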
// Done with trailing comma.
(@array [$($elems:expr,)*]) => {
json_internal_vec![$($elems,)*]
};
// Done without trailing comma.
(@array [$($elems:expr),*]) => {
json_internal_vec![$($elems),*]
};
// Next element is `null`.
(@array [$($elems:expr,)*] null $($rest:tt)*) => {
json_internal!(@array [$($elems,)* json_internal!(null)] $($rest)*)
};
// Next element is `true`.
(@array [$($elems:expr,)*] true $($rest:tt)*) => {
json_internal!(@array [$($elems,)* json_internal!(true)] $($rest)*)
};
// Next element is `false`.
(@array [$($elems:expr,)*] false $($rest:tt)*) => {
json_internal!(@array [$($elems,)* json_internal!(false)] $($rest)*)
};
// Next element is an array.
(@array [$($elems:expr,)*] [$($array:tt)*] $($rest:tt)*) => {
json_internal!(@array [$($elems,)* json_internal!([$($array)*])] $($rest)*)
};
// Next element is a map.
(@array [$($elems:expr,)*] {$($map:tt)*} $($rest:tt)*) => {
json_internal!(@array [$($elems,)* json_internal!({$($map)*})] $($rest)*)
};
// Next element is an expression followed by comma.
(@array [$($elems:expr,)*] $next:expr, $($rest:tt)*) => {
json_internal!(@array [$($elems,)* json_internal!($next),] $($rest)*)
};
// Last element is an expression with no trailing comma.
(@array [$($elems:expr,)*] $last:expr) => {
json_internal!(@array [$($elems,)* json_internal!($last)])
};
// Comma after the most recent element.
(@array [$($elems:expr),*] , $($rest:tt)*) => {
json_internal!(@array [$($elems,)*] $($rest)*)
};
// Unexpected token after most recent element.
(@array [$($elems:expr),*] $unexpected:tt $($rest:tt)*) => {
json_unexpected!($unexpected)
};
//////////////////////////////////////////////////////////////////////////
// TT muncher for parsing the inside of an object {...}. Each entry is
// inserted into the given map variable.
//
// Must be invoked as: json_internal!(@object $map () ($($tt)*) ($($tt)*))
//
// We require two copies of the input tokens so that we can match on one
// copy and trigger errors on the other copy.
//////////////////////////////////////////////////////////////////////////
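    //
    // For example, `json_internal!(@object map () ("a" : 1) ("a" : 1))` munches the key
    // into `json_internal!(@object map ("a") (: 1) (: 1))`, parses the value via
    // `json_internal!(@object map ["a"] (json_internal!(1)))`, and finally emits
    // `let _ = map.insert(("a").into(), json_internal!(1));`.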
// Done.
(@object $object:ident () () ()) => {};
// Insert the current entry followed by trailing comma.
(@object $object:ident [$($key:tt)+] ($value:expr) , $($rest:tt)*) => {
let _ = $object.insert(($($key)+).into(), $value);
json_internal!(@object $object () ($($rest)*) ($($rest)*));
};
// Current entry followed by unexpected token.
(@object $object:ident [$($key:tt)+] ($value:expr) $unexpected:tt $($rest:tt)*) => {
json_unexpected!($unexpected);
};
// Insert the last entry without trailing comma.
(@object $object:ident [$($key:tt)+] ($value:expr)) => {
let _ = $object.insert(($($key)+).into(), $value);
};
// Next value is `null`.
(@object $object:ident ($($key:tt)+) (: null $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!(null)) $($rest)*);
};
// Next value is `true`.
(@object $object:ident ($($key:tt)+) (: true $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!(true)) $($rest)*);
};
// Next value is `false`.
(@object $object:ident ($($key:tt)+) (: false $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!(false)) $($rest)*);
};
// Next value is an array.
(@object $object:ident ($($key:tt)+) (: [$($array:tt)*] $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!([$($array)*])) $($rest)*);
};
// Next value is a map.
(@object $object:ident ($($key:tt)+) (: {$($map:tt)*} $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!({$($map)*})) $($rest)*);
};
// Next value is an expression followed by comma.
(@object $object:ident ($($key:tt)+) (: $value:expr , $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!($value)) , $($rest)*);
};
// Last value is an expression with no trailing comma.
(@object $object:ident ($($key:tt)+) (: $value:expr) $copy:tt) => {
json_internal!(@object $object [$($key)+] (json_internal!($value)));
};
// Missing value for last entry. Trigger a reasonable error message.
(@object $object:ident ($($key:tt)+) (:) $copy:tt) => {
// "unexpected end of macro invocation"
json_internal!();
};
// Missing colon and value for last entry. Trigger a reasonable error
// message.
(@object $object:ident ($($key:tt)+) () $copy:tt) => {
// "unexpected end of macro invocation"
json_internal!();
};
// Misplaced colon. Trigger a reasonable error message.
(@object $object:ident () (: $($rest:tt)*) ($colon:tt $($copy:tt)*)) => {
// Takes no arguments so "no rules expected the token `:`".
json_unexpected!($colon);
};
// Found a comma inside a key. Trigger a reasonable error message.
(@object $object:ident ($($key:tt)*) (, $($rest:tt)*) ($comma:tt $($copy:tt)*)) => {
// Takes no arguments so "no rules expected the token `,`".
json_unexpected!($comma);
};
// Key is fully parenthesized. This avoids clippy double_parens false
// positives because the parenthesization may be necessary here.
(@object $object:ident () (($key:expr) : $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object ($key) (: $($rest)*) (: $($rest)*));
};
// Munch a token into the current key.
(@object $object:ident ($($key:tt)*) ($tt:tt $($rest:tt)*) $copy:tt) => {
json_internal!(@object $object ($($key)* $tt) ($($rest)*) ($($rest)*));
};
//////////////////////////////////////////////////////////////////////////
// The main implementation.
//
// Must be invoked as: json_internal!($($json)+)
//////////////////////////////////////////////////////////////////////////
(null) => {
$crate::Value::Null
};
(true) => {
$crate::Value::Bool(true)
};
(false) => {
$crate::Value::Bool(false)
};
([]) => {
$crate::Value::Array(json_internal_vec![])
};
([ $($tt:tt)+ ]) => {
$crate::Value::Array(json_internal!(@array [] $($tt)+))
};
({}) => {
$crate::Value::Object($crate::Map::new())
};
({ $($tt:tt)+ }) => {
$crate::Value::Object({
let mut object = $crate::Map::new();
json_internal!(@object object () ($($tt)+) ($($tt)+));
object
})
};
// Any Serialize type: numbers, strings, struct literals, variables etc.
// Must be below every other rule.
($other:expr) => {
$crate::to_value(&$other).unwrap()
};
}
// The json_internal macro above cannot invoke vec directly because it uses
// local_inner_macros. A vec invocation there would resolve to $crate::vec.
// Instead invoke vec here outside of local_inner_macros.
#[macro_export]
#[doc(hidden)]
macro_rules! json_internal_vec {
($($content:tt)*) => {
vec![$($content)*]
};
}
#[macro_export]
#[doc(hidden)]
macro_rules! json_unexpected {
() => {};
}
| 33.556314 | 92 | 0.522478 |
1811316414db935ea01ca8f2454c3128b54e8d35 | 1,607 | use futures_util::StreamExt;
use std::net::SocketAddr;
use tls_listener::{AsyncAccept, TlsListener};
use tokio::io::{copy, split};
use tokio::net::{TcpListener, TcpStream};
use tokio::signal::ctrl_c;
#[cfg(all(
feature = "native-tls",
not(any(feature = "rustls", feature = "openssl"))
))]
use tokio_native_tls::TlsStream;
#[cfg(all(
feature = "openssl",
not(any(feature = "rustls", feature = "native-tls"))
))]
use tokio_openssl::SslStream as TlsStream;
#[cfg(feature = "rustls")]
use tokio_rustls::server::TlsStream;
mod tls_config;
use tls_config::tls_acceptor;
#[inline]
async fn handle_stream(stream: TlsStream<TcpStream>) {
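    // Echo: split the TLS stream and copy every byte read straight back to the writer half.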
let (mut reader, mut writer) = split(stream);
match copy(&mut reader, &mut writer).await {
Ok(cnt) => eprintln!("Processed {} bytes", cnt),
Err(err) => eprintln!("Error during copy: {}", err),
};
}
/// For example try opening and closing a connection with:
/// `echo "Q" | openssl s_client -connect localhost:3000`
#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let addr: SocketAddr = ([127, 0, 0, 1], 3000).into();
let listener = TcpListener::bind(&addr).await?.until(ctrl_c());
TlsListener::new(tls_acceptor(), listener)
.for_each_concurrent(None, |s| async {
match s {
Ok(stream) => {
handle_stream(stream).await;
}
Err(e) => {
eprintln!("Error accepting connection: {:?}", e);
}
}
})
.await;
Ok(())
}
| 29.218182 | 69 | 0.594897 |
5b2da5deddea42156987659c1681db1e7fee761f | 2,620 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Defines constants for enum type values
//! See the following Diem JSON-RPC response type documents for more details:
//! * https://github.com/diem/diem/blob/master/json-rpc/docs/type_account.md#type-account
//! * https://github.com/diem/diem/blob/master/json-rpc/docs/type_event.md#event-data
//! * https://github.com/diem/diem/blob/master/json-rpc/docs/type_transaction.md#type-vmstatus
//! * https://github.com/diem/diem/blob/master/json-rpc/docs/type_transaction.md#type-transactiondata
// AccountRole#type field values
pub const ACCOUNT_ROLE_UNKNOWN: &str = "unknown";
pub const ACCOUNT_ROLE_CHILD_VASP: &str = "child_vasp";
pub const ACCOUNT_ROLE_PARENT_VASP: &str = "parent_vasp";
pub const ACCOUNT_ROLE_DESIGNATED_DEALER: &str = "designated_dealer";
// EventData#type field values
pub const EVENT_DATA_UNKNOWN: &str = "unknown";
pub const EVENT_DATA_BURN: &str = "burn";
pub const EVENT_DATA_CANCEL_BURN: &str = "cancelburn";
pub const EVENT_DATA_MINT: &str = "mint";
pub const EVENT_DATA_TO_XDX_EXCHANGE_RATE_UPDATE: &str = "to_xdx_exchange_rate_update";
pub const EVENT_DATA_PREBURN: &str = "preburn";
pub const EVENT_DATA_RECEIVED_PAYMENT: &str = "receivedpayment";
pub const EVENT_DATA_SENT_PAYMENT: &str = "sentpayment";
pub const EVENT_DATA_NEW_EPOCH: &str = "newepoch";
pub const EVENT_DATA_NEW_BLOCK: &str = "newblock";
pub const EVENT_DATA_RECEIVED_MINT: &str = "receivedmint";
pub const EVENT_DATA_COMPLIANCE_KEY_ROTATION: &str = "compliancekeyrotation";
pub const EVENT_DATA_BASE_URL_ROTATION: &str = "baseurlrotation";
pub const EVENT_DATA_CREATE_ACCOUNT: &str = "createaccount";
pub const EVENT_DATA_ADMIN_TRANSACTION: &str = "admintransaction";
// VMStatus#type field values
pub const VM_STATUS_EXECUTED: &str = "executed";
pub const VM_STATUS_OUT_OF_GAS: &str = "out_of_gas";
pub const VM_STATUS_MOVE_ABORT: &str = "move_abort";
pub const VM_STATUS_EXECUTION_FAILURE: &str = "execution_failure";
pub const VM_STATUS_MISC_ERROR: &str = "miscellaneous_error";
// TransactionData#type field values
pub const TRANSACTION_DATA_BLOCK_METADATA: &str = "blockmetadata";
pub const TRANSACTION_DATA_WRITE_SET: &str = "writeset";
pub const TRANSACTION_DATA_USER: &str = "user";
pub const TRANSACTION_DATA_UNKNOWN: &str = "unknown";
// Script#type field values; only the unknown type is set here.
// For other types, please see https://github.com/diem/diem/blob/master/language/diem-framework/transaction_scripts/doc/transaction_script_documentation.md for all available script names.
pub const SCRIPT_UNKNOWN: &str = "unknown";
| 52.4 | 182 | 0.785496 |
1c7eb68f1493e630bfaa756c72449635f14d4665 | 2,399 | use crate::raytracer::hit::{face_normal, Hit};
use crate::raytracer::material::Material;
use crate::raytracer::ray::Ray;
use crate::raytracer::vector3d::{blend_vectors, dot, is_in_interval, Vector3d};
#[derive(Copy, Clone)]
pub struct Sphere {
pub center: Vector3d,
pub radius: f64,
pub material: Material,
pub speed: Vector3d,
pub mass: f64,
pub extra_brightness: f64,
pub center_old: Vector3d,
}
impl Sphere {
#[inline(always)]
fn calculate_hit(&self, ray: &Ray, t: f64, center: &Vector3d) -> Hit {
let p = ray.at(t);
let outward_normal = (p - ¢er) / self.radius;
let (front_face, normal) = face_normal(ray, &outward_normal);
Hit {
position: p,
t,
normal,
front_face,
material: Material {
albedo: self.material.albedo + &(self.material.albedo * self.extra_brightness),
reflectiveness: self.material.reflectiveness - self.extra_brightness,
reflection_fuzz: self.material.reflection_fuzz + self.extra_brightness,
},
}
}
#[inline(always)]
pub fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<Hit> {
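        // Ray-sphere intersection: with O = ray origin, D = ray direction and C = the
        // sphere center at this frame time, solving |O + t*D - C|^2 = r^2 gives the
        // quadratic a*t^2 + 2*half_b*t + c = 0 where
        //   a      = D . D              (ray_direction_squared_length)
        //   half_b = (O - C) . D
        //   c      = |O - C|^2 - r^2
        // A positive discriminant half_b^2 - a*c means the ray crosses the sphere; the
        // two roots below are the front and back intersection parameters.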
let frame_time_center = self.center_at_frame_time(ray.frame_time);
let ray_origin_to_center = ray.origin - &frame_time_center;
        let ray_direction_squared_length = ray.direction.length_squared();
        let half_b = dot(&ray_origin_to_center, &ray.direction);
        let c = ray_origin_to_center.length_squared() - self.radius * self.radius;
        let discriminant = half_b * half_b - ray_direction_squared_length * c;
        if discriminant > 0.0 {
            let root = discriminant.sqrt();
            let t_front = (-half_b - root) / ray_direction_squared_length;
            if is_in_interval(t_front, t_min, t_max) {
                return Some(self.calculate_hit(ray, t_front, &frame_time_center));
            }
            let t_back = (-half_b + root) / ray_direction_squared_length;
if is_in_interval(t_back, t_min, t_max) {
return Some(self.calculate_hit(ray, t_back, &frame_time_center));
}
}
None
}
#[inline(always)]
pub fn center_at_frame_time(&self, frame_time: f64) -> Vector3d {
return blend_vectors(&self.center, &self.center_old, frame_time);
}
}
| 38.079365 | 95 | 0.620675 |
bfa79edb94c5c7e919aedcf461617fe8be62c748 | 2,882 | use regex::Regex;
use std::error;
use std::fmt;
// Describes a dice roll type
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct DiceType {
pub n_dice: i32,
pub die_type: i32,
pub bonus: i32,
}
impl DiceType {
pub fn new(n_dice: i32, die_type: i32, bonus: i32) -> Self {
DiceType {
n_dice,
die_type,
bonus,
}
}
}
impl Default for DiceType {
fn default() -> DiceType {
DiceType {
n_dice: 1,
die_type: 4,
bonus: 0,
}
}
}
#[derive(Debug, Clone)]
pub struct DiceParseError;
impl std::fmt::Display for DiceParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Invalid dice string")
}
}
impl error::Error for DiceParseError {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
// Generic error, underlying cause isn't tracked.
None
}
}
#[allow(dead_code)]
// Parses a dice string of the form "1d6+3", "3d8-4", or "1d20".
#[cfg(feature = "parsing")]
pub fn parse_dice_string(dice: &str) -> Result<DiceType, DiceParseError> {
lazy_static! {
static ref DICE_RE: Regex = Regex::new(r"(\d+)d(\d+)([\+\-]\d+)?").unwrap();
}
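    // Capture groups: (1) number of dice, (2) die size, (3) optional signed bonus;
    // e.g. "3d6-2" yields n_dice = 3, die_type = 6, bonus = -2.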
let mut result: DiceType = DiceType::default();
let mut did_something = false;
for cap in DICE_RE.captures_iter(dice) {
did_something = true;
if let Some(group) = cap.get(1) {
match group.as_str().parse::<i32>() {
Ok(number) => result.n_dice = number,
Err(_) => return Err(DiceParseError {}),
}
} else {
return Err(DiceParseError {});
}
if let Some(group) = cap.get(2) {
match group.as_str().parse::<i32>() {
Ok(number) => result.die_type = number,
Err(_) => return Err(DiceParseError {}),
}
} else {
return Err(DiceParseError {});
}
if let Some(group) = cap.get(3) {
match group.as_str().parse::<i32>() {
Ok(number) => result.bonus = number,
Err(_) => return Err(DiceParseError {}),
}
}
}
if !did_something {
return Err(DiceParseError {});
}
Ok(result)
}
#[cfg(test)]
mod tests {
use super::{parse_dice_string, DiceType};
#[test]
fn parse_1d6() {
assert_eq!(parse_dice_string("1d6").unwrap(), DiceType::new(1, 6, 0));
}
#[test]
fn parse_1d20plus4() {
assert_eq!(
parse_dice_string("1d20+4").unwrap(),
DiceType::new(1, 20, 4)
);
}
#[test]
fn parse_3d6minus2() {
assert_eq!(parse_dice_string("3d6-2").unwrap(), DiceType::new(3, 6, -2));
}
#[test]
fn fail_parsing() {
assert!(parse_dice_string("blah").is_err());
}
}
| 24.844828 | 84 | 0.526371 |
0a9f70f8013cf0ea5f8eddf4d5b0116e8580b9b2 | 9,739 | use glib_sys;
use gst_audio_sys;
use glib::translate::*;
use glib::subclass::prelude::*;
use gst::subclass::prelude::*;
use gst::LoggableError;
use gst_base::subclass::prelude::*;
use AudioRingBufferSpec;
use AudioSink;
use AudioSinkClass;
pub trait AudioSinkImpl: AudioSinkImplExt + BaseSinkImpl + Send + Sync + 'static {
fn close(&self, sink: &mut AudioSink) -> Result<(), LoggableError> {
self.parent_close(sink)
}
fn delay(&self, sink: &AudioSink) -> u32 {
self.parent_delay(sink)
}
fn open(&self, sink: &AudioSink) -> Result<(), LoggableError> {
self.parent_open(sink)
}
fn prepare(
&self,
sink: &AudioSink,
spec: &mut AudioRingBufferSpec,
) -> Result<(), LoggableError> {
AudioSinkImplExt::parent_prepare(self, sink, spec)
}
fn unprepare(&self, sink: &AudioSink) -> Result<(), LoggableError> {
self.parent_unprepare(sink)
}
fn write(&self, sink: &AudioSink, audio_data: &[u8]) -> Result<i32, LoggableError>;
}
pub trait AudioSinkImplExt {
fn parent_close(&self, sink: &mut AudioSink) -> Result<(), LoggableError>;
fn parent_delay(&self, sink: &AudioSink) -> u32;
fn parent_open(&self, sink: &AudioSink) -> Result<(), LoggableError>;
fn parent_prepare(
&self,
sink: &AudioSink,
spec: &mut AudioRingBufferSpec,
) -> Result<(), LoggableError>;
fn parent_unprepare(&self, sink: &AudioSink) -> Result<(), LoggableError>;
fn parent_write(&self, sink: &AudioSink, audio_data: &[u8]) -> Result<i32, LoggableError>;
}
impl<T: AudioSinkImpl + ObjectImpl> AudioSinkImplExt for T {
fn parent_close(&self, sink: &mut AudioSink) -> Result<(), LoggableError> {
unsafe {
let data = self.get_type_data();
let parent_class =
data.as_ref().get_parent_class() as *mut gst_audio_sys::GstAudioSinkClass;
let f = match (*parent_class).close {
None => return Ok(()),
Some(f) => f,
};
gst_result_from_gboolean!(
f(sink.to_glib_none().0),
gst::CAT_RUST,
"Failed to close element using the parent function"
)
}
}
fn parent_delay(&self, sink: &AudioSink) -> u32 {
unsafe {
let data = self.get_type_data();
let parent_class =
data.as_ref().get_parent_class() as *mut gst_audio_sys::GstAudioSinkClass;
let f = match (*parent_class).delay {
Some(f) => f,
None => return 0,
};
f(sink.to_glib_none().0)
}
}
fn parent_open(&self, sink: &AudioSink) -> Result<(), LoggableError> {
unsafe {
let data = self.get_type_data();
let parent_class =
data.as_ref().get_parent_class() as *mut gst_audio_sys::GstAudioSinkClass;
let f = match (*parent_class).open {
Some(f) => f,
None => return Ok(()),
};
gst_result_from_gboolean!(
f(sink.to_glib_none().0),
gst::CAT_RUST,
"Failed to open element using the parent function"
)
}
}
fn parent_prepare(
&self,
sink: &AudioSink,
spec: &mut AudioRingBufferSpec,
) -> Result<(), LoggableError> {
unsafe {
let data = self.get_type_data();
let parent_class =
data.as_ref().get_parent_class() as *mut gst_audio_sys::GstAudioSinkClass;
let f = match (*parent_class).prepare {
Some(f) => f,
None => return Ok(()),
};
gst_result_from_gboolean!(
f(sink.to_glib_none().0, &mut spec.0),
gst::CAT_RUST,
"Failed to prepare element using the parent function"
)
}
}
fn parent_unprepare(&self, sink: &AudioSink) -> Result<(), LoggableError> {
unsafe {
let data = self.get_type_data();
let parent_class =
data.as_ref().get_parent_class() as *mut gst_audio_sys::GstAudioSinkClass;
let f = match (*parent_class).unprepare {
Some(f) => f,
None => {
return Err(gst::gst_loggable_error!(
gst::CAT_RUST,
"Unprepare is not implemented!"
))
}
};
gst_result_from_gboolean!(
f(sink.to_glib_none().0),
gst::CAT_RUST,
"Failed to unprepare element using the parent function"
)
}
}
fn parent_write(&self, sink: &AudioSink, buffer: &[u8]) -> Result<i32, LoggableError> {
unsafe {
let data = self.get_type_data();
let parent_class =
data.as_ref().get_parent_class() as *mut gst_audio_sys::GstAudioSinkClass;
let f = match (*parent_class).write {
Some(f) => f,
None => return Ok(-1),
};
let buffer_ptr = buffer.as_ptr() as *const _ as *mut _;
let ret = f(sink.to_glib_none().0, buffer_ptr, buffer.len() as u32);
if ret > 0 {
Ok(ret)
} else {
Err(gst::gst_loggable_error!(
gst::CAT_RUST,
"Failed to write using the parent function"
))
}
}
}
}
unsafe impl<T: ObjectSubclass + AudioSinkImpl + BaseSinkImpl> IsSubclassable<T> for AudioSinkClass
where
<T as ObjectSubclass>::Instance: PanicPoison,
{
fn override_vfuncs(&mut self) {
<gst_base::BaseSinkClass as IsSubclassable<T>>::override_vfuncs(self);
unsafe {
let klass = &mut *(self as *mut Self as *mut gst_audio_sys::GstAudioSinkClass);
klass.close = Some(audiosink_close::<T>);
klass.delay = Some(audiosink_delay::<T>);
klass.open = Some(audiosink_open::<T>);
klass.prepare = Some(audiosink_prepare::<T>);
klass.unprepare = Some(audiosink_unprepare::<T>);
klass.write = Some(audiosink_write::<T>);
}
}
}
unsafe extern "C" fn audiosink_close<T: ObjectSubclass>(
ptr: *mut gst_audio_sys::GstAudioSink,
) -> glib_sys::gboolean
where
T: AudioSinkImpl,
T::Instance: PanicPoison,
{
let instance = &*(ptr as *mut T::Instance);
let imp = instance.get_impl();
let mut wrap: AudioSink = from_glib_borrow(ptr);
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
match imp.close(&mut wrap) {
Ok(()) => true,
Err(err) => {
err.log_with_object(&wrap);
false
}
}
})
.to_glib()
}
unsafe extern "C" fn audiosink_delay<T: ObjectSubclass>(
ptr: *mut gst_audio_sys::GstAudioSink,
) -> u32
where
T: AudioSinkImpl,
T::Instance: PanicPoison,
{
let instance = &*(ptr as *mut T::Instance);
let imp = instance.get_impl();
let wrap: AudioSink = from_glib_borrow(ptr);
gst_panic_to_error!(&wrap, &instance.panicked(), 0, { imp.delay(&wrap) })
}
unsafe extern "C" fn audiosink_open<T: ObjectSubclass>(
ptr: *mut gst_audio_sys::GstAudioSink,
) -> glib_sys::gboolean
where
T: AudioSinkImpl,
T::Instance: PanicPoison,
{
let instance = &*(ptr as *mut T::Instance);
let imp = instance.get_impl();
let wrap: AudioSink = from_glib_borrow(ptr);
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
match imp.open(&wrap) {
Ok(()) => true,
Err(err) => {
err.log_with_object(&wrap);
false
}
}
})
.to_glib()
}
unsafe extern "C" fn audiosink_prepare<T: ObjectSubclass>(
ptr: *mut gst_audio_sys::GstAudioSink,
spec: *mut gst_audio_sys::GstAudioRingBufferSpec,
) -> glib_sys::gboolean
where
T: AudioSinkImpl,
T::Instance: PanicPoison,
{
let instance = &*(ptr as *mut T::Instance);
let imp = instance.get_impl();
let wrap: AudioSink = from_glib_borrow(ptr);
let spec = &mut *(spec as *mut AudioRingBufferSpec);
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
match AudioSinkImpl::prepare(imp, &wrap, spec) {
Ok(()) => true,
Err(err) => {
err.log_with_object(&wrap);
false
}
}
})
.to_glib()
}
unsafe extern "C" fn audiosink_unprepare<T: ObjectSubclass>(
ptr: *mut gst_audio_sys::GstAudioSink,
) -> glib_sys::gboolean
where
T: AudioSinkImpl,
T::Instance: PanicPoison,
{
let instance = &*(ptr as *mut T::Instance);
let imp = instance.get_impl();
let wrap: AudioSink = from_glib_borrow(ptr);
gst_panic_to_error!(&wrap, &instance.panicked(), false, {
match imp.unprepare(&wrap) {
Ok(()) => true,
Err(err) => {
err.log_with_object(&wrap);
false
}
}
})
.to_glib()
}
unsafe extern "C" fn audiosink_write<T: ObjectSubclass>(
ptr: *mut gst_audio_sys::GstAudioSink,
data: glib_sys::gpointer,
length: u32,
) -> i32
where
T: AudioSinkImpl,
T::Instance: PanicPoison,
{
let instance = &*(ptr as *mut T::Instance);
let imp = instance.get_impl();
let wrap: AudioSink = from_glib_borrow(ptr);
let data_slice = std::slice::from_raw_parts(data as *const u8, length as usize);
gst_panic_to_error!(&wrap, &instance.panicked(), -1, {
imp.write(&wrap, data_slice).unwrap_or(-1)
})
}
| 30.91746 | 98 | 0.556423 |
f4c74834c789d7429c35bbfb6320cc8f5876b758 | 9,173 | use crate::Num;
use proc_macro2::{Ident, Span, TokenStream};
use quote::{quote, ToTokens};
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
pub(crate) mod deku_read;
pub(crate) mod deku_write;
#[cfg(feature = "proc-macro-crate")]
fn get_crate_name() -> Ident {
let crate_name = ::proc_macro_crate::crate_name("deku").unwrap_or_else(|_err| {
// default if not found
String::from("deku")
});
Ident::new(&crate_name, Span::call_site())
}
// proc-macro-crate depends on std; for no_std builds, fall back to the default name. Sorry.
#[cfg(not(feature = "proc-macro-crate"))]
fn get_crate_name() -> Ident {
Ident::new("deku", Span::call_site())
}
/// Generate enum initialization TokenStream
/// Cases:
/// - No fields: `MyEnum`
/// - Named: `MyEnum { field_idents }`
/// - Unnamed: `MyEnum ( field_idents )`
fn gen_enum_init<V: ToTokens, I: ToTokens>(
is_named: bool,
enum_variant: V,
field_idents: Vec<I>,
) -> TokenStream {
if field_idents.is_empty() {
return quote! { #enum_variant };
}
if is_named {
quote! {
#enum_variant {
#(#field_idents),*
}
}
} else {
quote! {
#enum_variant (
#(#field_idents),*
)
}
}
}
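// Hedged sketch (editor addition, not part of the original crate): exercises
// `gen_enum_init` for the named and unnamed cases documented above. Only
// coarse string containment is asserted, because the exact token spacing
// produced by `quote!` is an implementation detail.
#[cfg(test)]
mod gen_enum_init_sketch {
    use super::gen_enum_init;
    use quote::quote;

    #[test]
    fn named_and_unnamed_variants() {
        // Named: expected to be shaped like `MyEnum::A { x }`.
        let named = gen_enum_init(true, quote! { MyEnum::A }, vec![quote! { x }]).to_string();
        assert!(named.contains('{') && named.contains('x'));

        // Unnamed: expected to be shaped like `MyEnum::B ( x )`.
        let unnamed = gen_enum_init(false, quote! { MyEnum::B }, vec![quote! { x }]).to_string();
        assert!(unnamed.contains('(') && unnamed.contains('x'));
    }
}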
/// Generate struct initialization TokenStream
/// Cases:
/// - No fields: `Self {}`
/// - Named: `Self { field_idents }`
/// - Unnamed: `Self ( field_idents )`
fn gen_struct_init<I: ToTokens>(is_named: bool, field_idents: Vec<I>) -> TokenStream {
if field_idents.is_empty() {
return quote! { Self {} };
}
if is_named {
quote! {
Self {
#(#field_idents),*
}
}
} else {
quote! {
Self (
#(#field_idents),*
)
}
}
}
/// Generate struct destruction
///
/// - Named: `#ident { ref fields }`
/// - Unnamed: `#ident ( ref fields )`
fn gen_struct_destruction<I: ToTokens, F: ToTokens>(
named: bool,
ident: I,
field_idents: &[F],
) -> TokenStream {
if named {
quote! {
#ident {
#(ref #field_idents),*
}
}
} else {
quote! {
#ident (
#(ref #field_idents),*
)
}
}
}
/// Convert a field ident to internal ident:
/// `a` -> `__deku_a`
fn gen_internal_field_ident(ident: &TokenStream) -> TokenStream {
// Concat token: https://github.com/rust-lang/rust/issues/29599
let span = ident.span();
let s = ident.to_string();
let mut name = "__deku_".to_owned();
name.push_str(&s);
syn::Ident::new(&name, span).to_token_stream()
}
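// Hedged sketch (editor addition, not part of the original crate): checks the
// documented `a` -> `__deku_a` mapping of `gen_internal_field_ident`.
#[cfg(test)]
mod internal_ident_sketch {
    use super::gen_internal_field_ident;
    use quote::quote;

    #[test]
    fn prefixes_with_deku() {
        let mapped = gen_internal_field_ident(&quote! { a }).to_string();
        assert_eq!(mapped, "__deku_a");
    }
}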
/// Map all field indents to internal idents
///
/// - Named: `{ a: __deku_a }`
/// - Unnamed: `( __deku_a )`
fn gen_internal_field_idents(named: bool, idents: Vec<&TokenStream>) -> Vec<TokenStream> {
if named {
idents
.into_iter()
.map(|i| (i.clone(), gen_internal_field_ident(i)))
.map(|(i, h)| quote! {#i: #h})
.collect()
} else {
idents.into_iter().map(gen_internal_field_ident).collect()
}
}
fn split_ctx_to_pats_and_types(
ctx: &Punctuated<syn::FnArg, syn::token::Comma>,
) -> syn::Result<Vec<(&syn::Pat, &syn::Type)>> {
// `()` or `(u8, u32)`
ctx.iter()
.map(|arg| {
match arg {
syn::FnArg::Typed(pat_type) => Ok((pat_type.pat.as_ref(), pat_type.ty.as_ref())),
                // a `self` receiver is not acceptable here
syn::FnArg::Receiver(r) => Err(syn::Error::new(r.span(), "Unacceptable context")),
}
})
.collect::<Result<Vec<_>, _>>()
}
/// Generate ctx types and argument
///
/// - Empty: arg: `(): ()`, type: `()`
/// - One: arg: `a: usize`, type: `usize`
/// - Other: arg: `(a, b, ...): (u8, u8, ...)`, type: `(u8, u8, ...)`
fn gen_ctx_types_and_arg(
ctx: Option<&Punctuated<syn::FnArg, syn::token::Comma>>,
) -> syn::Result<(TokenStream, TokenStream)> {
if let Some(ctx) = ctx {
let pats_types = split_ctx_to_pats_and_types(ctx)?;
if pats_types.len() == 1 {
// remove parens for single item
let (pat, ty) = pats_types[0];
Ok((quote! {#ty}, quote! {#pat:#ty}))
} else {
let pats = pats_types.iter().map(|(pat, _)| pat);
let types = pats_types.iter().map(|(_, ty)| ty);
// "a: u8, b: usize" -> (u8, usize)
let types_cpy = types.clone();
let ctx_types = quote! {(#(#types_cpy),*)};
// "a: u8, b: usize" -> (a, b): (u8, usize)
let ctx_arg = quote! {(#(#pats),*): (#(#types),*)};
Ok((ctx_types, ctx_arg))
}
} else {
Ok((quote! {()}, quote! {_: ()}))
}
}
/// Generate type from matching ident from `id` in `ctx`
///
/// Given #[deku(ctx = "test: u16, my_id: u8", id = "my_id")], will return `u8`
fn gen_type_from_ctx_id(
ctx: &Punctuated<syn::FnArg, syn::token::Comma>,
id: &crate::Id,
) -> Option<TokenStream> {
let id = syn::Ident::new(&id.to_string(), id.span());
ctx.iter().find_map(|arg| {
if let syn::FnArg::Typed(pat_type) = arg {
if let syn::Pat::Ident(ident) = &*pat_type.pat {
if id == ident.ident {
let t = &pat_type.ty;
return Some(quote! {#t});
}
}
}
None
})
}
/// Generate argument for `id`:
/// `#deku(endian = "big", bits = "1")` -> `Endian::Big, Size::Bits(1)`
pub(crate) fn gen_id_args(
endian: Option<&syn::LitStr>,
bits: Option<&Num>,
bytes: Option<&Num>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::Size::Bits(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::Size::Bytes(#n)});
// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let id_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
match &id_args[..] {
[arg] => Ok(quote! {#arg}),
args => Ok(quote! {#(#args),*}),
}
}
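// Hedged sketch (editor addition, not part of the original crate): with only
// an endian attribute given, the id arguments reduce to the single `Endian`
// token documented above; the crate path prefix is not asserted.
#[cfg(test)]
mod id_args_sketch {
    use super::gen_id_args;

    #[test]
    fn endian_only() {
        let lit = syn::LitStr::new("little", proc_macro2::Span::call_site());
        let tokens = gen_id_args(Some(&lit), None, None).unwrap().to_string();
        assert!(tokens.contains("Endian") && tokens.contains("Little"));
    }
}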
/// Generate argument for fields:
///
/// `#deku(endian = "big", bits = "1", ctx = "a")` -> `Endian::Big, Size::Bits(1), a`
fn gen_field_args(
endian: Option<&syn::LitStr>,
bits: Option<&Num>,
bytes: Option<&Num>,
ctx: Option<&Punctuated<syn::Expr, syn::token::Comma>>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::Size::Bits(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::Size::Bytes(#n)});
let ctx = ctx.map(|c| quote! {#c});
// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let field_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref(), ctx.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
    // Because multiple context items are passed as `impl DekuRead<'_, (T1, T2)>`,
    // while a single item is `impl DekuRead<'_, T1>` (not a one-element tuple).
match &field_args[..] {
[arg] => Ok(quote! {#arg}),
args => Ok(quote! {#(#args),*}),
}
}
/// Generate endian tokens from string: `big` -> `Endian::Big`.
fn gen_endian_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
match s.value().as_str() {
"little" => Ok(quote! {::#crate_::ctx::Endian::Little}),
"big" => Ok(quote! {::#crate_::ctx::Endian::Big}),
_ => {
// treat as variable, possibly from `ctx`
let v: TokenStream = s.value().parse()?;
Ok(quote! {#v})
}
}
}
/// Wraps a TokenStream with a closure providing access to `ctx` variables when
/// `ctx_default` is provided
fn wrap_default_ctx(
body: TokenStream,
ctx: &Option<syn::punctuated::Punctuated<syn::FnArg, syn::token::Comma>>,
ctx_default: &Option<Punctuated<syn::Expr, syn::token::Comma>>,
) -> TokenStream {
if let (Some(ctx), Some(ctx_default)) = (ctx, ctx_default) {
// wrap in a function to make `ctx` variables in scope
quote! {
|#ctx| -> Result<_, _> {
#body
}(#ctx_default)
}
} else {
body
}
}
/// Returns true if the literal substring `s` is in the token
fn token_contains_string(tok: &Option<TokenStream>, s: &str) -> bool {
tok.as_ref()
.map(|v| {
let v = v.to_string();
v.contains(s)
})
.unwrap_or(false)
}
fn pad_bits(
bits: Option<&TokenStream>,
bytes: Option<&TokenStream>,
emit_padding: fn(&TokenStream) -> TokenStream,
) -> TokenStream {
match (bits, bytes) {
(Some(pad_bits), Some(pad_bytes)) => emit_padding("e! { #pad_bits + (#pad_bytes * 8) }),
(Some(pad_bits), None) => emit_padding(&pad_bits),
(None, Some(pad_bytes)) => emit_padding("e! {(#pad_bytes * 8)}),
(None, None) => quote!(),
}
}
| 29.686084 | 100 | 0.537229 |
4ade6a6ede889f5f4f0c56702d3ac7cddd9cf110 | 436 | //! Tests auto-converted from "sass-spec/spec/non_conformant/extend-tests/045_test_element_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexpected error
fn test() {
assert_eq!(
crate::rsass(
"%-a .foo.bar {a: b}\
\n*|a {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .foo.bar, -a *|a.bar {\
\n a: b;\
\n}\
\n"
);
}
| 22.947368 | 126 | 0.486239 |
69dc011bc948090ddd850be3a453e7d6000c90fb | 71 |
// These modules contain `compile_fail` doc tests.
mod future_escape;
| 17.75 | 50 | 0.774648 |
69df1250a48ee9087cc424604b80b2f414827d88 | 601 | #![cfg(feature = "budgets")]
extern crate rusoto_core;
extern crate rusoto_budgets;
use rusoto_budgets::{Budgets, BudgetsClient, DescribeBudgetsRequest};
use rusoto_core::Region;
// Switch to DescribeReportDefinitions when botocore is updated?
#[test]
#[ignore] // Ignore until we get this working
fn should_describe_budgets() {
let client = BudgetsClient::simple(Region::UsEast1);
// This request needs the accountId set:
let request = DescribeBudgetsRequest::default();
let response = client.describe_budgets(&request).sync().unwrap();
println!("response: {:?}", response);
} | 31.631579 | 69 | 0.737105 |
f9650f4820ab739c03c277c6c42febc7f7b9872e | 5,967 | use crate::{
components::{
visibility_blocking, CommandBlocking, CommandInfo,
CommitList, Component, DrawableComponent, EventState,
},
keys::SharedKeyConfig,
queue::{Action, InternalEvent, Queue},
strings,
ui::style::SharedTheme,
};
use anyhow::Result;
use asyncgit::{
sync::{self, CommitId},
CWD,
};
use crossterm::event::Event;
pub struct StashList {
list: CommitList,
visible: bool,
queue: Queue,
key_config: SharedKeyConfig,
}
impl StashList {
///
pub fn new(
queue: &Queue,
theme: SharedTheme,
key_config: SharedKeyConfig,
) -> Self {
Self {
visible: false,
list: CommitList::new(
&strings::stashlist_title(&key_config),
theme,
key_config.clone(),
),
queue: queue.clone(),
key_config,
}
}
///
pub fn update(&mut self) -> Result<()> {
if self.is_visible() {
let stashes = sync::get_stashes(CWD)?;
let commits =
sync::get_commits_info(CWD, stashes.as_slice(), 100)?;
self.list.set_count_total(commits.len());
self.list.items().set_items(0, commits);
}
Ok(())
}
fn apply_stash(&mut self) {
if let Some(e) = self.list.selected_entry() {
match sync::stash_apply(CWD, e.id, false) {
Ok(_) => {
self.queue
.borrow_mut()
.push_back(InternalEvent::TabSwitch);
}
Err(e) => {
self.queue.borrow_mut().push_back(
InternalEvent::ShowErrorMsg(format!(
"stash apply error:\n{}",
e,
)),
);
}
}
}
}
fn drop_stash(&mut self) {
if let Some(e) = self.list.selected_entry() {
self.queue.borrow_mut().push_back(
InternalEvent::ConfirmAction(Action::StashDrop(e.id)),
);
}
}
fn pop_stash(&mut self) {
if let Some(e) = self.list.selected_entry() {
self.queue.borrow_mut().push_back(
InternalEvent::ConfirmAction(Action::StashPop(e.id)),
);
}
}
fn inspect(&mut self) {
if let Some(e) = self.list.selected_entry() {
self.queue
.borrow_mut()
.push_back(InternalEvent::InspectCommit(e.id, None));
}
}
/// Called when a pending stash action has been confirmed
pub fn action_confirmed(&self, action: &Action) -> bool {
match *action {
Action::StashDrop(id) => Self::drop(id),
Action::StashPop(id) => self.pop(id),
_ => false,
}
}
fn drop(id: CommitId) -> bool {
sync::stash_drop(CWD, id).is_ok()
}
fn pop(&self, id: CommitId) -> bool {
match sync::stash_pop(CWD, id) {
Ok(_) => {
self.queue
.borrow_mut()
.push_back(InternalEvent::TabSwitch);
true
}
Err(e) => {
self.queue.borrow_mut().push_back(
InternalEvent::ShowErrorMsg(format!(
"stash pop error:\n{}",
e,
)),
);
true
}
}
}
}
impl DrawableComponent for StashList {
fn draw<B: tui::backend::Backend>(
&self,
f: &mut tui::Frame<B>,
rect: tui::layout::Rect,
) -> Result<()> {
self.list.draw(f, rect)?;
Ok(())
}
}
impl Component for StashList {
fn commands(
&self,
out: &mut Vec<CommandInfo>,
force_all: bool,
) -> CommandBlocking {
if self.visible || force_all {
self.list.commands(out, force_all);
let selection_valid =
self.list.selected_entry().is_some();
out.push(CommandInfo::new(
strings::commands::stashlist_pop(&self.key_config),
selection_valid,
true,
));
out.push(CommandInfo::new(
strings::commands::stashlist_apply(&self.key_config),
selection_valid,
true,
));
out.push(CommandInfo::new(
strings::commands::stashlist_drop(&self.key_config),
selection_valid,
true,
));
out.push(CommandInfo::new(
strings::commands::stashlist_inspect(
&self.key_config,
),
selection_valid,
true,
));
}
visibility_blocking(self)
}
fn event(
&mut self,
ev: crossterm::event::Event,
) -> Result<EventState> {
if self.is_visible() {
if self.list.event(ev)?.is_consumed() {
return Ok(EventState::Consumed);
}
if let Event::Key(k) = ev {
if k == self.key_config.enter {
self.pop_stash()
} else if k == self.key_config.stash_apply {
self.apply_stash()
} else if k == self.key_config.stash_drop {
self.drop_stash()
} else if k == self.key_config.stash_open {
self.inspect()
} else {
}
}
}
Ok(EventState::NotConsumed)
}
fn is_visible(&self) -> bool {
self.visible
}
fn hide(&mut self) {
self.visible = false;
}
fn show(&mut self) -> Result<()> {
self.visible = true;
self.update()?;
Ok(())
}
}
| 26.402655 | 70 | 0.460365 |
79cbea34872e1f5343f66860f5894a2e9c20241c | 2,206 | use tracing::warn;
use casper_types::{Key, U512};
use super::Error;
use crate::{
core::engine_state::{
execution_effect::ExecutionEffect, execution_result::ExecutionResult, op::Op,
},
shared::{gas::Gas, transform::Transform},
};
fn on_fail_charge_test_helper<T>(
f: impl Fn() -> Result<T, Error>,
success_cost: Gas,
error_cost: Gas,
) -> ExecutionResult {
let _result = on_fail_charge!(f(), error_cost);
ExecutionResult::Success {
effect: Default::default(),
cost: success_cost,
}
}
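// Editor note (hedged): as exercised by the tests below, `on_fail_charge!`
// evaluates the expression lazily and, on `Err`, early-returns an
// `ExecutionResult::Failure` charged with the given cost (and, in the
// three-argument form, the given execution effect); on `Ok` it simply yields
// the value.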
#[test]
fn on_fail_charge_ok_test() {
let val = Gas::new(U512::from(123));
match on_fail_charge_test_helper(|| Ok(()), val, Gas::new(U512::from(456))) {
ExecutionResult::Success { cost, .. } => assert_eq!(cost, val),
ExecutionResult::Failure { .. } => panic!("Should be success"),
}
}
#[test]
fn on_fail_charge_err_laziness_test() {
let input: Result<(), Error> = Err(Error::GasLimit);
let error_cost = Gas::new(U512::from(456));
match on_fail_charge_test_helper(|| input.clone(), Gas::new(U512::from(123)), error_cost) {
ExecutionResult::Success { .. } => panic!("Should fail"),
ExecutionResult::Failure { cost, .. } => assert_eq!(cost, error_cost),
}
}
#[test]
fn on_fail_charge_with_action() {
let f = || {
let input: Result<(), Error> = Err(Error::GasLimit);
on_fail_charge!(input, Gas::new(U512::from(456)), {
let mut effect = ExecutionEffect::default();
effect.ops.insert(Key::Hash([42u8; 32]), Op::Read);
effect
.transforms
.insert(Key::Hash([42u8; 32]), Transform::Identity);
effect
});
ExecutionResult::Success {
effect: Default::default(),
cost: Gas::default(),
}
};
match f() {
ExecutionResult::Success { .. } => panic!("Should fail"),
ExecutionResult::Failure { cost, effect, .. } => {
assert_eq!(cost, Gas::new(U512::from(456)));
// Check if the containers are non-empty
assert_eq!(effect.ops.len(), 1);
assert_eq!(effect.transforms.len(), 1);
}
}
}
| 30.219178 | 95 | 0.577516 |
e53ccf328481fafb2adcd4511ce558dcaa0fbad3 | 10,364 | //! LiteX LiteEth peripheral
//!
//! The hardware source and any documentation can be found in the
//! [LiteEth Git
//! repository](https://github.com/enjoy-digital/liteeth).
use crate::event_manager::LiteXEventManager;
use crate::litex_registers::{LiteXSoCRegisterConfiguration, Read, Write};
use core::cell::Cell;
use core::slice;
use kernel::debug;
use kernel::utilities::cells::{OptionalCell, TakeCell};
use kernel::utilities::StaticRef;
use kernel::ErrorCode;
// Both events have the same index since they are located on different
// event manager instances
const LITEETH_TX_EVENT: usize = 0;
const LITEETH_RX_EVENT: usize = 0;
type LiteEthRXEV<'a, R> = LiteXEventManager<
'a,
u8,
<R as LiteXSoCRegisterConfiguration>::ReadOnly8,
<R as LiteXSoCRegisterConfiguration>::ReadWrite8,
<R as LiteXSoCRegisterConfiguration>::ReadWrite8,
>;
type LiteEthTXEV<'a, R> = LiteEthRXEV<'a, R>;
#[repr(C)]
pub struct LiteEthPhyRegisters<R: LiteXSoCRegisterConfiguration> {
/// ETHPHY_CRG_RESET
reset: R::WriteOnly8,
/// ETHPHY_MDIO_W
mdio_w: R::ReadWrite8, //<EthPhyMDIOW>,
/// ETHPHY_MDIO_R
mdio_r: R::ReadOnly8, //<EthPhyMDIOR>,
}
#[repr(C)]
pub struct LiteEthMacRegisters<R: LiteXSoCRegisterConfiguration> {
/// ETHMAC_SRAM_WRITER_SLOT
rx_slot: R::ReadOnly8,
/// ETHMAC_SRAM_WRITER_LENGTH
rx_length: R::ReadOnly32,
/// ETHMAC_SRAM_WRITER_ERRORS
rx_errors: R::ReadOnly32,
/// ETHMAC_SRAM_WRITER_EV
rx_ev_status: R::ReadOnly8,
rx_ev_pending: R::ReadWrite8,
rx_ev_enable: R::ReadWrite8,
/// ETHMAC_SRAM_READER_START
tx_start: R::ReadWrite8,
tx_ready: R::ReadOnly8,
tx_level: R::ReadOnly8,
tx_slot: R::ReadWrite8,
/// ETHMAC_SRAM_READER_LENGTH
tx_length: R::ReadWrite16,
/// ETHMAC_SRAM_READER_EV
tx_ev_status: R::ReadOnly8,
tx_ev_pending: R::ReadWrite8,
tx_ev_enable: R::ReadWrite8,
/// ETHMAC_PREAMBLE_CRC
preamble_crc: R::ReadWrite8,
/// ETHMAC_PREAMBLE_ERRORS
preamble_errors: R::ReadOnly8,
/// ETHMAC_CRC_ERRORS
crc_errors: R::ReadOnly32,
}
impl<R: LiteXSoCRegisterConfiguration> LiteEthMacRegisters<R> {
fn rx_ev<'a>(&'a self) -> LiteEthRXEV<'a, R> {
LiteEthRXEV::<R>::new(&self.rx_ev_status, &self.rx_ev_pending, &self.rx_ev_enable)
}
fn tx_ev<'a>(&'a self) -> LiteEthTXEV<'a, R> {
LiteEthTXEV::<R>::new(&self.tx_ev_status, &self.tx_ev_pending, &self.tx_ev_enable)
}
}
pub trait LiteEthClient {
fn tx_done(&self, rc: Result<(), ErrorCode>, packet_buffer: &'static mut [u8]);
fn rx_packet(&self, packet: &'static mut [u8], len: usize);
}
pub struct LiteEth<'a, R: LiteXSoCRegisterConfiguration> {
mac_regs: StaticRef<LiteEthMacRegisters<R>>,
mac_memory_base: usize,
mac_memory_len: usize,
slot_size: usize,
rx_slots: usize,
tx_slots: usize,
client: OptionalCell<&'a dyn LiteEthClient>,
tx_packet: TakeCell<'static, [u8]>,
rx_buffer: TakeCell<'static, [u8]>,
initialized: Cell<bool>,
}
impl<'a, R: LiteXSoCRegisterConfiguration> LiteEth<'a, R> {
pub unsafe fn new(
mac_regs: StaticRef<LiteEthMacRegisters<R>>,
mac_memory_base: usize,
mac_memory_len: usize,
slot_size: usize,
rx_slots: usize,
tx_slots: usize,
rx_buffer: &'static mut [u8],
) -> LiteEth<'a, R> {
LiteEth {
mac_regs,
mac_memory_base,
mac_memory_len,
slot_size,
rx_slots,
tx_slots,
client: OptionalCell::empty(),
tx_packet: TakeCell::empty(),
rx_buffer: TakeCell::new(rx_buffer),
initialized: Cell::new(false),
}
}
pub fn set_client(&self, client: &'a dyn LiteEthClient) {
self.client.set(client);
}
pub fn initialize(&self) {
// Sanity check the memory parameters
//
// Technically the constructor is unsafe as it will (over the
// lifetime of this struct) "cast" the raw mac_memory pointer
// (and slot offsets) into pointers and access them
        // directly. However, checking it at runtime once seems like a
// good idea.
assert!(
(self.rx_slots + self.tx_slots) * self.slot_size <= self.mac_memory_len,
"LiteEth: slots would exceed assigned MAC memory area"
);
assert!(self.rx_slots > 0, "LiteEth: no RX slot");
assert!(self.tx_slots > 0, "LiteEth: no TX slot");
// Clear any pending EV events
self.mac_regs.rx_ev().clear_event(LITEETH_RX_EVENT);
self.mac_regs.tx_ev().clear_event(LITEETH_TX_EVENT);
// Disable TX events (only enabled when a packet is sent)
self.mac_regs.tx_ev().disable_event(LITEETH_TX_EVENT);
// Enable RX events
self.mac_regs.rx_ev().enable_event(LITEETH_RX_EVENT);
self.initialized.set(true);
}
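    // Editor note (hedged reading of `get_slot_buffer` below): the MAC SRAM
    // window is treated as `rx_slots` receive slots followed by `tx_slots`
    // transmit slots, each `slot_size` bytes long.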
unsafe fn get_slot_buffer<'s>(&'s self, tx: bool, slot_id: usize) -> Option<&'s mut [u8]> {
if (tx && slot_id > self.tx_slots) || (!tx && slot_id > self.rx_slots) {
return None;
}
let slots_offset = if tx {
self.mac_memory_base + self.slot_size * self.rx_slots
} else {
self.mac_memory_base
};
let slot_addr = slots_offset + slot_id * self.slot_size;
Some(slice::from_raw_parts_mut(
slot_addr as *mut u8,
self.slot_size,
))
}
pub fn return_rx_buffer(&self, rx_buffer: &'static mut [u8]) {
// Assert that we won't overwrite a buffer
assert!(
self.rx_buffer.is_none(),
"LiteEth: return RX buffer while one is registered"
);
// Put the buffer back
self.rx_buffer.replace(rx_buffer);
// In case we received a packet RX interrupt but couldn't
// handle it due to the missing buffer, reenable RX interrupts
self.mac_regs.rx_ev().enable_event(LITEETH_RX_EVENT);
}
fn rx_interrupt(&self) {
        // Check whether we have a buffer to read the packet into. If
        // not, we must disable (but not clear) the event and re-enable
        // it as soon as we get the buffer back from the client.
if self.rx_buffer.is_none() {
self.mac_regs.rx_ev().disable_event(LITEETH_RX_EVENT);
} else {
// Get the buffer first to be able to check the length
let rx_buffer = self.rx_buffer.take().unwrap();
// Get the frame length. If it exceeds the length of the
// rx_buffer, discard the packet, put the buffer back
let pkt_len = self.mac_regs.rx_length.get() as usize;
if pkt_len > rx_buffer.len() {
debug!("LiteEth: discarding ethernet packet with len {}", pkt_len);
// Acknowledge the interrupt so that the HW may use the slot again
self.mac_regs.rx_ev().clear_event(LITEETH_RX_EVENT);
// Replace the buffer
self.rx_buffer.replace(rx_buffer);
} else {
// Obtain the packet slot id
let slot_id: usize = self.mac_regs.rx_slot.get().into();
// Get the slot buffer reference
let slot = unsafe {
self.get_slot_buffer(false, slot_id).unwrap() // Unwrap fail = LiteEth: invalid RX slot id
};
// Copy the packet into the buffer
rx_buffer[..pkt_len].copy_from_slice(&slot[..pkt_len]);
// Since all data is copied, acknowledge the interrupt
// so that the slot is ready for use again
self.mac_regs.rx_ev().clear_event(LITEETH_RX_EVENT);
self.client
.map(move |client| client.rx_packet(rx_buffer, pkt_len));
}
}
}
/// Transmit an ethernet packet over the interface
///
/// For now this will only use a single slot on the interface and
    /// is therefore blocking. A client must wait for the `tx_done`
    /// callback before sending a new packet.
pub fn transmit(
&self,
packet: &'static mut [u8],
len: usize,
) -> Result<(), (Result<(), ErrorCode>, &'static mut [u8])> {
if packet.len() < len || len > u16::MAX as usize {
return Err((Err(ErrorCode::INVAL), packet));
}
if self.tx_packet.is_some() {
return Err((Err(ErrorCode::BUSY), packet));
}
let slot = unsafe { self.get_slot_buffer(true, 0) }.unwrap(); // Unwrap fail = LiteEth: no TX slot
if slot.len() < len {
return Err((Err(ErrorCode::SIZE), packet));
}
// Copy the packet into the slot HW buffer
slot[..len].copy_from_slice(&packet[..len]);
// Put the currently transmitting packet into the designated
// TakeCell
self.tx_packet.replace(packet);
// Set the slot and packet length
self.mac_regs.tx_slot.set(0);
self.mac_regs.tx_length.set(len as u16);
// Wait for the device to be ready to transmit
while self.mac_regs.tx_ready.get() == 0 {}
// Enable TX interrupts
self.mac_regs.tx_ev().enable_event(LITEETH_TX_EVENT);
// Start the transmission
self.mac_regs.tx_start.set(1);
Ok(())
}
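    // Hedged usage note (editor addition): the expected client flow is to
    // call `transmit(packet, len)`, wait for `LiteEthClient::tx_done` (which
    // hands the buffer back), and only then submit the next packet; calling
    // `transmit` again while a packet is in flight returns `ErrorCode::BUSY`.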
fn tx_interrupt(&self) {
        // Deassert the interrupt; it can remain enabled
self.mac_regs.tx_ev().clear_event(LITEETH_TX_EVENT);
        if self.tx_packet.is_none() {
            debug!("LiteEth: tx interrupt called without tx_packet set");
            return;
        }
// We use only one slot, so this event is unambiguous
let packet = self.tx_packet.take().unwrap(); // Unwrap fail = LiteEth: TakeCell empty in tx callback
self.client
.map(move |client| client.tx_done(Ok(()), packet));
}
pub fn service_interrupt(&self) {
// The interrupt could've been generated by both a packet
// being received or finished transmitting. Check and handle
// both cases
if self.mac_regs.rx_ev().event_asserted(LITEETH_RX_EVENT) {
self.rx_interrupt();
}
if self.mac_regs.tx_ev().event_asserted(LITEETH_TX_EVENT) {
self.tx_interrupt();
}
}
}
| 33.649351 | 110 | 0.614628 |
acef653be46a202926b4eae351336eb092af53fa | 8,363 | use lumi::{BalanceSheet, Error, Ledger, Transaction, TxnFlag};
use lumi_server_defs::{
FilterOptions, JournalItem, Position, TrieNode, TrieOptions, TrieTable, TrieTableRow,
};
use rust_decimal::Decimal;
use std::sync::Arc;
use std::{
collections::{HashMap, HashSet},
convert::Infallible,
};
use tokio::sync::RwLock;
fn balance_sheet_to_list(sheet: &BalanceSheet) -> HashMap<String, Vec<Position>> {
let mut result = HashMap::new();
for (account, account_map) in sheet {
let list = result.entry(account.to_string()).or_insert_with(Vec::new);
for (currency, currency_map) in account_map {
for (cost, number) in currency_map {
list.push(Position {
number: *number,
currency: currency.clone(),
cost: cost.clone(),
})
}
}
}
result
}
fn build_trie_table_helper<'s, 'r: 's>(
root: &'r str,
level: usize,
node: &TrieNode<&'s str>,
currencies: &[&'s str],
rows: &mut Vec<TrieTableRow<&'s str>>,
) {
let numbers = currencies
.iter()
.map(|c| {
let number = node.numbers.get(*c).copied().unwrap_or_default();
if number.is_zero() {
String::new()
} else {
format!("{:.2}", number)
}
})
.collect();
let row = TrieTableRow {
level,
name: root,
numbers,
};
rows.push(row);
let mut sorted_kv: Vec<_> = node.nodes.iter().collect();
sorted_kv.sort_by_key(|kv| kv.0);
for (account, sub_trie) in sorted_kv {
build_trie_table_helper(account, level + 1, sub_trie, currencies, rows);
}
}
fn build_trie_table<'s, 'r: 's>(
ledger: &'s Ledger,
root_account: &'r str,
options: TrieOptions,
) -> Option<TrieTable<&'s str>> {
let (trie, currencies) = build_trie(ledger, root_account, options);
if let Some(node) = trie.nodes.get(root_account) {
let mut currencies: Vec<_> = currencies.into_iter().collect();
currencies.sort_unstable();
let mut rows = Vec::new();
build_trie_table_helper(root_account, 0, node, ¤cies, &mut rows);
Some(TrieTable { rows, currencies })
} else {
None
}
}
pub fn build_trie<'s>(
ledger: &'s Ledger,
root_account: &str,
options: TrieOptions,
) -> (TrieNode<&'s str>, HashSet<&'s str>) {
let show_closed = options.show_closed.unwrap_or(false);
let mut root_node = TrieNode::default();
let mut currencies = HashSet::new();
for (account, account_map) in ledger.balance_sheet() {
if ledger.accounts()[account].close().is_some() && !show_closed {
continue;
}
let mut parts = account.split(':');
if parts.next() != Some(root_account) {
continue;
}
let mut account_holdings: HashMap<&'s str, Decimal> = HashMap::new();
for (currency, cost_map) in account_map {
for (cost, number) in cost_map {
if number.is_zero() {
continue;
}
if let Some(unit_cost) = cost {
let cost_currency = unit_cost.amount.currency.as_str();
*account_holdings.entry(cost_currency).or_default() +=
unit_cost.amount.number * number;
currencies.insert(cost_currency);
} else {
*account_holdings.entry(currency.as_str()).or_default() += number;
currencies.insert(currency.as_str());
}
}
}
let mut leaf_node = &mut root_node;
for key in account.split(':') {
leaf_node = leaf_node.nodes.entry(key).or_default();
for (currency, number) in account_holdings.iter() {
*leaf_node.numbers.entry(currency).or_default() += number;
}
}
}
(root_node, currencies)
}
pub async fn trie(
root_account: String,
options: TrieOptions,
ledger: Arc<RwLock<Ledger>>,
) -> Result<impl warp::Reply, Infallible> {
let ledger = ledger.read().await;
let trie_table = build_trie_table(&ledger, &root_account, options);
let result = trie_table.unwrap_or_default();
Ok(warp::reply::json(&result))
}
pub async fn errors(errors: Arc<RwLock<Vec<Error>>>) -> Result<impl warp::Reply, Infallible> {
let errors = errors.read().await;
Ok(warp::reply::json(&*errors))
}
pub async fn balances(ledger: Arc<RwLock<Ledger>>) -> Result<impl warp::Reply, Infallible> {
let ledger = ledger.read().await;
Ok(warp::reply::json(&balance_sheet_to_list(
ledger.balance_sheet(),
)))
}
fn filter_account(txn: &Transaction, account: &str) -> bool {
for posting in txn.postings() {
if posting.account.starts_with(account) {
return true;
}
}
false
}
fn update_balance<'t>(
txn: &'t Transaction,
account: &str,
running_balance: &mut HashMap<&'t str, Decimal>,
) -> HashMap<&'t str, Decimal> {
if txn.flag() == TxnFlag::Balance {
return HashMap::new();
}
let mut changes: HashMap<&str, Decimal> = HashMap::new();
for posting in txn.postings().iter() {
if posting.cost.is_none() && posting.account.starts_with(&account) {
*changes.entry(posting.amount.currency.as_str()).or_default() += posting.amount.number;
}
}
for (c, n) in changes.iter() {
*running_balance.entry(c).or_default() += n;
}
changes
}
pub async fn account_journal(
account: Option<String>,
options: FilterOptions,
ledger: Arc<RwLock<Ledger>>,
) -> Result<impl warp::Reply, Infallible> {
let ledger = ledger.read().await;
let mut filters: Vec<Box<dyn Fn(&Transaction) -> bool>> = Vec::new();
if let Some(ref account) = account {
filters.push(Box::new(move |txn: &Transaction| {
filter_account(txn, account)
}));
}
if let Some(account) = &options.account {
filters.push(Box::new(move |txn: &Transaction| {
filter_account(txn, account)
}));
};
let txns: Vec<_> = ledger
.txns()
.iter()
.filter(|t| {
for filter in filters.iter() {
if !filter(t) {
return false;
}
}
true
})
.collect();
let total_number = txns.len();
let page = std::cmp::max(options.page.unwrap_or(1), 1);
let entries = std::cmp::max(options.entries.unwrap_or(50), 1);
let old_first = options.old_first.unwrap_or(false);
if (page - 1) * entries >= txns.len() {
Ok(warp::reply::json(&(
Vec::<Transaction>::new(),
total_number,
)))
} else {
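        // Editor note (hedged reading of the logic below): transactions are
        // stored oldest-first. With `old_first` the window is a plain offset;
        // otherwise page 1 shows the newest `entries` txns, so `num_skip`
        // counts the older txns before the visible window and the running
        // balance is pre-accumulated over exactly those skipped txns.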
let num_skip = if old_first {
(page - 1) * entries
} else if page * entries >= txns.len() {
0
} else {
txns.len() - page * entries
};
let mut running_balance: HashMap<&str, Decimal> = HashMap::new();
if let Some(ref account) = account {
for txn in txns.iter().take(num_skip) {
let _ = update_balance(txn, account, &mut running_balance);
}
}
let num_take = if old_first {
std::cmp::min(entries, txns.len() - entries * (page - 1))
} else {
(txns.len() - entries * (page - 1)) - num_skip
};
let mut items: Vec<_> = txns
.into_iter()
.skip(num_skip)
.take(num_take)
.map(|txn| {
if let Some(ref account) = account {
let changes = update_balance(txn, account, &mut running_balance);
JournalItem {
txn,
balance: running_balance.clone(),
changes,
}
} else {
JournalItem {
txn,
balance: HashMap::new(),
changes: HashMap::new(),
}
}
})
.collect();
if !old_first {
items.reverse();
}
Ok(warp::reply::json(&(items, total_number)))
}
}
| 32.414729 | 99 | 0.537128 |
3a0d6ae5a55003836a6705efaf816f0391bea350 | 50,439 | // Copyright 2021 Parallel Finance Developer.
// This file is part of Parallel Finance.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![cfg_attr(not(feature = "std"), no_std)]
// `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256.
#![recursion_limit = "256"]
// Make the WASM binary available.
#[cfg(feature = "std")]
include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs"));
use codec::Encode;
use frame_support::{
dispatch::Weight,
traits::{All, LockIdentifier, U128CurrencyToVote},
PalletId,
};
use orml_currencies::BasicCurrencyAdapter;
use orml_traits::{parameter_type_with_key, DataProvider, DataProviderExtended};
use polkadot_runtime_common::SlowAdjustingFeeUpdate;
use sp_api::impl_runtime_apis;
use sp_core::{
u32_trait::{_1, _2, _3, _4, _5},
OpaqueMetadata,
};
use sp_runtime::traits::{
AccountIdConversion, AccountIdLookup, BlakeTwo256, Block as BlockT, Convert, Zero,
};
use sp_runtime::DispatchError;
use sp_runtime::{
create_runtime_str, generic, impl_opaque_keys, traits,
transaction_validity::{TransactionSource, TransactionValidity},
ApplyExtrinsicResult, KeyTypeId, Percent, SaturatedConversion,
};
use sp_std::prelude::*;
#[cfg(feature = "std")]
use sp_version::NativeVersion;
use sp_version::RuntimeVersion;
use cumulus_primitives_core::ParaId;
use frame_support::log;
use frame_system::{
limits::{BlockLength, BlockWeights},
EnsureOneOf, EnsureRoot,
};
use orml_xcm_support::{IsNativeConcrete, MultiCurrencyAdapter, MultiNativeAsset};
use polkadot_parachain::primitives::Sibling;
use primitives::*;
use static_assertions::const_assert;
use xcm::v0::{Junction, Junction::*, MultiAsset, MultiLocation, MultiLocation::*, NetworkId, Xcm};
use xcm_builder::{
AccountId32Aliases, AllowTopLevelPaidExecutionFrom, EnsureXcmOrigin, FixedWeightBounds,
LocationInverter, ParentAsSuperuser, ParentIsDefault, RelayChainAsNative,
SiblingParachainAsNative, SiblingParachainConvertsVia, SignedAccountId32AsNative,
SignedToAccountId32, SovereignSignedViaLocation, TakeWeightCredit, UsingComponents,
};
use xcm_executor::{Config, XcmExecutor};
pub mod constants;
pub mod impls;
// A few exports that help ease life for downstream crates.
// re-exports
pub use constants::{currency, fee, time};
pub use impls::DealWithFees;
pub use pallet_liquid_staking;
pub use pallet_liquidation;
pub use pallet_loans;
pub use pallet_multisig;
pub use pallet_prices;
use currency::*;
use fee::*;
use time::*;
pub use frame_support::{
construct_runtime, parameter_types,
traits::{KeyOwnerProofSystem, Randomness},
weights::{
constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND},
DispatchClass, IdentityFee,
},
StorageValue,
};
use pallet_xcm::XcmPassthrough;
#[cfg(any(feature = "std", test))]
pub use sp_runtime::BuildStorage;
pub use sp_runtime::{Perbill, Permill};
/// Opaque types. These are used by the CLI to instantiate machinery that doesn't need to know
/// the specifics of the runtime. They can then be made to be agnostic over specific formats
/// of data like extrinsics, allowing for them to continue syncing the network through upgrades
/// to even the core data structures.
pub mod opaque {
use super::*;
pub use sp_runtime::OpaqueExtrinsic as UncheckedExtrinsic;
/// Opaque block type.
pub type Block = generic::Block<Header, UncheckedExtrinsic>;
pub type SessionHandlers = ();
impl_opaque_keys! {
pub struct SessionKeys {
pub aura: Aura,
}
}
}
pub const VERSION: RuntimeVersion = RuntimeVersion {
spec_name: create_runtime_str!("heiko"),
impl_name: create_runtime_str!("heiko"),
authoring_version: 1,
spec_version: 100,
impl_version: 1,
apis: RUNTIME_API_VERSIONS,
transaction_version: 1,
};
// 1 in 4 blocks (on average, not counting collisions) will be primary babe blocks.
pub const PRIMARY_PROBABILITY: (u64, u64) = (1, 4);
#[derive(codec::Encode, codec::Decode)]
pub enum XCMPMessage<XAccountId, XBalance> {
/// Transfer tokens to the given account from the Parachain account.
TransferToken(XAccountId, XBalance),
}
/// The version information used to identify this runtime when compiled natively.
#[cfg(feature = "std")]
pub fn native_version() -> NativeVersion {
NativeVersion {
runtime_version: VERSION,
can_author_with: Default::default(),
}
}
/// We assume that ~10% of the block weight is consumed by `on_initialize` handlers.
/// This is used to limit the maximal weight of a single extrinsic.
const AVERAGE_ON_INITIALIZE_RATIO: Perbill = Perbill::from_percent(10);
/// We allow `Normal` extrinsics to fill up the block up to 75%; the rest can be used
/// by Operational extrinsics.
const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);
/// We allow for 2 seconds of compute with a 6 second average block time.
const MAXIMUM_BLOCK_WEIGHT: Weight = 2 * WEIGHT_PER_SECOND;
parameter_types! {
pub const BlockHashCount: BlockNumber = 250;
pub const Version: RuntimeVersion = VERSION;
pub RuntimeBlockLength: BlockLength =
BlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO);
pub RuntimeBlockWeights: BlockWeights = BlockWeights::builder()
.base_block(BlockExecutionWeight::get())
.for_class(DispatchClass::all(), |weights| {
weights.base_extrinsic = ExtrinsicBaseWeight::get();
})
.for_class(DispatchClass::Normal, |weights| {
weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT);
})
.for_class(DispatchClass::Operational, |weights| {
weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT);
// Operational transactions have some extra reserved space, so that they
// are included even if block reached `MAXIMUM_BLOCK_WEIGHT`.
weights.reserved = Some(
MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT
);
})
.avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO)
.build_or_panic();
    // TODO: register Parallel's SS58 prefix
pub const SS58Prefix: u8 = 42;
}
// Configure FRAME pallets to include in runtime.
impl frame_system::Config for Runtime {
/// The basic call filter to use in dispatchable.
type BaseCallFilter = ();
/// Block & extrinsics weights: base values and limits.
type BlockWeights = RuntimeBlockWeights;
/// The maximum length of a block (in bytes).
type BlockLength = RuntimeBlockLength;
/// The identifier used to distinguish between accounts.
type AccountId = AccountId;
/// The aggregated dispatch type that is available for extrinsics.
type Call = Call;
/// The lookup mechanism to get account ID from whatever is passed in dispatchers.
type Lookup = AccountIdLookup<AccountId, ()>;
/// The index type for storing how many extrinsics an account has signed.
type Index = Index;
/// The index type for blocks.
type BlockNumber = BlockNumber;
/// The type for hashing blocks and tries.
type Hash = Hash;
/// The hashing algorithm used.
type Hashing = BlakeTwo256;
/// The header type.
type Header = generic::Header<BlockNumber, BlakeTwo256>;
/// The ubiquitous event type.
type Event = Event;
/// The ubiquitous origin type.
type Origin = Origin;
/// Maximum number of block number to block hash mappings to keep (oldest pruned first).
type BlockHashCount = BlockHashCount;
/// The weight of database operations that the runtime can invoke.
type DbWeight = RocksDbWeight;
/// Version of the runtime.
type Version = Version;
/// Converts a module to the index of the module in `construct_runtime!`.
///
/// This type is being generated by `construct_runtime!`.
type PalletInfo = PalletInfo;
/// What to do if a new account is created.
type OnNewAccount = ();
/// What to do if an account is fully reaped from the system.
type OnKilledAccount = ();
/// The data to be stored in an account.
type AccountData = pallet_balances::AccountData<Balance>;
/// Weight information for the extrinsics of this pallet.
type SystemWeightInfo = ();
/// This is used as an identifier of the chain. 42 is the generic substrate prefix.
type SS58Prefix = SS58Prefix;
/// The set code logic.
type OnSetCode = cumulus_pallet_parachain_system::ParachainSetCode<Self>;
}
parameter_type_with_key! {
pub ExistentialDeposits: |_currency_id: CurrencyId| -> Balance {
Zero::zero()
};
}
parameter_types! {
pub TreasuryAccount: AccountId = TreasuryPalletId::get().into_account();
}
impl orml_tokens::Config for Runtime {
type Event = Event;
type Balance = Balance;
type Amount = Amount;
type CurrencyId = CurrencyId;
type OnDust = orml_tokens::TransferDust<Runtime, TreasuryAccount>;
type WeightInfo = ();
type ExistentialDeposits = ExistentialDeposits;
type MaxLocks = MaxLocks;
}
parameter_types! {
pub const GetNativeCurrencyId: CurrencyId = CurrencyId::HKO;
pub const LoansPalletId: PalletId = PalletId(*b"par/loan");
}
impl orml_currencies::Config for Runtime {
type Event = Event;
type MultiCurrency = Tokens;
type NativeCurrency = BasicCurrencyAdapter<Runtime, Balances, Amount, BlockNumber>;
type GetNativeCurrencyId = GetNativeCurrencyId;
type WeightInfo = ();
}
pub struct CurrencyIdConvert;
impl Convert<CurrencyId, Option<MultiLocation>> for CurrencyIdConvert {
fn convert(id: CurrencyId) -> Option<MultiLocation> {
match id {
CurrencyId::KSM => Some(X1(Parent)),
CurrencyId::xKSM => Some(X3(
Parent,
Parachain(ParachainInfo::parachain_id().into()),
GeneralKey(b"xKSM".to_vec()),
)),
_ => None,
}
}
}
impl Convert<MultiLocation, Option<CurrencyId>> for CurrencyIdConvert {
fn convert(location: MultiLocation) -> Option<CurrencyId> {
match location {
X1(Parent) => Some(CurrencyId::KSM),
X3(Parent, Parachain(id), GeneralKey(key))
if ParaId::from(id) == ParachainInfo::parachain_id() && key == b"xKSM".to_vec() =>
{
Some(CurrencyId::xKSM)
}
_ => None,
}
}
}
impl Convert<MultiAsset, Option<CurrencyId>> for CurrencyIdConvert {
fn convert(a: MultiAsset) -> Option<CurrencyId> {
if let MultiAsset::ConcreteFungible { id, amount: _ } = a {
Self::convert(id)
} else {
None
}
}
}
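// Editor note (hedged): the three impls above form a bidirectional mapping,
// e.g. `CurrencyId::KSM` <-> `X1(Parent)` and `CurrencyId::xKSM` <->
// `X3(Parent, Parachain(self_id), GeneralKey(b"xKSM"))`; unknown locations
// and assets fall through to `None`.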
pub struct AccountIdToMultiLocation;
impl Convert<AccountId, MultiLocation> for AccountIdToMultiLocation {
fn convert(account_id: AccountId) -> MultiLocation {
MultiLocation::from(Junction::AccountId32 {
network: NetworkId::Any,
id: account_id.into(),
})
}
}
parameter_types! {
pub SelfLocation: MultiLocation = X2(Parent, Parachain(ParachainInfo::parachain_id().into()));
pub const BaseXcmWeight: Weight = 100_000_000;
}
impl orml_xtokens::Config for Runtime {
type Event = Event;
type Balance = Balance;
type CurrencyId = CurrencyId;
type CurrencyIdConvert = CurrencyIdConvert;
type AccountIdToMultiLocation = AccountIdToMultiLocation;
type SelfLocation = SelfLocation;
type XcmExecutor = XcmExecutor<XcmConfig>;
type Weigher = FixedWeightBounds<UnitWeightCost, Call>;
type BaseXcmWeight = BaseXcmWeight;
}
impl orml_unknown_tokens::Config for Runtime {
type Event = Event;
}
impl pallet_loans::Config for Runtime {
type Event = Event;
type Currency = Currencies;
type PalletId = LoansPalletId;
type PriceFeeder = Prices;
type ReserveOrigin = EnsureRootOrHalfCouncil;
type UpdateOrigin = EnsureRootOrHalfCouncil;
type WeightInfo = pallet_loans::weights::SubstrateWeight<Runtime>;
type UnixTime = Timestamp;
}
parameter_types! {
pub const LiquidStakingAgentMaxMembers: u32 = 100;
}
type LiquidStakingAgentMembershipInstance = pallet_membership::Instance3;
impl pallet_membership::Config<LiquidStakingAgentMembershipInstance> for Runtime {
type Event = Event;
type AddOrigin = EnsureRootOrHalfCouncil;
type RemoveOrigin = EnsureRootOrHalfCouncil;
type SwapOrigin = EnsureRootOrHalfCouncil;
type ResetOrigin = EnsureRootOrHalfCouncil;
type PrimeOrigin = EnsureRootOrHalfCouncil;
type MembershipInitialized = ();
type MembershipChanged = ();
type MaxMembers = LiquidStakingAgentMaxMembers;
type WeightInfo = ();
}
parameter_types! {
pub const StakingPalletId: PalletId = PalletId(*b"par/stak");
pub const StakingCurrency: CurrencyId = CurrencyId::KSM;
pub const LiquidCurrency: CurrencyId = CurrencyId::xKSM;
pub const MaxWithdrawAmount: Balance = 1000_000_000_000_000;
pub const MaxAccountProcessingUnstake: u32 = 5;
}
impl pallet_liquid_staking::Config for Runtime {
type Event = Event;
type Currency = Currencies;
type PalletId = StakingPalletId;
type StakingCurrency = StakingCurrency;
type LiquidCurrency = LiquidCurrency;
type WithdrawOrigin = EnsureRoot<AccountId>;
type MaxWithdrawAmount = MaxWithdrawAmount;
type MaxAccountProcessingUnstake = MaxAccountProcessingUnstake;
type WeightInfo = pallet_liquid_staking::weights::SubstrateWeight<Runtime>;
type XcmTransfer = XTokens;
type Members = LiquidStakingAgentMembership;
type BaseXcmWeight = BaseXcmWeight;
}
parameter_types! {
pub const LockPeriod: u64 = 20000; // in milli-seconds
pub const LiquidateFactor: Percent = Percent::from_percent(50);
}
impl pallet_liquidation::Config for Runtime {
type AuthorityId = pallet_liquidation::crypto::AuthId;
type LockPeriod = LockPeriod;
type LiquidateFactor = LiquidateFactor;
}
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Runtime
where
Call: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call,
public: <Signature as traits::Verify>::Signer,
account: AccountId,
index: Index,
) -> Option<(
Call,
<UncheckedExtrinsic as traits::Extrinsic>::SignaturePayload,
)> {
let period = BlockHashCount::get() as u64;
let current_block = System::block_number()
.saturated_into::<u64>()
.saturating_sub(1);
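        // Editor note (hedged): `System::block_number()` refers to the block
        // currently being built, so one is subtracted to anchor the mortal
        // era at the previous block.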
let tip = 0;
let extra: SignedExtra = (
frame_system::CheckSpecVersion::<Runtime>::new(),
frame_system::CheckTxVersion::<Runtime>::new(),
frame_system::CheckGenesis::<Runtime>::new(),
frame_system::CheckEra::<Runtime>::from(generic::Era::mortal(period, current_block)),
frame_system::CheckNonce::<Runtime>::from(index),
frame_system::CheckWeight::<Runtime>::new(),
pallet_transaction_payment::ChargeTransactionPayment::<Runtime>::from(tip),
);
let raw_payload = SignedPayload::new(call, extra)
.map_err(|e| {
log::error!("SignedPayload error: {:?}", e);
})
.ok()?;
let signature = raw_payload.using_encoded(|payload| C::sign(payload, public))?;
let address = account;
let (call, extra, _) = raw_payload.deconstruct();
Some((
call,
(sp_runtime::MultiAddress::Id(address), signature, extra),
))
}
}
impl frame_system::offchain::SigningTypes for Runtime {
type Public = <Signature as traits::Verify>::Signer;
type Signature = Signature;
}
impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime
where
Call: From<C>,
{
type OverarchingCall = Call;
type Extrinsic = UncheckedExtrinsic;
}
parameter_types! {
pub const MinimumPeriod: u64 = SLOT_DURATION / 2;
}
impl pallet_timestamp::Config for Runtime {
/// A timestamp: milliseconds since the unix epoch.
type Moment = u64;
type OnTimestampSet = ();
type MinimumPeriod = MinimumPeriod;
type WeightInfo = ();
}
parameter_types! {
pub const UncleGenerations: u32 = 0;
}
impl pallet_authorship::Config for Runtime {
type FindAuthor = pallet_session::FindAccountFromAuthorIndex<Self, Aura>;
type UncleGenerations = UncleGenerations;
type FilterUncle = ();
type EventHandler = (CollatorSelection,);
}
parameter_types! {
pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(33);
pub const Period: u32 = 6 * HOURS;
pub const Offset: u32 = 0;
}
impl pallet_session::Config for Runtime {
type Event = Event;
type ValidatorId = <Self as frame_system::Config>::AccountId;
    // We don't have stash and controller, so we don't need the conversion either.
type ValidatorIdOf = pallet_collator_selection::IdentityCollator;
type ShouldEndSession = pallet_session::PeriodicSessions<Period, Offset>;
type NextSessionRotation = pallet_session::PeriodicSessions<Period, Offset>;
type SessionManager = CollatorSelection;
    // Essentially just Aura, but let's be pedantic.
type SessionHandler =
<opaque::SessionKeys as sp_runtime::traits::OpaqueKeys>::KeyTypeIdProviders;
type Keys = opaque::SessionKeys;
type DisabledValidatorsThreshold = DisabledValidatorsThreshold;
type WeightInfo = ();
}
parameter_types! {
pub const PotId: PalletId = PalletId(*b"PotStake");
pub const MaxCandidates: u32 = 1000;
pub const MinCandidates: u32 = 1;
pub const MaxInvulnerables: u32 = 100;
}
impl pallet_collator_selection::Config for Runtime {
type Event = Event;
type Currency = Balances;
type UpdateOrigin = EnsureRootOrHalfCouncil;
type PotId = PotId;
type MaxCandidates = MaxCandidates;
type MinCandidates = MinCandidates;
type MaxInvulnerables = MaxInvulnerables;
    // Should be a multiple of the session period or things will get inconsistent
type KickThreshold = Period;
type ValidatorId = <Self as frame_system::Config>::AccountId;
type ValidatorIdOf = pallet_collator_selection::IdentityCollator;
type ValidatorRegistration = Session;
type WeightInfo = ();
}
impl pallet_aura::Config for Runtime {
type AuthorityId = AuraId;
}
impl cumulus_pallet_aura_ext::Config for Runtime {}
parameter_types! {
pub const ExistentialDeposit: u128 = currency::EXISTENTIAL_DEPOSIT;
pub const MaxLocks: u32 = 50;
}
impl pallet_balances::Config for Runtime {
type MaxLocks = MaxLocks;
/// The type for recording an account's balance.
type Balance = Balance;
/// The ubiquitous event type.
type Event = Event;
type DustRemoval = ();
type MaxReserves = ();
type ReserveIdentifier = [u8; 8];
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = pallet_balances::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const TransactionByteFee: Balance = 1 * MILLICENTS;
}
impl pallet_transaction_payment::Config for Runtime {
type OnChargeTransaction =
pallet_transaction_payment::CurrencyAdapter<Balances, DealWithFees<Runtime>>;
type TransactionByteFee = TransactionByteFee;
type WeightToFee = WeightToFee;
type FeeMultiplierUpdate = SlowAdjustingFeeUpdate<Self>;
}
impl pallet_sudo::Config for Runtime {
type Event = Event;
type Call = Call;
}
/// No local origins on this chain are allowed to dispatch XCM sends/executions.
#[allow(unused_parens)]
pub type LocalOriginToLocation = (SignedToAccountId32<Origin, AccountId, RelayNetwork>);
/// The means for routing XCM messages which are not for local execution into the right message
/// queues.
pub type XcmRouter = (
// Two routers - use UMP to communicate with the relay chain:
cumulus_primitives_utility::ParentAsUmp<ParachainSystem>,
// ..and XCMP to communicate with the sibling chains.
XcmpQueue,
);
impl pallet_xcm::Config for Runtime {
type Event = Event;
type SendXcmOrigin = EnsureXcmOrigin<Origin, LocalOriginToLocation>;
type XcmRouter = XcmRouter;
type ExecuteXcmOrigin = EnsureXcmOrigin<Origin, LocalOriginToLocation>;
type XcmExecuteFilter = All<(MultiLocation, Xcm<Call>)>;
type XcmReserveTransferFilter = All<(MultiLocation, Vec<MultiAsset>)>;
type XcmExecutor = XcmExecutor<XcmConfig>;
// Teleporting is disabled.
type XcmTeleportFilter = ();
type Weigher = FixedWeightBounds<UnitWeightCost, Call>;
}
impl cumulus_pallet_xcm::Config for Runtime {
type Event = Event;
type XcmExecutor = XcmExecutor<XcmConfig>;
}
impl cumulus_pallet_xcmp_queue::Config for Runtime {
type Event = Event;
type XcmExecutor = XcmExecutor<XcmConfig>;
type ChannelInfo = ParachainSystem;
}
impl cumulus_pallet_dmp_queue::Config for Runtime {
type Event = Event;
type XcmExecutor = XcmExecutor<XcmConfig>;
type ExecuteOverweightOrigin = EnsureRoot<AccountId>;
}
parameter_types! {
pub const ReservedXcmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4;
pub const ReservedDmpWeight: Weight = MAXIMUM_BLOCK_WEIGHT / 4;
}
impl cumulus_pallet_parachain_system::Config for Runtime {
type Event = Event;
type OnValidationData = ();
type SelfParaId = ParachainInfo;
type DmpMessageHandler = DmpQueue;
type OutboundXcmpMessageSource = XcmpQueue;
type XcmpMessageHandler = XcmpQueue;
type ReservedXcmpWeight = ReservedXcmpWeight;
type ReservedDmpWeight = ReservedDmpWeight;
}
impl parachain_info::Config for Runtime {}
parameter_types! {
pub const RelayLocation: MultiLocation = MultiLocation::X1(Junction::Parent);
pub const RelayNetwork: NetworkId = NetworkId::Kusama;
pub HeikoNetwork: NetworkId = NetworkId::Named("heiko".into());
pub RelayChainOrigin: Origin = cumulus_pallet_xcm::Origin::Relay.into();
pub Ancestry: MultiLocation = X1(Parachain(ParachainInfo::parachain_id().into()));
}
/// Type for specifying how a `MultiLocation` can be converted into an `AccountId`. This is used
/// when determining ownership of accounts for asset transacting and when attempting to use XCM
/// `Transact` in order to determine the dispatch Origin.
pub type LocationToAccountId = (
// The parent (Relay-chain) origin converts to the default `AccountId`.
ParentIsDefault<AccountId>,
// Sibling parachain origins convert to AccountId via the `ParaId::into`.
SiblingParachainConvertsVia<Sibling, AccountId>,
// Straight up local `AccountId32` origins just alias directly to `AccountId`.
AccountId32Aliases<RelayNetwork, AccountId>,
);
/// Means for transacting assets on this chain.
pub type LocalAssetTransactor = MultiCurrencyAdapter<
// Use this currency:
Currencies,
UnknownTokens,
// Use this currency when it is a fungible asset matching the given location or name:
IsNativeConcrete<CurrencyId, CurrencyIdConvert>,
// Our chain's account ID type (we can't get away without mentioning it explicitly):
AccountId,
// Do a simple pun to convert an AccountId32 MultiLocation into a native chain account ID:
LocationToAccountId,
CurrencyId,
CurrencyIdConvert,
>;
/// This is the type we use to convert an (incoming) XCM origin into a local `Origin` instance,
/// ready for dispatching a transaction with Xcm's `Transact`. There is an `OriginKind` which can
/// bias the kind of local `Origin` it will become.
pub type XcmOriginToTransactDispatchOrigin = (
// Sovereign account converter; this attempts to derive an `AccountId` from the origin location
// using `LocationToAccountId` and then turn that into the usual `Signed` origin. Useful for
// foreign chains who want to have a local sovereign account on this chain which they control.
SovereignSignedViaLocation<LocationToAccountId, Origin>,
// Native converter for Relay-chain (Parent) location; will convert to a `Relay` origin when
// recognised.
RelayChainAsNative<RelayChainOrigin, Origin>,
// Native converter for sibling Parachains; will convert to a `SiblingPara` origin when
// recognised.
SiblingParachainAsNative<cumulus_pallet_xcm::Origin, Origin>,
// Superuser converter for the Relay-chain (Parent) location. This will allow it to issue a
// transaction from the Root origin.
ParentAsSuperuser<Origin>,
// Native signed account converter; this just converts an `AccountId32` origin into a normal
// `Origin::Signed` origin of the same 32-byte value.
SignedAccountId32AsNative<RelayNetwork, Origin>,
// Xcm origins can be represented natively under the Xcm pallet's Xcm origin.
XcmPassthrough<Origin>,
);
parameter_types! {
pub UnitWeightCost: Weight = 1_000;
}
parameter_types! {
// 1_000_000_000_000 => 1 unit of asset for 1 unit of Weight.
// TODO Should take the actual weight price. This is just 1_000 KSM per second of weight.
pub const WeightPrice: (MultiLocation, u128) = (MultiLocation::X1(Junction::Parent), 1_000);
pub AllowUnpaidFrom: Vec<MultiLocation> = vec![ MultiLocation::X1(Junction::Parent) ];
}
pub type Barrier = (
TakeWeightCredit,
AllowTopLevelPaidExecutionFrom<All<MultiLocation>>,
);
pub struct XcmConfig;
impl Config for XcmConfig {
type Call = Call;
type XcmSender = XcmRouter;
// How to withdraw and deposit an asset.
type AssetTransactor = LocalAssetTransactor;
type OriginConverter = XcmOriginToTransactDispatchOrigin;
type IsReserve = MultiNativeAsset;
// Teleporting is disabled.
type IsTeleporter = ();
type LocationInverter = LocationInverter<Ancestry>;
type Barrier = Barrier;
type Weigher = FixedWeightBounds<UnitWeightCost, Call>;
type Trader = UsingComponents<IdentityFee<Balance>, RelayLocation, AccountId, Balances, ()>;
type ResponseHandler = (); // Don't handle responses for now.
}
parameter_types! {
pub const MinimumCount: u32 = 1;
pub const ExpiresIn: Moment = 1000 * 60 * 60; // 60 mins
pub const MaxHasDispatchedSize: u32 = 100;
pub ZeroAccountId: AccountId = AccountId::from([0u8; 32]);
}
type ParallelDataProvider = orml_oracle::Instance1;
impl orml_oracle::Config<ParallelDataProvider> for Runtime {
type Event = Event;
type OnNewData = ();
type CombineData =
orml_oracle::DefaultCombineData<Runtime, MinimumCount, ExpiresIn, ParallelDataProvider>;
type Time = Timestamp;
type OracleKey = CurrencyId;
type OracleValue = PriceWithDecimal;
type RootOperatorAccountId = ZeroAccountId;
type MaxHasDispatchedSize = MaxHasDispatchedSize;
type WeightInfo = ();
type Members = OracleMembership;
}
pub type TimeStampedPrice = orml_oracle::TimestampedValue<PriceWithDecimal, Moment>;
pub struct AggregatedDataProvider;
impl DataProvider<CurrencyId, TimeStampedPrice> for AggregatedDataProvider {
fn get(key: &CurrencyId) -> Option<TimeStampedPrice> {
Oracle::get(key)
}
}
impl DataProviderExtended<CurrencyId, TimeStampedPrice> for AggregatedDataProvider {
fn get_no_op(key: &CurrencyId) -> Option<TimeStampedPrice> {
Oracle::get_no_op(key)
}
#[allow(clippy::complexity)]
fn get_all_values() -> Vec<(CurrencyId, Option<TimeStampedPrice>)> {
Oracle::get_all_values()
}
}
impl pallet_prices::Config for Runtime {
type Event = Event;
type Source = AggregatedDataProvider;
type FeederOrigin = EnsureRoot<AccountId>;
type StakingCurrency = StakingCurrency;
type LiquidCurrency = LiquidCurrency;
type LiquidStakingExchangeRateProvider = LiquidStaking;
}
parameter_types! {
// One storage item; key size is 32 bytes; value size is 4+4+16+32 = 56 bytes; total 32 + 56 = 88 bytes.
pub const DepositBase: Balance = deposit(1, 88);
// Additional storage item size of 32 bytes.
pub const DepositFactor: Balance = deposit(0, 32);
pub const MaxSignatories: u16 = 100;
}
impl pallet_multisig::Config for Runtime {
type Event = Event;
type Call = Call;
type Currency = Balances;
type DepositBase = DepositBase;
type DepositFactor = DepositFactor;
type MaxSignatories = MaxSignatories;
type WeightInfo = ();
}
parameter_types! {
pub const LaunchPeriod: BlockNumber = 28 * 24 * 60 * MINUTES;
pub const VotingPeriod: BlockNumber = 28 * 24 * 60 * MINUTES;
pub const FastTrackVotingPeriod: BlockNumber = 3 * 24 * 60 * MINUTES;
pub const InstantAllowed: bool = true;
pub const MinimumDeposit: Balance = 100 * DOLLARS;
pub const EnactmentPeriod: BlockNumber = 30 * 24 * 60 * MINUTES;
pub const CooloffPeriod: BlockNumber = 28 * 24 * 60 * MINUTES;
// One cent: $10,000 / MB
pub const PreimageByteDeposit: Balance = 1 * CENTS;
pub const MaxVotes: u32 = 100;
pub const MaxProposals: u32 = 100;
}
impl pallet_democracy::Config for Runtime {
type Proposal = Call;
type Event = Event;
type Currency = Balances;
type EnactmentPeriod = EnactmentPeriod;
type LaunchPeriod = LaunchPeriod;
type VotingPeriod = VotingPeriod;
type MinimumDeposit = MinimumDeposit;
/// A straight majority of the council can decide what their next motion is.
type ExternalOrigin =
pallet_collective::EnsureProportionAtLeast<_1, _2, AccountId, CouncilCollective>;
/// A super-majority can have the next scheduled referendum be a straight majority-carries vote.
type ExternalMajorityOrigin =
pallet_collective::EnsureProportionAtLeast<_3, _4, AccountId, CouncilCollective>;
/// A unanimous council can have the next scheduled referendum be a straight default-carries
/// (NTB) vote.
type ExternalDefaultOrigin =
pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, CouncilCollective>;
/// Two thirds of the technical committee can have an ExternalMajority/ExternalDefault vote
/// be tabled immediately and with a shorter voting/enactment period.
type FastTrackOrigin =
pallet_collective::EnsureProportionAtLeast<_2, _3, AccountId, TechnicalCollective>;
type InstantOrigin =
pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, TechnicalCollective>;
type InstantAllowed = InstantAllowed;
type FastTrackVotingPeriod = FastTrackVotingPeriod;
// To cancel a proposal which has been passed, 2/3 of the council must agree to it.
type CancellationOrigin =
pallet_collective::EnsureProportionAtLeast<_2, _3, AccountId, CouncilCollective>;
// To cancel a proposal before it has been passed, the technical committee must be unanimous or
// Root must agree.
type CancelProposalOrigin = EnsureOneOf<
AccountId,
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, TechnicalCollective>,
>;
type BlacklistOrigin = EnsureRoot<AccountId>;
// Any single technical committee member may veto a coming council proposal; however, they can
// only do it once and it lasts only for the cool-off period.
type VetoOrigin = pallet_collective::EnsureMember<AccountId, TechnicalCollective>;
type CooloffPeriod = CooloffPeriod;
type PreimageByteDeposit = PreimageByteDeposit;
type OperationalPreimageOrigin = pallet_collective::EnsureMember<AccountId, CouncilCollective>;
type Slash = Treasury;
type Scheduler = Scheduler;
type PalletsOrigin = OriginCaller;
type MaxVotes = MaxVotes;
type WeightInfo = pallet_democracy::weights::SubstrateWeight<Runtime>;
type MaxProposals = MaxProposals;
}
parameter_types! {
pub const CouncilMotionDuration: BlockNumber = 5 * DAYS;
pub const CouncilMaxProposals: u32 = 100;
pub const CouncilMaxMembers: u32 = 100;
}
type CouncilCollective = pallet_collective::Instance1;
impl pallet_collective::Config<CouncilCollective> for Runtime {
type Origin = Origin;
type Proposal = Call;
type Event = Event;
type MotionDuration = CouncilMotionDuration;
type MaxProposals = CouncilMaxProposals;
type MaxMembers = CouncilMaxMembers;
type DefaultVote = pallet_collective::PrimeDefaultVote;
type WeightInfo = pallet_collective::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const CandidacyBond: Balance = 10 * DOLLARS;
// 1 storage item created, key size is 32 bytes, value size is 16+16.
pub const VotingBondBase: Balance = deposit(1, 64);
// additional data per vote is 32 bytes (account id).
pub const VotingBondFactor: Balance = deposit(0, 32);
pub const TermDuration: BlockNumber = 7 * DAYS;
pub const DesiredMembers: u32 = 13;
pub const DesiredRunnersUp: u32 = 7;
pub const ElectionsPhragmenPalletId: LockIdentifier = *b"par/phra";
}
// Make sure that there are no more than `MaxMembers` members elected via elections-phragmen.
const_assert!(DesiredMembers::get() <= CouncilMaxMembers::get());
impl pallet_elections_phragmen::Config for Runtime {
type Event = Event;
type PalletId = ElectionsPhragmenPalletId;
type Currency = Balances;
type ChangeMembers = Council;
// NOTE: this implies that council's genesis members cannot be set directly and must come from
// this module.
type InitializeMembers = Council;
type CurrencyToVote = U128CurrencyToVote;
type CandidacyBond = CandidacyBond;
type VotingBondBase = VotingBondBase;
type VotingBondFactor = VotingBondFactor;
type LoserCandidate = ();
type KickedMember = ();
type DesiredMembers = DesiredMembers;
type DesiredRunnersUp = DesiredRunnersUp;
type TermDuration = TermDuration;
type WeightInfo = pallet_elections_phragmen::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const TechnicalMotionDuration: BlockNumber = 5 * DAYS;
pub const TechnicalMaxProposals: u32 = 100;
pub const TechnicalMaxMembers: u32 = 100;
}
type TechnicalCollective = pallet_collective::Instance2;
impl pallet_collective::Config<TechnicalCollective> for Runtime {
type Origin = Origin;
type Proposal = Call;
type Event = Event;
type MotionDuration = TechnicalMotionDuration;
type MaxProposals = TechnicalMaxProposals;
type MaxMembers = TechnicalMaxMembers;
type DefaultVote = pallet_collective::PrimeDefaultVote;
type WeightInfo = pallet_collective::weights::SubstrateWeight<Runtime>;
}
type EnsureRootOrHalfCouncil = EnsureOneOf<
AccountId,
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective>,
>;
impl pallet_membership::Config<pallet_membership::Instance1> for Runtime {
type Event = Event;
type AddOrigin = EnsureRootOrHalfCouncil;
type RemoveOrigin = EnsureRootOrHalfCouncil;
type SwapOrigin = EnsureRootOrHalfCouncil;
type ResetOrigin = EnsureRootOrHalfCouncil;
type PrimeOrigin = EnsureRootOrHalfCouncil;
type MembershipInitialized = TechnicalCommittee;
type MembershipChanged = TechnicalCommittee;
type MaxMembers = TechnicalMaxMembers;
type WeightInfo = pallet_membership::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub MaximumSchedulerWeight: Weight = Perbill::from_percent(80) *
RuntimeBlockWeights::get().max_block;
pub const MaxScheduledPerBlock: u32 = 50;
}
impl pallet_scheduler::Config for Runtime {
type Event = Event;
type Origin = Origin;
type PalletsOrigin = OriginCaller;
type Call = Call;
type MaximumWeight = MaximumSchedulerWeight;
type ScheduleOrigin = EnsureRoot<AccountId>;
type MaxScheduledPerBlock = MaxScheduledPerBlock;
type WeightInfo = pallet_scheduler::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const ProposalBond: Permill = Permill::from_percent(5);
pub const ProposalBondMinimum: Balance = 1 * DOLLARS;
pub const SpendPeriod: BlockNumber = 1 * DAYS;
pub const Burn: Permill = Permill::from_percent(50);
pub const TipCountdown: BlockNumber = 1 * DAYS;
pub const TipFindersFee: Percent = Percent::from_percent(20);
pub const TipReportDepositBase: Balance = 1 * DOLLARS;
pub const TreasuryPalletId: PalletId = PalletId(*b"par/trsy");
pub const MaxApprovals: u32 = 100;
}
impl pallet_treasury::Config for Runtime {
type PalletId = TreasuryPalletId;
type Currency = Balances;
type ApproveOrigin = EnsureOneOf<
AccountId,
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionAtLeast<_3, _5, AccountId, CouncilCollective>,
>;
type RejectOrigin = EnsureOneOf<
AccountId,
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective>,
>;
type Event = Event;
type OnSlash = ();
type ProposalBond = ProposalBond;
type ProposalBondMinimum = ProposalBondMinimum;
type SpendPeriod = SpendPeriod;
type Burn = Burn;
type BurnDestination = ();
type SpendFunds = ();
type WeightInfo = pallet_treasury::weights::SubstrateWeight<Runtime>;
type MaxApprovals = MaxApprovals;
}
parameter_types! {
pub const OracleMaxMembers: u32 = 100;
}
type OracleMembershipInstance = pallet_membership::Instance2;
impl pallet_membership::Config<OracleMembershipInstance> for Runtime {
type Event = Event;
type AddOrigin = EnsureRoot<AccountId>;
type RemoveOrigin = EnsureRoot<AccountId>;
type SwapOrigin = EnsureRoot<AccountId>;
type ResetOrigin = EnsureRoot<AccountId>;
type PrimeOrigin = EnsureRoot<AccountId>;
type MembershipInitialized = ();
type MembershipChanged = ();
type MaxMembers = OracleMaxMembers;
type WeightInfo = ();
}
// Create the runtime by composing the FRAME pallets that were previously configured.
construct_runtime!(
pub enum Runtime where
Block = Block,
NodeBlock = opaque::Block,
UncheckedExtrinsic = UncheckedExtrinsic,
{
// System, Utility
System: frame_system::{Pallet, Call, Storage, Config, Event<T>},
Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent},
Multisig: pallet_multisig::{Pallet, Call, Storage, Event<T>},
// Governance
Sudo: pallet_sudo::{Pallet, Call, Storage, Config<T>, Event<T>},
Democracy: pallet_democracy::{Pallet, Call, Storage, Config<T>, Event<T>},
Council: pallet_collective::<Instance1>::{Pallet, Call, Storage, Origin<T>, Event<T>, Config<T>},
TechnicalCommittee: pallet_collective::<Instance2>::{Pallet, Call, Storage, Origin<T>, Event<T>, Config<T>},
Treasury: pallet_treasury::{Pallet, Call, Storage, Config, Event<T>},
Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event<T>},
Elections: pallet_elections_phragmen::{Pallet, Call, Storage, Event<T>, Config<T>},
TechnicalMembership: pallet_membership::<Instance1>::{Pallet, Call, Storage, Event<T>, Config<T>},
// Currencies
Balances: pallet_balances::{Pallet, Call, Storage, Config<T>, Event<T>},
TransactionPayment: pallet_transaction_payment::{Pallet, Storage},
// Parachain
ParachainSystem: cumulus_pallet_parachain_system::{Pallet, Call, Storage, Inherent, Event<T>},
ParachainInfo: parachain_info::{Pallet, Storage, Config},
XcmpQueue: cumulus_pallet_xcmp_queue::{Pallet, Call, Storage, Event<T>},
DmpQueue: cumulus_pallet_dmp_queue::{Pallet, Call, Storage, Event<T>},
PolkadotXcm: pallet_xcm::{Pallet, Call, Event<T>, Origin},
CumulusXcm: cumulus_pallet_xcm::{Pallet, Call, Event<T>, Origin},
// Collator
Authorship: pallet_authorship::{Pallet, Call, Storage},
CollatorSelection: pallet_collator_selection::{Pallet, Call, Storage, Event<T>, Config<T>},
Session: pallet_session::{Pallet, Call, Storage, Event, Config<T>},
// Consensus
Aura: pallet_aura::{Pallet, Config<T>},
AuraExt: cumulus_pallet_aura_ext::{Pallet, Config},
// ORML
Currencies: orml_currencies::{Pallet, Call, Event<T>},
Tokens: orml_tokens::{Pallet, Storage, Event<T>, Config<T>},
Oracle: orml_oracle::<Instance1>::{Pallet, Storage, Call, Event<T>},
XTokens: orml_xtokens::{Pallet, Storage, Call, Event<T>},
UnknownTokens: orml_unknown_tokens::{Pallet, Storage, Event},
// Parallel pallets
Loans: pallet_loans::{Pallet, Call, Storage, Event<T>, Config},
LiquidStaking: pallet_liquid_staking::{Pallet, Call, Storage, Event<T>, Config},
Liquidation: pallet_liquidation::{Pallet, Call},
Prices: pallet_prices::{Pallet, Storage, Call, Event<T>},
// Oracles
OracleMembership: pallet_membership::<Instance2>::{Pallet, Call, Storage, Event<T>, Config<T>},
// LiquidStaking
LiquidStakingAgentMembership: pallet_membership::<Instance3>::{Pallet, Call, Storage, Event<T>, Config<T>},
}
);
/// The address format for describing accounts.
pub type Address = sp_runtime::MultiAddress<AccountId, ()>;
/// Block header type as expected by this runtime.
pub type Header = generic::Header<BlockNumber, BlakeTwo256>;
/// Block type as expected by this runtime.
pub type Block = generic::Block<Header, UncheckedExtrinsic>;
/// A Block signed with a Justification
pub type SignedBlock = generic::SignedBlock<Block>;
/// BlockId type as expected by this runtime.
pub type BlockId = generic::BlockId<Block>;
/// The SignedExtension to the basic transaction logic.
pub type SignedExtra = (
frame_system::CheckSpecVersion<Runtime>,
frame_system::CheckTxVersion<Runtime>,
frame_system::CheckGenesis<Runtime>,
frame_system::CheckEra<Runtime>,
frame_system::CheckNonce<Runtime>,
frame_system::CheckWeight<Runtime>,
pallet_transaction_payment::ChargeTransactionPayment<Runtime>,
);
/// Unchecked extrinsic type as expected by this runtime.
pub type UncheckedExtrinsic = generic::UncheckedExtrinsic<Address, Call, Signature, SignedExtra>;
/// The payload being signed in transactions.
pub type SignedPayload = generic::SignedPayload<Call, SignedExtra>;
/// Extrinsic type that has already been checked.
pub type CheckedExtrinsic = generic::CheckedExtrinsic<AccountId, Call, SignedExtra>;
/// Executive: handles dispatch to the various modules.
pub type Executive = frame_executive::Executive<
Runtime,
Block,
frame_system::ChainContext<Runtime>,
Runtime,
AllPallets,
>;
impl_runtime_apis! {
impl sp_consensus_aura::AuraApi<Block, AuraId> for Runtime {
fn slot_duration() -> sp_consensus_aura::SlotDuration {
sp_consensus_aura::SlotDuration::from_millis(Aura::slot_duration())
}
fn authorities() -> Vec<AuraId> {
Aura::authorities()
}
}
impl sp_session::SessionKeys<Block> for Runtime {
fn generate_session_keys(seed: Option<Vec<u8>>) -> Vec<u8> {
opaque::SessionKeys::generate(seed)
}
fn decode_session_keys(
encoded: Vec<u8>,
) -> Option<Vec<(Vec<u8>, KeyTypeId)>> {
opaque::SessionKeys::decode_into_raw_public_keys(&encoded)
}
}
impl sp_api::Core<Block> for Runtime {
fn version() -> RuntimeVersion {
VERSION
}
fn execute_block(block: Block) {
Executive::execute_block(block)
}
fn initialize_block(header: &<Block as BlockT>::Header) {
Executive::initialize_block(header)
}
}
impl sp_api::Metadata<Block> for Runtime {
fn metadata() -> OpaqueMetadata {
Runtime::metadata().into()
}
}
impl sp_block_builder::BlockBuilder<Block> for Runtime {
fn apply_extrinsic(extrinsic: <Block as BlockT>::Extrinsic) -> ApplyExtrinsicResult {
Executive::apply_extrinsic(extrinsic)
}
fn finalize_block() -> <Block as BlockT>::Header {
Executive::finalize_block()
}
fn inherent_extrinsics(data: sp_inherents::InherentData) -> Vec<<Block as BlockT>::Extrinsic> {
data.create_extrinsics()
}
fn check_inherents(
block: Block,
data: sp_inherents::InherentData,
) -> sp_inherents::CheckInherentsResult {
data.check_extrinsics(&block)
}
}
impl sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block> for Runtime {
fn validate_transaction(
source: TransactionSource,
tx: <Block as BlockT>::Extrinsic,
block_hash: <Block as BlockT>::Hash,
) -> TransactionValidity {
Executive::validate_transaction(source, tx, block_hash)
}
}
impl sp_offchain::OffchainWorkerApi<Block> for Runtime {
fn offchain_worker(header: &<Block as BlockT>::Header) {
Executive::offchain_worker(header)
}
}
impl frame_system_rpc_runtime_api::AccountNonceApi<Block, AccountId, Index> for Runtime {
fn account_nonce(account: AccountId) -> Index {
System::account_nonce(account)
}
}
impl pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi<Block, Balance> for Runtime {
fn query_info(
uxt: <Block as BlockT>::Extrinsic,
len: u32,
) -> pallet_transaction_payment_rpc_runtime_api::RuntimeDispatchInfo<Balance> {
TransactionPayment::query_info(uxt, len)
}
fn query_fee_details(
uxt: <Block as BlockT>::Extrinsic,
len: u32,
) -> pallet_transaction_payment::FeeDetails<Balance> {
TransactionPayment::query_fee_details(uxt, len)
}
}
impl orml_oracle_rpc_runtime_api::OracleApi<
Block,
DataProviderId,
CurrencyId,
TimeStampedPrice,
> for Runtime {
fn get_value(provider_id: DataProviderId, key: CurrencyId) -> Option<TimeStampedPrice> {
match provider_id {
DataProviderId::Aggregated => Prices::get_no_op(&key)
}
}
fn get_all_values(provider_id: DataProviderId) -> Vec<(CurrencyId, Option<TimeStampedPrice>)> {
match provider_id {
DataProviderId::Aggregated => Prices::get_all_values()
}
}
}
impl cumulus_primitives_core::CollectCollationInfo<Block> for Runtime {
fn collect_collation_info() -> cumulus_primitives_core::CollationInfo {
ParachainSystem::collect_collation_info()
}
}
impl pallet_loans_rpc_runtime_api::LoansApi<Block, AccountId> for Runtime {
fn get_account_liquidity(account: AccountId) -> Result<(Shortfalls, Surplus), DispatchError> {
Loans::get_account_liquidity(&account)
}
}
#[cfg(feature = "runtime-benchmarks")]
impl frame_benchmarking::Benchmark<Block> for Runtime {
fn dispatch_benchmark(
config: frame_benchmarking::BenchmarkConfig
) -> Result<Vec<frame_benchmarking::BenchmarkBatch>, sp_runtime::RuntimeString> {
use frame_benchmarking::{Benchmarking, BenchmarkBatch, add_benchmark, TrackedStorageKey};
use pallet_loans_benchmarking::Pallet as LoansBench;
use frame_system_benchmarking::Pallet as SystemBench;
impl pallet_loans_benchmarking::Config for Runtime {}
impl frame_system_benchmarking::Config for Runtime {}
let whitelist: Vec<TrackedStorageKey> = vec![
// Block Number
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef702a5c1b19ab7a04f536c519aca4983ac").to_vec().into(),
// Total Issuance
hex_literal::hex!("c2261276cc9d1f8598ea4b6a74b15c2f57c875e4cff74148e4628f264b974c80").to_vec().into(),
// Execution Phase
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef7ff553b5a9862a516939d82b3d3d8661a").to_vec().into(),
// Event Count
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef70a98fdbe9ce6c55837576c60c7af3850").to_vec().into(),
// System Events
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef780d41e5e16056765bc8461851072c9d7").to_vec().into(),
];
let mut batches = Vec::<BenchmarkBatch>::new();
let params = (&config, &whitelist);
add_benchmark!(params, batches, frame_system, SystemBench::<Runtime>);
add_benchmark!(params, batches, pallet_balances, Balances);
add_benchmark!(params, batches, pallet_timestamp, Timestamp);
add_benchmark!(params, batches, pallet_loans, LoansBench::<Runtime>);
add_benchmark!(params, batches, pallet_liquid_staking, LiquidStaking);
if batches.is_empty() { return Err("Benchmark not found for this pallet.".into()) }
Ok(batches)
}
}
}
struct CheckInherents;
impl cumulus_pallet_parachain_system::CheckInherents<Block> for CheckInherents {
fn check_inherents(
block: &Block,
relay_state_proof: &cumulus_pallet_parachain_system::RelayChainStateProof,
) -> sp_inherents::CheckInherentsResult {
let relay_chain_slot = relay_state_proof
.read_slot()
.expect("Could not read the relay chain slot from the proof");
let inherent_data =
cumulus_primitives_timestamp::InherentDataProvider::from_relay_chain_slot_and_duration(
relay_chain_slot,
sp_std::time::Duration::from_secs(6),
)
.create_inherent_data()
.expect("Could not create the timestamp inherent data");
inherent_data.check_extrinsics(&block)
}
}
cumulus_pallet_parachain_system::register_validate_block!(
Runtime = Runtime,
BlockExecutor = cumulus_pallet_aura_ext::BlockExecutor::<Runtime, Executive>,
CheckInherents = CheckInherents,
);
| 38.298405 | 118 | 0.703503 |
18c47b41795f77183fdac6f102fcf3881a71ff75 | 6,537 | use alga::general::{
AbstractGroup, AbstractLoop, AbstractMagma, AbstractMonoid, AbstractQuasigroup,
AbstractSemigroup, Id, Identity, TwoSidedInverse, Multiplicative, Real,
};
use alga::linear::{
self, AffineTransformation, DirectIsometry, Isometry, OrthogonalTransformation,
ProjectiveTransformation, Similarity, Transformation,
};
use base::allocator::Allocator;
use base::dimension::DimName;
use base::{DefaultAllocator, VectorN};
use geometry::{Point, Rotation};
/*
*
* Algebraic structures.
*
*/
impl<N: Real, D: DimName> Identity<Multiplicative> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D>
{
#[inline]
fn identity() -> Self {
Self::identity()
}
}
impl<N: Real, D: DimName> TwoSidedInverse<Multiplicative> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D>
{
#[inline]
fn two_sided_inverse(&self) -> Self {
self.transpose()
}
#[inline]
fn two_sided_inverse_mut(&mut self) {
self.transpose_mut()
}
}
impl<N: Real, D: DimName> AbstractMagma<Multiplicative> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D>
{
#[inline]
fn operate(&self, rhs: &Self) -> Self {
self * rhs
}
}
macro_rules! impl_multiplicative_structures(
($($marker: ident<$operator: ident>),* $(,)*) => {$(
impl<N: Real, D: DimName> $marker<$operator> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> { }
)*}
);
impl_multiplicative_structures!(
AbstractSemigroup<Multiplicative>,
AbstractMonoid<Multiplicative>,
AbstractQuasigroup<Multiplicative>,
AbstractLoop<Multiplicative>,
AbstractGroup<Multiplicative>
);
/*
*
* Transformation groups.
*
*/
impl<N: Real, D: DimName> Transformation<Point<N, D>> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> + Allocator<N, D>
{
#[inline]
fn transform_point(&self, pt: &Point<N, D>) -> Point<N, D> {
self * pt
}
#[inline]
fn transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D> {
self * v
}
}
impl<N: Real, D: DimName> ProjectiveTransformation<Point<N, D>> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> + Allocator<N, D>
{
#[inline]
fn inverse_transform_point(&self, pt: &Point<N, D>) -> Point<N, D> {
Point::from(self.inverse_transform_vector(&pt.coords))
}
#[inline]
fn inverse_transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D> {
self.matrix().tr_mul(v)
}
}
impl<N: Real, D: DimName> AffineTransformation<Point<N, D>> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> + Allocator<N, D>
{
type Rotation = Self;
type NonUniformScaling = Id;
type Translation = Id;
#[inline]
fn decompose(&self) -> (Id, Self, Id, Self) {
(Id::new(), self.clone(), Id::new(), Self::identity())
}
#[inline]
fn append_translation(&self, _: &Self::Translation) -> Self {
self.clone()
}
#[inline]
fn prepend_translation(&self, _: &Self::Translation) -> Self {
self.clone()
}
#[inline]
fn append_rotation(&self, r: &Self::Rotation) -> Self {
r * self
}
#[inline]
fn prepend_rotation(&self, r: &Self::Rotation) -> Self {
self * r
}
#[inline]
fn append_scaling(&self, _: &Self::NonUniformScaling) -> Self {
self.clone()
}
#[inline]
fn prepend_scaling(&self, _: &Self::NonUniformScaling) -> Self {
self.clone()
}
}
impl<N: Real, D: DimName> Similarity<Point<N, D>> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> + Allocator<N, D>
{
type Scaling = Id;
#[inline]
fn translation(&self) -> Id {
Id::new()
}
#[inline]
fn rotation(&self) -> Self {
self.clone()
}
#[inline]
fn scaling(&self) -> Id {
Id::new()
}
}
macro_rules! marker_impl(
($($Trait: ident),*) => {$(
impl<N: Real, D: DimName> $Trait<Point<N, D>> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> +
Allocator<N, D> { }
)*}
);
marker_impl!(Isometry, DirectIsometry, OrthogonalTransformation);
/// Subgroups of the n-dimensional rotation group `SO(n)`.
impl<N: Real, D: DimName> linear::Rotation<Point<N, D>> for Rotation<N, D>
where DefaultAllocator: Allocator<N, D, D> + Allocator<N, D>
{
#[inline]
fn powf(&self, _: N) -> Option<Self> {
// XXX: Add the general case.
// XXX: Use specialization for 2D and 3D.
unimplemented!()
}
#[inline]
fn rotation_between(_: &VectorN<N, D>, _: &VectorN<N, D>) -> Option<Self> {
// XXX: Add the general case.
// XXX: Use specialization for 2D and 3D.
unimplemented!()
}
#[inline]
fn scaled_rotation_between(_: &VectorN<N, D>, _: &VectorN<N, D>, _: N) -> Option<Self> {
// XXX: Add the general case.
// XXX: Use specialization for 2D and 3D.
unimplemented!()
}
}
/*
impl<N: Real> Matrix for Rotation<N> {
type Field = N;
type Row = Matrix<N>;
type Column = Matrix<N>;
type Transpose = Self;
#[inline]
fn nrows(&self) -> usize {
self.submatrix.nrows()
}
#[inline]
fn ncolumns(&self) -> usize {
self.submatrix.ncolumns()
}
#[inline]
fn row(&self, i: usize) -> Self::Row {
self.submatrix.row(i)
}
#[inline]
fn column(&self, i: usize) -> Self::Column {
self.submatrix.column(i)
}
#[inline]
fn get(&self, i: usize, j: usize) -> Self::Field {
self.submatrix[(i, j)]
}
#[inline]
unsafe fn get_unchecked(&self, i: usize, j: usize) -> Self::Field {
self.submatrix.at_fast(i, j)
}
#[inline]
fn transpose(&self) -> Self::Transpose {
Rotation::from_matrix_unchecked(self.submatrix.transpose())
}
}
impl<N: Real> SquareMatrix for Rotation<N> {
type Vector = Matrix<N>;
#[inline]
fn diagonal(&self) -> Self::Coordinates {
self.submatrix.diagonal()
}
#[inline]
fn determinant(&self) -> Self::Field {
::one()
}
#[inline]
fn try_inverse(&self) -> Option<Self> {
Some(::transpose(self))
}
#[inline]
fn try_inverse_mut(&mut self) -> bool {
self.transpose_mut();
true
}
#[inline]
fn transpose_mut(&mut self) {
self.submatrix.transpose_mut()
}
}
impl<N: Real> InversibleSquareMatrix for Rotation<N> { }
*/
| 23.684783 | 92 | 0.590485 |
7927c15d21ad255fce6dc68db592710e25753174 | 2,740 | use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// Changes the owner of a chat. The current user must be a current owner of the chat. Use the method canTransferOwnership to check whether the ownership can be transferred from the current session. Available only for supergroups and channel chats
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TransferChatOwnership {
#[doc(hidden)]
#[serde(rename(serialize = "@extra", deserialize = "@extra"))]
extra: Option<String>,
#[serde(rename(serialize = "@client_id", deserialize = "@client_id"))]
client_id: Option<i32>,
/// Chat identifier
chat_id: i64,
/// Identifier of the user to which transfer the ownership. The ownership can't be transferred to a bot or to a deleted user
user_id: i32,
/// The password of the current user
password: String,
#[serde(rename(serialize = "@type"))]
td_type: String,
}
impl RObject for TransferChatOwnership {
#[doc(hidden)]
fn extra(&self) -> Option<&str> {
self.extra.as_deref()
}
#[doc(hidden)]
fn client_id(&self) -> Option<i32> {
self.client_id
}
}
impl RFunction for TransferChatOwnership {}
impl TransferChatOwnership {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
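    /// Returns a builder for assembling a `transferChatOwnership` request.
    ///
    /// A minimal sketch; the chat id, user id and password below are placeholder
    /// values, not real identifiers:
    ///
    /// ```ignore
    /// let request = TransferChatOwnership::builder()
    ///     .chat_id(123)
    ///     .user_id(456)
    ///     .password("current user's password")
    ///     .build();
    /// ```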
pub fn builder() -> RTDTransferChatOwnershipBuilder {
        let inner = TransferChatOwnership {
            extra: Some(Uuid::new_v4().to_string()),
            td_type: "transferChatOwnership".to_string(),
            ..Default::default()
        };
        RTDTransferChatOwnershipBuilder { inner }
}
pub fn chat_id(&self) -> i64 {
self.chat_id
}
pub fn user_id(&self) -> i32 {
self.user_id
}
pub fn password(&self) -> &String {
&self.password
}
}
#[doc(hidden)]
pub struct RTDTransferChatOwnershipBuilder {
inner: TransferChatOwnership,
}
impl RTDTransferChatOwnershipBuilder {
pub fn build(&self) -> TransferChatOwnership {
self.inner.clone()
}
pub fn chat_id(&mut self, chat_id: i64) -> &mut Self {
self.inner.chat_id = chat_id;
self
}
pub fn user_id(&mut self, user_id: i32) -> &mut Self {
self.inner.user_id = user_id;
self
}
pub fn password<T: AsRef<str>>(&mut self, password: T) -> &mut Self {
self.inner.password = password.as_ref().to_string();
self
}
}
impl AsRef<TransferChatOwnership> for TransferChatOwnership {
fn as_ref(&self) -> &TransferChatOwnership {
self
}
}
impl AsRef<TransferChatOwnership> for RTDTransferChatOwnershipBuilder {
fn as_ref(&self) -> &TransferChatOwnership {
&self.inner
}
}
| 27.4 | 247 | 0.647445 |
bb4921f67b15cd16ab39c6619389dd7ae3ecf4ef | 10,933 | // Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use crate::aast_check;
use crate::coeffects_check;
use crate::expression_tree_check;
use crate::readonly_check;
use bumpalo::Bump;
use lowerer::{lower, ScourComment};
use mode_parser::{parse_mode, Language};
use namespaces_rust as namespaces;
use ocamlrep::rc::RcOc;
use ocamlrep_derive::{FromOcamlRep, ToOcamlRep};
use oxidized::{
aast::Program, file_info::Mode, namespace_env::Env as NamespaceEnv, pos::Pos,
scoured_comments::ScouredComments,
};
use parser_core_types::{
indexed_source_text::IndexedSourceText,
parser_env::ParserEnv,
source_text::SourceText,
syntax_by_ref::{
positioned_syntax::PositionedSyntax,
positioned_token::{PositionedToken, TokenFactory},
positioned_value::PositionedValue,
},
syntax_error::SyntaxError,
syntax_tree::SyntaxTree,
};
pub use rust_aast_parser_types::{Env, Result as ParserResult};
use rust_parser_errors::parse_errors_with_text;
use smart_constructors::NoState;
use stack_limit::StackLimit;
use std::borrow::Borrow;
type PositionedSyntaxTree<'src, 'arena> = SyntaxTree<'src, PositionedSyntax<'arena>, NoState>;
#[derive(Debug, FromOcamlRep, ToOcamlRep)]
pub enum Error {
NotAHackFile(),
ParserFatal(SyntaxError, Pos),
Other(String),
}
impl<T: ToString> From<T> for Error {
fn from(s: T) -> Self {
Error::Other(s.to_string())
}
}
pub type Result<T> = std::result::Result<T, Error>;
pub struct AastParser;
impl<'src> AastParser {
pub fn from_text(
env: &Env,
indexed_source_text: &'src IndexedSourceText<'src>,
stack_limit: Option<&StackLimit>,
) -> Result<ParserResult> {
let ns = NamespaceEnv::empty(
env.parser_options.po_auto_namespace_map.clone(),
env.codegen,
env.parser_options.po_disable_xhp_element_mangling,
);
Self::from_text_with_namespace_env(env, RcOc::new(ns), indexed_source_text, stack_limit)
}
pub fn from_text_with_namespace_env(
env: &Env,
ns: RcOc<NamespaceEnv>,
indexed_source_text: &'src IndexedSourceText<'src>,
stack_limit: Option<&StackLimit>,
) -> Result<ParserResult> {
let arena = Bump::new();
let (language, mode, tree) =
Self::parse_text(&arena, env, indexed_source_text, stack_limit)?;
Self::from_tree_with_namespace_env(
env,
ns,
indexed_source_text,
stack_limit,
&arena,
language,
mode,
tree,
)
}
pub fn from_tree<'arena>(
env: &Env,
indexed_source_text: &'src IndexedSourceText<'src>,
stack_limit: Option<&StackLimit>,
arena: &'arena Bump,
language: Language,
mode: Option<Mode>,
tree: PositionedSyntaxTree<'src, 'arena>,
) -> Result<ParserResult> {
let ns = NamespaceEnv::empty(
env.parser_options.po_auto_namespace_map.clone(),
env.codegen,
env.parser_options.po_disable_xhp_element_mangling,
);
Self::from_tree_with_namespace_env(
env,
RcOc::new(ns),
indexed_source_text,
stack_limit,
arena,
language,
mode,
tree,
)
}
pub fn from_tree_with_namespace_env<'arena>(
env: &Env,
ns: RcOc<NamespaceEnv>,
indexed_source_text: &'src IndexedSourceText<'src>,
stack_limit: Option<&StackLimit>,
arena: &'arena Bump,
language: Language,
mode: Option<Mode>,
tree: PositionedSyntaxTree<'src, 'arena>,
) -> Result<ParserResult> {
match language {
Language::Hack => {}
_ => return Err(Error::NotAHackFile()),
}
let mode = mode.unwrap_or(Mode::Mstrict);
let scoured_comments =
Self::scour_comments_and_add_fixmes(env, indexed_source_text, &tree.root())?;
let mut lowerer_env = lowerer::Env::make(
env.codegen,
env.quick_mode,
env.keep_errors,
env.show_all_errors,
env.fail_open,
mode,
indexed_source_text,
&env.parser_options,
RcOc::clone(&ns),
stack_limit,
TokenFactory::new(&arena),
&arena,
);
let ret = lower(&mut lowerer_env, tree.root());
let mut ret = if env.elaborate_namespaces {
ret.map(|ast| namespaces::toplevel_elaborator::elaborate_toplevel_defs(ns, ast))
} else {
ret
};
let syntax_errors = match &mut ret {
Ok(aast) => Self::check_syntax_error(&env, indexed_source_text, &tree, Some(aast)),
Err(_) => Self::check_syntax_error(env, indexed_source_text, &tree, None),
};
let lowpri_errors = lowerer_env.lowpri_errors().borrow().to_vec();
let errors = lowerer_env.hh_errors().borrow().to_vec();
let lint_errors = lowerer_env.lint_errors().borrow().to_vec();
Ok(ParserResult {
file_mode: mode,
scoured_comments,
aast: ret,
lowpri_errors,
syntax_errors,
errors,
lint_errors,
})
}
fn check_syntax_error<'arena>(
env: &Env,
indexed_source_text: &'src IndexedSourceText<'src>,
tree: &PositionedSyntaxTree<'src, 'arena>,
aast: Option<&mut Program<(), ()>>,
) -> Vec<SyntaxError> {
let find_errors = |hhi_mode: bool| -> Vec<SyntaxError> {
let mut errors = tree.errors().into_iter().cloned().collect::<Vec<_>>();
let (parse_errors, uses_readonly) = parse_errors_with_text(
tree,
indexed_source_text.clone(),
                // TODO(hrust) change parser_options to a ref in ParserErrors
env.parser_options.clone(),
true, /* hhvm_compat_mode */
hhi_mode,
env.codegen,
env.is_systemlib,
);
errors.extend(parse_errors);
errors.sort_by(SyntaxError::compare_offset);
let mut empty_program = Program(vec![]);
let mut aast = aast.unwrap_or(&mut empty_program);
if uses_readonly {
errors.extend(readonly_check::check_program(&mut aast, !env.codegen));
}
errors.extend(aast_check::check_program(&aast, !env.codegen));
errors.extend(expression_tree_check::check_splices(&aast));
errors.extend(coeffects_check::check_program(&aast, !env.codegen));
errors
};
if env.codegen {
find_errors(false /* hhi_mode */)
} else if env.keep_errors {
let first_error = tree.errors().into_iter().next();
match first_error {
None if !env.quick_mode && !env.parser_options.po_parser_errors_only => {
let is_hhi = indexed_source_text
.source_text()
.file_path()
.has_extension("hhi");
find_errors(is_hhi)
}
None => vec![],
Some(e) => vec![e.clone()],
}
} else {
vec![]
}
}
fn parse_text<'arena>(
arena: &'arena Bump,
env: &Env,
indexed_source_text: &'src IndexedSourceText<'src>,
stack_limit: Option<&StackLimit>,
) -> Result<(Language, Option<Mode>, PositionedSyntaxTree<'src, 'arena>)> {
let source_text = indexed_source_text.source_text();
let (language, mode, parser_env) = Self::make_parser_env(env, source_text);
let tree = Self::parse(arena, env, parser_env, source_text, mode, stack_limit)?;
Ok((language, mode, tree))
}
pub fn make_parser_env<'arena>(
env: &Env,
source_text: &'src SourceText<'src>,
) -> (Language, Option<Mode>, ParserEnv) {
let (language, mode) = parse_mode(source_text);
let parser_env = ParserEnv {
codegen: env.codegen,
hhvm_compat_mode: env.codegen,
php5_compat_mode: env.php5_compat_mode,
allow_new_attribute_syntax: env.parser_options.po_allow_new_attribute_syntax,
enable_xhp_class_modifier: env.parser_options.po_enable_xhp_class_modifier,
disable_xhp_element_mangling: env.parser_options.po_disable_xhp_element_mangling,
disable_xhp_children_declarations: env
.parser_options
.po_disable_xhp_children_declarations,
disallow_fun_and_cls_meth_pseudo_funcs: env
.parser_options
.po_disallow_fun_and_cls_meth_pseudo_funcs,
interpret_soft_types_as_like_types: env
.parser_options
.po_interpret_soft_types_as_like_types,
};
(language, mode, parser_env)
}
fn parse<'arena>(
arena: &'arena Bump,
env: &Env,
parser_env: ParserEnv,
source_text: &'src SourceText<'src>,
mode: Option<Mode>,
stack_limit: Option<&StackLimit>,
) -> Result<PositionedSyntaxTree<'src, 'arena>> {
let quick_mode = match mode {
None | Some(Mode::Mhhi) => !env.codegen,
_ => !env.codegen && env.quick_mode,
};
let tree = if quick_mode {
let (tree, errors, _state) =
decl_mode_parser::parse_script(arena, source_text, parser_env, stack_limit);
PositionedSyntaxTree::create(source_text, tree, errors, mode, NoState, None)
} else {
let (tree, errors, _state) =
positioned_by_ref_parser::parse_script(arena, source_text, parser_env, stack_limit);
PositionedSyntaxTree::create(source_text, tree, errors, mode, NoState, None)
};
Ok(tree)
}
fn scour_comments_and_add_fixmes<'arena>(
env: &Env,
indexed_source_text: &'src IndexedSourceText<'_>,
script: &PositionedSyntax<'arena>,
) -> Result<ScouredComments> {
let scourer: ScourComment<'_, PositionedToken<'arena>, PositionedValue<'arena>> =
ScourComment {
phantom: std::marker::PhantomData,
indexed_source_text,
collect_fixmes: env.keep_errors,
include_line_comments: env.include_line_comments,
disable_hh_ignore_error: env.parser_options.po_disable_hh_ignore_error,
allowed_decl_fixme_codes: &env.parser_options.po_allowed_decl_fixme_codes,
};
Ok(scourer.scour_comments(script))
}
}
| 35.963816 | 100 | 0.595262 |
d56c05aa131aa1b812ded938cffd81a1c675c59e | 10,057 | use crate::host_controller::{StartActor, StartProvider};
use crate::messagebus::AdvertiseLink;
use crate::oci::fetch_oci_bytes;
use crate::NativeCapability;
use provider_archive::ProviderArchive;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::{fs::File, io::Read, path::Path};
/// A host manifest contains a descriptive profile of the host's desired state, including
/// a list of actors and capability providers to load as well as any desired link definitions
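///
/// A minimal sketch of a YAML manifest accepted by [`HostManifest::from_path`]; the
/// image references, ids and values are illustrative placeholders mirroring the ones
/// used in this module's tests:
///
/// ```yaml
/// actors:
///   - a
/// capabilities:
///   - image_ref: one
///     link_name: default
/// links:
///   - actor: a
///     contract_id: "wasmcloud:one"
///     provider_id: Vxxxone
///     values:
///       ROOT: /tmp
/// ```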
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HostManifest {
#[serde(default)]
#[serde(skip_serializing_if = "HashMap::is_empty")]
pub labels: HashMap<String, String>,
pub actors: Vec<String>,
pub capabilities: Vec<Capability>,
pub links: Vec<LinkEntry>,
}
/// The description of a capability within a host manifest
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Capability {
/// An image reference for this capability. If this is a file on disk, it will be used, otherwise
/// the system will assume it is an OCI registry image reference
pub image_ref: String,
/// The (optional) name of the link that identifies this instance of the capability
pub link_name: Option<String>,
}
/// A link definition describing the actor and capability provider involved, as well
/// as the configuration values for that link
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LinkEntry {
pub actor: String,
pub contract_id: String,
pub provider_id: String,
#[serde(default)]
#[serde(skip_serializing_if = "Option::is_none")]
pub link_name: Option<String>,
pub values: Option<HashMap<String, String>>,
}
impl HostManifest {
/// Creates an instance of a host manifest from a file path. The de-serialization
/// type will be chosen based on the file path extension, selecting YAML for .yaml
/// or .yml files, and JSON for all other file extensions. If the path has no extension, the
/// de-serialization type chosen will be YAML.
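    ///
    /// A minimal usage sketch (the manifest path here is a hypothetical placeholder):
    ///
    /// ```ignore
    /// let manifest = HostManifest::from_path("host_manifest.yaml", true)
    ///     .expect("failed to read host manifest");
    /// ```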
pub fn from_path(
path: impl AsRef<Path>,
expand_env: bool,
) -> std::result::Result<HostManifest, Box<dyn std::error::Error + Send + Sync>> {
let mut contents = String::new();
let mut file = File::open(path.as_ref())?;
file.read_to_string(&mut contents)?;
if expand_env {
contents = Self::expand_env(&contents);
}
match path.as_ref().extension() {
Some(e) => {
let e = e.to_str().unwrap().to_lowercase(); // convert away from the FFI str
if e == "yaml" || e == "yml" {
serde_yaml::from_str::<HostManifest>(&contents).map_err(|e| e.into())
} else {
serde_json::from_str::<HostManifest>(&contents).map_err(|e| e.into())
}
}
None => serde_yaml::from_str::<HostManifest>(&contents).map_err(|e| e.into()),
}
}
fn expand_env(contents: &str) -> String {
let mut options = envmnt::ExpandOptions::new();
options.default_to_empty = false; // If environment variable not found, leave unexpanded.
options.expansion_type = Some(envmnt::ExpansionType::UnixBracketsWithDefaults); // ${VAR:DEFAULT}
envmnt::expand(contents, Some(options))
}
}
pub(crate) async fn generate_actor_start_messages(
manifest: &HostManifest,
allow_latest: bool,
allowed_insecure: &[String],
) -> Vec<StartActor> {
let mut v = Vec::new();
for actor_ref in &manifest.actors {
let p = Path::new(&actor_ref);
if p.exists() {
// read actor from disk
if let Ok(a) = crate::Actor::from_file(p) {
v.push(StartActor {
image_ref: None,
actor: a,
});
}
} else {
// load actor from OCI
if let Ok(a) = fetch_oci_bytes(&actor_ref, allow_latest, allowed_insecure)
.await
.and_then(|bytes| crate::Actor::from_slice(&bytes))
{
v.push(StartActor {
image_ref: Some(actor_ref.to_string()),
actor: a,
});
}
}
}
v
}
pub(crate) async fn generate_provider_start_messages(
manifest: &HostManifest,
allow_latest: bool,
allowed_insecure: &[String],
) -> Vec<StartProvider> {
let mut v = Vec::new();
for cap in &manifest.capabilities {
let p = Path::new(&cap.image_ref);
if p.exists() {
// read PAR from disk
if let Ok(prov) = file_bytes(&p)
.and_then(|bytes| ProviderArchive::try_load(&bytes))
.and_then(|par| NativeCapability::from_archive(&par, cap.link_name.clone()))
{
v.push(StartProvider {
provider: prov,
image_ref: None,
})
}
} else {
// read PAR from OCI
if let Ok(prov) = fetch_oci_bytes(&cap.image_ref, allow_latest, allowed_insecure)
.await
.and_then(|bytes| ProviderArchive::try_load(&bytes))
.and_then(|par| NativeCapability::from_archive(&par, cap.link_name.clone()))
{
v.push(StartProvider {
provider: prov,
image_ref: Some(cap.image_ref.to_string()),
})
}
}
}
v
}
pub(crate) async fn generate_adv_link_messages(manifest: &HostManifest) -> Vec<AdvertiseLink> {
manifest
.links
.iter()
.map(|config| AdvertiseLink {
contract_id: config.contract_id.to_string(),
actor: config.actor.to_string(),
link_name: config
.link_name
.as_ref()
.unwrap_or(&"default".to_string())
.to_string(),
provider_id: config.provider_id.to_string(),
values: config.values.as_ref().unwrap_or(&HashMap::new()).clone(),
})
.collect()
}
fn file_bytes(path: &Path) -> crate::Result<Vec<u8>> {
let mut f = File::open(path)?;
let mut bytes = Vec::new();
f.read_to_end(&mut bytes)?;
Ok(bytes)
}
#[cfg(test)]
mod test {
use super::{Capability, LinkEntry};
use std::collections::HashMap;
#[test]
fn round_trip() {
let manifest = super::HostManifest {
labels: HashMap::new(),
actors: vec!["a".to_string(), "b".to_string(), "c".to_string()],
capabilities: vec![
Capability {
image_ref: "one".to_string(),
link_name: Some("default".to_string()),
},
Capability {
image_ref: "two".to_string(),
link_name: Some("default".to_string()),
},
],
links: vec![LinkEntry {
actor: "a".to_string(),
contract_id: "wasmcloud:one".to_string(),
provider_id: "Vxxxone".to_string(),
values: Some(gen_values()),
link_name: None,
}],
};
let yaml = serde_yaml::to_string(&manifest).unwrap();
assert_eq!(yaml, "---\nactors:\n - a\n - b\n - c\ncapabilities:\n - image_ref: one\n link_name: default\n - image_ref: two\n link_name: default\nlinks:\n - actor: a\n contract_id: \"wasmcloud:one\"\n provider_id: Vxxxone\n values:\n ROOT: /tmp\n");
}
#[test]
fn round_trip_with_labels() {
let manifest = super::HostManifest {
labels: {
let mut hm = HashMap::new();
hm.insert("test".to_string(), "value".to_string());
hm
},
actors: vec!["a".to_string(), "b".to_string(), "c".to_string()],
capabilities: vec![
Capability {
image_ref: "one".to_string(),
link_name: Some("default".to_string()),
},
Capability {
image_ref: "two".to_string(),
link_name: Some("default".to_string()),
},
],
links: vec![LinkEntry {
actor: "a".to_string(),
contract_id: "wasmcloud:one".to_string(),
provider_id: "VxxxxONE".to_string(),
values: Some(gen_values()),
link_name: Some("default".to_string()),
}],
};
let yaml = serde_yaml::to_string(&manifest).unwrap();
assert_eq!(yaml, "---\nlabels:\n test: value\nactors:\n - a\n - b\n - c\ncapabilities:\n - image_ref: one\n link_name: default\n - image_ref: two\n link_name: default\nlinks:\n - actor: a\n contract_id: \"wasmcloud:one\"\n provider_id: VxxxxONE\n link_name: default\n values:\n ROOT: /tmp\n");
}
#[test]
fn env_expansion() {
let values = vec![
"echo Test",
"echo $TEST_EXPAND_ENV_TEMP",
"echo ${TEST_EXPAND_ENV_TEMP}",
"echo ${TEST_EXPAND_ENV_TMP}",
"echo ${TEST_EXPAND_ENV_TEMP:/etc}",
"echo ${TEST_EXPAND_ENV_TMP:/etc}",
];
let expected = vec![
"echo Test",
"echo $TEST_EXPAND_ENV_TEMP",
"echo /tmp",
"echo ${TEST_EXPAND_ENV_TMP}",
"echo /tmp",
"echo /etc",
];
envmnt::set("TEST_EXPAND_ENV_TEMP", "/tmp");
for (got, expected) in values
.iter()
.map(|v| super::HostManifest::expand_env(v))
.zip(expected.iter())
{
assert_eq!(*expected, got);
}
envmnt::remove("TEST_EXPAND_ENV_TEMP");
}
fn gen_values() -> HashMap<String, String> {
let mut hm = HashMap::new();
hm.insert("ROOT".to_string(), "/tmp".to_string());
hm
}
}
| 36.438406 | 331 | 0.547579 |
9c3b8fa458abdb75d551b8ca10aa8cbc1d99f262 | 1,576 | use anyhow::Result;
use clap::{Arg, Command};
use log::debug;
use log_stream_gc::{gc_log_streams, set_up_logger};
#[derive(Debug)]
struct Args {
verbose: bool,
dry_run: bool,
region: String,
}
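/// Parses command line arguments into [`Args`].
///
/// A typical invocation might look like the following (the region value is only an
/// illustration; `--region` can also be supplied via the `AWS_REGION` environment variable):
///
/// ```text
/// log-stream-gc --region us-east-1 --dryrun --verbose
/// ```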
fn parse_args() -> Args {
let matches = Command::new("log-stream-gc")
.version("0.1")
.author("Jacob Luszcz")
.arg(
Arg::new("verbose")
.short('v')
.long("verbose")
.help("Verbose mode. Outputs DEBUG and higher log messages."),
)
.arg(
Arg::new("dryrun")
.short('d')
.long("dryrun")
.alias("dry-run")
.help("Keeps all log streams, even if they would otherwise be deleted."),
)
.arg(
Arg::new("region")
.short('r')
.long("region")
.required(true)
.takes_value(true)
.env("AWS_REGION")
.help("AWS region to run garbage collection in."),
)
.get_matches();
let verbose = matches.is_present("verbose");
let dry_run = matches.is_present("dryrun");
let region = matches
.value_of("region")
.expect("region is required")
.to_string();
Args {
verbose,
dry_run,
region,
}
}
#[tokio::main]
async fn main() -> Result<()> {
let args = parse_args();
set_up_logger(module_path!(), args.verbose)?;
debug!("{:?}", args);
gc_log_streams(Some(args.region), args.dry_run).await
}
| 24.625 | 89 | 0.503173 |
913257ad6f9ae8153d04cd8fea126c63d0fd88c7 | 17,534 |
//! Utilities for loading and using textures
use std::io;
use std::ptr;
use std::fmt;
use std::error;
use std::path::Path;
use std::borrow::Cow;
use std::fs::File;
use png;
use gl;
use gl::types::*;
/// A wraper around a OpenGL texture object which can be modified
#[derive(Debug)]
pub struct Texture {
texture: GLuint,
pub format: TextureFormat,
pub width: u32,
pub height: u32,
}
impl Texture {
/// Creates a texture from a raw OpenGL handle and some additional data. Intended for internal
/// use only, use with care!
pub fn wrap_gl_texture(texture: GLuint, format: TextureFormat, width: u32, height: u32) -> Texture {
Texture {
texture: texture,
format: format,
width: width,
height: height,
}
}
/// Creates a texture from a image file.
pub fn from_file<P>(path: P) -> Result<Texture, TextureError> where P: AsRef<Path> {
let mut texture = Texture::new();
texture.load_file(path)?;
Ok(texture)
}
    /// Creates a texture from the bytes of an image file. The bytes can be sourced with the
/// `include_bytes!` macro. `source` is only used for context in error messages.
pub fn from_bytes(bytes: &[u8], source: &str) -> Result<Texture, TextureError> {
let mut texture = Texture::new();
let data = RawImageData::from_bytes(bytes, source)?;
texture.load_raw_image_data(data)?;
Ok(texture)
}
    /// Creates a new texture without any associated data. You can use [`load_file`],
    /// [`load_raw_image_data`] and [`load_data`] to set the data to be used
    /// with this texture.
///
/// [`load_file`]: struct.Texture.html#method.load_file
/// [`load_raw_image_data`]: struct.Texture.html#method.load_raw_image_data
/// [`load_data`]: struct.Texture.html#method.load_data
pub fn new() -> Texture {
let mut texture = 0;
unsafe {
gl::GenTextures(1, &mut texture);
gl::BindTexture(gl::TEXTURE_2D, texture);
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::NEAREST as GLint);
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::NEAREST as GLint);
}
Texture {
texture: texture,
format: TextureFormat::RGB_8,
width: 0,
height: 0,
}
}
/// Attempts to load data from the given image file into this texture. Note that
/// it is usually more convenient to create a new texture directly from a file using
/// [`from_file(path)`](struct.Texture.html#method.from_file).
///
/// # Example
/// ```rust,no_run
/// use gondola::texture::Texture;
///
/// let mut texture = Texture::new();
/// texture.load_file("assets/test.png").expect("Failed to load texture");
/// ```
pub fn load_file<P: AsRef<Path>>(&mut self, path: P) -> Result<(), TextureError> {
let path = path.as_ref();
let RawImageData { info, buf } = RawImageData::from_file(path)?;
let texture_format = match (info.color_type, info.bit_depth) {
(png::ColorType::RGBA, png::BitDepth::Eight) => TextureFormat::RGBA_8,
(png::ColorType::RGB, png::BitDepth::Eight) => TextureFormat::RGB_8,
other => {
let message = format!(
"Unsuported texture format ({:?}, {:?}) in \"{}\" ({}:{})",
other.0, other.1,
path.to_string_lossy(),
file!(), line!()
);
return Err(TextureError {
source: Some(path.to_string_lossy().into()),
error: io::Error::new(io::ErrorKind::Other, message)
});
}
};
self.load_data(&buf, info.width, info.height, texture_format);
Ok(())
}
/// Attempts to load the given raw image data into this texture. For more info see
/// [`RawImageData`].
///
/// [`RawImageData`]: struct.RawImageData.html
pub fn load_raw_image_data(&mut self, data: RawImageData) -> Result<(), TextureError> {
let texture_format = match (data.info.color_type, data.info.bit_depth) {
(png::ColorType::RGBA, png::BitDepth::Eight) => TextureFormat::RGBA_8,
(png::ColorType::RGB, png::BitDepth::Eight) => TextureFormat::RGB_8,
other => {
let message = format!(
"Unsuported texture format ({:?}, {:?}) ({}:{})",
other.0, other.1, file!(), line!()
);
return Err(TextureError { source: None, error: io::Error::new(io::ErrorKind::Other, message) });
}
};
self.load_data(&data.buf, data.info.width, data.info.height, texture_format);
Ok(())
}
/// Directly loads some color data into a texture. This function does not check to ensure that
/// the data is in the correct format, so you have to manually ensure that it is valid. This
/// function is intended for creating small debug textures.
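    ///
    /// A minimal sketch creating a 2x2 white debug texture; it assumes an OpenGL
    /// context is already current:
    ///
    /// ```rust,no_run
    /// use gondola::texture::{Texture, TextureFormat};
    ///
    /// let mut texture = Texture::new();
    /// // 2x2 pixels * 3 bytes (RGB) per pixel = 12 bytes of data.
    /// texture.load_data(&[255u8; 12], 2, 2, TextureFormat::RGB_8);
    /// ```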
pub fn load_data(&mut self, data: &[u8], width: u32, height: u32, format: TextureFormat) {
unsafe {
gl::BindTexture(gl::TEXTURE_2D, self.texture);
gl::TexImage2D(gl::TEXTURE_2D, 0, // Mipmap level
format as GLint, // Internal format
width as GLsizei, height as GLsizei, 0, // Size and border
format.unsized_format(), // Data format
gl::UNSIGNED_BYTE, data.as_ptr() as *const GLvoid);
}
self.width = width;
self.height = height;
self.format = format;
}
/// Sets the data in a sub-region of this texture. The data is expected to be in the
/// format this texture was initialized to. This texture needs to be initialized
/// before this method can be used.
/// Note that there is a debug assertion in place to ensure that the given region
/// is within the bounds of this texture. If debug assertions are not enabled this
/// function will return without taking any action.
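    ///
    /// A minimal sketch, assuming an OpenGL context is current and the texture was
    /// initialized as an `RGB_8` texture of at least 1x1 pixels:
    ///
    /// ```rust,no_run
    /// # use gondola::texture::{Texture, TextureFormat};
    /// # let mut texture = Texture::new();
    /// # texture.initialize(16, 16, TextureFormat::RGB_8);
    /// // Write a single red RGB pixel at (0, 0).
    /// texture.load_data_to_region(&[255, 0, 0], 0, 0, 1, 1);
    /// ```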
pub fn load_data_to_region(&mut self, data: &[u8], x: u32, y: u32, width: u32, height: u32) {
        if x + width > self.width || y + height > self.height {
debug_assert!(false, "Invalid region passed ({}:{}) Region: (x: {}, y: {}, width: {}, height: {})",
file!(), line!(),
x, y, width, height);
return;
}
unsafe {
// OpenGL is allowed to expect rows in pixel data to be aligned
// at powers of two. This ensures that any data will be accepted.
gl::PixelStorei(gl::PACK_ALIGNMENT, 1);
gl::PixelStorei(gl::UNPACK_ALIGNMENT, 1);
gl::BindTexture(gl::TEXTURE_2D, self.texture);
gl::TexSubImage2D(gl::TEXTURE_2D, 0,
x as GLint, y as GLint,
width as GLsizei, height as GLsizei,
self.format.unsized_format(), // It is unclear whether opengl allows a different format here
gl::UNSIGNED_BYTE, data.as_ptr() as *const GLvoid);
}
}
    /// Converts this texture to an empty texture of the given size. The contents
/// of the texture after this operation are undefined.
pub fn initialize(&mut self, width: u32, height: u32, format: TextureFormat) {
unsafe {
gl::BindTexture(gl::TEXTURE_2D, self.texture);
gl::TexImage2D(gl::TEXTURE_2D, 0, // Mipmap level
format as GLint, // Internal format
width as GLsizei, height as GLsizei, 0, // Size and border
format.unsized_format(), // Data format
gl::UNSIGNED_BYTE, ptr::null());
}
self.width = width;
self.height = height;
self.format = format;
}
/// Binds this texture to the given texture unit.
pub fn bind(&self, unit: u32) {
unsafe {
gl::ActiveTexture(gl::TEXTURE0 + unit);
gl::BindTexture(gl::TEXTURE_2D, self.texture);
}
}
/// Unbinds the texture at the given texture unit.
pub fn unbind(unit: u32) {
unsafe {
gl::ActiveTexture(gl::TEXTURE0 + unit);
gl::BindTexture(gl::TEXTURE_2D, 0);
}
}
    /// Sets the filters that are applied when this texture is rendered at a size larger
    /// or smaller than its native size. Separate filters can be set for magnification
    /// and minification. (A usage sketch follows the `Drop` impl below.)
pub fn set_filter(&mut self, mag: TextureFilter, min: TextureFilter) {
unsafe {
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, mag as GLint);
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, min as GLint);
}
}
    /// Sets the texture filters, allowing separate filters to be used when mipmapping.
pub fn set_mipmap_filter(&mut self, mag: TextureFilter, mipmap_mag: TextureFilter,
min: TextureFilter, mipmap_min: TextureFilter) {
unsafe {
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, TextureFilter::mipmap_filter(mag, mipmap_mag) as GLint);
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, TextureFilter::mipmap_filter(min, mipmap_min) as GLint);
}
}
    /// Sets the swizzle mask of this texture. The swizzle mask specifies how data stored
    /// in this texture is seen by other parts of OpenGL, including texture samplers in
    /// shaders. This is useful when using textures with only one or two components per
    /// pixel.
    ///
    /// For example, given a texture with only a red component (that is, its format is
    /// `TextureFormat::R_8` or similar), a texture sampler in a shader will normally get
    /// a value of the form `(r, 0.0, 0.0, 1.0)`. By setting the swizzle mask to
    /// `(SwizzleComp::One, SwizzleComp::One, SwizzleComp::One, SwizzleComp::Red)`,
    /// shaders will instead see `(1.0, 1.0, 1.0, r)`. A usage sketch follows this
    /// `impl` block.
pub fn set_swizzle_mask(&mut self, masks: (SwizzleComp, SwizzleComp, SwizzleComp, SwizzleComp)) {
unsafe {
let masks = [masks.0 as GLint, masks.1 as GLint, masks.2 as GLint, masks.3 as GLint];
gl::TexParameteriv(gl::TEXTURE_2D, gl::TEXTURE_SWIZZLE_RGBA, &masks as *const _);
}
}
}
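// Editor's sketch (not part of the original file): the swizzle-mask pattern described in the
// `set_swizzle_mask` docs above. The function name is illustrative; it assumes `texture`
// already holds single-channel (`TextureFormat::R_8`) data.
#[allow(dead_code)]
fn example_red_as_alpha(texture: &mut Texture) {
    // `set_swizzle_mask` operates on the currently bound texture, so bind first.
    texture.bind(0);
    // Samplers will now read `(1.0, 1.0, 1.0, r)` instead of `(r, 0.0, 0.0, 1.0)`.
    texture.set_swizzle_mask((
        SwizzleComp::One,
        SwizzleComp::One,
        SwizzleComp::One,
        SwizzleComp::Red,
    ));
}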
impl Drop for Texture {
fn drop(&mut self) {
unsafe {
gl::DeleteTextures(1, &self.texture);
}
}
}
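// Editor's sketch (not part of the original file): a typical setup sequence built from the
// methods above, as referenced in the filter docs. The pixel data, size, and texture unit
// are placeholders chosen for illustration.
#[allow(dead_code)]
fn example_basic_setup(texture: &mut Texture) {
    // A 2x2 RGBA checkerboard: 4 pixels, 4 bytes each.
    let pixels: [u8; 16] = [
        255, 255, 255, 255,   0,   0,   0, 255,
          0,   0,   0, 255, 255, 255, 255, 255,
    ];
    // `load_data` binds the texture internally before uploading.
    texture.load_data(&pixels, 2, 2, TextureFormat::RGBA_8);
    // The filter calls affect the currently bound texture, which `load_data` just bound.
    texture.set_filter(TextureFilter::Nearest, TextureFilter::Nearest);
    // Expose the texture to samplers on texture unit 0.
    texture.bind(0);
}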
/// Raw image data loaded from a png file. This data can then be loaded into a texture
/// using [`Texture::load_raw_image_data`]. When loading very large textures it can be
/// beneficial to load the raw image data from the texture on a separate thread, and then
/// pass it to a texture in the main thread for performance reasons.
///
/// Note that textures must always be created in the same thread as they are used in, because
/// of OpenGL limitations. You can call [`RawImageData::from_file`] from anywhere, but only
/// ever create textures in the rendering thread (usually the main thread). A sketch of this
/// pattern follows the `RawImageData` impl below.
///
/// [`Texture::load_raw_image_data`]: struct.Texture.html#method.load_raw_image_data
/// [`RawImageData::from_file`]: struct.RawImageData.html#method.from_file
pub struct RawImageData {
info: png::OutputInfo,
buf: Vec<u8>,
}
impl RawImageData {
/// Does not invoke any OpenGL functions, and can thus be called from any thread.
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<RawImageData, TextureError> {
let path = path.as_ref();
// Open file
let file = match File::open(path) {
Ok(file) => file,
Err(err) => return Err(TextureError {
source: Some(path.to_string_lossy().into()),
error: err
}),
};
let decoder = png::Decoder::new(file);
RawImageData::from_decoder(decoder, path.to_string_lossy().into())
}
    /// Can be used in conjunction with the `include_bytes!(..)` macro from std.
pub fn from_bytes(bytes: &[u8], source: &str) -> Result<RawImageData, TextureError> {
RawImageData::from_decoder(png::Decoder::new(bytes), source.into())
}
fn from_decoder<R: io::Read>(
decoder: png::Decoder<R>,
source: Cow<str>,
) -> Result<RawImageData, TextureError>
{
let (info, mut reader) = match decoder.read_info() {
Ok(result) => result,
Err(err) => return Err(TextureError {
source: Some(source.into()),
error: err.into()
}),
};
// Read data into buffer (This is what makes texture loading slow)
let mut buf = vec![0; info.buffer_size()];
match reader.next_frame(&mut buf) {
Ok(()) => {},
Err(err) => return Err(TextureError {
source: Some(source.into()),
error: err.into()
}),
};
        Ok(RawImageData { info, buf })
}
}
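// Editor's sketch (not part of the original file): the off-thread loading pattern described
// in the `RawImageData` docs above. The file path is a placeholder; in a real application the
// decode would overlap with other work on the render thread.
#[allow(dead_code)]
fn example_threaded_load(texture: &mut Texture) -> Result<(), TextureError> {
    // Decode the png on a worker thread; no OpenGL calls happen here.
    let decode = std::thread::spawn(|| RawImageData::from_file("assets/big_texture.png"));
    // ... the render thread keeps doing other work while the image decodes ...
    let data = decode.join().expect("image decoding thread panicked")?;
    // Upload on the render thread, where the OpenGL context lives.
    texture.load_raw_image_data(data)
}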
/// Represents an OpenGL texture filter.
#[repr(u32)] // GLenum is u32
#[derive(Debug, Copy, Clone)]
pub enum TextureFilter {
Nearest = gl::NEAREST,
Linear = gl::LINEAR
}
impl TextureFilter {
    /// Retrieves an OpenGL mipmap filter for mipmapping. The returned `GLenum` can
    /// be used in the same scenarios as `TextureFilter::* as GLenum`.
fn mipmap_filter(normal: TextureFilter, mipmap: TextureFilter) -> GLenum {
match normal {
TextureFilter::Nearest => match mipmap {
TextureFilter::Nearest => gl::NEAREST_MIPMAP_NEAREST,
TextureFilter::Linear => gl::NEAREST_MIPMAP_LINEAR,
},
TextureFilter::Linear => match mipmap {
TextureFilter::Nearest => gl::LINEAR_MIPMAP_NEAREST,
TextureFilter::Linear => gl::LINEAR_MIPMAP_LINEAR,
},
}
}
}
/// Represents an OpenGL texture format.
#[repr(u32)] // GLenum is u32
#[allow(non_camel_case_types, dead_code)]
#[derive(Debug, Copy, Clone)]
pub enum TextureFormat {
RGBA_F32 = gl::RGBA32F,
RGBA_F16 = gl::RGBA16F,
RGB_F32 = gl::RGB32F,
RGB_F16 = gl::RGB16F,
R_F32 = gl::R32F,
R_F16 = gl::R16F,
RGBA_8 = gl::RGBA8,
RGB_8 = gl::RGB8,
R_8 = gl::R8,
}
impl TextureFormat {
/// Retrieves the unsized version of the given format
pub fn unsized_format(&self) -> GLenum {
match *self {
TextureFormat::RGBA_F32 | TextureFormat::RGBA_F16 | TextureFormat::RGBA_8 => gl::RGBA,
TextureFormat::RGB_F32 | TextureFormat::RGB_F16 | TextureFormat::RGB_8 => gl::RGB,
TextureFormat::R_F32 | TextureFormat::R_F16 | TextureFormat::R_8 => gl::RED,
}
}
/// The OpenGL primitive associated with this color format.
pub fn gl_primitive_enum(&self) -> GLenum {
match *self {
TextureFormat::RGBA_F32 | TextureFormat::RGB_F32 | TextureFormat::R_F32 => gl::FLOAT,
TextureFormat::RGBA_F16 | TextureFormat::RGB_F16 | TextureFormat::R_F16 => gl::FLOAT,
TextureFormat::RGBA_8 | TextureFormat::RGB_8 | TextureFormat::R_8 => gl::UNSIGNED_BYTE,
}
}
/// The name of the OpenGL primitive associated with this color format.
pub fn gl_primitive_enum_name(&self) -> &'static str {
match *self {
TextureFormat::RGBA_F32 | TextureFormat::RGB_F32 | TextureFormat::R_F32 => "GLfloat",
TextureFormat::RGBA_F16 | TextureFormat::RGB_F16 | TextureFormat::R_F16 => "GLfloat",
TextureFormat::RGBA_8 | TextureFormat::RGB_8 | TextureFormat::R_8 => "GLbyte",
}
}
/// The number of components this color format has. For example, `RGB_8` has 3 components.
pub fn components(&self) -> usize {
match *self {
TextureFormat::RGBA_F32 | TextureFormat::RGBA_F16 | TextureFormat::RGBA_8 => 4,
TextureFormat::RGB_F32 | TextureFormat::RGB_F16 | TextureFormat::RGB_8 => 3,
TextureFormat::R_F32 | TextureFormat::R_F16 | TextureFormat::R_8 => 1,
}
}
}
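// Editor's sketch (not part of the original file): how the format metadata above can be used
// to size a pixel buffer for `Texture::load_data`. The bytes-per-component value is inferred
// from `gl_primitive_enum`, so the F16 formats are sized for f32 client data, matching the
// `GLfloat` name reported by `gl_primitive_enum_name`.
#[allow(dead_code)]
fn example_expected_byte_len(width: u32, height: u32, format: TextureFormat) -> usize {
    let bytes_per_component = match format.gl_primitive_enum() {
        gl::UNSIGNED_BYTE => 1,
        _ => 4, // gl::FLOAT for the F16/F32 formats
    };
    width as usize * height as usize * format.components() * bytes_per_component
}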
/// Components that a texture can be mapped to through swizzling. See
/// [`set_swizzle_mask`](struct.Texture.html#method.set_swizzle_mask)
/// for more info.
#[repr(u32)] // GLenum is u32
#[derive(Debug, Copy, Clone)]
pub enum SwizzleComp {
Red = gl::RED,
Green = gl::GREEN,
Blue = gl::BLUE,
Alpha = gl::ALPHA,
One = gl::ONE,
Zero = gl::ZERO,
}
/// An error which can occur during texture loading or creation.
#[derive(Debug)]
pub struct TextureError {
source: Option<String>,
error: io::Error,
}
impl error::Error for TextureError {
fn description(&self) -> &str {
self.error.description()
}
fn cause(&self) -> Option<&error::Error> {
self.error.cause()
}
}
impl fmt::Display for TextureError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(ref source) = self.source {
            write!(f, "For texture \"{}\": ", source)?;
        }
        self.error.fmt(f)
}
}
impl From<TextureError> for io::Error {
fn from(err: TextureError) -> io::Error {
io::Error::new(io::ErrorKind::Other, err)
}
}
| 38.706402 | 126 | 0.592278 |
61984443f1aa529706b0f98286cbd0c044d8f2fe | 4,354 | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
use std::collections::HashMap;
use std::io::BufRead;
use std::io::SeekFrom;
use dashmap::DashMap;
use tokio::fs::File;
use tokio::io::{AsyncBufReadExt, AsyncSeekExt, BufReader};
pub mod bpf;
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
pub const NAME: &str = env!("CARGO_PKG_NAME");
pub const SECOND: u64 = 1_000 * MILLISECOND;
pub const MILLISECOND: u64 = 1_000 * MICROSECOND;
pub const MICROSECOND: u64 = 1_000 * NANOSECOND;
pub const NANOSECOND: u64 = 1;
pub struct HardwareInfo {
numa_mapping: DashMap<u64, u64>,
}
impl HardwareInfo {
pub fn new() -> Self {
let numa_mapping = DashMap::new();
let mut node = 0;
loop {
let path = format!("/sys/devices/system/node/node{}/cpulist", node);
if let Ok(f) = std::fs::File::open(path) {
let mut reader = std::io::BufReader::new(f);
let mut line = String::new();
if reader.read_line(&mut line).is_ok() {
let ranges: Vec<&str> = line.trim().split(',').collect();
for range in ranges {
let parts: Vec<&str> = range.split('-').collect();
if parts.len() == 1 {
if let Ok(id) = parts[0].parse() {
numa_mapping.insert(id, node);
}
} else if parts.len() == 2 {
if let Ok(start) = parts[0].parse() {
if let Ok(stop) = parts[1].parse() {
for id in start..=stop {
numa_mapping.insert(id, node);
}
}
}
}
}
}
} else {
break;
}
node += 1;
}
Self { numa_mapping }
}
pub fn get_numa(&self, core: u64) -> Option<u64> {
self.numa_mapping.get(&core).map(|v| *v.value())
}
}
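// Editor's sketch (not part of the original file): printing the NUMA node of every hardware
// thread by combining `HardwareInfo` with `hardware_threads()` below.
#[allow(dead_code)]
fn print_numa_topology() {
    let hardware = HardwareInfo::new();
    if let Ok(threads) = hardware_threads() {
        for core in 0..threads {
            match hardware.get_numa(core) {
                Some(node) => println!("cpu{} -> node{}", core, node),
                None => println!("cpu{} -> unknown node", core),
            }
        }
    }
}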
/// helper function to discover the number of hardware threads
pub fn hardware_threads() -> Result<u64, ()> {
let path = "/sys/devices/system/cpu/present";
let f =
std::fs::File::open(path).map_err(|e| debug!("failed to open file ({:?}): {}", path, e))?;
let mut f = std::io::BufReader::new(f);
let mut line = String::new();
f.read_line(&mut line)
.map_err(|_| debug!("failed to read line"))?;
let line = line.trim();
let a: Vec<&str> = line.split('-').collect();
a.last()
.unwrap_or(&"0")
.parse::<u64>()
.map_err(|e| debug!("could not parse num cpus from file ({:?}): {}", path, e))
.map(|i| i + 1)
}
/// helper function to create a nested map from files with the form of
/// pkey1 lkey1 lkey2 ... lkeyN
/// pkey1 value1 value2 ... valueN
/// pkey2 ...
pub async fn nested_map_from_file(
file: &mut File,
) -> Result<HashMap<String, HashMap<String, u64>>, std::io::Error> {
file.seek(SeekFrom::Start(0)).await?;
let mut ret = HashMap::<String, HashMap<String, u64>>::new();
let mut reader = BufReader::new(file);
let mut keys = String::new();
let mut values = String::new();
while reader.read_line(&mut keys).await? > 0 {
if reader.read_line(&mut values).await? > 0 {
let mut keys_split = keys.trim().split_whitespace();
let mut values_split = values.trim().split_whitespace();
if let Some(pkey) = keys_split.next() {
let _ = values_split.next();
if !ret.contains_key(pkey) {
ret.insert(pkey.to_string(), Default::default());
}
let inner = ret.get_mut(pkey).unwrap();
for key in keys_split {
if let Some(Ok(value)) = values_split.next().map(|v| v.parse()) {
inner.insert(key.to_owned(), value);
}
}
}
keys.clear();
values.clear();
}
}
Ok(ret)
}
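// Editor's sketch (not part of the original file): using the parser above on a file in the
// expected two-line-per-record layout, e.g. /proc/net/snmp, where a pair of lines such as
//     Ip: Forwarding DefaultTTL ...
//     Ip: 2 64 ...
// yields map["Ip:"]["Forwarding"] == 2 and map["Ip:"]["DefaultTTL"] == 64.
#[allow(dead_code)]
async fn example_nested_map() -> Result<(), std::io::Error> {
    let mut file = File::open("/proc/net/snmp").await?;
    let map = nested_map_from_file(&mut file).await?;
    if let Some(ip) = map.get("Ip:") {
        println!("default ttl: {:?}", ip.get("DefaultTTL"));
    }
    Ok(())
}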
pub fn default_percentiles() -> Vec<f64> {
vec![1.0, 10.0, 50.0, 90.0, 99.0]
}
| 34.555556 | 98 | 0.499541 |
e622b1fdacb11ae4d305ed704e0f88219e51377f | 6,521 | //! Contains methods and types which are useful for logging
use std::fs::File;
use std::io::{BufRead, BufReader, Read};
use std::path::Path;
use crate::types::combined_position_orientation_array_to_isometry;
use crate::{BodyId, Error, Velocity};
use nalgebra::{DVector, Isometry3, Vector6};
use std::convert::TryInto;
use std::fmt;
use std::fmt::{Display, Formatter};
use std::time::Duration;
/// Represents the State of a Body which was recorded with
/// [`start_state_logging`](`crate::PhysicsClient::start_state_logging`) with the
/// [`GenericRobot`](`crate::types::LoggingType::GenericRobot`)
/// [`LoggingType`](`crate::types::LoggingType`).
/// Use [`read_generic_robot_log`](`read_generic_robot_log`) to read such a log file.
#[derive(Debug)]
pub struct GenericRobotLog {
/// ongoing counter of logs from the log file.
pub chunk_number: usize,
/// ongoing counter of the steps in the simulation
pub step_count: usize,
/// simulation time stamp
pub time_stamp: Duration,
/// BodyId of the log entry
pub body: BodyId,
/// Base pose of the robot. Not the end-effector pose!
pub base_pose: Isometry3<f64>,
/// Base velocity of the robot. Not the end-effector velocity!
pub base_velocity: Velocity,
/// total number of joints of the robot.
pub num_joints: usize,
/// joint positions with length of num_joints
pub joint_positions: DVector<f64>,
/// joint velocities with length of num_joints
pub joint_velocities: DVector<f64>,
}
impl Display for GenericRobotLog {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
writeln!(f, "chunk # {}", self.chunk_number)?;
writeln!(f, "step count = {}", self.step_count)?;
writeln!(f, "body = {:?}", self.body)?;
writeln!(f, "Base Pose:")?;
writeln!(f, "{}", self.base_pose.to_homogeneous())?;
writeln!(f, "Base Velocity:")?;
writeln!(f, "{}", self.base_velocity.to_vector())?;
writeln!(f, "num joints = {}", self.num_joints)?;
writeln!(f, "joint positions:")?;
writeln!(f, "{:?}", self.joint_positions.as_slice())?;
writeln!(f, "joint velocities:")?;
writeln!(f, "{:?}", self.joint_velocities.as_slice())
}
}
impl Default for GenericRobotLog {
fn default() -> Self {
GenericRobotLog {
chunk_number: 0,
step_count: 0,
time_stamp: Default::default(),
body: BodyId(-1),
base_pose: Isometry3::identity(),
base_velocity: Vector6::zeros().into(),
num_joints: 0,
joint_positions: DVector::identity(1),
joint_velocities: DVector::identity(1),
}
}
}
fn calc_size(fmt: &str) -> usize {
let mut size = 0;
for c in fmt.chars() {
size += match c {
'I' | 'i' | 'f' => 4,
'B' => 1,
            _ => panic!("cannot determine data type for format char {:?}", c),
};
}
size
}
/// Reads log files which were generated with the [`GenericRobot`](`crate::types::LoggingType::GenericRobot`)
/// [`LoggingType`](`crate::types::LoggingType`).
/// It returns a list of all entries in the log, or an error if the file could not be opened.
/// # Arguments
/// * `filename` - location of the log file.
/// # Example
/// ```no_run
///# use rubullet::logging_utils::read_generic_robot_log;
/// let logs = read_generic_robot_log("LOG0001.txt").unwrap();
/// ```
pub fn read_generic_robot_log<P: AsRef<Path>>(filename: P) -> Result<Vec<GenericRobotLog>, Error> {
let file = File::open(filename).map_err(|_| Error::new("could not open file"))?;
let mut reader = BufReader::new(file);
let mut key_buf = String::new();
reader
.read_line(&mut key_buf)
.expect("error while reading file");
let mut fmt_buf = String::new();
reader
.read_line(&mut fmt_buf)
.expect("error while reading file");
    let fmt = fmt_buf.trim_end();
assert_eq!("IfifffffffffffffI", &fmt[0..17]);
let sz = calc_size(fmt);
let mut chunk_index = 0;
let mut logs = Vec::<GenericRobotLog>::new();
loop {
let mut check_buf = [0_u8; 2];
match reader.read_exact(&mut check_buf) {
Ok(_) => {}
Err(_) => {
return Ok(logs);
}
}
assert_eq!(
&check_buf,
&[170_u8, 187_u8],
"Error, expected aabb terminal"
);
let mut buf = vec![0_u8; sz];
reader.read_exact(&mut buf).unwrap();
let mut log = GenericRobotLog {
chunk_number: chunk_index,
..Default::default()
};
chunk_index += 1;
log.step_count = u32::from_le_bytes(buf[0..4].try_into().unwrap()) as usize;
log.time_stamp = Duration::from_secs_f32(f32::from_le_bytes(buf[4..8].try_into().unwrap()));
log.body = BodyId(i32::from_le_bytes(buf[8..12].try_into().unwrap()));
assert!(log.body.0 >= 0);
let mut pose_elements = [0.; 7];
for i in 0..7 {
pose_elements[i] =
f32::from_le_bytes(buf[12 + 4 * i..16 + 4 * i].try_into().unwrap()) as f64;
}
log.base_pose = combined_position_orientation_array_to_isometry(pose_elements);
let mut velocity_elements = [0.; 6];
for i in 0..6 {
velocity_elements[i] =
f32::from_le_bytes(buf[40 + 4 * i..44 + 4 * i].try_into().unwrap()) as f64;
}
let vel_vec: Vector6<f64> = velocity_elements.into();
log.base_velocity = vel_vec.into();
log.num_joints = u32::from_le_bytes(buf[64..68].try_into().unwrap()) as usize;
let remaining = sz - 68;
assert_eq!(remaining % 8, 0);
let mut joint_positions = DVector::<f64>::from(vec![0.; log.num_joints]);
for i in 0..log.num_joints {
joint_positions[i] =
f32::from_le_bytes(buf[68 + 4 * i..72 + 4 * i].try_into().unwrap()) as f64;
}
let mut joint_velocities = DVector::<f64>::from(vec![0.; log.num_joints]);
let start_byte = 68 + remaining / 2;
for i in 0..log.num_joints {
joint_velocities[i] = f32::from_le_bytes(
buf[start_byte + 4 * i..start_byte + 4 + 4 * i]
.try_into()
.unwrap(),
) as f64;
}
log.joint_positions = joint_positions;
log.joint_velocities = joint_velocities;
logs.push(log);
}
}
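// Editor's sketch (not part of the original file): iterating over a parsed log. The file name
// mirrors the placeholder used in the doc example above; such files are produced by
// `start_state_logging` with the `GenericRobot` logging type.
#[allow(dead_code)]
fn example_print_log() -> Result<(), Error> {
    for log in read_generic_robot_log("LOG0001.txt")? {
        println!(
            "step {:>6} t={:?} body={:?} joints={}",
            log.step_count, log.time_stamp, log.body, log.num_joints
        );
    }
    Ok(())
}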
| 38.134503 | 105 | 0.584726 |
1e01d18a60d8c2e863393ab291cbbdc47459b069 | 6,639 | pub use self::{
adversary::{
AdversaryFragmentSender, AdversaryFragmentSenderError, AdversaryFragmentSenderSetup,
},
export::{FragmentExporter, FragmentExporterError},
initial_certificates::{signed_delegation_cert, signed_stake_pool_cert, vote_plan_cert},
node::{FragmentNode, FragmentNodeError, MemPoolCheck},
sender::{FragmentSender, FragmentSenderError},
setup::{FragmentSenderSetup, FragmentSenderSetupBuilder, VerifyStrategy},
transaction::{transaction_to, transaction_to_many},
verifier::{FragmentVerifier, FragmentVerifierError},
};
use crate::{stake_pool::StakePool, wallet::Wallet};
use chain_impl_mockchain::{
certificate::{PoolId, Proposal, VoteCast, VotePlan, VoteTally},
fee::LinearFee,
fragment::Fragment,
testing::{
data::{StakePool as StakePoolLib, Wallet as WalletLib},
scenario::FragmentFactory,
},
vote::{Choice, Payload},
};
use jormungandr_lib::{
crypto::hash::Hash,
interfaces::{Address, Initial, Value},
};
pub use load::{FragmentGenerator, FragmentStatusProvider};
use thiserror::Error;
mod adversary;
mod export;
mod initial_certificates;
mod load;
mod node;
mod sender;
mod setup;
mod transaction;
mod verifier;
#[derive(Error, Debug)]
pub enum FragmentBuilderError {
#[error("cannot compute the transaction's balance")]
CannotComputeBalance,
#[error("Cannot compute the new fees of {0} for a new input")]
CannotAddCostOfExtraInput(u64),
#[error("transaction already balanced")]
TransactionAlreadyBalanced,
#[error("the transaction has {0} value extra than necessary")]
TransactionAlreadyExtraValue(Value),
}
pub struct FragmentBuilder {
block0_hash: Hash,
fees: LinearFee,
}
impl FragmentBuilder {
pub fn new(block0_hash: &Hash, fees: &LinearFee) -> Self {
Self {
block0_hash: *block0_hash,
fees: *fees,
}
}
fn fragment_factory(&self) -> FragmentFactory {
FragmentFactory::new(self.block0_hash.into_hash(), self.fees)
}
pub fn transaction(
&self,
from: &Wallet,
address: Address,
value: Value,
) -> Result<Fragment, FragmentBuilderError> {
transaction_to(&self.block0_hash, &self.fees, from, address, value)
}
pub fn transaction_to_many(
&self,
from: &Wallet,
addresses: &[Address],
value: Value,
) -> Result<Fragment, FragmentBuilderError> {
transaction_to_many(&self.block0_hash, &self.fees, from, addresses, value)
}
pub fn full_delegation_cert_for_block0(wallet: &Wallet, pool_id: PoolId) -> Initial {
Initial::Cert(signed_delegation_cert(wallet, pool_id).into())
}
pub fn stake_pool_registration(&self, funder: &Wallet, stake_pool: &StakePool) -> Fragment {
let inner_wallet = funder.clone().into();
self.fragment_factory()
.stake_pool_registration(&inner_wallet, &stake_pool.clone().into())
}
pub fn delegation(&self, from: &Wallet, stake_pool: &StakePool) -> Fragment {
let inner_wallet = from.clone().into();
self.fragment_factory()
.delegation(&inner_wallet, &stake_pool.clone().into())
}
pub fn delegation_remove(&self, from: &Wallet) -> Fragment {
let inner_wallet = from.clone().into();
self.fragment_factory().delegation_remove(&inner_wallet)
}
pub fn delegation_to_many(
&self,
from: &Wallet,
distribution: Vec<(&StakePool, u8)>,
) -> Fragment {
let inner_wallet = from.clone().into();
let inner_stake_pools: Vec<StakePoolLib> = distribution
.iter()
.cloned()
.map(|(x, _)| {
let inner_stake_pool: StakePoolLib = x.clone().into();
inner_stake_pool
})
.collect();
let mut inner_distribution: Vec<(&StakePoolLib, u8)> = Vec::new();
for (inner_stake_pool, (_, factor)) in inner_stake_pools.iter().zip(distribution) {
inner_distribution.push((&inner_stake_pool, factor));
}
self.fragment_factory()
.delegation_to_many(&inner_wallet, &inner_distribution[..])
}
pub fn owner_delegation(&self, from: &Wallet, stake_pool: &StakePool) -> Fragment {
let inner_wallet = from.clone().into();
self.fragment_factory()
.owner_delegation(&inner_wallet, &stake_pool.clone().into())
}
pub fn stake_pool_retire(&self, owners: Vec<&Wallet>, stake_pool: &StakePool) -> Fragment {
let inner_owners: Vec<WalletLib> = owners
.iter()
.cloned()
.map(|x| {
let wallet: WalletLib = x.clone().into();
wallet
})
.collect();
let ref_inner_owners: Vec<&WalletLib> = inner_owners.iter().collect();
self.fragment_factory()
.stake_pool_retire(&ref_inner_owners[..], &stake_pool.clone().into())
}
pub fn stake_pool_update(
&self,
owners: Vec<&Wallet>,
old_stake_pool: &StakePool,
new_stake_pool: &StakePool,
) -> Fragment {
let inner_owners: Vec<WalletLib> = owners
.iter()
.cloned()
.map(|x| {
let wallet: WalletLib = x.clone().into();
wallet
})
.collect();
let ref_inner_owners: Vec<&WalletLib> = inner_owners.iter().collect();
self.fragment_factory().stake_pool_update(
ref_inner_owners,
&old_stake_pool.clone().into(),
new_stake_pool.clone().into(),
)
}
pub fn vote_plan(&self, wallet: &Wallet, vote_plan: &VotePlan) -> Fragment {
let inner_wallet = wallet.clone().into();
self.fragment_factory()
.vote_plan(&inner_wallet, vote_plan.clone())
}
pub fn vote_cast(
&self,
wallet: &Wallet,
vote_plan: &VotePlan,
proposal_index: u8,
choice: &Choice,
) -> Fragment {
let inner_wallet = wallet.clone().into();
let vote_cast = VoteCast::new(
vote_plan.to_id(),
proposal_index as u8,
Payload::public(choice.clone()),
);
self.fragment_factory().vote_cast(&inner_wallet, vote_cast)
}
pub fn vote_tally(&self, wallet: &Wallet, vote_plan: &VotePlan) -> Fragment {
let inner_wallet = wallet.clone().into();
let vote_tally = VoteTally::new_public(vote_plan.to_id());
self.fragment_factory()
.vote_tally(&inner_wallet, vote_tally)
}
}
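// Editor's sketch (not part of the original file): the public-vote flow expressed with the
// builder methods above. It assumes the caller already has the block0 hash, the linear fees,
// committee/voter wallets that are eligible for the plan, and a `VotePlan`; `Choice::new(1)`
// is an arbitrary option picked for illustration.
#[allow(dead_code)]
fn example_public_vote_flow(
    block0_hash: &Hash,
    fees: &LinearFee,
    committee: &Wallet,
    voter: &Wallet,
    vote_plan: &VotePlan,
) -> Vec<Fragment> {
    let builder = FragmentBuilder::new(block0_hash, fees);
    vec![
        // Announce the vote plan on chain.
        builder.vote_plan(committee, vote_plan),
        // Cast a public vote on the first proposal of the plan.
        builder.vote_cast(voter, vote_plan, 0, &Choice::new(1)),
        // Tally the results once the voting phase has ended.
        builder.vote_tally(committee, vote_plan),
    ]
}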
| 32.072464 | 96 | 0.618015 |