//! Consensus service is a component that will be communicating with the reactor.
//! It will receive events (like incoming message event or create new message event)
//! and propagate them to the underlying consensus protocol.
//! It tries to know as little as possible about the underlying consensus. The only thing
//! it assumes is the concept of era/epoch and that each era runs a separate consensus instance.
//! Most importantly, it doesn't care about what messages it's forwarding.
mod era;
use std::{
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
convert::TryInto,
fmt::{self, Debug, Formatter},
fs,
path::{Path, PathBuf},
sync::Arc,
time::Duration,
};
use anyhow::Error;
use datasize::DataSize;
use futures::FutureExt;
use itertools::Itertools;
use prometheus::Registry;
use rand::Rng;
use tracing::{debug, error, info, trace, warn};
use casper_hashing::Digest;
use casper_types::{AsymmetricType, EraId, PublicKey, SecretKey, U512};
pub use self::era::Era;
use crate::{
components::consensus::{
cl_context::{ClContext, Keypair},
config::ProtocolConfig,
consensus_protocol::{
ConsensusProtocol, EraReport, FinalizedBlock as CpFinalizedBlock, ProposedBlock,
ProtocolOutcome,
},
metrics::Metrics,
traits::NodeIdT,
validator_change::ValidatorChanges,
ActionId, Config, ConsensusMessage, Event, NewBlockPayload, ReactorEventT, ResolveValidity,
TimerId, ValidatorChange,
},
effect::{
announcements::ControlAnnouncement,
requests::{BlockValidationRequest, ContractRuntimeRequest, StorageRequest},
EffectBuilder, EffectExt, Effects, Responder,
},
fatal,
types::{
ActivationPoint, BlockHash, BlockHeader, Deploy, DeployHash, DeployOrTransferHash,
FinalitySignature, FinalizedBlock, TimeDiff, Timestamp,
},
utils::WithDir,
NodeRng,
};
/// The delay in milliseconds before we shut down after the number of faulty validators has
/// exceeded the fault tolerance threshold.
const FTT_EXCEEDED_SHUTDOWN_DELAY_MILLIS: u64 = 60 * 1000;
type ConsensusConstructor<I> = dyn Fn(
Digest, // the era's unique instance ID
BTreeMap<PublicKey, U512>, // validator weights
&HashSet<PublicKey>, /* faulty validators that are banned in
* this era */
&HashSet<PublicKey>, // inactive validators that can't be leaders
&ProtocolConfig, // the network's chainspec
&Config, // The consensus part of the node config.
Option<&dyn ConsensusProtocol<I, ClContext>>, // previous era's consensus instance
Timestamp, // start time for this era
u64, // random seed
Timestamp, // now timestamp
) -> (
Box<dyn ConsensusProtocol<I, ClContext>>,
Vec<ProtocolOutcome<I, ClContext>>,
) + Send;
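// Illustrative sketch (not part of the original source): any boxed closure with the
// signature above can serve as a `ConsensusConstructor`. For a hypothetical protocol
// type `MyProtocol` and node ID type `NodeId` it might look roughly like this:
//
//     let new_consensus: Box<ConsensusConstructor<NodeId>> = Box::new(
//         |instance_id, validators, faulty, inactive, chainspec, config,
//          prev_era, start_time, seed, now| {
//             // `MyProtocol::new_boxed` is assumed to return the
//             // `(Box<dyn ConsensusProtocol<_, ClContext>>, Vec<ProtocolOutcome<_, ClContext>>)`
//             // pair that the constructor signature requires.
//             MyProtocol::new_boxed(
//                 instance_id, validators, faulty, inactive, chainspec, config,
//                 prev_era, start_time, seed, now,
//             )
//         },
//     );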
#[derive(DataSize)]
pub struct EraSupervisor<I> {
/// A map of consensus protocol instances.
/// A value is a trait so that we can run different consensus protocols per era.
///
/// This map always contains exactly `2 * bonded_eras + 1` entries, with the last one being the
/// current one.
open_eras: HashMap<EraId, Era<I>>,
secret_signing_key: Arc<SecretKey>,
public_signing_key: PublicKey,
current_era: EraId,
protocol_config: ProtocolConfig,
config: Config,
#[data_size(skip)] // Negligible for most closures, zero for functions.
new_consensus: Box<ConsensusConstructor<I>>,
/// The height of the next block to be finalized.
/// We keep that in order to be able to signal to the Block Proposer how many blocks have been
/// finalized when we request a new block. This way the Block Proposer can know whether it's up
/// to date, or whether it has to wait for more finalized blocks before responding.
/// This value could be obtained from the consensus instance in a relevant era, but caching it
/// here is the easiest way of achieving the desired effect.
next_block_height: u64,
/// The height of the next block to be executed. If this falls too far behind, we pause.
next_executed_height: u64,
#[data_size(skip)]
metrics: Metrics,
/// The path to the folder where unit files will be stored.
unit_files_folder: PathBuf,
/// The next upgrade activation point. When the era immediately before the activation point is
/// deactivated, the era supervisor indicates that the node should stop running to allow an
/// upgrade.
next_upgrade_activation_point: Option<ActivationPoint>,
/// If true, the process should stop execution to allow an upgrade to proceed.
stop_for_upgrade: bool,
/// The era that was current when this node joined the network.
era_where_we_joined: EraId,
}
impl<I> Debug for EraSupervisor<I> {
fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
let ae: Vec<_> = self.open_eras.keys().collect();
write!(formatter, "EraSupervisor {{ open_eras: {:?}, .. }}", ae)
}
}
impl<I> EraSupervisor<I>
where
I: NodeIdT,
{
/// Creates a new `EraSupervisor`, starting in the indicated current era.
#[allow(clippy::too_many_arguments)]
pub(crate) fn new<REv: ReactorEventT<I>>(
current_era: EraId,
storage_dir: &Path,
config: WithDir<Config>,
effect_builder: EffectBuilder<REv>,
protocol_config: ProtocolConfig,
maybe_latest_block_header: Option<&BlockHeader>,
next_upgrade_activation_point: Option<ActivationPoint>,
registry: &Registry,
new_consensus: Box<ConsensusConstructor<I>>,
) -> Result<(Self, Effects<Event<I>>), Error> {
if current_era < protocol_config.last_activation_point {
panic!(
"Current era ({:?}) is before the last activation point ({:?}) - no eras would \
be instantiated!",
current_era, protocol_config.last_activation_point
);
}
let unit_files_folder = storage_dir.join("unit_files");
let (root, config) = config.into_parts();
let (secret_signing_key, public_signing_key) = config.load_keys(root)?;
info!(our_id = %public_signing_key, "EraSupervisor pubkey",);
let metrics =
Metrics::new(registry).expect("failure to setup and register ConsensusMetrics");
let activation_era_id = protocol_config.last_activation_point;
let auction_delay = protocol_config.auction_delay;
#[allow(clippy::integer_arithmetic)] // Block height should never reach u64::MAX.
let next_height = maybe_latest_block_header.map_or(0, |hdr| hdr.height() + 1);
let era_supervisor = Self {
open_eras: Default::default(),
secret_signing_key,
public_signing_key,
current_era,
protocol_config,
config,
new_consensus,
next_block_height: next_height,
metrics,
unit_files_folder,
next_upgrade_activation_point,
stop_for_upgrade: false,
next_executed_height: next_height,
era_where_we_joined: current_era,
};
let bonded_eras = era_supervisor.bonded_eras();
let era_ids: Vec<EraId> = era_supervisor
.iter_past(current_era, era_supervisor.bonded_eras().saturating_mul(3))
.collect();
// Asynchronously collect the information needed to initialize all recent eras.
let effects = async move {
info!(?era_ids, "collecting key blocks and booking blocks");
let key_blocks = effect_builder
.collect_key_block_headers(era_ids.iter().cloned())
.await
.expect("should have all the key blocks in storage");
let booking_blocks = collect_booking_block_hashes(
effect_builder,
era_ids.clone(),
auction_delay,
activation_era_id,
)
.await;
if current_era > activation_era_id.saturating_add(bonded_eras.saturating_mul(2).into())
{
// All eras can be initialized using the key blocks only.
(key_blocks, booking_blocks, Default::default())
} else {
let activation_era_validators = effect_builder
.get_era_validators(activation_era_id)
.await
.unwrap_or_default();
(key_blocks, booking_blocks, activation_era_validators)
}
}
.event(
move |(key_blocks, booking_blocks, validators)| Event::InitializeEras {
key_blocks,
booking_blocks,
validators,
},
);
Ok((era_supervisor, effects))
}
/// Returns a list of status changes of active validators.
pub(super) fn get_validator_changes(
&self,
) -> BTreeMap<PublicKey, Vec<(EraId, ValidatorChange)>> {
let mut result: BTreeMap<PublicKey, Vec<(EraId, ValidatorChange)>> = BTreeMap::new();
for ((_, era0), (era_id, era1)) in self.open_eras.iter().tuple_windows() {
for (pub_key, change) in ValidatorChanges::new(era0, era1).0 {
result.entry(pub_key).or_default().push((*era_id, change));
}
}
result
}
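/// Computes the seed for an era by hashing the booking block hash together with the key
/// block's accumulated seed and interpreting the first 8 bytes of the digest as a
/// little-endian `u64`.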
fn era_seed(booking_block_hash: BlockHash, key_block_seed: Digest) -> u64 {
let result = Digest::hash_pair(booking_block_hash, key_block_seed).value();
u64::from_le_bytes(result[0..std::mem::size_of::<u64>()].try_into().unwrap())
}
/// Returns an iterator over era IDs of `num_eras` past eras, plus the provided one.
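/// The range is clamped at the last activation point. For example, with the last
/// activation point at era 0, `iter_past(EraId::from(10), 3)` yields eras 7, 8, 9 and 10.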
pub(crate) fn iter_past(&self, era_id: EraId, num_eras: u64) -> impl Iterator<Item = EraId> {
(self
.protocol_config
.last_activation_point
.max(era_id.saturating_sub(num_eras))
.value()..=era_id.value())
.map(EraId::from)
}
/// Returns an iterator over era IDs of `num_eras` past eras, excluding the provided one.
pub(crate) fn iter_past_other(
&self,
era_id: EraId,
num_eras: u64,
) -> impl Iterator<Item = EraId> {
(self
.protocol_config
.last_activation_point
.max(era_id.saturating_sub(num_eras))
.value()..era_id.value())
.map(EraId::from)
}
/// Returns an iterator over era IDs of `num_eras` future eras, plus the provided one.
fn iter_future(&self, era_id: EraId, num_eras: u64) -> impl Iterator<Item = EraId> {
(era_id.value()..=era_id.value().saturating_add(num_eras)).map(EraId::from)
}
/// Starts a new era; panics if it already exists.
#[allow(clippy::too_many_arguments)] // FIXME
fn new_era(
&mut self,
era_id: EraId,
now: Timestamp,
validators: BTreeMap<PublicKey, U512>,
new_faulty: Vec<PublicKey>,
faulty: HashSet<PublicKey>,
inactive: HashSet<PublicKey>,
seed: u64,
start_time: Timestamp,
start_height: u64,
) -> Vec<ProtocolOutcome<I, ClContext>> {
if self.open_eras.contains_key(&era_id) {
panic!("{} already exists", era_id);
}
let instance_id = instance_id(&self.protocol_config, era_id);
info!(
?validators,
%start_time,
%now,
%start_height,
%instance_id,
%seed,
era = era_id.value(),
"starting era",
);
// Activate the era if this node was already running when the era began, it is still
// ongoing based on its minimum duration, and we are one of the validators.
let our_id = &self.public_signing_key;
let should_activate = if self.current_era > era_id {
trace!(
era = era_id.value(),
current_era = self.current_era.value(),
"not voting; initializing past era"
);
false
} else if !validators.contains_key(our_id) {
info!(era = era_id.value(), %our_id, "not voting; not a validator");
false
} else {
info!(era = era_id.value(), %our_id, "start voting");
true
};
if era_id >= self.current_era {
self.current_era = era_id;
self.metrics.current_era.set(era_id.value() as i64);
}
let prev_era = era_id
.checked_sub(1)
.and_then(|last_era_id| self.open_eras.get(&last_era_id));
let (mut consensus, mut outcomes) = (self.new_consensus)(
instance_id,
validators.clone(),
&faulty,
&inactive,
&self.protocol_config,
&self.config,
prev_era.map(|era| &*era.consensus),
start_time,
seed,
now,
);
if should_activate {
let secret = Keypair::new(self.secret_signing_key.clone(), our_id.clone());
outcomes.extend(consensus.activate_validator(
our_id.clone(),
secret,
now,
Some(self.unit_file(&instance_id)),
))
}
let mut era = Era::new(
consensus,
start_time,
start_height,
new_faulty,
faulty,
inactive,
validators,
);
// Mark validators for which we have evidence in a recent era as faulty.
for e_id in self.iter_past_other(era_id, self.bonded_eras()) {
if let Some(old_era) = self.open_eras.get_mut(&e_id) {
for pub_key in old_era.consensus.validators_with_evidence() {
let proposed_blocks = era.resolve_evidence_and_mark_faulty(pub_key);
if !proposed_blocks.is_empty() {
error!(
?proposed_blocks,
era = e_id.value(),
"unexpected block in new era"
);
}
}
}
}
let _ = self.open_eras.insert(era_id, era);
let oldest_bonded_era_id = oldest_bonded_era(&self.protocol_config, era_id);
// Clear the obsolete data from the era whose validators are unbonded now. We only retain
// the information necessary to validate evidence that units in still-bonded eras may refer
// to for cross-era fault tracking.
if let Some(evidence_only_era_id) = oldest_bonded_era_id.checked_sub(1) {
trace!(era = evidence_only_era_id.value(), "clearing unbonded era");
if let Some(era) = self.open_eras.get_mut(&evidence_only_era_id) {
era.consensus.set_evidence_only();
}
}
// Remove the era that has become obsolete now: The oldest bonded era could still receive
// units that refer to evidence from any era that was bonded when it was the current one.
let oldest_evidence_era_id = oldest_bonded_era(&self.protocol_config, oldest_bonded_era_id);
if let Some(obsolete_era_id) = oldest_evidence_era_id.checked_sub(1) {
if let Some(_era) = self.open_eras.remove(&obsolete_era_id) {
trace!(era = obsolete_era_id.value(), "removing obsolete era");
}
}
outcomes
}
fn prune_unit_files(&self) {
let valid_unit_files: HashSet<_> = self
.open_eras
.iter()
.map(|(_, era)| self.unit_file(era.consensus.instance_id()))
.collect();
let dir_iterator = match fs::read_dir(&self.unit_files_folder) {
Ok(iter) => iter,
Err(err) => {
warn!(?err, path=?self.unit_files_folder, "could not read the unit files folder");
// if we couldn't clean up the unit files, we just return
return;
}
};
for entry in dir_iterator {
let entry = match entry {
Ok(entry) => entry,
Err(err) => {
warn!(
?err,
path=?self.unit_files_folder,
"error while reading the unit files folder",
);
continue;
}
};
let path = entry.path();
if path.is_dir() {
// unit files should be stored directly in the folder, not in subdirectories
continue;
}
if valid_unit_files.contains(&path) {
// don't remove files corresponding to active eras
continue;
}
debug!(?path, "removing unit file");
if let Err(err) = fs::remove_file(&path) {
warn!(?err, ?path, "could not delete unit file");
}
}
}
/// Returns `true` if the specified era is open and bonded.
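/// For example, with `bonded_eras() == 6` and `current_era == 20`, this returns `true`
/// exactly for eras 14 through 20.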
fn is_bonded(&self, era_id: EraId) -> bool {
era_id.saturating_add(self.bonded_eras().into()) >= self.current_era
&& era_id <= self.current_era
}
/// Returns whether the validator with the given public key is bonded in that era.
fn is_validator_in(&self, pub_key: &PublicKey, era_id: EraId) -> bool {
let has_validator = |era: &Era<I>| era.validators().contains_key(pub_key);
self.open_eras.get(&era_id).map_or(false, has_validator)
}
pub(crate) fn stop_for_upgrade(&self) -> bool {
self.stop_for_upgrade
}
/// Updates `next_executed_height` based on the given block header, and unpauses consensus if
/// block execution has caught up with finalization.
#[allow(clippy::integer_arithmetic)] // Block height should never reach u64::MAX.
fn executed_block(&mut self, block_header: &BlockHeader) {
self.next_executed_height = self.next_executed_height.max(block_header.height() + 1);
self.update_consensus_pause();
}
/// Pauses or unpauses consensus: Whenever the last executed block is too far behind the last
/// finalized block, we suspend consensus.
fn update_consensus_pause(&mut self) {
let paused = self
.next_block_height
.saturating_sub(self.next_executed_height)
> self.config.highway.max_execution_delay;
match self.open_eras.get_mut(&self.current_era) {
Some(era) => era.set_paused(paused),
None => error!(
era = self.current_era.value(),
"current era not initialized"
),
}
}
pub(super) fn handle_initialize_eras<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
key_blocks: HashMap<EraId, BlockHeader>,
booking_blocks: HashMap<EraId, BlockHash>,
activation_era_validators: BTreeMap<PublicKey, U512>,
) -> Effects<Event<I>> {
let mut effects = Effects::new();
let now = Timestamp::now();
for era_id in self.iter_past(self.current_era, self.bonded_eras().saturating_mul(2)) {
let new_faulty;
let validators;
let start_height;
let era_start_time;
let seed;
let booking_block_hash = booking_blocks
.get(&era_id)
.expect("should have booking block");
#[allow(clippy::integer_arithmetic)] // Block height should never reach u64::MAX.
if era_id.is_genesis() {
new_faulty = vec![];
// The validator set was read from the global state: there's no key block for era 0.
validators = activation_era_validators.clone();
start_height = 0;
era_start_time = self
.protocol_config
.genesis_timestamp
.expect("must have genesis start time if era ID is 0");
seed = 0;
} else {
// If this is not era 0, there must be a key block for it.
let key_block = key_blocks.get(&era_id).expect("missing key block");
start_height = key_block.height() + 1;
era_start_time = key_block.timestamp();
seed = Self::era_seed(*booking_block_hash, key_block.accumulated_seed());
if era_id == self.protocol_config.last_activation_point {
// After an upgrade or emergency restart, we don't track faults cross-era.
new_faulty = vec![];
// And we read the validator sets from the global state, because the key block
// might have been overwritten by the upgrade/restart.
validators = activation_era_validators.clone();
} else {
// If it's neither genesis nor upgrade nor restart, we use the validators from
// the key block and ban validators that were faulty in previous eras.
new_faulty = key_block
.era_end()
.expect("key block must be a switch block")
.era_report()
.equivocators
.clone();
validators = key_block
.next_era_validator_weights()
.expect("missing validators from key block")
.clone();
}
}
let faulty = self
.iter_past(era_id, self.banning_period())
.filter_map(|old_id| key_blocks.get(&old_id).and_then(|bhdr| bhdr.era_end()))
.flat_map(|era_end| era_end.era_report().equivocators.clone())
.collect();
let results = self.new_era(
era_id,
now,
validators,
new_faulty,
faulty,
key_blocks
.get(&era_id)
.and_then(|bhdr| bhdr.era_end())
.into_iter()
.flat_map(|era_end| &era_end.era_report().inactive_validators)
.cloned()
.collect(),
seed,
era_start_time,
start_height,
);
effects.extend(self.handle_consensus_outcomes(effect_builder, rng, era_id, results));
}
self.prune_unit_files();
let open_era_outcomes = self.open_eras[&self.current_era]
.consensus
.handle_is_current(now);
self.next_block_height = self.open_eras[&self.current_era].start_height;
effects.extend(self.handle_consensus_outcomes(
effect_builder,
rng,
self.current_era,
open_era_outcomes,
));
info!("finished initializing era supervisor");
info!(?self, "current eras");
effects
}
/// The number of past eras whose validators are still bonded. After this many eras, a former
/// validator is allowed to withdraw their stake, so their signature can't be trusted anymore.
///
/// A node keeps `2 * bonded_eras` past eras around, because the oldest bonded era could still
/// receive blocks that refer to `bonded_eras` before that.
fn bonded_eras(&self) -> u64 {
bonded_eras(&self.protocol_config)
}
/// The number of past eras we have to check for faulty validators that will be banned in the
/// next era.
// TODO: This should just be `auction_delay`, but we need to guarantee we have enough
// eras.
fn banning_period(&self) -> u64 {
self.bonded_eras().min(self.protocol_config.auction_delay)
}
/// Returns the path to the era's unit file.
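/// The file name is derived from the era's instance ID and this node's public key, i.e.
/// (illustratively) `unit_<instance_id>_<public_key_hex>.dat` inside `unit_files_folder`.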
fn unit_file(&self, instance_id: &Digest) -> PathBuf {
self.unit_files_folder.join(format!(
"unit_{:?}_{}.dat",
instance_id,
self.public_signing_key.to_hex()
))
}
/// Applies `f` to the consensus protocol of the specified era.
fn delegate_to_era<REv: ReactorEventT<I>, F>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
era_id: EraId,
f: F,
) -> Effects<Event<I>>
where
F: FnOnce(
&mut dyn ConsensusProtocol<I, ClContext>,
&mut NodeRng,
) -> Vec<ProtocolOutcome<I, ClContext>>,
{
match self.open_eras.get_mut(&era_id) {
None => {
if era_id > self.current_era {
info!(era = era_id.value(), "received message for future era");
} else {
info!(era = era_id.value(), "received message for obsolete era");
}
Effects::new()
}
Some(era) => {
let outcomes = f(&mut *era.consensus, rng);
self.handle_consensus_outcomes(effect_builder, rng, era_id, outcomes)
}
}
}
pub(super) fn handle_timer<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
era_id: EraId,
timestamp: Timestamp,
timer_id: TimerId,
) -> Effects<Event<I>> {
self.delegate_to_era(effect_builder, rng, era_id, move |consensus, _| {
consensus.handle_timer(timestamp, timer_id)
})
}
pub(super) fn handle_action<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
era_id: EraId,
action_id: ActionId,
) -> Effects<Event<I>> {
self.delegate_to_era(effect_builder, rng, era_id, move |consensus, _| {
consensus.handle_action(action_id, Timestamp::now())
})
}
pub(super) fn handle_message<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
sender: I,
msg: ConsensusMessage,
) -> Effects<Event<I>> {
match msg {
ConsensusMessage::Protocol { era_id, payload } => {
// If the era is already unbonded, only accept new evidence, because still-bonded
// eras could depend on that.
trace!(era = era_id.value(), "received a consensus message");
self.delegate_to_era(effect_builder, rng, era_id, move |consensus, rng| {
consensus.handle_message(rng, sender, payload, Timestamp::now())
})
}
ConsensusMessage::EvidenceRequest { era_id, pub_key } => {
if !self.is_bonded(era_id) {
trace!(era = era_id.value(), "not handling message; era too old");
return Effects::new();
}
self.iter_past(era_id, self.bonded_eras())
.flat_map(|e_id| {
self.delegate_to_era(effect_builder, rng, e_id, |consensus, _| {
consensus.request_evidence(sender.clone(), &pub_key)
})
})
.collect()
}
}
}
pub(super) fn handle_new_block_payload<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
new_block_payload: NewBlockPayload,
) -> Effects<Event<I>> {
let NewBlockPayload {
era_id,
block_payload,
block_context,
} = new_block_payload;
if !self.is_bonded(era_id) {
warn!(era = era_id.value(), "new block payload in outdated era");
return Effects::new();
}
let proposed_block = ProposedBlock::new(block_payload, block_context);
self.delegate_to_era(effect_builder, rng, era_id, move |consensus, _| {
consensus.propose(proposed_block, Timestamp::now())
})
}
pub(super) fn handle_block_added<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
block_header: BlockHeader,
) -> Effects<Event<I>> {
let our_pk = self.public_signing_key.clone();
let our_sk = self.secret_signing_key.clone();
let era_id = block_header.era_id();
self.executed_block(&block_header);
let mut effects = if self.is_validator_in(&our_pk, era_id) {
effect_builder
.announce_created_finality_signature(FinalitySignature::new(
block_header.hash(),
era_id,
&our_sk,
our_pk,
))
.ignore()
} else {
Effects::new()
};
if era_id < self.current_era {
trace!(era = era_id.value(), "executed block in old era");
return effects;
}
if block_header.is_switch_block() {
if let Some(era) = self.open_eras.get_mut(&era_id) {
// This was the era's last block. Schedule deactivating this era.
let delay = Timestamp::now()
.saturating_diff(block_header.timestamp())
.into();
let faulty_num = era.consensus.validators_with_evidence().len();
let deactivate_era = move |_| Event::DeactivateEra {
era_id,
faulty_num,
delay,
};
effects.extend(effect_builder.set_timeout(delay).event(deactivate_era));
} else {
error!(era = era_id.value(), %block_header, "executed block in uninitialized era");
}
// If it's not the last block before an upgrade, initialize the next era.
if !self.should_upgrade_after(&era_id) {
let new_era_id = era_id.successor();
let effect = get_booking_block_hash(
effect_builder,
new_era_id,
self.protocol_config.auction_delay,
self.protocol_config.last_activation_point,
)
.event(move |booking_block_hash| Event::CreateNewEra {
switch_block_header: Box::new(block_header),
booking_block_hash: Ok(booking_block_hash),
});
effects.extend(effect);
}
}
effects
}
pub(super) fn handle_deactivate_era<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
era_id: EraId,
old_faulty_num: usize,
delay: Duration,
) -> Effects<Event<I>> {
let era = if let Some(era) = self.open_eras.get_mut(&era_id) {
era
} else {
warn!(era = era_id.value(), "trying to deactivate obsolete era");
return Effects::new();
};
let faulty_num = era.consensus.validators_with_evidence().len();
if faulty_num == old_faulty_num {
info!(era = era_id.value(), "stop voting in era");
era.consensus.deactivate_validator();
if self.should_upgrade_after(&era_id) {
// If the next era is at or after the upgrade activation point, stop the node.
info!(era = era_id.value(), "shutting down for upgrade");
self.stop_for_upgrade = true;
}
Effects::new()
} else {
let deactivate_era = move |_| Event::DeactivateEra {
era_id,
faulty_num,
delay,
};
effect_builder.set_timeout(delay).event(deactivate_era)
}
}
/// Creates a new era.
pub(super) fn handle_create_new_era<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
switch_block_header: BlockHeader,
booking_block_hash: BlockHash,
) -> Effects<Event<I>> {
let (era_report, next_era_validators_weights) = match (
switch_block_header.era_end(),
switch_block_header.next_era_validator_weights(),
) {
(Some(era_end), Some(next_era_validator_weights)) => {
(era_end.era_report(), next_era_validator_weights)
}
_ => {
return fatal!(
effect_builder,
"attempted to create a new era with a non-switch block: {}",
switch_block_header
)
.ignore()
}
};
let new_faulty = era_report.equivocators.clone();
let era_id = switch_block_header.era_id().successor();
info!(era = era_id.value(), "era created");
let seed = EraSupervisor::<I>::era_seed(
booking_block_hash,
switch_block_header.accumulated_seed(),
);
trace!(%seed, "the seed for {}: {}", era_id, seed);
let faulty = self
.iter_past_other(era_id, self.banning_period())
.flat_map(|e_id| &self.open_eras[&e_id].new_faulty)
.chain(&new_faulty)
.cloned()
.collect();
let now = Timestamp::now(); // TODO: This should be passed in.
#[allow(clippy::integer_arithmetic)] // Block height should never reach u64::MAX.
let mut outcomes = self.new_era(
era_id,
now,
next_era_validators_weights.clone(),
new_faulty,
faulty,
era_report.inactive_validators.iter().cloned().collect(),
seed,
switch_block_header.timestamp(),
switch_block_header.height() + 1,
);
self.prune_unit_files();
outcomes.extend(self.open_eras[&era_id].consensus.handle_is_current(now));
self.handle_consensus_outcomes(effect_builder, rng, era_id, outcomes)
}
pub(super) fn resolve_validity<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
resolve_validity: ResolveValidity<I>,
) -> Effects<Event<I>> {
let ResolveValidity {
era_id,
sender,
proposed_block,
valid,
} = resolve_validity;
self.metrics.proposed_block();
let mut effects = Effects::new();
if !valid {
warn!(
peer_id = %sender,
era = %era_id.value(),
"invalid consensus value; disconnecting from the sender"
);
effects.extend(self.disconnect(effect_builder, sender));
}
if self
.open_eras
.get_mut(&era_id)
.map_or(false, |era| era.resolve_validity(&proposed_block, valid))
{
effects.extend(
self.delegate_to_era(effect_builder, rng, era_id, |consensus, _| {
consensus.resolve_validity(proposed_block, valid, Timestamp::now())
}),
);
}
effects
}
fn handle_consensus_outcomes<REv: ReactorEventT<I>, T>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
era_id: EraId,
outcomes: T,
) -> Effects<Event<I>>
where
T: IntoIterator<Item = ProtocolOutcome<I, ClContext>>,
{
outcomes
.into_iter()
.flat_map(|result| self.handle_consensus_outcome(effect_builder, rng, era_id, result))
.collect()
}
/// Returns `true` if any of the most recent eras has evidence against the validator with key
/// `pub_key`.
fn has_evidence(&self, era_id: EraId, pub_key: PublicKey) -> bool {
self.iter_past(era_id, self.bonded_eras())
.any(|eid| self.era(eid).consensus.has_evidence(&pub_key))
}
/// Returns the era with the specified ID. Panics if it does not exist.
fn era(&self, era_id: EraId) -> &Era<I> {
&self.open_eras[&era_id]
}
/// Returns the era with the specified ID mutably. Panics if it does not exist.
fn era_mut(&mut self, era_id: EraId) -> &mut Era<I> {
self.open_eras.get_mut(&era_id).unwrap()
}
#[allow(clippy::integer_arithmetic)] // Block height should never reach u64::MAX.
fn handle_consensus_outcome<REv: ReactorEventT<I>>(
&mut self,
effect_builder: EffectBuilder<REv>,
rng: &mut NodeRng,
era_id: EraId,
consensus_result: ProtocolOutcome<I, ClContext>,
) -> Effects<Event<I>> {
match consensus_result {
ProtocolOutcome::InvalidIncomingMessage(_, sender, error) => {
warn!(
%sender,
%error,
"invalid incoming message to consensus instance; disconnecting from the sender"
);
self.disconnect(effect_builder, sender)
}
ProtocolOutcome::Disconnect(sender) => {
warn!(
%sender,
"disconnecting from the sender of invalid data"
);
self.disconnect(effect_builder, sender)
}
ProtocolOutcome::CreatedGossipMessage(payload) => {
let message = ConsensusMessage::Protocol { era_id, payload };
// TODO: we'll want to gossip instead of broadcast here
effect_builder.broadcast_message(message.into()).ignore()
}
ProtocolOutcome::CreatedTargetedMessage(payload, to) => {
let message = ConsensusMessage::Protocol { era_id, payload };
effect_builder.send_message(to, message.into()).ignore()
}
ProtocolOutcome::CreatedMessageToRandomPeer(payload) => {
let message = ConsensusMessage::Protocol { era_id, payload };
async move {
let peers = effect_builder.get_peers_in_random_order().await;
if let Some(to) = peers.into_iter().next() {
effect_builder.send_message(to, message.into()).await;
}
}
.ignore()
}
ProtocolOutcome::ScheduleTimer(timestamp, timer_id) => {
let timediff = timestamp.saturating_diff(Timestamp::now());
effect_builder
.set_timeout(timediff.into())
.event(move |_| Event::Timer {
era_id,
timestamp,
timer_id,
})
}
ProtocolOutcome::QueueAction(action_id) => effect_builder
.immediately()
.event(move |()| Event::Action { era_id, action_id }),
ProtocolOutcome::CreateNewBlock(block_context) => {
let accusations = self
.iter_past(era_id, self.bonded_eras())
.flat_map(|e_id| self.era(e_id).consensus.validators_with_evidence())
.unique()
.filter(|pub_key| !self.era(era_id).faulty.contains(pub_key))
.cloned()
.collect();
effect_builder
.request_block_payload(
block_context.clone(),
self.next_block_height,
accusations,
rng.gen(),
)
.event(move |block_payload| {
Event::NewBlockPayload(NewBlockPayload {
era_id,
block_payload,
block_context,
})
})
}
ProtocolOutcome::FinalizedBlock(CpFinalizedBlock {
value,
timestamp,
relative_height,
terminal_block_data,
equivocators,
proposer,
}) => {
if era_id != self.current_era {
debug!(era = era_id.value(), "finalized block in old era");
return Effects::new();
}
let era = self.open_eras.get_mut(&era_id).unwrap();
era.add_accusations(&equivocators);
era.add_accusations(value.accusations());
// If this is the era's last block, it contains rewards. Everyone who is accused in
// the block or seen as equivocating via the consensus protocol is marked as faulty.
let report = terminal_block_data.map(|tbd| EraReport {
rewards: tbd.rewards,
equivocators: era.accusations(),
inactive_validators: tbd.inactive_validators,
});
let finalized_block = FinalizedBlock::new(
Arc::try_unwrap(value).unwrap_or_else(|arc| (*arc).clone()),
report,
timestamp,
era_id,
era.start_height + relative_height,
proposer,
);
info!(
era_id = ?finalized_block.era_id(),
height = ?finalized_block.height(),
timestamp = ?finalized_block.timestamp(),
"finalized block"
);
self.metrics.finalized_block(&finalized_block);
// Announce the finalized block.
let mut effects = effect_builder
.announce_finalized_block(finalized_block.clone())
.ignore();
self.next_block_height = self.next_block_height.max(finalized_block.height() + 1);
// Request execution of the finalized block.
effects.extend(execute_finalized_block(effect_builder, finalized_block).ignore());
self.update_consensus_pause();
effects
}
ProtocolOutcome::ValidateConsensusValue {
sender,
proposed_block,
} => {
if !self.is_bonded(era_id) {
return Effects::new(); // Outdated era; we don't need the value anymore.
}
let missing_evidence: Vec<PublicKey> = proposed_block
.value()
.accusations()
.iter()
.filter(|pub_key| !self.has_evidence(era_id, (*pub_key).clone()))
.cloned()
.collect();
self.era_mut(era_id)
.add_block(proposed_block.clone(), missing_evidence.clone());
if let Some(deploy_hash) = proposed_block.contains_replay() {
info!(%sender, %deploy_hash, "block contains a replayed deploy");
return self.resolve_validity(
effect_builder,
rng,
ResolveValidity {
era_id,
sender,
proposed_block,
valid: false,
},
);
}
let mut effects = Effects::new();
for pub_key in missing_evidence {
let msg = ConsensusMessage::EvidenceRequest { era_id, pub_key };
effects.extend(
effect_builder
.send_message(sender.clone(), msg.into())
.ignore(),
);
}
effects.extend(
async move {
check_deploys_for_replay_in_previous_eras_and_validate_block(
effect_builder,
era_id,
sender,
proposed_block,
)
.await
}
.event(std::convert::identity),
);
effects
}
ProtocolOutcome::NewEvidence(pub_key) => {
info!(%pub_key, era = era_id.value(), "validator equivocated");
let mut effects = effect_builder
.announce_fault_event(era_id, pub_key.clone(), Timestamp::now())
.ignore();
for e_id in self.iter_future(era_id, self.bonded_eras()) {
let proposed_blocks = if let Some(era) = self.open_eras.get_mut(&e_id) {
era.resolve_evidence_and_mark_faulty(&pub_key)
} else {
continue;
};
for proposed_block in proposed_blocks {
effects.extend(self.delegate_to_era(
effect_builder,
rng,
e_id,
|consensus, _| {
consensus.resolve_validity(proposed_block, true, Timestamp::now())
},
));
}
}
effects
}
ProtocolOutcome::SendEvidence(sender, pub_key) => self
.iter_past_other(era_id, self.bonded_eras())
.flat_map(|e_id| {
self.delegate_to_era(effect_builder, rng, e_id, |consensus, _| {
consensus.request_evidence(sender.clone(), &pub_key)
})
})
.collect(),
ProtocolOutcome::WeAreFaulty => Default::default(),
ProtocolOutcome::DoppelgangerDetected => Default::default(),
ProtocolOutcome::FttExceeded => effect_builder
.set_timeout(Duration::from_millis(FTT_EXCEEDED_SHUTDOWN_DELAY_MILLIS))
.then(move |_| fatal!(effect_builder, "too many faulty validators"))
.ignore(),
ProtocolOutcome::StandstillAlert => {
if era_id == self.current_era && era_id == self.era_where_we_joined {
warn!(era = %era_id.value(), "current era is stalled; shutting down");
fatal!(effect_builder, "current era is stalled; please retry").ignore()
} else {
if era_id == self.current_era {
warn!(era = %era_id.value(), "current era is stalled");
}
Effects::new()
}
}
}
}
/// Handles registering an upgrade activation point.
pub(super) fn got_upgrade_activation_point(
&mut self,
activation_point: ActivationPoint,
) -> Effects<Event<I>> {
debug!("got {}", activation_point);
self.next_upgrade_activation_point = Some(activation_point);
Effects::new()
}
pub(super) fn status(
&self,
responder: Responder<Option<(PublicKey, Option<TimeDiff>)>>,
) -> Effects<Event<I>> {
let public_key = self.public_signing_key.clone();
let round_length = self
.open_eras
.get(&self.current_era)
.and_then(|era| era.consensus.next_round_length());
responder.respond(Some((public_key, round_length))).ignore()
}
fn disconnect<REv: ReactorEventT<I>>(
&self,
effect_builder: EffectBuilder<REv>,
sender: I,
) -> Effects<Event<I>> {
effect_builder
.announce_disconnect_from_peer(sender)
.ignore()
}
pub(super) fn should_upgrade_after(&self, era_id: &EraId) -> bool {
match self.next_upgrade_activation_point {
None => false,
Some(upgrade_point) => upgrade_point.should_upgrade(era_id),
}
}
}
#[cfg(test)]
impl<I> EraSupervisor<I>
where
I: NodeIdT,
{
/// Returns the most recent era.
pub(crate) fn current_era(&self) -> EraId {
self.current_era
}
/// Returns the list of validators who equivocated in this era.
pub(crate) fn validators_with_evidence(&self, era_id: EraId) -> Vec<&PublicKey> {
self.open_eras[&era_id].consensus.validators_with_evidence()
}
/// Returns this node's validator key.
pub(crate) fn public_key(&self) -> &PublicKey {
&self.public_signing_key
}
}
/// Returns an era ID in which the booking block for `era_id` lives, if we can use it.
/// The booking block for era N is the switch block (i.e. the last block) of era N - AUCTION_DELAY - 1.
/// To find it, we get the start height of era N - AUCTION_DELAY and subtract 1.
/// We make sure not to use an era ID below the last upgrade activation point, because we will
/// not have instances of eras from before that.
///
/// We can't use it if it is:
/// * before Genesis
/// * before upgrade
/// * before emergency restart
/// In those cases, returns `None`.
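///
/// For example, with `auction_delay == 1` and `last_activation_point` at era 0, the booking
/// block for era 10 lives in era 8: `10 - 1 = 9` is still above the activation point, so we
/// return `Some(EraId::from(8))`.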
fn valid_booking_block_era_id(
era_id: EraId,
auction_delay: u64,
last_activation_point: EraId,
) -> Option<EraId> {
let after_booking_era_id = era_id.saturating_sub(auction_delay);
// If we would have gone below the last activation point (the first `AUCTION_DELAY` eras after
// an upgrade), we return `None` as there are no booking blocks there that we can use – we
// can't use anything from before an upgrade.
// NOTE that it's OK if `booking_era_id` == `last_activation_point`.
(after_booking_era_id > last_activation_point).then(|| after_booking_era_id.saturating_sub(1))
}
/// Returns a booking block hash for `era_id`.
async fn get_booking_block_hash<REv>(
effect_builder: EffectBuilder<REv>,
era_id: EraId,
auction_delay: u64,
last_activation_point: EraId,
) -> BlockHash
where
REv: From<StorageRequest>,
{
if let Some(booking_block_era_id) =
valid_booking_block_era_id(era_id, auction_delay, last_activation_point)
{
match effect_builder
.get_switch_block_header_at_era_id_from_storage(booking_block_era_id)
.await
{
Some(block_header) => block_header.hash(),
None => {
error!(
?era_id,
?booking_block_era_id,
"booking block header for era must exist"
);
panic!("booking block header not found in storage");
}
}
} else {
// If there's no booking block for the `era_id`
// (b/c it would have been from before Genesis, upgrade or emergency restart),
// use a "zero" block hash. This should not hurt the security of the leader selection
// algorithm.
BlockHash::default()
}
}
/// Returns booking block hashes for the eras.
async fn collect_booking_block_hashes<REv>(
effect_builder: EffectBuilder<REv>,
era_ids: Vec<EraId>,
auction_delay: u64,
last_activation_point: EraId,
) -> HashMap<EraId, BlockHash>
where
REv: From<StorageRequest>,
{
let mut booking_block_hashes: HashMap<EraId, BlockHash> = HashMap::new();
for era_id in era_ids {
let booking_block_hash =
get_booking_block_hash(effect_builder, era_id, auction_delay, last_activation_point)
.await;
booking_block_hashes.insert(era_id, booking_block_hash);
}
booking_block_hashes
}
async fn get_deploys_or_transfers<REv>(
effect_builder: EffectBuilder<REv>,
hashes: Vec<DeployHash>,
) -> Option<Vec<Deploy>>
where
REv: From<StorageRequest>,
{
let mut deploys_or_transfer: Vec<Deploy> = Vec::with_capacity(hashes.len());
for maybe_deploy_or_transfer in effect_builder.get_deploys_from_storage(hashes).await {
if let Some(deploy_or_transfer) = maybe_deploy_or_transfer {
deploys_or_transfer.push(deploy_or_transfer)
} else {
return None;
}
}
Some(deploys_or_transfer)
}
async fn execute_finalized_block<REv>(
effect_builder: EffectBuilder<REv>,
finalized_block: FinalizedBlock,
) where
REv: From<StorageRequest> + From<ControlAnnouncement> + From<ContractRuntimeRequest>,
{
// Get all deploys in order they appear in the finalized block.
let deploys =
match get_deploys_or_transfers(effect_builder, finalized_block.deploy_hashes().to_owned())
.await
{
Some(deploys) => deploys,
None => {
fatal!(
effect_builder,
"Could not fetch deploys for finalized block: {:?}",
finalized_block
)
.await;
return;
}
};
// Get all transfers in order they appear in the finalized block.
let transfers = match get_deploys_or_transfers(
effect_builder,
finalized_block.transfer_hashes().to_owned(),
)
.await
{
Some(transfers) => transfers,
None => {
fatal!(
effect_builder,
"Could not fetch transfers for finalized block: {:?}",
finalized_block
)
.await;
return;
}
};
effect_builder
.enqueue_block_for_execution(finalized_block, deploys, transfers)
.await
}
/// Computes the instance ID for an era, given the era ID and the chainspec hash.
fn instance_id(protocol_config: &ProtocolConfig, era_id: EraId) -> Digest {
Digest::hash_pair(protocol_config.chainspec_hash, era_id.to_le_bytes())
.value()
.into()
}
/// The number of past eras whose validators are still bonded. After this many eras, a former
/// validator is allowed to withdraw their stake, so their signature can't be trusted anymore.
///
/// A node keeps `2 * bonded_eras` past eras around, because the oldest bonded era could still
/// receive blocks that refer to `bonded_eras` before that.
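///
/// For example (illustrative values), with `unbonding_delay == 7` and `auction_delay == 1`,
/// validators stay bonded for `7 - 1 = 6` past eras.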
fn bonded_eras(protocol_config: &ProtocolConfig) -> u64 {
protocol_config
.unbonding_delay
.saturating_sub(protocol_config.auction_delay)
}
/// The oldest era whose validators are still bonded.
// This is public because it's used in reactor::validator::tests.
pub(crate) fn oldest_bonded_era(protocol_config: &ProtocolConfig, current_era: EraId) -> EraId {
current_era
.saturating_sub(bonded_eras(protocol_config))
.max(protocol_config.last_activation_point)
}
/// Checks that a [BlockPayload] does not have deploys we have already included in blocks in
/// previous eras. This is done by repeatedly querying storage for deploy metadata. When metadata is
/// found, storage is queried again to get the era ID of the block that included the deploy. That era
/// ID must *not* be less than the current era; otherwise the deploy is a replay attack.
async fn check_deploys_for_replay_in_previous_eras_and_validate_block<REv, I>(
effect_builder: EffectBuilder<REv>,
proposed_block_era_id: EraId,
sender: I,
proposed_block: ProposedBlock<ClContext>,
) -> Event<I>
where
REv: From<BlockValidationRequest<I>> + From<StorageRequest>,
I: Clone + Send + 'static,
{
for deploy_hash in proposed_block.value().deploys_and_transfers_iter() {
let block_header = match effect_builder
.get_block_header_for_deploy_from_storage(deploy_hash.into())
.await
{
None => continue,
Some(header) => header,
};
// We have found the deploy in the database. If it was from a previous era, it was a
// replay attack.
//
// If not, then it might be a deploy for a block we are currently coming to
// consensus on, and we will rely on the immediate ancestors of the
// block_payload within the current era to determine whether we are facing a
// replay attack.
if block_header.era_id() < proposed_block_era_id {
return Event::ResolveValidity(ResolveValidity {
era_id: proposed_block_era_id,
sender: sender.clone(),
proposed_block: proposed_block.clone(),
valid: false,
});
}
}
let sender_for_validate_block: I = sender.clone();
let valid = effect_builder
.validate_block(sender_for_validate_block, proposed_block.clone())
.await;
Event::ResolveValidity(ResolveValidity {
era_id: proposed_block_era_id,
sender,
proposed_block,
valid,
})
}
impl ProposedBlock<ClContext> {
/// If this block contains a deploy that's also present in an ancestor, this returns the deploy
/// hash, otherwise `None`.
fn contains_replay(&self) -> Option<DeployHash> {
let block_deploys_set: BTreeSet<DeployOrTransferHash> =
self.value().deploys_and_transfers_iter().collect();
self.context()
.ancestor_values()
.iter()
.flat_map(|ancestor| ancestor.deploys_and_transfers_iter())
.find(|deploy| block_deploys_set.contains(deploy))
.map(DeployOrTransferHash::into)
}
}
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::cell::RefCell;
use std::collections::Bound::{Excluded, Unbounded};
use std::collections::VecDeque;
use std::sync::atomic::AtomicBool;
use std::sync::{atomic, Arc};
use std::time::{Duration, Instant};
use std::{cmp, mem, slice, u64};
use engine::rocks::{Snapshot, SyncSnapshot, WriteBatch, WriteOptions, DB};
use engine::{Engines, Peekable};
use kvproto::metapb;
use kvproto::pdpb::PeerStats;
use kvproto::raft_cmdpb::{
self, AdminCmdType, AdminResponse, CmdType, RaftCmdRequest, RaftCmdResponse, ReadIndexResponse,
Request, Response, TransferLeaderRequest, TransferLeaderResponse,
};
use kvproto::raft_serverpb::{
MergeState, PeerState, RaftApplyState, RaftMessage, RaftSnapshotData,
};
use protobuf::{self, Message};
use raft::eraftpb::{self, ConfChangeType, EntryType, MessageType};
use raft::{
self, Progress, ProgressState, RawNode, Ready, SnapshotStatus, StateRole, INVALID_INDEX,
NO_LIMIT,
};
use time::Timespec;
use crate::pd::{PdTask, INVALID_ID};
use crate::raftstore::coprocessor::{CoprocessorHost, RegionChangeEvent};
use crate::raftstore::store::fsm::store::PollContext;
use crate::raftstore::store::fsm::{
apply, Apply, ApplyMetrics, ApplyTask, ApplyTaskRes, GroupState, Proposal, RegionProposal,
};
use crate::raftstore::store::keys::{enc_end_key, enc_start_key};
use crate::raftstore::store::worker::{ReadDelegate, ReadProgress, RegionTask};
use crate::raftstore::store::{keys, Callback, Config, ReadResponse, RegionSnapshot};
use crate::raftstore::{Error, Result};
use tikv_util::collections::HashMap;
use tikv_util::time::{duration_to_sec, monotonic_raw_now};
use tikv_util::worker::Scheduler;
use tikv_util::{escape, MustConsumeVec};
use super::cmd_resp;
use super::local_metrics::{RaftMessageMetrics, RaftReadyMetrics};
use super::metrics::*;
use super::peer_storage::{write_peer_state, ApplySnapResult, InvokeContext, PeerStorage};
use super::transport::Transport;
use super::util::{self, check_region_epoch, is_initial_msg, Lease, LeaseState};
use super::DestroyPeerJob;
const SHRINK_CACHE_CAPACITY: usize = 64;
struct ReadIndexRequest {
id: u64,
cmds: MustConsumeVec<(RaftCmdRequest, Callback)>,
renew_lease_time: Timespec,
read_index: Option<u64>,
}
impl ReadIndexRequest {
// Transmutes `self.id` to an 8-byte slice, so that we can use the payload to do a read index.
fn binary_id(&self) -> &[u8] {
unsafe {
let id = &self.id as *const u64 as *const u8;
slice::from_raw_parts(id, 8)
}
}
}
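// Note (not part of the original source): the unsafe cast in `binary_id` yields the
// native-endian bytes of `self.id`; a safe sketch of the same idea, returning an owned
// array instead of a borrowed slice, would be:
//
//     fn binary_id_bytes(id: u64) -> [u8; 8] {
//         id.to_ne_bytes()
//     }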
#[derive(Default)]
struct ReadIndexQueue {
id_allocator: u64,
reads: VecDeque<ReadIndexRequest>,
ready_cnt: usize,
}
impl ReadIndexQueue {
fn next_id(&mut self) -> u64 {
self.id_allocator += 1;
self.id_allocator
}
fn clear_uncommitted(&mut self, term: u64) {
for mut read in self.reads.drain(self.ready_cnt..) {
for (_, cb) in read.cmds.drain(..) {
apply::notify_stale_req(term, cb);
}
}
}
/// Updates the read index of the requests that come before the specified id.
fn advance(&mut self, id: &[u8], read_index: u64) {
if let Some(i) = self.reads.iter().position(|x| x.binary_id() == id) {
for pos in 0..=i {
let req = &mut self.reads[pos];
let index = req.read_index.get_or_insert(read_index);
if *index > read_index {
*index = read_index;
}
}
if self.ready_cnt < i + 1 {
self.ready_cnt = i + 1;
}
} else {
error!(
"cannot find corresponding read from pending reads";
"id"=>?id, "read-index" =>read_index,
);
}
}
fn gc(&mut self) {
if self.reads.capacity() > SHRINK_CACHE_CAPACITY && self.reads.len() < SHRINK_CACHE_CAPACITY
{
self.reads.shrink_to_fit();
}
}
}
/// The returned states of the peer after checking whether it is stale
#[derive(Debug, PartialEq, Eq)]
pub enum StaleState {
Valid,
ToValidate,
LeaderMissing,
}
/// Meta information about proposals.
pub struct ProposalMeta {
pub index: u64,
pub term: u64,
/// `renew_lease_time` contains the last time when a peer starts to renew lease.
pub renew_lease_time: Option<Timespec>,
}
#[derive(Default)]
struct ProposalQueue {
queue: VecDeque<ProposalMeta>,
}
impl ProposalQueue {
fn pop(&mut self, term: u64) -> Option<ProposalMeta> {
self.queue.pop_front().and_then(|meta| {
if meta.term > term {
self.queue.push_front(meta);
return None;
}
Some(meta)
})
}
fn push(&mut self, meta: ProposalMeta) {
self.queue.push_back(meta);
}
fn clear(&mut self) {
self.queue.clear();
}
fn gc(&mut self) {
if self.queue.capacity() > SHRINK_CACHE_CAPACITY && self.queue.len() < SHRINK_CACHE_CAPACITY
{
self.queue.shrink_to_fit();
}
}
}
bitflags! {
// TODO: maybe declaring it as a protobuf struct would be better.
/// A bitmap containing some useful flags when dealing with `eraftpb::Entry`.
pub struct ProposalContext: u8 {
const SYNC_LOG = 0b00000001;
const SPLIT = 0b00000010;
const PREPARE_MERGE = 0b00000100;
}
}
impl ProposalContext {
/// Converts itself to a vector.
pub fn to_vec(self) -> Vec<u8> {
if self.is_empty() {
return vec![];
}
let ctx = self.bits();
vec![ctx]
}
/// Initializes a `ProposalContext` from a byte slice.
pub fn from_bytes(ctx: &[u8]) -> ProposalContext {
if ctx.is_empty() {
ProposalContext::empty()
} else if ctx.len() == 1 {
ProposalContext::from_bits_truncate(ctx[0])
} else {
panic!("invalid ProposalContext {:?}", ctx);
}
}
}
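// Illustrative roundtrip (not part of the original source): a context with both
// `SYNC_LOG` and `SPLIT` set serializes to a single byte and deserializes back to
// the same flags:
//
//     let ctx = ProposalContext::SYNC_LOG | ProposalContext::SPLIT;
//     assert_eq!(ctx.to_vec(), vec![0b0000_0011]);
//     assert_eq!(ProposalContext::from_bytes(&ctx.to_vec()), ctx);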
/// `ConsistencyState` is used for consistency check.
pub struct ConsistencyState {
pub last_check_time: Instant,
// (computed_result_or_to_be_verified, index, hash)
pub index: u64,
pub hash: Vec<u8>,
}
/// Statistics about raft peer.
#[derive(Default, Clone)]
pub struct PeerStat {
pub written_bytes: u64,
pub written_keys: u64,
}
#[derive(Default, Debug, Clone, Copy)]
pub struct CheckTickResult {
leader: bool,
up_to_date: bool,
}
/// A struct that stores the state to wait for `PrepareMerge` apply result.
///
/// When handling the apply result of a `CommitMerge`, the source peer may not have
/// handled the apply result of the `PrepareMerge` yet, so the target peer has
/// to abort the current handling process and wait for it asynchronously.
pub struct WaitApplyResultState {
/// The following apply results waiting to be handled, including the `CommitMerge`.
/// These will be handled once `ready_to_merge` is true.
pub results: Vec<ApplyTaskRes>,
/// It is used by target peer to check whether the apply result of `PrepareMerge` is handled.
pub ready_to_merge: Arc<AtomicBool>,
}
pub struct Peer {
/// The ID of the Region which this Peer belongs to.
region_id: u64,
// TODO: remove it once panic!() supports slog fields.
/// Peer_tag, "[region <region_id>] <peer_id>"
pub tag: String,
/// The Peer meta information.
pub peer: metapb::Peer,
/// The Raft state machine of this Peer.
pub raft_group: RawNode<PeerStorage>,
/// The cache of meta information for Region's other Peers.
peer_cache: RefCell<HashMap<u64, metapb::Peer>>,
/// Record the last instant of each peer's heartbeat response.
pub peer_heartbeats: HashMap<u64, Instant>,
proposals: ProposalQueue,
apply_proposals: Vec<Proposal>,
leader_missing_time: Option<Instant>,
leader_lease: Lease,
pending_reads: ReadIndexQueue,
/// Whether this peer has failed to send messages to the leader.
pub leader_unreachable: bool,
/// Whether this peer is destroyed asynchronously.
pub pending_remove: bool,
/// If a snapshot is being applied asynchronously, messages should not be sent.
pending_messages: Vec<eraftpb::Message>,
/// Record the instants of peers being added into the configuration.
/// Remove them after they are not pending any more.
pub peers_start_pending_time: Vec<(u64, Instant)>,
/// An inaccurate cache of which peers are marked as down.
down_peer_ids: Vec<u64>,
pub recent_conf_change_time: Instant,
/// An inaccurate difference in region size since last reset.
/// It is used to decide whether split check is needed.
pub size_diff_hint: u64,
/// The count of deleted keys since last reset.
delete_keys_hint: u64,
/// An inaccurate difference in region size after compaction.
/// It is used to trigger a split check to update the approximate size and keys after space
/// reclamation of deleted entries.
pub compaction_declined_bytes: u64,
/// Approximate size of the region.
pub approximate_size: Option<u64>,
/// Approximate keys of the region.
pub approximate_keys: Option<u64>,
/// The state for consistency check.
pub consistency_state: ConsistencyState,
/// The index of last scheduled committed raft log.
pub last_applying_idx: u64,
/// The index of last compacted raft log. It is used for the next compact log task.
pub last_compacted_idx: u64,
/// The index of the latest urgent proposal index.
last_urgent_proposal_idx: u64,
/// The index of the latest committed split command.
last_committed_split_idx: u64,
/// Approximate size of logs that are applied but not compacted yet.
pub raft_log_size_hint: u64,
/// The index of the latest committed prepare merge command.
last_committed_prepare_merge_idx: u64,
/// The merge related state. It indicates this Peer is in merging.
pub pending_merge_state: Option<MergeState>,
/// The state to wait for `PrepareMerge` apply result.
pub pending_merge_apply_result: Option<WaitApplyResultState>,
/// Write Statistics for PD to schedule hot spot.
pub peer_stat: PeerStat,
}
impl Peer {
pub fn new(
store_id: u64,
cfg: &Config,
sched: Scheduler<RegionTask>,
engines: Engines,
region: &metapb::Region,
peer: metapb::Peer,
) -> Result<Peer> {
if peer.get_id() == raft::INVALID_ID {
return Err(box_err!("invalid peer id"));
}
let tag = format!("[region {}] {}", region.get_id(), peer.get_id());
let ps = PeerStorage::new(engines.clone(), region, sched, peer.get_id(), tag.clone())?;
let applied_index = ps.applied_index();
let raft_cfg = raft::Config {
id: peer.get_id(),
peers: vec![],
election_tick: cfg.raft_election_timeout_ticks,
heartbeat_tick: cfg.raft_heartbeat_ticks,
min_election_tick: cfg.raft_min_election_timeout_ticks,
max_election_tick: cfg.raft_max_election_timeout_ticks,
max_size_per_msg: cfg.raft_max_size_per_msg.0,
max_inflight_msgs: cfg.raft_max_inflight_msgs,
applied: applied_index,
check_quorum: true,
tag: tag.clone(),
skip_bcast_commit: true,
pre_vote: cfg.prevote,
..Default::default()
};
let raft_group = RawNode::new(&raft_cfg, ps, vec![])?;
let mut peer = Peer {
peer,
region_id: region.get_id(),
raft_group,
proposals: Default::default(),
apply_proposals: vec![],
pending_reads: Default::default(),
peer_cache: RefCell::new(HashMap::default()),
peer_heartbeats: HashMap::default(),
peers_start_pending_time: vec![],
down_peer_ids: vec![],
recent_conf_change_time: Instant::now(),
size_diff_hint: 0,
delete_keys_hint: 0,
approximate_size: None,
approximate_keys: None,
compaction_declined_bytes: 0,
leader_unreachable: false,
pending_remove: false,
pending_merge_state: None,
last_committed_prepare_merge_idx: 0,
leader_missing_time: Some(Instant::now()),
tag,
last_applying_idx: applied_index,
last_compacted_idx: 0,
last_urgent_proposal_idx: u64::MAX,
last_committed_split_idx: 0,
consistency_state: ConsistencyState {
last_check_time: Instant::now(),
index: INVALID_INDEX,
hash: vec![],
},
raft_log_size_hint: 0,
leader_lease: Lease::new(cfg.raft_store_max_leader_lease()),
pending_messages: vec![],
pending_merge_apply_result: None,
peer_stat: PeerStat::default(),
};
// If this region has only one peer and I am the one, campaign directly.
if region.get_peers().len() == 1 && region.get_peers()[0].get_store_id() == store_id {
peer.raft_group.campaign()?;
}
Ok(peer)
}
/// Register self to apply_scheduler so that the peer is then usable.
/// Also trigger `RegionChangeEvent::Create` here.
pub fn activate<T, C>(&self, ctx: &PollContext<T, C>) {
ctx.apply_router
.schedule_task(self.region_id, ApplyTask::register(self));
ctx.coprocessor_host.on_region_changed(
self.region(),
RegionChangeEvent::Create,
self.get_role(),
);
}
#[inline]
fn next_proposal_index(&self) -> u64 {
self.raft_group.raft.raft_log.last_index() + 1
}
/// Tries to destroy itself. Returns a job (if needed) to do more cleaning tasks.
pub fn maybe_destroy(&mut self) -> Option<DestroyPeerJob> {
if self.pending_remove {
info!(
"is being destroyed, skip";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
return None;
}
let initialized = self.get_store().is_initialized();
let async_remove = if self.is_applying_snapshot() {
if !self.mut_store().cancel_applying_snap() {
info!(
"stale peer is applying snapshot, will destroy next time";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
return None;
}
// There are no tasks in the apply/local read worker.
false
} else {
initialized
};
self.pending_remove = true;
Some(DestroyPeerJob {
async_remove,
initialized,
region_id: self.region_id,
peer: self.peer.clone(),
})
}
/// Does the real destroy task which includes:
/// 1. Set the region to tombstone;
/// 2. Clear data;
/// 3. Notify all pending requests.
pub fn destroy<T, C>(&mut self, ctx: &PollContext<T, C>, keep_data: bool) -> Result<()> {
fail_point!("raft_store_skip_destroy_peer", |_| Ok(()));
let t = Instant::now();
let region = self.region().clone();
info!(
"begin to destroy";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
// Set Tombstone state explicitly
let kv_wb = WriteBatch::new();
let raft_wb = WriteBatch::new();
self.mut_store().clear_meta(&kv_wb, &raft_wb)?;
write_peer_state(
&ctx.engines.kv,
&kv_wb,
®ion,
PeerState::Tombstone,
self.pending_merge_state.clone(),
)?;
// Write to the kv rocksdb first, in case a restart happens between the two writes.
let mut write_opts = WriteOptions::new();
write_opts.set_sync(ctx.cfg.sync_log);
ctx.engines.write_kv_opt(&kv_wb, &write_opts)?;
ctx.engines.write_raft_opt(&raft_wb, &write_opts)?;
if self.get_store().is_initialized() && !keep_data {
// If a panic occurs while deleting the data and raft log, the dirty data
// will be cleared up by applying a newer snapshot or by a restart.
if let Err(e) = self.get_store().clear_data() {
error!(
"failed to schedule clear data task";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
}
}
for mut read in self.pending_reads.reads.drain(..) {
for (_, cb) in read.cmds.drain(..) {
apply::notify_req_region_removed(region.get_id(), cb);
}
}
for proposal in self.apply_proposals.drain(..) {
apply::notify_req_region_removed(region.get_id(), proposal.cb);
}
info!(
"peer destroy itself";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"takes" => ?t.elapsed(),
);
Ok(())
}
#[inline]
pub fn is_initialized(&self) -> bool {
self.get_store().is_initialized()
}
#[inline]
pub fn region(&self) -> &metapb::Region {
self.get_store().region()
}
/// Check whether the peer can be hibernated.
///
/// This should be used with `check_after_tick` to get a correct conclusion.
pub fn check_before_tick(&self, cfg: &Config) -> CheckTickResult {
let mut res = CheckTickResult::default();
if !self.is_leader() {
return res;
}
res.leader = true;
if self.raft_group.raft.election_elapsed + 1 < cfg.raft_election_timeout_ticks {
return res;
}
let status = self.raft_group.status_ref();
let last_index = self.raft_group.raft.raft_log.last_index();
for (id, pr) in status.progress.unwrap().iter() {
// Only recently active peers are considered, so that an isolated follower
// won't waste the leader's resources.
if *id == self.peer.get_id() || !pr.recent_active {
continue;
}
// Keep replicating data to active followers.
if pr.matched != last_index {
return res;
}
}
// Unapplied entries can change the configuration of the group.
res.up_to_date = self.get_store().applied_index() == last_index;
res
}
pub fn check_after_tick(&self, state: GroupState, res: CheckTickResult) -> bool {
if res.leader {
res.up_to_date && self.is_leader() && self.raft_group.raft.pending_read_count() == 0
} else {
// If the follower keeps receiving data from the leader, it's safe to stop
// ticking, as the leader will make sure it has the latest logs.
// Checking the term makes sure the campaign has finished and the leader has
// started doing its job; it's not required, but it's a safe option.
state != GroupState::Chaos
&& self.raft_group.raft.leader_id != raft::INVALID_ID
&& self.raft_group.raft.raft_log.last_term() == self.raft_group.raft.term
}
}
/// Pings followers to check whether they are still connected.
///
/// The leader needs to know the exact progress of followers, while
/// followers just need to know whether the leader is still alive.
pub fn ping(&mut self) {
if self.is_leader() {
self.raft_group.ping();
}
}
/// Set the region of a peer.
///
/// This will update the region of the peer; the caller must ensure the region
/// has been persisted to a durable device.
pub fn set_region(
&mut self,
host: &CoprocessorHost,
reader: &mut ReadDelegate,
region: metapb::Region,
) {
if self.region().get_region_epoch().get_version() < region.get_region_epoch().get_version()
{
// The epoch version changed; disable reads on the local reader for this region.
self.leader_lease.expire_remote_lease();
}
self.mut_store().set_region(region.clone());
let progress = ReadProgress::region(region);
// Always update the read delegate's region to avoid stale region info after a follower
// becomes a leader.
self.maybe_update_read_progress(reader, progress);
if !self.pending_remove {
host.on_region_changed(self.region(), RegionChangeEvent::Update, self.get_role());
}
}
#[inline]
pub fn peer_id(&self) -> u64 {
self.peer.get_id()
}
#[inline]
pub fn get_raft_status(&self) -> raft::StatusRef<'_> {
self.raft_group.status_ref()
}
#[inline]
pub fn leader_id(&self) -> u64 {
self.raft_group.raft.leader_id
}
#[inline]
pub fn is_leader(&self) -> bool {
self.raft_group.raft.state == StateRole::Leader
}
#[inline]
pub fn get_role(&self) -> StateRole {
self.raft_group.raft.state
}
#[inline]
pub fn get_store(&self) -> &PeerStorage {
self.raft_group.get_store()
}
#[inline]
pub fn mut_store(&mut self) -> &mut PeerStorage {
self.raft_group.mut_store()
}
#[inline]
pub fn is_applying_snapshot(&self) -> bool {
self.get_store().is_applying_snapshot()
}
/// Returns `true` if the raft group has replicated a snapshot but not committed it yet.
#[inline]
pub fn has_pending_snapshot(&self) -> bool {
self.get_pending_snapshot().is_some()
}
#[inline]
pub fn get_pending_snapshot(&self) -> Option<&eraftpb::Snapshot> {
self.raft_group.get_snap()
}
fn add_ready_metric(&self, ready: &Ready, metrics: &mut RaftReadyMetrics) {
metrics.message += ready.messages.len() as u64;
metrics.commit += ready
.committed_entries
.as_ref()
.map_or(0, |v| v.len() as u64);
metrics.append += ready.entries.len() as u64;
if !raft::is_empty_snap(&ready.snapshot) {
metrics.snapshot += 1;
}
}
#[inline]
fn send<T, I>(&mut self, trans: &mut T, msgs: I, metrics: &mut RaftMessageMetrics) -> Result<()>
where
T: Transport,
I: IntoIterator<Item = eraftpb::Message>,
{
for msg in msgs {
let msg_type = msg.get_msg_type();
self.send_raft_message(msg, trans)?;
match msg_type {
MessageType::MsgAppend => metrics.append += 1,
MessageType::MsgAppendResponse => metrics.append_resp += 1,
MessageType::MsgRequestPreVote => metrics.prevote += 1,
MessageType::MsgRequestPreVoteResponse => metrics.prevote_resp += 1,
MessageType::MsgRequestVote => metrics.vote += 1,
MessageType::MsgRequestVoteResponse => metrics.vote_resp += 1,
MessageType::MsgSnapshot => metrics.snapshot += 1,
MessageType::MsgHeartbeat => metrics.heartbeat += 1,
MessageType::MsgHeartbeatResponse => metrics.heartbeat_resp += 1,
MessageType::MsgTransferLeader => metrics.transfer_leader += 1,
MessageType::MsgTimeoutNow => {
// After a leader transfer procedure is triggered, the lease for
// the old leader may expire earlier than usual, since a new leader
// may be elected while the old leader doesn't step down due to a
// network partition from the new leader.
// For lease safety during leader transfer, transition `leader_lease`
// to suspect.
self.leader_lease.suspect(monotonic_raw_now());
metrics.timeout_now += 1;
}
// We do not care about these message types for metrics.
// Explicitly declare them so when we add new message types we are forced to
// decide.
MessageType::MsgHup
| MessageType::MsgBeat
| MessageType::MsgPropose
| MessageType::MsgUnreachable
| MessageType::MsgSnapStatus
| MessageType::MsgCheckQuorum
| MessageType::MsgReadIndex
| MessageType::MsgReadIndexResp => {}
}
}
Ok(())
}
/// Steps the raft message.
pub fn step(&mut self, m: eraftpb::Message) -> Result<()> {
fail_point!(
"step_message_3_1",
{ self.peer.get_store_id() == 3 && self.region_id == 1 },
|_| Ok(())
);
if self.is_leader() && m.get_from() != INVALID_ID {
self.peer_heartbeats.insert(m.get_from(), Instant::now());
// As the leader we know we are not missing.
self.leader_missing_time.take();
} else if m.get_from() == self.leader_id() {
// The message comes from the leader, so we know the leader is not missing.
self.leader_missing_time.take();
}
self.raft_group.step(m)?;
Ok(())
}
/// Checks and updates `peer_heartbeats` for the peer.
pub fn check_peers(&mut self) {
if !self.is_leader() {
self.peer_heartbeats.clear();
self.peers_start_pending_time.clear();
return;
}
if self.peer_heartbeats.len() == self.region().get_peers().len() {
return;
}
// Insert heartbeats in case some peers never respond to heartbeats.
let region = self.raft_group.get_store().region();
for peer in region.get_peers() {
self.peer_heartbeats
.entry(peer.get_id())
.or_insert_with(Instant::now);
}
}
/// Collects all down peers.
pub fn collect_down_peers(&mut self, max_duration: Duration) -> Vec<PeerStats> {
let mut down_peers = Vec::new();
let mut down_peer_ids = Vec::new();
for p in self.region().get_peers() {
if p.get_id() == self.peer.get_id() {
continue;
}
if let Some(instant) = self.peer_heartbeats.get(&p.get_id()) {
if instant.elapsed() >= max_duration {
let mut stats = PeerStats::new();
stats.set_peer(p.clone());
stats.set_down_seconds(instant.elapsed().as_secs());
down_peers.push(stats);
down_peer_ids.push(p.get_id());
}
}
}
self.down_peer_ids = down_peer_ids;
down_peers
}
/// Collects all pending peers and updates `peers_start_pending_time`.
pub fn collect_pending_peers(&mut self) -> Vec<metapb::Peer> {
let mut pending_peers = Vec::with_capacity(self.region().get_peers().len());
let status = self.raft_group.status_ref();
let truncated_idx = self.get_store().truncated_index();
if status.progress.is_none() {
return pending_peers;
}
let progresses = status.progress.unwrap().iter();
for (&id, progress) in progresses {
if id == self.peer.get_id() {
continue;
}
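// A peer whose matched index is behind the leader's truncated index can only
// catch up via a snapshot, so it is reported as pending.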
if progress.matched < truncated_idx {
if let Some(p) = self.get_peer_from_cache(id) {
pending_peers.push(p);
if !self
.peers_start_pending_time
.iter()
.any(|&(pid, _)| pid == id)
{
let now = Instant::now();
self.peers_start_pending_time.push((id, now));
debug!(
"peer start pending";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"time" => ?now,
);
}
}
}
}
pending_peers
}
/// Returns `true` if any peer recovers from a connectivity problem.
///
/// A peer can become pending or down if it has not responded for a
/// long time. If it becomes normal again, PD needs to be notified.
pub fn any_new_peer_catch_up(&mut self, peer_id: u64) -> bool {
if self.peers_start_pending_time.is_empty() && self.down_peer_ids.is_empty() {
return false;
}
if !self.is_leader() {
self.down_peer_ids = vec![];
self.peers_start_pending_time = vec![];
return false;
}
for i in 0..self.peers_start_pending_time.len() {
if self.peers_start_pending_time[i].0 != peer_id {
continue;
}
let truncated_idx = self.raft_group.get_store().truncated_index();
if let Some(progress) = self.raft_group.raft.prs().get(peer_id) {
if progress.matched >= truncated_idx {
let (_, pending_after) = self.peers_start_pending_time.swap_remove(i);
let elapsed = duration_to_sec(pending_after.elapsed());
debug!(
"peer has caught up logs";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"takes" => elapsed,
);
return true;
}
}
}
if self.down_peer_ids.contains(&peer_id) {
return true;
}
false
}
pub fn check_stale_state<T, C>(&mut self, ctx: &mut PollContext<T, C>) -> StaleState {
if self.is_leader() {
// Leaders always have a valid state.
//
// We update `leader_missing_time` in `fn step`. However, a single-peer region
// does not send any raft messages, so we have to check and update it before
// reporting stale states.
self.leader_missing_time = None;
return StaleState::Valid;
}
let naive_peer = !self.is_initialized() || self.raft_group.raft.is_learner;
// Updates the `leader_missing_time` according to the current state.
//
// If we are checking this, it means we suspect the leader might be missing.
// Record the time when we are called, so we can check later whether it has been
// longer than it should be.
match self.leader_missing_time {
None => {
self.leader_missing_time = Instant::now().into();
StaleState::Valid
}
Some(instant) if instant.elapsed() >= ctx.cfg.max_leader_missing_duration.0 => {
// Resets the `leader_missing_time` to avoid sending the same tasks to
// the PD worker continuously during the leader-missing timeout.
self.leader_missing_time = Instant::now().into();
StaleState::ToValidate
}
Some(instant)
if instant.elapsed() >= ctx.cfg.abnormal_leader_missing_duration.0
&& !naive_peer =>
{
// A peer is considered to be in the leader-missing state
// if it's initialized but is isolated from its leader, or
// something bad has happened so that the raft group cannot elect a leader.
StaleState::LeaderMissing
}
_ => StaleState::Valid,
}
}
fn on_role_changed<T, C>(&mut self, ctx: &mut PollContext<T, C>, ready: &Ready) {
// Update leader lease when the Raft state changes.
if let Some(ref ss) = ready.ss {
match ss.raft_state {
StateRole::Leader => {
// The local read can only be performed after a new leader has applied
// the first empty entry on its term. After that the lease expiring time
// should be updated to
// send_to_quorum_ts + max_lease
// as the comments in `Lease` explain.
// It is recommended to update the lease expiring time right after
// this peer becomes leader because it's more convenient to do it here and
// it has no impact on the correctness.
let progress_term = ReadProgress::term(self.term());
self.maybe_renew_leader_lease(monotonic_raw_now(), ctx, Some(progress_term));
debug!(
"becomes leader with lease";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"lease" => ?self.leader_lease,
);
}
StateRole::Follower => {
self.leader_lease.expire();
}
_ => {}
}
ctx.coprocessor_host
.on_role_change(self.region(), ss.raft_state);
}
}
#[inline]
pub fn ready_to_handle_pending_snap(&self) -> bool {
// If the apply worker is still working, the written apply state may be overwritten
// by the apply worker. So we have to wait here.
// Please note that committed_index can't be used here. When applying a snapshot,
// a stale heartbeat can make the leader think the follower has already applied
// the snapshot and send the remaining log entries, which may increase committed_index.
// TODO: add more test
self.last_applying_idx == self.get_store().applied_index()
}
#[inline]
fn ready_to_handle_read(&self) -> bool {
// TODO: It may cause read index to wait a long time.
// If applied_index_term isn't equal to the current term, there may be some values
// that were applied by the old leader but not yet by this leader.
self.get_store().applied_index_term() == self.term()
// There may be stale reads if the old leader splits really slowly:
// the new region may have already elected a new leader while
// the old leader still thinks it owns the split range.
&& !self.is_splitting()
// There may be stale reads if the target leader is in another store and has
// applied commit merge and written new values, but the sibling peer in
// this store has not applied commit merge yet, so the leader is not ready
// to read until the merge is rolled back.
&& !self.is_merging()
}
#[inline]
fn is_splitting(&self) -> bool {
self.last_committed_split_idx > self.get_store().applied_index()
}
#[inline]
fn is_merging(&self) -> bool {
self.last_committed_prepare_merge_idx > self.get_store().applied_index()
|| self.pending_merge_state.is_some()
}
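/// Takes the proposals accumulated since the last poll so they can be handed
/// over to the apply system together with the committed entries.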
pub fn take_apply_proposals(&mut self) -> Option<RegionProposal> {
if self.apply_proposals.is_empty() {
return None;
}
let proposals = mem::replace(&mut self.apply_proposals, vec![]);
let region_proposal = RegionProposal::new(self.peer_id(), self.region_id, proposals);
Some(region_proposal)
}
pub fn handle_raft_ready_append<T: Transport, C>(
&mut self,
ctx: &mut PollContext<T, C>,
) -> Option<(Ready, InvokeContext)> {
if self.pending_remove {
return None;
}
if self.mut_store().check_applying_snap() {
// If we continued handling all the messages, the leader would send all the
// remaining messages to this follower, which could lead to a full message
// queue under high load.
debug!(
"still applying snapshot, skip further handling";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
return None;
}
if !self.pending_messages.is_empty() {
fail_point!("raft_before_follower_send");
let messages = mem::replace(&mut self.pending_messages, vec![]);
ctx.need_flush_trans = true;
self.send(&mut ctx.trans, messages, &mut ctx.raft_metrics.message)
.unwrap_or_else(|e| {
warn!(
"failed to clear snapshot pending messages";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
});
}
if let Some(snap) = self.get_pending_snapshot() {
if !self.ready_to_handle_pending_snap() {
debug!(
"is not ready to apply snapshot";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"apply_index" => self.get_store().applied_index(),
"last_applying_index" => self.last_applying_idx,
);
return None;
}
let mut snap_data = RaftSnapshotData::new();
snap_data
.merge_from_bytes(snap.get_data())
.unwrap_or_else(|e| {
warn!(
"failed to parse snap data";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
});
let region = snap_data.take_region();
let meta = ctx.store_meta.lock().unwrap();
// A region's range changes if and only if its epoch version changes. So if the
// snapshot's version is not larger than the current one, we can be sure there is no overlap.
if region.get_region_epoch().get_version()
> meta.regions[®ion.get_id()]
.get_region_epoch()
.get_version()
{
// During the merge process, when applying a snapshot or creating a new peer, the
// stale source peer is destroyed asynchronously. So check here whether there is any
// overlap; if so, wait and do not handle raft ready.
if let Some(r) = meta
.region_ranges
.range((Excluded(enc_start_key(®ion)), Unbounded::<Vec<u8>>))
.map(|(_, ®ion_id)| &meta.regions[®ion_id])
.take_while(|r| enc_start_key(r) < enc_end_key(®ion))
.find(|r| r.get_id() != region.get_id())
{
info!(
"snapshot range overlaps, wait source destroy finish";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"apply_index" => self.get_store().applied_index(),
"last_applying_index" => self.last_applying_idx,
"overlap_region" => ?r,
);
return None;
}
}
}
// Check whether there is a pending generate-snapshot task; the task
// needs to be sent to the apply system.
if let Some(gen_task) = self.mut_store().take_gen_snap_task() {
ctx.apply_router
.schedule_task(self.region_id, ApplyTask::Snapshot(gen_task));
}
if !self
.raft_group
.has_ready_since(Some(self.last_applying_idx))
{
return None;
}
debug!(
"handle raft ready";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
let mut ready = self.raft_group.ready_since(self.last_applying_idx);
self.on_role_changed(ctx, &ready);
self.add_ready_metric(&ready, &mut ctx.raft_metrics.ready);
// The leader can write to disk and replicate to the followers concurrently
// For more details, check raft thesis 10.2.1.
if self.is_leader() {
fail_point!("raft_before_leader_send");
let msgs = ready.messages.drain(..);
ctx.need_flush_trans = true;
self.send(&mut ctx.trans, msgs, &mut ctx.raft_metrics.message)
.unwrap_or_else(|e| {
// We don't care if sending the message fails, so just log the error here.
warn!(
"leader failed to send messages";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
});
}
let invoke_ctx = match self.mut_store().handle_raft_ready(ctx, &ready) {
Ok(r) => r,
Err(e) => {
// We may have written something to the write batch that can't be reverted, so we have
// to panic here.
panic!("{} failed to handle raft ready: {:?}", self.tag, e)
}
};
Some((ready, invoke_ctx))
}
pub fn post_raft_ready_append<T: Transport, C>(
&mut self,
ctx: &mut PollContext<T, C>,
ready: &mut Ready,
invoke_ctx: InvokeContext,
) -> Option<ApplySnapResult> {
if invoke_ctx.has_snapshot() {
// When applying a snapshot, no log has been applied or compacted yet.
self.raft_log_size_hint = 0;
}
let apply_snap_result = self.mut_store().post_ready(invoke_ctx);
if apply_snap_result.is_some() && self.peer.get_is_learner() {
// The peer may change from learner to voter after snapshot applied.
let peer = self
.region()
.get_peers()
.iter()
.find(|p| p.get_id() == self.peer.get_id())
.unwrap()
.clone();
if peer != self.peer {
info!(
"meta changed in applying snapshot";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"before" => ?self.peer,
"after" => ?peer,
);
self.peer = peer;
};
}
if !self.is_leader() {
fail_point!("raft_before_follower_send");
if self.is_applying_snapshot() {
self.pending_messages = mem::replace(&mut ready.messages, vec![]);
} else {
self.send(
&mut ctx.trans,
ready.messages.drain(..),
&mut ctx.raft_metrics.message,
)
.unwrap_or_else(|e| {
warn!(
"follower failed to send messages";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
});
ctx.need_flush_trans = true;
}
}
if apply_snap_result.is_some() {
self.activate(ctx);
let mut meta = ctx.store_meta.lock().unwrap();
meta.readers
.insert(self.region_id, ReadDelegate::from_peer(self));
}
apply_snap_result
}
pub fn handle_raft_ready_apply<T, C>(&mut self, ctx: &mut PollContext<T, C>, mut ready: Ready) {
// Calling `handle_raft_committed_entries` directly here may lead to inconsistency.
// In some cases, there will be pending committed entries when applying a
// snapshot. If we called `handle_raft_committed_entries` directly, these updates
// would be written to disk. Because we apply the snapshot asynchronously, these
// updates would soon be removed. But the soft state of raft would still be updated
// in memory. Hence, when handling ready the next time, these updates wouldn't be
// included in `ready.committed_entries` again, which would lead to inconsistency.
if self.is_applying_snapshot() {
// Snapshot's metadata has been applied.
self.last_applying_idx = self.get_store().truncated_index();
} else {
let committed_entries = ready.committed_entries.take().unwrap();
// The leader needs to update the lease and the last committed split index.
let mut lease_to_be_updated = self.is_leader();
let mut split_to_be_updated = self.is_leader();
let mut merge_to_be_update = self.is_leader();
if !lease_to_be_updated {
// It's not the leader anymore, so it's safe to clear proposals. If it becomes the leader
// again, the lease will be updated when the election finishes; old proposals
// have no effect.
self.proposals.clear();
}
for entry in committed_entries.iter().rev() {
// Raft meta is very small and can be ignored.
self.raft_log_size_hint += entry.get_data().len() as u64;
if lease_to_be_updated {
let propose_time = self.find_propose_time(entry.get_index(), entry.get_term());
if let Some(propose_time) = propose_time {
self.maybe_renew_leader_lease(propose_time, ctx, None);
lease_to_be_updated = false;
}
}
// We care about split/merge commands that are committed in the current term.
if entry.term == self.term() && (split_to_be_updated || merge_to_be_update) {
let ctx = ProposalContext::from_bytes(&entry.context);
if split_to_be_updated && ctx.contains(ProposalContext::SPLIT) {
// We don't need to suspect its lease because peers of the new region that are
// in other stores do not start an election before their election timeout,
// which is longer than the max leader lease.
// It's safe to read locally within the current lease; however, it's not
// safe to renew the lease.
self.last_committed_split_idx = entry.index;
split_to_be_updated = false;
}
if merge_to_be_update && ctx.contains(ProposalContext::PREPARE_MERGE) {
// We committed prepare merge; to prevent unsafe read index,
// we must record its index.
self.last_committed_prepare_merge_idx = entry.get_index();
// After prepare_merge is committed, the leader cannot know
// when the target region has merged a majority of this region, nor
// can it know when the target region writes new values.
// To prevent unsafe local reads, we suspect its leader lease.
self.leader_lease.suspect(monotonic_raw_now());
merge_to_be_update = false;
}
}
}
if !committed_entries.is_empty() {
self.last_applying_idx = committed_entries.last().unwrap().get_index();
if self.last_applying_idx >= self.last_urgent_proposal_idx {
// Urgent requests have been flushed; make commit broadcasting lazy again.
self.raft_group.skip_bcast_commit(true);
self.last_urgent_proposal_idx = u64::MAX;
}
let apply = Apply::new(self.region_id, self.term(), committed_entries);
ctx.apply_router
.schedule_task(self.region_id, ApplyTask::apply(apply));
}
}
self.apply_reads(ctx, &ready);
self.raft_group.advance_append(ready);
if self.is_applying_snapshot() {
// Because we only handle raft ready when not applying a snapshot, the following
// line won't be called twice for the same snapshot.
self.raft_group.advance_apply(self.last_applying_idx);
}
self.proposals.gc();
}
/// Responds to ready read index requests on a replica (a peer that is not the leader).
fn post_pending_read_index_on_replica<T, C>(&mut self, ctx: &mut PollContext<T, C>) {
if self.pending_reads.ready_cnt > 0 {
for _ in 0..self.pending_reads.ready_cnt {
let (read_index, is_read_index_request) = {
let read = self.pending_reads.reads.front().unwrap();
if read.cmds.len() == 1
&& read.cmds[0].0.get_requests().len() == 1
&& read.cmds[0].0.get_requests()[0].get_cmd_type() == CmdType::ReadIndex
{
(read.read_index, true)
} else {
(read.read_index, false)
}
};
let mut read = self.pending_reads.reads.pop_front().unwrap();
if !is_read_index_request {
let term = self.term();
// Only read index request is valid.
for (_, cb) in read.cmds.drain(..) {
apply::notify_stale_req(term, cb);
}
continue;
}
for (req, cb) in read.cmds.drain(..) {
cb.invoke_read(self.handle_read(ctx, req, true, read_index));
}
self.pending_reads.ready_cnt -= 1;
}
}
}
fn apply_reads<T, C>(&mut self, ctx: &mut PollContext<T, C>, ready: &Ready) {
let mut propose_time = None;
// The follower may have lost `ReadIndexResp`, so `pending_reads` does not
// guarantee its order is consistent with `read_states`. `advance` will
// update the `read_index` of the read requests that come before this successful
// `ready`.
if !self.is_leader() && !ready.read_states.is_empty() {
for state in &ready.read_states {
self.pending_reads
.advance(state.request_ctx.as_slice(), state.index);
self.post_pending_read_index_on_replica(ctx);
}
return;
}
if self.ready_to_handle_read() {
for state in &ready.read_states {
let mut read = self.pending_reads.reads.pop_front().unwrap();
assert_eq!(state.request_ctx.as_slice(), read.binary_id());
for (req, cb) in read.cmds.drain(..) {
cb.invoke_read(self.handle_read(ctx, req, true, Some(state.index)));
}
propose_time = Some(read.renew_lease_time);
}
} else {
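// Not ready to serve the reads yet (see `ready_to_handle_read`); record the
// read index and bump `ready_cnt` so the requests can be answered later,
// e.g. in `post_apply`.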
for state in &ready.read_states {
let read = &mut self.pending_reads.reads[self.pending_reads.ready_cnt];
assert_eq!(state.request_ctx.as_slice(), read.binary_id());
self.pending_reads.ready_cnt += 1;
read.read_index = Some(state.index);
propose_time = Some(read.renew_lease_time);
}
}
// Note that only after handling read_states can we identify which requests are
// actually stale.
if ready.ss.is_some() {
let term = self.term();
// all uncommitted reads will be dropped silently in raft.
self.pending_reads.clear_uncommitted(term);
}
if let Some(propose_time) = propose_time {
// `propose_time` is a placeholder; here we only care about `Suspect`,
// and if the lease is in the `Suspect` phase, the actual timestamp is useless.
if self.leader_lease.inspect(Some(propose_time)) == LeaseState::Suspect {
return;
}
self.maybe_renew_leader_lease(propose_time, ctx, None);
}
}
pub fn post_apply<T, C>(
&mut self,
ctx: &mut PollContext<T, C>,
apply_state: RaftApplyState,
applied_index_term: u64,
merged: bool,
apply_metrics: &ApplyMetrics,
) -> bool {
let mut has_ready = false;
if self.is_applying_snapshot() {
panic!("{} should not applying snapshot.", self.tag);
}
if !merged {
self.raft_group
.advance_apply(apply_state.get_applied_index());
}
let progress_to_be_updated = self.mut_store().applied_index_term() != applied_index_term;
self.mut_store().set_applied_state(apply_state);
self.mut_store().set_applied_term(applied_index_term);
self.peer_stat.written_keys += apply_metrics.written_keys;
self.peer_stat.written_bytes += apply_metrics.written_bytes;
self.delete_keys_hint += apply_metrics.delete_keys_hint;
let diff = self.size_diff_hint as i64 + apply_metrics.size_diff_hint;
self.size_diff_hint = cmp::max(diff, 0) as u64;
if self.has_pending_snapshot() && self.ready_to_handle_pending_snap() {
has_ready = true;
}
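// Applying entries may make pending read index requests serviceable: replicas
// answer them directly, while the leader drains them once it has applied
// entries in its own term.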
if !self.is_leader() {
self.post_pending_read_index_on_replica(ctx)
} else {
if self.pending_reads.ready_cnt > 0 && self.ready_to_handle_read() {
for _ in 0..self.pending_reads.ready_cnt {
let mut read = self.pending_reads.reads.pop_front().unwrap();
for (req, cb) in read.cmds.drain(..) {
cb.invoke_read(self.handle_read(ctx, req, true, read.read_index));
}
}
self.pending_reads.ready_cnt = 0;
}
}
self.pending_reads.gc();
// Only leaders need to update applied_index_term.
if progress_to_be_updated && self.is_leader() {
let progress = ReadProgress::applied_index_term(applied_index_term);
let mut meta = ctx.store_meta.lock().unwrap();
let reader = meta.readers.get_mut(&self.region_id).unwrap();
self.maybe_update_read_progress(reader, progress);
}
has_ready
}
pub fn post_split(&mut self) {
// Reset delete_keys_hint and size_diff_hint.
self.delete_keys_hint = 0;
self.size_diff_hint = 0;
}
/// Try to renew leader lease.
fn maybe_renew_leader_lease<T, C>(
&mut self,
ts: Timespec,
ctx: &mut PollContext<T, C>,
progress: Option<ReadProgress>,
) {
// A non-leader peer should never have a leader lease.
let read_progress = if !self.is_leader() {
None
} else if self.is_splitting() {
// A splitting leader should not renew its lease.
// Because we split regions asynchronously, the leader may read stale results
// if splitting runs slowly on the leader.
debug!(
"prevents renew lease while splitting";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
None
} else if self.is_merging() {
// A merging leader should not renew its lease.
// Because we merge regions asynchronously, the leader may read stale results
// if commit merge runs slowly on sibling peers.
debug!(
"prevents renew lease while merging";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
None
} else {
self.leader_lease.renew(ts);
let term = self.term();
if let Some(remote_lease) = self.leader_lease.maybe_new_remote_lease(term) {
Some(ReadProgress::leader_lease(remote_lease))
} else {
None
}
};
if let Some(progress) = progress {
let mut meta = ctx.store_meta.lock().unwrap();
let reader = meta.readers.get_mut(&self.region_id).unwrap();
self.maybe_update_read_progress(reader, progress);
}
if let Some(progress) = read_progress {
let mut meta = ctx.store_meta.lock().unwrap();
let reader = meta.readers.get_mut(&self.region_id).unwrap();
self.maybe_update_read_progress(reader, progress);
}
}
fn maybe_update_read_progress(&self, reader: &mut ReadDelegate, progress: ReadProgress) {
if self.pending_remove {
return;
}
debug!(
"update read progress";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"progress" => ?progress,
);
reader.update(progress);
}
pub fn maybe_campaign(&mut self, parent_is_leader: bool) -> bool {
if self.region().get_peers().len() <= 1 {
// The peer campaigned when it was created, no need to do it again.
return false;
}
if !parent_is_leader {
return false;
}
// If the parent peer is the leader of the region before the split, it is natural for
// this peer to become the leader of the new split region.
let _ = self.raft_group.campaign();
true
}
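/// Looks up the propose time recorded for the proposal at `(index, term)`,
/// popping older proposal metadata along the way.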
fn find_propose_time(&mut self, index: u64, term: u64) -> Option<Timespec> {
while let Some(meta) = self.proposals.pop(term) {
if meta.index == index && meta.term == term {
return Some(meta.renew_lease_time.unwrap());
}
}
None
}
/// Propose a request.
///
/// Returns `true` if the request has been proposed successfully.
pub fn propose<T, C>(
&mut self,
ctx: &mut PollContext<T, C>,
cb: Callback,
req: RaftCmdRequest,
mut err_resp: RaftCmdResponse,
) -> bool {
if self.pending_remove {
return false;
}
ctx.raft_metrics.propose.all += 1;
let mut is_conf_change = false;
let is_urgent = is_request_urgent(&req);
let policy = self.inspect(&req);
let res = match policy {
Ok(RequestPolicy::ReadLocal) => {
self.read_local(ctx, req, cb);
return false;
}
Ok(RequestPolicy::ReadIndex) => return self.read_index(ctx, req, err_resp, cb),
Ok(RequestPolicy::ProposeNormal) => self.propose_normal(ctx, req),
Ok(RequestPolicy::ProposeTransferLeader) => {
return self.propose_transfer_leader(ctx, req, cb);
}
Ok(RequestPolicy::ProposeConfChange) => {
is_conf_change = true;
self.propose_conf_change(ctx, &req)
}
Err(e) => Err(e),
};
match res {
Err(e) => {
cmd_resp::bind_error(&mut err_resp, e);
cb.invoke_with_response(err_resp);
false
}
Ok(idx) => {
if is_urgent {
self.last_urgent_proposal_idx = idx;
// Flush eagerly so that the urgent proposal is applied on all nodes as soon as
// possible.
self.raft_group.skip_bcast_commit(false);
}
let meta = ProposalMeta {
index: idx,
term: self.term(),
renew_lease_time: None,
};
self.post_propose(meta, is_conf_change, cb);
true
}
}
}
fn post_propose(&mut self, mut meta: ProposalMeta, is_conf_change: bool, cb: Callback) {
// Try to renew leader lease on every consistent read/write request.
meta.renew_lease_time = Some(monotonic_raw_now());
let p = Proposal::new(is_conf_change, meta.index, meta.term, cb);
self.apply_proposals.push(p);
self.proposals.push(meta);
}
/// Counts the number of healthy nodes.
/// A node is healthy when
/// 1. it's the leader of the Raft group, which has the latest logs;
/// 2. it's a follower, and it does not lag far behind the leader.
/// If a snapshot is needed between it and the Raft leader, it's not healthy, since
/// it cannot work as a node in the quorum to receive replicated logs from the leader.
fn count_healthy_node<'a, I>(&self, progress: I) -> usize
where
I: Iterator<Item = &'a Progress>,
{
let mut healthy = 0;
for pr in progress {
if pr.matched >= self.get_store().truncated_index() {
healthy += 1;
}
}
healthy
}
/// Validates the `ConfChange` request and checks whether it's safe to
/// propose the specified conf change request.
/// It's safe iff a quorum of the Raft group is still healthy
/// right after the conf change is applied.
/// Define the total number of nodes in the current Raft cluster to be `total`.
/// To ensure the above safety, if the cmd is
/// 1. an `AddNode` request,
/// then at least `(total + 1)/2 + 1` nodes need to be up to date for now;
/// 2. a `RemoveNode` request,
/// then at least `(total - 1)/2 + 1` other nodes (excluding the node about to be removed)
/// need to be up to date for now. If `allow_remove_leader` is false, then
/// the peer to be removed should not be the leader.
fn check_conf_change<T, C>(
&self,
ctx: &mut PollContext<T, C>,
cmd: &RaftCmdRequest,
) -> Result<()> {
let change_peer = apply::get_change_peer_cmd(cmd).unwrap();
let change_type = change_peer.get_change_type();
let peer = change_peer.get_peer();
// Check the request itself is valid or not.
match (change_type, peer.get_is_learner()) {
(ConfChangeType::AddNode, true) | (ConfChangeType::AddLearnerNode, false) => {
warn!(
"invalid conf change request";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"request" => ?change_peer,
);
return Err(box_err!("invalid conf change request"));
}
_ => {}
}
if change_type == ConfChangeType::RemoveNode
&& !ctx.cfg.allow_remove_leader
&& peer.get_id() == self.peer_id()
{
warn!(
"rejects remove leader request";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"request" => ?change_peer,
);
return Err(box_err!("ignore remove leader"));
}
let status = self.raft_group.status_ref();
let total = status.progress.unwrap().voters().len();
if total == 1 {
// It's always safe if there is only one node in the cluster.
return Ok(());
}
let mut progress = status.progress.unwrap().clone();
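// Simulate the conf change on a cloned progress set and count how many voters
// would still be healthy afterwards.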
match change_type {
ConfChangeType::AddNode => {
if let Err(raft::Error::NotExists(_, _)) = progress.promote_learner(peer.get_id()) {
let _ = progress.insert_voter(peer.get_id(), Progress::default());
}
}
ConfChangeType::RemoveNode => {
progress.remove(peer.get_id());
}
ConfChangeType::AddLearnerNode => {
return Ok(());
}
}
let healthy = self.count_healthy_node(progress.voters().values());
let quorum_after_change = raft::quorum(progress.voters().len());
if healthy >= quorum_after_change {
return Ok(());
}
PEER_ADMIN_CMD_COUNTER_VEC
.with_label_values(&["conf_change", "reject_unsafe"])
.inc();
info!(
"rejects unsafe conf change request";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"request" => ?change_peer,
"total" => total,
"healthy" => healthy,
"quorum_after_change" => quorum_after_change,
);
Err(box_err!(
"unsafe to perform conf change {:?}, total {}, healthy {}, quorum after \
change {}",
change_peer,
total,
healthy,
quorum_after_change
))
}
fn transfer_leader(&mut self, peer: &metapb::Peer) {
info!(
"transfer leader";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"peer" => ?peer,
);
self.raft_group.transfer_leader(peer.get_id());
}
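/// Returns `true` if it is currently safe to transfer leadership to `peer`:
/// the target must be a voter, no voter may be waiting for a snapshot, the
/// config must not have changed recently, and the target must not lag too far behind.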
fn ready_to_transfer_leader<T, C>(
&self,
ctx: &mut PollContext<T, C>,
peer: &metapb::Peer,
) -> bool {
let peer_id = peer.get_id();
let status = self.raft_group.status_ref();
let progress = status.progress.unwrap();
if !progress.voters().contains_key(&peer_id) {
return false;
}
for progress in progress.voters().values() {
if progress.state == ProgressState::Snapshot {
return false;
}
}
// Checks whether it's safe to transfer leadership now.
if duration_to_sec(self.recent_conf_change_time.elapsed())
< ctx.cfg.raft_reject_transfer_leader_duration.as_secs() as f64
{
debug!(
"reject transfer leader due to the region was config changed recently";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"peer" => ?peer,
);
return false;
}
let last_index = self.get_store().last_index();
last_index <= progress.voters()[&peer_id].matched + ctx.cfg.leader_transfer_max_log_lag
}
fn read_local<T, C>(&mut self, ctx: &mut PollContext<T, C>, req: RaftCmdRequest, cb: Callback) {
ctx.raft_metrics.propose.local_read += 1;
cb.invoke_read(self.handle_read(ctx, req, false, None))
}
fn pre_read_index(&self) -> Result<()> {
fail_point!(
"before_propose_readindex",
|s| if s.map_or(true, |s| s.parse().unwrap_or(true)) {
Ok(())
} else {
Err(box_err!("can not read due to injected failure"))
}
);
// See more in ready_to_handle_read().
if self.is_splitting() {
return Err(box_err!("can not read index due to split"));
}
if self.is_merging() {
return Err(box_err!("can not read index due to merge"));
}
Ok(())
}
// Returns a boolean indicating whether the `read` is proposed or not.
// It won't be proposed in these cases:
// 1. The region is merging or splitting;
// 2. The message is stale and dropped by the Raft group internally;
// 3. There is already a read request proposed within the current lease.
fn read_index<T, C>(
&mut self,
poll_ctx: &mut PollContext<T, C>,
req: RaftCmdRequest,
mut err_resp: RaftCmdResponse,
cb: Callback,
) -> bool {
if let Err(e) = self.pre_read_index() {
debug!(
"prevents unsafe read index";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
poll_ctx.raft_metrics.propose.unsafe_read_index += 1;
cmd_resp::bind_error(&mut err_resp, e);
cb.invoke_with_response(err_resp);
return false;
}
poll_ctx.raft_metrics.propose.read_index += 1;
let renew_lease_time = monotonic_raw_now();
if self.is_leader() {
match self.inspect_lease() {
// Combining the new read request with the previous one is OK even if the lease has
// expired, because in this case the previous read index must have been sent out with a
// valid lease rather than a suspect lease. So there must be no pending transfer-leader
// proposals before or after the previous read index, and the lease can be renewed when
// heartbeat responses are received.
LeaseState::Valid | LeaseState::Expired => {
if let Some(read) = self.pending_reads.reads.back_mut() {
let max_lease = poll_ctx.cfg.raft_store_max_leader_lease();
if read.renew_lease_time + max_lease > renew_lease_time {
read.cmds.push((req, cb));
return false;
}
}
}
// If the current lease is suspect, new read requests can't be appended to
// `pending_reads`, because if the leadership is transferred, the latest read could
// be dirty.
_ => {}
}
}
// Should we call pre_propose here?
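// Capture the read counters before calling `read_index` so we can detect below
// whether raft silently drops the read index message.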
let last_pending_read_count = self.raft_group.raft.pending_read_count();
let last_ready_read_count = self.raft_group.raft.ready_read_count();
let id = self.pending_reads.next_id();
let ctx = id.to_ne_bytes();
self.raft_group.read_index(ctx.to_vec());
let pending_read_count = self.raft_group.raft.pending_read_count();
let ready_read_count = self.raft_group.raft.ready_read_count();
if pending_read_count == last_pending_read_count
&& ready_read_count == last_ready_read_count
&& self.is_leader()
{
// The message was dropped silently and can't be handled anymore.
apply::notify_stale_req(self.term(), cb);
return false;
}
let mut cmds = MustConsumeVec::with_capacity("callback of index read", 1);
cmds.push((req, cb));
self.pending_reads.reads.push_back(ReadIndexRequest {
id,
cmds,
renew_lease_time,
read_index: None,
});
// TimeoutNow has been sent out, so we need to propose explicitly to
// update leader lease.
if self.leader_lease.inspect(Some(renew_lease_time)) == LeaseState::Suspect {
let req = RaftCmdRequest::new();
if let Ok(index) = self.propose_normal(poll_ctx, req) {
let meta = ProposalMeta {
index,
term: self.term(),
renew_lease_time: Some(renew_lease_time),
};
self.post_propose(meta, false, Callback::None);
}
}
true
}
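/// Returns the smallest matched index among all tracked peers, or 0 if
/// progress information is unavailable.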
pub fn get_min_progress(&self) -> u64 {
self.raft_group.status_ref().progress.map_or(0, |p| {
p.iter().map(|(_, pr)| pr.matched).min().unwrap_or_default()
})
}
fn pre_propose_prepare_merge<T, C>(
&self,
ctx: &mut PollContext<T, C>,
req: &mut RaftCmdRequest,
) -> Result<()> {
let last_index = self.raft_group.raft.raft_log.last_index();
let min_progress = self.get_min_progress();
let min_index = min_progress + 1;
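// Entries in (min_progress, last_index] have not been replicated to every peer
// yet; scan them to make sure the log gap contains nothing that would make the
// merge unsafe.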
if min_progress == 0 || last_index - min_progress > ctx.cfg.merge_max_log_gap {
return Err(box_err!(
"log gap ({}, {}] is too large, skip merge",
min_progress,
last_index
));
}
let mut entry_size = 0;
for entry in self.raft_group.raft.raft_log.entries(min_index, NO_LIMIT)? {
entry_size += entry.get_data().len();
if entry.get_entry_type() == EntryType::EntryConfChange {
return Err(box_err!("log gap contains conf change, skip merging."));
}
if entry.get_data().is_empty() {
continue;
}
let cmd: RaftCmdRequest =
util::parse_data_at(entry.get_data(), entry.get_index(), &self.tag);
if !cmd.has_admin_request() {
continue;
}
let cmd_type = cmd.get_admin_request().get_cmd_type();
match cmd_type {
AdminCmdType::TransferLeader
| AdminCmdType::ComputeHash
| AdminCmdType::VerifyHash
| AdminCmdType::InvalidAdmin => continue,
_ => {}
}
// Any command that can change epoch or log gap should be rejected.
return Err(box_err!(
"log gap contains admin request {:?}, skip merging.",
cmd_type
));
}
if entry_size as f64 > ctx.cfg.raft_entry_max_size.0 as f64 * 0.9 {
return Err(box_err!(
"log gap size exceed entry size limit, skip merging."
));
}
req.mut_admin_request()
.mut_prepare_merge()
.set_min_index(min_index);
Ok(())
}
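/// Runs the coprocessor pre-propose hooks and derives the `ProposalContext`
/// flags (SYNC_LOG, SPLIT, PREPARE_MERGE) for the request.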
fn pre_propose<T, C>(
&self,
poll_ctx: &mut PollContext<T, C>,
req: &mut RaftCmdRequest,
) -> Result<ProposalContext> {
poll_ctx.coprocessor_host.pre_propose(self.region(), req)?;
let mut ctx = ProposalContext::empty();
if get_sync_log_from_request(req) {
ctx.insert(ProposalContext::SYNC_LOG);
}
if !req.has_admin_request() {
return Ok(ctx);
}
match req.get_admin_request().get_cmd_type() {
AdminCmdType::Split | AdminCmdType::BatchSplit => ctx.insert(ProposalContext::SPLIT),
_ => {}
}
if req.get_admin_request().has_prepare_merge() {
self.pre_propose_prepare_merge(poll_ctx, req)?;
ctx.insert(ProposalContext::PREPARE_MERGE);
}
Ok(ctx)
}
fn propose_normal<T, C>(
&mut self,
poll_ctx: &mut PollContext<T, C>,
mut req: RaftCmdRequest,
) -> Result<u64> {
if self.pending_merge_state.is_some()
&& req.get_admin_request().get_cmd_type() != AdminCmdType::RollbackMerge
{
return Err(box_err!("peer in merging mode, can't do proposal."));
}
poll_ctx.raft_metrics.propose.normal += 1;
// TODO: validate request for unexpected changes.
let ctx = match self.pre_propose(poll_ctx, &mut req) {
Ok(ctx) => ctx,
Err(e) => {
warn!(
"skip proposal";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
return Err(e);
}
};
let data = req.write_to_bytes()?;
// TODO: use local histogram metrics
PEER_PROPOSE_LOG_SIZE_HISTOGRAM.observe(data.len() as f64);
if data.len() as u64 > poll_ctx.cfg.raft_entry_max_size.0 {
error!(
"entry is too large";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"size" => data.len(),
);
return Err(Error::RaftEntryTooLarge(self.region_id, data.len() as u64));
}
let propose_index = self.next_proposal_index();
self.raft_group.propose(ctx.to_vec(), data)?;
if self.next_proposal_index() == propose_index {
// The message was dropped silently; this is usually due to leader absence
// or an ongoing leader transfer. Both cases can be treated as a NotLeader error.
return Err(Error::NotLeader(self.region_id, None));
}
Ok(propose_index)
}
// Returns true if the transfer leader request is accepted.
fn propose_transfer_leader<T, C>(
&mut self,
ctx: &mut PollContext<T, C>,
req: RaftCmdRequest,
cb: Callback,
) -> bool {
ctx.raft_metrics.propose.transfer_leader += 1;
let transfer_leader = get_transfer_leader_cmd(&req).unwrap();
let peer = transfer_leader.get_peer();
let transferred = if self.ready_to_transfer_leader(ctx, peer) {
self.transfer_leader(peer);
true
} else {
info!(
"transfer leader message ignored directly";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"message" => ?req,
);
false
};
// The transfer leader command doesn't need to be replicated or applied, so we
// return immediately. Note that this command may fail; it can be viewed as merely advisory.
cb.invoke_with_response(make_transfer_leader_response());
transferred
}
// Fails in such cases:
// 1. A pending conf change has not been applied yet;
// 2. Removing the leader is not allowed in the configuration;
// 3. The conf change makes the raft group not healthy;
// 4. The conf change is dropped by raft group internally.
fn propose_conf_change<T, C>(
&mut self,
ctx: &mut PollContext<T, C>,
req: &RaftCmdRequest,
) -> Result<u64> {
if self.pending_merge_state.is_some() {
return Err(box_err!("peer in merging mode, can't do proposal."));
}
if self.raft_group.raft.pending_conf_index > self.get_store().applied_index() {
info!(
"there is a pending conf change, try later";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
);
return Err(box_err!(
"{} there is a pending conf change, try later",
self.tag
));
}
self.check_conf_change(ctx, req)?;
ctx.raft_metrics.propose.conf_change += 1;
let data = req.write_to_bytes()?;
// TODO: use local histogram metrics
PEER_PROPOSE_LOG_SIZE_HISTOGRAM.observe(data.len() as f64);
let change_peer = apply::get_change_peer_cmd(req).unwrap();
let mut cc = eraftpb::ConfChange::new();
cc.set_change_type(change_peer.get_change_type());
cc.set_node_id(change_peer.get_peer().get_id());
cc.set_context(data);
info!(
"propose conf change peer";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"change_type" => ?cc.get_change_type(),
"change_peer" => cc.get_node_id(),
);
let propose_index = self.next_proposal_index();
self.raft_group
.propose_conf_change(ProposalContext::SYNC_LOG.to_vec(), cc)?;
if self.next_proposal_index() == propose_index {
// The message was dropped silently; this is usually due to leader absence
// or an ongoing leader transfer. Both cases can be treated as a NotLeader error.
return Err(Error::NotLeader(self.region_id, None));
}
Ok(propose_index)
}
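/// Executes a read-only request against the local KV engine and binds the
/// current term to the response.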
fn handle_read<T, C>(
&mut self,
ctx: &mut PollContext<T, C>,
req: RaftCmdRequest,
check_epoch: bool,
read_index: Option<u64>,
) -> ReadResponse {
let mut resp = ReadExecutor::new(
ctx.engines.kv.clone(),
check_epoch,
false, /* we don't need snapshot time */
)
.execute(&req, self.region(), read_index);
cmd_resp::bind_term(&mut resp.response, self.term());
resp
}
pub fn term(&self) -> u64 {
self.raft_group.raft.term
}
pub fn stop(&mut self) {
self.mut_store().cancel_applying_snap();
for mut read in self.pending_reads.reads.drain(..) {
read.cmds.clear();
}
}
}
impl Peer {
pub fn insert_peer_cache(&mut self, peer: metapb::Peer) {
self.peer_cache.borrow_mut().insert(peer.get_id(), peer);
}
pub fn remove_peer_from_cache(&mut self, peer_id: u64) {
self.peer_cache.borrow_mut().remove(&peer_id);
}
pub fn get_peer_from_cache(&self, peer_id: u64) -> Option<metapb::Peer> {
if let Some(peer) = self.peer_cache.borrow().get(&peer_id) {
return Some(peer.clone());
}
// Try to find it in the region; if found, store it in the cache.
for peer in self.region().get_peers() {
if peer.get_id() == peer_id {
self.peer_cache.borrow_mut().insert(peer_id, peer.clone());
return Some(peer.clone());
}
}
None
}
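/// Reports this peer's latest region and statistics to PD through the pd worker.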
pub fn heartbeat_pd<T, C>(&mut self, ctx: &PollContext<T, C>) {
let task = PdTask::Heartbeat {
region: self.region().clone(),
peer: self.peer.clone(),
down_peers: self.collect_down_peers(ctx.cfg.max_peer_down_duration.0),
pending_peers: self.collect_pending_peers(),
written_bytes: self.peer_stat.written_bytes,
written_keys: self.peer_stat.written_keys,
approximate_size: self.approximate_size,
approximate_keys: self.approximate_keys,
};
if let Err(e) = ctx.pd_scheduler.schedule(task) {
error!(
"failed to notify pd";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"err" => ?e,
);
}
}
fn send_raft_message<T: Transport>(
&mut self,
msg: eraftpb::Message,
trans: &mut T,
) -> Result<()> {
let mut send_msg = RaftMessage::new();
send_msg.set_region_id(self.region_id);
// set current epoch
send_msg.set_region_epoch(self.region().get_region_epoch().clone());
let from_peer = self.peer.clone();
let to_peer = match self.get_peer_from_cache(msg.get_to()) {
Some(p) => p,
None => {
return Err(box_err!(
"failed to look up recipient peer {} in region {}",
msg.get_to(),
self.region_id
));
}
};
let to_peer_id = to_peer.get_id();
let to_store_id = to_peer.get_store_id();
let msg_type = msg.get_msg_type();
debug!(
"send raft msg";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"msg_type" => ?msg_type,
"msg_size" => msg.compute_size(),
"from" => from_peer.get_id(),
"to" => to_peer_id,
);
send_msg.set_from_peer(from_peer);
send_msg.set_to_peer(to_peer);
// There could be two cases:
// 1. The target peer already exists but has not established communication with the leader yet.
// 2. The target peer was newly added due to a member change or region split, but it has not
// been created yet.
// In both cases, the region's start key and end key are attached to RequestVote and
// Heartbeat messages so that the target peer's store can decide whether to create a new peer
// when receiving these messages, or just wait for a pending region split to be performed
// later.
if self.get_store().is_initialized() && is_initial_msg(&msg) {
let region = self.region();
send_msg.set_start_key(region.get_start_key().to_vec());
send_msg.set_end_key(region.get_end_key().to_vec());
}
send_msg.set_message(msg);
if let Err(e) = trans.send(send_msg) {
warn!(
"failed to send msg to other peer";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"target_peer_id" => to_peer_id,
"target_store_id" => to_store_id,
"err" => ?e,
);
if to_peer_id == self.leader_id() {
self.leader_unreachable = true;
}
// unreachable store
self.raft_group.report_unreachable(to_peer_id);
if msg_type == eraftpb::MessageType::MsgSnapshot {
self.raft_group
.report_snapshot(to_peer_id, SnapshotStatus::Failure);
}
}
Ok(())
}
}
/// `RequestPolicy` decides how we handle a request.
#[derive(Clone, PartialEq, Debug)]
pub enum RequestPolicy {
// Handle the read request directly without dispatch.
ReadLocal,
// Handle the read request via raft's SafeReadIndex mechanism.
ReadIndex,
ProposeNormal,
ProposeTransferLeader,
ProposeConfChange,
}
/// `RequestInspector` makes `RequestPolicy` for requests.
pub trait RequestInspector {
/// Has the current term been applied?
fn has_applied_to_current_term(&mut self) -> bool;
/// Inspects its lease.
fn inspect_lease(&mut self) -> LeaseState;
/// Inspect a request, return a policy that tells us how to
/// handle the request.
fn inspect(&mut self, req: &RaftCmdRequest) -> Result<RequestPolicy> {
if req.has_admin_request() {
if apply::get_change_peer_cmd(req).is_some() {
return Ok(RequestPolicy::ProposeConfChange);
}
if get_transfer_leader_cmd(req).is_some() {
return Ok(RequestPolicy::ProposeTransferLeader);
}
return Ok(RequestPolicy::ProposeNormal);
}
let mut has_read = false;
let mut has_write = false;
for r in req.get_requests() {
match r.get_cmd_type() {
CmdType::Get | CmdType::Snap | CmdType::ReadIndex => has_read = true,
CmdType::Delete | CmdType::Put | CmdType::DeleteRange | CmdType::IngestSST => {
has_write = true
}
CmdType::Prewrite | CmdType::Invalid => {
return Err(box_err!(
"invalid cmd type {:?}, message maybe corrupted",
r.get_cmd_type()
));
}
}
if has_read && has_write {
return Err(box_err!("read and write can't be mixed in one batch."));
}
}
if has_write {
return Ok(RequestPolicy::ProposeNormal);
}
if req.get_header().get_read_quorum() {
return Ok(RequestPolicy::ReadIndex);
}
// If the applied index's term differs from the current raft term, a leader transfer
// must have happened; if we read locally, we may read a stale value.
if !self.has_applied_to_current_term() {
return Ok(RequestPolicy::ReadIndex);
}
// A local read should be performed if and only if the leader is within its lease.
// None for now.
match self.inspect_lease() {
LeaseState::Valid => Ok(RequestPolicy::ReadLocal),
LeaseState::Expired | LeaseState::Suspect => {
// Perform a consistent read to Raft quorum and try to renew the leader lease.
Ok(RequestPolicy::ReadIndex)
}
}
}
}
impl RequestInspector for Peer {
fn has_applied_to_current_term(&mut self) -> bool {
self.get_store().applied_index_term() == self.term()
}
fn inspect_lease(&mut self) -> LeaseState {
if !self.raft_group.raft.in_lease() {
return LeaseState::Suspect;
}
// None means now.
let state = self.leader_lease.inspect(None);
if LeaseState::Expired == state {
debug!(
"leader lease is expired";
"region_id" => self.region_id,
"peer_id" => self.peer.get_id(),
"lease" => ?self.leader_lease,
);
// The lease is expired, call `expire` explicitly.
self.leader_lease.expire();
}
state
}
}
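/// Executes read-only commands (Get, Snap, ReadIndex) directly against the KV
/// engine, optionally checking the region epoch first.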
#[derive(Debug)]
pub struct ReadExecutor {
check_epoch: bool,
engine: Arc<DB>,
snapshot: Option<SyncSnapshot>,
snapshot_time: Option<Timespec>,
need_snapshot_time: bool,
}
impl ReadExecutor {
pub fn new(engine: Arc<DB>, check_epoch: bool, need_snapshot_time: bool) -> Self {
ReadExecutor {
check_epoch,
engine,
snapshot: None,
snapshot_time: None,
need_snapshot_time,
}
}
#[inline]
pub fn snapshot_time(&mut self) -> Option<Timespec> {
self.maybe_update_snapshot();
self.snapshot_time
}
#[inline]
fn maybe_update_snapshot(&mut self) {
if self.snapshot.is_some() {
return;
}
let engine = self.engine.clone();
self.snapshot = Some(Snapshot::new(engine).into_sync());
// Read the current timespec after taking the snapshot, in case we do not
// expire the lease in time.
atomic::fence(atomic::Ordering::Release);
if self.need_snapshot_time {
self.snapshot_time = Some(monotonic_raw_now());
}
}
fn do_get(&self, req: &Request, region: &metapb::Region) -> Result<Response> {
// TODO: the get_get looks weird, maybe we should figure out a better name later.
let key = req.get_get().get_key();
// The region key range has no data prefix, so we must use the original key to check.
util::check_key_in_region(key, region)?;
let mut resp = Response::new();
let snapshot = self.snapshot.as_ref().unwrap();
let res = if !req.get_get().get_cf().is_empty() {
let cf = req.get_get().get_cf();
// TODO: check whether cf exists or not.
snapshot
.get_value_cf(cf, &keys::data_key(key))
.unwrap_or_else(|e| {
panic!(
"[region {}] failed to get {} with cf {}: {:?}",
region.get_id(),
escape(key),
cf,
e
)
})
} else {
snapshot
.get_value(&keys::data_key(key))
.unwrap_or_else(|e| {
panic!(
"[region {}] failed to get {}: {:?}",
region.get_id(),
escape(key),
e
)
})
};
if let Some(res) = res {
resp.mut_get().set_value(res.to_vec());
}
Ok(resp)
}
pub fn execute(
&mut self,
msg: &RaftCmdRequest,
region: &metapb::Region,
read_index: Option<u64>,
) -> ReadResponse {
if self.check_epoch {
if let Err(e) = check_region_epoch(msg, region, true) {
debug!(
"epoch not match";
"region_id" => region.get_id(),
"err" => ?e,
);
return ReadResponse {
response: cmd_resp::new_error(e),
snapshot: None,
};
}
}
self.maybe_update_snapshot();
let mut need_snapshot = false;
let requests = msg.get_requests();
let mut responses = Vec::with_capacity(requests.len());
for req in requests {
let cmd_type = req.get_cmd_type();
let mut resp = match cmd_type {
CmdType::Get => match self.do_get(req, region) {
Ok(resp) => resp,
Err(e) => {
error!(
"failed to execute get command";
"region_id" => region.get_id(),
"err" => ?e,
);
return ReadResponse {
response: cmd_resp::new_error(e),
snapshot: None,
};
}
},
CmdType::Snap => {
need_snapshot = true;
raft_cmdpb::Response::new()
}
CmdType::ReadIndex => {
let mut resp = raft_cmdpb::Response::new();
if let Some(read_index) = read_index {
let mut res = ReadIndexResponse::new();
res.set_read_index(read_index);
resp.set_read_index(res);
} else {
panic!("[region {}] can not get readindex", region.get_id(),);
}
resp
}
CmdType::Prewrite
| CmdType::Put
| CmdType::Delete
| CmdType::DeleteRange
| CmdType::IngestSST
| CmdType::Invalid => unreachable!(),
};
resp.set_cmd_type(cmd_type);
responses.push(resp);
}
let mut response = RaftCmdResponse::new();
response.set_responses(protobuf::RepeatedField::from_vec(responses));
let snapshot = if need_snapshot {
Some(RegionSnapshot::from_snapshot(
self.snapshot.clone().unwrap(),
region.to_owned(),
))
} else {
None
};
ReadResponse { response, snapshot }
}
}
fn get_transfer_leader_cmd(msg: &RaftCmdRequest) -> Option<&TransferLeaderRequest> {
if !msg.has_admin_request() {
return None;
}
let req = msg.get_admin_request();
if !req.has_transfer_leader() {
return None;
}
Some(req.get_transfer_leader())
}
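/// Returns whether the entry for this request must be persisted with `sync_log`:
/// admin commands that change membership, split, or merge always require it;
/// otherwise the request header decides.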
fn get_sync_log_from_request(msg: &RaftCmdRequest) -> bool {
if msg.has_admin_request() {
let req = msg.get_admin_request();
return match req.get_cmd_type() {
AdminCmdType::ChangePeer
| AdminCmdType::Split
| AdminCmdType::BatchSplit
| AdminCmdType::PrepareMerge
| AdminCmdType::CommitMerge
| AdminCmdType::RollbackMerge => true,
_ => false,
};
}
msg.get_header().get_sync_log()
}
/// We enable follower lazy commit to get better performance.
/// But it may not be appropriate for some requests. This function
/// checks whether the request should be committed on all followers
/// as soon as possible.
fn is_request_urgent(req: &RaftCmdRequest) -> bool {
if !req.has_admin_request() {
return false;
}
match req.get_admin_request().get_cmd_type() {
AdminCmdType::Split
| AdminCmdType::BatchSplit
| AdminCmdType::ChangePeer
| AdminCmdType::ComputeHash
| AdminCmdType::VerifyHash
| AdminCmdType::PrepareMerge
| AdminCmdType::CommitMerge
| AdminCmdType::RollbackMerge => true,
_ => false,
}
}
fn make_transfer_leader_response() -> RaftCmdResponse {
let mut response = AdminResponse::new();
response.set_cmd_type(AdminCmdType::TransferLeader);
response.set_transfer_leader(TransferLeaderResponse::new());
let mut resp = RaftCmdResponse::new();
resp.set_admin_response(response);
resp
}
#[cfg(test)]
mod tests {
use protobuf::ProtobufEnum;
use super::*;
#[test]
fn test_sync_log() {
let white_list = [
AdminCmdType::InvalidAdmin,
AdminCmdType::CompactLog,
AdminCmdType::TransferLeader,
AdminCmdType::ComputeHash,
AdminCmdType::VerifyHash,
];
for tp in AdminCmdType::values() {
let mut msg = RaftCmdRequest::new();
msg.mut_admin_request().set_cmd_type(*tp);
assert_eq!(
get_sync_log_from_request(&msg),
!white_list.contains(tp),
"{:?}",
tp
);
}
}
#[test]
fn test_urgent() {
let urgent_types = [
AdminCmdType::Split,
AdminCmdType::BatchSplit,
AdminCmdType::ChangePeer,
AdminCmdType::ComputeHash,
AdminCmdType::VerifyHash,
AdminCmdType::PrepareMerge,
AdminCmdType::CommitMerge,
AdminCmdType::RollbackMerge,
];
for tp in AdminCmdType::values() {
let mut req = RaftCmdRequest::new();
req.mut_admin_request().set_cmd_type(*tp);
assert_eq!(
is_request_urgent(&req),
urgent_types.contains(tp),
"{:?}",
tp
);
}
assert!(!is_request_urgent(&RaftCmdRequest::new()));
}
#[test]
fn test_entry_context() {
let tbl: Vec<&[ProposalContext]> = vec![
&[ProposalContext::SPLIT],
&[ProposalContext::SYNC_LOG],
&[ProposalContext::PREPARE_MERGE],
&[ProposalContext::SPLIT, ProposalContext::SYNC_LOG],
&[ProposalContext::PREPARE_MERGE, ProposalContext::SYNC_LOG],
];
for flags in tbl {
let mut ctx = ProposalContext::empty();
for f in flags {
ctx.insert(*f);
}
let ser = ctx.to_vec();
let de = ProposalContext::from_bytes(&ser);
for f in flags {
assert!(de.contains(*f), "{:?}", de);
}
}
}
#[allow(clippy::useless_vec)]
#[test]
fn test_request_inspector() {
struct DummyInspector {
applied_to_index_term: bool,
lease_state: LeaseState,
}
impl RequestInspector for DummyInspector {
fn has_applied_to_current_term(&mut self) -> bool {
self.applied_to_index_term
}
fn inspect_lease(&mut self) -> LeaseState {
self.lease_state
}
}
let mut table = vec![];
// Ok(_)
let mut req = RaftCmdRequest::new();
let mut admin_req = raft_cmdpb::AdminRequest::new();
req.set_admin_request(admin_req.clone());
table.push((req.clone(), RequestPolicy::ProposeNormal));
admin_req.set_change_peer(raft_cmdpb::ChangePeerRequest::new());
req.set_admin_request(admin_req.clone());
table.push((req.clone(), RequestPolicy::ProposeConfChange));
admin_req.clear_change_peer();
admin_req.set_transfer_leader(raft_cmdpb::TransferLeaderRequest::new());
req.set_admin_request(admin_req.clone());
table.push((req.clone(), RequestPolicy::ProposeTransferLeader));
admin_req.clear_transfer_leader();
req.clear_admin_request();
for (op, policy) in vec![
(CmdType::Get, RequestPolicy::ReadLocal),
(CmdType::Snap, RequestPolicy::ReadLocal),
(CmdType::Put, RequestPolicy::ProposeNormal),
(CmdType::Delete, RequestPolicy::ProposeNormal),
(CmdType::DeleteRange, RequestPolicy::ProposeNormal),
(CmdType::IngestSST, RequestPolicy::ProposeNormal),
] {
let mut request = raft_cmdpb::Request::new();
request.set_cmd_type(op);
req.set_requests(vec![request].into());
table.push((req.clone(), policy));
}
for applied_to_index_term in vec![true, false] {
for lease_state in vec![LeaseState::Expired, LeaseState::Suspect, LeaseState::Valid] {
for (req, mut policy) in table.clone() {
let mut inspector = DummyInspector {
applied_to_index_term,
lease_state,
};
                    // Leader can not read local as long as
                    // it has not applied to its term or it does not have a valid lease.
if policy == RequestPolicy::ReadLocal
&& (!applied_to_index_term || LeaseState::Valid != inspector.lease_state)
{
policy = RequestPolicy::ReadIndex;
}
assert_eq!(inspector.inspect(&req).unwrap(), policy);
}
}
}
// Read quorum.
let mut request = raft_cmdpb::Request::new();
request.set_cmd_type(CmdType::Snap);
req.set_requests(vec![request].into());
req.mut_header().set_read_quorum(true);
let mut inspector = DummyInspector {
applied_to_index_term: true,
lease_state: LeaseState::Valid,
};
assert_eq!(inspector.inspect(&req).unwrap(), RequestPolicy::ReadIndex);
req.clear_header();
// Err(_)
let mut err_table = vec![];
for op in vec![CmdType::Prewrite, CmdType::Invalid] {
let mut request = raft_cmdpb::Request::new();
request.set_cmd_type(op);
req.set_requests(vec![request].into());
err_table.push(req.clone());
}
let mut snap = raft_cmdpb::Request::new();
snap.set_cmd_type(CmdType::Snap);
let mut put = raft_cmdpb::Request::new();
put.set_cmd_type(CmdType::Put);
req.set_requests(vec![snap, put].into());
err_table.push(req.clone());
for req in err_table {
let mut inspector = DummyInspector {
applied_to_index_term: true,
lease_state: LeaseState::Valid,
};
assert!(inspector.inspect(&req).is_err());
}
}
}
| 37.078766 | 103 | 0.550699 |
16cca5cd6cf4dd56b6567412ee4f19f60527391a | 671 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Pair { x: int, y: int }
pub fn main() {
// This just tests whether the vec leaks its members.
let pvec: ~[@Pair] =
~[@Pair{x: 1, y: 2}, @Pair{x: 3, y: 4}, @Pair{x: 5, y: 6}];
}
| 31.952381 | 68 | 0.672131 |
8f57c57719f27b44b45c48e250ccb5bba52fcafb | 5,022 | // c:scatterChart
use super::ScatterStyle;
use super::VaryColors;
use super::AreaChartSeries;
use super::AreaChartSeriesList;
use super::DataLabels;
use super::AxisId;
use structs::Spreadsheet;
use writer::driver::*;
use quick_xml::Reader;
use quick_xml::events::{Event, BytesStart};
use quick_xml::Writer;
use std::io::Cursor;
#[derive(Clone, Default, Debug)]
pub struct ScatterChart {
scatter_style: ScatterStyle,
vary_colors: VaryColors,
area_chart_series_list: AreaChartSeriesList,
data_labels: DataLabels,
axis_id: Vec<AxisId>,
}
impl ScatterChart {
pub fn get_scatter_style(&self)-> &ScatterStyle {
&self.scatter_style
}
pub fn get_scatter_style_mut(&mut self)-> &mut ScatterStyle {
&mut self.scatter_style
}
pub fn set_scatter_style(&mut self, value:ScatterStyle)-> &mut ScatterChart {
self.scatter_style = value;
self
}
pub fn get_vary_colors(&self)-> &VaryColors {
&self.vary_colors
}
pub fn get_vary_colors_mut(&mut self)-> &mut VaryColors {
&mut self.vary_colors
}
pub fn set_vary_colors(&mut self, value:VaryColors)-> &mut ScatterChart {
self.vary_colors = value;
self
}
pub fn get_area_chart_series_list(&self)-> &AreaChartSeriesList {
&self.area_chart_series_list
}
pub fn get_area_chart_series_list_mut(&mut self)-> &mut AreaChartSeriesList {
&mut self.area_chart_series_list
}
pub fn set_area_chart_series_list(&mut self, value:AreaChartSeriesList)-> &mut Self {
self.area_chart_series_list = value;
self
}
pub fn get_data_labels(&self)-> &DataLabels {
&self.data_labels
}
pub fn get_data_labels_mut(&mut self)-> &mut DataLabels {
&mut self.data_labels
}
pub fn set_data_labels(&mut self, value:DataLabels)-> &mut ScatterChart {
self.data_labels = value;
self
}
pub fn get_axis_id(&self)-> &Vec<AxisId> {
&self.axis_id
}
pub fn get_axis_id_mut(&mut self)-> &mut Vec<AxisId> {
&mut self.axis_id
}
pub fn set_axis_id(&mut self, value:Vec<AxisId>)-> &mut ScatterChart {
self.axis_id = value;
self
}
pub fn add_axis_id(&mut self, value:AxisId)-> &mut ScatterChart {
self.axis_id.push(value);
self
}
pub(crate) fn set_attributes<R: std::io::BufRead>(
&mut self,
reader:&mut Reader<R>,
_e:&BytesStart
) {
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => {
match e.name() {
b"c:ser" => {
let mut obj = AreaChartSeries::default();
obj.set_attributes(reader, e);
self.get_area_chart_series_list_mut().add_area_chart_series(obj);
},
b"c:dLbls" => {
self.data_labels.set_attributes(reader, e);
},
_ => (),
}
},
Ok(Event::Empty(ref e)) => {
match e.name() {
b"c:scatterStyle" => {
self.scatter_style.set_attributes(reader, e);
},
b"c:varyColors" => {
self.vary_colors.set_attributes(reader, e);
},
b"c:axId" => {
let mut obj = AxisId::default();
obj.set_attributes(reader, e);
self.add_axis_id(obj);
},
_ => (),
}
},
Ok(Event::End(ref e)) => {
match e.name() {
b"c:scatterChart" => return,
_ => (),
}
},
Ok(Event::Eof) => panic!("Error not find {} end element", "c:scatterChart"),
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (),
}
buf.clear();
}
}
pub(crate) fn write_to(&self, writer: &mut Writer<Cursor<Vec<u8>>>, spreadsheet: &Spreadsheet) {
// c:scatterChart
write_start_tag(writer, "c:scatterChart", vec![], false);
// c:scatterStyle
        self.scatter_style.write_to(writer);
        // c:varyColors
        self.vary_colors.write_to(writer);
// c:ser
for v in self.area_chart_series_list.get_area_chart_series() {
v.write_to(writer, spreadsheet);
}
// c:dLbls
        self.data_labels.write_to(writer);
// c:axId
for v in &self.axis_id {
v.write_to(writer);
}
write_end_tag(writer, "c:scatterChart");
}
}
| 29.368421 | 100 | 0.516527 |
0199381fcbb543b72c08c965d89550cc1886b4ab | 275 | // --force-warn $LINT causes $LINT (which is warn-by-default) to warn
// despite allowing all warnings in module
// compile-flags: --force-warn dead_code
// check-pass
#![allow(warnings)]
fn dead_function() {}
//~^ WARN function `dead_function` is never used
fn main() {}
| 22.916667 | 69 | 0.698182 |
23f45c5546d66babbcfcbeb8e2b732a9e47fd653 | 5,811 | use crate::{BlockOn, LocalSpawn, Spawn, SpawnBlocking, SpawnError, TokioJoinHandle};
use std::rc::Rc;
use {
crate::{JoinHandle, LocalSpawnHandle, SpawnHandle},
futures_task::{FutureObj, LocalFutureObj},
std::future::Future,
tokio::{runtime::Runtime, task::LocalSet},
};
/// An executor that uses a [`tokio::runtime::Runtime`] with the [current thread](tokio::runtime::Builder::new_current_thread)
/// and a [`tokio::task::LocalSet`]. Can spawn `!Send` futures.
///
/// ## Creation of the runtime
///
/// You must use [`TokioCtBuilder`](crate::TokioCtBuilder) to create the executor.
///
/// ```
/// // Make sure to set the `tokio_ct` feature on async_executors.
/// //
/// use
/// {
/// async_executors :: { TokioCt, TokioCtBuilder, LocalSpawnHandleExt, BlockOn } ,
/// tokio :: { runtime::Builder } ,
/// std :: { rc::Rc } ,
/// };
///
/// // You must use the builder. This guarantees that TokioCt is always backed by a single threaded runtime.
/// // You can set other configurations by calling `tokio_builder()` on TokioCtBuilder, so you get
/// // access to the `tokio::runtime::Builder`.
/// //
/// let exec = TokioCtBuilder::new().build().expect( "create tokio runtime" );
///
/// // block_on takes a &self, so if you need to `async move`,
/// // just clone it for use inside the async block.
/// //
/// exec.block_on( async
/// {
/// let not_send = async { let rc = Rc::new(()); };
///
/// // We can spawn !Send futures here.
/// //
/// let join_handle = exec.spawn_handle_local( not_send ).expect( "spawn" );
///
/// join_handle.await;
/// });
///```
///
/// ## Unwind Safety.
///
/// When a future spawned on this wrapper panics, the panic will be caught by tokio in the poll function.
///
/// You must only spawn futures to this API that are unwind safe. Tokio will wrap spawned tasks in
/// [`std::panic::AssertUnwindSafe`] and wrap the poll invocation with [`std::panic::catch_unwind`].
///
/// They reason that this is fine because they require `Send + 'static` on the task. As far
/// as I can tell this is wrong. Unwind safety can be circumvented in several ways even with
/// `Send + 'static` (e.g. `parking_lot::Mutex` is `Send + 'static` but `!UnwindSafe`).
///
/// You should make sure that if your future panics, no code that lives on after the panic,
/// nor any destructors called during the unwind can observe data in an inconsistent state.
///
/// Note: the future running from within `block_on` as opposed to `spawn` does not exhibit this behavior and will panic
/// the current thread.
///
/// Note that these are logic errors, not related to the class of problems that cannot happen
/// in safe rust (memory safety, undefined behavior, unsoundness, data races, ...). See the relevant
/// [catch_unwind RFC](https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md)
/// and its discussion threads for more info as well as the documentation of [std::panic::UnwindSafe]
/// for more information.
///
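/// A minimal sketch of the difference (assuming the `tokio_ct` feature is enabled, as in
/// the example above); the panicking future is purely illustrative:
///
/// ```should_panic
/// use async_executors::TokioCtBuilder;
///
/// let exec = TokioCtBuilder::new().build().expect( "create tokio runtime" );
///
/// // A panic inside the future given to `block_on` unwinds on the calling thread,
/// // unlike a panic in a spawned task, which tokio catches in its poll wrapper.
/// exec.block_on( async { panic!( "propagates to the caller" ) } );
/// ```
///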
//
#[derive(Debug, Clone)]
//
#[cfg_attr(nightly, doc(cfg(feature = "tokio_ct")))]
//
pub struct TokioCt {
pub(crate) exec: Rc<Runtime>,
pub(crate) local: Rc<LocalSet>,
}
impl TokioCt {
/// This is the entry point for this executor. Once this call returns, no remaining tasks shall be polled anymore.
/// However the tasks stay in the executor, so if you make a second call to `block_on` with a new task, the older
/// tasks will start making progress again.
///
    /// For simplicity, it's advised to just create a top-level task that you run through `block_on` and make sure your
/// program is done when it returns.
///
/// See: [tokio::runtime::Runtime::block_on]
///
/// ## Panics
///
/// This function will panic if it is called from an async context, including but not limited to making a nested
/// call. It will also panic if the provided future panics.
pub fn block_on<F: Future>(&self, f: F) -> F::Output {
self.exec.block_on(self.local.run_until(f))
}
}
impl BlockOn for TokioCt {
fn block_on<F: Future>(&self, f: F) -> F::Output {
Self::block_on(self, f)
}
}
impl Spawn for TokioCt {
fn spawn_obj(&self, future: FutureObj<'static, ()>) -> Result<(), SpawnError> {
// We drop the JoinHandle, so the task becomes detached.
//
let _ = self.local.spawn_local(future);
Ok(())
}
}
impl LocalSpawn for TokioCt {
fn spawn_local_obj(&self, future: LocalFutureObj<'static, ()>) -> Result<(), SpawnError> {
// We drop the JoinHandle, so the task becomes detached.
//
let _ = self.local.spawn_local(future);
Ok(())
}
}
impl<Out: 'static + Send> SpawnHandle<Out> for TokioCt {
fn spawn_handle_obj(
&self,
future: FutureObj<'static, Out>,
) -> Result<JoinHandle<Out>, SpawnError> {
Ok(TokioJoinHandle::new(self.exec.spawn(future)).into())
}
}
impl<Out: 'static> LocalSpawnHandle<Out> for TokioCt {
fn spawn_handle_local_obj(
&self,
future: LocalFutureObj<'static, Out>,
) -> Result<JoinHandle<Out>, SpawnError> {
Ok(TokioJoinHandle::new(self.local.spawn_local(future)).into())
}
}
impl<T: Send + 'static> SpawnBlocking<T> for TokioCt {
fn spawn_blocking_obj(
&self,
func: Box<dyn FnOnce() -> T + Send>,
) -> Result<JoinHandle<T>, SpawnError> {
let handle = self.exec.spawn_blocking(func);
Ok(TokioJoinHandle::new(handle).into())
}
}
#[cfg(test)]
//
mod tests {
use super::*;
// It's important that this is not Send, as we allow spawning !Send futures on it.
//
static_assertions::assert_not_impl_any!(TokioCt: Send, Sync);
}
| 35.650307 | 126 | 0.640165 |
9c2f4940fa567bbf1a3fd39d6af884a4e9d79de7 | 195 | struct r {
let b: @mut int;
new(b: @mut int) {
self.b = b;
}
drop { *(self.b) += 1; }
}
fn main() {
let b = @mut 0;
{
let p = some(r(b));
}
assert *b == 1;
} | 12.1875 | 27 | 0.384615 |
e8b81db07067c65751c41f3f7fc8b9a2e731ef2b | 29,560 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Character manipulation.
//!
//! For more details, see ::std_unicode::char (a.k.a. std::char)
#![allow(non_snake_case)]
#![stable(feature = "core_char", since = "1.2.0")]
use char_private::is_printable;
use convert::TryFrom;
use fmt::{self, Write};
use slice;
use str::{from_utf8_unchecked_mut, FromStr};
use iter::FusedIterator;
use mem::transmute;
// UTF-8 ranges and tags for encoding characters
const TAG_CONT: u8 = 0b1000_0000;
const TAG_TWO_B: u8 = 0b1100_0000;
const TAG_THREE_B: u8 = 0b1110_0000;
const TAG_FOUR_B: u8 = 0b1111_0000;
const MAX_ONE_B: u32 = 0x80;
const MAX_TWO_B: u32 = 0x800;
const MAX_THREE_B: u32 = 0x10000;
/*
Lu Uppercase_Letter an uppercase letter
Ll Lowercase_Letter a lowercase letter
Lt Titlecase_Letter a digraphic character, with first part uppercase
Lm Modifier_Letter a modifier letter
Lo Other_Letter other letters, including syllables and ideographs
Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)
Mc Spacing_Mark a spacing combining mark (positive advance width)
Me Enclosing_Mark an enclosing combining mark
Nd Decimal_Number a decimal digit
Nl Letter_Number a letterlike numeric character
No Other_Number a numeric character of other type
Pc Connector_Punctuation a connecting punctuation mark, like a tie
Pd Dash_Punctuation a dash or hyphen punctuation mark
Ps Open_Punctuation an opening punctuation mark (of a pair)
Pe Close_Punctuation a closing punctuation mark (of a pair)
Pi Initial_Punctuation an initial quotation mark
Pf Final_Punctuation a final quotation mark
Po Other_Punctuation a punctuation mark of other type
Sm Math_Symbol a symbol of primarily mathematical use
Sc Currency_Symbol a currency sign
Sk Modifier_Symbol a non-letterlike modifier symbol
So Other_Symbol a symbol of other type
Zs Space_Separator a space character (of various non-zero widths)
Zl Line_Separator U+2028 LINE SEPARATOR only
Zp Paragraph_Separator U+2029 PARAGRAPH SEPARATOR only
Cc Control a C0 or C1 control code
Cf Format a format control character
Cs Surrogate a surrogate code point
Co Private_Use a private-use character
Cn Unassigned a reserved unassigned code point or a noncharacter
*/
/// The highest valid code point a `char` can have.
///
/// A [`char`] is a [Unicode Scalar Value], which means that it is a [Code
/// Point], but only ones within a certain range. `MAX` is the highest valid
/// code point that's a valid [Unicode Scalar Value].
///
/// [`char`]: ../../std/primitive.char.html
/// [Unicode Scalar Value]: http://www.unicode.org/glossary/#unicode_scalar_value
/// [Code Point]: http://www.unicode.org/glossary/#code_point
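///
/// # Examples
///
/// A quick check against `from_u32`, which is defined below:
///
/// ```
/// use std::char;
///
/// assert_eq!(char::from_u32(0x10FFFF), Some(char::MAX));
/// assert_eq!(char::from_u32(0x110000), None);
/// ```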
#[stable(feature = "rust1", since = "1.0.0")]
pub const MAX: char = '\u{10ffff}';
/// Converts a `u32` to a `char`.
///
/// Note that all [`char`]s are valid [`u32`]s, and can be cast to one with
/// [`as`]:
///
/// ```
/// let c = '💯';
/// let i = c as u32;
///
/// assert_eq!(128175, i);
/// ```
///
/// However, the reverse is not true: not all valid [`u32`]s are valid
/// [`char`]s. `from_u32()` will return `None` if the input is not a valid value
/// for a [`char`].
///
/// [`char`]: ../../std/primitive.char.html
/// [`u32`]: ../../std/primitive.u32.html
/// [`as`]: ../../book/first-edition/casting-between-types.html#as
///
/// For an unsafe version of this function which ignores these checks, see
/// [`from_u32_unchecked`].
///
/// [`from_u32_unchecked`]: fn.from_u32_unchecked.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::char;
///
/// let c = char::from_u32(0x2764);
///
/// assert_eq!(Some('❤'), c);
/// ```
///
/// Returning `None` when the input is not a valid [`char`]:
///
/// ```
/// use std::char;
///
/// let c = char::from_u32(0x110000);
///
/// assert_eq!(None, c);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_u32(i: u32) -> Option<char> {
char::try_from(i).ok()
}
/// Converts a `u32` to a `char`, ignoring validity.
///
/// Note that all [`char`]s are valid [`u32`]s, and can be cast to one with
/// [`as`]:
///
/// ```
/// let c = '💯';
/// let i = c as u32;
///
/// assert_eq!(128175, i);
/// ```
///
/// However, the reverse is not true: not all valid [`u32`]s are valid
/// [`char`]s. `from_u32_unchecked()` will ignore this, and blindly cast to
/// [`char`], possibly creating an invalid one.
///
/// [`char`]: ../../std/primitive.char.html
/// [`u32`]: ../../std/primitive.u32.html
/// [`as`]: ../../book/first-edition/casting-between-types.html#as
///
/// # Safety
///
/// This function is unsafe, as it may construct invalid `char` values.
///
/// For a safe version of this function, see the [`from_u32`] function.
///
/// [`from_u32`]: fn.from_u32.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::char;
///
/// let c = unsafe { char::from_u32_unchecked(0x2764) };
///
/// assert_eq!('❤', c);
/// ```
#[inline]
#[stable(feature = "char_from_unchecked", since = "1.5.0")]
pub unsafe fn from_u32_unchecked(i: u32) -> char {
transmute(i)
}
#[stable(feature = "char_convert", since = "1.13.0")]
impl From<char> for u32 {
#[inline]
fn from(c: char) -> Self {
c as u32
}
}
/// Maps a byte in 0x00...0xFF to a `char` whose code point has the same value, in U+0000 to U+00FF.
///
/// Unicode is designed such that this effectively decodes bytes
/// with the character encoding that IANA calls ISO-8859-1.
/// This encoding is compatible with ASCII.
///
/// Note that this is different from ISO/IEC 8859-1 a.k.a. ISO 8859-1 (with one less hyphen),
/// which leaves some "blanks", byte values that are not assigned to any character.
/// ISO-8859-1 (the IANA one) assigns them to the C0 and C1 control codes.
///
/// Note that this is *also* different from Windows-1252 a.k.a. code page 1252,
/// which is a superset of ISO/IEC 8859-1 that assigns some (not all!) blanks
/// to punctuation and various Latin characters.
///
/// To confuse things further, [on the Web](https://encoding.spec.whatwg.org/)
/// `ascii`, `iso-8859-1`, and `windows-1252` are all aliases
/// for a superset of Windows-1252 that fills the remaining blanks with corresponding
/// C0 and C1 control codes.
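///
/// # Examples
///
/// A byte in this range maps to the code point with the same numeric value; for
/// instance `0xE9` becomes U+00E9 (`'é'`):
///
/// ```
/// let c = char::from(0xE9u8);
///
/// assert_eq!('é', c);
/// ```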
#[stable(feature = "char_convert", since = "1.13.0")]
impl From<u8> for char {
#[inline]
fn from(i: u8) -> Self {
i as char
}
}
/// An error which can be returned when parsing a char.
#[stable(feature = "char_from_str", since = "1.20.0")]
#[derive(Clone, Debug)]
pub struct ParseCharError {
kind: CharErrorKind,
}
impl ParseCharError {
#[unstable(feature = "char_error_internals",
reason = "this method should not be available publicly",
issue = "0")]
#[doc(hidden)]
pub fn __description(&self) -> &str {
match self.kind {
CharErrorKind::EmptyString => {
"cannot parse char from empty string"
},
CharErrorKind::TooManyChars => "too many characters in string"
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum CharErrorKind {
EmptyString,
TooManyChars,
}
#[stable(feature = "char_from_str", since = "1.20.0")]
impl fmt::Display for ParseCharError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.__description().fmt(f)
}
}
#[stable(feature = "char_from_str", since = "1.20.0")]
impl FromStr for char {
type Err = ParseCharError;
#[inline]
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut chars = s.chars();
match (chars.next(), chars.next()) {
(None, _) => {
Err(ParseCharError { kind: CharErrorKind::EmptyString })
},
(Some(c), None) => Ok(c),
_ => {
Err(ParseCharError { kind: CharErrorKind::TooManyChars })
}
}
}
}
#[unstable(feature = "try_from", issue = "33417")]
impl TryFrom<u32> for char {
type Error = CharTryFromError;
#[inline]
fn try_from(i: u32) -> Result<Self, Self::Error> {
if (i > MAX as u32) || (i >= 0xD800 && i <= 0xDFFF) {
Err(CharTryFromError(()))
} else {
Ok(unsafe { from_u32_unchecked(i) })
}
}
}
/// The error type returned when a conversion from u32 to char fails.
#[unstable(feature = "try_from", issue = "33417")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct CharTryFromError(());
#[unstable(feature = "try_from", issue = "33417")]
impl fmt::Display for CharTryFromError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
"converted integer out of range for `char`".fmt(f)
}
}
/// Converts a digit in the given radix to a `char`.
///
/// A 'radix' here is sometimes also called a 'base'. A radix of two
/// indicates a binary number, a radix of ten, decimal, and a radix of
/// sixteen, hexadecimal, to give some common values. Arbitrary
/// radices are supported.
///
/// `from_digit()` will return `None` if the input is not a digit in
/// the given radix.
///
/// # Panics
///
/// Panics if given a radix larger than 36.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::char;
///
/// let c = char::from_digit(4, 10);
///
/// assert_eq!(Some('4'), c);
///
/// // Decimal 11 is a single digit in base 16
/// let c = char::from_digit(11, 16);
///
/// assert_eq!(Some('b'), c);
/// ```
///
/// Returning `None` when the input is not a digit:
///
/// ```
/// use std::char;
///
/// let c = char::from_digit(20, 10);
///
/// assert_eq!(None, c);
/// ```
///
/// Passing a large radix, causing a panic:
///
/// ```
/// use std::thread;
/// use std::char;
///
/// let result = thread::spawn(|| {
/// // this panics
/// let c = char::from_digit(1, 37);
/// }).join();
///
/// assert!(result.is_err());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_digit(num: u32, radix: u32) -> Option<char> {
if radix > 36 {
panic!("from_digit: radix is too high (maximum 36)");
}
if num < radix {
let num = num as u8;
if num < 10 {
Some((b'0' + num) as char)
} else {
Some((b'a' + num - 10) as char)
}
} else {
None
}
}
// NB: the stabilization and documentation for this trait is in
// unicode/char.rs, not here
#[allow(missing_docs)] // docs in libunicode/u_char.rs
#[doc(hidden)]
#[unstable(feature = "core_char_ext",
reason = "the stable interface is `impl char` in later crate",
issue = "32110")]
pub trait CharExt {
#[stable(feature = "core", since = "1.6.0")]
fn is_digit(self, radix: u32) -> bool;
#[stable(feature = "core", since = "1.6.0")]
fn to_digit(self, radix: u32) -> Option<u32>;
#[stable(feature = "core", since = "1.6.0")]
fn escape_unicode(self) -> EscapeUnicode;
#[stable(feature = "core", since = "1.6.0")]
fn escape_default(self) -> EscapeDefault;
#[stable(feature = "char_escape_debug", since = "1.20.0")]
fn escape_debug(self) -> EscapeDebug;
#[stable(feature = "core", since = "1.6.0")]
fn len_utf8(self) -> usize;
#[stable(feature = "core", since = "1.6.0")]
fn len_utf16(self) -> usize;
#[stable(feature = "unicode_encode_char", since = "1.15.0")]
fn encode_utf8(self, dst: &mut [u8]) -> &mut str;
#[stable(feature = "unicode_encode_char", since = "1.15.0")]
fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16];
}
#[stable(feature = "core", since = "1.6.0")]
impl CharExt for char {
#[inline]
fn is_digit(self, radix: u32) -> bool {
self.to_digit(radix).is_some()
}
#[inline]
fn to_digit(self, radix: u32) -> Option<u32> {
if radix > 36 {
panic!("to_digit: radix is too high (maximum 36)");
}
let val = match self {
'0' ... '9' => self as u32 - '0' as u32,
'a' ... 'z' => self as u32 - 'a' as u32 + 10,
'A' ... 'Z' => self as u32 - 'A' as u32 + 10,
_ => return None,
};
if val < radix { Some(val) }
else { None }
}
#[inline]
fn escape_unicode(self) -> EscapeUnicode {
let c = self as u32;
// or-ing 1 ensures that for c==0 the code computes that one
// digit should be printed and (which is the same) avoids the
// (31 - 32) underflow
let msb = 31 - (c | 1).leading_zeros();
// the index of the most significant hex digit
let ms_hex_digit = msb / 4;
EscapeUnicode {
c: self,
state: EscapeUnicodeState::Backslash,
hex_digit_idx: ms_hex_digit as usize,
}
}
#[inline]
fn escape_default(self) -> EscapeDefault {
let init_state = match self {
'\t' => EscapeDefaultState::Backslash('t'),
'\r' => EscapeDefaultState::Backslash('r'),
'\n' => EscapeDefaultState::Backslash('n'),
'\\' | '\'' | '"' => EscapeDefaultState::Backslash(self),
'\x20' ... '\x7e' => EscapeDefaultState::Char(self),
_ => EscapeDefaultState::Unicode(self.escape_unicode())
};
EscapeDefault { state: init_state }
}
#[inline]
fn escape_debug(self) -> EscapeDebug {
let init_state = match self {
'\t' => EscapeDefaultState::Backslash('t'),
'\r' => EscapeDefaultState::Backslash('r'),
'\n' => EscapeDefaultState::Backslash('n'),
'\\' | '\'' | '"' => EscapeDefaultState::Backslash(self),
c if is_printable(c) => EscapeDefaultState::Char(c),
c => EscapeDefaultState::Unicode(c.escape_unicode()),
};
EscapeDebug(EscapeDefault { state: init_state })
}
#[inline]
fn len_utf8(self) -> usize {
let code = self as u32;
if code < MAX_ONE_B {
1
} else if code < MAX_TWO_B {
2
} else if code < MAX_THREE_B {
3
} else {
4
}
}
#[inline]
fn len_utf16(self) -> usize {
let ch = self as u32;
if (ch & 0xFFFF) == ch { 1 } else { 2 }
}
#[inline]
fn encode_utf8(self, dst: &mut [u8]) -> &mut str {
let code = self as u32;
unsafe {
let len =
if code < MAX_ONE_B && !dst.is_empty() {
*dst.get_unchecked_mut(0) = code as u8;
1
} else if code < MAX_TWO_B && dst.len() >= 2 {
*dst.get_unchecked_mut(0) = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;
*dst.get_unchecked_mut(1) = (code & 0x3F) as u8 | TAG_CONT;
2
} else if code < MAX_THREE_B && dst.len() >= 3 {
*dst.get_unchecked_mut(0) = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;
*dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT;
*dst.get_unchecked_mut(2) = (code & 0x3F) as u8 | TAG_CONT;
3
} else if dst.len() >= 4 {
*dst.get_unchecked_mut(0) = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;
*dst.get_unchecked_mut(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT;
*dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT;
*dst.get_unchecked_mut(3) = (code & 0x3F) as u8 | TAG_CONT;
4
} else {
panic!("encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}",
from_u32_unchecked(code).len_utf8(),
code,
dst.len())
};
from_utf8_unchecked_mut(dst.get_unchecked_mut(..len))
}
}
#[inline]
fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16] {
let mut code = self as u32;
unsafe {
if (code & 0xFFFF) == code && !dst.is_empty() {
// The BMP falls through (assuming non-surrogate, as it should)
*dst.get_unchecked_mut(0) = code as u16;
slice::from_raw_parts_mut(dst.as_mut_ptr(), 1)
} else if dst.len() >= 2 {
// Supplementary planes break into surrogates.
code -= 0x1_0000;
*dst.get_unchecked_mut(0) = 0xD800 | ((code >> 10) as u16);
*dst.get_unchecked_mut(1) = 0xDC00 | ((code as u16) & 0x3FF);
slice::from_raw_parts_mut(dst.as_mut_ptr(), 2)
} else {
panic!("encode_utf16: need {} units to encode U+{:X}, but the buffer has {}",
from_u32_unchecked(code).len_utf16(),
code,
dst.len())
}
}
}
}
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
/// This `struct` is created by the [`escape_unicode`] method on [`char`]. See
/// its documentation for more.
///
/// [`escape_unicode`]: ../../std/primitive.char.html#method.escape_unicode
/// [`char`]: ../../std/primitive.char.html
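///
/// # Examples
///
/// The iterator can be displayed or collected directly; for example:
///
/// ```
/// assert_eq!('❤'.escape_unicode().to_string(), "\\u{2764}");
/// ```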
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct EscapeUnicode {
c: char,
state: EscapeUnicodeState,
// The index of the next hex digit to be printed (0 if none),
// i.e. the number of remaining hex digits to be printed;
// increasing from the least significant digit: 0x543210
hex_digit_idx: usize,
}
// The enum values are ordered so that their representation is the
// same as the remaining length (besides the hexadecimal digits). This
// likely makes `len()` a single load from memory (and worth inlining).
#[derive(Clone, Debug)]
enum EscapeUnicodeState {
Done,
RightBrace,
Value,
LeftBrace,
Type,
Backslash,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for EscapeUnicode {
type Item = char;
fn next(&mut self) -> Option<char> {
match self.state {
EscapeUnicodeState::Backslash => {
self.state = EscapeUnicodeState::Type;
Some('\\')
}
EscapeUnicodeState::Type => {
self.state = EscapeUnicodeState::LeftBrace;
Some('u')
}
EscapeUnicodeState::LeftBrace => {
self.state = EscapeUnicodeState::Value;
Some('{')
}
EscapeUnicodeState::Value => {
let hex_digit = ((self.c as u32) >> (self.hex_digit_idx * 4)) & 0xf;
let c = from_digit(hex_digit, 16).unwrap();
if self.hex_digit_idx == 0 {
self.state = EscapeUnicodeState::RightBrace;
} else {
self.hex_digit_idx -= 1;
}
Some(c)
}
EscapeUnicodeState::RightBrace => {
self.state = EscapeUnicodeState::Done;
Some('}')
}
EscapeUnicodeState::Done => None,
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.len();
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
fn last(self) -> Option<char> {
match self.state {
EscapeUnicodeState::Done => None,
EscapeUnicodeState::RightBrace |
EscapeUnicodeState::Value |
EscapeUnicodeState::LeftBrace |
EscapeUnicodeState::Type |
EscapeUnicodeState::Backslash => Some('}'),
}
}
}
#[stable(feature = "exact_size_escape", since = "1.11.0")]
impl ExactSizeIterator for EscapeUnicode {
#[inline]
fn len(&self) -> usize {
// The match is a single memory access with no branching
self.hex_digit_idx + match self.state {
EscapeUnicodeState::Done => 0,
EscapeUnicodeState::RightBrace => 1,
EscapeUnicodeState::Value => 2,
EscapeUnicodeState::LeftBrace => 3,
EscapeUnicodeState::Type => 4,
EscapeUnicodeState::Backslash => 5,
}
}
}
#[unstable(feature = "fused", issue = "35602")]
impl FusedIterator for EscapeUnicode {}
#[stable(feature = "char_struct_display", since = "1.16.0")]
impl fmt::Display for EscapeUnicode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for c in self.clone() {
f.write_char(c)?;
}
Ok(())
}
}
/// An iterator that yields the literal escape code of a `char`.
///
/// This `struct` is created by the [`escape_default`] method on [`char`]. See
/// its documentation for more.
///
/// [`escape_default`]: ../../std/primitive.char.html#method.escape_default
/// [`char`]: ../../std/primitive.char.html
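///
/// # Examples
///
/// A couple of short examples:
///
/// ```
/// assert_eq!('\n'.escape_default().to_string(), "\\n");
/// assert_eq!('"'.escape_default().to_string(), "\\\"");
/// ```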
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct EscapeDefault {
state: EscapeDefaultState
}
#[derive(Clone, Debug)]
enum EscapeDefaultState {
Done,
Char(char),
Backslash(char),
Unicode(EscapeUnicode),
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for EscapeDefault {
type Item = char;
fn next(&mut self) -> Option<char> {
match self.state {
EscapeDefaultState::Backslash(c) => {
self.state = EscapeDefaultState::Char(c);
Some('\\')
}
EscapeDefaultState::Char(c) => {
self.state = EscapeDefaultState::Done;
Some(c)
}
EscapeDefaultState::Done => None,
EscapeDefaultState::Unicode(ref mut iter) => iter.next(),
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.len();
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
fn nth(&mut self, n: usize) -> Option<char> {
match self.state {
EscapeDefaultState::Backslash(c) if n == 0 => {
self.state = EscapeDefaultState::Char(c);
Some('\\')
},
EscapeDefaultState::Backslash(c) if n == 1 => {
self.state = EscapeDefaultState::Done;
Some(c)
},
EscapeDefaultState::Backslash(_) => {
self.state = EscapeDefaultState::Done;
None
},
EscapeDefaultState::Char(c) => {
self.state = EscapeDefaultState::Done;
if n == 0 {
Some(c)
} else {
None
}
},
EscapeDefaultState::Done => return None,
EscapeDefaultState::Unicode(ref mut i) => return i.nth(n),
}
}
fn last(self) -> Option<char> {
match self.state {
EscapeDefaultState::Unicode(iter) => iter.last(),
EscapeDefaultState::Done => None,
EscapeDefaultState::Backslash(c) | EscapeDefaultState::Char(c) => Some(c),
}
}
}
#[stable(feature = "exact_size_escape", since = "1.11.0")]
impl ExactSizeIterator for EscapeDefault {
fn len(&self) -> usize {
match self.state {
EscapeDefaultState::Done => 0,
EscapeDefaultState::Char(_) => 1,
EscapeDefaultState::Backslash(_) => 2,
EscapeDefaultState::Unicode(ref iter) => iter.len(),
}
}
}
#[unstable(feature = "fused", issue = "35602")]
impl FusedIterator for EscapeDefault {}
#[stable(feature = "char_struct_display", since = "1.16.0")]
impl fmt::Display for EscapeDefault {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for c in self.clone() {
f.write_char(c)?;
}
Ok(())
}
}
/// An iterator that yields the literal escape code of a `char`.
///
/// This `struct` is created by the [`escape_debug`] method on [`char`]. See its
/// documentation for more.
///
/// [`escape_debug`]: ../../std/primitive.char.html#method.escape_debug
/// [`char`]: ../../std/primitive.char.html
#[stable(feature = "char_escape_debug", since = "1.20.0")]
#[derive(Clone, Debug)]
pub struct EscapeDebug(EscapeDefault);
#[stable(feature = "char_escape_debug", since = "1.20.0")]
impl Iterator for EscapeDebug {
type Item = char;
fn next(&mut self) -> Option<char> { self.0.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
#[stable(feature = "char_escape_debug", since = "1.20.0")]
impl ExactSizeIterator for EscapeDebug { }
#[unstable(feature = "fused", issue = "35602")]
impl FusedIterator for EscapeDebug {}
#[stable(feature = "char_escape_debug", since = "1.20.0")]
impl fmt::Display for EscapeDebug {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.0, f)
}
}
/// An iterator over an iterator of bytes of the characters the bytes represent
/// as UTF-8
#[unstable(feature = "decode_utf8", issue = "33906")]
#[derive(Clone, Debug)]
pub struct DecodeUtf8<I: Iterator<Item = u8>>(::iter::Peekable<I>);
/// Decodes an `Iterator` of bytes as UTF-8.
#[unstable(feature = "decode_utf8", issue = "33906")]
#[inline]
pub fn decode_utf8<I: IntoIterator<Item = u8>>(i: I) -> DecodeUtf8<I::IntoIter> {
DecodeUtf8(i.into_iter().peekable())
}
/// `<DecodeUtf8 as Iterator>::next` returns this for an invalid input sequence.
#[unstable(feature = "decode_utf8", issue = "33906")]
#[derive(PartialEq, Eq, Debug)]
pub struct InvalidSequence(());
#[unstable(feature = "decode_utf8", issue = "33906")]
impl<I: Iterator<Item = u8>> Iterator for DecodeUtf8<I> {
type Item = Result<char, InvalidSequence>;
#[inline]
fn next(&mut self) -> Option<Result<char, InvalidSequence>> {
self.0.next().map(|first_byte| {
// Emit InvalidSequence according to
// Unicode §5.22 Best Practice for U+FFFD Substitution
// http://www.unicode.org/versions/Unicode9.0.0/ch05.pdf#G40630
// Roughly: consume at least one byte,
// then validate one byte at a time and stop before the first unexpected byte
// (which might be the valid start of the next byte sequence).
let mut code_point;
macro_rules! first_byte {
($mask: expr) => {
code_point = u32::from(first_byte & $mask)
}
}
macro_rules! continuation_byte {
() => { continuation_byte!(0x80...0xBF) };
($range: pat) => {
match self.0.peek() {
Some(&byte @ $range) => {
code_point = (code_point << 6) | u32::from(byte & 0b0011_1111);
self.0.next();
}
_ => return Err(InvalidSequence(()))
}
}
}
match first_byte {
0x00...0x7F => {
first_byte!(0b1111_1111);
}
0xC2...0xDF => {
first_byte!(0b0001_1111);
continuation_byte!();
}
0xE0 => {
first_byte!(0b0000_1111);
continuation_byte!(0xA0...0xBF); // 0x80...0x9F here are overlong
continuation_byte!();
}
0xE1...0xEC | 0xEE...0xEF => {
first_byte!(0b0000_1111);
continuation_byte!();
continuation_byte!();
}
0xED => {
first_byte!(0b0000_1111);
continuation_byte!(0x80...0x9F); // 0xA0..0xBF here are surrogates
continuation_byte!();
}
0xF0 => {
first_byte!(0b0000_0111);
continuation_byte!(0x90...0xBF); // 0x80..0x8F here are overlong
continuation_byte!();
continuation_byte!();
}
0xF1...0xF3 => {
first_byte!(0b0000_0111);
continuation_byte!();
continuation_byte!();
continuation_byte!();
}
0xF4 => {
first_byte!(0b0000_0111);
continuation_byte!(0x80...0x8F); // 0x90..0xBF here are beyond char::MAX
continuation_byte!();
continuation_byte!();
}
_ => return Err(InvalidSequence(())) // Illegal first byte, overlong, or beyond MAX
}
unsafe {
Ok(from_u32_unchecked(code_point))
}
})
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<I: FusedIterator<Item = u8>> FusedIterator for DecodeUtf8<I> {}
| 32.519252 | 100 | 0.556732 |
2f39f5cb9cca11e9d69c09b19557f7f4fb9a0614 | 10,643 | // This is mostly stubs
#![allow(unused_variables, dead_code)]
//! Virtual pipes.
//!
//! These types provide easy implementations of `WasiFile` that mimic much of the behavior of Unix
//! pipes. These are particularly helpful for redirecting WASI stdio handles to destinations other
//! than OS files.
//!
//! Some convenience constructors are included for common backing types like `Vec<u8>` and `String`,
//! but the virtual pipes can be instantiated with any `Read` or `Write` type.
//!
use crate::{
file::{Advice, FdFlags, FileType, Filestat, WasiFile},
Error, ErrorExt, SystemTimeSpec,
};
use std::any::Any;
use std::convert::TryInto;
use std::io::{self, Read, Write};
use std::sync::{Arc, RwLock};
/// A virtual pipe read end.
///
/// A variety of `From` impls are provided so that common pipe types are easy to create. For example:
///
/// ```no_run
/// # use std::rc::Rc;
/// # use std::cell::RefCell;
/// use wasi_common::{pipe::ReadPipe, WasiCtx, Table};
/// let stdin = ReadPipe::from("hello from stdin!");
/// // Bring these instances from elsewhere (e.g. wasi-cap-std-sync):
/// let random = todo!();
/// let clocks = todo!();
/// let sched = todo!();
/// let table = Rc::new(RefCell::new(Table::new()));
/// let ctx = WasiCtx::builder(random, clocks, sched, table)
/// .stdin(Box::new(stdin.clone()))
/// .build();
/// ```
#[derive(Debug)]
pub struct ReadPipe<R: Read> {
reader: Arc<RwLock<R>>,
}
impl<R: Read> Clone for ReadPipe<R> {
fn clone(&self) -> Self {
Self {
reader: self.reader.clone(),
}
}
}
impl<R: Read> ReadPipe<R> {
/// Create a new pipe from a `Read` type.
///
/// All `Handle` read operations delegate to reading from this underlying reader.
pub fn new(r: R) -> Self {
Self::from_shared(Arc::new(RwLock::new(r)))
}
/// Create a new pipe from a shareable `Read` type.
///
/// All `Handle` read operations delegate to reading from this underlying reader.
pub fn from_shared(reader: Arc<RwLock<R>>) -> Self {
Self { reader }
}
/// Try to convert this `ReadPipe<R>` back to the underlying `R` type.
///
/// This will fail with `Err(self)` if multiple references to the underlying `R` exist.
pub fn try_into_inner(mut self) -> Result<R, Self> {
match Arc::try_unwrap(self.reader) {
Ok(rc) => Ok(RwLock::into_inner(rc).unwrap()),
Err(reader) => {
self.reader = reader;
Err(self)
}
}
}
fn borrow(&self) -> std::sync::RwLockWriteGuard<R> {
RwLock::write(&self.reader).unwrap()
}
}
impl From<Vec<u8>> for ReadPipe<io::Cursor<Vec<u8>>> {
fn from(r: Vec<u8>) -> Self {
Self::new(io::Cursor::new(r))
}
}
impl From<&[u8]> for ReadPipe<io::Cursor<Vec<u8>>> {
fn from(r: &[u8]) -> Self {
Self::from(r.to_vec())
}
}
impl From<String> for ReadPipe<io::Cursor<String>> {
fn from(r: String) -> Self {
Self::new(io::Cursor::new(r))
}
}
impl From<&str> for ReadPipe<io::Cursor<String>> {
fn from(r: &str) -> Self {
Self::from(r.to_string())
}
}
#[wiggle::async_trait]
impl<R: Read + Any + Send + Sync> WasiFile for ReadPipe<R> {
fn as_any(&self) -> &dyn Any {
self
}
async fn datasync(&self) -> Result<(), Error> {
Ok(()) // trivial: no implementation needed
}
async fn sync(&self) -> Result<(), Error> {
Ok(()) // trivial
}
async fn get_filetype(&self) -> Result<FileType, Error> {
Ok(FileType::Pipe)
}
async fn get_fdflags(&self) -> Result<FdFlags, Error> {
Ok(FdFlags::empty())
}
async fn set_fdflags(&mut self, _fdflags: FdFlags) -> Result<(), Error> {
Err(Error::badf())
}
async fn get_filestat(&self) -> Result<Filestat, Error> {
Ok(Filestat {
device_id: 0,
inode: 0,
filetype: self.get_filetype().await?,
nlink: 0,
size: 0, // XXX no way to get a size out of a Read :(
atim: None,
mtim: None,
ctim: None,
})
}
async fn set_filestat_size(&self, _size: u64) -> Result<(), Error> {
Err(Error::badf())
}
async fn advise(&self, offset: u64, len: u64, advice: Advice) -> Result<(), Error> {
Err(Error::badf())
}
async fn allocate(&self, offset: u64, len: u64) -> Result<(), Error> {
Err(Error::badf())
}
async fn read_vectored<'a>(&self, bufs: &mut [io::IoSliceMut<'a>]) -> Result<u64, Error> {
let n = self.borrow().read_vectored(bufs)?;
Ok(n.try_into()?)
}
async fn read_vectored_at<'a>(
&self,
bufs: &mut [io::IoSliceMut<'a>],
offset: u64,
) -> Result<u64, Error> {
Err(Error::badf())
}
async fn write_vectored<'a>(&self, bufs: &[io::IoSlice<'a>]) -> Result<u64, Error> {
Err(Error::badf())
}
async fn write_vectored_at<'a>(
&self,
bufs: &[io::IoSlice<'a>],
offset: u64,
) -> Result<u64, Error> {
Err(Error::badf())
}
async fn seek(&self, pos: std::io::SeekFrom) -> Result<u64, Error> {
Err(Error::badf())
}
async fn peek(&self, buf: &mut [u8]) -> Result<u64, Error> {
Err(Error::badf())
}
async fn set_times(
&self,
atime: Option<SystemTimeSpec>,
mtime: Option<SystemTimeSpec>,
) -> Result<(), Error> {
Err(Error::badf())
}
async fn num_ready_bytes(&self) -> Result<u64, Error> {
Ok(0)
}
async fn readable(&mut self) -> Result<(), Error> {
Err(Error::badf())
}
async fn writable(&mut self) -> Result<(), Error> {
Err(Error::badf())
}
}
/// A virtual pipe write end.
///
/// ```no_run
/// # use std::rc::Rc;
/// # use std::cell::RefCell;
/// use wasi_common::{pipe::WritePipe, WasiCtx, Table};
/// let stdout = WritePipe::new_in_memory();
/// // Bring these instances from elsewhere (e.g. wasi-cap-std-sync):
/// let random = todo!();
/// let clocks = todo!();
/// let sched = todo!();
/// let table = Rc::new(RefCell::new(Table::new()));
/// let ctx = WasiCtx::builder(random, clocks, sched, table)
/// .stdout(Box::new(stdout.clone()))
/// .build();
/// // use ctx in an instance, then make sure it is dropped:
/// drop(ctx);
/// let contents: Vec<u8> = stdout.try_into_inner().expect("sole remaining reference to WritePipe").into_inner();
/// println!("contents of stdout: {:?}", contents);
/// ```
#[derive(Debug)]
pub struct WritePipe<W: Write> {
writer: Arc<RwLock<W>>,
}
impl<W: Write> Clone for WritePipe<W> {
fn clone(&self) -> Self {
Self {
writer: self.writer.clone(),
}
}
}
impl<W: Write> WritePipe<W> {
/// Create a new pipe from a `Write` type.
///
/// All `Handle` write operations delegate to writing to this underlying writer.
pub fn new(w: W) -> Self {
Self::from_shared(Arc::new(RwLock::new(w)))
}
/// Create a new pipe from a shareable `Write` type.
///
/// All `Handle` write operations delegate to writing to this underlying writer.
pub fn from_shared(writer: Arc<RwLock<W>>) -> Self {
Self { writer }
}
/// Try to convert this `WritePipe<W>` back to the underlying `W` type.
///
/// This will fail with `Err(self)` if multiple references to the underlying `W` exist.
pub fn try_into_inner(mut self) -> Result<W, Self> {
match Arc::try_unwrap(self.writer) {
Ok(rc) => Ok(RwLock::into_inner(rc).unwrap()),
Err(writer) => {
self.writer = writer;
Err(self)
}
}
}
fn borrow(&self) -> std::sync::RwLockWriteGuard<W> {
RwLock::write(&self.writer).unwrap()
}
}
impl WritePipe<io::Cursor<Vec<u8>>> {
/// Create a new writable virtual pipe backed by a `Vec<u8>` buffer.
pub fn new_in_memory() -> Self {
Self::new(io::Cursor::new(vec![]))
}
}
#[wiggle::async_trait]
impl<W: Write + Any + Send + Sync> WasiFile for WritePipe<W> {
fn as_any(&self) -> &dyn Any {
self
}
async fn datasync(&self) -> Result<(), Error> {
Ok(())
}
async fn sync(&self) -> Result<(), Error> {
Ok(())
}
async fn get_filetype(&self) -> Result<FileType, Error> {
Ok(FileType::Pipe)
}
async fn get_fdflags(&self) -> Result<FdFlags, Error> {
Ok(FdFlags::APPEND)
}
async fn set_fdflags(&mut self, _fdflags: FdFlags) -> Result<(), Error> {
Err(Error::badf())
}
async fn get_filestat(&self) -> Result<Filestat, Error> {
Ok(Filestat {
device_id: 0,
inode: 0,
filetype: self.get_filetype().await?,
nlink: 0,
size: 0, // XXX no way to get a size out of a Write :(
atim: None,
mtim: None,
ctim: None,
})
}
async fn set_filestat_size(&self, _size: u64) -> Result<(), Error> {
Err(Error::badf())
}
async fn advise(&self, offset: u64, len: u64, advice: Advice) -> Result<(), Error> {
Err(Error::badf())
}
async fn allocate(&self, offset: u64, len: u64) -> Result<(), Error> {
Err(Error::badf())
}
async fn read_vectored<'a>(&self, bufs: &mut [io::IoSliceMut<'a>]) -> Result<u64, Error> {
Err(Error::badf())
}
async fn read_vectored_at<'a>(
&self,
bufs: &mut [io::IoSliceMut<'a>],
offset: u64,
) -> Result<u64, Error> {
Err(Error::badf())
}
async fn write_vectored<'a>(&self, bufs: &[io::IoSlice<'a>]) -> Result<u64, Error> {
let n = self.borrow().write_vectored(bufs)?;
Ok(n.try_into()?)
}
async fn write_vectored_at<'a>(
&self,
bufs: &[io::IoSlice<'a>],
offset: u64,
) -> Result<u64, Error> {
Err(Error::badf())
}
async fn seek(&self, pos: std::io::SeekFrom) -> Result<u64, Error> {
Err(Error::badf())
}
async fn peek(&self, buf: &mut [u8]) -> Result<u64, Error> {
Err(Error::badf())
}
async fn set_times(
&self,
atime: Option<SystemTimeSpec>,
mtime: Option<SystemTimeSpec>,
) -> Result<(), Error> {
Err(Error::badf())
}
async fn num_ready_bytes(&self) -> Result<u64, Error> {
Ok(0)
}
async fn readable(&mut self) -> Result<(), Error> {
Err(Error::badf())
}
async fn writable(&mut self) -> Result<(), Error> {
Err(Error::badf())
}
}
| 30.235795 | 113 | 0.552382 |
1a5b9a84ce21e126036815b2b0eda6e856f2c090 | 652 | use std::error::Error;
use std::sync::Mutex;
use lazy_static::lazy_static;
lazy_static! {
static ref FRUIT: Mutex<Vec<String>> = Mutex::new(Vec::new());
}
fn insert(fruit: &str) -> Result<(), Box<dyn Error>> {
let mut db = FRUIT.lock().map_err(|_| "Failed to acquire MutexGuard")?;
db.push(fruit.to_string());
Ok(())
}
fn main() -> Result<(), Box<dyn Error>> {
insert("apple")?;
insert("orange")?;
insert("peach")?;
{
let db = FRUIT.lock().map_err(|_| "Failed to acquire MutexGuard")?;
db.iter().enumerate().for_each(|(i, item)| println!("{}: {}", i, item));
}
insert("grape")?;
Ok(())
}
| 24.148148 | 80 | 0.569018 |
ed92ec54f8bcf0e1aa6c87b4d06a4e4d92d64bfe | 3,042 | use crate::{
exp, parse, stdlib, CallSnapshot, Environment, Exception, ExceptionValue as EV, Expression,
};
use relative_path::RelativePath;
use std::env;
use std::fs;
use std::path::PathBuf;
use crate::Locker;
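/// Resolve an included resource to an evaluated expression.
///
/// Paths starting with `@` are looked up in the bundled standard library. Any other
/// path is read from disk, interpreted relative to the including file's location when
/// that is known, and relative to the current working directory otherwise. The file's
/// contents are parsed and evaluated in `env`, and the value of the last expression is
/// returned.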
pub fn resolve_resource(
path: &str,
snapshot: Locker<CallSnapshot>,
via: &Expression,
env: Locker<Environment>,
) -> Result<Expression, Exception> {
let content = match path.starts_with('@') {
true => match stdlib::get_std_resource(path) {
Some(val) => val,
None => exp!(
EV::InvalidIncludePath(String::from(path)),
snapshot,
format!("`{}` is not in the standard library", path)
),
},
false => {
let source_path_opt = match via.source() {
Some(source) => match source.location() {
Some(location) => Some(location),
None => None,
},
None => None,
};
let working_dir = match env::current_dir() {
Ok(dir) => dir,
Err(_) => exp!(
EV::InvalidIncludePath(String::from(path)),
snapshot,
"could not establish working directory (the environment is unknown)"
.to_string()
),
};
let relative_dir = match source_path_opt {
Some(source_path) => match fs::metadata(&source_path) {
Ok(metadata) => match metadata.is_dir() {
true => PathBuf::from(source_path),
false => match PathBuf::from(source_path).parent() {
Some(parent) => PathBuf::from(parent),
None => working_dir,
},
},
Err(_) => working_dir,
},
None => working_dir,
};
let relative_dir_composed = match RelativePath::from_path(&path) {
Ok(relative) => relative,
Err(err) => exp!(
EV::InvalidIncludePath(String::from(path)),
snapshot,
format!(
"could not understand include path ({}; all includes must be relative)",
err
)
),
};
match fs::read_to_string(&relative_dir_composed.to_path(relative_dir)) {
Ok(value) => value,
Err(val) => exp!(
EV::InvalidIncludePath(path.to_string()),
snapshot,
format!("unable to read file ({})", val)
),
}
}
};
let parsed = parse(&content, &path.to_string())?;
let mut return_val = Expression::nil();
for exp in parsed {
return_val = exp.eval(CallSnapshot::new(&exp, &snapshot)?, env.clone())?;
}
Ok(return_val)
}
| 33.8 | 96 | 0.461867 |
14cb732e6891ff44be97519db1fdc307d9f76cb6 | 1,747 | use super::{scale_base, ParsedValue};
use std::fmt::Debug;
/// Format a quantity of bytes.
#[derive(Debug, Clone, Copy)]
pub struct Formatter {
scale_base: u64,
}
impl Formatter {
/// Create a new formatter.
pub const fn new(scale_base: u64) -> Self {
Formatter { scale_base }
}
/// Multiplication factor.
pub const fn scale_base(self) -> u64 {
self.scale_base
}
    /// Get the scale for the given exponent, i.e. `scale_base` raised to the power `exp`.
pub const fn scale(self, exp: u32) -> u64 {
self.scale_base().pow(exp)
}
/// Parse a value according to the prefixing rule.
pub const fn parse_value(self, value: u64) -> ParsedValue {
macro_rules! check {
($exp:literal => $unit:literal) => {{
let scale = self.scale($exp);
if value >= scale {
return ParsedValue {
coefficient: rounded_div::u64(value, scale),
unit: $unit,
exponent: $exp,
scale,
};
}
}};
}
check!(5 => 'P');
check!(4 => 'T');
check!(3 => 'G');
check!(2 => 'M');
check!(1 => 'K');
ParsedValue {
coefficient: value,
unit: 'B',
scale: 1,
exponent: 0,
}
}
}
macro_rules! variant {
($(#[$attributes:meta])* $name:ident) => {
$(#[$attributes])*
pub const $name: Formatter = Formatter::new(scale_base::$name);
};
}
variant! {
/// Format a quantity of bytes in [metric system](scale_base::METRIC).
METRIC
}
variant! {
/// Format a quantity of bytes in [binary system](scale_base::BINARY).
BINARY
}
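// A minimal usage sketch (assuming `ParsedValue`'s fields are public and
// `scale_base::BINARY` is 1024, as its name suggests):
//
// let parsed = BINARY.parse_value(2048);
// assert_eq!((parsed.coefficient, parsed.unit, parsed.exponent), (2, 'K', 1));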
| 24.263889 | 74 | 0.4917 |
bf42078c88f5256c5829aab8444d9347b95656b9 | 5,174 | // Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
/// This code is built to load test Kinesis sources.
///
/// Essentially, it:
/// - Generates some amount of data (total_records). Right now, the data are just
/// random strings converted to bytes.
/// - Pushes the generated data to the target Kinesis stream (at a rate of records_per_second).
/// - Creates a source from the Kinesis stream and a materialized view of the count
/// of records from the stream.
///
/// The test will end and is considered successful iff all records are pushed to
/// Kinesis, all records are accounted for in materialized, AND the performance seems
/// reasonable.
///
/// To evaluate overall performance, we use the latency metrics in the Grafana dashboard.
/// In general, the server side latencies should be low and consistent over time. Additionally,
/// "Time behind external source," which indicates our lag behind the tip of the Kinesis
/// stream, should not drift over time. (These measurements should become more concrete as
/// we get more experience running this test).
///
///
use anyhow::Context;
use rand::Rng;
use mz_aws_util::config::AwsConfig;
use ore::task;
use test_util::mz_client;
mod kinesis;
mod mz;
#[tokio::main]
async fn main() {
if let Err(e) = run().await {
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
}
async fn run() -> Result<(), anyhow::Error> {
let timer = std::time::Instant::now();
let args: Args = ore::cli::parse_args();
env_logger::init();
// Initialize and log test variables.
let seed: u32 = rand::thread_rng().gen();
let stream_name = format!("{}-{}", args.stream_prefix, seed);
// todo: make queries per second configurable. (requires mz_client changes)
tracing::info!("Starting kinesis load test with mzd={}:{} \
stream={} shard_count={} total_records={} records_per_second={} queries_per_second={}",
args.materialized_host, args.materialized_port, &stream_name, args.shard_count, args.total_records, args.records_per_second, 1);
// Initialize test resources in Kinesis.
let config = AwsConfig::load_from_env().await;
let kinesis_client = mz_aws_util::kinesis::client(&config);
let stream_arn =
kinesis::create_stream(&kinesis_client, &stream_name, args.shard_count).await?;
tracing::info!("Created Kinesis stream {}", stream_name);
// Push records to Kinesis.
let kinesis_task = task::spawn(|| "kinesis_task", {
let kinesis_client_clone = kinesis_client.clone();
let stream_name_clone = stream_name.clone();
let total_records = args.total_records;
let records_per_second = args.records_per_second;
async move {
kinesis::generate_and_put_records(
&kinesis_client_clone,
&stream_name_clone,
total_records,
records_per_second,
)
.await
}
});
// Initialize connection to materialized instance.
let client = mz_client::client(&args.materialized_host, args.materialized_port)
.await
.context("creating postgres client")?;
// Create Kinesis source and materialized view.
mz::create_source_and_views(&client, stream_arn).await?;
tracing::info!("Created source and materialized views");
// Query materialized view for all pushed Kinesis records.
let materialize_task = task::spawn(|| "kinesis_mz_verify", {
let total_records = args.total_records;
async move { mz::query_materialized_view_until(&client, "foo_count", total_records).await }
});
let (kinesis_result, materialize_result) = futures::join!(kinesis_task, materialize_task);
kinesis::delete_stream(&kinesis_client, &stream_name).await?;
kinesis_result?.context("kinesis thread failed")?;
materialize_result.context("materialize thread failed")??;
tracing::info!(
"Completed load test in {} milliseconds",
timer.elapsed().as_millis()
);
Ok(())
}
#[derive(Clone, Debug, clap::Parser)]
pub struct Args {
/// The materialized host
#[clap(long, default_value = "materialized")]
pub materialized_host: String,
/// The materialized port
#[clap(long, default_value = "6875")]
pub materialized_port: u16,
/// The number of shards in the Kinesis stream
#[clap(long, default_value = "50")]
pub shard_count: i32,
/// The total number of records to create
#[clap(long, default_value = "150000000")]
pub total_records: u64,
/// The number of records to put to the Kinesis stream per second
#[clap(long, default_value = "2000")]
pub records_per_second: u64,
/// The name of the stream to use, will always have a nonce
#[clap(long, default_value = "testdrive-perf-kinesis")]
pub stream_prefix: String,
}
| 36.43662 | 132 | 0.678005 |
7af94c73f324d80324fdacec9a44edd95df61698 | 28,960 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Overloadable operators
//!
//! Implementing these traits allows you to get an effect similar to
//! overloading operators.
//!
//! Some of these traits are imported by the prelude, so they are available in
//! every Rust program.
//!
//! Many of the operators take their operands by value. In non-generic
//! contexts involving built-in types, this is usually not a problem.
//! However, using these operators in generic code requires some
//! attention if values have to be reused as opposed to letting the operators
//! consume them. One option is to occasionally use `clone()`.
//! Another option is to rely on the types involved providing additional
//! operator implementations for references. For example, for a user-defined
//! type `T` which is supposed to support addition, it is probably a good
//! idea to have both `T` and `&T` implement the traits `Add<T>` and `Add<&T>`
//! so that generic code can be written without unnecessary cloning.
//!
//! # Example
//!
//! This example creates a `Point` struct that implements `Add` and `Sub`, and then
//! demonstrates adding and subtracting two `Point`s.
//!
//! ```rust
//! use std::ops::{Add, Sub};
//!
//! #[derive(Debug)]
//! struct Point {
//! x: int,
//! y: int
//! }
//!
//! impl Add for Point {
//! type Output = Point;
//!
//! fn add(self, other: Point) -> Point {
//! Point {x: self.x + other.x, y: self.y + other.y}
//! }
//! }
//!
//! impl Sub for Point {
//! type Output = Point;
//!
//! fn sub(self, other: Point) -> Point {
//! Point {x: self.x - other.x, y: self.y - other.y}
//! }
//! }
//! fn main() {
//! println!("{:?}", Point {x: 1, y: 0} + Point {x: 2, y: 3});
//! println!("{:?}", Point {x: 1, y: 0} - Point {x: 2, y: 3});
//! }
//! ```
//!
//! See the documentation for each trait for a minimum implementation that prints
//! something to the screen.
#![stable(feature = "rust1", since = "1.0.0")]
use marker::Sized;
use fmt;
/// The `Drop` trait is used to run some code when a value goes out of scope. This
/// is sometimes called a 'destructor'.
///
/// # Example
///
/// A trivial implementation of `Drop`. The `drop` method is called when `_x` goes
/// out of scope, and therefore `main` prints `Dropping!`.
///
/// ```rust
/// struct HasDrop;
///
/// impl Drop for HasDrop {
/// fn drop(&mut self) {
/// println!("Dropping!");
/// }
/// }
///
/// fn main() {
/// let _x = HasDrop;
/// }
/// ```
#[lang="drop"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Drop {
/// The `drop` method, called when the value goes out of scope.
#[stable(feature = "rust1", since = "1.0.0")]
fn drop(&mut self);
}
// implements the unary operator "op &T"
// based on "op T" where T is expected to be `Copy`able
macro_rules! forward_ref_unop {
(impl $imp:ident, $method:ident for $t:ty) => {
#[unstable(feature = "core",
reason = "recently added, waiting for dust to settle")]
impl<'a> $imp for &'a $t {
type Output = <$t as $imp>::Output;
#[inline]
fn $method(self) -> <$t as $imp>::Output {
$imp::$method(*self)
}
}
}
}
// implements binary operators "&T op U", "T op &U", "&T op &U"
// based on "T op U" where T and U are expected to be `Copy`able
macro_rules! forward_ref_binop {
(impl $imp:ident, $method:ident for $t:ty, $u:ty) => {
#[unstable(feature = "core",
reason = "recently added, waiting for dust to settle")]
impl<'a> $imp<$u> for &'a $t {
type Output = <$t as $imp<$u>>::Output;
#[inline]
fn $method(self, other: $u) -> <$t as $imp<$u>>::Output {
$imp::$method(*self, other)
}
}
#[unstable(feature = "core",
reason = "recently added, waiting for dust to settle")]
impl<'a> $imp<&'a $u> for $t {
type Output = <$t as $imp<$u>>::Output;
#[inline]
fn $method(self, other: &'a $u) -> <$t as $imp<$u>>::Output {
$imp::$method(self, *other)
}
}
#[unstable(feature = "core",
reason = "recently added, waiting for dust to settle")]
impl<'a, 'b> $imp<&'a $u> for &'b $t {
type Output = <$t as $imp<$u>>::Output;
#[inline]
fn $method(self, other: &'a $u) -> <$t as $imp<$u>>::Output {
$imp::$method(*self, *other)
}
}
}
}
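// For example, `forward_ref_binop! { impl Add, add for u32, u32 }` expands to
// roughly the following three impls, so that `&1u32 + 2u32`, `1u32 + &2u32`
// and `&1u32 + &2u32` all work once `impl Add for u32` exists:
//
//     impl<'a> Add<u32> for &'a u32 {
//         type Output = <u32 as Add<u32>>::Output;
//         fn add(self, other: u32) -> <u32 as Add<u32>>::Output {
//             Add::add(*self, other)
//         }
//     }
//     impl<'a> Add<&'a u32> for u32 { /* forwards with `*other` */ }
//     impl<'a, 'b> Add<&'a u32> for &'b u32 { /* forwards with `*self` and `*other` */ }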
/// The `Add` trait is used to specify the functionality of `+`.
///
/// # Example
///
/// A trivial implementation of `Add`. When `Foo + Foo` happens, it ends up
/// calling `add`, and therefore, `main` prints `Adding!`.
///
/// ```rust
/// use std::ops::Add;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Add for Foo {
/// type Output = Foo;
///
/// fn add(self, _rhs: Foo) -> Foo {
/// println!("Adding!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo + Foo;
/// }
/// ```
#[lang="add"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Add<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `+` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn add(self, rhs: RHS) -> Self::Output;
}
macro_rules! add_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Add for $t {
type Output = $t;
#[inline]
fn add(self, other: $t) -> $t { self + other }
}
forward_ref_binop! { impl Add, add for $t, $t }
)*)
}
add_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 }
/// The `Sub` trait is used to specify the functionality of `-`.
///
/// # Example
///
/// A trivial implementation of `Sub`. When `Foo - Foo` happens, it ends up
/// calling `sub`, and therefore, `main` prints `Subtracting!`.
///
/// ```rust
/// use std::ops::Sub;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Sub for Foo {
/// type Output = Foo;
///
/// fn sub(self, _rhs: Foo) -> Foo {
/// println!("Subtracting!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo - Foo;
/// }
/// ```
#[lang="sub"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Sub<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `-` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn sub(self, rhs: RHS) -> Self::Output;
}
macro_rules! sub_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Sub for $t {
type Output = $t;
#[inline]
fn sub(self, other: $t) -> $t { self - other }
}
forward_ref_binop! { impl Sub, sub for $t, $t }
)*)
}
sub_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 }
/// The `Mul` trait is used to specify the functionality of `*`.
///
/// # Example
///
/// A trivial implementation of `Mul`. When `Foo * Foo` happens, it ends up
/// calling `mul`, and therefore, `main` prints `Multiplying!`.
///
/// ```rust
/// use std::ops::Mul;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Mul for Foo {
/// type Output = Foo;
///
/// fn mul(self, _rhs: Foo) -> Foo {
/// println!("Multiplying!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo * Foo;
/// }
/// ```
#[lang="mul"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Mul<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `*` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn mul(self, rhs: RHS) -> Self::Output;
}
macro_rules! mul_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Mul for $t {
type Output = $t;
#[inline]
fn mul(self, other: $t) -> $t { self * other }
}
forward_ref_binop! { impl Mul, mul for $t, $t }
)*)
}
mul_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 }
/// The `Div` trait is used to specify the functionality of `/`.
///
/// # Example
///
/// A trivial implementation of `Div`. When `Foo / Foo` happens, it ends up
/// calling `div`, and therefore, `main` prints `Dividing!`.
///
/// ```
/// use std::ops::Div;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Div for Foo {
/// type Output = Foo;
///
/// fn div(self, _rhs: Foo) -> Foo {
/// println!("Dividing!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo / Foo;
/// }
/// ```
#[lang="div"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Div<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `/` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn div(self, rhs: RHS) -> Self::Output;
}
macro_rules! div_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Div for $t {
type Output = $t;
#[inline]
fn div(self, other: $t) -> $t { self / other }
}
forward_ref_binop! { impl Div, div for $t, $t }
)*)
}
div_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 }
/// The `Rem` trait is used to specify the functionality of `%`.
///
/// # Example
///
/// A trivial implementation of `Rem`. When `Foo % Foo` happens, it ends up
/// calling `rem`, and therefore, `main` prints `Remainder-ing!`.
///
/// ```
/// use std::ops::Rem;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Rem for Foo {
/// type Output = Foo;
///
/// fn rem(self, _rhs: Foo) -> Foo {
/// println!("Remainder-ing!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo % Foo;
/// }
/// ```
#[lang="rem"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Rem<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output = Self;
/// The method for the `%` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn rem(self, rhs: RHS) -> Self::Output;
}
macro_rules! rem_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Rem for $t {
type Output = $t;
#[inline]
fn rem(self, other: $t) -> $t { self % other }
}
forward_ref_binop! { impl Rem, rem for $t, $t }
)*)
}
macro_rules! rem_float_impl {
($t:ty, $fmod:ident) => {
#[stable(feature = "rust1", since = "1.0.0")]
impl Rem for $t {
type Output = $t;
#[inline]
fn rem(self, other: $t) -> $t {
extern { fn $fmod(a: $t, b: $t) -> $t; }
unsafe { $fmod(self, other) }
}
}
forward_ref_binop! { impl Rem, rem for $t, $t }
}
}
rem_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 }
rem_float_impl! { f32, fmodf }
rem_float_impl! { f64, fmod }
/// The `Neg` trait is used to specify the functionality of unary `-`.
///
/// # Example
///
/// A trivial implementation of `Neg`. When `-Foo` happens, it ends up calling
/// `neg`, and therefore, `main` prints `Negating!`.
///
/// ```
/// use std::ops::Neg;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Neg for Foo {
/// type Output = Foo;
///
/// fn neg(self) -> Foo {
/// println!("Negating!");
/// self
/// }
/// }
///
/// fn main() {
/// -Foo;
/// }
/// ```
#[lang="neg"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Neg {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the unary `-` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn neg(self) -> Self::Output;
}
macro_rules! neg_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Neg for $t {
#[stable(feature = "rust1", since = "1.0.0")]
type Output = $t;
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn neg(self) -> $t { -self }
}
forward_ref_unop! { impl Neg, neg for $t }
)*)
}
macro_rules! neg_uint_impl {
($t:ty, $t_signed:ty) => {
#[stable(feature = "rust1", since = "1.0.0")]
impl Neg for $t {
type Output = $t;
#[inline]
fn neg(self) -> $t { -(self as $t_signed) as $t }
}
forward_ref_unop! { impl Neg, neg for $t }
}
}
neg_impl! { int i8 i16 i32 i64 f32 f64 }
neg_uint_impl! { uint, int }
neg_uint_impl! { u8, i8 }
neg_uint_impl! { u16, i16 }
neg_uint_impl! { u32, i32 }
neg_uint_impl! { u64, i64 }
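// Note that with these impls unary minus on an unsigned integer wraps around:
// for example `-1u8` is evaluated as `-(1u8 as i8) as u8`, i.e. `255`.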
/// The `Not` trait is used to specify the functionality of unary `!`.
///
/// # Example
///
/// A trivial implementation of `Not`. When `!Foo` happens, it ends up calling
/// `not`, and therefore, `main` prints `Not-ing!`.
///
/// ```
/// use std::ops::Not;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Not for Foo {
/// type Output = Foo;
///
/// fn not(self) -> Foo {
/// println!("Not-ing!");
/// self
/// }
/// }
///
/// fn main() {
/// !Foo;
/// }
/// ```
#[lang="not"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Not {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the unary `!` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn not(self) -> Self::Output;
}
macro_rules! not_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl Not for $t {
type Output = $t;
#[inline]
fn not(self) -> $t { !self }
}
forward_ref_unop! { impl Not, not for $t }
)*)
}
not_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 }
/// The `BitAnd` trait is used to specify the functionality of `&`.
///
/// # Example
///
/// A trivial implementation of `BitAnd`. When `Foo & Foo` happens, it ends up
/// calling `bitand`, and therefore, `main` prints `Bitwise And-ing!`.
///
/// ```
/// use std::ops::BitAnd;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl BitAnd for Foo {
/// type Output = Foo;
///
/// fn bitand(self, _rhs: Foo) -> Foo {
/// println!("Bitwise And-ing!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo & Foo;
/// }
/// ```
#[lang="bitand"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait BitAnd<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `&` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn bitand(self, rhs: RHS) -> Self::Output;
}
macro_rules! bitand_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl BitAnd for $t {
type Output = $t;
#[inline]
fn bitand(self, rhs: $t) -> $t { self & rhs }
}
forward_ref_binop! { impl BitAnd, bitand for $t, $t }
)*)
}
bitand_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 }
/// The `BitOr` trait is used to specify the functionality of `|`.
///
/// # Example
///
/// A trivial implementation of `BitOr`. When `Foo | Foo` happens, it ends up
/// calling `bitor`, and therefore, `main` prints `Bitwise Or-ing!`.
///
/// ```
/// use std::ops::BitOr;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl BitOr for Foo {
/// type Output = Foo;
///
/// fn bitor(self, _rhs: Foo) -> Foo {
/// println!("Bitwise Or-ing!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo | Foo;
/// }
/// ```
#[lang="bitor"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait BitOr<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `|` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn bitor(self, rhs: RHS) -> Self::Output;
}
macro_rules! bitor_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl BitOr for $t {
type Output = $t;
#[inline]
fn bitor(self, rhs: $t) -> $t { self | rhs }
}
forward_ref_binop! { impl BitOr, bitor for $t, $t }
)*)
}
bitor_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 }
/// The `BitXor` trait is used to specify the functionality of `^`.
///
/// # Example
///
/// A trivial implementation of `BitXor`. When `Foo ^ Foo` happens, it ends up
/// calling `bitxor`, and therefore, `main` prints `Bitwise Xor-ing!`.
///
/// ```
/// use std::ops::BitXor;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl BitXor for Foo {
/// type Output = Foo;
///
/// fn bitxor(self, _rhs: Foo) -> Foo {
/// println!("Bitwise Xor-ing!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo ^ Foo;
/// }
/// ```
#[lang="bitxor"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait BitXor<RHS=Self> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `^` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn bitxor(self, rhs: RHS) -> Self::Output;
}
macro_rules! bitxor_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl BitXor for $t {
type Output = $t;
#[inline]
fn bitxor(self, other: $t) -> $t { self ^ other }
}
forward_ref_binop! { impl BitXor, bitxor for $t, $t }
)*)
}
bitxor_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 }
/// The `Shl` trait is used to specify the functionality of `<<`.
///
/// # Example
///
/// A trivial implementation of `Shl`. When `Foo << Foo` happens, it ends up
/// calling `shl`, and therefore, `main` prints `Shifting left!`.
///
/// ```
/// use std::ops::Shl;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Shl<Foo> for Foo {
/// type Output = Foo;
///
/// fn shl(self, _rhs: Foo) -> Foo {
/// println!("Shifting left!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo << Foo;
/// }
/// ```
#[lang="shl"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Shl<RHS> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `<<` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn shl(self, rhs: RHS) -> Self::Output;
}
macro_rules! shl_impl {
($t:ty, $f:ty) => (
#[stable(feature = "rust1", since = "1.0.0")]
impl Shl<$f> for $t {
type Output = $t;
#[inline]
fn shl(self, other: $f) -> $t {
self << other
}
}
forward_ref_binop! { impl Shl, shl for $t, $f }
)
}
macro_rules! shl_impl_all {
($($t:ty)*) => ($(
shl_impl! { $t, u8 }
shl_impl! { $t, u16 }
shl_impl! { $t, u32 }
shl_impl! { $t, u64 }
shl_impl! { $t, usize }
shl_impl! { $t, i8 }
shl_impl! { $t, i16 }
shl_impl! { $t, i32 }
shl_impl! { $t, i64 }
shl_impl! { $t, isize }
)*)
}
shl_impl_all! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
/// The `Shr` trait is used to specify the functionality of `>>`.
///
/// # Example
///
/// A trivial implementation of `Shr`. When `Foo >> Foo` happens, it ends up
/// calling `shr`, and therefore, `main` prints `Shifting right!`.
///
/// ```
/// use std::ops::Shr;
///
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Shr<Foo> for Foo {
/// type Output = Foo;
///
/// fn shr(self, _rhs: Foo) -> Foo {
/// println!("Shifting right!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo >> Foo;
/// }
/// ```
#[lang="shr"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Shr<RHS> {
#[stable(feature = "rust1", since = "1.0.0")]
type Output;
/// The method for the `>>` operator
#[stable(feature = "rust1", since = "1.0.0")]
fn shr(self, rhs: RHS) -> Self::Output;
}
macro_rules! shr_impl {
($t:ty, $f:ty) => (
impl Shr<$f> for $t {
type Output = $t;
#[inline]
fn shr(self, other: $f) -> $t {
self >> other
}
}
forward_ref_binop! { impl Shr, shr for $t, $f }
)
}
macro_rules! shr_impl_all {
($($t:ty)*) => ($(
shr_impl! { $t, u8 }
shr_impl! { $t, u16 }
shr_impl! { $t, u32 }
shr_impl! { $t, u64 }
shr_impl! { $t, usize }
shr_impl! { $t, i8 }
shr_impl! { $t, i16 }
shr_impl! { $t, i32 }
shr_impl! { $t, i64 }
shr_impl! { $t, isize }
)*)
}
shr_impl_all! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
/// The `Index` trait is used to specify the functionality of indexing operations
/// like `arr[idx]` when used in an immutable context.
///
/// # Example
///
/// A trivial implementation of `Index`. When `Foo[Bar]` happens, it ends up
/// calling `index`, and therefore, `main` prints `Indexing!`.
///
/// ```
/// use std::ops::Index;
///
/// #[derive(Copy)]
/// struct Foo;
/// struct Bar;
///
/// impl Index<Bar> for Foo {
/// type Output = Foo;
///
/// fn index<'a>(&'a self, _index: &Bar) -> &'a Foo {
/// println!("Indexing!");
/// self
/// }
/// }
///
/// fn main() {
/// Foo[Bar];
/// }
/// ```
#[lang="index"]
#[rustc_on_unimplemented = "the type `{Self}` cannot be indexed by `{Index}`"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Index<Index: ?Sized> {
type Output: ?Sized;
/// The method for the indexing (`Foo[Bar]`) operation
#[stable(feature = "rust1", since = "1.0.0")]
fn index<'a>(&'a self, index: &Index) -> &'a Self::Output;
}
/// The `IndexMut` trait is used to specify the functionality of indexing
/// operations like `arr[idx]`, when used in a mutable context.
///
/// # Example
///
/// A trivial implementation of `IndexMut`. When `Foo[Bar]` happens, it ends up
/// calling `index_mut`, and therefore, `main` prints `Indexing!`.
///
/// ```
/// use std::ops::IndexMut;
///
/// #[derive(Copy)]
/// struct Foo;
/// struct Bar;
///
/// impl IndexMut<Bar> for Foo {
/// type Output = Foo;
///
/// fn index_mut<'a>(&'a mut self, _index: &Bar) -> &'a mut Foo {
/// println!("Indexing!");
/// self
/// }
/// }
///
/// fn main() {
/// &mut Foo[Bar];
/// }
/// ```
#[lang="index_mut"]
#[rustc_on_unimplemented = "the type `{Self}` cannot be mutably indexed by `{Index}`"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait IndexMut<Index: ?Sized> {
type Output: ?Sized;
/// The method for the indexing (`Foo[Bar]`) operation
#[stable(feature = "rust1", since = "1.0.0")]
fn index_mut<'a>(&'a mut self, index: &Index) -> &'a mut Self::Output;
}
/// An unbounded range.
#[derive(Copy, Clone, PartialEq, Eq)]
#[lang="range_full"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFull;
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for RangeFull {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt("..", fmt)
}
}
/// A (half-open) range which is bounded at both ends.
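///
/// # Example
///
/// The `3..5` literal syntax builds one of these values directly
/// (illustrative sketch):
///
/// ```
/// let r = 3..5;
/// assert_eq!(r.start, 3);
/// assert_eq!(r.end, 5);
/// ```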
#[derive(Clone, PartialEq, Eq)]
#[lang="range"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Range<Idx> {
/// The lower bound of the range (inclusive).
pub start: Idx,
/// The upper bound of the range (exclusive).
pub end: Idx,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<Idx: fmt::Debug> fmt::Debug for Range<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}..{:?}", self.start, self.end)
}
}
/// A range which is only bounded below.
#[derive(Clone, PartialEq, Eq)]
#[lang="range_from"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFrom<Idx> {
/// The lower bound of the range (inclusive).
pub start: Idx,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeFrom<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}..", self.start)
}
}
/// A range which is only bounded above.
#[derive(Copy, Clone, PartialEq, Eq)]
#[lang="range_to"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeTo<Idx> {
/// The upper bound of the range (exclusive).
pub end: Idx,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeTo<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "..{:?}", self.end)
}
}
/// The `Deref` trait is used to specify the functionality of dereferencing
/// operations like `*v`.
///
/// # Example
///
/// A struct with a single field which is accessible via dereferencing the
/// struct.
///
/// ```
/// use std::ops::Deref;
///
/// struct DerefExample<T> {
/// value: T
/// }
///
/// impl<T> Deref for DerefExample<T> {
/// type Target = T;
///
/// fn deref<'a>(&'a self) -> &'a T {
/// &self.value
/// }
/// }
///
/// fn main() {
/// let x = DerefExample { value: 'a' };
/// assert_eq!('a', *x);
/// }
/// ```
#[lang="deref"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Deref {
#[stable(feature = "rust1", since = "1.0.0")]
type Target: ?Sized;
/// The method called to dereference a value
#[stable(feature = "rust1", since = "1.0.0")]
fn deref<'a>(&'a self) -> &'a Self::Target;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> Deref for &'a T {
type Target = T;
fn deref(&self) -> &T { *self }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> Deref for &'a mut T {
type Target = T;
fn deref(&self) -> &T { *self }
}
/// The `DerefMut` trait is used to specify the functionality of dereferencing
/// mutably like `*v = 1;`
///
/// # Example
///
/// A struct with a single field which is modifiable via dereferencing the
/// struct.
///
/// ```
/// use std::ops::{Deref, DerefMut};
///
/// struct DerefMutExample<T> {
/// value: T
/// }
///
/// impl<T> Deref for DerefMutExample<T> {
/// type Target = T;
///
/// fn deref<'a>(&'a self) -> &'a T {
/// &self.value
/// }
/// }
///
/// impl<T> DerefMut for DerefMutExample<T> {
/// fn deref_mut<'a>(&'a mut self) -> &'a mut T {
/// &mut self.value
/// }
/// }
///
/// fn main() {
/// let mut x = DerefMutExample { value: 'a' };
/// *x = 'b';
/// assert_eq!('b', *x);
/// }
/// ```
#[lang="deref_mut"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait DerefMut: Deref {
/// The method called to mutably dereference a value
#[stable(feature = "rust1", since = "1.0.0")]
fn deref_mut<'a>(&'a mut self) -> &'a mut Self::Target;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> DerefMut for &'a mut T {
fn deref_mut(&mut self) -> &mut T { *self }
}
/// A version of the call operator that takes an immutable receiver.
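///
/// # Example
///
/// A closure that only reads from its environment implements `Fn`, so it can
/// be called any number of times through a `Fn` bound (illustrative sketch):
///
/// ```
/// fn call_twice<F: Fn(i32) -> i32>(f: F, x: i32) -> i32 {
///     f(f(x))
/// }
///
/// fn main() {
///     let add_one = |n| n + 1;
///     assert_eq!(call_twice(add_one, 5), 7);
/// }
/// ```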
#[lang="fn"]
#[unstable(feature = "core",
reason = "uncertain about variadic generics, input versus associated types")]
#[rustc_paren_sugar]
pub trait Fn<Args> {
type Output;
/// This is called when the call operator is used.
extern "rust-call" fn call(&self, args: Args) -> Self::Output;
}
/// A version of the call operator that takes a mutable receiver.
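///
/// # Example
///
/// A closure that mutates state it has captured implements `FnMut`
/// (illustrative sketch):
///
/// ```
/// fn call_twice<F: FnMut()>(mut f: F) {
///     f();
///     f();
/// }
///
/// fn main() {
///     let mut count = 0;
///     call_twice(|| count += 1);
///     assert_eq!(count, 2);
/// }
/// ```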
#[lang="fn_mut"]
#[unstable(feature = "core",
reason = "uncertain about variadic generics, input versus associated types")]
#[rustc_paren_sugar]
pub trait FnMut<Args> {
type Output;
/// This is called when the call operator is used.
extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output;
}
/// A version of the call operator that takes a by-value receiver.
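///
/// # Example
///
/// A closure that moves a captured value out of its environment can only be
/// called once, so it is only usable through `FnOnce` (illustrative sketch):
///
/// ```
/// fn consume<F: FnOnce() -> String>(f: F) -> String {
///     f()
/// }
///
/// fn main() {
///     let s = "hello".to_string();
///     let out = consume(move || s);
///     assert_eq!(out, "hello");
/// }
/// ```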
#[lang="fn_once"]
#[unstable(feature = "core",
reason = "uncertain about variadic generics, input versus associated types")]
#[rustc_paren_sugar]
pub trait FnOnce<Args> {
type Output;
/// This is called when the call operator is used.
extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
}
impl<F: ?Sized, A> FnMut<A> for F
where F : Fn<A>
{
type Output = <F as Fn<A>>::Output;
extern "rust-call" fn call_mut(&mut self, args: A) -> <F as Fn<A>>::Output {
self.call(args)
}
}
impl<F,A> FnOnce<A> for F
where F : FnMut<A>
{
type Output = <F as FnMut<A>>::Output;
extern "rust-call" fn call_once(mut self, args: A) -> <F as FnMut<A>>::Output {
self.call_mut(args)
}
}
| 24.730999 | 88 | 0.526761 |
4b352733e6c2e09a5cf9b154dd7823310f5a0c52 | 6,653 | //! This module contains functions to write pages with various layouts.
use std::io::{Read, Write, BufReader, BufWriter};
use std::fs::File;
use std::path::{Path, PathBuf};
use crate::{replace, replace_path, ImageInfo, PageInfo, PageKind};
#[derive(Debug)]
pub struct Page {
path: PathBuf,
}
pub(crate) fn set_section_title(
page_info: &PageInfo, title: Option<&str>
) -> std::io::Result<()> {
let file = File::open(&page_info.path)?;
let mut buf_reader = BufReader::new(file);
let mut page_text = String::new();
buf_reader.read_to_string(&mut page_text)?;
let comment = "% page title";
replace(
&mut page_text,
"PHOTOTEX_PAGE_TITLE",
title.unwrap_or(comment),
)
.unwrap();
let f = File::create(&page_info.path)?;
let mut writer = BufWriter::new(f);
write!(writer, "{}", page_text)?;
Ok(())
}
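// A minimal usage sketch (assuming the caller has already gathered `ImageInfo`
// values elsewhere in the crate; their construction is not shown here):
//
//     let mut page_id = 0;
//     let page = Page::new(&mut page_id, Path::new("out"));
//     let info = page.write_two_landscapes(&im0, &im1)?;
//     set_section_title(&info, Some("Section title"))?;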
impl Page {
pub fn new(
page_id: &mut usize,
out_folder: &Path,
) -> Page {
let path = out_folder.join(format!("page{:03}", *page_id));
*page_id += 1;
Page {
path,
}
}
pub(crate) fn write_two_landscapes(
self,
im0: &ImageInfo,
im1: &ImageInfo,
) -> std::io::Result<PageInfo> {
let page_path = &self.path;
std::fs::create_dir_all(page_path)?;
let page_path = page_path.join("page.tex");
let f = File::create(&page_path)?;
let mut writer = BufWriter::new(f);
let mut page_text =
include_str!("../data/page_2_landscapes.tex").to_string();
if let Some(im0_path) = im0.path.canonicalize()?.to_str() {
replace(&mut page_text, "PHOTOTEX_FIRST_IMAGE_PATH", im0_path)
.unwrap();
} else {
log::error!(
"could not include image path {:?} in {:?}: utf-8 failed",
im0.path,
page_path,
);
}
if let Some(im1_path) = im1.path.canonicalize()?.to_str() {
replace(&mut page_text, "PHOTOTEX_SECOND_IMAGE_PATH", im1_path)
.unwrap();
} else {
log::error!(
"could not include image path {:?} in {:?}: utf-8 failed",
im1.path,
page_path,
);
}
replace(&mut page_text, "PHOTOTEX_FIRST_LEGEND", "%").unwrap();
replace(&mut page_text, "PHOTOTEX_SECOND_LEGEND", "%").unwrap();
write!(writer, "{}", page_text)?;
Ok(PageInfo {
path: page_path,
kind: PageKind::TwoLandscapes,
})
}
pub(crate) fn write_two_portraits_one_landscape(
self,
im0: &ImageInfo,
im1: &ImageInfo,
im2: &ImageInfo,
) -> std::io::Result<PageInfo> {
let page_path = &self.path;
std::fs::create_dir_all(page_path)?;
let page_path = page_path.join("page.tex");
let f = File::create(&page_path)?;
let mut writer = BufWriter::new(f);
let mut page_text =
include_str!("../data/page_2_portrait_1_landscape.tex").to_string();
let (im0_, im1_, im2_);
if im0.rotated_dims.0 >= im0.rotated_dims.1 {
im2_ = im0;
im0_ = im1;
im1_ = im2;
} else if im1.rotated_dims.0 >= im1.rotated_dims.1 {
im2_ = im1;
im0_ = im0;
im1_ = im2;
} else {
im2_ = im2;
im0_ = im0;
im1_ = im1;
}
replace_path(
&mut page_text,
"PHOTOTEX_FIRST_IMAGE_PATH",
im0_,
&page_path,
);
replace_path(
&mut page_text,
"PHOTOTEX_SECOND_IMAGE_PATH",
im1_,
&page_path,
);
replace_path(
&mut page_text,
"PHOTOTEX_THIRD_IMAGE_PATH",
im2_,
&page_path,
);
replace(&mut page_text, "PHOTOTEX_FIRST_SECOND_LEGENDS", "%").unwrap();
replace(&mut page_text, "PHOTOTEX_THIRD_LEGEND", "%").unwrap();
write!(writer, "{}", page_text)?;
Ok(PageInfo {
path: page_path,
kind: PageKind::TwoLandscapes,
})
}
pub(crate) fn write_four_portraits(
self,
im0: &ImageInfo,
im1: &ImageInfo,
im2: &ImageInfo,
im3: &ImageInfo,
) -> std::io::Result<PageInfo> {
let page_path = &self.path;
std::fs::create_dir_all(page_path)?;
let page_path = page_path.join("page.tex");
let f = File::create(&page_path)?;
let mut writer = BufWriter::new(f);
let mut page_text =
include_str!("../data/page_4_portraits.tex").to_string();
replace_path(
&mut page_text,
"PHOTOTEX_FIRST_IMAGE_PATH",
            im0,
&page_path,
);
replace_path(
&mut page_text,
"PHOTOTEX_SECOND_IMAGE_PATH",
            im1,
&page_path,
);
replace_path(
&mut page_text,
"PHOTOTEX_THIRD_IMAGE_PATH",
            im2,
&page_path,
);
replace_path(
&mut page_text,
"PHOTOTEX_FOURTH_IMAGE_PATH",
            im3,
&page_path,
);
replace(&mut page_text, "PHOTOTEX_FIRST_SECOND_LEGENDS", "%").unwrap();
replace(&mut page_text, "PHOTOTEX_THIRD_FOURTH_LEGENDS", "%").unwrap();
write!(writer, "{}", page_text)?;
Ok(PageInfo {
path: page_path,
kind: PageKind::TwoLandscapes,
})
}
pub(crate) fn write_one_portrait(
self,
im_info: &ImageInfo,
) -> std::io::Result<PageInfo> {
let page_path = &self.path;
std::fs::create_dir_all(page_path)?;
let page_path = page_path.join("page.tex");
let f = File::create(&page_path)?;
let mut writer = BufWriter::new(f);
let mut page_text =
include_str!("../data/page_1_portrait.tex").to_string();
if let Some(im_path) = im_info.path.canonicalize()?.to_str() {
replace(&mut page_text, "PHOTOTEX_IMAGE_PATH", im_path).unwrap();
} else {
log::error!(
"could not include image path {:?} in {:?}: utf-8 failed",
im_info.path,
page_path,
);
}
replace(&mut page_text, "PHOTOTEX_LEGEND", "%").unwrap();
write!(writer, "{}", page_text)?;
Ok(PageInfo {
path: page_path,
kind: PageKind::OnePortrait,
})
}
}
| 30.240909 | 80 | 0.519615 |
79adef7eecedcf24846148ebb17969407d102dbd | 615 | // enums2.rs
// Make me compile! Execute `rustlings hint enums2` for hints!
#[derive(Debug)]
enum Message {
Move { x: i32, y: i32 },
Echo(String),
ChangeColor(u8, u8, u8),
Quit,
}
impl Message {
fn call(&self) {
println!("{:?}", &self);
}
}
fn main() {
let messages = [Message::Move { x: 10, y: 30 },
Message::Echo(String::from("hello world")),
Message::ChangeColor(200, 255, 255),
Message::Quit];
for message in &messages {
message.call();
}
}
| 21.206897 | 63 | 0.528455 |
872257118714e55930bd45ad21bb76319a7cd006 | 68,402 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// The Rust abstract syntax tree.
pub use self::GenericArgs::*;
pub use self::UnsafeSource::*;
pub use symbol::{Ident, Symbol as Name};
pub use util::parser::ExprPrecedence;
use ext::hygiene::{Mark, SyntaxContext};
use print::pprust;
use ptr::P;
use rustc_data_structures::indexed_vec::Idx;
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert;
use rustc_target::spec::abi::Abi;
use source_map::{dummy_spanned, respan, Spanned};
use symbol::{keywords, Symbol};
use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{ThinTokenStream, TokenStream};
use ThinVec;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lrc;
use serialize::{self, Decoder, Encoder};
use std::fmt;
pub use rustc_target::abi::FloatTy;
#[derive(Clone, RustcEncodable, RustcDecodable, Copy)]
pub struct Label {
pub ident: Ident,
}
impl fmt::Debug for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "label({:?})", self.ident)
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Copy)]
pub struct Lifetime {
pub id: NodeId,
pub ident: Ident,
}
impl fmt::Debug for Lifetime {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"lifetime({}: {})",
self.id,
pprust::lifetime_to_string(self)
)
}
}
/// A "Path" is essentially Rust's notion of a name.
///
/// It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
///
/// E.g. `std::cmp::PartialEq`
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Path {
pub span: Span,
/// The segments in the path: the things separated by `::`.
/// Global paths begin with `keywords::PathRoot`.
pub segments: Vec<PathSegment>,
}
impl<'a> PartialEq<&'a str> for Path {
fn eq(&self, string: &&'a str) -> bool {
self.segments.len() == 1 && self.segments[0].ident.name == *string
}
}
impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "path({})", pprust::path_to_string(self))
}
}
impl fmt::Display for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", pprust::path_to_string(self))
}
}
impl Path {
// convert a span and an identifier to the corresponding
// 1-segment path
pub fn from_ident(ident: Ident) -> Path {
Path {
segments: vec![PathSegment::from_ident(ident)],
span: ident.span,
}
}
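    /// Returns `true` if the path starts from the crate root, i.e. its first
    /// segment is the special `PathRoot` keyword, as in `::foo::bar`.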
pub fn is_global(&self) -> bool {
!self.segments.is_empty() && self.segments[0].ident.name == keywords::PathRoot.name()
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of types.
///
/// E.g. `std`, `String` or `Box<T>`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
pub ident: Ident,
pub id: NodeId,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`.
/// `None` means that no parameter list is supplied (`Path`),
/// `Some` means that parameter list is supplied (`Path<X, Y>`)
/// but it can be empty (`Path<>`).
/// `P` is used as a size optimization for the common case with no parameters.
pub args: Option<P<GenericArgs>>,
}
impl PathSegment {
pub fn from_ident(ident: Ident) -> Self {
PathSegment { ident, id: DUMMY_NODE_ID, args: None }
}
pub fn path_root(span: Span) -> Self {
PathSegment::from_ident(Ident::new(keywords::PathRoot.name(), span))
}
}
/// Arguments of a path segment.
///
/// E.g. `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum GenericArgs {
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
AngleBracketed(AngleBracketedArgs),
/// The `(A,B)` and `C` in `Foo(A,B) -> C`
Parenthesized(ParenthesisedArgs),
}
impl GenericArgs {
pub fn span(&self) -> Span {
match *self {
AngleBracketed(ref data) => data.span,
Parenthesized(ref data) => data.span,
}
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum GenericArg {
Lifetime(Lifetime),
Type(P<Ty>),
}
/// A path like `Foo<'a, T>`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)]
pub struct AngleBracketedArgs {
/// Overall span
pub span: Span,
/// The arguments for this path segment.
pub args: Vec<GenericArg>,
/// Bindings (equality constraints) on associated types, if present.
///
/// E.g., `Foo<A=Bar>`.
pub bindings: Vec<TypeBinding>,
}
impl Into<Option<P<GenericArgs>>> for AngleBracketedArgs {
fn into(self) -> Option<P<GenericArgs>> {
Some(P(GenericArgs::AngleBracketed(self)))
}
}
impl Into<Option<P<GenericArgs>>> for ParenthesisedArgs {
fn into(self) -> Option<P<GenericArgs>> {
Some(P(GenericArgs::Parenthesized(self)))
}
}
/// A path like `Foo(A,B) -> C`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ParenthesisedArgs {
/// Overall span
pub span: Span,
/// `(A,B)`
pub inputs: Vec<P<Ty>>,
/// `C`
pub output: Option<P<Ty>>,
}
// hack to ensure that we don't try to access the private parts of `NodeId` in this module
mod node_id_inner {
use rustc_data_structures::indexed_vec::Idx;
newtype_index! {
pub struct NodeId {
ENCODABLE = custom
DEBUG_FORMAT = "NodeId({})"
}
}
}
pub use self::node_id_inner::NodeId;
impl NodeId {
pub fn placeholder_from_mark(mark: Mark) -> Self {
NodeId::from_u32(mark.as_u32())
}
pub fn placeholder_to_mark(self) -> Mark {
Mark::from_u32(self.as_u32())
}
}
impl fmt::Display for NodeId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.as_u32(), f)
}
}
impl serialize::UseSpecializedEncodable for NodeId {
fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_u32(self.as_u32())
}
}
impl serialize::UseSpecializedDecodable for NodeId {
fn default_decode<D: Decoder>(d: &mut D) -> Result<NodeId, D::Error> {
d.read_u32().map(NodeId::from_u32)
}
}
/// Node id used to represent the root of the crate.
pub const CRATE_NODE_ID: NodeId = NodeId::from_u32_const(0);
/// When parsing and doing expansions, we initially give all AST nodes this AST
/// node value. Then later, in the renumber pass, we renumber them to have
/// small, positive ids.
pub const DUMMY_NODE_ID: NodeId = NodeId::MAX;
/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`. Negative bounds should also be handled here.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
pub enum TraitBoundModifier {
None,
Maybe,
}
/// The AST represents all type param bounds as types.
/// `typeck::collect::compute_bounds` matches these against
/// the "special" built-in traits (see `middle::lang_items`) and
/// detects `Copy`, `Send` and `Sync`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum GenericBound {
Trait(PolyTraitRef, TraitBoundModifier),
Outlives(Lifetime),
}
impl GenericBound {
pub fn span(&self) -> Span {
match self {
&GenericBound::Trait(ref t, ..) => t.span,
&GenericBound::Outlives(ref l) => l.ident.span,
}
}
}
pub type GenericBounds = Vec<GenericBound>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum GenericParamKind {
/// A lifetime definition, e.g. `'a: 'b+'c+'d`.
Lifetime,
Type {
default: Option<P<Ty>>,
},
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct GenericParam {
pub id: NodeId,
pub ident: Ident,
pub attrs: ThinVec<Attribute>,
pub bounds: GenericBounds,
pub kind: GenericParamKind,
}
/// Represents lifetime, type and const parameters attached to a declaration of
/// a function, enum, trait, etc.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Generics {
pub params: Vec<GenericParam>,
pub where_clause: WhereClause,
pub span: Span,
}
impl Default for Generics {
/// Creates an instance of `Generics`.
fn default() -> Generics {
Generics {
params: Vec::new(),
where_clause: WhereClause {
id: DUMMY_NODE_ID,
predicates: Vec::new(),
span: DUMMY_SP,
},
span: DUMMY_SP,
}
}
}
/// A `where` clause in a definition
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereClause {
pub id: NodeId,
pub predicates: Vec<WherePredicate>,
pub span: Span,
}
/// A single predicate in a `where` clause
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum WherePredicate {
/// A type binding, e.g. `for<'c> Foo: Send+Clone+'c`
BoundPredicate(WhereBoundPredicate),
/// A lifetime predicate, e.g. `'a: 'b+'c`
RegionPredicate(WhereRegionPredicate),
/// An equality predicate (unsupported)
EqPredicate(WhereEqPredicate),
}
impl WherePredicate {
pub fn span(&self) -> Span {
match self {
&WherePredicate::BoundPredicate(ref p) => p.span,
&WherePredicate::RegionPredicate(ref p) => p.span,
&WherePredicate::EqPredicate(ref p) => p.span,
}
}
}
/// A type bound.
///
/// E.g. `for<'c> Foo: Send+Clone+'c`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereBoundPredicate {
pub span: Span,
/// Any generics from a `for` binding
pub bound_generic_params: Vec<GenericParam>,
/// The type being bounded
pub bounded_ty: P<Ty>,
/// Trait and lifetime bounds (`Clone+Send+'static`)
pub bounds: GenericBounds,
}
/// A lifetime predicate.
///
/// E.g. `'a: 'b+'c`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereRegionPredicate {
pub span: Span,
pub lifetime: Lifetime,
pub bounds: GenericBounds,
}
/// An equality predicate (unsupported).
///
/// E.g. `T=int`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereEqPredicate {
pub id: NodeId,
pub span: Span,
pub lhs_ty: P<Ty>,
pub rhs_ty: P<Ty>,
}
/// The set of MetaItems that define the compilation environment of the crate,
/// used to drive conditional compilation
pub type CrateConfig = FxHashSet<(Name, Option<Symbol>)>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Crate {
pub module: Mod,
pub attrs: Vec<Attribute>,
pub span: Span,
}
/// A spanned compile-time attribute list item.
pub type NestedMetaItem = Spanned<NestedMetaItemKind>;
/// Possible values inside of compile-time attribute lists.
///
/// E.g. the '..' in `#[name(..)]`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum NestedMetaItemKind {
/// A full MetaItem, for recursive meta items.
MetaItem(MetaItem),
/// A literal.
///
/// E.g. "foo", 64, true
Literal(Lit),
}
/// A spanned compile-time attribute item.
///
/// E.g. `#[test]`, `#[derive(..)]`, `#[rustfmt::skip]` or `#[feature = "foo"]`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MetaItem {
pub ident: Path,
pub node: MetaItemKind,
pub span: Span,
}
/// A compile-time attribute item.
///
/// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum MetaItemKind {
/// Word meta item.
///
/// E.g. `test` as in `#[test]`
Word,
/// List meta item.
///
/// E.g. `derive(..)` as in `#[derive(..)]`
List(Vec<NestedMetaItem>),
/// Name value meta item.
///
/// E.g. `feature = "foo"` as in `#[feature = "foo"]`
NameValue(Lit),
}
/// A Block (`{ .. }`).
///
/// E.g. `{ .. }` as in `fn foo() { .. }`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Block {
/// Statements in a block
pub stmts: Vec<Stmt>,
pub id: NodeId,
/// Distinguishes between `unsafe { ... }` and `{ ... }`
pub rules: BlockCheckMode,
pub span: Span,
pub recovered: bool,
}
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Pat {
pub id: NodeId,
pub node: PatKind,
pub span: Span,
}
impl fmt::Debug for Pat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self))
}
}
impl Pat {
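    /// Attempt to reinterpret this pattern as an equivalent type, returning
    /// `None` for patterns (literals, ranges, bindings with sub-patterns, ...)
    /// that have no type-level counterpart.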
pub(super) fn to_ty(&self) -> Option<P<Ty>> {
let node = match &self.node {
PatKind::Wild => TyKind::Infer,
PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None) => {
TyKind::Path(None, Path::from_ident(*ident))
}
PatKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()),
PatKind::Mac(mac) => TyKind::Mac(mac.clone()),
PatKind::Ref(pat, mutbl) => pat
.to_ty()
.map(|ty| TyKind::Rptr(None, MutTy { ty, mutbl: *mutbl }))?,
PatKind::Slice(pats, None, _) if pats.len() == 1 => {
pats[0].to_ty().map(TyKind::Slice)?
}
PatKind::Tuple(pats, None) => {
let mut tys = Vec::with_capacity(pats.len());
// FIXME(#48994) - could just be collected into an Option<Vec>
for pat in pats {
tys.push(pat.to_ty()?);
}
TyKind::Tup(tys)
}
_ => return None,
};
Some(P(Ty {
node,
id: self.id,
span: self.span,
}))
}
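    /// Walk the pattern top-down, calling `it` on every sub-pattern.
    /// Traversal stops early, and `false` is returned, as soon as `it`
    /// returns `false` for any pattern it visits.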
pub fn walk<F>(&self, it: &mut F) -> bool
where
F: FnMut(&Pat) -> bool,
{
if !it(self) {
return false;
}
match self.node {
PatKind::Ident(_, _, Some(ref p)) => p.walk(it),
PatKind::Struct(_, ref fields, _) => fields.iter().all(|field| field.node.pat.walk(it)),
PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => {
s.iter().all(|p| p.walk(it))
}
PatKind::Box(ref s) | PatKind::Ref(ref s, _) | PatKind::Paren(ref s) => s.walk(it),
PatKind::Slice(ref before, ref slice, ref after) => {
before.iter().all(|p| p.walk(it))
&& slice.iter().all(|p| p.walk(it))
&& after.iter().all(|p| p.walk(it))
}
PatKind::Wild
| PatKind::Lit(_)
| PatKind::Range(..)
| PatKind::Ident(..)
| PatKind::Path(..)
| PatKind::Mac(_) => true,
}
}
}
/// A single field in a struct pattern
///
/// Patterns like the fields of `Foo { x, ref y, ref mut z }`
/// are treated the same as `x: x, y: ref y, z: ref mut z`,
/// except that `is_shorthand` is true.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct FieldPat {
/// The identifier for the field
pub ident: Ident,
/// The pattern the field is destructured to
pub pat: P<Pat>,
pub is_shorthand: bool,
pub attrs: ThinVec<Attribute>,
}
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum BindingMode {
ByRef(Mutability),
ByValue(Mutability),
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum RangeEnd {
Included(RangeSyntax),
Excluded,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum RangeSyntax {
DotDotDot,
DotDotEq,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum PatKind {
/// Represents a wildcard pattern (`_`)
Wild,
/// A `PatKind::Ident` may either be a new bound variable (`ref mut binding @ OPT_SUBPATTERN`),
/// or a unit struct/variant pattern, or a const pattern (in the last two cases the third
/// field must be `None`). Disambiguation cannot be done with parser alone, so it happens
/// during name resolution.
Ident(BindingMode, Ident, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
Struct(Path, Vec<Spanned<FieldPat>>, bool),
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
TupleStruct(Path, Vec<P<Pat>>, Option<usize>),
/// A possibly qualified path pattern.
/// Unqualified path patterns `A::B::C` can legally refer to variants, structs, constants
/// or associated constants. Qualified path patterns `<A>::B::C`/`<A as Trait>::B::C` can
/// only legally refer to associated constants.
Path(Option<QSelf>, Path),
/// A tuple pattern `(a, b)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
Tuple(Vec<P<Pat>>, Option<usize>),
/// A `box` pattern
Box(P<Pat>),
/// A reference pattern, e.g. `&mut (a, b)`
Ref(P<Pat>, Mutability),
/// A literal
Lit(P<Expr>),
/// A range pattern, e.g. `1...2`, `1..=2` or `1..2`
Range(P<Expr>, P<Expr>, Spanned<RangeEnd>),
/// `[a, b, ..i, y, z]` is represented as:
/// `PatKind::Slice(box [a, b], Some(i), box [y, z])`
Slice(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>),
/// Parentheses in patterns used for grouping, i.e. `(PAT)`.
Paren(P<Pat>),
/// A macro pattern; pre-expansion
Mac(Mac),
}
#[derive(
Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug, Copy,
)]
pub enum Mutability {
Mutable,
Immutable,
}
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum BinOpKind {
/// The `+` operator (addition)
Add,
/// The `-` operator (subtraction)
Sub,
/// The `*` operator (multiplication)
Mul,
/// The `/` operator (division)
Div,
/// The `%` operator (modulus)
Rem,
/// The `&&` operator (logical and)
And,
/// The `||` operator (logical or)
Or,
/// The `^` operator (bitwise xor)
BitXor,
/// The `&` operator (bitwise and)
BitAnd,
/// The `|` operator (bitwise or)
BitOr,
/// The `<<` operator (shift left)
Shl,
/// The `>>` operator (shift right)
Shr,
/// The `==` operator (equality)
Eq,
/// The `<` operator (less than)
Lt,
/// The `<=` operator (less than or equal to)
Le,
/// The `!=` operator (not equal to)
Ne,
/// The `>=` operator (greater than or equal to)
Ge,
/// The `>` operator (greater than)
Gt,
}
impl BinOpKind {
pub fn to_string(&self) -> &'static str {
use self::BinOpKind::*;
match *self {
Add => "+",
Sub => "-",
Mul => "*",
Div => "/",
Rem => "%",
And => "&&",
Or => "||",
BitXor => "^",
BitAnd => "&",
BitOr => "|",
Shl => "<<",
Shr => ">>",
Eq => "==",
Lt => "<",
Le => "<=",
Ne => "!=",
Ge => ">=",
Gt => ">",
}
}
pub fn lazy(&self) -> bool {
match *self {
BinOpKind::And | BinOpKind::Or => true,
_ => false,
}
}
pub fn is_shift(&self) -> bool {
match *self {
BinOpKind::Shl | BinOpKind::Shr => true,
_ => false,
}
}
pub fn is_comparison(&self) -> bool {
use self::BinOpKind::*;
match *self {
Eq | Lt | Le | Ne | Gt | Ge => true,
And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
}
}
/// Returns `true` if the binary operator takes its arguments by value
pub fn is_by_value(&self) -> bool {
!self.is_comparison()
}
}
pub type BinOp = Spanned<BinOpKind>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref,
/// The `!` operator for logical inversion
Not,
/// The `-` operator for negation
Neg,
}
impl UnOp {
/// Returns `true` if the unary operator takes its argument by value
pub fn is_by_value(u: UnOp) -> bool {
match u {
UnOp::Neg | UnOp::Not => true,
_ => false,
}
}
pub fn to_string(op: UnOp) -> &'static str {
match op {
UnOp::Deref => "*",
UnOp::Not => "!",
UnOp::Neg => "-",
}
}
}
/// A statement
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Stmt {
pub id: NodeId,
pub node: StmtKind,
pub span: Span,
}
impl Stmt {
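    /// Convert a trailing-expression statement into a semicolon-terminated
    /// statement; macro statements switch to `MacStmtStyle::Semicolon`, and
    /// all other statement kinds are returned unchanged.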
pub fn add_trailing_semicolon(mut self) -> Self {
self.node = match self.node {
StmtKind::Expr(expr) => StmtKind::Semi(expr),
StmtKind::Mac(mac) => {
StmtKind::Mac(mac.map(|(mac, _style, attrs)| (mac, MacStmtStyle::Semicolon, attrs)))
}
node => node,
};
self
}
pub fn is_item(&self) -> bool {
match self.node {
StmtKind::Item(_) => true,
_ => false,
}
}
pub fn is_expr(&self) -> bool {
match self.node {
StmtKind::Expr(_) => true,
_ => false,
}
}
}
impl fmt::Debug for Stmt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"stmt({}: {})",
self.id.to_string(),
pprust::stmt_to_string(self)
)
}
}
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum StmtKind {
/// A local (let) binding.
Local(P<Local>),
/// An item definition.
Item(P<Item>),
/// Expr without trailing semi-colon.
Expr(P<Expr>),
/// Expr with a trailing semi-colon.
Semi(P<Expr>),
/// Macro.
Mac(P<(Mac, MacStmtStyle, ThinVec<Attribute>)>),
}
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum MacStmtStyle {
/// The macro statement had a trailing semicolon, e.g. `foo! { ... };`
/// `foo!(...);`, `foo![...];`
Semicolon,
/// The macro statement had braces; e.g. foo! { ... }
Braces,
/// The macro statement had parentheses or brackets and no semicolon; e.g.
/// `foo!(...)`. All of these will end up being converted into macro
/// expressions.
NoBraces,
}
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Local {
pub pat: P<Pat>,
pub ty: Option<P<Ty>>,
/// Initializer expression to set the value, if any
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
pub attrs: ThinVec<Attribute>,
}
/// An arm of a 'match'.
///
/// E.g. `0..=10 => { println!("match!") }` as in
///
/// ```
/// match 123 {
/// 0..=10 => { println!("match!") },
/// _ => { println!("no match!") },
/// }
/// ```
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Arm {
pub attrs: Vec<Attribute>,
pub pats: Vec<P<Pat>>,
pub guard: Option<Guard>,
pub body: P<Expr>,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum Guard {
If(P<Expr>),
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Field {
pub ident: Ident,
pub expr: P<Expr>,
pub span: Span,
pub is_shorthand: bool,
pub attrs: ThinVec<Attribute>,
}
pub type SpannedIdent = Spanned<Ident>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum BlockCheckMode {
Default,
Unsafe(UnsafeSource),
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum UnsafeSource {
CompilerGenerated,
UserProvided,
}
/// A constant (expression) that's not an item or associated item,
/// but needs its own `DefId` for type-checking, const-eval, etc.
/// These are usually found nested inside types (e.g. array lengths)
/// or expressions (e.g. repeat counts), and also used to define
/// explicit discriminant values for enum variants.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct AnonConst {
pub id: NodeId,
pub value: P<Expr>,
}
/// An expression
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Expr {
pub id: NodeId,
pub node: ExprKind,
pub span: Span,
pub attrs: ThinVec<Attribute>,
}
// `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 88);
impl Expr {
/// Whether this expression would be valid somewhere that expects a value, for example, an `if`
/// condition.
pub fn returns(&self) -> bool {
if let ExprKind::Block(ref block, _) = self.node {
match block.stmts.last().map(|last_stmt| &last_stmt.node) {
// implicit return
Some(&StmtKind::Expr(_)) => true,
Some(&StmtKind::Semi(ref expr)) => {
if let ExprKind::Ret(_) = expr.node {
// last statement is explicit return
true
} else {
false
}
}
// This is a block that doesn't end in either an implicit or explicit return
_ => false,
}
} else {
// This is not a block, it is a value
true
}
}
fn to_bound(&self) -> Option<GenericBound> {
match &self.node {
ExprKind::Path(None, path) => Some(GenericBound::Trait(
PolyTraitRef::new(Vec::new(), path.clone(), self.span),
TraitBoundModifier::None,
)),
_ => None,
}
}
pub(super) fn to_ty(&self) -> Option<P<Ty>> {
let node = match &self.node {
ExprKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()),
ExprKind::Mac(mac) => TyKind::Mac(mac.clone()),
ExprKind::Paren(expr) => expr.to_ty().map(TyKind::Paren)?,
ExprKind::AddrOf(mutbl, expr) => expr
.to_ty()
.map(|ty| TyKind::Rptr(None, MutTy { ty, mutbl: *mutbl }))?,
ExprKind::Repeat(expr, expr_len) => {
expr.to_ty().map(|ty| TyKind::Array(ty, expr_len.clone()))?
}
ExprKind::Array(exprs) if exprs.len() == 1 => exprs[0].to_ty().map(TyKind::Slice)?,
ExprKind::Tup(exprs) => {
let tys = exprs
.iter()
.map(|expr| expr.to_ty())
.collect::<Option<Vec<_>>>()?;
TyKind::Tup(tys)
}
ExprKind::Binary(binop, lhs, rhs) if binop.node == BinOpKind::Add => {
if let (Some(lhs), Some(rhs)) = (lhs.to_bound(), rhs.to_bound()) {
TyKind::TraitObject(vec![lhs, rhs], TraitObjectSyntax::None)
} else {
return None;
}
}
_ => return None,
};
Some(P(Ty {
node,
id: self.id,
span: self.span,
}))
}
pub fn precedence(&self) -> ExprPrecedence {
match self.node {
ExprKind::Box(_) => ExprPrecedence::Box,
ExprKind::ObsoleteInPlace(..) => ExprPrecedence::ObsoleteInPlace,
ExprKind::Array(_) => ExprPrecedence::Array,
ExprKind::Call(..) => ExprPrecedence::Call,
ExprKind::MethodCall(..) => ExprPrecedence::MethodCall,
ExprKind::Tup(_) => ExprPrecedence::Tup,
ExprKind::Binary(op, ..) => ExprPrecedence::Binary(op.node),
ExprKind::Unary(..) => ExprPrecedence::Unary,
ExprKind::Lit(_) => ExprPrecedence::Lit,
ExprKind::Type(..) | ExprKind::Cast(..) => ExprPrecedence::Cast,
ExprKind::If(..) => ExprPrecedence::If,
ExprKind::IfLet(..) => ExprPrecedence::IfLet,
ExprKind::While(..) => ExprPrecedence::While,
ExprKind::WhileLet(..) => ExprPrecedence::WhileLet,
ExprKind::ForLoop(..) => ExprPrecedence::ForLoop,
ExprKind::Loop(..) => ExprPrecedence::Loop,
ExprKind::Match(..) => ExprPrecedence::Match,
ExprKind::Closure(..) => ExprPrecedence::Closure,
ExprKind::Block(..) => ExprPrecedence::Block,
ExprKind::TryBlock(..) => ExprPrecedence::TryBlock,
ExprKind::Async(..) => ExprPrecedence::Async,
ExprKind::Assign(..) => ExprPrecedence::Assign,
ExprKind::AssignOp(..) => ExprPrecedence::AssignOp,
ExprKind::Field(..) => ExprPrecedence::Field,
ExprKind::Index(..) => ExprPrecedence::Index,
ExprKind::Range(..) => ExprPrecedence::Range,
ExprKind::Path(..) => ExprPrecedence::Path,
ExprKind::AddrOf(..) => ExprPrecedence::AddrOf,
ExprKind::Break(..) => ExprPrecedence::Break,
ExprKind::Continue(..) => ExprPrecedence::Continue,
ExprKind::Ret(..) => ExprPrecedence::Ret,
ExprKind::InlineAsm(..) => ExprPrecedence::InlineAsm,
ExprKind::Mac(..) => ExprPrecedence::Mac,
ExprKind::Struct(..) => ExprPrecedence::Struct,
ExprKind::Repeat(..) => ExprPrecedence::Repeat,
ExprKind::Paren(..) => ExprPrecedence::Paren,
ExprKind::Try(..) => ExprPrecedence::Try,
ExprKind::Yield(..) => ExprPrecedence::Yield,
}
}
}
impl fmt::Debug for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self))
}
}
/// Limit types of a range (inclusive or exclusive)
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum RangeLimits {
/// Inclusive at the beginning, exclusive at the end
HalfOpen,
/// Inclusive at the beginning and end
Closed,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ExprKind {
/// A `box x` expression.
Box(P<Expr>),
/// First expr is the place; second expr is the value.
ObsoleteInPlace(P<Expr>, P<Expr>),
/// An array (`[a, b, c, d]`)
Array(Vec<P<Expr>>),
/// A function call
///
/// The first field resolves to the function itself,
/// and the second field is the list of arguments.
/// This also represents calling the constructor of
/// tuple-like ADTs such as tuple structs and enum variants.
Call(P<Expr>, Vec<P<Expr>>),
/// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`)
///
/// The `PathSegment` represents the method name and its generic arguments
/// (within the angle brackets).
/// The first element of the vector of `Expr`s is the expression that evaluates
/// to the object on which the method is being called on (the receiver),
/// and the remaining elements are the rest of the arguments.
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`.
MethodCall(PathSegment, Vec<P<Expr>>),
/// A tuple (`(a, b, c ,d)`)
Tup(Vec<P<Expr>>),
/// A binary operation (For example: `a + b`, `a * b`)
Binary(BinOp, P<Expr>, P<Expr>),
/// A unary operation (For example: `!x`, `*x`)
Unary(UnOp, P<Expr>),
/// A literal (For example: `1`, `"foo"`)
Lit(Lit),
/// A cast (`foo as f64`)
Cast(P<Expr>, P<Ty>),
Type(P<Expr>, P<Ty>),
/// An `if` block, with an optional else block
///
/// `if expr { block } else { expr }`
If(P<Expr>, P<Block>, Option<P<Expr>>),
/// An `if let` expression with an optional else block
///
/// `if let pat = expr { block } else { expr }`
///
/// This is desugared to a `match` expression.
IfLet(Vec<P<Pat>>, P<Expr>, P<Block>, Option<P<Expr>>),
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
While(P<Expr>, P<Block>, Option<Label>),
/// A while-let loop, with an optional label
///
/// `'label: while let pat = expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
WhileLet(Vec<P<Pat>>, P<Expr>, P<Block>, Option<Label>),
/// A for loop, with an optional label
///
/// `'label: for pat in expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
ForLoop(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
Loop(P<Block>, Option<Label>),
/// A `match` block.
Match(P<Expr>, Vec<Arm>),
/// A closure (for example, `move |a, b, c| a + b + c`)
///
/// The final span is the span of the argument block `|...|`
Closure(CaptureBy, IsAsync, Movability, P<FnDecl>, P<Expr>, Span),
/// A block (`'label: { ... }`)
Block(P<Block>, Option<Label>),
/// An async block (`async move { ... }`)
///
/// The `NodeId` is the `NodeId` for the closure that results from
/// desugaring an async block, just like the NodeId field in the
/// `IsAsync` enum. This is necessary in order to create a def for the
/// closure which can be used as a parent of any child defs. Defs
/// created during lowering cannot be made the parent of any other
/// preexisting defs.
Async(CaptureBy, NodeId, P<Block>),
/// A try block (`try { ... }`)
TryBlock(P<Block>),
/// An assignment (`a = foo()`)
Assign(P<Expr>, P<Expr>),
/// An assignment with an operator
///
/// For example, `a += 1`.
AssignOp(BinOp, P<Expr>, P<Expr>),
/// Access of a named (`obj.foo`) or unnamed (`obj.0`) struct field
Field(P<Expr>, Ident),
/// An indexing operation (`foo[2]`)
Index(P<Expr>, P<Expr>),
/// A range (`1..2`, `1..`, `..2`, `1...2`, `1...`, `...2`)
Range(Option<P<Expr>>, Option<P<Expr>>, RangeLimits),
/// Variable reference, possibly containing `::` and/or type
    /// parameters, e.g. `foo::bar::<baz>`.
///
/// Optionally "qualified",
/// E.g. `<Vec<T> as SomeTrait>::SomeType`.
Path(Option<QSelf>, Path),
/// A referencing operation (`&a` or `&mut a`)
AddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break, and an optional expression
Break(Option<Label>, Option<P<Expr>>),
/// A `continue`, with an optional label
Continue(Option<Label>),
/// A `return`, with an optional value to be returned
Ret(Option<P<Expr>>),
/// Output of the `asm!()` macro
InlineAsm(P<InlineAsm>),
/// A macro invocation; pre-expansion
Mac(Mac),
/// A struct literal expression.
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
Struct(Path, Vec<Field>, Option<P<Expr>>),
/// An array literal constructed from one repeated element.
///
/// For example, `[1; 5]`. The expression is the element to be
/// repeated; the constant is the number of times to repeat it.
Repeat(P<Expr>, AnonConst),
/// No-op: used solely so we can pretty-print faithfully
Paren(P<Expr>),
/// `expr?`
Try(P<Expr>),
/// A `yield`, with an optional value to be yielded
Yield(Option<P<Expr>>),
}
/// The explicit Self type in a "qualified path". The actual
/// path, including the trait and the associated item, is stored
/// separately. `position` represents the index of the associated
/// item qualified with this Self type.
///
/// ```ignore (only-for-syntax-highlight)
/// <Vec<T> as a::b::Trait>::AssociatedItem
/// ^~~~~ ~~~~~~~~~~~~~~^
/// ty position = 3
///
/// <Vec<T>>::AssociatedItem
/// ^~~~~ ^
/// ty position = 0
/// ```
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct QSelf {
pub ty: P<Ty>,
/// The span of `a::b::Trait` in a path like `<Vec<T> as
/// a::b::Trait>::AssociatedItem`; in the case where `position ==
/// 0`, this is an empty span.
pub path_span: Span,
pub position: usize,
}
/// A capture clause
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum CaptureBy {
Value,
Ref,
}
/// The movability of a generator / closure literal
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum Movability {
Static,
Movable,
}
pub type Mac = Spanned<Mac_>;
/// Represents a macro invocation. The Path indicates which macro
/// is being invoked, and the vector of token-trees contains the source
/// of the macro invocation.
///
/// NB: the additional ident for a macro_rules-style macro is actually
/// stored in the enclosing item. Oog.
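///
/// For example (illustrative): in `foo!(a, b)`, the `path` is `foo`, the `delim` is
/// `Parenthesis`, and `tts` holds the tokens `a, b`.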
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Mac_ {
pub path: Path,
pub delim: MacDelimiter,
pub tts: ThinTokenStream,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
pub enum MacDelimiter {
Parenthesis,
Bracket,
Brace,
}
impl Mac_ {
pub fn stream(&self) -> TokenStream {
self.tts.clone().into()
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MacroDef {
pub tokens: ThinTokenStream,
pub legacy: bool,
}
impl MacroDef {
pub fn stream(&self) -> TokenStream {
self.tokens.clone().into()
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)]
pub enum StrStyle {
/// A regular string, like `"foo"`
Cooked,
/// A raw string, like `r##"foo"##`
///
/// The value is the number of `#` symbols used.
Raw(u16),
}
/// A literal
pub type Lit = Spanned<LitKind>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)]
pub enum LitIntType {
Signed(IntTy),
Unsigned(UintTy),
Unsuffixed,
}
/// Literal kind.
///
/// E.g. `"foo"`, `42`, `12.34` or `bool`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Hash, PartialEq)]
pub enum LitKind {
/// A string literal (`"foo"`)
Str(Symbol, StrStyle),
/// A byte string (`b"foo"`)
ByteStr(Lrc<Vec<u8>>),
/// A byte char (`b'f'`)
Byte(u8),
/// A character literal (`'a'`)
Char(char),
/// An integer literal (`1`)
Int(u128, LitIntType),
/// A float literal (`1f64` or `1E10f64`)
Float(Symbol, FloatTy),
/// A float literal without a suffix (`1.0 or 1.0E10`)
FloatUnsuffixed(Symbol),
/// A boolean literal
Bool(bool),
}
impl LitKind {
/// Returns true if this literal is a string and false otherwise.
pub fn is_str(&self) -> bool {
match *self {
LitKind::Str(..) => true,
_ => false,
}
}
    /// Returns true if this literal is a byte string literal, false otherwise.
pub fn is_bytestr(&self) -> bool {
match self {
LitKind::ByteStr(_) => true,
_ => false,
}
}
/// Returns true if this is a numeric literal.
pub fn is_numeric(&self) -> bool {
match *self {
LitKind::Int(..) | LitKind::Float(..) | LitKind::FloatUnsuffixed(..) => true,
_ => false,
}
}
/// Returns true if this literal has no suffix. Note: this will return true
/// for literals with prefixes such as raw strings and byte strings.
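    /// For example (illustrative): `"foo"`, `b"foo"`, `'a'`, `1` and `true` are unsuffixed,
    /// while `1u8` and `1.0f64` are suffixed.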
pub fn is_unsuffixed(&self) -> bool {
match *self {
// unsuffixed variants
LitKind::Str(..)
| LitKind::ByteStr(..)
| LitKind::Byte(..)
| LitKind::Char(..)
| LitKind::Int(_, LitIntType::Unsuffixed)
| LitKind::FloatUnsuffixed(..)
| LitKind::Bool(..) => true,
// suffixed variants
LitKind::Int(_, LitIntType::Signed(..))
| LitKind::Int(_, LitIntType::Unsigned(..))
| LitKind::Float(..) => false,
}
}
/// Returns true if this literal has a suffix.
pub fn is_suffixed(&self) -> bool {
!self.is_unsuffixed()
}
}
// NB: If you change this, you'll probably want to change the corresponding
// type structure in middle/ty.rs as well.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MutTy {
pub ty: P<Ty>,
pub mutbl: Mutability,
}
/// Represents a method's signature in a trait declaration,
/// or in an implementation.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MethodSig {
pub header: FnHeader,
pub decl: P<FnDecl>,
}
/// Represents an item declaration within a trait declaration,
/// possibly including a default implementation. A trait item is
/// either required (meaning it doesn't have an implementation, just a
/// signature) or provided (meaning it has a default implementation).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TraitItem {
pub id: NodeId,
pub ident: Ident,
pub attrs: Vec<Attribute>,
pub generics: Generics,
pub node: TraitItemKind,
pub span: Span,
/// See `Item::tokens` for what this is
pub tokens: Option<TokenStream>,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum TraitItemKind {
Const(P<Ty>, Option<P<Expr>>),
Method(MethodSig, Option<P<Block>>),
Type(GenericBounds, Option<P<Ty>>),
Macro(Mac),
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ImplItem {
pub id: NodeId,
pub ident: Ident,
pub vis: Visibility,
pub defaultness: Defaultness,
pub attrs: Vec<Attribute>,
pub generics: Generics,
pub node: ImplItemKind,
pub span: Span,
/// See `Item::tokens` for what this is
pub tokens: Option<TokenStream>,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ImplItemKind {
Const(P<Ty>, P<Expr>),
Method(MethodSig, P<Block>),
Type(P<Ty>),
Existential(GenericBounds),
Macro(Mac),
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Copy)]
pub enum IntTy {
Isize,
I8,
I16,
I32,
I64,
I128,
}
impl fmt::Debug for IntTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl fmt::Display for IntTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.ty_to_string())
}
}
impl IntTy {
pub fn ty_to_string(&self) -> &'static str {
match *self {
IntTy::Isize => "isize",
IntTy::I8 => "i8",
IntTy::I16 => "i16",
IntTy::I32 => "i32",
IntTy::I64 => "i64",
IntTy::I128 => "i128",
}
}
pub fn val_to_string(&self, val: i128) -> String {
// cast to a u128 so we can correctly print INT128_MIN. All integral types
// are parsed as u128, so we wouldn't want to print an extra negative
// sign.
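        // For example (illustrative): a literal with magnitude 2^127 is stored bitwise as
        // `i128::MIN`; `val as u128` recovers that magnitude, so this prints
        // "170141183460469231731687303715884105728i128" rather than a negative number.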
format!("{}{}", val as u128, self.ty_to_string())
}
pub fn bit_width(&self) -> Option<usize> {
Some(match *self {
IntTy::Isize => return None,
IntTy::I8 => 8,
IntTy::I16 => 16,
IntTy::I32 => 32,
IntTy::I64 => 64,
IntTy::I128 => 128,
})
}
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Copy)]
pub enum UintTy {
Usize,
U8,
U16,
U32,
U64,
U128,
}
impl UintTy {
pub fn ty_to_string(&self) -> &'static str {
match *self {
UintTy::Usize => "usize",
UintTy::U8 => "u8",
UintTy::U16 => "u16",
UintTy::U32 => "u32",
UintTy::U64 => "u64",
UintTy::U128 => "u128",
}
}
pub fn val_to_string(&self, val: u128) -> String {
format!("{}{}", val, self.ty_to_string())
}
pub fn bit_width(&self) -> Option<usize> {
Some(match *self {
UintTy::Usize => return None,
UintTy::U8 => 8,
UintTy::U16 => 16,
UintTy::U32 => 32,
UintTy::U64 => 64,
UintTy::U128 => 128,
})
}
}
impl fmt::Debug for UintTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl fmt::Display for UintTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.ty_to_string())
}
}
// Bind a type to an associated type: `A=Foo`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TypeBinding {
pub id: NodeId,
pub ident: Ident,
pub ty: P<Ty>,
pub span: Span,
}
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Ty {
pub id: NodeId,
pub node: TyKind,
pub span: Span,
}
impl fmt::Debug for Ty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "type({})", pprust::ty_to_string(self))
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct BareFnTy {
pub unsafety: Unsafety,
pub abi: Abi,
pub generic_params: Vec<GenericParam>,
pub decl: P<FnDecl>,
}
/// The different kinds of types recognized by the compiler
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum TyKind {
/// A variable-length slice (`[T]`)
Slice(P<Ty>),
/// A fixed length array (`[T; n]`)
Array(P<Ty>, AnonConst),
/// A raw pointer (`*const T` or `*mut T`)
Ptr(MutTy),
/// A reference (`&'a T` or `&'a mut T`)
Rptr(Option<Lifetime>, MutTy),
/// A bare function (e.g. `fn(usize) -> bool`)
BareFn(P<BareFnTy>),
/// The never type (`!`)
Never,
/// A tuple (`(A, B, C, D,...)`)
Tup(Vec<P<Ty>>),
/// A path (`module::module::...::Type`), optionally
/// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`.
///
/// Type parameters are stored in the Path itself
Path(Option<QSelf>, Path),
/// A trait object type `Bound1 + Bound2 + Bound3`
/// where `Bound` is a trait or a lifetime.
TraitObject(GenericBounds, TraitObjectSyntax),
/// An `impl Bound1 + Bound2 + Bound3` type
/// where `Bound` is a trait or a lifetime.
///
/// The `NodeId` exists to prevent lowering from having to
/// generate `NodeId`s on the fly, which would complicate
/// the generation of `existential type` items significantly
ImplTrait(NodeId, GenericBounds),
/// No-op; kept solely so that we can pretty-print faithfully
Paren(P<Ty>),
/// Unused for now
Typeof(AnonConst),
/// TyKind::Infer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
Infer,
/// Inferred type of a `self` or `&self` argument in a method.
ImplicitSelf,
// A macro in the type position.
Mac(Mac),
/// Placeholder for a kind that has failed to be defined.
Err,
}
impl TyKind {
pub fn is_implicit_self(&self) -> bool {
if let TyKind::ImplicitSelf = *self {
true
} else {
false
}
}
pub fn is_unit(&self) -> bool {
if let TyKind::Tup(ref tys) = *self {
tys.is_empty()
} else {
false
}
}
}
/// Syntax used to declare a trait object.
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum TraitObjectSyntax {
Dyn,
None,
}
/// Inline assembly dialect.
///
/// E.g. `"intel"` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")`
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum AsmDialect {
Att,
Intel,
}
/// Inline assembly.
///
/// E.g. `"={eax}"(result)` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct InlineAsmOutput {
pub constraint: Symbol,
pub expr: P<Expr>,
pub is_rw: bool,
pub is_indirect: bool,
}
/// Inline assembly.
///
/// E.g. `asm!("NOP");`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct InlineAsm {
pub asm: Symbol,
pub asm_str_style: StrStyle,
pub outputs: Vec<InlineAsmOutput>,
pub inputs: Vec<(Symbol, P<Expr>)>,
pub clobbers: Vec<Symbol>,
pub volatile: bool,
pub alignstack: bool,
pub dialect: AsmDialect,
pub ctxt: SyntaxContext,
}
/// An argument in a function header.
///
/// E.g. `bar: usize` as in `fn foo(bar: usize)`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Arg {
pub ty: P<Ty>,
pub pat: P<Pat>,
pub id: NodeId,
}
/// Alternative representation for `Arg`s describing the `self` parameter of methods.
///
/// E.g. `&mut self` as in `fn foo(&mut self)`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum SelfKind {
/// `self`, `mut self`
Value(Mutability),
/// `&'lt self`, `&'lt mut self`
Region(Option<Lifetime>, Mutability),
/// `self: TYPE`, `mut self: TYPE`
Explicit(P<Ty>, Mutability),
}
pub type ExplicitSelf = Spanned<SelfKind>;
impl Arg {
pub fn to_self(&self) -> Option<ExplicitSelf> {
if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node {
if ident.name == keywords::SelfLower.name() {
return match self.ty.node {
TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
TyKind::Rptr(lt, MutTy { ref ty, mutbl }) if ty.node.is_implicit_self() => {
Some(respan(self.pat.span, SelfKind::Region(lt, mutbl)))
}
_ => Some(respan(
self.pat.span.to(self.ty.span),
SelfKind::Explicit(self.ty.clone(), mutbl),
)),
};
}
}
None
}
pub fn is_self(&self) -> bool {
if let PatKind::Ident(_, ident, _) = self.pat.node {
ident.name == keywords::SelfLower.name()
} else {
false
}
}
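    /// Builds an `Arg` for a `self` parameter from its `ExplicitSelf` form and the `self`
    /// identifier, synthesizing the pattern and (implicit-self or reference) type with
    /// `DUMMY_NODE_ID`s.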
pub fn from_self(eself: ExplicitSelf, eself_ident: Ident) -> Arg {
let span = eself.span.to(eself_ident.span);
let infer_ty = P(Ty {
id: DUMMY_NODE_ID,
node: TyKind::ImplicitSelf,
span,
});
let arg = |mutbl, ty| Arg {
pat: P(Pat {
id: DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None),
span,
}),
ty,
id: DUMMY_NODE_ID,
};
match eself.node {
SelfKind::Explicit(ty, mutbl) => arg(mutbl, ty),
SelfKind::Value(mutbl) => arg(mutbl, infer_ty),
SelfKind::Region(lt, mutbl) => arg(
Mutability::Immutable,
P(Ty {
id: DUMMY_NODE_ID,
node: TyKind::Rptr(
lt,
MutTy {
ty: infer_ty,
                            mutbl,
},
),
span,
}),
),
}
}
}
/// Header (not the body) of a function declaration.
///
/// E.g. `fn foo(bar: baz)`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct FnDecl {
pub inputs: Vec<Arg>,
pub output: FunctionRetTy,
pub variadic: bool,
}
impl FnDecl {
pub fn get_self(&self) -> Option<ExplicitSelf> {
self.inputs.get(0).and_then(Arg::to_self)
}
pub fn has_self(&self) -> bool {
self.inputs.get(0).map(Arg::is_self).unwrap_or(false)
}
}
/// Is the trait definition an auto trait?
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum IsAuto {
Yes,
No,
}
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum Unsafety {
Unsafe,
Normal,
}
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum IsAsync {
Async {
closure_id: NodeId,
return_impl_trait_id: NodeId,
},
NotAsync,
}
impl IsAsync {
pub fn is_async(self) -> bool {
if let IsAsync::Async { .. } = self {
true
} else {
false
}
}
    /// If this is an `Async`, returns the `NodeId` for the generated `impl Trait` item.
pub fn opt_return_id(self) -> Option<NodeId> {
match self {
IsAsync::Async {
return_impl_trait_id,
..
} => Some(return_impl_trait_id),
IsAsync::NotAsync => None,
}
}
}
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum Constness {
Const,
NotConst,
}
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum Defaultness {
Default,
Final,
}
impl fmt::Display for Unsafety {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(
match *self {
Unsafety::Normal => "normal",
Unsafety::Unsafe => "unsafe",
},
f,
)
}
}
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum ImplPolarity {
/// `impl Trait for Type`
Positive,
/// `impl !Trait for Type`
Negative,
}
impl fmt::Debug for ImplPolarity {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ImplPolarity::Positive => "positive".fmt(f),
ImplPolarity::Negative => "negative".fmt(f),
}
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum FunctionRetTy {
/// Return type is not specified.
///
/// Functions default to `()` and
/// closures default to inference. Span points to where return
/// type would be inserted.
Default(Span),
/// Everything else
Ty(P<Ty>),
}
impl FunctionRetTy {
pub fn span(&self) -> Span {
match *self {
FunctionRetTy::Default(span) => span,
FunctionRetTy::Ty(ref ty) => ty.span,
}
}
}
/// Module declaration.
///
/// E.g. `mod foo;` or `mod foo { .. }`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Mod {
    /// A span from the first token past `{` to the last token before `}`.
/// For `mod foo;`, the inner span ranges from the first token
/// to the last token in the external file.
pub inner: Span,
pub items: Vec<P<Item>>,
/// For `mod foo;` inline is false, for `mod foo { .. }` it is true.
pub inline: bool,
}
/// Foreign module declaration.
///
/// E.g. `extern { .. }` or `extern "C" { .. }`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ForeignMod {
pub abi: Abi,
pub items: Vec<ForeignItem>,
}
/// Global inline assembly
///
/// aka module-level assembly or file-scoped assembly
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub struct GlobalAsm {
pub asm: Symbol,
pub ctxt: SyntaxContext,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct EnumDef {
pub variants: Vec<Variant>,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Variant_ {
pub ident: Ident,
pub attrs: Vec<Attribute>,
pub data: VariantData,
/// Explicit discriminant, e.g. `Foo = 1`
pub disr_expr: Option<AnonConst>,
}
pub type Variant = Spanned<Variant_>;
/// Part of `use` item to the right of its prefix.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum UseTreeKind {
/// `use prefix` or `use prefix as rename`
///
/// The extra `NodeId`s are for HIR lowering, when additional statements are created for each
/// namespace.
Simple(Option<Ident>, NodeId, NodeId),
/// `use prefix::{...}`
Nested(Vec<(UseTree, NodeId)>),
/// `use prefix::*`
Glob,
}
/// A tree of paths sharing common prefixes.
/// Used in `use` items both at top-level and inside of braces in import groups.
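///
/// For example (illustrative): `use a::b::{c, d as e, f::*};` is a `UseTree` whose kind is
/// `Nested`, containing `Simple` entries for `c` and `d as e` and a `Glob` for `f::*`.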
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct UseTree {
pub prefix: Path,
pub kind: UseTreeKind,
pub span: Span,
}
impl UseTree {
pub fn ident(&self) -> Ident {
match self.kind {
UseTreeKind::Simple(Some(rename), ..) => rename,
UseTreeKind::Simple(None, ..) => {
self.prefix
.segments
.last()
.expect("empty prefix in a simple import")
.ident
}
_ => panic!("`UseTree::ident` can only be used on a simple import"),
}
}
}
/// Distinguishes between Attributes that decorate items and Attributes that
/// are contained as statements within items. These two cases need to be
/// distinguished for pretty-printing.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum AttrStyle {
Outer,
Inner,
}
#[derive(
Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, PartialOrd, Ord, Copy,
)]
pub struct AttrId(pub usize);
impl Idx for AttrId {
fn new(idx: usize) -> Self {
AttrId(idx)
}
fn index(self) -> usize {
self.0
}
}
/// Meta-data associated with an item
/// Doc-comments are promoted to attributes that have `is_sugared_doc = true`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Attribute {
pub id: AttrId,
pub style: AttrStyle,
pub path: Path,
pub tokens: TokenStream,
pub is_sugared_doc: bool,
pub span: Span,
}
/// TraitRef's appear in impls.
///
/// resolve maps each TraitRef's ref_id to its defining trait; that's all
/// that the ref_id is for. The impl_id maps to the "self type" of this impl.
/// If this impl is an ItemKind::Impl, the impl_id is redundant (it could be the
/// same as the impl's node id).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TraitRef {
pub path: Path,
pub ref_id: NodeId,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct PolyTraitRef {
/// The `'a` in `<'a> Foo<&'a T>`
pub bound_generic_params: Vec<GenericParam>,
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
pub trait_ref: TraitRef,
pub span: Span,
}
impl PolyTraitRef {
pub fn new(generic_params: Vec<GenericParam>, path: Path, span: Span) -> Self {
PolyTraitRef {
bound_generic_params: generic_params,
trait_ref: TraitRef {
                path,
ref_id: DUMMY_NODE_ID,
},
span,
}
}
}
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum CrateSugar {
/// Source is `pub(crate)`
PubCrate,
/// Source is (just) `crate`
JustCrate,
}
pub type Visibility = Spanned<VisibilityKind>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum VisibilityKind {
Public,
Crate(CrateSugar),
Restricted { path: P<Path>, id: NodeId },
Inherited,
}
impl VisibilityKind {
pub fn is_pub(&self) -> bool {
if let VisibilityKind::Public = *self {
true
} else {
false
}
}
}
/// Field of a struct.
///
/// E.g. `bar: usize` as in `struct Foo { bar: usize }`
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct StructField {
pub span: Span,
pub ident: Option<Ident>,
pub vis: Visibility,
pub id: NodeId,
pub ty: P<Ty>,
pub attrs: Vec<Attribute>,
}
/// Fields and Ids of enum variants and structs
///
/// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all
/// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants).
/// One shared Id can be successfully used for these two purposes.
/// Id of the whole enum lives in `Item`.
///
/// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually
/// used for `Struct`-structs (but is still present). Structures don't have an analogue of "Id of
/// the variant itself" from enum variants.
/// Id of the whole struct lives in `Item`.
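///
/// For example (illustrative): in `enum Foo { Bar(u8) }`, `Bar(u8)` is a `Tuple` variant and
/// its `NodeId` also serves as the id of the constructor `Foo::Bar`.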
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum VariantData {
/// Struct variant.
///
/// E.g. `Bar { .. }` as in `enum Foo { Bar { .. } }`
Struct(Vec<StructField>, NodeId),
/// Tuple variant.
///
/// E.g. `Bar(..)` as in `enum Foo { Bar(..) }`
Tuple(Vec<StructField>, NodeId),
/// Unit variant.
///
/// E.g. `Bar = ..` as in `enum Foo { Bar = .. }`
Unit(NodeId),
}
impl VariantData {
pub fn fields(&self) -> &[StructField] {
match *self {
VariantData::Struct(ref fields, _) | VariantData::Tuple(ref fields, _) => fields,
_ => &[],
}
}
pub fn id(&self) -> NodeId {
match *self {
VariantData::Struct(_, id) | VariantData::Tuple(_, id) | VariantData::Unit(id) => id,
}
}
pub fn is_struct(&self) -> bool {
if let VariantData::Struct(..) = *self {
true
} else {
false
}
}
pub fn is_tuple(&self) -> bool {
if let VariantData::Tuple(..) = *self {
true
} else {
false
}
}
pub fn is_unit(&self) -> bool {
if let VariantData::Unit(..) = *self {
true
} else {
false
}
}
}
/// An item
///
/// The name might be a dummy name in case of anonymous items
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>,
pub id: NodeId,
pub node: ItemKind,
pub vis: Visibility,
pub span: Span,
/// Original tokens this item was parsed from. This isn't necessarily
/// available for all items, although over time more and more items should
/// have this be `Some`. Right now this is primarily used for procedural
/// macros, notably custom attributes.
///
/// Note that the tokens here do not include the outer attributes, but will
/// include inner attributes.
pub tokens: Option<TokenStream>,
}
/// A function header
///
/// All the information between the visibility & the name of the function is
/// included in this struct (e.g. `async unsafe fn` or `const extern "C" fn`)
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
pub struct FnHeader {
pub unsafety: Unsafety,
pub asyncness: IsAsync,
pub constness: Spanned<Constness>,
pub abi: Abi,
}
impl Default for FnHeader {
fn default() -> FnHeader {
FnHeader {
unsafety: Unsafety::Normal,
asyncness: IsAsync::NotAsync,
constness: dummy_spanned(Constness::NotConst),
abi: Abi::Rust,
}
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ItemKind {
/// An `extern crate` item, with optional *original* crate name if the crate was renamed.
///
/// E.g. `extern crate foo` or `extern crate foo_bar as foo`
ExternCrate(Option<Name>),
/// A use declaration (`use` or `pub use`) item.
///
/// E.g. `use foo;`, `use foo::bar;` or `use foo::bar as FooBar;`
Use(P<UseTree>),
/// A static item (`static` or `pub static`).
///
/// E.g. `static FOO: i32 = 42;` or `static FOO: &'static str = "bar";`
Static(P<Ty>, Mutability, P<Expr>),
/// A constant item (`const` or `pub const`).
///
/// E.g. `const FOO: i32 = 42;`
Const(P<Ty>, P<Expr>),
/// A function declaration (`fn` or `pub fn`).
///
/// E.g. `fn foo(bar: usize) -> usize { .. }`
Fn(P<FnDecl>, FnHeader, Generics, P<Block>),
/// A module declaration (`mod` or `pub mod`).
///
/// E.g. `mod foo;` or `mod foo { .. }`
Mod(Mod),
/// An external module (`extern` or `pub extern`).
///
/// E.g. `extern {}` or `extern "C" {}`
ForeignMod(ForeignMod),
/// Module-level inline assembly (from `global_asm!()`)
GlobalAsm(P<GlobalAsm>),
/// A type alias (`type` or `pub type`).
///
/// E.g. `type Foo = Bar<u8>;`
Ty(P<Ty>, Generics),
/// An existential type declaration (`existential type`).
///
/// E.g. `existential type Foo: Bar + Boo;`
Existential(GenericBounds, Generics),
/// An enum definition (`enum` or `pub enum`).
///
/// E.g. `enum Foo<A, B> { C<A>, D<B> }`
Enum(EnumDef, Generics),
/// A struct definition (`struct` or `pub struct`).
///
/// E.g. `struct Foo<A> { x: A }`
Struct(VariantData, Generics),
/// A union definition (`union` or `pub union`).
///
/// E.g. `union Foo<A, B> { x: A, y: B }`
Union(VariantData, Generics),
/// A Trait declaration (`trait` or `pub trait`).
///
/// E.g. `trait Foo { .. }`, `trait Foo<T> { .. }` or `auto trait Foo {}`
Trait(IsAuto, Unsafety, Generics, GenericBounds, Vec<TraitItem>),
/// Trait alias
///
/// E.g. `trait Foo = Bar + Quux;`
TraitAlias(Generics, GenericBounds),
/// An implementation.
///
/// E.g. `impl<A> Foo<A> { .. }` or `impl<A> Trait for Foo<A> { .. }`
Impl(
Unsafety,
ImplPolarity,
Defaultness,
Generics,
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
Vec<ImplItem>,
),
/// A macro invocation.
///
/// E.g. `macro_rules! foo { .. }` or `foo!(..)`
Mac(Mac),
/// A macro definition.
MacroDef(MacroDef),
}
impl ItemKind {
pub fn descriptive_variant(&self) -> &str {
match *self {
ItemKind::ExternCrate(..) => "extern crate",
ItemKind::Use(..) => "use",
ItemKind::Static(..) => "static item",
ItemKind::Const(..) => "constant item",
ItemKind::Fn(..) => "function",
ItemKind::Mod(..) => "module",
ItemKind::ForeignMod(..) => "foreign module",
ItemKind::GlobalAsm(..) => "global asm",
ItemKind::Ty(..) => "type alias",
ItemKind::Existential(..) => "existential type",
ItemKind::Enum(..) => "enum",
ItemKind::Struct(..) => "struct",
ItemKind::Union(..) => "union",
ItemKind::Trait(..) => "trait",
ItemKind::TraitAlias(..) => "trait alias",
ItemKind::Mac(..) | ItemKind::MacroDef(..) | ItemKind::Impl(..) => "item",
}
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ForeignItem {
pub ident: Ident,
pub attrs: Vec<Attribute>,
pub node: ForeignItemKind,
pub id: NodeId,
pub span: Span,
pub vis: Visibility,
}
/// An item within an `extern` block
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ForeignItemKind {
/// A foreign function
Fn(P<FnDecl>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
/// (the boolean is true when mutable)
Static(P<Ty>, bool),
/// A foreign type
Ty,
/// A macro invocation
Macro(Mac),
}
impl ForeignItemKind {
pub fn descriptive_variant(&self) -> &str {
match *self {
ForeignItemKind::Fn(..) => "foreign function",
ForeignItemKind::Static(..) => "foreign static item",
ForeignItemKind::Ty => "foreign type",
ForeignItemKind::Macro(..) => "macro in foreign module",
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use serialize;
// are ASTs encodable?
#[test]
fn check_asts_encodable() {
fn assert_encodable<T: serialize::Encodable>() {}
assert_encodable::<Crate>();
}
}
| 29.458226 | 100 | 0.576255 |
7143d7652ff9b607792012c71f10bf2f14fdeca8 | 416 | //! Dataset processing toolkit.
mod cached;
mod coco_;
mod csv;
mod dataset;
mod iii;
mod mem_cache;
mod on_demand;
mod record;
mod sanitized;
mod streaming;
mod utils;
mod voc;
pub use self::csv::*;
pub use cached::*;
pub use coco_::*;
pub use dataset::*;
pub use iii::*;
pub use mem_cache::*;
pub use on_demand::*;
pub use record::*;
pub use sanitized::*;
pub use streaming::*;
pub use utils::*;
pub use voc::*;
| 14.857143 | 31 | 0.682692 |
3808f2a44bc18148bc620503ec688e8bf27e6302 | 38,859 | use std::convert::TryFrom;
use std::io::{self, BufRead, BufReader, Cursor, Read};
use std::str::{self, FromStr};
use std::fmt::Display;
use std::marker::PhantomData;
use std::mem;
use super::{ArbitraryHeader, ArbitraryTuplType, BitmapHeader, GraymapHeader, PixmapHeader};
use super::{HeaderRecord, PNMHeader, PNMSubtype, SampleEncoding};
use crate::color::{ColorType, ExtendedColorType};
use crate::error::{ImageError, ImageResult};
use crate::image::{self, ImageDecoder};
use crate::utils;
use byteorder::{BigEndian, ByteOrder, NativeEndian};
/// Dynamic representation, represents all decodable (sample, depth) combinations.
#[derive(Clone, Copy)]
enum TupleType {
PbmBit,
BWBit,
GrayU8,
GrayU16,
RGBU8,
RGBU16,
}
trait Sample {
fn bytelen(width: u32, height: u32, samples: u32) -> ImageResult<usize>;
/// It is guaranteed that `bytes.len() == bytelen(width, height, samples)`
fn from_bytes(bytes: &[u8], width: u32, height: u32, samples: u32)
-> ImageResult<Vec<u8>>;
fn from_ascii(reader: &mut dyn Read, width: u32, height: u32, samples: u32)
-> ImageResult<Vec<u8>>;
}
struct U8;
struct U16;
struct PbmBit;
struct BWBit;
trait DecodableImageHeader {
fn tuple_type(&self) -> ImageResult<TupleType>;
}
/// PNM decoder
pub struct PnmDecoder<R> {
reader: BufReader<R>,
header: PNMHeader,
tuple: TupleType,
}
impl<R: Read> PnmDecoder<R> {
/// Create a new decoder that decodes from the stream ```read```
pub fn new(read: R) -> ImageResult<PnmDecoder<R>> {
let mut buf = BufReader::new(read);
let magic = buf.read_magic_constant()?;
if magic[0] != b'P' {
return Err(ImageError::FormatError(
format!("Expected magic constant for pnm, P1 through P7 instead of {:?}", magic),
));
}
let subtype = match magic[1] {
b'1' => PNMSubtype::Bitmap(SampleEncoding::Ascii),
b'2' => PNMSubtype::Graymap(SampleEncoding::Ascii),
b'3' => PNMSubtype::Pixmap(SampleEncoding::Ascii),
b'4' => PNMSubtype::Bitmap(SampleEncoding::Binary),
b'5' => PNMSubtype::Graymap(SampleEncoding::Binary),
b'6' => PNMSubtype::Pixmap(SampleEncoding::Binary),
b'7' => PNMSubtype::ArbitraryMap,
_ => {
return Err(ImageError::FormatError(
format!("Expected magic constant for pnm, P1 through P7 instead of {:?}", magic),
));
}
};
match subtype {
PNMSubtype::Bitmap(enc) => PnmDecoder::read_bitmap_header(buf, enc),
PNMSubtype::Graymap(enc) => PnmDecoder::read_graymap_header(buf, enc),
PNMSubtype::Pixmap(enc) => PnmDecoder::read_pixmap_header(buf, enc),
PNMSubtype::ArbitraryMap => PnmDecoder::read_arbitrary_header(buf),
}
}
/// Extract the reader and header after an image has been read.
pub fn into_inner(self) -> (R, PNMHeader) {
(self.reader.into_inner(), self.header)
}
fn read_bitmap_header(
mut reader: BufReader<R>,
encoding: SampleEncoding,
) -> ImageResult<PnmDecoder<R>> {
let header = reader.read_bitmap_header(encoding)?;
Ok(PnmDecoder {
reader,
tuple: TupleType::PbmBit,
header: PNMHeader {
decoded: HeaderRecord::Bitmap(header),
encoded: None,
},
})
}
fn read_graymap_header(
mut reader: BufReader<R>,
encoding: SampleEncoding,
) -> ImageResult<PnmDecoder<R>> {
let header = reader.read_graymap_header(encoding)?;
let tuple_type = header.tuple_type()?;
Ok(PnmDecoder {
reader,
tuple: tuple_type,
header: PNMHeader {
decoded: HeaderRecord::Graymap(header),
encoded: None,
},
})
}
fn read_pixmap_header(
mut reader: BufReader<R>,
encoding: SampleEncoding,
) -> ImageResult<PnmDecoder<R>> {
let header = reader.read_pixmap_header(encoding)?;
let tuple_type = header.tuple_type()?;
Ok(PnmDecoder {
reader,
tuple: tuple_type,
header: PNMHeader {
decoded: HeaderRecord::Pixmap(header),
encoded: None,
},
})
}
fn read_arbitrary_header(mut reader: BufReader<R>) -> ImageResult<PnmDecoder<R>> {
let header = reader.read_arbitrary_header()?;
let tuple_type = header.tuple_type()?;
Ok(PnmDecoder {
reader,
tuple: tuple_type,
header: PNMHeader {
decoded: HeaderRecord::Arbitrary(header),
encoded: None,
},
})
}
}
trait HeaderReader: BufRead {
/// Reads the two magic constant bytes
fn read_magic_constant(&mut self) -> ImageResult<[u8; 2]> {
let mut magic: [u8; 2] = [0, 0];
self.read_exact(&mut magic)
.map_err(ImageError::IoError)?;
Ok(magic)
}
/// Reads a string as well as a single whitespace after it, ignoring comments
fn read_next_string(&mut self) -> ImageResult<String> {
let mut bytes = Vec::new();
// pair input bytes with a bool mask to remove comments
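        // `partof` is true while we are outside a comment: a `#` switches it off and a line
        // break switches it back on; the filter below then drops the masked-out bytes.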
let mark_comments = self.bytes().scan(true, |partof, read| {
let byte = match read {
Err(err) => return Some((*partof, Err(err))),
Ok(byte) => byte,
};
let cur_enabled = *partof && byte != b'#';
let next_enabled = cur_enabled || (byte == b'\r' || byte == b'\n');
*partof = next_enabled;
Some((cur_enabled, Ok(byte)))
});
for (_, byte) in mark_comments.filter(|ref e| e.0) {
match byte {
Ok(b'\t') | Ok(b'\n') | Ok(b'\x0b') | Ok(b'\x0c') | Ok(b'\r') | Ok(b' ') => {
if !bytes.is_empty() {
break; // We're done as we already have some content
}
}
Ok(byte) if !byte.is_ascii() => {
return Err(ImageError::FormatError(
format!("Non ascii character {} in header", byte),
));
},
Ok(byte) => {
bytes.push(byte);
},
Err(_) => break,
}
}
if bytes.is_empty() {
return Err(ImageError::IoError(io::ErrorKind::UnexpectedEof.into()));
}
if !bytes.as_slice().is_ascii() {
// We have only filled the buffer with characters for which `byte.is_ascii()` holds.
unreachable!("Non ascii character should have returned sooner")
}
let string = String::from_utf8(bytes)
// We checked the precondition ourselves a few lines before, `bytes.as_slice().is_ascii()`.
.unwrap_or_else(|_| unreachable!("Only ascii characters should be decoded"));
Ok(string)
}
/// Read the next line
fn read_next_line(&mut self) -> ImageResult<String> {
let mut buffer = String::new();
self.read_line(&mut buffer)
.map_err(ImageError::IoError)?;
Ok(buffer)
}
fn read_next_u32(&mut self) -> ImageResult<u32> {
let s = self.read_next_string()?;
s.parse::<u32>()
.map_err(|err| ImageError::FormatError(
format!("Error parsing number {} in preamble: {}", s, err)
))
}
fn read_bitmap_header(&mut self, encoding: SampleEncoding) -> ImageResult<BitmapHeader> {
let width = self.read_next_u32()?;
let height = self.read_next_u32()?;
Ok(BitmapHeader {
encoding,
width,
height,
})
}
fn read_graymap_header(&mut self, encoding: SampleEncoding) -> ImageResult<GraymapHeader> {
self.read_pixmap_header(encoding).map(
|PixmapHeader {
encoding,
width,
height,
maxval,
}| GraymapHeader {
encoding,
width,
height,
maxwhite: maxval,
},
)
}
fn read_pixmap_header(&mut self, encoding: SampleEncoding) -> ImageResult<PixmapHeader> {
let width = self.read_next_u32()?;
let height = self.read_next_u32()?;
let maxval = self.read_next_u32()?;
Ok(PixmapHeader {
encoding,
width,
height,
maxval,
})
}
fn read_arbitrary_header(&mut self) -> ImageResult<ArbitraryHeader> {
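        // A P7 ("PAM") header looks roughly like this (illustrative, mirroring the tests at
        // the bottom of this file):
        //
        //   P7
        //   WIDTH 4
        //   HEIGHT 4
        //   DEPTH 1
        //   MAXVAL 255
        //   TUPLTYPE GRAYSCALE
        //   ENDHDR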
match self.bytes().next() {
None => return Err(ImageError::IoError(io::ErrorKind::UnexpectedEof.into())),
Some(Err(io)) => return Err(ImageError::IoError(io)),
Some(Ok(b'\n')) => (),
Some(Ok(c)) => {
return Err(ImageError::FormatError(
format!("Expected newline after P7 magic instead of {}", c),
))
}
}
let mut line = String::new();
let mut height: Option<u32> = None;
let mut width: Option<u32> = None;
let mut depth: Option<u32> = None;
let mut maxval: Option<u32> = None;
let mut tupltype: Option<String> = None;
loop {
line.truncate(0);
let len = self.read_line(&mut line).map_err(ImageError::IoError)?;
if len == 0 {
return Err(ImageError::FormatError(
"Unexpected end of pnm header".to_string(),
))
}
if line.as_bytes()[0] == b'#' {
continue;
}
if !line.is_ascii() {
return Err(ImageError::FormatError(
"Only ascii characters allowed in pam header".to_string(),
));
}
#[allow(deprecated)]
let (identifier, rest) = line.trim_left()
.split_at(line.find(char::is_whitespace).unwrap_or_else(|| line.len()));
match identifier {
"ENDHDR" => break,
"HEIGHT" => if height.is_some() {
return Err(ImageError::FormatError("Duplicate HEIGHT line".to_string()));
} else {
let h = rest.trim()
.parse::<u32>()
.map_err(|err| ImageError::FormatError(
format!("Invalid height {}: {}", rest, err)
))?;
height = Some(h);
},
"WIDTH" => if width.is_some() {
return Err(ImageError::FormatError("Duplicate WIDTH line".to_string()));
} else {
let w = rest.trim()
.parse::<u32>()
.map_err(|err| ImageError::FormatError(
format!("Invalid width {}: {}", rest, err)
))?;
width = Some(w);
},
"DEPTH" => if depth.is_some() {
return Err(ImageError::FormatError("Duplicate DEPTH line".to_string()));
} else {
let d = rest.trim()
.parse::<u32>()
.map_err(|err| ImageError::FormatError(
format!("Invalid depth {}: {}", rest, err)
))?;
depth = Some(d);
},
"MAXVAL" => if maxval.is_some() {
return Err(ImageError::FormatError("Duplicate MAXVAL line".to_string()));
} else {
let m = rest.trim()
.parse::<u32>()
.map_err(|err| ImageError::FormatError(
format!("Invalid maxval {}: {}", rest, err)
))?;
maxval = Some(m);
},
"TUPLTYPE" => {
let identifier = rest.trim();
if tupltype.is_some() {
let appended = tupltype.take().map(|mut v| {
v.push(' ');
v.push_str(identifier);
v
});
tupltype = appended;
} else {
tupltype = Some(identifier.to_string());
}
}
_ => return Err(ImageError::FormatError("Unknown header line".to_string())),
}
}
let (h, w, d, m) = match (height, width, depth, maxval) {
(None, _, _, _) => {
return Err(ImageError::FormatError(
"Expected one HEIGHT line".to_string(),
))
}
(_, None, _, _) => {
return Err(ImageError::FormatError(
"Expected one WIDTH line".to_string(),
))
}
(_, _, None, _) => {
return Err(ImageError::FormatError(
"Expected one DEPTH line".to_string(),
))
}
(_, _, _, None) => {
return Err(ImageError::FormatError(
"Expected one MAXVAL line".to_string(),
))
}
(Some(h), Some(w), Some(d), Some(m)) => (h, w, d, m),
};
let tupltype = match tupltype {
None => None,
Some(ref t) if t == "BLACKANDWHITE" => Some(ArbitraryTuplType::BlackAndWhite),
Some(ref t) if t == "BLACKANDWHITE_ALPHA" => {
Some(ArbitraryTuplType::BlackAndWhiteAlpha)
}
Some(ref t) if t == "GRAYSCALE" => Some(ArbitraryTuplType::Grayscale),
Some(ref t) if t == "GRAYSCALE_ALPHA" => Some(ArbitraryTuplType::GrayscaleAlpha),
Some(ref t) if t == "RGB" => Some(ArbitraryTuplType::RGB),
Some(ref t) if t == "RGB_ALPHA" => Some(ArbitraryTuplType::RGBAlpha),
Some(other) => Some(ArbitraryTuplType::Custom(other)),
};
Ok(ArbitraryHeader {
height: h,
width: w,
depth: d,
maxval: m,
tupltype,
})
}
}
impl<R: Read> HeaderReader for BufReader<R> {}
/// Wrapper struct around a `Cursor<Vec<u8>>`
pub struct PnmReader<R>(Cursor<Vec<u8>>, PhantomData<R>);
impl<R> Read for PnmReader<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.0.read(buf)
}
fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
if self.0.position() == 0 && buf.is_empty() {
mem::swap(buf, self.0.get_mut());
Ok(buf.len())
} else {
self.0.read_to_end(buf)
}
}
}
impl<'a, R: 'a + Read> ImageDecoder<'a> for PnmDecoder<R> {
type Reader = PnmReader<R>;
fn dimensions(&self) -> (u32, u32) {
(self.header.width(), self.header.height())
}
fn color_type(&self) -> ColorType {
match self.tuple {
TupleType::PbmBit => ColorType::L8,
TupleType::BWBit => ColorType::L8,
TupleType::GrayU8 => ColorType::L8,
TupleType::GrayU16 => ColorType::L16,
TupleType::RGBU8 => ColorType::Rgb8,
TupleType::RGBU16 => ColorType::Rgb16,
}
}
fn original_color_type(&self) -> ExtendedColorType {
match self.tuple {
TupleType::PbmBit => ExtendedColorType::L1,
TupleType::BWBit => ExtendedColorType::L1,
TupleType::GrayU8 => ExtendedColorType::L8,
TupleType::GrayU16 => ExtendedColorType::L16,
TupleType::RGBU8 => ExtendedColorType::Rgb8,
TupleType::RGBU16 => ExtendedColorType::Rgb16,
}
}
fn into_reader(self) -> ImageResult<Self::Reader> {
Ok(PnmReader(Cursor::new(image::decoder_to_vec(self)?), PhantomData))
}
fn read_image(mut self, buf: &mut [u8]) -> ImageResult<()> {
assert_eq!(u64::try_from(buf.len()), Ok(self.total_bytes()));
buf.copy_from_slice(&match self.tuple {
TupleType::PbmBit => self.read_samples::<PbmBit>(1),
TupleType::BWBit => self.read_samples::<BWBit>(1),
TupleType::RGBU8 => self.read_samples::<U8>(3),
TupleType::RGBU16 => self.read_samples::<U16>(3),
TupleType::GrayU8 => self.read_samples::<U8>(1),
TupleType::GrayU16 => self.read_samples::<U16>(1),
}?);
Ok(())
}
}
impl<R: Read> PnmDecoder<R> {
fn read_samples<S: Sample>(&mut self, components: u32) -> ImageResult<Vec<u8>> {
match self.subtype().sample_encoding() {
SampleEncoding::Binary => {
let width = self.header.width();
let height = self.header.height();
let bytecount = S::bytelen(width, height, components)?;
                let mut bytes = vec![0u8; bytecount];
(&mut self.reader)
.read_exact(&mut bytes)
.map_err(|_| ImageError::NotEnoughData)?;
let samples = S::from_bytes(&bytes, width, height, components)?;
Ok(samples)
}
SampleEncoding::Ascii => {
let samples = self.read_ascii::<S>(components)?;
Ok(samples)
}
}
}
fn read_ascii<Basic: Sample>(&mut self, components: u32) -> ImageResult<Vec<u8>> {
Basic::from_ascii(&mut self.reader, self.header.width(), self.header.height(), components)
}
/// Get the pnm subtype, depending on the magic constant contained in the header
pub fn subtype(&self) -> PNMSubtype {
self.header.subtype()
}
}
fn read_separated_ascii<T: FromStr>(reader: &mut dyn Read) -> ImageResult<T>
where T::Err: Display
{
let is_separator = |v: &u8| match *v {
b'\t' | b'\n' | b'\x0b' | b'\x0c' | b'\r' | b' ' => true,
_ => false,
};
let token = reader
.bytes()
.skip_while(|v| v.as_ref().ok().map(&is_separator).unwrap_or(false))
.take_while(|v| v.as_ref().ok().map(|c| !is_separator(c)).unwrap_or(false))
.collect::<Result<Vec<u8>, _>>()?;
if !token.is_ascii() {
return Err(ImageError::FormatError(
"Non ascii character where sample value was expected".to_string(),
));
}
let string = str::from_utf8(&token)
// We checked the precondition ourselves a few lines before, `token.is_ascii()`.
.unwrap_or_else(|_| unreachable!("Only ascii characters should be decoded"));
string
.parse()
.map_err(|err| ImageError::FormatError(format!("Error parsing {} as a sample: {}", string, err)))
}
impl Sample for U8 {
fn bytelen(width: u32, height: u32, samples: u32) -> ImageResult<usize> {
Ok((width * height * samples) as usize)
}
fn from_bytes(
bytes: &[u8],
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
assert_eq!(bytes.len(), Self::bytelen(width, height, samples).unwrap());
Ok(bytes.to_vec())
}
fn from_ascii(
reader: &mut dyn Read,
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
(0..width*height*samples)
.map(|_| read_separated_ascii(reader))
.collect()
}
}
impl Sample for U16 {
fn bytelen(width: u32, height: u32, samples: u32) -> ImageResult<usize> {
Ok((width * height * samples * 2) as usize)
}
fn from_bytes(
bytes: &[u8],
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
assert_eq!(bytes.len(), Self::bytelen(width, height, samples).unwrap());
let mut buffer = bytes.to_vec();
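        // Binary PNM stores 16-bit samples big-endian; rewrite each sample in place as
        // native-endian so the output buffer matches the platform's byte order.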
for chunk in buffer.chunks_mut(2) {
let v = BigEndian::read_u16(chunk);
NativeEndian::write_u16(chunk, v);
}
Ok(buffer)
}
fn from_ascii(
reader: &mut dyn Read,
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
let mut buffer = vec![0; (width * height * samples * 2) as usize];
for i in 0..(width*height*samples) as usize {
let v = read_separated_ascii::<u16>(reader)?;
NativeEndian::write_u16(&mut buffer[2*i..][..2], v);
}
Ok(buffer)
}
}
// The image is encoded in rows of bits, high order bits first. Any bits beyond the row bits should
// be ignored. Also, contrary to rgb, black pixels are encoded as a 1 while white is 0. This will
// need to be reversed for the grayscale output.
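// For example (mirroring the `pbm_binary` test below): a 6-pixel row stored as the byte
// `0b0110_1100` expands to the bits `[0, 1, 1, 0, 1, 1]` (the two padding bits are dropped),
// which after scaling and inversion becomes the luma samples `[255, 0, 0, 255, 0, 0]`.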
impl Sample for PbmBit {
fn bytelen(width: u32, height: u32, samples: u32) -> ImageResult<usize> {
let count = width * samples;
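        // Each row is padded up to a whole byte, i.e. linelen = ceil(count / 8).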
let linelen = (count / 8) + ((count % 8) != 0) as u32;
Ok((linelen * height) as usize)
}
fn from_bytes(
bytes: &[u8],
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
assert_eq!(bytes.len(), Self::bytelen(width, height, samples).unwrap());
let mut expanded = utils::expand_bits(1, width * samples, bytes);
for b in expanded.iter_mut() {
*b = !*b;
}
Ok(expanded)
}
fn from_ascii(
reader: &mut dyn Read,
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
let count = (width*height*samples) as usize;
let raw_samples = reader.bytes()
.filter_map(|ascii| match ascii {
Ok(b'0') => Some(Ok(1)),
Ok(b'1') => Some(Ok(0)),
Err(err) => Some(Err(ImageError::IoError(err))),
Ok(b'\t')
| Ok(b'\n')
| Ok(b'\x0b')
| Ok(b'\x0c')
| Ok(b'\r')
| Ok(b' ') => None,
Ok(c) => Some(Err(ImageError::FormatError(
format!("Unexpected character {} within sample raster", c),
))),
})
.take(count)
.collect::<ImageResult<Vec<u8>>>()?;
if raw_samples.len() < count {
return Err(ImageError::NotEnoughData)
}
Ok(raw_samples)
}
}
// Encoded just like a normal U8 but we check the values.
impl Sample for BWBit {
fn bytelen(width: u32, height: u32, samples: u32) -> ImageResult<usize> {
U8::bytelen(width, height, samples)
}
fn from_bytes(
bytes: &[u8],
width: u32,
height: u32,
samples: u32,
) -> ImageResult<Vec<u8>> {
assert_eq!(bytes.len(), Self::bytelen(width, height, samples).unwrap());
let values = U8::from_bytes(bytes, width, height, samples)?;
if let Some(val) = values.iter().find(|&val| *val > 1) {
return Err(ImageError::FormatError(
format!("Sample value {} outside of bounds", val),
));
};
Ok(values)
}
fn from_ascii(
_reader: &mut dyn Read,
_width: u32,
_height: u32,
_samples: u32,
) -> ImageResult<Vec<u8>> {
unreachable!("BW bits from anymaps are never encoded as ascii")
}
}
impl DecodableImageHeader for BitmapHeader {
fn tuple_type(&self) -> ImageResult<TupleType> {
Ok(TupleType::PbmBit)
}
}
impl DecodableImageHeader for GraymapHeader {
fn tuple_type(&self) -> ImageResult<TupleType> {
match self.maxwhite {
v if v <= 0xFF => Ok(TupleType::GrayU8),
v if v <= 0xFFFF => Ok(TupleType::GrayU16),
_ => Err(ImageError::FormatError(
"Image maxval is not less or equal to 65535".to_string(),
)),
}
}
}
impl DecodableImageHeader for PixmapHeader {
fn tuple_type(&self) -> ImageResult<TupleType> {
match self.maxval {
v if v <= 0xFF => Ok(TupleType::RGBU8),
v if v <= 0xFFFF => Ok(TupleType::RGBU16),
_ => Err(ImageError::FormatError(
"Image maxval is not less or equal to 65535".to_string(),
)),
}
}
}
impl DecodableImageHeader for ArbitraryHeader {
fn tuple_type(&self) -> ImageResult<TupleType> {
match self.tupltype {
None if self.depth == 1 => Ok(TupleType::GrayU8),
None if self.depth == 2 => Err(ImageError::UnsupportedColor(ExtendedColorType::La8)),
None if self.depth == 3 => Ok(TupleType::RGBU8),
None if self.depth == 4 => Err(ImageError::UnsupportedColor(ExtendedColorType::Rgba8)),
Some(ArbitraryTuplType::BlackAndWhite) if self.maxval == 1 && self.depth == 1 => {
Ok(TupleType::BWBit)
}
Some(ArbitraryTuplType::BlackAndWhite) => Err(ImageError::FormatError(
"Invalid depth or maxval for tuple type BLACKANDWHITE".to_string(),
)),
Some(ArbitraryTuplType::Grayscale) if self.depth == 1 && self.maxval <= 0xFF => {
Ok(TupleType::GrayU8)
}
Some(ArbitraryTuplType::Grayscale) if self.depth <= 1 && self.maxval <= 0xFFFF => {
Ok(TupleType::GrayU16)
}
Some(ArbitraryTuplType::Grayscale) => Err(ImageError::FormatError(
"Invalid depth or maxval for tuple type GRAYSCALE".to_string(),
)),
Some(ArbitraryTuplType::RGB) if self.depth == 3 && self.maxval <= 0xFF => {
Ok(TupleType::RGBU8)
}
Some(ArbitraryTuplType::RGB) if self.depth == 3 && self.maxval <= 0xFFFF => {
Ok(TupleType::RGBU16)
}
Some(ArbitraryTuplType::RGB) => Err(ImageError::FormatError(
"Invalid depth for tuple type RGB".to_string(),
)),
Some(ArbitraryTuplType::BlackAndWhiteAlpha) => Err(ImageError::FormatError(
"Unsupported color type: BlackAndWhiteAlpha".to_string()
)),
Some(ArbitraryTuplType::GrayscaleAlpha) => {
Err(ImageError::UnsupportedColor(ExtendedColorType::La8))
}
Some(ArbitraryTuplType::RGBAlpha) => {
Err(ImageError::UnsupportedColor(ExtendedColorType::Rgba8))
}
_ => Err(ImageError::FormatError(
"Tuple type not recognized".to_string(),
)),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
/// Tests reading of a valid blackandwhite pam
#[test]
fn pam_blackandwhite() {
let pamdata = b"P7
WIDTH 4
HEIGHT 4
DEPTH 1
MAXVAL 1
TUPLTYPE BLACKANDWHITE
# Comment line
ENDHDR
\x01\x00\x00\x01\x01\x00\x00\x01\x01\x00\x00\x01\x01\x00\x00\x01";
let decoder = PnmDecoder::new(&pamdata[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.original_color_type(), ExtendedColorType::L1);
assert_eq!(decoder.dimensions(), (4, 4));
assert_eq!(decoder.subtype(), PNMSubtype::ArbitraryMap);
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(
image,
vec![0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00,
0x00, 0x01]
);
match PnmDecoder::new(&pamdata[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Arbitrary(ArbitraryHeader {
width: 4,
height: 4,
maxval: 1,
depth: 1,
tupltype: Some(ArbitraryTuplType::BlackAndWhite),
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
/// Tests reading of a valid grayscale pam
#[test]
fn pam_grayscale() {
let pamdata = b"P7
WIDTH 4
HEIGHT 4
DEPTH 1
MAXVAL 255
TUPLTYPE GRAYSCALE
# Comment line
ENDHDR
\xde\xad\xbe\xef\xde\xad\xbe\xef\xde\xad\xbe\xef\xde\xad\xbe\xef";
let decoder = PnmDecoder::new(&pamdata[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.dimensions(), (4, 4));
assert_eq!(decoder.subtype(), PNMSubtype::ArbitraryMap);
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(
image,
vec![0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad,
0xbe, 0xef]
);
match PnmDecoder::new(&pamdata[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Arbitrary(ArbitraryHeader {
width: 4,
height: 4,
depth: 1,
maxval: 255,
tupltype: Some(ArbitraryTuplType::Grayscale),
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
/// Tests reading of a valid rgb pam
#[test]
fn pam_rgb() {
let pamdata = b"P7
# Comment line
MAXVAL 255
TUPLTYPE RGB
DEPTH 3
WIDTH 2
HEIGHT 2
ENDHDR
\xde\xad\xbe\xef\xde\xad\xbe\xef\xde\xad\xbe\xef";
let decoder = PnmDecoder::new(&pamdata[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::Rgb8);
assert_eq!(decoder.dimensions(), (2, 2));
assert_eq!(decoder.subtype(), PNMSubtype::ArbitraryMap);
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(image,
vec![0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef]);
match PnmDecoder::new(&pamdata[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Arbitrary(ArbitraryHeader {
maxval: 255,
tupltype: Some(ArbitraryTuplType::RGB),
depth: 3,
width: 2,
height: 2,
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
#[test]
fn pbm_binary() {
// The data contains two rows of the image (each line is padded to the full byte). For
// comments on its format, see documentation of `impl SampleType for PbmBit`.
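        // The high six bits of each byte hold one row; a 1 bit decodes to 0 (black)
        // and a 0 bit to 255 (white), matching the expected pixels asserted below.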
let pbmbinary = [&b"P4 6 2\n"[..], &[0b01101100 as u8, 0b10110111]].concat();
let decoder = PnmDecoder::new(&pbmbinary[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.original_color_type(), ExtendedColorType::L1);
assert_eq!(decoder.dimensions(), (6, 2));
assert_eq!(
decoder.subtype(),
PNMSubtype::Bitmap(SampleEncoding::Binary)
);
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(image, vec![255, 0, 0, 255, 0, 0, 0, 255, 0, 0, 255, 0]);
match PnmDecoder::new(&pbmbinary[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Bitmap(BitmapHeader {
encoding: SampleEncoding::Binary,
width: 6,
height: 2,
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
    /// Regression test for a previous infinite loop.
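    /// `FailRead` returns an error instead of `Ok(0)` at end of input, so the
    /// decoder has to bail out with an error rather than spin waiting for more samples.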
#[test]
fn pbm_binary_ascii_termination() {
use std::io::{Cursor, Error, ErrorKind, Read, Result};
struct FailRead(Cursor<&'static [u8]>);
impl Read for FailRead {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
match self.0.read(buf) {
Ok(n) if n > 0 => Ok(n),
_ => Err(Error::new(
ErrorKind::BrokenPipe,
"Simulated broken pipe error"
)),
}
}
}
let pbmbinary = FailRead(Cursor::new(b"P1 1 1\n"));
let decoder = PnmDecoder::new(pbmbinary).unwrap();
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).expect_err("Image is malformed");
}
#[test]
fn pbm_ascii() {
        // The data contains two rows of the image, encoded as ASCII `0`/`1` samples. Tests all
        // whitespace characters that should be allowed as separators (the 6 characters according
        // to POSIX).
let pbmbinary = b"P1 6 2\n 0 1 1 0 1 1\n1 0 1 1 0\t\n\x0b\x0c\r1";
let decoder = PnmDecoder::new(&pbmbinary[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.original_color_type(), ExtendedColorType::L1);
assert_eq!(decoder.dimensions(), (6, 2));
assert_eq!(decoder.subtype(), PNMSubtype::Bitmap(SampleEncoding::Ascii));
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(image, vec![1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0]);
match PnmDecoder::new(&pbmbinary[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Bitmap(BitmapHeader {
encoding: SampleEncoding::Ascii,
width: 6,
height: 2,
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
#[test]
fn pbm_ascii_nospace() {
        // The data contains two rows of the image, encoded as ASCII `0`/`1` samples. Notably,
        // it is completely within specification for the ascii data of the pbm format not to
        // contain any separating whitespace, or to mix separated and unseparated samples.
let pbmbinary = b"P1 6 2\n011011101101";
let decoder = PnmDecoder::new(&pbmbinary[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.original_color_type(), ExtendedColorType::L1);
assert_eq!(decoder.dimensions(), (6, 2));
assert_eq!(decoder.subtype(), PNMSubtype::Bitmap(SampleEncoding::Ascii));
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(image, vec![1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0]);
match PnmDecoder::new(&pbmbinary[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Bitmap(BitmapHeader {
encoding: SampleEncoding::Ascii,
width: 6,
height: 2,
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
#[test]
fn pgm_binary() {
        // The data contains the sixteen samples of a 4x4 grayscale image, one byte per
        // sample, following the binary (P5) graymap header.
let elements = (0..16).collect::<Vec<_>>();
let pbmbinary = [&b"P5 4 4 255\n"[..], &elements].concat();
let decoder = PnmDecoder::new(&pbmbinary[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.dimensions(), (4, 4));
assert_eq!(
decoder.subtype(),
PNMSubtype::Graymap(SampleEncoding::Binary)
);
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(image, elements);
match PnmDecoder::new(&pbmbinary[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Graymap(GraymapHeader {
encoding: SampleEncoding::Binary,
width: 4,
height: 4,
maxwhite: 255,
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
#[test]
fn pgm_ascii() {
        // The data contains the sixteen samples of a 4x4 grayscale image as ASCII decimal
        // values, following the plain (P2) graymap header.
let pbmbinary = b"P2 4 4 255\n 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15";
let decoder = PnmDecoder::new(&pbmbinary[..]).unwrap();
assert_eq!(decoder.color_type(), ColorType::L8);
assert_eq!(decoder.dimensions(), (4, 4));
assert_eq!(
decoder.subtype(),
PNMSubtype::Graymap(SampleEncoding::Ascii)
);
let mut image = vec![0; decoder.total_bytes() as usize];
decoder.read_image(&mut image).unwrap();
assert_eq!(image, (0..16).collect::<Vec<_>>());
match PnmDecoder::new(&pbmbinary[..]).unwrap().into_inner() {
(
_,
PNMHeader {
decoded:
HeaderRecord::Graymap(GraymapHeader {
encoding: SampleEncoding::Ascii,
width: 4,
height: 4,
maxwhite: 255,
}),
encoded: _,
},
) => (),
_ => panic!("Decoded header is incorrect"),
}
}
}
| 35.650459 | 105 | 0.508479 |
2148987648bb6472b6477098d06a7c92bd75d735 | 1,654 | extern crate ctest;
use std::env;
use std::path::PathBuf;
fn main() {
let target = env::var("TARGET").unwrap();
let root = PathBuf::from(env::var_os("DEP_NGHTTP2_ROOT").unwrap());
let mut cfg = ctest::TestGenerator::new();
// Apparently MSVC doesn't have `ssize_t` defined as a type
if target.contains("msvc") {
match env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap().as_str() {
"64" => {
cfg.define("ssize_t", Some("int64_t"));
}
"32" => {
cfg.define("ssize_t", Some("int32_t"));
}
s => panic!("unknown pointer size: {}", s),
}
}
cfg.header("nghttp2/nghttp2.h")
.include(root.join("include"))
.type_name(|n, _is_struct, _is_union| n.to_string())
.skip_struct(|name| {
// TODO: dox
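            // These are opaque structs in nghttp2's public API, only ever
            // handled behind pointers, so there is no field layout to check.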
name == "nghttp2_session"
|| name == "nghttp2_rcbuf"
|| name == "nghttp2_session_callbacks"
|| name == "nghttp2_option"
|| name == "nghttp2_hd_deflater"
|| name == "nghttp2_hd_inflater"
|| name == "nghttp2_stream"
})
.field_name(|_struct, field| {
if field == "type_" {
"type".to_string()
} else {
field.to_string()
}
})
.skip_signededness(move |_ty| {
            // skip signedness checks on MSVC since lots of enums switch
            // signedness there, and it doesn't really matter that much anyway
target.contains("msvc")
});
cfg.generate("../src/lib.rs", "all.rs");
}
| 31.807692 | 79 | 0.5 |
d76f2ff06720110a674eee46e58f5b60843f5aae | 8,161 | // Copyright (c) 2016-2020 Fabian Schuiki
//! Object declarations
//!
//! This includes constant, signal, variable, shared variable, and file
//! declarations.
use crate::common::errors::*;
use crate::common::score::Result;
use crate::common::source::Spanned;
use crate::add_ctx::AddContext;
use crate::hir;
use crate::score::*;
use crate::syntax::ast;
use crate::ty::*;
impl<'sbc, 'lazy, 'sb, 'ast, 'ctx> AddContext<'sbc, 'lazy, 'sb, 'ast, 'ctx> {
/// Add a constant declaration.
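    ///
    /// Lowers each declared name to its own HIR node and schedules a type
    /// check that any initial value matches the declared subtype.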
pub fn add_const_decl<I>(&self, decl: &'ast ast::ObjDecl) -> Result<Vec<I>>
where
I: From<ConstDeclRef>,
{
let ty = self.add_subtype_ind(&decl.subtype)?;
let init = self.add_optional(&decl.init, AddContext::add_expr)?;
self.ctx
.set_type_context_optional(init, TypeCtx::TypeOf(ty.into()));
if let Some(Spanned { span, .. }) = decl.detail {
self.emit(DiagBuilder2::error("expected `:=` or `;`").span(span));
}
decl.names
.iter()
.map(|dn| {
let (mk, id, scope) = self.make::<ConstDeclRef>(dn.span);
mk.lower_to_hir(Box::new(move |_sbc| {
Ok(hir::Decl {
parent: scope,
span: dn.span,
name: (*dn).into(),
decl: hir::ConstDecl { ty: ty, init: init },
})
}));
mk.typeval(Box::new(move |tyc| {
let hir = tyc.ctx.lazy_hir(id)?;
let ty = tyc.lazy_typeval(hir.decl.ty)?;
if let Some(init) = hir.decl.init {
let init_ty = tyc.lazy_typeval(init)?;
tyc.must_match(ty, init_ty, tyc.ctx.span(init).unwrap());
}
Ok(ty)
}));
Ok(mk.finish().into())
})
.collect()
}
/// Add a signal declaration.
pub fn add_signal_decl<I>(&self, decl: &'ast ast::ObjDecl) -> Result<Vec<I>>
where
I: From<SignalDeclRef>,
{
let ty = self.add_subtype_ind(&decl.subtype)?;
let init = self.add_optional(&decl.init, AddContext::add_expr)?;
self.ctx
.set_type_context_optional(init, TypeCtx::TypeOf(ty.into()));
let kind = match decl.detail {
Some(Spanned {
value: ast::ObjDetail::Register,
..
}) => hir::SignalKind::Register,
Some(Spanned {
value: ast::ObjDetail::Bus,
..
}) => hir::SignalKind::Bus,
Some(Spanned { span, .. }) => {
self.emit(DiagBuilder2::error("expected `:=` or `;`").span(span));
hir::SignalKind::Normal
}
None => hir::SignalKind::Normal,
};
decl.names
.iter()
.map(|dn| {
let (mk, id, scope) = self.make::<SignalDeclRef>(dn.span);
mk.lower_to_hir(Box::new(move |_sbc| {
Ok(hir::Decl {
parent: scope,
span: dn.span,
name: (*dn).into(),
decl: hir::SignalDecl {
ty: ty,
kind: kind,
init: init,
},
})
}));
mk.typeval(Box::new(move |tyc| {
let hir = tyc.ctx.lazy_hir(id)?;
let ty = tyc.lazy_typeval(hir.decl.ty)?;
if let Some(init) = hir.decl.init {
let init_ty = tyc.lazy_typeval(init)?;
tyc.must_match(ty, init_ty, tyc.ctx.span(init).unwrap());
}
Ok(ty)
}));
Ok(mk.finish().into())
})
.collect()
}
/// Add a variable declaration.
pub fn add_var_decl<I>(&self, decl: &'ast ast::ObjDecl) -> Result<Vec<I>>
where
I: From<VarDeclRef>,
{
let ty = self.add_subtype_ind(&decl.subtype)?;
let init = self.add_optional(&decl.init, AddContext::add_expr)?;
self.ctx
.set_type_context_optional(init, TypeCtx::TypeOf(ty.into()));
if let Some(Spanned { span, .. }) = decl.detail {
self.emit(DiagBuilder2::error("expected `:=` or `;`").span(span));
}
decl.names
.iter()
.map(|dn| {
let (mk, id, scope) = self.make::<VarDeclRef>(dn.span);
mk.lower_to_hir(Box::new(move |_sbc| {
Ok(hir::Decl {
parent: scope,
span: dn.span,
name: (*dn).into(),
decl: hir::VarDecl {
shared: decl.kind == ast::ObjKind::SharedVar,
ty: ty,
init: init,
},
})
}));
mk.typeval(Box::new(move |tyc| {
let hir = tyc.ctx.lazy_hir(id)?;
let ty = tyc.lazy_typeval(hir.decl.ty)?;
if let Some(init) = hir.decl.init {
let init_ty = tyc.lazy_typeval(init)?;
tyc.must_match(ty, init_ty, tyc.ctx.span(init).unwrap());
}
Ok(ty)
}));
Ok(mk.finish().into())
})
.collect()
}
/// Add a file declaration.
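    ///
    /// Rejects a `:=` initial value, lowers the optional `open ... is ...`
    /// detail into mode and filename expressions, and wraps the declared
    /// subtype in a file type during type evaluation.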
pub fn add_file_decl<I>(&self, decl: &'ast ast::ObjDecl) -> Result<Vec<I>>
where
I: From<FileDeclRef>,
{
let ty = self.add_subtype_ind(&decl.subtype)?;
if let Some(ref init) = decl.init {
self.emit(DiagBuilder2::error("expected `;`, `open`, or `is`").span(init.span));
}
decl.names
.iter()
.map(|dn| {
let (mk, id, scope) = self.make::<FileDeclRef>(dn.span);
mk.lower_to_hir(Box::new(move |sbc| {
let ctx = AddContext::new(sbc, scope);
let (mode, filename) = match decl.detail {
Some(Spanned {
value: ast::ObjDetail::Open(ref mode, ref filename),
..
}) => {
let mode = ctx.add_optional(mode, AddContext::add_expr)?;
let filename = ctx.add_expr(filename)?;
(mode, Some(filename))
}
Some(Spanned { span, .. }) => {
sbc.emit(
DiagBuilder2::error("expected `;`, `open`, or `is`").span(span),
);
(None, None)
}
None => (None, None),
};
Ok(hir::Decl {
parent: scope,
span: dn.span,
name: (*dn).into(),
decl: hir::FileDecl {
ty: ty,
filename: filename,
mode: mode,
},
})
}));
mk.typeval(Box::new(move |tyc| {
let hir = tyc.ctx.lazy_hir(id)?;
let ty = tyc.ctx.lazy_typeval(hir.decl.ty)?;
// TODO: Check that the type of expressions are okay.
let file_ty = tyc.ctx.intern_ty(Ty::File(Box::new(ty.clone())));
Ok(file_ty)
}));
Ok(mk.finish().into())
})
.collect()
}
}
| 39.235577 | 97 | 0.403382 |
0e58b50cfa4fbed6d982af5c03fd49822532725e | 7,471 | use crate::common::rc::{PyRc, PyWeak};
use crate::pyobject::{IdProtocol, PyObject, PyObjectPayload, TypeProtocol};
use std::borrow;
use std::fmt;
use std::ops::Deref;
pub struct PyObjectRc<T = dyn PyObjectPayload>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
inner: PyRc<PyObject<T>>,
}
pub struct PyObjectWeak<T = dyn PyObjectPayload>
where
T: ?Sized + PyObjectPayload,
{
inner: PyWeak<PyObject<T>>,
}
pub trait AsPyObjectRef {
fn _as_ref(self) -> PyRc<PyObject<dyn PyObjectPayload>>;
}
impl<T> AsPyObjectRef for PyRc<PyObject<T>>
where
T: PyObjectPayload,
{
fn _as_ref(self) -> PyRc<PyObject<dyn PyObjectPayload>> {
self
}
}
impl AsPyObjectRef for PyRc<PyObject<dyn PyObjectPayload>> {
fn _as_ref(self) -> PyRc<PyObject<dyn PyObjectPayload>> {
self
}
}
impl<T> PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
pub fn into_raw(this: Self) -> *const PyObject<T> {
let ptr = PyRc::as_ptr(&this.inner);
std::mem::forget(this);
ptr
}
unsafe fn into_rc(this: Self) -> PyRc<PyObject<T>> {
let raw = Self::into_raw(this);
PyRc::from_raw(raw)
}
pub fn into_ref(this: Self) -> PyObjectRc<dyn PyObjectPayload> {
PyObjectRc::<dyn PyObjectPayload> {
inner: unsafe { Self::into_rc(this) }._as_ref(),
}
}
/// # Safety
/// See PyRc::from_raw
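    /// The pointer must have been produced by `PyObjectRc::into_raw` and may
    /// be passed back at most once.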
pub unsafe fn from_raw(ptr: *const PyObject<T>) -> Self {
Self {
inner: PyRc::from_raw(ptr),
}
}
pub fn new(value: PyObject<T>) -> Self
where
T: Sized,
{
Self {
inner: PyRc::new(value),
}
}
pub fn strong_count(this: &Self) -> usize {
PyRc::strong_count(&this.inner)
}
pub fn weak_count(this: &Self) -> usize {
PyRc::weak_count(&this.inner)
}
pub fn downgrade(this: &Self) -> PyObjectWeak<T> {
PyObjectWeak {
inner: PyRc::downgrade(&this.inner),
}
}
}
impl<T: ?Sized + PyObjectPayload> IdProtocol for PyObjectRc<T>
where
PyRc<PyObject<T>>: IdProtocol + AsPyObjectRef,
{
fn get_id(&self) -> usize {
self.inner.get_id()
}
}
impl<T> PyObjectWeak<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
pub fn upgrade(&self) -> Option<PyObjectRc<T>> {
self.inner.upgrade().map(|inner| PyObjectRc { inner })
}
}
#[cfg(feature = "threading")]
unsafe impl<T> Send for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
}
#[cfg(feature = "threading")]
unsafe impl<T> Sync for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
}
#[cfg(feature = "threading")]
unsafe impl<T> Send for PyObjectWeak<T> where T: ?Sized + PyObjectPayload {}
#[cfg(feature = "threading")]
unsafe impl<T> Sync for PyObjectWeak<T> where T: ?Sized + PyObjectPayload {}
impl<T> Drop for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
fn drop(&mut self) {
use crate::pyobject::BorrowValue;
// PyObjectRc will drop the value when its count goes to 0
if PyRc::strong_count(&self.inner) != 1 {
return;
}
// CPython-compatible drop implementation
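        // Cloning below bumps the strong count to 2 so the object stays alive
        // while `__del__` runs; the temporary is dropped again afterwards,
        // leaving the count at 1 so the outer drop can free the payload.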
let zelf = Self::into_ref(self.clone());
if let Some(del_method) = zelf.inner.get_class_attr("__del__") {
crate::vm::thread::with_vm(&zelf, |vm| {
if let Err(e) = vm.invoke(&del_method, vec![zelf.clone()]) {
// exception in del will be ignored but printed
print!("Exception ignored in: ",);
let repr = vm.to_repr(&del_method);
match repr {
Ok(v) => println!("{}", v.to_string()),
Err(_) => println!("{}", &del_method.class().name),
}
let tb_module = vm.import("traceback", &[], 0).unwrap();
// TODO: set exc traceback
let print_stack = vm.get_attribute(tb_module, "print_stack").unwrap();
vm.invoke(&print_stack, vec![]).unwrap();
if let Ok(repr) = vm.to_repr(e.as_object()) {
println!("{}", repr.borrow_value());
}
}
});
}
let _ = unsafe { PyObjectRc::<dyn PyObjectPayload>::into_rc(zelf) };
debug_assert!(PyRc::strong_count(&self.inner) == 1); // make sure to keep same state
}
}
impl<T> Deref for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
type Target = PyObject<T>;
#[inline]
fn deref(&self) -> &PyObject<T> {
self.inner.deref()
}
}
impl<T> Clone for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
{
fn clone(&self) -> Self {
PyObjectRc {
inner: self.inner.clone(),
}
}
}
impl<T> fmt::Display for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
PyObject<T>: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f)
}
}
impl<T> fmt::Debug for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
PyObject<T>: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f)
}
}
impl<T> fmt::Pointer for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef,
PyObject<T>: fmt::Pointer,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f)
}
}
impl<T> borrow::Borrow<T> for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef + borrow::Borrow<T>,
{
fn borrow(&self) -> &T {
self.inner.borrow()
}
}
impl<T> borrow::BorrowMut<T> for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef + borrow::BorrowMut<T>,
{
fn borrow_mut(&mut self) -> &mut T {
self.inner.borrow_mut()
}
}
impl<T> AsRef<T> for PyObjectRc<T>
where
T: ?Sized + PyObjectPayload,
PyRc<PyObject<T>>: AsPyObjectRef + AsRef<T>,
{
fn as_ref(&self) -> &T {
self.inner.as_ref()
}
}
impl<T> Clone for PyObjectWeak<T>
where
T: ?Sized + PyObjectPayload,
{
fn clone(&self) -> Self {
PyObjectWeak {
inner: self.inner.clone(),
}
}
}
impl<T> fmt::Debug for PyObjectWeak<T>
where
T: ?Sized + PyObjectPayload,
PyObject<T>: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f)
}
}
impl<T> borrow::Borrow<T> for PyObjectWeak<T>
where
T: ?Sized + PyObjectPayload,
PyWeak<PyObject<T>>: borrow::Borrow<T>,
{
fn borrow(&self) -> &T {
self.inner.borrow()
}
}
impl<T> borrow::BorrowMut<T> for PyObjectWeak<T>
where
T: ?Sized + PyObjectPayload,
PyWeak<PyObject<T>>: borrow::BorrowMut<T>,
{
fn borrow_mut(&mut self) -> &mut T {
self.inner.borrow_mut()
}
}
impl<T> AsRef<T> for PyObjectWeak<T>
where
T: ?Sized + PyObjectPayload,
PyWeak<PyObject<T>>: AsRef<T>,
{
fn as_ref(&self) -> &T {
self.inner.as_ref()
}
}
| 23.792994 | 92 | 0.577299 |
331c9ff4a78ea5d43bc8363e7f3ddda1baae8951 | 271,113 | #![recursion_limit = "600"]
#![allow(clippy::identity_op)]
#![allow(clippy::collapsible_if)]
extern crate leafish_shared as shared;
use crate::shared::{Axis, Direction, Position};
use cgmath::Point3;
use collision::Aabb3;
use std::collections::HashMap;
pub mod material;
pub use self::material::Material;
pub use self::Block::*;
use parking_lot::RwLock;
use std::sync::Arc;
pub trait WorldAccess {
fn get_block(&self, pos: Position) -> Block;
}
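// Helper macro assigning sequential `usize` ids to a list of names; e.g.
// (illustrative) `create_ids!(usize, Air, Stone)` expands to
// `pub const Air: usize = 0;` and `pub const Stone: usize = Air + 1;`.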
#[doc(hidden)]
#[macro_export]
macro_rules! create_ids {
($t:ty, ) => ();
($t:ty, prev($prev:ident), $name:ident) => (
#[allow(non_upper_case_globals)]
pub const $name: $t = $prev + 1;
);
($t:ty, prev($prev:ident), $name:ident, $($n:ident),+) => (
#[allow(non_upper_case_globals)]
pub const $name: $t = $prev + 1;
create_ids!($t, prev($name), $($n),+);
);
($t:ty, $name:ident, $($n:ident),+) => (
#[allow(non_upper_case_globals)]
pub const $name: $t = 0;
create_ids!($t, prev($name), $($n),+);
);
($t:ty, $name:ident) => (
#[allow(non_upper_case_globals)]
pub const $name: $t = 0;
);
}
#[derive(Default)]
pub struct VanillaIDMap {
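    // `flat` is indexed by 1.13+ global block state ids, `hier` by the legacy
    // `(block_id << 4) | metadata` encoding, and `modded` maps modded block
    // names to their 16 metadata variants.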
flat: Vec<Option<Block>>,
hier: Vec<Option<Block>>,
modded: HashMap<String, [Option<Block>; 16]>,
protocol_version: i32,
}
impl VanillaIDMap {
pub fn new(protocol_version: i32) -> VanillaIDMap {
gen_id_map(protocol_version)
}
pub fn by_vanilla_id(
&self,
id: usize,
modded_block_ids: Arc<RwLock<HashMap<usize, String>>>, // TODO: remove and add to constructor, but have to mutate in Server
) -> Block {
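        // Protocol 404 (1.13.2) and newer use flat global state ids; older
        // protocols use the hierarchical `(id << 4) | metadata` form, falling
        // back to the modded-block table for ids the vanilla map doesn't know.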
if self.protocol_version >= 404 {
self.flat
.get(id)
.and_then(|v| *v)
.unwrap_or(Block::Missing {})
// TODO: support modded 1.13.2+ blocks after https://github.com/iceiix/stevenarella/pull/145
} else {
if let Some(block) = self.hier.get(id).and_then(|v| *v) {
block
} else {
let data = id & 0xf;
if let Some(name) = modded_block_ids.clone().read().get(&(id >> 4)) {
if let Some(blocks_by_data) = self.modded.get(name) {
blocks_by_data[data].unwrap_or(Block::Missing {})
} else {
//info!("Modded block not supported yet: {}:{} -> {}", id >> 4, data, name);
Block::Missing {}
}
} else {
Block::Missing {}
}
}
}
}
}
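// DSL that expands to the `Block` enum plus all of its lookup tables. Every
// block lists its properties and their possible values; `data` maps a state to
// its pre-1.13 metadata, `offset`/`offsets` to its flattened 1.13+ id offset,
// and the remaining fields describe material, model, variant, tint, collision
// and state updates.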
macro_rules! define_blocks {
(
$(
$name:ident {
$(modid $modid:expr,)?
props {
$(
$fname:ident : $ftype:ty = [$($val:expr),+],
)*
},
$(data $datafunc:expr,)?
$(offset $offsetfunc:expr,)?
$(offsets $offsetsfunc:expr,)?
$(material $mat:expr,)?
model $model:expr,
$(variant $variant:expr,)?
$(tint $tint:expr,)?
$(collision $collision:expr,)?
$(update_state ($world:ident, $pos:ident) => $update_state:expr,)?
$(multipart ($mkey:ident, $mval:ident) => $multipart:expr,)?
}
)+
) => (
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Block {
$(
$name {
$(
$fname : $ftype,
)?
},
)+
}
mod internal_ids {
create_ids!(usize, $($name),+);
}
impl Block {
#[allow(unused_variables, unreachable_code)]
pub fn get_internal_id(&self) -> usize {
match *self {
$(
Block::$name {
$($fname,)?
} => {
internal_ids::$name
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn get_hierarchical_data(&self) -> Option<usize> {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(
let data: Option<usize> = ($datafunc).map(|v| v);
return data;
)?
Some(0)
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
#[allow(clippy::redundant_closure_call)] // TODO: fix 'try not to call a closure in the expression where it is declared'
pub fn get_flat_offset(&self, protocol_version: i32) -> Option<usize> {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(
let offset: Option<usize> = ($offsetsfunc)(protocol_version).map(|v| v);
return offset;
)?
$(
let offset: Option<usize> = ($offsetfunc).map(|v| v);
return offset;
)?
$(
let data: Option<usize> = ($datafunc).map(|v| v);
return data;
)?
Some(0)
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn get_modid(&self) -> Option<&str> {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(
return Some($modid);
)?
None
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn get_material(&self) -> Material {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(return $mat;)?
material::SOLID
}
)+
}
}
#[allow(unused_variables)]
pub fn get_model(&self) -> (String, String) {
match *self {
$(
Block::$name {
$($fname,)?
} => {
let parts = $model;
(String::from(parts.0), String::from(parts.1))
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn get_model_variant(&self) -> String {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(return String::from($variant);)?
"normal".to_owned()
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn get_tint(&self) -> TintType {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(return $tint;)?
TintType::Default
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn get_collision_boxes(&self) -> Vec<Aabb3<f64>> {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(return $collision;)?
vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0, 1.0)
)]
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn update_state<W: WorldAccess>(&self, world: &W, pos: Position) -> Block {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(
let $world = world;
let $pos = pos;
return $update_state;
)?
Block::$name {
$($fname,)?
}
}
)+
}
}
#[allow(unused_variables, unreachable_code)]
pub fn match_multipart(&self, key: &str, val: &str) -> bool {
match *self {
$(
Block::$name {
$($fname,)?
} => {
$(
let $mkey = key;
let $mval = val;
return $multipart;
)?
false
}
)+
}
}
}
mod block_registration_functions {
use super::*;
$(
#[allow(non_snake_case)]
pub fn $name(
protocol_version: i32,
blocks_flat: &mut Vec<Option<Block>>,
blocks_hier: &mut Vec<Option<Block>>,
blocks_modded: &mut HashMap<String, [Option<Block>; 16]>,
flat_id: &mut usize,
last_internal_id: &mut usize,
hier_block_id: &mut usize,
) {
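                // Iterate the cartesian product of every property value list,
                // yielding one `Block::$name { .. }` per state combination.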
#[allow(non_camel_case_types, dead_code)]
struct CombinationIter<$($fname),*> {
first: bool,
finished: bool,
state: CombinationIterState<$($fname),*>,
orig: CombinationIterOrig<$($fname),*>,
current: CombinationIterCurrent,
}
#[allow(non_camel_case_types)]
struct CombinationIterState<$($fname),*> {
$($fname: $fname,)?
}
#[allow(non_camel_case_types)]
struct CombinationIterOrig<$($fname),*> {
$($fname: $fname,)?
}
#[allow(non_camel_case_types)]
struct CombinationIterCurrent {
$($fname: $ftype,)?
}
#[allow(non_camel_case_types)]
impl <$($fname : Iterator<Item=$ftype> + Clone),*> Iterator for CombinationIter<$($fname),*> {
type Item = Block;
#[allow(unused_mut, unused_variables, unreachable_code, unused_assignments, clippy::never_loop)]
fn next(&mut self) -> Option<Self::Item> {
if self.finished {
return None;
}
if self.first {
self.first = false;
return Some(Block::$name {
$(
$fname: self.current.$fname,
)?
});
}
let mut has_value = false;
loop {
$(
if let Some(val) = self.state.$fname.next() {
self.current.$fname = val;
has_value = true;
break;
}
self.state.$fname = self.orig.$fname.clone();
self.current.$fname = self.state.$fname.next().unwrap();
)?
self.finished = true;
return None;
}
if has_value {
Some(Block::$name {
$(
$fname: self.current.$fname,
)?
})
} else {
None
}
}
}
#[allow(non_camel_case_types)]
impl <$($fname : Iterator<Item=$ftype> + Clone),*> CombinationIter<$($fname),*> {
#[allow(clippy::too_many_arguments)]
fn new($(mut $fname:$fname),*) -> CombinationIter<$($fname),*> {
CombinationIter {
finished: false,
first: true,
orig: CombinationIterOrig {
$($fname: $fname.clone(),)?
},
current: CombinationIterCurrent {
$($fname: $fname.next().unwrap(),)?
},
state: CombinationIterState {
$($fname,)?
}
}
}
}
let iter = CombinationIter::new(
$({
let vals = vec![$($val),+];
vals.into_iter()
}),*
);
let mut last_offset: isize = -1;
let debug_blocks = std::env::var("DEBUG_BLOCKS").is_ok();
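                // Register every state under both id schemes: flat ids (1.13+)
                // are `flat_id + offset`, hierarchical ids (pre-1.13) are
                // `(hier_block_id << 4) + data`; modded blocks are keyed by
                // name instead.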
for block in iter {
let internal_id = block.get_internal_id();
let hier_data: Option<usize> = block.get_hierarchical_data();
if let Some(modid) = block.get_modid() {
let hier_data = hier_data.unwrap();
if !(*blocks_modded).contains_key(modid) {
(*blocks_modded).insert(modid.to_string(), [None; 16]);
}
let block_from_data = (*blocks_modded).get_mut(modid).unwrap();
block_from_data[hier_data] = Some(block);
continue
}
let vanilla_id =
if let Some(hier_data) = hier_data {
if internal_id != *last_internal_id {
*hier_block_id += 1;
}
*last_internal_id = internal_id;
Some((*hier_block_id << 4) + hier_data)
} else {
None
};
let offset = block.get_flat_offset(protocol_version);
if let Some(offset) = offset {
let id = *flat_id + offset;
if debug_blocks {
if let Some(vanilla_id) = vanilla_id {
println!("{} block state = {:?} hierarchical {}:{} offset={}", id, block, vanilla_id >> 4, vanilla_id & 0xF, offset);
} else {
println!("{} block state = {:?} hierarchical none, offset={}", id, block, offset);
}
}
if offset as isize > last_offset {
last_offset = offset as isize;
}
if (*blocks_flat).len() <= id {
(*blocks_flat).resize(id + 1, None);
}
if (*blocks_flat)[id].is_none() {
(*blocks_flat)[id] = Some(block);
} else {
panic!(
"Tried to register {:#?} to {} but {:#?} was already registered",
block,
id,
(*blocks_flat)[id]
);
}
}
if let Some(vanilla_id) = vanilla_id {
if debug_blocks {
if offset.is_none() {
println!("(no flat) block state = {:?} hierarchical {}:{}", block, vanilla_id >> 4, vanilla_id & 0xF);
}
}
if (*blocks_hier).len() <= vanilla_id {
(*blocks_hier).resize(vanilla_id + 1, None);
}
if (*blocks_hier)[vanilla_id].is_none() {
(*blocks_hier)[vanilla_id] = Some(block);
} else {
panic!(
"Tried to register {:#?} to {} but {:#?} was already registered",
block,
vanilla_id,
(*blocks_hier)[vanilla_id]
);
}
}
}
#[allow(unused_assignments)]
{
*flat_id += (last_offset + 1) as usize;
}
}
)+
}
pub fn gen_id_map(protocol_version: i32) -> VanillaIDMap {
let mut blocks_flat = vec![];
let mut blocks_hier = vec![];
let mut blocks_modded: HashMap<String, [Option<Block>; 16]> = HashMap::new();
let mut flat_id = 0;
let mut last_internal_id = 0;
let mut hier_block_id = 0;
$(
block_registration_functions::$name(protocol_version,
&mut blocks_flat,
&mut blocks_hier,
&mut blocks_modded,
&mut flat_id,
&mut last_internal_id,
&mut hier_block_id);
)+
VanillaIDMap { flat: blocks_flat, hier: blocks_hier, modded: blocks_modded, protocol_version }
}
);
}
#[derive(Clone, Copy)]
pub enum TintType {
Default,
Color { r: u8, g: u8, b: u8 },
Grass,
Foliage,
}
define_blocks! {
Air {
props {},
material material::Material {
collidable: false,
.. material::INVISIBLE
},
model { ("minecraft", "air") },
collision vec![],
}
Stone {
props {
variant: StoneVariant = [
StoneVariant::Normal,
StoneVariant::Granite,
StoneVariant::SmoothGranite,
StoneVariant::Diorite,
StoneVariant::SmoothDiorite,
StoneVariant::Andesite,
StoneVariant::SmoothAndesite
],
},
data Some(variant.data()),
model { ("minecraft", variant.as_string() ) },
}
Grass {
props {
snowy: bool = [false, true],
},
data { if snowy { None } else { Some(0) } },
offset { if snowy { Some(0) } else { Some(1) } },
model { ("minecraft", "grass") },
variant format!("snowy={}", snowy),
tint TintType::Grass,
update_state (world, pos) => Block::Grass{snowy: is_snowy(world, pos)},
}
Dirt {
props {
snowy: bool = [false, true],
variant: DirtVariant = [
DirtVariant::Normal,
DirtVariant::Coarse,
DirtVariant::Podzol
],
},
data if !snowy { Some(variant.data()) } else { None },
offset {
if variant == DirtVariant::Podzol {
Some(variant.data() + if snowy { 0 } else { 1 })
} else {
if snowy {
None
} else {
Some(variant.data())
}
}
},
model { ("minecraft", variant.as_string()) },
variant {
if variant == DirtVariant::Podzol {
format!("snowy={}", snowy)
} else {
"normal".to_owned()
}
},
update_state (world, pos) => if variant == DirtVariant::Podzol {
Block::Dirt{snowy: is_snowy(world, pos), variant}
} else {
Block::Dirt{snowy, variant}
},
}
Cobblestone {
props {},
model { ("minecraft", "cobblestone") },
}
Planks {
props {
variant: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data Some(variant.plank_data()),
model { ("minecraft", format!("{}_planks", variant.as_string()) ) },
}
Sapling {
props {
variant: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
stage: u8 = [0, 1],
},
data Some(variant.plank_data() | ((stage as usize) << 3)),
offset Some((variant.plank_data() << 1) | (stage as usize)),
material material::NON_SOLID,
model { ("minecraft", format!("{}_sapling", variant.as_string()) ) },
variant format!("stage={}", stage),
collision vec![],
}
Bedrock {
props {},
model { ("minecraft", "bedrock") },
}
FlowingWater {
props {
level: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(level as usize),
offset None,
material Material {
absorbed_light: 2,
..material::TRANSPARENT
},
model { ("minecraft", "flowing_water") },
collision vec![],
}
Water {
props {
level: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(level as usize),
material Material {
absorbed_light: 2,
..material::TRANSPARENT
},
model { ("minecraft", "water") },
collision vec![],
}
FlowingLava {
props {
level: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(level as usize),
offset None,
material Material {
absorbed_light: 15,
emitted_light: 15,
..material::NON_SOLID
},
model { ("minecraft", "flowing_lava") },
collision vec![],
}
Lava {
props {
level: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(level as usize),
material Material {
absorbed_light: 15,
emitted_light: 15,
..material::NON_SOLID
},
model { ("minecraft", "lava") },
collision vec![],
}
Sand {
props {
red: bool = [false, true],
},
data Some(if red { 1 } else { 0 }),
model { ("minecraft", if red { "red_sand" } else { "sand" } ) },
}
Gravel {
props {},
model { ("minecraft", "gravel") },
}
GoldOre {
props {},
model { ("minecraft", "gold_ore") },
}
IronOre {
props {},
model { ("minecraft", "iron_ore") },
}
CoalOre {
props {},
model { ("minecraft", "coal_ore") },
}
NetherGoldOre {
props {},
data None,
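        // Only exists on protocol 735 (1.16) and newer, where this block was added.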
offsets |protocol_version| { if protocol_version >= 735 { Some(0) } else { None } },
model { ("minecraft", "nether_gold_ore") },
}
Log {
props {
variant: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak,
TreeVariant::StrippedSpruce,
TreeVariant::StrippedBirch,
TreeVariant::StrippedJungle,
TreeVariant::StrippedAcacia,
TreeVariant::StrippedDarkOak,
TreeVariant::StrippedOak
],
axis: Axis = [Axis::Y, Axis::Z, Axis::X, Axis::None],
},
data match variant {
TreeVariant::Oak | TreeVariant::Spruce | TreeVariant::Birch | TreeVariant::Jungle =>
Some(variant.data() | (axis.index() << 2)),
_ => None,
},
offset match axis {
Axis::None => None,
Axis::X => Some(variant.offset() * 3 + 0),
Axis::Y => Some(variant.offset() * 3 + 1),
Axis::Z => Some(variant.offset() * 3 + 2),
},
model { ("minecraft", format!("{}_log", variant.as_string()) ) },
variant format!("axis={}", axis.as_string()),
}
Wood {
props {
variant: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak,
TreeVariant::StrippedSpruce,
TreeVariant::StrippedBirch,
TreeVariant::StrippedJungle,
TreeVariant::StrippedAcacia,
TreeVariant::StrippedDarkOak,
TreeVariant::StrippedOak
],
axis: Axis = [Axis::X, Axis::Y, Axis::Z],
},
data None::<usize>,
offset Some(variant.offset() * 3 + axis.index()),
model { ("minecraft", format!("{}_wood", variant.as_string()) ) },
variant format!("axis={}", axis.as_string()),
}
Leaves {
props {
variant: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
decayable: bool = [false, true],
check_decay: bool = [false, true],
distance: u8 = [1, 2, 3, 4, 5, 6, 7],
},
data match variant {
TreeVariant::Oak | TreeVariant::Spruce | TreeVariant::Birch | TreeVariant::Jungle =>
if distance == 1 {
Some(variant.data()
| (if decayable { 0x4 } else { 0x0 })
| (if check_decay { 0x8 } else { 0x0 }))
} else {
None
},
_ => None,
},
offset if check_decay {
None
} else {
Some(variant.offset() * (7 * 2) + ((distance as usize - 1) << 1) + (if decayable { 0 } else { 1 }))
},
material material::LEAVES,
model { ("minecraft", format!("{}_leaves", variant.as_string()) ) },
tint TintType::Foliage,
}
Sponge {
props {
wet: bool = [false, true],
},
data Some(if wet { 1 } else { 0 }),
model { ("minecraft", "sponge") },
variant format!("wet={}", wet),
}
Glass {
props {},
material material::NON_SOLID,
model { ("minecraft", "glass") },
}
LapisOre {
props {},
model { ("minecraft", "lapis_ore") },
}
LapisBlock {
props {},
model { ("minecraft", "lapis_block") },
}
Dispenser {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
triggered: bool = [false, true],
},
data Some(facing.index() | (if triggered { 0x8 } else { 0x0 })),
offset Some((facing.offset() << 1) | (if triggered { 0 } else { 1 })),
model { ("minecraft", "dispenser") },
variant format!("facing={}", facing.as_string()),
}
Sandstone {
props {
variant: SandstoneVariant = [
SandstoneVariant::Normal,
SandstoneVariant::Chiseled,
SandstoneVariant::Smooth
],
},
data Some(variant.data()),
model { ("minecraft", variant.as_string() ) },
}
NoteBlock {
props {
instrument: NoteBlockInstrument = [
NoteBlockInstrument::Harp,
NoteBlockInstrument::BaseDrum,
NoteBlockInstrument::Snare,
NoteBlockInstrument::Hat,
NoteBlockInstrument::Bass,
NoteBlockInstrument::Flute,
NoteBlockInstrument::Bell,
NoteBlockInstrument::Guitar,
NoteBlockInstrument::Chime,
NoteBlockInstrument::Xylophone,
NoteBlockInstrument::IronXylophone,
NoteBlockInstrument::CowBell,
NoteBlockInstrument::Didgeridoo,
NoteBlockInstrument::Bit,
NoteBlockInstrument::Banjo,
NoteBlockInstrument::Pling
],
note: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
powered: bool = [true, false],
},
data if instrument == NoteBlockInstrument::Harp && note == 0 && powered { Some(0) } else { None },
offsets |protocol_version| (instrument.offsets(protocol_version)
.map(|offset| offset * (25 * 2) + ((note as usize) << 1) + if powered { 0 } else { 1 })),
model { ("minecraft", "noteblock") },
}
Bed {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
occupied: bool = [false, true],
part: BedPart = [BedPart::Head, BedPart::Foot],
},
data if color != ColoredVariant::White { None } else { Some(facing.horizontal_index()
| (if occupied { 0x4 } else { 0x0 })
| (if part == BedPart::Head { 0x8 } else { 0x0 }))},
offset Some(color.data() * (2 * 2 * 4)
+ (facing.horizontal_offset() * (2 * 2))
+ (if occupied { 0 } else { 2 })
+ (if part == BedPart::Head { 0 } else { 1 })),
material material::NON_SOLID,
model { ("minecraft", "bed") },
variant format!("facing={},part={}", facing.as_string(), part.as_string()),
collision vec![Aabb3::new(Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 9.0/16.0, 1.0))],
}
GoldenRail {
props {
powered: bool = [false, true],
shape: RailShape = [
RailShape::NorthSouth,
RailShape::EastWest,
RailShape::AscendingNorth,
RailShape::AscendingSouth,
RailShape::AscendingEast,
RailShape::AscendingWest
],
},
data Some(shape.data() | (if powered { 0x8 } else { 0x0 })),
offset Some(shape.data() + (if powered { 0 } else { 6 })),
material material::NON_SOLID,
model { ("minecraft", "golden_rail") },
variant format!("powered={},shape={}", powered, shape.as_string()),
collision vec![],
}
DetectorRail {
props {
powered: bool = [false, true],
shape: RailShape = [
RailShape::NorthSouth,
RailShape::EastWest,
RailShape::AscendingNorth,
RailShape::AscendingSouth,
RailShape::AscendingEast,
RailShape::AscendingWest
],
},
data Some(shape.data() | (if powered { 0x8 } else { 0x0 })),
offset Some(shape.data() + (if powered { 0 } else { 6 })),
material material::NON_SOLID,
model { ("minecraft", "detector_rail") },
variant format!("powered={},shape={}", powered, shape.as_string()),
collision vec![],
}
StickyPiston {
props {
extended: bool = [false, true],
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index() | (if extended { 0x8 } else { 0x0 })),
offset Some(facing.offset() + (if extended { 0 } else { 6 })),
material Material {
should_cull_against: !extended,
..material::NON_SOLID
},
model { ("minecraft", "sticky_piston") },
variant format!("extended={},facing={}", extended, facing.as_string()),
collision piston_collision(extended, facing),
}
Web {
props {},
material material::NON_SOLID,
model { ("minecraft", "web") },
collision vec![],
}
TallGrass {
props {
variant: TallGrassVariant = [
TallGrassVariant::DeadBush,
TallGrassVariant::TallGrass,
TallGrassVariant::Fern
],
},
data Some(variant.data()),
offset Some(variant.offset()),
material material::NON_SOLID,
model { ("minecraft", variant.as_string() ) },
tint TintType::Grass,
collision vec![],
}
Seagrass {
props {},
data None::<usize>,
offset Some(0),
material material::NON_SOLID,
model { ("minecraft", "seagrass") },
collision vec![],
}
TallSeagrass {
props {
half: TallSeagrassHalf = [
TallSeagrassHalf::Upper,
TallSeagrassHalf::Lower
],
},
data None::<usize>,
offset Some(half.offset()),
material material::NON_SOLID,
model { ("minecraft", "tall_seagrass") },
collision vec![],
}
DeadBush {
props {},
offset None,
material material::NON_SOLID,
model { ("minecraft", "dead_bush") },
collision vec![],
}
Piston {
props {
extended: bool = [false, true],
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index() | (if extended { 0x8 } else { 0x0 })),
offset Some(facing.offset() + (if extended { 0 } else { 6 })),
material Material {
should_cull_against: !extended,
..material::NON_SOLID
},
model { ("minecraft", "piston") },
variant format!("extended={},facing={}", extended, facing.as_string()),
collision piston_collision(extended, facing),
}
PistonHead {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
short: bool = [false, true],
variant: PistonType = [PistonType::Normal, PistonType::Sticky],
},
data if !short { Some(facing.index() | if variant == PistonType::Sticky { 0x8 } else { 0x0 })} else { None },
offset Some(facing.offset() * 4 +
(if short { 0 } else { 2 }) +
(if variant == PistonType::Normal { 0 } else { 1 })),
material material::NON_SOLID,
model { ("minecraft", "piston_head") },
variant format!("facing={},short={},type={}", facing.as_string(), short, variant.as_string()),
collision {
let (min_x, min_y, min_z, max_x, max_y, max_z) = match facing {
Direction::Up => (3.0/8.0, -0.25, 3.0/8.0, 5.0/8.0, 0.75, 5.0/8.0),
                Direction::Down => (3.0/8.0, 0.25, 3.0/8.0, 5.0/8.0, 1.25, 5.0/8.0),
Direction::North => (3.0/8.0, 3.0/8.0, 0.25, 5.0/8.0, 5.0/8.0, 1.25),
Direction::South => (3.0/8.0, 3.0/8.0, -0.25, 5.0/8.0, 5.0/8.0, 0.75),
Direction::West => (0.25, 3.0/8.0, 3.0/8.0, 1.25, 5.0/8.0, 5.0/8.0),
Direction::East => (-0.25, 3.0/8.0, 3.0/8.0, 0.75, 5.0/8.0, 5.0/8.0),
_ => unreachable!(),
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z)
)]
},
}
Wool {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
model { ("minecraft", format!("{}_wool", color.as_string()) ) },
}
ThermalExpansionRockwool {
modid "ThermalExpansion:Rockwool",
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
model { ("minecraft", format!("{}_wool", color.as_string()) ) },
}
ThermalFoundationRockwool {
modid "thermalfoundation:rockwool",
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
model { ("minecraft", format!("{}_wool", color.as_string()) ) },
}
PistonExtension {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
variant: PistonType = [PistonType::Normal, PistonType::Sticky],
},
data if facing == Direction::Up && variant == PistonType::Normal { Some(0) } else { None },
offset Some(facing.offset() * 2 + (if variant == PistonType::Normal { 0 } else { 1 })),
material material::INVISIBLE,
model { ("minecraft", "piston_extension") },
}
YellowFlower {
props {},
material material::NON_SOLID,
model { ("minecraft", "dandelion") },
collision vec![],
}
RedFlower {
props {
variant: RedFlowerVariant = [
RedFlowerVariant::Poppy,
RedFlowerVariant::BlueOrchid,
RedFlowerVariant::Allium,
RedFlowerVariant::AzureBluet,
RedFlowerVariant::RedTulip,
RedFlowerVariant::OrangeTulip,
RedFlowerVariant::WhiteTulip,
RedFlowerVariant::PinkTulip,
RedFlowerVariant::OxeyeDaisy,
RedFlowerVariant::Cornflower,
RedFlowerVariant::WitherRose,
RedFlowerVariant::LilyOfTheValley
],
},
data Some(variant.data()),
offsets |protocol_version| (variant.offsets(protocol_version)),
material material::NON_SOLID,
model { ("minecraft", variant.as_string()) },
collision vec![],
}
BrownMushroom {
props {},
material Material {
emitted_light: 1,
..material::NON_SOLID
},
model { ("minecraft", "brown_mushroom") },
collision vec![],
}
RedMushroom {
props {},
material material::NON_SOLID,
model { ("minecraft", "red_mushroom") },
collision vec![],
}
GoldBlock {
props {},
model { ("minecraft", "gold_block") },
}
IronBlock {
props {},
model { ("minecraft", "iron_block") },
}
DoubleStoneSlab {
props {
seamless: bool = [false, true],
variant: StoneSlabVariant = [
StoneSlabVariant::Stone,
StoneSlabVariant::Sandstone,
StoneSlabVariant::PetrifiedWood,
StoneSlabVariant::Cobblestone,
StoneSlabVariant::Brick,
StoneSlabVariant::StoneBrick,
StoneSlabVariant::NetherBrick,
StoneSlabVariant::Quartz
],
},
data {
let data = if seamless {
match variant {
StoneSlabVariant::Stone => 8,
StoneSlabVariant::Sandstone => 9,
StoneSlabVariant::Quartz => 15,
_ => return None,
}
} else {
variant.data()
};
Some(data)
},
offset None,
model { ("minecraft", format!("{}_double_slab", variant.as_string()) ) },
variant if seamless { "all" } else { "normal" },
}
StoneSlab {
props {
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
variant: StoneSlabVariant = [
StoneSlabVariant::Stone,
StoneSlabVariant::Sandstone,
StoneSlabVariant::PetrifiedWood,
StoneSlabVariant::Cobblestone,
StoneSlabVariant::Brick,
StoneSlabVariant::StoneBrick,
StoneSlabVariant::NetherBrick,
StoneSlabVariant::Quartz
],
},
data Some(variant.data() | (if half == BlockHalf::Top { 0x8 } else { 0x0 })),
offset None,
material material::NON_SOLID,
model { ("minecraft", format!("{}_slab", variant.as_string()) ) },
variant format!("half={}", half.as_string()),
collision slab_collision(half),
}
BrickBlock {
props {},
model { ("minecraft", "brick_block") },
}
TNT {
props {
explode: bool = [false, true],
},
data Some(if explode { 1 } else { 0 }),
offset Some(if explode { 0 } else { 1 }),
model { ("minecraft", "tnt") },
}
BookShelf {
props {},
model { ("minecraft", "bookshelf") },
}
MossyCobblestone {
props {},
model { ("minecraft", "mossy_cobblestone") },
}
Obsidian {
props {},
model { ("minecraft", "obsidian") },
}
Torch {
props {
facing: Direction = [
Direction::East,
Direction::West,
Direction::South,
Direction::North,
Direction::Up
],
},
data {
Some(match facing {
Direction::East => 1,
Direction::West => 2,
Direction::South => 3,
Direction::North => 4,
Direction::Up => 5,
_ => unreachable!(),
})
},
offset {
Some(match facing {
Direction::Up => 0,
Direction::North => 1,
Direction::South => 2,
Direction::West => 3,
Direction::East => 4,
_ => unreachable!(),
})
},
material Material {
emitted_light: 14,
..material::NON_SOLID
},
model { ("minecraft", "torch") },
variant format!("facing={}", facing.as_string()),
collision vec![],
}
Fire {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
up: bool = [false, true],
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
},
data if !up && !north && !south && !west && !east { Some(age as usize) } else { None },
offset Some(
if west { 0 } else { 1<<0 } |
if up { 0 } else { 1<<1 } |
if south { 0 } else { 1<<2 } |
if north { 0 } else { 1<<3 } |
if east { 0 } else { 1<<4 } |
((age as usize) << 5)),
material Material {
emitted_light: 15,
..material::NON_SOLID
},
model { ("minecraft", "fire") },
collision vec![],
update_state (world, pos) => {
Fire{
age,
up: can_burn(world, pos.shift(Direction::Up)),
north: can_burn(world, pos.shift(Direction::North)),
south: can_burn(world, pos.shift(Direction::South)),
west: can_burn(world, pos.shift(Direction::West)),
east: can_burn(world, pos.shift(Direction::East))
}
},
multipart (key, val) => match key {
"up" => up == (val == "true"),
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
SoulFire {
props {},
data None,
offsets |protocol_version| { if protocol_version >= 735 { Some(0) } else { None } },
model { ("minecraft", "soul_fire") },
collision vec![],
}
MobSpawner {
props {},
material material::NON_SOLID,
model { ("minecraft", "mob_spawner") },
}
OakStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "oak_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::OakStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
Chest {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
type_: ChestType = [
ChestType::Single,
ChestType::Left,
ChestType::Right
],
waterlogged: bool = [true, false],
},
data if type_ == ChestType::Single && !waterlogged { Some(facing.index()) } else { None },
offset Some(if waterlogged { 0 } else { 1 } +
type_.offset() * 2 +
facing.horizontal_offset() * (2 * 3)),
material material::NON_SOLID,
model { ("minecraft", "chest") },
}
RedstoneWire {
props {
north: RedstoneSide = [RedstoneSide::None, RedstoneSide::Side, RedstoneSide::Up],
south: RedstoneSide = [RedstoneSide::None, RedstoneSide::Side, RedstoneSide::Up],
west: RedstoneSide = [RedstoneSide::None, RedstoneSide::Side, RedstoneSide::Up],
east: RedstoneSide = [RedstoneSide::None, RedstoneSide::Side, RedstoneSide::Up],
power: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data {
if north == RedstoneSide::None && south == RedstoneSide::None
&& west == RedstoneSide::None && east == RedstoneSide::None {
Some(power as usize)
} else {
None
}
},
offset Some(
west.offset() +
south.offset() * 3 +
(power as usize) * (3 * 3) +
north.offset() * (3 * 3 * 16) +
east.offset() * (3 * 3 * 16 * 3)),
material material::NON_SOLID,
model { ("minecraft", "redstone_wire") },
tint TintType::Color{r: ((255.0 / 30.0) * (f64::from(power)) + 14.0) as u8, g: 0, b: 0},
collision vec![],
update_state (world, pos) => Block::RedstoneWire {
north: can_connect_redstone(world, pos, Direction::North),
south: can_connect_redstone(world, pos, Direction::South),
west: can_connect_redstone(world, pos, Direction::West),
east: can_connect_redstone(world, pos, Direction::East),
power
},
multipart (key, val) => match key {
"north" => val.contains(north.as_string()),
"south" => val.contains(south.as_string()),
"west" => val.contains(west.as_string()),
"east" => val.contains(east.as_string()),
_ => false,
},
}
DiamondOre {
props {},
model { ("minecraft", "diamond_ore") },
}
DiamondBlock {
props {},
model { ("minecraft", "diamond_block") },
}
CraftingTable {
props {},
model { ("minecraft", "crafting_table") },
}
Wheat {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "wheat") },
variant format!("age={}", age),
collision vec![],
}
Farmland {
props {
moisture: u8 = [0, 1, 2, 3, 4, 5, 6, 7],
},
data Some(moisture as usize),
material material::NON_SOLID,
model { ("minecraft", "farmland") },
variant format!("moisture={}", moisture),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 15.0/16.0, 1.0)
)],
}
Furnace {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
lit: bool = [true, false],
},
data if !lit { Some(facing.index()) } else { None },
offset Some(if lit { 0 } else { 1 } + facing.horizontal_offset() * 2),
model { ("minecraft", "furnace") },
variant format!("facing={}", facing.as_string()),
}
FurnaceLit {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset None,
material Material {
emitted_light: 13,
..material::SOLID
},
model { ("minecraft", "lit_furnace") },
variant format!("facing={}", facing.as_string()),
}
StandingSign {
props {
rotation: Rotation = [
Rotation::South,
Rotation::SouthSouthWest,
Rotation::SouthWest,
Rotation::WestSouthWest,
Rotation::West,
Rotation::WestNorthWest,
Rotation::NorthWest,
Rotation::NorthNorthWest,
Rotation::North,
Rotation::NorthNorthEast,
Rotation::NorthEast,
Rotation::EastNorthEast,
Rotation::East,
Rotation::EastSouthEast,
Rotation::SouthEast,
Rotation::SouthSouthEast
],
waterlogged: bool = [true, false],
wood: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data if wood == TreeVariant::Oak && !waterlogged { Some(rotation.data()) } else { None },
offsets |protocol_version| {
let o = rotation.data() * 2 + if waterlogged { 0 } else { 1 };
if protocol_version >= 477 {
Some(wood.offset() * 2 * 16 + o)
} else {
if wood == TreeVariant::Oak {
Some(o)
} else {
None
}
}
},
material material::INVISIBLE,
model { ("minecraft", "standing_sign") },
collision vec![],
}
WoodenDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "wooden_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::WoodenDoor{facing, half, hinge, open, powered}
},
}
Ladder {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
waterlogged: bool = [true, false],
},
data if !waterlogged { Some(facing.index()) } else { None },
offset Some(if waterlogged { 0 } else { 1 } + facing.horizontal_offset() * 2),
material material::NON_SOLID,
model { ("minecraft", "ladder") },
variant format!("facing={}", facing.as_string()),
}
Rail {
props {
shape: RailShape = [
RailShape::NorthSouth,
RailShape::EastWest,
RailShape::NorthEast,
RailShape::NorthWest,
RailShape::SouthEast,
RailShape::SouthWest,
RailShape::AscendingNorth,
RailShape::AscendingSouth,
RailShape::AscendingEast,
RailShape::AscendingWest
],
},
data Some(shape.data()),
material material::NON_SOLID,
model { ("minecraft", "rail") },
variant format!("shape={}", shape.as_string()),
collision vec![],
}
StoneStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "stone_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::StoneStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
WallSign {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
waterlogged: bool = [true, false],
wood: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data if wood == TreeVariant::Oak && !waterlogged { Some(facing.index()) } else { None },
offsets |protocol_version| {
let o = if waterlogged { 0 } else { 1 } + facing.horizontal_offset() * 2;
if protocol_version >= 477 {
Some(wood.offset() * 2 * 4 + o)
} else {
if wood == TreeVariant::Oak {
Some(o)
} else {
None
}
}
},
material material::INVISIBLE,
model { ("minecraft", "wall_sign") },
variant format!("facing={}", facing.as_string()),
collision vec![],
}
Lever {
props {
face: AttachedFace = [
AttachedFace::Floor,
AttachedFace::Wall,
AttachedFace::Ceiling
],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
powered: bool = [false, true],
},
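        // Flattened layout: face varies slowest (8 states per face), then facing (2 per
        // direction), with powered varying fastest.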
data face.data_with_facing_and_powered(facing, powered),
offset Some(face.offset() * (4 * 2) + facing.horizontal_offset() * 2 + if powered { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", "lever") },
variant format!("facing={},powered={}", face.variant_with_facing(facing), powered),
collision vec![],
}
StonePressurePlate {
props {
powered: bool = [false, true],
},
data Some(if powered { 1 } else { 0 }),
offset Some(if powered { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", "stone_pressure_plate") },
variant format!("powered={}", powered),
collision vec![],
}
IronDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "iron_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::IronDoor{facing, half, hinge, open, powered}
},
}
WoodenPressurePlate {
props {
wood: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
powered: bool = [false, true],
},
data if wood == TreeVariant::Oak { Some(if powered { 1 } else { 0 }) } else { None },
offset Some(wood.offset() * 2 + if powered { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", "wooden_pressure_plate") },
variant format!("powered={}", powered),
collision vec![],
}
RedstoneOre {
props {
lit: bool = [true, false],
},
data if !lit { Some(0) } else { None },
offset Some(if lit { 0 } else { 1 }),
model { ("minecraft", if lit { "lit_redstone_ore" } else { "redstone_ore" }) },
}
RedstoneOreLit {
props {},
offset None,
material Material {
emitted_light: 9,
..material::SOLID
},
model { ("minecraft", "lit_redstone_ore") },
}
RedstoneTorchUnlit {
props {
facing: Direction = [
Direction::East,
Direction::West,
Direction::South,
Direction::North,
Direction::Up
],
},
data {
Some(match facing {
Direction::East => 1,
Direction::West => 2,
Direction::South => 3,
Direction::North => 4,
Direction::Up => 5,
_ => unreachable!(),
})
},
offset None,
material material::NON_SOLID,
model { ("minecraft", "unlit_redstone_torch") },
variant format!("facing={}", facing.as_string()),
collision vec![],
}
RedstoneTorchLit {
props {
facing: Direction = [
Direction::East,
Direction::West,
Direction::South,
Direction::North,
Direction::Up
],
},
data {
Some(match facing {
Direction::East => 1,
Direction::West => 2,
Direction::South => 3,
Direction::North => 4,
Direction::Up => 5,
_ => unreachable!(),
})
},
offset None,
material Material {
emitted_light: 7,
..material::NON_SOLID
},
model { ("minecraft", "redstone_torch") },
variant format!("facing={}", facing.as_string()),
collision vec![],
}
RedstoneTorchStanding {
props {
lit: bool = [true, false],
},
data None::<usize>,
offset Some(if lit { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", if lit { "redstone_torch" } else { "unlit_redstone_torch" }) },
variant "facing=up",
collision vec![],
}
RedstoneTorchWall {
props {
facing: Direction = [
Direction::East,
Direction::West,
Direction::South,
Direction::North
],
lit: bool = [true, false],
},
data None::<usize>,
offset Some(if lit { 0 } else { 1 } + facing.horizontal_offset() * 2),
material Material {
emitted_light: 7,
..material::NON_SOLID
},
model { ("minecraft", if lit { "redstone_torch" } else { "unlit_redstone_torch" }) },
variant format!("facing={}", facing.as_string()),
collision vec![],
}
StoneButton {
props {
face: AttachedFace = [
AttachedFace::Floor,
AttachedFace::Wall,
AttachedFace::Ceiling
],
facing: Direction = [
Direction::East,
Direction::West,
Direction::South,
Direction::North
],
powered: bool = [false, true],
},
data face.data_with_facing_and_powered(facing, powered),
offset Some(face.offset() * (4 * 2) + facing.horizontal_offset() * 2 + if powered { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", "stone_button") },
variant format!("facing={},powered={}", face.variant_with_facing(facing), powered),
}
SnowLayer {
props {
layers: u8 = [1, 2, 3, 4, 5, 6, 7, 8],
},
data Some(layers as usize - 1),
material material::NON_SOLID,
model { ("minecraft", "snow_layer") },
variant format!("layers={}", layers),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, (f64::from(layers) - 1.0)/8.0, 1.0),
)],
}
Ice {
props {},
material Material {
absorbed_light: 2,
..material::TRANSPARENT
},
model { ("minecraft", "ice") },
}
Snow {
props {},
model { ("minecraft", "snow") },
}
Cactus {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "cactus") },
collision vec![Aabb3::new(
Point3::new(1.0/16.0, 0.0, 1.0/16.0),
Point3::new(1.0 - (1.0/16.0), 1.0 - (1.0/16.0), 1.0 - (1.0/16.0))
)],
}
Clay {
props {},
model { ("minecraft", "clay") },
}
Reeds {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "reeds") },
tint TintType::Foliage,
collision vec![],
}
Jukebox {
props {
has_record: bool = [false, true],
},
data Some(if has_record { 1 } else { 0 }),
offset Some(if has_record { 0 } else { 1 }),
model { ("minecraft", "jukebox") },
}
Fence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [false, true],
},
data if !north && !south && !east && !west && !waterlogged { Some(0) } else { None },
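        // Vanilla orders properties by name (east, north, south, waterlogged, west) with the
        // last one varying fastest and `true` before `false`, hence west is bit 0 and east bit 4.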
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_fence);
Block::Fence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
PumpkinFace {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
without_face: bool = [false, true],
},
data Some(facing.horizontal_index() | (if without_face { 0x4 } else { 0x0 })),
offset None,
model { ("minecraft", "pumpkin") },
variant format!("facing={}", facing.as_string()),
}
Pumpkin {
props {},
data None::<usize>,
offset Some(0),
model { ("minecraft", "pumpkin") },
}
Netherrack {
props {},
model { ("minecraft", "netherrack") },
}
SoulSand {
props {},
material material::NON_SOLID,
model { ("minecraft", "soul_sand") },
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 7.0/8.0, 1.0)
)],
}
SoulSoil {
props {},
data None,
offsets |protocol_version| { if protocol_version >= 735 { Some(0) } else { None } },
model { ("minecraft", "soul_soil") },
}
Basalt {
props {
axis: Axis = [Axis::X, Axis::Y, Axis::Z],
},
data None,
        offsets |protocol_version| {
            if protocol_version >= 735 {
                Some(match axis {
                    Axis::X => 0,
                    Axis::Y => 1,
                    Axis::Z => 2,
                    _ => unreachable!(),
                })
            } else {
                None
            }
        },
model { ("minecraft", "basalt") },
}
PolishedBasalt {
props {
axis: Axis = [Axis::X, Axis::Y, Axis::Z],
},
data None,
        offsets |protocol_version| {
            if protocol_version >= 735 {
                Some(match axis {
                    Axis::X => 0,
                    Axis::Y => 1,
                    Axis::Z => 2,
                    _ => unreachable!(),
                })
            } else {
                None
            }
        },
model { ("minecraft", "polished_basalt") },
}
SoulTorch {
props {},
data None,
offsets |protocol_version| { if protocol_version >= 735 { Some(0) } else { None } },
model { ("minecraft", "soul_torch") },
}
SoulWallTorch {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None,
        offsets |protocol_version| { if protocol_version >= 735 { Some(facing.horizontal_offset()) } else { None } },
model { ("minecraft", "soul_wall_torch") },
}
Glowstone {
props {},
material Material {
emitted_light: 15,
..material::SOLID
},
model { ("minecraft", "glowstone") },
}
Portal {
props {
axis: Axis = [Axis::X, Axis::Z],
},
data Some(axis.index()),
offset Some(axis.index() - 1),
material Material {
emitted_light: 11,
..material::TRANSPARENT
},
model { ("minecraft", "portal") },
variant format!("axis={}", axis.as_string()),
collision vec![],
}
PumpkinCarved {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
        material material::SOLID,
model { ("minecraft", "carved_pumpkin") },
variant format!("facing={}", facing.as_string()),
}
PumpkinLit {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
without_face: bool = [false, true],
},
data Some(facing.horizontal_index() | (if without_face { 0x4 } else { 0x0 })),
offset if without_face { None } else { Some(facing.horizontal_offset()) },
material Material {
emitted_light: 15,
..material::SOLID
},
model { ("minecraft", "lit_pumpkin") },
variant format!("facing={}", facing.as_string()),
}
Cake {
props {
bites: u8 = [0, 1, 2, 3, 4, 5, 6],
},
data Some(bites as usize),
material material::NON_SOLID,
model { ("minecraft", "cake") },
variant format!("bites={}", bites),
collision vec![Aabb3::new(
Point3::new((1.0 + (f64::from(bites) * 2.0)) / 16.0, 0.0, 1.0/16.0),
Point3::new(1.0 - (1.0/16.0), 0.5, 1.0 - (1.0/16.0))
)],
}
Repeater {
props {
delay: u8 = [1, 2, 3, 4],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
locked: bool = [false, true],
powered: bool = [true, false],
},
        data if powered || locked { None } else { Some(facing.horizontal_index() | (delay as usize - 1) << 2) },
offset Some(if powered { 0 } else { 1<<0 } +
if locked { 0 } else { 1<<1 } +
facing.horizontal_offset() * (2 * 2) +
((delay - 1) as usize) * (2 * 2 * 4)),
material material::NON_SOLID,
model { ("minecraft", if powered { "powered_repeater" } else { "unpowered_repeater" }) },
variant format!("delay={},facing={},locked={}", delay, facing.as_string(), locked),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0/8.0, 1.0)
)],
update_state (world, pos) => Repeater{delay, facing, locked: update_repeater_state(world, pos, facing), powered},
}
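    // RepeaterPowered only maps the legacy powered-repeater block id; its flattened states are
    // already covered by Repeater above, so `offset` is None.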
RepeaterPowered {
props {
delay: u8 = [1, 2, 3, 4],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
locked: bool = [false, true],
},
data if !locked { Some(facing.horizontal_index() | (delay as usize - 1) << 2) } else { None },
offset None,
material material::NON_SOLID,
model { ("minecraft", "powered_repeater") },
variant format!("delay={},facing={},locked={}", delay, facing.as_string(), locked),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0/8.0, 1.0)
)],
update_state (world, pos) => RepeaterPowered{delay, facing, locked: update_repeater_state(world, pos, facing)},
}
StainedGlass {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
material material::TRANSPARENT,
model { ("minecraft", format!("{}_stained_glass", color.as_string()) ) },
}
TrapDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
open: bool = [false, true],
waterlogged: bool = [true, false],
powered: bool = [true, false],
wood: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data if waterlogged || powered || wood != TreeVariant::Oak { None } else { Some(match facing {
Direction::North => 0,
Direction::South => 1,
Direction::West => 2,
Direction::East => 3,
_ => unreachable!(),
} | (if open { 0x4 } else { 0x0 }) | (if half == BlockHalf::Top { 0x8 } else { 0x0 }))},
offset Some(if waterlogged { 0 } else { 1<<0 } +
if powered { 0 } else { 1<<1 } +
if open { 0 } else { 1<<2 } +
if half == BlockHalf::Top { 0 } else { 1<<3 } +
facing.horizontal_offset() * (2 * 2 * 2 * 2) +
wood.offset() * (2 * 2 * 2 * 2 * 4)),
material material::NON_SOLID,
model { ("minecraft", "trapdoor") },
variant format!("facing={},half={},open={}", facing.as_string(), half.as_string(), open),
collision trapdoor_collision(facing, half, open),
}
MonsterEgg {
props {
variant: MonsterEggVariant = [
MonsterEggVariant::Stone,
MonsterEggVariant::Cobblestone,
MonsterEggVariant::StoneBrick,
MonsterEggVariant::MossyBrick,
MonsterEggVariant::CrackedBrick,
MonsterEggVariant::ChiseledBrick
],
},
data Some(variant.data()),
model { ("minecraft", format!("{}_monster_egg", variant.as_string())) },
}
StoneBrick {
props {
variant: StoneBrickVariant = [
StoneBrickVariant::Normal,
StoneBrickVariant::Mossy,
StoneBrickVariant::Cracked,
StoneBrickVariant::Chiseled
],
},
data Some(variant.data()),
model { ("minecraft", variant.as_string() ) },
}
BrownMushroomBlock {
props {
is_stem: bool = [true, false],
west: bool = [true, false],
up: bool = [true, false],
south: bool = [true, false],
north: bool = [true, false],
east: bool = [true, false],
down: bool = [true, false],
},
data mushroom_block_data(is_stem, west, up, south, north, east, down),
offset mushroom_block_offset(is_stem, west, up, south, north, east, down),
model { ("minecraft", "brown_mushroom_block") },
variant format!("variant={}", mushroom_block_variant(is_stem, west, up, south, north, east, down)),
}
RedMushroomBlock {
props {
is_stem: bool = [true, false],
west: bool = [true, false],
up: bool = [true, false],
south: bool = [true, false],
north: bool = [true, false],
east: bool = [true, false],
down: bool = [true, false],
},
data mushroom_block_data(is_stem, west, up, south, north, east, down),
offset mushroom_block_offset(is_stem, west, up, south, north, east, down),
model { ("minecraft", "red_mushroom_block") },
variant format!("variant={}", mushroom_block_variant(is_stem, west, up, south, north, east, down)),
}
MushroomStem {
props {
west: bool = [true, false],
up: bool = [true, false],
south: bool = [true, false],
north: bool = [true, false],
east: bool = [true, false],
down: bool = [true, false],
},
data None::<usize>,
offset mushroom_block_offset(false, west, up, south, north, east, down),
model { ("minecraft", "mushroom_stem") },
variant "variant=all_stem".to_string(),
}
IronBars {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !waterlogged && !north && !south && !west && !east { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "iron_bars") },
collision pane_collision(north, south, east, west),
update_state (world, pos) => {
let f = |block| matches!(block, Block::IronBars{..});
let (north, south, west, east) = can_connect_sides(world, pos, &f);
Block::IronBars{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
Chain {
props {
waterlogged: bool = [true, false],
axis: Axis = [Axis::X, Axis::Y, Axis::Z],
},
data None,
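        // Chains were added in 1.16 (protocol 735); the axis property only exists from
        // 1.16.2 (protocol 751) onward.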
offsets |protocol_version| {
if protocol_version >= 735 {
                let o = if waterlogged { 0 } else { 1 };
if protocol_version >= 751 {
Some(match axis {
Axis::X => 0,
Axis::Y => 1,
Axis::Z => 2,
_ => unreachable!()
} * 2 + o)
} else {
match axis {
Axis::Y => Some(o),
_ => None,
}
}
} else {
None
}
},
model { ("minecraft", "chain") },
}
GlassPane {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !waterlogged && !north && !south && !west && !east { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "glass_pane") },
collision pane_collision(north, south, east, west),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_glasspane);
Block::GlassPane{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
MelonBlock {
props {},
model { ("minecraft", "melon_block") },
}
AttachedPumpkinStem {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "pumpkin_stem") },
variant format!("facing={}", facing.as_string()),
collision vec![],
update_state (world, pos) => {
let facing = match (world.get_block(pos.shift(Direction::East)), world.get_block(pos.shift(Direction::West)),
world.get_block(pos.shift(Direction::North)), world.get_block(pos.shift(Direction::South))) {
(Block::Pumpkin{ .. }, _, _, _) => Direction::East,
(_, Block::Pumpkin{ .. }, _, _) => Direction::West,
(_, _, Block::Pumpkin{ .. }, _) => Direction::North,
(_, _, _, Block::Pumpkin{ .. }) => Direction::South,
_ => Direction::Up,
};
Block::AttachedPumpkinStem{facing}
},
}
AttachedMelonStem {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "melon_stem") },
variant format!("facing={}", facing.as_string()),
collision vec![],
update_state (world, pos) => {
let facing = match (world.get_block(pos.shift(Direction::East)), world.get_block(pos.shift(Direction::West)),
world.get_block(pos.shift(Direction::North)), world.get_block(pos.shift(Direction::South))) {
(Block::MelonBlock{ .. }, _, _, _) => Direction::East,
(_, Block::MelonBlock{ .. }, _, _) => Direction::West,
(_, _, Block::MelonBlock{ .. }, _) => Direction::North,
(_, _, _, Block::MelonBlock{ .. }) => Direction::South,
_ => Direction::Up,
};
Block::AttachedMelonStem{facing}
},
}
PumpkinStem {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7],
facing: Direction = [
Direction::Up,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data if facing == Direction::Up { Some(age as usize) } else { None },
material material::NON_SOLID,
model { ("minecraft", "pumpkin_stem") },
variant {
if facing == Direction::Up {
format!("age={},facing={}", age, facing.as_string())
} else {
format!("facing={}", facing.as_string())
}
},
tint TintType::Color{r: age as u8 * 32, g: 255 - (age as u8 * 8), b: age as u8 * 4},
collision vec![],
update_state (world, pos) => {
let facing = match (world.get_block(pos.shift(Direction::East)), world.get_block(pos.shift(Direction::West)),
world.get_block(pos.shift(Direction::North)), world.get_block(pos.shift(Direction::South))) {
(Block::Pumpkin{ .. }, _, _, _) => Direction::East,
(_, Block::Pumpkin{ .. }, _, _) => Direction::West,
(_, _, Block::Pumpkin{ .. }, _) => Direction::North,
(_, _, _, Block::Pumpkin{ .. }) => Direction::South,
_ => Direction::Up,
};
Block::PumpkinStem{age, facing}
},
}
MelonStem {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7],
facing: Direction = [
Direction::Up,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
        data if facing == Direction::Up { Some(age as usize) } else { None },
material material::NON_SOLID,
model { ("minecraft", "melon_stem") },
variant {
if facing == Direction::Up {
format!("age={},facing={}", age, facing.as_string())
} else {
format!("facing={}", facing.as_string())
}
},
tint TintType::Color{r: age as u8 * 32, g: 255 - (age as u8 * 8), b: age as u8 * 4},
collision vec![],
update_state (world, pos) => {
let facing = match (world.get_block(pos.shift(Direction::East)), world.get_block(pos.shift(Direction::West)),
world.get_block(pos.shift(Direction::North)), world.get_block(pos.shift(Direction::South))) {
(Block::MelonBlock{ .. }, _, _, _) => Direction::East,
(_, Block::MelonBlock{ .. }, _, _) => Direction::West,
(_, _, Block::MelonBlock{ .. }, _) => Direction::North,
(_, _, _, Block::MelonBlock{ .. }) => Direction::South,
_ => Direction::Up,
};
Block::MelonStem{age, facing}
},
}
Vine {
props {
up: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
north: bool = [false, true],
east: bool = [false, true],
},
data if !up {
Some((if south { 0x1 } else { 0x0 })
| (if west { 0x2 } else { 0x0 })
| (if north { 0x4 } else { 0x0 })
| (if east { 0x8 } else { 0x0 }))
} else {
None
},
offset Some(if west { 0 } else { 1<<0 } +
if up { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "vine") },
variant format!("east={},north={},south={},up={},west={}", east, north, south, up, west),
tint TintType::Foliage,
collision vec![],
update_state (world, pos) => {
let mat = world.get_block(pos.shift(Direction::Up)).get_material();
let up = mat.renderable && (mat.should_cull_against || mat.never_cull /* Because leaves */);
Vine{up, south, west, north, east}
},
}
FenceGate {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
in_wall: bool = [false, true],
open: bool = [false, true],
powered: bool = [false, true],
},
data fence_gate_data(facing, in_wall, open, powered),
offset fence_gate_offset(facing, in_wall, open, powered),
material material::NON_SOLID,
model { ("minecraft", "fence_gate") },
variant format!("facing={},in_wall={},open={}", facing.as_string(), in_wall, open),
collision fence_gate_collision(facing, in_wall, open),
update_state (world, pos) => Block::FenceGate{
facing,
in_wall: fence_gate_update_state(world, pos, facing),
open,
powered
},
}
BrickStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "brick_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::BrickStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
StoneBrickStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "stone_brick_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::StoneBrickStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
Mycelium {
props {
snowy: bool = [false, true],
},
data if snowy { None } else { Some(0) },
offset Some(if snowy { 0 } else { 1 }),
material material::SOLID,
model { ("minecraft", "mycelium") },
variant format!("snowy={}", snowy),
update_state (world, pos) => Block::Mycelium{snowy: is_snowy(world, pos)},
}
Waterlily {
props {},
material material::NON_SOLID,
model { ("minecraft", "waterlily") },
tint TintType::Foliage,
collision vec![Aabb3::new(
Point3::new(1.0/16.0, 0.0, 1.0/16.0),
Point3::new(15.0/16.0, 3.0/32.0, 15.0/16.0))
],
}
NetherBrick {
props {},
model { ("minecraft", "nether_brick") },
}
NetherBrickFence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !west && !east && !waterlogged { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "nether_brick_fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let f = |block| matches!(block, Block::NetherBrickFence{..} |
Block::FenceGate{..} |
Block::SpruceFenceGate{..} |
Block::BirchFenceGate{..} |
Block::JungleFenceGate{..} |
Block::DarkOakFenceGate{..} |
Block::AcaciaFenceGate{..});
let (north, south, west, east) = can_connect_sides(world, pos, &f);
Block::NetherBrickFence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
NetherBrickStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "nether_brick_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::NetherBrickStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
NetherWart {
props {
age: u8 = [0, 1, 2, 3],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "nether_wart") },
variant format!("age={}", age),
collision vec![],
}
EnchantingTable {
props {},
material material::NON_SOLID,
model { ("minecraft", "enchanting_table") },
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 0.75, 1.0))
],
}
BrewingStand {
props {
has_bottle_0: bool = [false, true],
has_bottle_1: bool = [false, true],
has_bottle_2: bool = [false, true],
},
data Some((if has_bottle_0 { 0x1 } else { 0x0 })
| (if has_bottle_1 { 0x2 } else { 0x0 })
| (if has_bottle_2 { 0x4 } else { 0x0 })),
        offset Some(if has_bottle_0 { 0 } else { 1<<2 } +
                    if has_bottle_1 { 0 } else { 1<<1 } +
                    if has_bottle_2 { 0 } else { 1<<0 }),
material Material {
emitted_light: 1,
..material::NON_SOLID
},
model { ("minecraft", "brewing_stand") },
multipart (key, val) => match key {
"has_bottle_0" => (val == "true") == has_bottle_0,
"has_bottle_1" => (val == "true") == has_bottle_1,
"has_bottle_2" => (val == "true") == has_bottle_2,
_ => false,
},
}
Cauldron {
props {
level: u8 = [0, 1, 2, 3],
},
data Some(level as usize),
material material::NON_SOLID,
model { ("minecraft", "cauldron") },
variant format!("level={}", level),
}
EndPortal {
props {},
material Material {
emitted_light: 15,
..material::NON_SOLID
},
model { ("minecraft", "end_portal") },
collision vec![],
}
EndPortalFrame {
props {
eye: bool = [false, true],
facing: Direction = [
Direction::North,
Direction::South,
Direction::East,
Direction::West
],
},
data Some(facing.horizontal_index() | (if eye { 0x4 } else { 0x0 })),
offset Some(facing.horizontal_offset() + (if eye { 0 } else { 4 })),
material Material {
emitted_light: 1,
..material::NON_SOLID
},
model { ("minecraft", "end_portal_frame") },
variant format!("eye={},facing={}", eye, facing.as_string()),
collision {
let mut collision = vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 13.0/16.0, 1.0)
)];
if eye {
collision.push(Aabb3::new(
Point3::new(5.0/16.0, 13.0/16.0, 5.0/16.0),
Point3::new(11.0/16.0, 1.0, 11.0/16.0)
));
}
collision
},
}
EndStone {
props {},
model { ("minecraft", "end_stone") },
}
DragonEgg {
props {},
material Material {
emitted_light: 1,
..material::NON_SOLID
},
model { ("minecraft", "dragon_egg") },
collision vec![Aabb3::new(
Point3::new(1.0/16.0, 0.0, 1.0/16.0),
Point3::new(15.0/16.0, 1.0, 15.0/16.0)
)],
}
RedstoneLamp {
props {},
model { ("minecraft", "redstone_lamp") },
}
RedstoneLampLit {
props {},
material Material {
emitted_light: 15,
..material::NON_SOLID
},
model { ("minecraft", "lit_redstone_lamp") },
}
DoubleWoodenSlab {
props {
variant: WoodSlabVariant = [
WoodSlabVariant::Oak,
WoodSlabVariant::Spruce,
WoodSlabVariant::Birch,
WoodSlabVariant::Jungle,
WoodSlabVariant::Acacia,
WoodSlabVariant::DarkOak
],
},
data Some(variant.data()),
offset None,
model { ("minecraft", format!("{}_double_slab", variant.as_string()) ) },
}
WoodenSlab {
props {
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
variant: WoodSlabVariant = [
WoodSlabVariant::Oak,
WoodSlabVariant::Spruce,
WoodSlabVariant::Birch,
WoodSlabVariant::Jungle,
WoodSlabVariant::Acacia,
WoodSlabVariant::DarkOak
],
},
data Some(variant.data() | (if half == BlockHalf::Top { 0x8 } else { 0x0 })),
offset None,
material material::NON_SOLID,
model { ("minecraft", format!("{}_slab", variant.as_string()) ) },
variant format!("half={}", half.as_string()),
collision slab_collision(half),
}
Cocoa {
props {
age: u8 = [0, 1, 2],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index() | ((age as usize) << 2)),
offset Some(facing.horizontal_offset() + ((age as usize) * 4)),
material material::NON_SOLID,
model { ("minecraft", "cocoa") },
variant format!("age={},facing={}", age, facing.as_string()),
collision {
let i = 4.0 + f64::from(age) * 2.0;
let j = 5.0 + f64::from(age) * 2.0;
let f = i / 2.0;
let (min_x, min_y, min_z, max_x, max_y, max_z) = match facing {
Direction::North => (8.0 - f, 12.0 - j, 1.0, 8.0 + f, 12.0, 8.0 + i),
Direction::South => (8.0 - f, 12.0 - j, 15.0 - i, 8.0 + f, 12.0, 15.0),
Direction::West => (1.0, 12.0 - j, 8.0 - f, 1.0 + i, 12.0, 8.0 + f),
Direction::East => (15.0 - i, 12.0 - j, 8.0 - f, 15.0, 12.0, 8.0 + f),
_ => unreachable!(),
};
vec![Aabb3::new(
Point3::new(min_x / 16.0, min_y / 16.0, min_z / 16.0),
Point3::new(max_x / 16.0, max_y / 16.0, max_z / 16.0))
]
},
}
SandstoneStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "sandstone_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::SandstoneStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
EmeraldOre {
props {},
material material::SOLID,
model { ("minecraft", "emerald_ore") },
}
EnderChest {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
waterlogged: bool = [true, false],
},
data if waterlogged { None } else { Some(facing.index()) },
offset Some(if waterlogged { 0 } else { 1 } + facing.horizontal_offset() * 2),
material Material {
emitted_light: 7,
..material::NON_SOLID
},
model { ("minecraft", "ender_chest") },
variant format!("facing={}", facing.as_string()),
collision vec![Aabb3::new(
Point3::new(1.0/16.0, 0.0, 1.0/16.0),
Point3::new(15.0/16.0, 7.0/8.0, 15.0/16.0)
)],
}
TripwireHook {
props {
attached: bool = [false, true],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
powered: bool = [false, true],
},
data Some(facing.horizontal_index()
| (if attached { 0x4 } else { 0x0 })
| (if powered { 0x8 } else { 0x0 })),
offset Some(if powered { 0 } else { 1 } +
facing.horizontal_offset() * 2 +
if attached { 0 } else { 2 * 4 }),
material material::NON_SOLID,
model { ("minecraft", "tripwire_hook") },
variant format!("attached={},facing={},powered={}", attached, facing.as_string(), powered),
collision vec![],
}
Tripwire {
props {
powered: bool = [false, true],
attached: bool = [false, true],
disarmed: bool = [false, true],
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
mojang_cant_even: bool = [false, true],
},
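        // `mojang_cant_even` mirrors the legacy 0x2 data bit; it has no counterpart in the
        // flattened states, so `offset` returns None whenever it is set.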
data if !north && !south && !east && !west {
Some((if powered { 0x1 } else { 0x0 })
| (if attached { 0x4 } else { 0x0 })
| (if disarmed { 0x8 } else { 0x0 })
| (if mojang_cant_even { 0x2 } else { 0x0 }))
} else {
None
},
offset if mojang_cant_even {
None
} else {
Some(if west { 0 } else { 1<<0 } +
if south { 0 } else { 1<<1 } +
if powered { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 } +
if disarmed { 0 } else { 1<<5 } +
if attached { 0 } else { 1<<6 })
},
material material::TRANSPARENT,
model { ("minecraft", "tripwire") },
variant format!("attached={},east={},north={},south={},west={}", attached, east, north, south, west),
collision vec![],
update_state (world, pos) => {
let f = |dir| {
match world.get_block(pos.shift(dir)) {
Block::TripwireHook{facing, ..} => facing.opposite() == dir,
Block::Tripwire{..} => true,
_ => false,
}
};
Tripwire{
powered,
attached,
disarmed,
north: f(Direction::North),
south: f(Direction::South),
west: f(Direction::West),
east: f(Direction::East),
mojang_cant_even
}
},
}
EmeraldBlock {
props {},
model { ("minecraft", "emerald_block") },
}
SpruceStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "spruce_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::SpruceStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
BirchStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "birch_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::BirchStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
JungleStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "jungle_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::JungleStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
CommandBlock {
props {
conditional: bool = [false, true],
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index() | (if conditional { 0x8 } else { 0x0 })),
offset Some(facing.offset() + (if conditional { 0 } else { 6 })),
model { ("minecraft", "command_block") },
variant format!("conditional={},facing={}", conditional, facing.as_string()),
}
Beacon {
props {},
material Material {
emitted_light: 15,
..material::NON_SOLID
},
model { ("minecraft", "beacon") },
}
CobblestoneWall {
props {
up: bool = [false, true],
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
variant: CobblestoneWallVariant = [
CobblestoneWallVariant::Normal,
CobblestoneWallVariant::Mossy
],
waterlogged: bool = [true, false],
},
data if !north && !south && !east && !west && !up && !waterlogged { Some(variant.data()) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if up { 0 } else { 1<<2 } +
if south { 0 } else { 1<<3 } +
if north { 0 } else { 1<<4 } +
if east { 0 } else { 1<<5 } +
if variant == CobblestoneWallVariant::Normal { 0 } else { 1<<6 }),
material material::NON_SOLID,
model { ("minecraft", format!("{}_wall", variant.as_string())) },
update_state (world, pos) => {
let f = |block| matches!(block, Block::CobblestoneWall{..} |
Block::FenceGate{..} |
Block::SpruceFenceGate{..} |
Block::BirchFenceGate{..} |
Block::JungleFenceGate{..} |
Block::DarkOakFenceGate{..} |
Block::AcaciaFenceGate{..});
let (north, south, west, east) = can_connect_sides(world, pos, &f);
let up = !(matches!(world.get_block(pos.shift(Direction::Up)), Block::Air{..}))
|| !((north && south && !west && !east) || (!north && !south && west && east));
Block::CobblestoneWall{up, north, south, west, east, variant, waterlogged}
},
multipart (key, val) => match key {
"up" => up == (val == "true"),
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"east" => east == (val == "true"),
"west" => west == (val == "true"),
_ => false,
},
}
FlowerPot {
props {
contents: FlowerPotVariant = [
FlowerPotVariant::Empty,
FlowerPotVariant::Poppy,
FlowerPotVariant::Dandelion,
FlowerPotVariant::OakSapling,
FlowerPotVariant::SpruceSapling,
FlowerPotVariant::BirchSapling,
FlowerPotVariant::JungleSapling,
FlowerPotVariant::RedMushroom,
FlowerPotVariant::BrownMushroom,
FlowerPotVariant::Cactus,
FlowerPotVariant::DeadBush,
FlowerPotVariant::Fern,
FlowerPotVariant::AcaciaSapling,
FlowerPotVariant::DarkOakSapling,
FlowerPotVariant::BlueOrchid,
FlowerPotVariant::Allium,
FlowerPotVariant::AzureBluet,
FlowerPotVariant::RedTulip,
FlowerPotVariant::OrangeTulip,
FlowerPotVariant::WhiteTulip,
FlowerPotVariant::PinkTulip,
FlowerPotVariant::Oxeye,
FlowerPotVariant::Cornflower,
FlowerPotVariant::LilyOfTheValley,
FlowerPotVariant::WitherRose
],
legacy_data: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
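        // Legacy versions keep the potted plant in the block entity, so only the raw metadata
        // survives as `legacy_data`; flattened versions use one block per plant, resolved by
        // `contents.offsets`.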
data if contents == FlowerPotVariant::Empty { Some(legacy_data as usize) } else { None },
offsets |protocol_version | {
if legacy_data != 0 { None } else { contents.offsets(protocol_version) }
},
material material::NON_SOLID,
model { ("minecraft", "flower_pot") },
}
Carrots {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "carrots") },
variant format!("age={}", age),
collision vec![],
}
Potatoes {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "potatoes") },
variant format!("age={}", age),
collision vec![],
}
WoodenButton {
props {
face: AttachedFace = [
AttachedFace::Floor,
AttachedFace::Wall,
AttachedFace::Ceiling
],
facing: Direction = [
Direction::East,
Direction::West,
Direction::South,
Direction::North
],
powered: bool = [false, true],
variant: TreeVariant = [
TreeVariant::Oak,
TreeVariant::Spruce,
TreeVariant::Birch,
TreeVariant::Jungle,
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data if variant == TreeVariant::Oak { face.data_with_facing_and_powered(facing, powered) } else { None },
offset Some(variant.offset() * (3 * 4 * 2) + face.offset() * (4 * 2) + facing.horizontal_offset() * 2 + if powered { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", "wooden_button") },
variant format!("facing={},powered={}", face.variant_with_facing(facing), powered),
}
SkullSkeletonWall {
props {
facing: Direction = [
Direction::Up,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
nodrop: bool = [false, true],
},
data if !nodrop { Some(facing.index()) } else { None },
offset if !nodrop && facing != Direction::Up { Some(facing.horizontal_offset()) } else { None },
material material::NON_SOLID,
model { ("minecraft", "skull") },
variant format!("facing={},nodrop={}", facing.as_string(), nodrop),
collision {
let (min_x, min_y, min_z, max_x, max_y, max_z) = match facing {
Direction::Up => (0.25, 0.0, 0.25, 0.75, 0.5, 0.75),
Direction::North => (0.25, 0.25, 0.5, 0.75, 0.75, 1.0),
Direction::South => (0.25, 0.25, 0.0, 0.75, 0.75, 0.5),
Direction::West => (0.5, 0.25, 0.25, 1.0, 0.75, 0.75),
Direction::East => (0.0, 0.25, 0.25, 0.5, 0.75, 0.75),
_ => unreachable!(),
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z)
)]
},
}
SkullSkeleton
{
props {
rotation: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data None::<usize>,
offset Some(rotation as usize),
material material::NON_SOLID,
model { ("minecraft", "skull") },
collision {
let (min_x, min_y, min_z, max_x, max_y, max_z) = (0.25, 0.0, 0.25, 0.75, 0.5, 0.75);
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z)
)]
},
}
SkullWitherSkeletonWall {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "skull") },
collision {
let (min_x, min_y, min_z, max_x, max_y, max_z) = match facing {
Direction::North => (0.25, 0.25, 0.5, 0.75, 0.75, 1.0),
Direction::South => (0.25, 0.25, 0.0, 0.75, 0.75, 0.5),
Direction::West => (0.5, 0.25, 0.25, 1.0, 0.75, 0.75),
Direction::East => (0.0, 0.25, 0.25, 0.5, 0.75, 0.75),
_ => unreachable!(),
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z)
)]
},
}
SkullWitherSkeleton {
props {
rotation: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data None::<usize>,
offset Some(rotation as usize),
material material::NON_SOLID,
model { ("minecraft", "skull") },
collision {
let (min_x, min_y, min_z, max_x, max_y, max_z) = (0.25, 0.0, 0.25, 0.75, 0.5, 0.75);
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z)
)]
},
}
ZombieWallHead {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "zombie_wall_head") },
}
ZombieHead {
props {
rotation: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data None::<usize>,
offset Some(rotation as usize),
material material::NON_SOLID,
model { ("minecraft", "zombie_head") },
}
PlayerWallHead {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "player_wall_head") },
}
PlayerHead {
props {
rotation: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data None::<usize>,
offset Some(rotation as usize),
material material::NON_SOLID,
model { ("minecraft", "player_head") },
}
CreeperWallHead {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "creeper_wall_head") },
}
CreeperHead {
props {
rotation: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data None::<usize>,
offset Some(rotation as usize),
material material::NON_SOLID,
model { ("minecraft", "creeper_head") },
}
DragonWallHead {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.horizontal_offset()),
material material::NON_SOLID,
model { ("minecraft", "dragon_wall_head") },
}
DragonHead {
props {
rotation: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data None::<usize>,
offset Some(rotation as usize),
material material::NON_SOLID,
model { ("minecraft", "dragon_head") },
}
Anvil {
props {
damage: u8 = [0, 1, 2],
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index() | (match damage { 0 => 0x0, 1 => 0x4, 2 => 0x8, _ => unreachable!() })),
offset Some(facing.horizontal_offset() + (damage as usize) * 4),
material material::NON_SOLID,
model { ("minecraft", "anvil") },
variant format!("damage={},facing={}", damage, facing.as_string()),
collision match facing.axis() {
Axis::Z => vec![Aabb3::new(
Point3::new(1.0/8.0, 0.0, 0.0),
Point3::new(7.0/8.0, 1.0, 1.0)
)],
Axis::X => vec![Aabb3::new(
Point3::new(0.0, 0.0, 1.0/8.0),
Point3::new(1.0, 1.0, 7.0/8.0)
)],
_ => unreachable!(),
},
}
TrappedChest {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
type_: ChestType = [
ChestType::Single,
ChestType::Left,
ChestType::Right
],
waterlogged: bool = [true, false],
},
data if type_ == ChestType::Single && !waterlogged { Some(facing.index()) } else { None },
offset Some(if waterlogged { 0 } else { 1 } +
type_.offset() * 2 +
facing.horizontal_offset() * (2 * 3)),
material material::NON_SOLID,
model { ("minecraft", "trapped_chest") },
variant format!("facing={}", facing.as_string()),
collision vec![Aabb3::new(
Point3::new(1.0/16.0, 0.0, 1.0/16.0),
Point3::new(15.0/16.0, 7.0/8.0, 15.0/16.0)
)],
}
LightWeightedPressurePlate {
props {
power: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(power as usize),
material material::NON_SOLID,
model { ("minecraft", "light_weighted_pressure_plate") },
variant format!("power={}", power),
collision vec![],
}
HeavyWeightedPressurePlate {
props {
power: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(power as usize),
material material::NON_SOLID,
model { ("minecraft", "heavy_weighted_pressure_plate") },
variant format!("power={}", power),
collision vec![],
}
ComparatorUnpowered {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
mode: ComparatorMode = [ComparatorMode::Compare, ComparatorMode::Subtract],
powered: bool = [false, true],
},
data Some(facing.horizontal_index()
| (if mode == ComparatorMode::Subtract { 0x4 } else { 0x0 })
| (if powered { 0x8 } else { 0x0 })),
offset Some(if powered { 0 } else { 1<<0 } +
if mode == ComparatorMode::Compare { 0 } else { 1<<1 } +
facing.horizontal_offset() * (1<<2)),
material material::NON_SOLID,
model { ("minecraft", "unpowered_comparator") },
variant format!("facing={},mode={},powered={}", facing.as_string(), mode.as_string(), powered),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0/8.0, 1.0)
)],
}
ComparatorPowered {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
mode: ComparatorMode = [ComparatorMode::Compare, ComparatorMode::Subtract],
powered: bool = [false, true],
},
data Some(facing.horizontal_index()
| (if mode == ComparatorMode::Subtract { 0x4 } else { 0x0 })
| (if powered { 0x8 } else { 0x0 })),
offset None,
material material::NON_SOLID,
model { ("minecraft", "powered_comparator") },
variant format!("facing={},mode={},powered={}", facing.as_string(), mode.as_string(), powered),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0/8.0, 1.0)
)],
}
DaylightDetector {
props {
power: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
inverted: bool = [true, false],
},
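        // The inverted detector is a separate legacy block; in the flattened states
        // `inverted=true` sorts first, so the non-inverted states start at offset 16.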
data if inverted { None } else { Some(power as usize) },
offset Some((power as usize) + if inverted { 0 } else { 16 }),
material material::NON_SOLID,
model { ("minecraft", "daylight_detector") },
variant format!("power={}", power),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 3.0/8.0, 1.0)
)],
}
RedstoneBlock {
props {},
model { ("minecraft", "redstone_block") },
}
QuartzOre {
props {},
model { ("minecraft", "quartz_ore") },
}
Hopper {
props {
enabled: bool = [false, true],
facing: Direction = [
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index() | (if enabled { 0x8 } else { 0x0 })),
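        // `Direction::offset()` covers all six directions, but hoppers never face up, so the
        // flattened index is spelled out explicitly.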
offset Some(match facing {
Direction::Down => 0,
Direction::North => 1,
Direction::South => 2,
Direction::West => 3,
Direction::East => 4,
_ => unreachable!(),
} + if enabled { 0 } else { 5 }),
material material::NON_SOLID,
model { ("minecraft", "hopper") },
variant format!("facing={}", facing.as_string()),
}
QuartzBlock {
props {
variant: QuartzVariant = [
QuartzVariant::Normal,
QuartzVariant::Chiseled,
QuartzVariant::PillarVertical,
QuartzVariant::PillarNorthSouth,
QuartzVariant::PillarEastWest
],
},
data Some(variant.data()),
model { ("minecraft", match variant {
QuartzVariant::Normal => "quartz_block",
QuartzVariant::Chiseled => "chiseled_quartz_block",
QuartzVariant::PillarVertical |
QuartzVariant::PillarNorthSouth |
QuartzVariant::PillarEastWest => "quartz_column",
} ) },
variant variant.as_string(),
}
QuartzStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "quartz_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::QuartzStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
ActivatorRail {
props {
shape: RailShape = [
RailShape::NorthSouth,
RailShape::EastWest,
RailShape::AscendingNorth,
RailShape::AscendingSouth,
RailShape::AscendingEast,
RailShape::AscendingWest
],
powered: bool = [false, true],
},
data Some(shape.data() | (if powered { 0x8 } else { 0x0 })),
offset Some(shape.data() + (if powered { 0 } else { 6 })),
material material::NON_SOLID,
model { ("minecraft", "activator_rail") },
variant format!("powered={},shape={}", powered, shape.as_string()),
collision vec![],
}
Dropper {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
triggered: bool = [false, true],
},
data Some(facing.index() | (if triggered { 0x8 } else { 0x0 })),
offset Some(if triggered { 0 } else { 1 } + facing.offset() * 2),
model { ("minecraft", "dropper") },
variant format!("facing={}", facing.as_string()),
}
StainedHardenedClay {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
model { ("minecraft", format!("{}_stained_hardened_clay", color.as_string()) ) },
}
StainedGlassPane {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
north: bool = [false, true],
south: bool = [false, true],
east: bool = [false, true],
west: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !east && !west && !waterlogged { Some(color.data()) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 } +
color.data() * (1<<5)),
material material::TRANSPARENT,
model { ("minecraft", format!("{}_stained_glass_pane", color.as_string()) ) },
collision pane_collision(north, south, east, west),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_glasspane);
Block::StainedGlassPane{color, north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"east" => east == (val == "true"),
"west" => west == (val == "true"),
_ => false,
},
}
Leaves2 {
props {
check_decay: bool = [false, true],
decayable: bool = [false, true],
variant: TreeVariant = [
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data Some(variant.data()
| (if decayable { 0x4 } else { 0x0 })
| (if check_decay { 0x8 } else { 0x0 })),
offset None,
material material::LEAVES,
model { ("minecraft", format!("{}_leaves", variant.as_string()) ) },
tint TintType::Foliage,
}
Log2 {
props {
axis: Axis = [Axis::None, Axis::X, Axis::Y, Axis::Z],
variant: TreeVariant = [
TreeVariant::Acacia,
TreeVariant::DarkOak
],
},
data Some(variant.data() | (axis.index() << 2)),
offset None,
model { ("minecraft", format!("{}_log", variant.as_string()) ) },
variant format!("axis={}", axis.as_string()),
}
AcaciaStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "acacia_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::AcaciaStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
DarkOakStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "dark_oak_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::DarkOakStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
Slime {
props {},
material material::TRANSPARENT,
model { ("minecraft", "slime") },
}
Barrier {
props {},
material material::INVISIBLE,
model { ("minecraft", "barrier") },
}
IronTrapDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
open: bool = [false, true],
waterlogged: bool = [true, false],
powered: bool = [true, false],
},
data if waterlogged || powered { None } else { Some(match facing {
Direction::North => 0,
Direction::South => 1,
Direction::West => 2,
Direction::East => 3,
_ => unreachable!(),
} | (if open { 0x4 } else { 0x0 }) | (if half == BlockHalf::Top { 0x8 } else { 0x0 }))},
offset Some(if waterlogged { 0 } else { 1<<0 } +
if powered { 0 } else { 1<<1 } +
if open { 0 } else { 1<<2 } +
if half == BlockHalf::Top { 0 } else { 1<<3 } +
facing.horizontal_offset() * (1<<4)),
material material::NON_SOLID,
model { ("minecraft", "iron_trapdoor") },
variant format!("facing={},half={},open={}", facing.as_string(), half.as_string(), open),
collision trapdoor_collision(facing, half, open),
}
Prismarine {
props {
variant: PrismarineVariant = [
PrismarineVariant::Normal,
PrismarineVariant::Brick,
PrismarineVariant::Dark
],
},
data Some(variant.data()),
model { ("minecraft", variant.as_string() ) },
}
PrismarineStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
variant: PrismarineVariant = [
PrismarineVariant::Normal,
PrismarineVariant::Brick,
PrismarineVariant::Dark
],
},
data None::<usize>,
offset Some(stair_offset(facing, half, shape, waterlogged).unwrap() + (2 * 5 * 2 * 4) * variant.data()),
material material::NON_SOLID,
model { ("minecraft", match variant {
PrismarineVariant::Normal => "prismarine_stairs",
PrismarineVariant::Brick => "prismarine_brick_stairs",
PrismarineVariant::Dark => "dark_prismarine_stairs",
}) },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::PrismarineStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged, variant},
}
PrismarineSlab {
props {
type_: BlockHalf = [
BlockHalf::Top,
BlockHalf::Bottom,
BlockHalf::Double
],
waterlogged: bool = [true, false],
variant: PrismarineVariant = [
PrismarineVariant::Normal,
PrismarineVariant::Brick,
PrismarineVariant::Dark
],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 } + type_.offset() * 2 + variant.data() * (2 * 3)),
material material::NON_SOLID,
model { ("minecraft", match variant {
PrismarineVariant::Normal => "prismarine_slab",
PrismarineVariant::Brick => "prismarine_brick_slab",
PrismarineVariant::Dark => "dark_prismarine_slab",
}) },
variant format!("type={}", type_.as_string()),
collision slab_collision(type_),
}
SeaLantern {
props {},
material Material {
emitted_light: 15,
..material::SOLID
},
model { ("minecraft", "sea_lantern") },
}
HayBlock {
props {
axis: Axis = [Axis::X, Axis::Y, Axis::Z],
},
data Some(match axis { Axis::X => 0x4, Axis::Y => 0x0, Axis::Z => 0x8, _ => unreachable!() }),
offset Some(match axis { Axis::X => 0, Axis::Y => 1, Axis::Z => 2, _ => unreachable!() }),
model { ("minecraft", "hay_block") },
variant format!("axis={}", axis.as_string()),
}
Carpet {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
material material::NON_SOLID,
model { ("minecraft", format!("{}_carpet", color.as_string()) ) },
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0/16.0, 1.0)
)],
}
HardenedClay {
props {},
model { ("minecraft", "hardened_clay") },
}
CoalBlock {
props {},
model { ("minecraft", "coal_block") },
}
PackedIce {
props {},
model { ("minecraft", "packed_ice") },
}
DoublePlant {
props {
half: BlockHalf = [BlockHalf::Lower, BlockHalf::Upper],
variant: DoublePlantVariant = [
DoublePlantVariant::Sunflower,
DoublePlantVariant::Lilac,
DoublePlantVariant::DoubleTallgrass,
DoublePlantVariant::LargeFern,
DoublePlantVariant::RoseBush,
DoublePlantVariant::Peony
],
},
data Some(variant.data() | (if half == BlockHalf::Upper { 0x8 } else { 0x0 })),
offset Some(half.offset() + variant.offset() * 2),
material material::NON_SOLID,
model { ("minecraft", variant.as_string()) },
variant format!("half={}", half.as_string()),
tint TintType::Foliage,
collision vec![],
update_state (world, pos) => {
let (half, variant) = update_double_plant_state(world, pos, half, variant);
Block::DoublePlant{half, variant}
},
}
StandingBanner {
props {
rotation: Rotation = [
Rotation::South,
Rotation::SouthSouthWest,
Rotation::SouthWest,
Rotation::WestSouthWest,
Rotation::West,
Rotation::WestNorthWest,
Rotation::NorthWest,
Rotation::NorthNorthWest,
Rotation::North,
Rotation::NorthNorthEast,
Rotation::NorthEast,
Rotation::EastNorthEast,
Rotation::East,
Rotation::EastSouthEast,
Rotation::SouthEast,
Rotation::SouthSouthEast
],
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data if color != ColoredVariant::White { None } else { Some(rotation.data()) },
offset Some(rotation.data() + color.data() * 16),
material material::NON_SOLID,
model { ("minecraft", "standing_banner") },
variant format!("rotation={}", rotation.as_string()),
}
WallBanner {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data if color != ColoredVariant::White { None } else { Some(facing.index()) },
offset Some(facing.horizontal_offset() + color.data() * 4),
material material::NON_SOLID,
model { ("minecraft", "wall_banner") },
variant format!("facing={}", facing.as_string()),
}
DaylightDetectorInverted {
props {
power: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
},
data Some(power as usize),
offset None,
material material::NON_SOLID,
model { ("minecraft", "daylight_detector_inverted") },
variant format!("power={}", power),
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 3.0/8.0, 1.0)
)],
}
RedSandstone {
props {
variant: RedSandstoneVariant = [
RedSandstoneVariant::Normal,
RedSandstoneVariant::Chiseled,
RedSandstoneVariant::Smooth
],
},
data Some(variant.data()),
model { ("minecraft", variant.as_string()) },
}
RedSandstoneStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "red_sandstone_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::RedSandstoneStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
WoodenSlabFlat {
props {
type_: BlockHalf = [
BlockHalf::Top,
BlockHalf::Bottom,
BlockHalf::Double
],
waterlogged: bool = [true, false],
variant: WoodSlabVariant = [
WoodSlabVariant::Oak,
WoodSlabVariant::Spruce,
WoodSlabVariant::Birch,
WoodSlabVariant::Jungle,
WoodSlabVariant::Acacia,
WoodSlabVariant::DarkOak
],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 } + type_.offset() * 2 + variant.data() * (2 * 3)),
material material::NON_SOLID,
model { ("minecraft", format!("{}_slab", variant.as_string()) ) },
variant format!("type={}", type_.as_string()),
collision slab_collision(type_),
}
StoneSlabFlat {
props {
type_: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom, BlockHalf::Double],
variant: StoneSlabVariant = [
StoneSlabVariant::Stone,
StoneSlabVariant::SmoothStone,
StoneSlabVariant::Sandstone,
StoneSlabVariant::CutSandstone,
StoneSlabVariant::PetrifiedWood,
StoneSlabVariant::Cobblestone,
StoneSlabVariant::Brick,
StoneSlabVariant::StoneBrick,
StoneSlabVariant::NetherBrick,
StoneSlabVariant::Quartz,
StoneSlabVariant::RedSandstone,
StoneSlabVariant::CutRedSandstone,
StoneSlabVariant::Purpur
],
waterlogged: bool = [true, false],
},
data None::<usize>,
offsets |protocol_version| {
variant.offsets(protocol_version).map(|o| if waterlogged { 0 } else { 1 } + type_.offset() * 2 + o * (2 * 3))
},
material material::NON_SOLID,
model { ("minecraft", format!("{}_slab", variant.as_string()) ) },
variant format!("type={}", type_.as_string()),
collision slab_collision(type_),
}
DoubleStoneSlab2 {
props {
seamless: bool = [false, true],
variant: StoneSlabVariant = [
StoneSlabVariant::RedSandstone
],
},
data Some(variant.data() | (if seamless { 0x8 } else { 0x0 })),
offset None,
material material::SOLID,
model { ("minecraft", format!("{}_double_slab", variant.as_string()) ) },
variant if seamless { "all" } else { "normal" },
}
StoneSlab2 {
props {
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
variant: StoneSlabVariant = [StoneSlabVariant::RedSandstone],
},
data Some(variant.data() | (if half == BlockHalf::Top { 0x8 } else { 0x0 })),
offset None,
material material::NON_SOLID,
model { ("minecraft", format!("{}_slab", variant.as_string()) ) },
variant format!("half={}", half.as_string()),
collision slab_collision(half),
}
SmoothStone {
props {
variant: StoneSlabVariant = [
StoneSlabVariant::Stone,
StoneSlabVariant::Sandstone,
StoneSlabVariant::Quartz,
StoneSlabVariant::RedSandstone
],
},
data None::<usize>,
offset Some(match variant {
StoneSlabVariant::Stone => 0,
StoneSlabVariant::Sandstone => 1,
StoneSlabVariant::Quartz => 2,
StoneSlabVariant::RedSandstone => 3,
_ => unreachable!(),
}),
model { ("minecraft", format!("smooth_{}", variant.as_string()) ) },
}
SpruceFenceGate {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
in_wall: bool = [false, true],
open: bool = [false, true],
powered: bool = [false, true],
},
data fence_gate_data(facing, in_wall, open, powered),
offset fence_gate_offset(facing, in_wall, open, powered),
material material::NON_SOLID,
model { ("minecraft", "spruce_fence_gate") },
variant format!("facing={},in_wall={},open={}", facing.as_string(), in_wall, open),
collision fence_gate_collision(facing, in_wall, open),
update_state (world, pos) => Block::SpruceFenceGate{
facing,
in_wall: fence_gate_update_state(world, pos, facing),
open,
powered
},
}
BirchFenceGate {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
in_wall: bool = [false, true],
open: bool = [false, true],
powered: bool = [false, true],
},
data fence_gate_data(facing, in_wall, open, powered),
offset fence_gate_offset(facing, in_wall, open, powered),
material material::NON_SOLID,
model { ("minecraft", "birch_fence_gate") },
variant format!("facing={},in_wall={},open={}", facing.as_string(), in_wall, open),
collision fence_gate_collision(facing, in_wall, open),
update_state (world, pos) => Block::BirchFenceGate{
facing,
in_wall: fence_gate_update_state(world, pos, facing),
open,
powered
},
}
JungleFenceGate {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
in_wall: bool = [false, true],
open: bool = [false, true],
powered: bool = [false, true],
},
data fence_gate_data(facing, in_wall, open, powered),
offset fence_gate_offset(facing, in_wall, open, powered),
material material::NON_SOLID,
model { ("minecraft", "jungle_fence_gate") },
variant format!("facing={},in_wall={},open={}", facing.as_string(), in_wall, open),
collision fence_gate_collision(facing, in_wall, open),
update_state (world, pos) => Block::JungleFenceGate{
facing,
in_wall: fence_gate_update_state(world, pos, facing),
open,
powered
},
}
DarkOakFenceGate {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
in_wall: bool = [false, true],
open: bool = [false, true],
powered: bool = [false, true],
},
data fence_gate_data(facing, in_wall, open, powered),
offset fence_gate_offset(facing, in_wall, open, powered),
material material::NON_SOLID,
model { ("minecraft", "dark_oak_fence_gate") },
variant format!("facing={},in_wall={},open={}", facing.as_string(), in_wall, open),
collision fence_gate_collision(facing, in_wall, open),
update_state (world, pos) => Block::DarkOakFenceGate{
facing,
in_wall: fence_gate_update_state(world, pos, facing),
open,
powered
},
}
AcaciaFenceGate {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
in_wall: bool = [false, true],
open: bool = [false, true],
powered: bool = [false, true],
},
data fence_gate_data(facing, in_wall, open, powered),
offset fence_gate_offset(facing, in_wall, open, powered),
material material::NON_SOLID,
model { ("minecraft", "acacia_fence_gate") },
variant format!("facing={},in_wall={},open={}", facing.as_string(), in_wall, open),
collision fence_gate_collision(facing, in_wall, open),
update_state (world, pos) => Block::AcaciaFenceGate{
facing,
in_wall: fence_gate_update_state(world, pos, facing),
open,
powered
},
}
SpruceFence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !west && !east && !waterlogged { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "spruce_fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_fence);
Block::SpruceFence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
BirchFence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !west && !east && !waterlogged { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "birch_fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_fence);
Block::BirchFence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
JungleFence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !west && !east && !waterlogged { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "jungle_fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_fence);
Block::JungleFence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
DarkOakFence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !west && !east && !waterlogged { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "dark_oak_fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_fence);
Block::DarkOakFence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
AcaciaFence {
props {
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
waterlogged: bool = [true, false],
},
data if !north && !south && !west && !east && !waterlogged { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if waterlogged { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 }),
material material::NON_SOLID,
model { ("minecraft", "acacia_fence") },
collision fence_collision(north, south, west, east),
update_state (world, pos) => {
let (north, south, west, east) = can_connect_sides(world, pos, &can_connect_fence);
Block::AcaciaFence{north, south, west, east, waterlogged}
},
multipart (key, val) => match key {
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"west" => west == (val == "true"),
"east" => east == (val == "true"),
_ => false,
},
}
SpruceDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "spruce_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::SpruceDoor{facing, half, hinge, open, powered}
},
}
BirchDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "birch_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::BirchDoor{facing, half, hinge, open, powered}
},
}
JungleDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "jungle_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::JungleDoor{facing, half, hinge, open, powered}
},
}
AcaciaDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "acacia_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::AcaciaDoor{facing, half, hinge, open, powered}
},
}
DarkOakDoor {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: DoorHalf = [DoorHalf::Upper, DoorHalf::Lower],
hinge: Side = [Side::Left, Side::Right],
open: bool = [false, true],
powered: bool = [false, true],
},
data door_data(facing, half, hinge, open, powered),
offset door_offset(facing, half, hinge, open, powered),
material material::NON_SOLID,
model { ("minecraft", "dark_oak_door") },
variant format!("facing={},half={},hinge={},open={}", facing.as_string(), half.as_string(), hinge.as_string(), open),
collision door_collision(facing, hinge, open),
update_state (world, pos) => {
let (facing, hinge, open, powered) = update_door_state(world, pos, half, facing, hinge, open, powered);
Block::DarkOakDoor{facing, half, hinge, open, powered}
},
}
EndRod {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
material Material {
emitted_light: 14,
..material::NON_SOLID
},
model { ("minecraft", "end_rod") },
variant format!("facing={}", facing.as_string()),
collision {
match facing.axis() {
Axis::Y => vec![Aabb3::new(
Point3::new(3.0/8.0, 0.0, 3.0/8.0),
Point3::new(5.0/8.0, 1.0, 5.0/8.0))
],
Axis::Z => vec![Aabb3::new(
Point3::new(3.0/8.0, 3.0/8.0, 0.0),
Point3::new(5.0/8.0, 5.0/8.0, 1.0))
],
Axis::X => vec![Aabb3::new(
Point3::new(0.0, 3.0/8.0, 3.0/8.0),
Point3::new(1.0, 5.0/8.0, 5.0/8.0))
],
_ => unreachable!(),
}
},
}
ChorusPlant {
props {
up: bool = [false, true],
down: bool = [false, true],
north: bool = [false, true],
south: bool = [false, true],
west: bool = [false, true],
east: bool = [false, true],
},
data if !up && !down && !north && !south && !west && !east { Some(0) } else { None },
offset Some(if west { 0 } else { 1<<0 } +
if up { 0 } else { 1<<1 } +
if south { 0 } else { 1<<2 } +
if north { 0 } else { 1<<3 } +
if east { 0 } else { 1<<4 } +
if down { 0 } else { 1<<5 }),
material material::NON_SOLID,
model { ("minecraft", "chorus_plant") },
collision {
let mut collision = vec![Aabb3::new(
Point3::new(3.0/16.0, 3.0/16.0, 3.0/16.0),
Point3::new(13.0/16.0, 13.0/16.0, 13.0/16.0))
];
if up {
collision.push(Aabb3::new(
Point3::new(3.0/16.0, 13.0/16.0, 3.0/16.0),
Point3::new(13.0/16.0, 1.0, 13.0/16.0))
);
}
if down {
collision.push(Aabb3::new(
Point3::new(3.0/16.0, 0.0, 3.0/16.0),
Point3::new(13.0/16.0, 3.0/16.0, 13.0/16.0))
);
}
if north {
collision.push(Aabb3::new(
Point3::new(3.0/16.0, 3.0/16.0, 0.0),
Point3::new(13.0/16.0, 13.0/16.0, 3.0/16.0))
);
}
if south {
collision.push(Aabb3::new(
Point3::new(3.0/16.0, 3.0/16.0, 13.0/16.0),
Point3::new(13.0/16.0, 13.0/16.0, 1.0))
);
}
if east {
collision.push(Aabb3::new(
Point3::new(13.0/16.0, 3.0/16.0, 3.0/16.0),
Point3::new(1.0, 13.0/16.0, 13.0/16.0))
);
}
if west {
collision.push(Aabb3::new(
Point3::new(0.0, 3.0/16.0, 3.0/16.0),
Point3::new(3.0/16.0, 13.0/16.0, 13.0/16.0))
);
}
collision
},
update_state (world, pos) => Block::ChorusPlant {
up: matches!(world.get_block(pos.shift(Direction::Up)), Block::ChorusPlant{..} | Block::ChorusFlower{..}),
down: matches!(world.get_block(pos.shift(Direction::Down)), Block::ChorusPlant{..} | Block::ChorusFlower{..} | Block::EndStone{..}),
north: matches!(world.get_block(pos.shift(Direction::North)), Block::ChorusPlant{..} | Block::ChorusFlower{..}),
south: matches!(world.get_block(pos.shift(Direction::South)), Block::ChorusPlant{..} | Block::ChorusFlower{..}),
west: matches!(world.get_block(pos.shift(Direction::West)), Block::ChorusPlant{..} | Block::ChorusFlower{..}),
east: matches!(world.get_block(pos.shift(Direction::East)), Block::ChorusPlant{..} | Block::ChorusFlower{..}),
},
multipart (key, val) => match key {
"up" => up == (val == "true"),
"down" => down == (val == "true"),
"north" => north == (val == "true"),
"south" => south == (val == "true"),
"east" => east == (val == "true"),
"west" => west == (val == "true"),
_ => false,
},
}
ChorusFlower {
props {
age: u8 = [0, 1, 2, 3, 4, 5],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "chorus_flower") },
variant format!("age={}", age),
}
PurpurBlock {
props {},
model { ("minecraft", "purpur_block") },
}
PurpurPillar {
props {
axis: Axis = [Axis::X, Axis::Y, Axis::Z],
},
data Some(match axis { Axis::X => 0x4, Axis::Y => 0x0, Axis::Z => 0x8, _ => unreachable!() }),
offset Some(match axis { Axis::X => 0, Axis::Y => 1, Axis::Z => 2, _ => unreachable!() }),
model { ("minecraft", "purpur_pillar") },
variant format!("axis={}", axis.as_string()),
}
PurpurStairs {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
shape: StairShape = [
StairShape::Straight,
StairShape::InnerLeft,
StairShape::InnerRight,
StairShape::OuterLeft,
StairShape::OuterRight
],
waterlogged: bool = [true, false],
},
data stair_data(facing, half, shape, waterlogged),
offset stair_offset(facing, half, shape, waterlogged),
material material::NON_SOLID,
model { ("minecraft", "purpur_stairs") },
variant format!("facing={},half={},shape={}", facing.as_string(), half.as_string(), shape.as_string()),
collision stair_collision(facing, shape, half),
update_state (world, pos) => Block::PurpurStairs{facing, half, shape: update_stair_shape(world, pos, facing), waterlogged},
}
PurpurDoubleSlab {
props {
variant: StoneSlabVariant = [StoneSlabVariant::Purpur],
},
offset None,
model { ("minecraft", format!("{}_double_slab", variant.as_string()) ) },
}
PurpurSlab {
props {
half: BlockHalf = [BlockHalf::Top, BlockHalf::Bottom],
variant: StoneSlabVariant = [StoneSlabVariant::Purpur],
},
data if half == BlockHalf::Top { Some(0x8) } else { Some(0) },
offset None,
material material::NON_SOLID,
model { ("minecraft", format!("{}_slab", variant.as_string()) ) },
variant format!("half={},variant=default", half.as_string()),
collision slab_collision(half),
}
EndBricks {
props {},
model { ("minecraft", "end_bricks") },
}
Beetroots {
props {
age: u8 = [0, 1, 2, 3],
},
data Some(age as usize),
material material::NON_SOLID,
model { ("minecraft", "beetroots") },
variant format!("age={}", age),
collision vec![],
}
GrassPath {
props {},
material material::NON_SOLID,
model { ("minecraft", "grass_path") },
collision vec![Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 15.0/16.0, 1.0)
)],
}
EndGateway {
props {},
material material::NON_SOLID,
model { ("minecraft", "end_gateway") },
collision vec![],
}
RepeatingCommandBlock {
props {
conditional: bool = [false, true],
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index() | (if conditional { 0x8 } else { 0x0 })),
offset Some(facing.offset() + (if conditional { 0 } else { 6 })),
model { ("minecraft", "repeating_command_block") },
variant format!("conditional={},facing={}", conditional, facing.as_string()),
}
ChainCommandBlock {
props {
conditional: bool = [false, true],
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index() | (if conditional { 0x8 } else { 0x0 })),
offset Some(facing.offset() + (if conditional { 0 } else { 6 })),
model { ("minecraft", "chain_command_block") },
variant format!("conditional={},facing={}", conditional, facing.as_string()),
}
FrostedIce {
props {
age: u8 = [ 0, 1, 2, 3 ],
},
data if age == 0 { Some(0) } else { None },
offset Some(age as usize),
model { ("minecraft", "frosted_ice") },
}
MagmaBlock {
props {},
model { ("minecraft", "magma") },
}
NetherWartBlock {
props {},
model { ("minecraft", "nether_wart_block") },
}
RedNetherBrick {
props {},
model { ("minecraft", "red_nether_brick") },
}
BoneBlock {
props {
axis: Axis = [Axis::Y, Axis::Z, Axis::X],
},
data Some(axis.index() << 2),
offset Some(match axis { Axis::X => 0, Axis::Y => 1, Axis::Z => 2, _ => unreachable!() }),
model { ("minecraft", "bone_block") },
variant format!("axis={}", axis.as_string()),
}
StructureVoid {
props {},
material material::Material {
collidable: false,
.. material::INVISIBLE
},
model { ("minecraft", "structure_void") },
// TODO: should have a small hit box but no collision
collision vec![],
}
Observer {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
powered: bool = [false, true],
},
data Some(facing.index() | (if powered { 0x8 } else { 0x0 })),
offset Some(if powered { 0 } else { 1 } + facing.offset() * 2),
model { ("minecraft", "observer") },
variant format!("facing={},powered={}", facing.as_string(), powered),
}
// TODO: Shulker box textures (1.11+). Since there is no dedicated block model yet, we use wool as a stand-in.
// The textures should be built from textures/blocks/shulker_top_<color>.png
// and textures/entity/shulker/shulker_<color>.png
ShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data None::<usize>,
offset Some(facing.offset()),
model { ("minecraft", "sponge") },
}
WhiteShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "white_wool") },
}
OrangeShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "orange_wool") },
}
MagentaShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "magenta_wool") },
}
LightBlueShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "light_blue_wool") },
}
YellowShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "yellow_wool") },
}
LimeShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "lime_wool") },
}
PinkShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "pink_wool") },
}
GrayShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "gray_wool") },
}
LightGrayShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "light_gray_wool") },
}
CyanShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "cyan_wool") },
}
PurpleShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "purple_wool") },
}
BlueShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "blue_wool") },
}
BrownShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "brown_wool") },
}
GreenShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "green_wool") },
}
RedShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "red_wool") },
}
BlackShulkerBox {
props {
facing: Direction = [
Direction::Up,
Direction::Down,
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.index()),
offset Some(facing.offset()),
model { ("minecraft", "black_wool") },
}
WhiteGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "white_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
OrangeGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "orange_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
MagentaGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "magenta_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
LightBlueGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "light_blue_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
YellowGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "yellow_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
LimeGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "lime_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
PinkGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "pink_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
GrayGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "gray_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
LightGrayGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "silver_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
CyanGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "cyan_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
PurpleGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "purple_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
BlueGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "blue_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
BrownGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "brown_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
GreenGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "green_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
RedGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "red_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
BlackGlazedTerracotta {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
},
data Some(facing.horizontal_index()),
offset Some(facing.horizontal_offset()),
model { ("minecraft", "black_glazed_terracotta") },
variant format!("facing={}", facing.as_string()),
}
Concrete {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
model { ("minecraft", format!("{}_concrete", color.as_string()) ) },
}
ConcretePowder {
props {
color: ColoredVariant = [
ColoredVariant::White,
ColoredVariant::Orange,
ColoredVariant::Magenta,
ColoredVariant::LightBlue,
ColoredVariant::Yellow,
ColoredVariant::Lime,
ColoredVariant::Pink,
ColoredVariant::Gray,
ColoredVariant::Silver,
ColoredVariant::Cyan,
ColoredVariant::Purple,
ColoredVariant::Blue,
ColoredVariant::Brown,
ColoredVariant::Green,
ColoredVariant::Red,
ColoredVariant::Black
],
},
data Some(color.data()),
model { ("minecraft", format!("{}_concrete_powder", color.as_string()) ) },
}
Kelp {
props {
age: u8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25],
},
data None::<usize>,
offset Some(age as usize),
model { ("minecraft", "kelp") },
}
KelpPlant {
props {},
data None::<usize>,
offset Some(0),
model { ("minecraft", "kelp_plant") },
}
DriedKelpBlock {
props {},
data None::<usize>,
offset Some(0),
model { ("minecraft", "dried_kelp_block") },
}
TurtleEgg {
props {
age: u8 = [1, 2, 3, 4],
hatch: u8 = [0, 1, 2],
},
data None::<usize>,
offset Some((hatch as usize) + ((age - 1) as usize) * 3),
model { ("minecraft", "turtle_egg") },
}
CoralBlock {
props {
variant: CoralVariant = [
CoralVariant::DeadTube,
CoralVariant::DeadBrain,
CoralVariant::DeadBubble,
CoralVariant::DeadFire,
CoralVariant::DeadHorn,
CoralVariant::Tube,
CoralVariant::Brain,
CoralVariant::Bubble,
CoralVariant::Fire,
CoralVariant::Horn
],
},
data None::<usize>,
offset Some(variant.offset()),
model { ("minecraft", format!("{}_block", variant.as_string())) },
}
Coral {
props {
waterlogged: bool = [true, false],
variant: CoralVariant = [
CoralVariant::DeadTube,
CoralVariant::DeadBrain,
CoralVariant::DeadBubble,
CoralVariant::DeadFire,
CoralVariant::DeadHorn,
CoralVariant::Tube,
CoralVariant::Brain,
CoralVariant::Bubble,
CoralVariant::Fire,
CoralVariant::Horn
],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 } + variant.offset() * 2),
model { ("minecraft", variant.as_string()) },
}
CoralWallFan {
props {
facing: Direction = [
Direction::North,
Direction::South,
Direction::West,
Direction::East
],
waterlogged: bool = [true, false],
variant: CoralVariant = [
CoralVariant::DeadTube,
CoralVariant::DeadBrain,
CoralVariant::DeadBubble,
CoralVariant::DeadFire,
CoralVariant::DeadHorn,
CoralVariant::Tube,
CoralVariant::Brain,
CoralVariant::Bubble,
CoralVariant::Fire,
CoralVariant::Horn
],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 } +
facing.horizontal_offset() * 2 +
variant.offset() * (2 * 4)),
model { ("minecraft", format!("{}_wall_fan", variant.as_string())) },
}
CoralFan {
props {
waterlogged: bool = [true, false],
variant: CoralVariant = [
CoralVariant::DeadTube,
CoralVariant::DeadBrain,
CoralVariant::DeadBubble,
CoralVariant::DeadFire,
CoralVariant::DeadHorn,
CoralVariant::Tube,
CoralVariant::Brain,
CoralVariant::Bubble,
CoralVariant::Fire,
CoralVariant::Horn
],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 } +
variant.offset() * 2),
model { ("minecraft", format!("{}_fan", variant.as_string())) },
}
SeaPickle {
props {
age: u8 = [1, 2, 3, 4],
waterlogged: bool = [true, false],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 } +
((age - 1) as usize) * 2),
model { ("minecraft", "sea_pickle") },
variant format!("age={}", age),
}
BlueIce {
props {},
data None::<usize>,
offset Some(0),
model { ("minecraft", "blue_ice") },
}
Conduit {
props {
waterlogged: bool = [true, false],
},
data None::<usize>,
offset Some(if waterlogged { 0 } else { 1 }),
material material::NON_SOLID,
model { ("minecraft", "conduit") },
}
VoidAir {
props {},
data None::<usize>,
offset Some(0),
material material::Material {
collidable: false,
.. material::INVISIBLE
},
model { ("minecraft", "air") },
collision vec![],
}
CaveAir {
props {},
data None::<usize>,
offset Some(0),
material material::Material {
collidable: false,
.. material::INVISIBLE
},
model { ("minecraft", "air") },
collision vec![],
}
BubbleColumn {
props {
drag: bool = [true, false],
},
data None::<usize>,
offset Some(if drag { 0 } else { 1 }),
model { ("minecraft", "bubble_column") },
}
Missing253 {
props {},
data Some(0),
offset None,
model { ("leafish", "missing_block") },
}
Missing254 {
props {},
data Some(0),
offset None,
model { ("leafish", "missing_block") },
}
StructureBlock {
props {
mode: StructureBlockMode = [
StructureBlockMode::Save,
StructureBlockMode::Load,
StructureBlockMode::Corner,
StructureBlockMode::Data
],
},
data Some(mode.data()),
model { ("minecraft", "structure_block") },
variant format!("mode={}", mode.as_string()),
}
Missing {
props {},
data None::<usize>,
model { ("leafish", "missing_block") },
}
}
#[cfg(test)]
mod tests {
use super::*;
// Spot-check a few blocks across different protocol versions, including that the last supported block is recognized correctly
// TODO: comprehensive testing against https://github.com/PrismarineJS/minecraft-data/tree/master/data/pc
#[test]
fn hier_1_12_2() {
let id_map = VanillaIDMap::new(340);
assert_eq!(
id_map.by_vanilla_id(255 << 4, &HashMap::new()),
StructureBlock {
mode: StructureBlockMode::Save
}
);
assert_eq!(
id_map.by_vanilla_id((255 << 4) | 3, &HashMap::new()),
StructureBlock {
mode: StructureBlockMode::Data
}
);
}
#[test]
fn flat_1_13_2() {
let id_map = VanillaIDMap::new(404);
assert_eq!(
id_map.by_vanilla_id(8595, &HashMap::new()),
StructureBlock {
mode: StructureBlockMode::Save
}
);
assert_eq!(
id_map.by_vanilla_id(8598, &HashMap::new()),
StructureBlock {
mode: StructureBlockMode::Data
}
);
}
#[test]
fn flat_1_14_4() {
let id_map = VanillaIDMap::new(477);
assert_eq!(
id_map.by_vanilla_id(9113, &HashMap::new()),
Conduit { waterlogged: true }
);
assert_eq!(
id_map.by_vanilla_id(9114, &HashMap::new()),
Conduit { waterlogged: false }
);
}
#[test]
fn flat_1_15_1() {
let id_map = VanillaIDMap::new(575);
assert_eq!(
id_map.by_vanilla_id(9113, &HashMap::new()),
Conduit { waterlogged: true }
);
assert_eq!(
id_map.by_vanilla_id(9114, &HashMap::new()),
Conduit { waterlogged: false }
);
}
#[test]
fn flat_1_16() {
let id_map = VanillaIDMap::new(735);
assert_eq!(
id_map.by_vanilla_id(1048, &HashMap::new()),
NoteBlock {
instrument: NoteBlockInstrument::Pling,
note: 24,
powered: false
}
);
}
#[test]
fn flat_1_16_2() {
let id_map = VanillaIDMap::new(751);
assert_eq!(
id_map.by_vanilla_id(1048, &HashMap::new()),
NoteBlock {
instrument: NoteBlockInstrument::Pling,
note: 24,
powered: false
}
);
}
}
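/// Returns whether the block at `pos` is one of the flammable block types
/// (planks, logs, leaves, fences, wool, plants, etc.).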
fn can_burn<W: WorldAccess>(world: &W, pos: Position) -> bool {
matches!(
world.get_block(pos),
Block::Planks { .. }
| Block::DoubleWoodenSlab { .. }
| Block::WoodenSlab { .. }
| Block::FenceGate { .. }
| Block::SpruceFenceGate { .. }
| Block::BirchFenceGate { .. }
| Block::JungleFenceGate { .. }
| Block::DarkOakFenceGate { .. }
| Block::AcaciaFenceGate { .. }
| Block::Fence { .. }
| Block::SpruceFence { .. }
| Block::BirchFence { .. }
| Block::JungleFence { .. }
| Block::DarkOakFence { .. }
| Block::AcaciaFence { .. }
| Block::OakStairs { .. }
| Block::BirchStairs { .. }
| Block::SpruceStairs { .. }
| Block::JungleStairs { .. }
| Block::AcaciaStairs { .. }
| Block::DarkOakStairs { .. }
| Block::Log { .. }
| Block::Log2 { .. }
| Block::Leaves { .. }
| Block::Leaves2 { .. }
| Block::BookShelf { .. }
| Block::TNT { .. }
| Block::TallGrass { .. }
| Block::DoublePlant { .. }
| Block::YellowFlower { .. }
| Block::RedFlower { .. }
| Block::DeadBush { .. }
| Block::Wool { .. }
| Block::Vine { .. }
| Block::CoalBlock { .. }
| Block::HayBlock { .. }
| Block::Carpet { .. }
)
}
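/// Returns whether the block directly above `pos` is snow or a snow layer.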
fn is_snowy<W: WorldAccess>(world: &W, pos: Position) -> bool {
matches!(
world.get_block(pos.shift(Direction::Up)),
Block::Snow { .. } | Block::SnowLayer { .. }
)
}
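/// Checks the four horizontal neighbours of `pos` against the predicate `f`
/// and returns their connection flags as `(north, south, west, east)`.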
fn can_connect_sides<F: Fn(Block) -> bool, W: WorldAccess>(
world: &W,
pos: Position,
f: &F,
) -> (bool, bool, bool, bool) {
(
can_connect(world, pos.shift(Direction::North), f),
can_connect(world, pos.shift(Direction::South), f),
can_connect(world, pos.shift(Direction::West), f),
can_connect(world, pos.shift(Direction::East), f),
)
}
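/// A neighbour can be connected to if the predicate accepts it, or if it is a
/// renderable full cube (its material is culled against).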
fn can_connect<F: Fn(Block) -> bool, W: WorldAccess>(world: &W, pos: Position, f: &F) -> bool {
let block = world.get_block(pos);
f(block) || (block.get_material().renderable && block.get_material().should_cull_against)
}
fn can_connect_fence(block: Block) -> bool {
matches!(
block,
Block::Fence { .. }
| Block::SpruceFence { .. }
| Block::BirchFence { .. }
| Block::JungleFence { .. }
| Block::DarkOakFence { .. }
| Block::AcaciaFence { .. }
| Block::FenceGate { .. }
| Block::SpruceFenceGate { .. }
| Block::BirchFenceGate { .. }
| Block::JungleFenceGate { .. }
| Block::DarkOakFenceGate { .. }
| Block::AcaciaFenceGate { .. }
)
}
fn can_connect_glasspane(block: Block) -> bool {
matches!(
block,
Block::Glass { .. }
| Block::StainedGlass { .. }
| Block::GlassPane { .. }
| Block::StainedGlassPane { .. }
)
}
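/// How redstone wire at `pos` connects towards `dir`: `Up` if the neighbour is
/// a solid block with wire on top and the block above `pos` does not block the
/// climb, `Side` if the neighbour (or the block below it) is redstone wire,
/// otherwise `None`.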
fn can_connect_redstone<W: WorldAccess>(world: &W, pos: Position, dir: Direction) -> RedstoneSide {
let shift_pos = pos.shift(dir);
let block = world.get_block(shift_pos);
if block.get_material().should_cull_against {
let side_up = world.get_block(shift_pos.shift(Direction::Up));
let up = world.get_block(pos.shift(Direction::Up));
if matches!(side_up, Block::RedstoneWire { .. }) && !up.get_material().should_cull_against {
return RedstoneSide::Up;
}
return RedstoneSide::None;
}
let side_down = world.get_block(shift_pos.shift(Direction::Down));
if matches!(block, Block::RedstoneWire { .. })
|| matches!(side_down, Block::RedstoneWire { .. })
{
return RedstoneSide::Side;
}
RedstoneSide::None
}
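/// Legacy block metadata for fence gates: only the facing and the open bit are
/// encoded, so states with `in_wall` or `powered` set have no metadata.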
fn fence_gate_data(facing: Direction, in_wall: bool, open: bool, powered: bool) -> Option<usize> {
if in_wall || powered {
return None;
}
Some(facing.horizontal_index() | (if open { 0x4 } else { 0x0 }))
}
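/// Flattened state offset for fence gates, packing the powered, open and
/// in-wall flags into the low bits and the facing into the high bits.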
fn fence_gate_offset(facing: Direction, in_wall: bool, open: bool, powered: bool) -> Option<usize> {
Some(
if powered { 0 } else { 1 << 0 }
+ if open { 0 } else { 1 << 1 }
+ if in_wall { 0 } else { 1 << 2 }
+ facing.horizontal_offset() * (1 << 3),
)
}
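/// Collision for a fence gate: empty when open, otherwise a thin wall across
/// the block perpendicular to its facing (slightly lower when set into a wall).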
fn fence_gate_collision(facing: Direction, in_wall: bool, open: bool) -> Vec<Aabb3<f64>> {
if open {
return vec![];
}
let (min_x, min_y, min_z, max_x, max_y, max_z) = if in_wall {
match facing.axis() {
Axis::Z => (0.0, 0.0, 3.0 / 8.0, 1.0, 13.0 / 16.0, 5.0 / 8.0),
Axis::X => (3.0 / 8.0, 0.0, 0.0, 5.0 / 8.0, 13.0 / 16.0, 1.0),
_ => unreachable!(),
}
} else {
match facing.axis() {
Axis::Z => (0.0, 0.0, 3.0 / 8.0, 1.0, 1.0, 5.0 / 8.0),
Axis::X => (3.0 / 8.0, 0.0, 0.0, 5.0 / 8.0, 1.0, 1.0),
_ => unreachable!(),
}
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z),
)]
}
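/// A fence gate drops into the "in wall" position when a cobblestone wall sits
/// on either side of it, perpendicular to its facing.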
fn fence_gate_update_state<W: WorldAccess>(world: &W, pos: Position, facing: Direction) -> bool {
if let Block::CobblestoneWall { .. } = world.get_block(pos.shift(facing.clockwise())) {
return true;
}
if let Block::CobblestoneWall { .. } = world.get_block(pos.shift(facing.counter_clockwise())) {
return true;
}
false
}
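/// Legacy block metadata for doors. The state is split between the two halves:
/// the upper half encodes only the hinge and powered bits, the lower half only
/// the facing and the open bit; all other property combinations map to `None`.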
fn door_data(
facing: Direction,
half: DoorHalf,
hinge: Side,
open: bool,
powered: bool,
) -> Option<usize> {
match half {
DoorHalf::Upper => {
if facing == Direction::North && open {
Some(
0x8 | (if hinge == Side::Right { 0x1 } else { 0x0 })
| (if powered { 0x2 } else { 0x0 }),
)
} else {
None
}
}
DoorHalf::Lower => {
if hinge == Side::Left && !powered {
Some(facing.clockwise().horizontal_index() | (if open { 0x4 } else { 0x0 }))
} else {
None
}
}
}
}
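/// Flattened state offset for doors: powered, open, hinge and half in the low
/// bits, facing in the high bits.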
fn door_offset(
facing: Direction,
half: DoorHalf,
hinge: Side,
open: bool,
powered: bool,
) -> Option<usize> {
Some(
if powered { 0 } else { 1 << 0 }
+ if open { 0 } else { 1 << 1 }
+ if hinge == Side::Left { 0 } else { 1 << 2 }
+ if half == DoorHalf::Upper { 0 } else { 1 << 3 }
+ facing.horizontal_offset() * (1 << 4),
)
}
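/// Doors store half of their properties in each half of the door; look up the
/// block forming the other half and merge its fields into this half's state.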
fn update_door_state<W: WorldAccess>(
world: &W,
pos: Position,
ohalf: DoorHalf,
ofacing: Direction,
ohinge: Side,
oopen: bool,
opowered: bool,
) -> (Direction, Side, bool, bool) {
let oy = if ohalf == DoorHalf::Upper { -1 } else { 1 };
match world.get_block(pos + (0, oy, 0)) {
Block::WoodenDoor {
half,
facing,
hinge,
open,
powered,
}
| Block::SpruceDoor {
half,
facing,
hinge,
open,
powered,
}
| Block::BirchDoor {
half,
facing,
hinge,
open,
powered,
}
| Block::JungleDoor {
half,
facing,
hinge,
open,
powered,
}
| Block::AcaciaDoor {
half,
facing,
hinge,
open,
powered,
}
| Block::DarkOakDoor {
half,
facing,
hinge,
open,
powered,
}
| Block::IronDoor {
half,
facing,
hinge,
open,
powered,
} => {
if half != ohalf {
if ohalf == DoorHalf::Upper {
return (facing, ohinge, open, opowered);
} else {
return (ofacing, hinge, oopen, powered);
}
}
}
_ => {}
}
(ofacing, ohinge, oopen, opowered)
}
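/// Collision for a door: a 3/16-thick slab rotated around the block centre
/// according to the door's facing, hinge side and open state.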
fn door_collision(facing: Direction, hinge: Side, open: bool) -> Vec<Aabb3<f64>> {
use std::f64::consts::PI;
let mut bounds = Aabb3::new(
Point3::new(0.0, 0.0, 0.0),
Point3::new(1.0, 1.0, 3.0 / 16.0),
);
let mut angle = match facing {
Direction::South => 0.0,
Direction::West => PI * 0.5,
Direction::North => PI,
Direction::East => PI * 1.5,
_ => 0.0,
};
angle += if open { PI * 0.5 } else { 0.0 }
* match hinge {
Side::Left => 1.0,
Side::Right => -1.0,
};
let c = angle.cos();
let s = angle.sin();
let x = bounds.min.x - 0.5;
let z = bounds.min.z - 0.5;
bounds.min.x = 0.5 + (x * c - z * s);
bounds.min.z = 0.5 + (z * c + x * s);
let x = bounds.max.x - 0.5;
let z = bounds.max.z - 0.5;
bounds.max.x = 0.5 + (x * c - z * s);
bounds.max.z = 0.5 + (z * c + x * s);
vec![bounds]
}
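/// Returns true when there is a powered repeater on either side of `pos`,
/// perpendicular to `facing`, which is what locks a repeater in place.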
fn update_repeater_state<W: WorldAccess>(world: &W, pos: Position, facing: Direction) -> bool {
let f = |dir| {
matches!(
world.get_block(pos.shift(dir)),
Block::RepeaterPowered { .. }
)
};
f(facing.clockwise()) || f(facing.counter_clockwise())
}
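/// The upper half of a double plant does not know its own variant, so copy the
/// variant from the lower half directly below it.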
fn update_double_plant_state<W: WorldAccess>(
world: &W,
pos: Position,
ohalf: BlockHalf,
ovariant: DoublePlantVariant,
) -> (BlockHalf, DoublePlantVariant) {
if ohalf != BlockHalf::Upper {
return (ohalf, ovariant);
}
match world.get_block(pos.shift(Direction::Down)) {
Block::DoublePlant { variant, .. } => (ohalf, variant),
_ => (ohalf, ovariant),
}
}
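/// Collision for a piston base: a full cube when retracted, otherwise the cube
/// minus the quarter on the facing side that the extended head occupies.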
fn piston_collision(extended: bool, facing: Direction) -> Vec<Aabb3<f64>> {
let (min_x, min_y, min_z, max_x, max_y, max_z) = if extended {
match facing {
Direction::Up => (0.0, 0.0, 0.0, 1.0, 0.75, 1.0),
Direction::Down => (0.0, 0.25, 0.0, 1.0, 1.0, 1.0),
Direction::North => (0.0, 0.0, 0.25, 1.0, 1.0, 1.0),
Direction::South => (0.0, 0.0, 0.0, 1.0, 1.0, 0.75),
Direction::West => (0.25, 0.0, 0.0, 1.0, 1.0, 1.0),
Direction::East => (0.0, 0.0, 0.0, 0.75, 1.0, 1.0),
_ => unreachable!(),
}
} else {
(0.0, 0.0, 0.0, 1.0, 1.0, 1.0)
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z),
)]
}
fn trapdoor_collision(facing: Direction, half: BlockHalf, open: bool) -> Vec<Aabb3<f64>> {
let (min_x, min_y, min_z, max_x, max_y, max_z) = if open {
match facing {
Direction::North => (0.0, 0.0, 3.0 / 16.0, 1.0, 1.0, 1.0),
Direction::South => (0.0, 0.0, 0.0, 1.0, 1.0, 3.0 / 16.0),
Direction::West => (3.0 / 16.0, 0.0, 0.0, 1.0, 1.0, 1.0),
Direction::East => (0.0, 0.0, 0.0, 3.0 / 16.0, 1.0, 1.0),
_ => unreachable!(),
}
} else {
match half {
BlockHalf::Bottom => (0.0, 0.0, 0.0, 1.0, 3.0 / 16.0, 1.0),
BlockHalf::Top => (0.0, 3.0 / 16.0, 0.0, 1.0, 1.0, 1.0),
_ => unreachable!(),
}
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z),
)]
}
fn fence_collision(north: bool, south: bool, west: bool, east: bool) -> Vec<Aabb3<f64>> {
let mut collision = vec![Aabb3::new(
Point3::new(3.0 / 8.0, 0.0, 3.0 / 8.0),
Point3::new(5.0 / 8.0, 1.5, 5.0 / 8.0),
)];
if north {
collision.push(Aabb3::new(
Point3::new(3.0 / 8.0, 0.0, 0.0),
Point3::new(5.0 / 8.0, 1.5, 3.0 / 8.0),
));
}
if south {
collision.push(Aabb3::new(
Point3::new(3.0 / 8.0, 0.0, 5.0 / 8.0),
Point3::new(5.0 / 8.0, 1.5, 1.0),
));
}
if west {
collision.push(Aabb3::new(
Point3::new(0.0, 0.0, 3.0 / 8.0),
Point3::new(3.0 / 8.0, 1.5, 5.0 / 8.0),
));
}
if east {
collision.push(Aabb3::new(
Point3::new(5.0 / 8.0, 0.0, 3.0 / 8.0),
Point3::new(1.0, 1.5, 5.0 / 8.0),
));
}
collision
}
fn pane_collision(north: bool, south: bool, east: bool, west: bool) -> Vec<Aabb3<f64>> {
let mut collision = vec![Aabb3::new(
Point3::new(7.0 / 16.0, 0.0, 7.0 / 16.0),
Point3::new(9.0 / 16.0, 1.0, 9.0 / 16.0),
)];
if north {
collision.push(Aabb3::new(
Point3::new(7.0 / 16.0, 0.0, 0.0),
Point3::new(9.0 / 16.0, 1.0, 9.0 / 16.0),
));
}
if south {
collision.push(Aabb3::new(
Point3::new(7.0 / 16.0, 0.0, 7.0 / 16.0),
Point3::new(9.0 / 16.0, 1.0, 1.0),
));
}
if west {
collision.push(Aabb3::new(
Point3::new(0.0, 0.0, 7.0 / 16.0),
Point3::new(9.0 / 16.0, 1.0, 9.0 / 16.0),
));
}
if east {
collision.push(Aabb3::new(
Point3::new(7.0 / 16.0, 0.0, 7.0 / 16.0),
Point3::new(1.0, 1.0, 9.0 / 16.0),
));
}
collision
}
fn get_stair_info<W: WorldAccess>(world: &W, pos: Position) -> Option<(Direction, BlockHalf)> {
match world.get_block(pos) {
Block::OakStairs { facing, half, .. }
| Block::StoneStairs { facing, half, .. }
| Block::BrickStairs { facing, half, .. }
| Block::StoneBrickStairs { facing, half, .. }
| Block::NetherBrickStairs { facing, half, .. }
| Block::SandstoneStairs { facing, half, .. }
| Block::SpruceStairs { facing, half, .. }
| Block::BirchStairs { facing, half, .. }
| Block::JungleStairs { facing, half, .. }
| Block::QuartzStairs { facing, half, .. }
| Block::AcaciaStairs { facing, half, .. }
| Block::DarkOakStairs { facing, half, .. }
| Block::RedSandstoneStairs { facing, half, .. }
| Block::PurpurStairs { facing, half, .. } => Some((facing, half)),
_ => None,
}
}
fn update_stair_shape<W: WorldAccess>(world: &W, pos: Position, facing: Direction) -> StairShape {
if let Some((other_facing, _)) = get_stair_info(world, pos.shift(facing)) {
if other_facing != facing && other_facing != facing.opposite() {
if other_facing == facing.clockwise() {
return StairShape::OuterRight;
}
return StairShape::OuterLeft;
}
}
if let Some((other_facing, _)) = get_stair_info(world, pos.shift(facing.opposite())) {
if other_facing != facing && other_facing != facing.opposite() {
if other_facing == facing.clockwise() {
return StairShape::InnerRight;
}
return StairShape::InnerLeft;
}
}
StairShape::Straight
}
fn stair_data(
facing: Direction,
half: BlockHalf,
shape: StairShape,
waterlogged: bool,
) -> Option<usize> {
if shape != StairShape::Straight {
return None;
}
if waterlogged {
return None;
}
Some((5 - facing.index()) | (if half == BlockHalf::Top { 0x4 } else { 0x0 }))
}
fn stair_offset(
facing: Direction,
half: BlockHalf,
shape: StairShape,
waterlogged: bool,
) -> Option<usize> {
Some(
if waterlogged { 0 } else { 1 }
+ shape.offset() * 2
+ if half == BlockHalf::Top { 0 } else { 2 * 5 }
+ facing.horizontal_offset() * 2 * 5 * 2,
)
}
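// Collision boxes are authored for a single orientation and then rotated
// about the block centre to match `facing`; top-half stairs additionally get
// a 180-degree flip in the y/z plane plus a mirror along x.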
#[allow(clippy::many_single_char_names)]
fn stair_collision(facing: Direction, shape: StairShape, half: BlockHalf) -> Vec<Aabb3<f64>> {
use std::f64::consts::PI;
let mut bounds = match shape {
StairShape::Straight => vec![
Aabb3::new(Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.5, 1.0)),
Aabb3::new(Point3::new(0.0, 0.5, 0.0), Point3::new(1.0, 1.0, 0.5)),
],
StairShape::InnerLeft => vec![
Aabb3::new(Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.5, 1.0)),
Aabb3::new(Point3::new(0.0, 0.5, 0.0), Point3::new(1.0, 1.0, 0.5)),
Aabb3::new(Point3::new(0.0, 0.5, 0.5), Point3::new(0.5, 1.0, 1.0)),
],
StairShape::InnerRight => vec![
Aabb3::new(Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.5, 1.0)),
Aabb3::new(Point3::new(0.0, 0.5, 0.0), Point3::new(1.0, 1.0, 0.5)),
Aabb3::new(Point3::new(0.5, 0.5, 0.5), Point3::new(1.0, 1.0, 1.0)),
],
StairShape::OuterLeft => vec![
Aabb3::new(Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.5, 1.0)),
Aabb3::new(Point3::new(0.0, 0.5, 0.0), Point3::new(0.5, 1.0, 0.5)),
],
StairShape::OuterRight => vec![
Aabb3::new(Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.5, 1.0)),
Aabb3::new(Point3::new(0.5, 0.5, 0.0), Point3::new(1.0, 1.0, 0.5)),
],
};
let mut angle = match facing {
Direction::North => 0.0,
Direction::East => PI * 0.5,
Direction::South => PI,
Direction::West => PI * 1.5,
_ => 0.0,
};
if half == BlockHalf::Top {
angle -= PI;
}
let c = angle.cos();
let s = angle.sin();
for bound in &mut bounds {
let x = bound.min.x - 0.5;
let z = bound.min.z - 0.5;
bound.min.x = 0.5 + (x * c - z * s);
bound.min.z = 0.5 + (z * c + x * s);
let x = bound.max.x - 0.5;
let z = bound.max.z - 0.5;
bound.max.x = 0.5 + (x * c - z * s);
bound.max.z = 0.5 + (z * c + x * s);
if half == BlockHalf::Top {
let c = PI.cos();
let s = PI.sin();
let z = bound.min.z - 0.5;
let y = bound.min.y - 0.5;
bound.min.z = 0.5 + (z * c - y * s);
bound.min.y = 0.5 + (y * c + z * s);
let z = bound.max.z - 0.5;
let y = bound.max.y - 0.5;
bound.max.z = 0.5 + (z * c - y * s);
bound.max.y = 0.5 + (y * c + z * s);
bound.min.x = 1.0 - bound.min.x;
bound.max.x = 1.0 - bound.max.x;
}
}
bounds
}
fn slab_collision(half: BlockHalf) -> Vec<Aabb3<f64>> {
let (min_x, min_y, min_z, max_x, max_y, max_z) = match half {
BlockHalf::Top => (0.0, 0.5, 0.0, 1.0, 1.0, 1.0),
BlockHalf::Bottom => (0.0, 0.0, 0.0, 1.0, 0.5, 1.0),
BlockHalf::Double => (0.0, 0.0, 0.0, 1.0, 1.0, 1.0),
_ => unreachable!(),
};
vec![Aabb3::new(
Point3::new(min_x, min_y, min_z),
Point3::new(max_x, max_y, max_z),
)]
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum StoneVariant {
Normal,
Granite,
SmoothGranite,
Diorite,
SmoothDiorite,
Andesite,
SmoothAndesite,
}
impl StoneVariant {
pub fn as_string(self) -> &'static str {
match self {
StoneVariant::Normal => "stone",
StoneVariant::Granite => "granite",
StoneVariant::SmoothGranite => "smooth_granite",
StoneVariant::Diorite => "diorite",
StoneVariant::SmoothDiorite => "smooth_diorite",
StoneVariant::Andesite => "andesite",
StoneVariant::SmoothAndesite => "smooth_andesite",
}
}
fn data(self) -> usize {
match self {
StoneVariant::Normal => 0,
StoneVariant::Granite => 1,
StoneVariant::SmoothGranite => 2,
StoneVariant::Diorite => 3,
StoneVariant::SmoothDiorite => 4,
StoneVariant::Andesite => 5,
StoneVariant::SmoothAndesite => 6,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DirtVariant {
Normal,
Coarse,
Podzol,
}
impl DirtVariant {
pub fn as_string(self) -> &'static str {
match self {
DirtVariant::Normal => "dirt",
DirtVariant::Coarse => "coarse_dirt",
DirtVariant::Podzol => "podzol",
}
}
fn data(self) -> usize {
match self {
DirtVariant::Normal => 0,
DirtVariant::Coarse => 1,
DirtVariant::Podzol => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum BedPart {
Head,
Foot,
}
impl BedPart {
pub fn as_string(self) -> &'static str {
match self {
BedPart::Head => "head",
BedPart::Foot => "foot",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum SandstoneVariant {
Normal,
Chiseled,
Smooth,
}
impl SandstoneVariant {
pub fn as_string(self) -> &'static str {
match self {
SandstoneVariant::Normal => "sandstone",
SandstoneVariant::Chiseled => "chiseled_sandstone",
SandstoneVariant::Smooth => "smooth_sandstone",
}
}
fn data(self) -> usize {
match self {
SandstoneVariant::Normal => 0,
SandstoneVariant::Chiseled => 1,
SandstoneVariant::Smooth => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum NoteBlockInstrument {
Harp,
BaseDrum,
Snare,
Hat,
Bass,
Flute,
Bell,
Guitar,
Chime,
Xylophone,
IronXylophone,
CowBell,
Didgeridoo,
Bit,
Banjo,
Pling,
}
impl NoteBlockInstrument {
pub fn as_string(self) -> &'static str {
match self {
NoteBlockInstrument::Harp => "harp",
NoteBlockInstrument::BaseDrum => "basedrum",
NoteBlockInstrument::Snare => "snare",
NoteBlockInstrument::Hat => "hat",
NoteBlockInstrument::Bass => "bass",
NoteBlockInstrument::Flute => "flute",
NoteBlockInstrument::Bell => "bell",
NoteBlockInstrument::Guitar => "guitar",
NoteBlockInstrument::Chime => "chime",
NoteBlockInstrument::Xylophone => "xylophone",
NoteBlockInstrument::IronXylophone => "iron_xylophone",
NoteBlockInstrument::CowBell => "cow_bell",
NoteBlockInstrument::Didgeridoo => "didgeridoo",
NoteBlockInstrument::Bit => "bit",
NoteBlockInstrument::Banjo => "banjo",
NoteBlockInstrument::Pling => "pling",
}
}
fn offsets(self, protocol_version: i32) -> Option<usize> {
match self {
NoteBlockInstrument::Harp => Some(0),
NoteBlockInstrument::BaseDrum => Some(1),
NoteBlockInstrument::Snare => Some(2),
NoteBlockInstrument::Hat => Some(3),
NoteBlockInstrument::Bass => Some(4),
NoteBlockInstrument::Flute => Some(5),
NoteBlockInstrument::Bell => Some(6),
NoteBlockInstrument::Guitar => Some(7),
NoteBlockInstrument::Chime => Some(8),
NoteBlockInstrument::Xylophone => Some(9),
_ => {
if protocol_version >= 477 {
match self {
NoteBlockInstrument::IronXylophone => Some(10),
NoteBlockInstrument::CowBell => Some(11),
NoteBlockInstrument::Didgeridoo => Some(12),
NoteBlockInstrument::Bit => Some(13),
NoteBlockInstrument::Banjo => Some(14),
NoteBlockInstrument::Pling => Some(15),
_ => unreachable!(),
}
} else {
None
}
}
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum RedSandstoneVariant {
Normal,
Chiseled,
Smooth,
}
impl RedSandstoneVariant {
pub fn as_string(self) -> &'static str {
match self {
RedSandstoneVariant::Normal => "red_sandstone",
RedSandstoneVariant::Chiseled => "chiseled_red_sandstone",
RedSandstoneVariant::Smooth => "smooth_red_sandstone",
}
}
fn data(self) -> usize {
match self {
RedSandstoneVariant::Normal => 0,
RedSandstoneVariant::Chiseled => 1,
RedSandstoneVariant::Smooth => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum QuartzVariant {
Normal,
Chiseled,
PillarVertical,
PillarNorthSouth,
PillarEastWest,
}
impl QuartzVariant {
pub fn as_string(self) -> &'static str {
match self {
QuartzVariant::Normal | QuartzVariant::Chiseled => "normal",
QuartzVariant::PillarVertical => "axis=y",
QuartzVariant::PillarNorthSouth => "axis=z",
QuartzVariant::PillarEastWest => "axis=x",
}
}
fn data(self) -> usize {
match self {
QuartzVariant::Normal => 0,
QuartzVariant::Chiseled => 1,
QuartzVariant::PillarVertical => 2,
QuartzVariant::PillarNorthSouth => 3,
QuartzVariant::PillarEastWest => 4,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum PrismarineVariant {
Normal,
Brick,
Dark,
}
impl PrismarineVariant {
pub fn as_string(self) -> &'static str {
match self {
PrismarineVariant::Normal => "prismarine",
PrismarineVariant::Brick => "prismarine_bricks",
PrismarineVariant::Dark => "dark_prismarine",
}
}
fn data(self) -> usize {
match self {
PrismarineVariant::Normal => 0,
PrismarineVariant::Brick => 1,
PrismarineVariant::Dark => 2,
}
}
}
fn mushroom_block_data(
is_stem: bool,
west: bool,
up: bool,
south: bool,
north: bool,
east: bool,
down: bool,
) -> Option<usize> {
Some(match (is_stem, west, up, south, north, east, down) {
(false, false, false, false, false, false, false) => 0,
(false, true, false, false, true, false, false) => 1,
(false, false, false, false, true, false, false) => 2,
(false, false, false, false, true, true, false) => 3,
(false, true, false, false, false, false, false) => 4,
(false, false, true, false, false, false, false) => 5,
(false, false, false, false, false, true, false) => 6,
(false, true, false, true, false, false, false) => 7,
(false, false, false, true, false, false, false) => 8,
(false, false, false, true, false, true, false) => 9,
(false, true, false, true, true, true, false) => 10,
(false, true, true, true, true, true, true) => 14,
(true, false, false, false, false, false, false) => 15,
_ => return None,
})
}
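// Flattened (1.13+) offset for mushroom blocks: each of the six faces
// contributes one bit, with a `true` face mapping to 0; stem blocks are a
// separate block in 1.13+ and therefore return None here.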
fn mushroom_block_offset(
is_stem: bool,
west: bool,
up: bool,
south: bool,
north: bool,
east: bool,
down: bool,
) -> Option<usize> {
if is_stem {
None
} else {
Some(
if west { 0 } else { 1 << 0 }
+ if up { 0 } else { 1 << 1 }
+ if south { 0 } else { 1 << 2 }
+ if north { 0 } else { 1 << 3 }
+ if east { 0 } else { 1 << 4 }
+ if down { 0 } else { 1 << 5 },
)
}
}
fn mushroom_block_variant(
is_stem: bool,
west: bool,
up: bool,
south: bool,
north: bool,
east: bool,
down: bool,
) -> String {
(if is_stem {
"all_stem"
} else {
match (west, up, south, north, east, down) {
(false, false, false, false, false, false) => "all_inside",
(true, false, false, true, false, false) => "north_west",
(false, false, false, true, false, false) => "north",
(false, false, false, true, true, false) => "north_east",
(true, false, false, false, false, false) => "west",
(false, true, false, false, false, false) => "center",
(false, false, false, false, true, false) => "east",
(true, false, true, false, false, false) => "south_west",
(false, false, true, false, false, false) => "south",
(false, false, true, false, true, false) => "south_east",
(true, false, true, true, true, false) => "stem",
(true, true, true, true, true, true) => "all_outside",
_ => "all_stem",
}
})
.to_string()
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DoorHalf {
Upper,
Lower,
}
impl DoorHalf {
pub fn as_string(self) -> &'static str {
match self {
DoorHalf::Upper => "upper",
DoorHalf::Lower => "lower",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Side {
Left,
Right,
}
impl Side {
pub fn as_string(self) -> &'static str {
match self {
Side::Left => "left",
Side::Right => "right",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ColoredVariant {
White,
Orange,
Magenta,
LightBlue,
Yellow,
Lime,
Pink,
Gray,
Silver,
Cyan,
Purple,
Blue,
Brown,
Green,
Red,
Black,
}
impl ColoredVariant {
pub fn as_string(self) -> &'static str {
match self {
ColoredVariant::White => "white",
ColoredVariant::Orange => "orange",
ColoredVariant::Magenta => "magenta",
ColoredVariant::LightBlue => "light_blue",
ColoredVariant::Yellow => "yellow",
ColoredVariant::Lime => "lime",
ColoredVariant::Pink => "pink",
ColoredVariant::Gray => "gray",
ColoredVariant::Silver => "silver",
ColoredVariant::Cyan => "cyan",
ColoredVariant::Purple => "purple",
ColoredVariant::Blue => "blue",
ColoredVariant::Brown => "brown",
ColoredVariant::Green => "green",
ColoredVariant::Red => "red",
ColoredVariant::Black => "black",
}
}
fn data(self) -> usize {
match self {
ColoredVariant::White => 0,
ColoredVariant::Orange => 1,
ColoredVariant::Magenta => 2,
ColoredVariant::LightBlue => 3,
ColoredVariant::Yellow => 4,
ColoredVariant::Lime => 5,
ColoredVariant::Pink => 6,
ColoredVariant::Gray => 7,
ColoredVariant::Silver => 8,
ColoredVariant::Cyan => 9,
ColoredVariant::Purple => 10,
ColoredVariant::Blue => 11,
ColoredVariant::Brown => 12,
ColoredVariant::Green => 13,
ColoredVariant::Red => 14,
ColoredVariant::Black => 15,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum RedFlowerVariant {
Poppy,
BlueOrchid,
Allium,
AzureBluet,
RedTulip,
OrangeTulip,
WhiteTulip,
PinkTulip,
OxeyeDaisy,
Cornflower,
WitherRose,
LilyOfTheValley,
}
impl RedFlowerVariant {
pub fn as_string(self) -> &'static str {
match self {
RedFlowerVariant::Poppy => "poppy",
RedFlowerVariant::BlueOrchid => "blue_orchid",
RedFlowerVariant::Allium => "allium",
RedFlowerVariant::AzureBluet => "houstonia",
RedFlowerVariant::RedTulip => "red_tulip",
RedFlowerVariant::OrangeTulip => "orange_tulip",
RedFlowerVariant::WhiteTulip => "white_tulip",
RedFlowerVariant::PinkTulip => "pink_tulip",
RedFlowerVariant::OxeyeDaisy => "oxeye_daisy",
RedFlowerVariant::Cornflower => "cornflower",
RedFlowerVariant::WitherRose => "wither_rose",
RedFlowerVariant::LilyOfTheValley => "lily_of_the_valley",
}
}
fn data(self) -> usize {
match self {
RedFlowerVariant::Poppy => 0,
RedFlowerVariant::BlueOrchid => 1,
RedFlowerVariant::Allium => 2,
RedFlowerVariant::AzureBluet => 3,
RedFlowerVariant::RedTulip => 4,
RedFlowerVariant::OrangeTulip => 5,
RedFlowerVariant::WhiteTulip => 6,
RedFlowerVariant::PinkTulip => 7,
RedFlowerVariant::OxeyeDaisy => 8,
// TODO: these shouldn't be available when protocol_version < 477
RedFlowerVariant::Cornflower => 9,
RedFlowerVariant::WitherRose => 10,
RedFlowerVariant::LilyOfTheValley => 11,
}
}
fn offsets(self, protocol_version: i32) -> Option<usize> {
match self {
RedFlowerVariant::Poppy => Some(0),
RedFlowerVariant::BlueOrchid => Some(1),
RedFlowerVariant::Allium => Some(2),
RedFlowerVariant::AzureBluet => Some(3),
RedFlowerVariant::RedTulip => Some(4),
RedFlowerVariant::OrangeTulip => Some(5),
RedFlowerVariant::WhiteTulip => Some(6),
RedFlowerVariant::PinkTulip => Some(7),
RedFlowerVariant::OxeyeDaisy => Some(8),
_ => {
if protocol_version >= 477 {
match self {
RedFlowerVariant::Cornflower => Some(9),
RedFlowerVariant::WitherRose => Some(10),
RedFlowerVariant::LilyOfTheValley => Some(11),
_ => unreachable!(),
}
} else {
None
}
}
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum MonsterEggVariant {
Stone,
Cobblestone,
StoneBrick,
MossyBrick,
CrackedBrick,
ChiseledBrick,
}
impl MonsterEggVariant {
pub fn as_string(self) -> &'static str {
match self {
MonsterEggVariant::Stone => "stone",
MonsterEggVariant::Cobblestone => "cobblestone",
MonsterEggVariant::StoneBrick => "stone_brick",
MonsterEggVariant::MossyBrick => "mossy_brick",
MonsterEggVariant::CrackedBrick => "cracked_brick",
MonsterEggVariant::ChiseledBrick => "chiseled_brick",
}
}
fn data(self) -> usize {
match self {
MonsterEggVariant::Stone => 0,
MonsterEggVariant::Cobblestone => 1,
MonsterEggVariant::StoneBrick => 2,
MonsterEggVariant::MossyBrick => 3,
MonsterEggVariant::CrackedBrick => 4,
MonsterEggVariant::ChiseledBrick => 5,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum StoneBrickVariant {
Normal,
Mossy,
Cracked,
Chiseled,
}
impl StoneBrickVariant {
pub fn as_string(self) -> &'static str {
match self {
StoneBrickVariant::Normal => "stonebrick",
StoneBrickVariant::Mossy => "mossy_stonebrick",
StoneBrickVariant::Cracked => "cracked_stonebrick",
StoneBrickVariant::Chiseled => "chiseled_stonebrick",
}
}
fn data(self) -> usize {
match self {
StoneBrickVariant::Normal => 0,
StoneBrickVariant::Mossy => 1,
StoneBrickVariant::Cracked => 2,
StoneBrickVariant::Chiseled => 3,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum RailShape {
NorthSouth,
EastWest,
AscendingNorth,
AscendingSouth,
AscendingEast,
AscendingWest,
NorthEast,
NorthWest,
SouthEast,
SouthWest,
}
impl RailShape {
pub fn as_string(self) -> &'static str {
match self {
RailShape::NorthSouth => "north_south",
RailShape::EastWest => "east_west",
RailShape::AscendingNorth => "ascending_north",
RailShape::AscendingSouth => "ascending_south",
RailShape::AscendingEast => "ascending_east",
RailShape::AscendingWest => "ascending_west",
RailShape::NorthEast => "north_east",
RailShape::NorthWest => "north_west",
RailShape::SouthEast => "south_east",
RailShape::SouthWest => "south_west",
}
}
pub fn data(self) -> usize {
match self {
RailShape::NorthSouth => 0,
RailShape::EastWest => 1,
RailShape::AscendingEast => 2,
RailShape::AscendingWest => 3,
RailShape::AscendingNorth => 4,
RailShape::AscendingSouth => 5,
RailShape::SouthEast => 6,
RailShape::SouthWest => 7,
RailShape::NorthWest => 8,
RailShape::NorthEast => 9,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ComparatorMode {
Compare,
Subtract,
}
impl ComparatorMode {
pub fn as_string(self) -> &'static str {
match self {
ComparatorMode::Compare => "compare",
ComparatorMode::Subtract => "subtract",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum RedstoneSide {
None,
Side,
Up,
}
impl RedstoneSide {
pub fn as_string(self) -> &'static str {
match self {
RedstoneSide::None => "none",
RedstoneSide::Side => "side",
RedstoneSide::Up => "up",
}
}
pub fn offset(self) -> usize {
match self {
RedstoneSide::Up => 0,
RedstoneSide::Side => 1,
RedstoneSide::None => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum PistonType {
Normal,
Sticky,
}
impl PistonType {
pub fn as_string(self) -> &'static str {
match self {
PistonType::Normal => "normal",
PistonType::Sticky => "sticky",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum StoneSlabVariant {
Stone,
SmoothStone,
Sandstone,
CutSandstone,
PetrifiedWood,
Cobblestone,
Brick,
StoneBrick,
NetherBrick,
Quartz,
RedSandstone,
CutRedSandstone,
Purpur,
}
impl StoneSlabVariant {
pub fn as_string(self) -> &'static str {
match self {
StoneSlabVariant::Stone => "stone",
StoneSlabVariant::SmoothStone => "smooth_stone",
StoneSlabVariant::Sandstone => "sandstone",
StoneSlabVariant::CutSandstone => "cut_sandstone",
StoneSlabVariant::PetrifiedWood => "wood_old",
StoneSlabVariant::Cobblestone => "cobblestone",
StoneSlabVariant::Brick => "brick",
StoneSlabVariant::StoneBrick => "stone_brick",
StoneSlabVariant::NetherBrick => "nether_brick",
StoneSlabVariant::Quartz => "quartz",
StoneSlabVariant::RedSandstone => "red_sandstone",
StoneSlabVariant::CutRedSandstone => "cut_red_sandstone",
StoneSlabVariant::Purpur => "purpur",
}
}
fn data(self) -> usize {
match self {
StoneSlabVariant::Stone | StoneSlabVariant::RedSandstone | StoneSlabVariant::Purpur => {
0
}
StoneSlabVariant::Sandstone => 1,
StoneSlabVariant::PetrifiedWood => 2,
StoneSlabVariant::Cobblestone => 3,
StoneSlabVariant::Brick => 4,
StoneSlabVariant::StoneBrick => 5,
StoneSlabVariant::NetherBrick => 6,
StoneSlabVariant::Quartz => 7,
_ => unimplemented!(),
}
}
fn offsets(self, protocol_version: i32) -> Option<usize> {
if protocol_version >= 477 {
match self {
StoneSlabVariant::Stone => Some(0),
StoneSlabVariant::SmoothStone => Some(1),
StoneSlabVariant::Sandstone => Some(2),
StoneSlabVariant::CutSandstone => Some(3),
StoneSlabVariant::PetrifiedWood => Some(4),
StoneSlabVariant::Cobblestone => Some(5),
StoneSlabVariant::Brick => Some(6),
StoneSlabVariant::StoneBrick => Some(7),
StoneSlabVariant::NetherBrick => Some(8),
StoneSlabVariant::Quartz => Some(9),
StoneSlabVariant::RedSandstone => Some(10),
StoneSlabVariant::CutRedSandstone => Some(11),
StoneSlabVariant::Purpur => Some(12),
}
} else {
match self {
StoneSlabVariant::Stone => Some(0),
StoneSlabVariant::SmoothStone => None,
StoneSlabVariant::Sandstone => Some(1),
StoneSlabVariant::CutSandstone => None,
StoneSlabVariant::PetrifiedWood => Some(2),
StoneSlabVariant::Cobblestone => Some(3),
StoneSlabVariant::Brick => Some(4),
StoneSlabVariant::StoneBrick => Some(5),
StoneSlabVariant::NetherBrick => Some(6),
StoneSlabVariant::Quartz => Some(7),
StoneSlabVariant::RedSandstone => Some(8),
StoneSlabVariant::CutRedSandstone => None,
StoneSlabVariant::Purpur => Some(9),
}
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum WoodSlabVariant {
Oak,
Spruce,
Birch,
Jungle,
Acacia,
DarkOak,
}
impl WoodSlabVariant {
pub fn as_string(self) -> &'static str {
match self {
WoodSlabVariant::Oak => "oak",
WoodSlabVariant::Spruce => "spruce",
WoodSlabVariant::Birch => "birch",
WoodSlabVariant::Jungle => "jungle",
WoodSlabVariant::Acacia => "acacia",
WoodSlabVariant::DarkOak => "dark_oak",
}
}
fn data(self) -> usize {
match self {
WoodSlabVariant::Oak => 0,
WoodSlabVariant::Spruce => 1,
WoodSlabVariant::Birch => 2,
WoodSlabVariant::Jungle => 3,
WoodSlabVariant::Acacia => 4,
WoodSlabVariant::DarkOak => 5,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum BlockHalf {
Top,
Bottom,
Upper,
Lower,
Double,
}
impl BlockHalf {
pub fn as_string(self) -> &'static str {
match self {
BlockHalf::Top => "top",
BlockHalf::Bottom => "bottom",
BlockHalf::Upper => "upper",
BlockHalf::Lower => "lower",
BlockHalf::Double => "double",
}
}
pub fn offset(self) -> usize {
match self {
BlockHalf::Top | BlockHalf::Upper => 0,
BlockHalf::Bottom | BlockHalf::Lower => 1,
BlockHalf::Double => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CobblestoneWallVariant {
Normal,
Mossy,
}
impl CobblestoneWallVariant {
pub fn as_string(self) -> &'static str {
match self {
CobblestoneWallVariant::Normal => "cobblestone",
CobblestoneWallVariant::Mossy => "mossy_cobblestone",
}
}
pub fn data(self) -> usize {
match self {
CobblestoneWallVariant::Normal => 0,
CobblestoneWallVariant::Mossy => 1,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Rotation {
South,
SouthSouthWest,
SouthWest,
WestSouthWest,
West,
WestNorthWest,
NorthWest,
NorthNorthWest,
North,
NorthNorthEast,
NorthEast,
EastNorthEast,
East,
EastSouthEast,
SouthEast,
SouthSouthEast,
}
impl Rotation {
pub fn as_string(self) -> &'static str {
match self {
Rotation::South => "south",
Rotation::SouthSouthWest => "south-southwest",
Rotation::SouthWest => "southwest",
Rotation::WestSouthWest => "west-southwest",
Rotation::West => "west",
Rotation::WestNorthWest => "west-northwest",
Rotation::NorthWest => "northwest",
Rotation::NorthNorthWest => "north-northwest",
Rotation::North => "north",
Rotation::NorthNorthEast => "north-northeast",
Rotation::NorthEast => "northeast",
Rotation::EastNorthEast => "east-northeast",
Rotation::East => "east",
Rotation::EastSouthEast => "east-southeast",
Rotation::SouthEast => "southeast",
Rotation::SouthSouthEast => "south-southeast",
}
}
pub fn data(self) -> usize {
match self {
Rotation::South => 0,
Rotation::SouthSouthWest => 1,
Rotation::SouthWest => 2,
Rotation::WestSouthWest => 3,
Rotation::West => 4,
Rotation::WestNorthWest => 5,
Rotation::NorthWest => 6,
Rotation::NorthNorthWest => 7,
Rotation::North => 8,
Rotation::NorthNorthEast => 9,
Rotation::NorthEast => 10,
Rotation::EastNorthEast => 11,
Rotation::East => 12,
Rotation::EastSouthEast => 13,
Rotation::SouthEast => 14,
Rotation::SouthSouthEast => 15,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum StairShape {
Straight,
InnerLeft,
InnerRight,
OuterLeft,
OuterRight,
}
impl StairShape {
pub fn as_string(self) -> &'static str {
match self {
StairShape::Straight => "straight",
StairShape::InnerLeft => "inner_left",
StairShape::InnerRight => "inner_right",
StairShape::OuterLeft => "outer_left",
StairShape::OuterRight => "outer_right",
}
}
pub fn offset(self) -> usize {
match self {
StairShape::Straight => 0,
StairShape::InnerLeft => 1,
StairShape::InnerRight => 2,
StairShape::OuterLeft => 3,
StairShape::OuterRight => 4,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AttachedFace {
Floor,
Wall,
Ceiling,
}
impl AttachedFace {
pub fn as_string(self) -> &'static str {
match self {
AttachedFace::Floor => "floor",
AttachedFace::Wall => "wall",
AttachedFace::Ceiling => "ceiling",
}
}
pub fn offset(self) -> usize {
match self {
AttachedFace::Floor => 0,
AttachedFace::Wall => 1,
AttachedFace::Ceiling => 2,
}
}
pub fn data_with_facing(self, facing: Direction) -> Option<usize> {
Some(match (self, facing) {
(AttachedFace::Ceiling, Direction::East) => 0,
(AttachedFace::Wall, Direction::East) => 1,
(AttachedFace::Wall, Direction::West) => 2,
(AttachedFace::Wall, Direction::South) => 3,
(AttachedFace::Wall, Direction::North) => 4,
(AttachedFace::Floor, Direction::South) => 5,
(AttachedFace::Floor, Direction::East) => 6,
(AttachedFace::Ceiling, Direction::South) => 7,
_ => return None,
})
}
pub fn data_with_facing_and_powered(self, facing: Direction, powered: bool) -> Option<usize> {
if let Some(facing_data) = self.data_with_facing(facing) {
Some(facing_data | (if powered { 0x8 } else { 0x0 }))
} else {
None
}
}
pub fn variant_with_facing(self, facing: Direction) -> String {
match (self, facing) {
(AttachedFace::Ceiling, Direction::East) => "down_x",
(AttachedFace::Wall, Direction::East) => "east",
(AttachedFace::Wall, Direction::West) => "west",
(AttachedFace::Wall, Direction::South) => "south",
(AttachedFace::Wall, Direction::North) => "north",
(AttachedFace::Floor, Direction::South) => "up_z",
(AttachedFace::Floor, Direction::East) => "up_x",
(AttachedFace::Ceiling, Direction::South) => "down_z",
_ => "north", // TODO: support 1.13.2+ new directions
}
.to_owned()
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ChestType {
Single,
Left,
Right,
}
impl ChestType {
pub fn as_string(self) -> &'static str {
match self {
ChestType::Single => "single",
ChestType::Left => "left",
ChestType::Right => "right",
}
}
pub fn offset(self) -> usize {
match self {
ChestType::Single => 0,
ChestType::Left => 1,
ChestType::Right => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum StructureBlockMode {
Save,
Load,
Corner,
Data,
}
impl StructureBlockMode {
pub fn data(self) -> usize {
match self {
StructureBlockMode::Save => 0,
StructureBlockMode::Load => 1,
StructureBlockMode::Corner => 2,
StructureBlockMode::Data => 3,
}
}
pub fn as_string(self) -> &'static str {
match self {
StructureBlockMode::Save => "save",
StructureBlockMode::Load => "load",
StructureBlockMode::Corner => "corner",
StructureBlockMode::Data => "data",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TreeVariant {
Oak,
Spruce,
Birch,
Jungle,
Acacia,
DarkOak,
StrippedSpruce,
StrippedBirch,
StrippedJungle,
StrippedAcacia,
StrippedDarkOak,
StrippedOak,
}
impl TreeVariant {
pub fn as_string(self) -> &'static str {
match self {
TreeVariant::Oak => "oak",
TreeVariant::Spruce => "spruce",
TreeVariant::Birch => "birch",
TreeVariant::Jungle => "jungle",
TreeVariant::Acacia => "acacia",
TreeVariant::DarkOak => "dark_oak",
TreeVariant::StrippedSpruce => "stripped_spruce_log",
TreeVariant::StrippedBirch => "stripped_birch_log",
TreeVariant::StrippedJungle => "stripped_jungle_log",
TreeVariant::StrippedAcacia => "stripped_acacia_log",
TreeVariant::StrippedDarkOak => "stripped_dark_oak_log",
TreeVariant::StrippedOak => "stripped_oak_log",
}
}
pub fn data(self) -> usize {
match self {
TreeVariant::Oak | TreeVariant::Acacia => 0,
TreeVariant::Spruce | TreeVariant::DarkOak => 1,
TreeVariant::Birch => 2,
TreeVariant::Jungle => 3,
_ => panic!("TreeVariant {:?} has no data (1.13+ only)", self),
}
}
pub fn offset(self) -> usize {
match self {
TreeVariant::Oak => 0,
TreeVariant::Spruce => 1,
TreeVariant::Birch => 2,
TreeVariant::Jungle => 3,
TreeVariant::Acacia => 4,
TreeVariant::DarkOak => 5,
TreeVariant::StrippedSpruce => 6,
TreeVariant::StrippedBirch => 7,
TreeVariant::StrippedJungle => 8,
TreeVariant::StrippedAcacia => 9,
TreeVariant::StrippedDarkOak => 10,
TreeVariant::StrippedOak => 11,
}
}
pub fn plank_data(self) -> usize {
match self {
TreeVariant::Oak => 0,
TreeVariant::Spruce => 1,
TreeVariant::Birch => 2,
TreeVariant::Jungle => 3,
TreeVariant::Acacia => 4,
TreeVariant::DarkOak => 5,
_ => panic!("TreeVariant {:?} has no plank data (1.13+ only)", self),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TallGrassVariant {
DeadBush,
TallGrass,
Fern,
}
impl TallGrassVariant {
pub fn as_string(self) -> &'static str {
match self {
TallGrassVariant::DeadBush => "dead_bush",
TallGrassVariant::TallGrass => "tall_grass",
TallGrassVariant::Fern => "fern",
}
}
fn data(self) -> usize {
match self {
TallGrassVariant::DeadBush => 0,
TallGrassVariant::TallGrass => 1,
TallGrassVariant::Fern => 2,
}
}
fn offset(self) -> usize {
match self {
TallGrassVariant::TallGrass => 0,
TallGrassVariant::Fern => 1,
TallGrassVariant::DeadBush => 2,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TallSeagrassHalf {
Upper,
Lower,
}
impl TallSeagrassHalf {
pub fn as_string(self) -> &'static str {
match self {
TallSeagrassHalf::Upper => "upper",
TallSeagrassHalf::Lower => "lower",
}
}
fn offset(self) -> usize {
match self {
TallSeagrassHalf::Upper => 0,
TallSeagrassHalf::Lower => 1,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DoublePlantVariant {
Sunflower,
Lilac,
DoubleTallgrass,
LargeFern,
RoseBush,
Peony,
}
impl DoublePlantVariant {
pub fn as_string(self) -> &'static str {
match self {
DoublePlantVariant::Sunflower => "sunflower",
DoublePlantVariant::Lilac => "syringa",
DoublePlantVariant::DoubleTallgrass => "double_grass",
DoublePlantVariant::LargeFern => "double_fern",
DoublePlantVariant::RoseBush => "double_rose",
DoublePlantVariant::Peony => "paeonia",
}
}
pub fn data(self) -> usize {
match self {
DoublePlantVariant::Sunflower => 0,
DoublePlantVariant::Lilac => 1,
DoublePlantVariant::DoubleTallgrass => 2,
DoublePlantVariant::LargeFern => 3,
DoublePlantVariant::RoseBush => 4,
DoublePlantVariant::Peony => 5,
}
}
pub fn offset(self) -> usize {
match self {
DoublePlantVariant::Sunflower => 0,
DoublePlantVariant::Lilac => 1,
DoublePlantVariant::RoseBush => 2,
DoublePlantVariant::Peony => 3,
DoublePlantVariant::DoubleTallgrass => 4,
DoublePlantVariant::LargeFern => 5,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum FlowerPotVariant {
Empty,
Poppy,
Dandelion,
OakSapling,
SpruceSapling,
BirchSapling,
JungleSapling,
RedMushroom,
BrownMushroom,
Cactus,
DeadBush,
Fern,
AcaciaSapling,
DarkOakSapling,
BlueOrchid,
Allium,
AzureBluet,
RedTulip,
OrangeTulip,
WhiteTulip,
PinkTulip,
Oxeye,
Cornflower,
LilyOfTheValley,
WitherRose,
}
impl FlowerPotVariant {
pub fn as_string(self) -> &'static str {
match self {
FlowerPotVariant::Empty => "empty",
FlowerPotVariant::Poppy => "rose",
FlowerPotVariant::Dandelion => "dandelion",
FlowerPotVariant::OakSapling => "oak_sapling",
FlowerPotVariant::SpruceSapling => "spruce_sapling",
FlowerPotVariant::BirchSapling => "birch_sapling",
FlowerPotVariant::JungleSapling => "jungle_sapling",
FlowerPotVariant::RedMushroom => "mushroom_red",
FlowerPotVariant::BrownMushroom => "mushroom_brown",
FlowerPotVariant::Cactus => "cactus",
FlowerPotVariant::DeadBush => "dead_bush",
FlowerPotVariant::Fern => "fern",
FlowerPotVariant::AcaciaSapling => "acacia_sapling",
FlowerPotVariant::DarkOakSapling => "dark_oak_sapling",
FlowerPotVariant::BlueOrchid => "blue_orchid",
FlowerPotVariant::Allium => "allium",
FlowerPotVariant::AzureBluet => "houstonia",
FlowerPotVariant::RedTulip => "red_tulip",
FlowerPotVariant::OrangeTulip => "orange_tulip",
FlowerPotVariant::WhiteTulip => "white_tulip",
FlowerPotVariant::PinkTulip => "pink_tulip",
FlowerPotVariant::Oxeye => "oxeye_daisy",
FlowerPotVariant::Cornflower => "cornflower",
FlowerPotVariant::LilyOfTheValley => "lily_of_the_valley",
FlowerPotVariant::WitherRose => "wither_rose",
}
}
pub fn offsets(self, protocol_version: i32) -> Option<usize> {
match self {
FlowerPotVariant::Empty => Some(0),
FlowerPotVariant::OakSapling => Some(1),
FlowerPotVariant::SpruceSapling => Some(2),
FlowerPotVariant::BirchSapling => Some(3),
FlowerPotVariant::JungleSapling => Some(4),
FlowerPotVariant::AcaciaSapling => Some(5),
FlowerPotVariant::DarkOakSapling => Some(6),
FlowerPotVariant::Fern => Some(7),
FlowerPotVariant::Dandelion => Some(8),
FlowerPotVariant::Poppy => Some(9),
FlowerPotVariant::BlueOrchid => Some(10),
FlowerPotVariant::Allium => Some(11),
FlowerPotVariant::AzureBluet => Some(12),
FlowerPotVariant::RedTulip => Some(13),
FlowerPotVariant::OrangeTulip => Some(14),
FlowerPotVariant::WhiteTulip => Some(15),
FlowerPotVariant::PinkTulip => Some(16),
FlowerPotVariant::Oxeye => Some(17),
FlowerPotVariant::Cornflower => {
if protocol_version >= 477 {
Some(18)
} else {
None
}
}
FlowerPotVariant::LilyOfTheValley => {
if protocol_version >= 477 {
Some(19)
} else {
None
}
}
FlowerPotVariant::WitherRose => {
if protocol_version >= 477 {
Some(20)
} else {
None
}
}
FlowerPotVariant::RedMushroom => Some(if protocol_version >= 477 { 21 } else { 18 }),
FlowerPotVariant::BrownMushroom => Some(if protocol_version >= 477 { 22 } else { 19 }),
FlowerPotVariant::DeadBush => Some(if protocol_version >= 477 { 23 } else { 20 }),
FlowerPotVariant::Cactus => Some(if protocol_version >= 477 { 24 } else { 21 }),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum CoralVariant {
DeadTube,
DeadBrain,
DeadBubble,
DeadFire,
DeadHorn,
Tube,
Brain,
Bubble,
Fire,
Horn,
}
impl CoralVariant {
pub fn as_string(self) -> &'static str {
match self {
CoralVariant::DeadTube => "dead_tube",
CoralVariant::DeadBrain => "dead_brain",
CoralVariant::DeadBubble => "dead_bubble",
CoralVariant::DeadFire => "dead_fire",
CoralVariant::DeadHorn => "dead_horn",
CoralVariant::Tube => "tube",
CoralVariant::Brain => "brain",
CoralVariant::Bubble => "bubble",
CoralVariant::Fire => "fire",
CoralVariant::Horn => "horn",
}
}
pub fn offset(self) -> usize {
match self {
CoralVariant::DeadTube => 0,
CoralVariant::DeadBrain => 1,
CoralVariant::DeadBubble => 2,
CoralVariant::DeadFire => 3,
CoralVariant::DeadHorn => 4,
CoralVariant::Tube => 5,
CoralVariant::Brain => 6,
CoralVariant::Bubble => 7,
CoralVariant::Fire => 8,
CoralVariant::Horn => 9,
}
}
}
| 34.300734 | 153 | 0.475993 |
e2a4feb741dbb2ba0a64aaa71d6e9df144a06f56 | 4,359 | use glium::glutin;
use glium::glutin::event::{Event, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;
use glium::{Display, Surface};
use imgui::{Context, FontConfig, FontGlyphRanges, FontSource, Ui};
use imgui_glium_renderer::Renderer;
use imgui_winit_support::{HiDpiMode, WinitPlatform};
use std::time::Instant;
pub struct System {
pub event_loop: EventLoop<()>,
pub display: glium::Display,
pub imgui: Context,
pub platform: WinitPlatform,
pub renderer: Renderer,
pub font_size: f32,
}
pub fn init(title: &str) -> System {
let title = match title.rfind('/') {
Some(idx) => title.split_at(idx + 1).1,
None => title,
};
let event_loop = EventLoop::new();
let context = glutin::ContextBuilder::new().with_vsync(true);
let builder = WindowBuilder::new()
.with_title(title.to_owned())
.with_inner_size(glutin::dpi::LogicalSize::new(1024f64, 768f64));
let display =
Display::new(builder, context, &event_loop).expect("Failed to initialize display");
let mut imgui = Context::create();
imgui.set_ini_filename(None);
let mut platform = WinitPlatform::init(&mut imgui);
{
let gl_window = display.gl_window();
let window = gl_window.window();
platform.attach_window(imgui.io_mut(), &window, HiDpiMode::Rounded);
}
let hidpi_factor = platform.hidpi_factor();
let font_size = (13.0 * hidpi_factor) as f32;
imgui.fonts().add_font(&[
FontSource::DefaultFontData {
config: Some(FontConfig {
size_pixels: font_size,
..FontConfig::default()
}),
},
FontSource::TtfData {
data: include_bytes!("../../resources/iosevka-regular.ttf"),
size_pixels: font_size,
config: Some(FontConfig {
rasterizer_multiply: 1.75,
glyph_ranges: FontGlyphRanges::japanese(),
..FontConfig::default()
}),
},
]);
imgui.io_mut().font_global_scale = (1.0 / hidpi_factor) as f32;
let renderer = Renderer::init(&mut imgui, &display).expect("Failed to initialize renderer");
System {
event_loop,
display,
imgui,
platform,
renderer,
font_size,
}
}
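// `main_loop` drives the winit event loop: it refreshes the imgui delta time
// on NewEvents, prepares the platform frame and requests a redraw on
// MainEventsCleared, then builds the UI via `run_ui` and renders it with
// glium on RedrawRequested, exiting once `run_ui` clears its flag or the
// window is closed.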
impl System {
pub fn main_loop<F: FnMut(&mut bool, &mut Ui) + 'static>(self, mut run_ui: F) {
let System {
event_loop,
display,
mut imgui,
mut platform,
mut renderer,
..
} = self;
let mut last_frame = Instant::now();
event_loop.run(move |event, _, control_flow| match event {
Event::NewEvents(_) => {
let now = Instant::now();
imgui.io_mut().update_delta_time(now - last_frame);
last_frame = now;
}
Event::MainEventsCleared => {
let gl_window = display.gl_window();
platform
.prepare_frame(imgui.io_mut(), &gl_window.window())
.expect("Failed to prepare frame");
gl_window.window().request_redraw();
}
Event::RedrawRequested(_) => {
let mut ui = imgui.frame();
let mut run = true;
run_ui(&mut run, &mut ui);
if !run {
*control_flow = ControlFlow::Exit;
}
let gl_window = display.gl_window();
let mut target = display.draw();
target.clear_color_srgb(1.0, 1.0, 1.0, 1.0);
platform.prepare_render(&ui, gl_window.window());
let draw_data = ui.render();
renderer
.render(&mut target, draw_data)
.expect("Rendering failed");
target.finish().expect("Failed to swap buffers");
}
Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => *control_flow = ControlFlow::Exit,
event => {
let gl_window = display.gl_window();
platform.handle_event(imgui.io_mut(), gl_window.window(), &event);
}
})
}
}
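// Usage sketch (assumed caller, not part of this file):
//
//     let system = init(file!());
//     system.main_loop(move |_run, ui| {
//         ui.show_demo_window(&mut true);
//     });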
| 33.022727 | 96 | 0.54462 |
1eb7fed961f8383d646e5ad48bcfdbf49ab76429 | 9,374 | mod app;
mod args;
mod colorscheme;
mod draw;
mod update;
mod widgets;
use std::fs;
use std::io::{self, Write};
use std::panic;
use std::path::Path;
use std::thread;
use std::time::Duration;
use crossbeam_channel::{select, tick, unbounded, Receiver};
use crossterm::cursor;
use crossterm::event::{Event, KeyCode, KeyEvent, KeyModifiers, MouseEvent};
use crossterm::execute;
use crossterm::terminal;
use num_rational::Ratio;
use platform_dirs::{AppDirs, AppUI};
use structopt::StructOpt;
use tui::backend::CrosstermBackend;
use tui::Terminal;
use app::*;
use args::*;
use colorscheme::*;
use draw::*;
use update::*;
const PROGRAM_NAME: &str = env!("CARGO_PKG_NAME");
fn setup_terminal() {
let mut stdout = io::stdout();
execute!(stdout, terminal::EnterAlternateScreen).unwrap();
execute!(stdout, cursor::Hide).unwrap();
// Needed for when ytop is run in a TTY since TTYs don't actually have an alternate screen.
// Must be executed after attempting to enter the alternate screen so that it only clears the
// primary screen if we are running in a TTY.
// If not running in a TTY, then we just end up clearing the alternate screen which should have
// no effect.
execute!(stdout, terminal::Clear(terminal::ClearType::All)).unwrap();
terminal::enable_raw_mode().unwrap();
}
fn cleanup_terminal() {
let mut stdout = io::stdout();
// Needed for when ytop is run in a TTY since TTYs don't actually have an alternate screen.
// Must be executed before attempting to leave the alternate screen so that it only modifies the
// primary screen if we are running in a TTY.
// If not running in a TTY, then we just end up modifying the alternate screen which should have
// no effect.
execute!(stdout, cursor::MoveTo(0, 0)).unwrap();
execute!(stdout, terminal::Clear(terminal::ClearType::All)).unwrap();
execute!(stdout, terminal::LeaveAlternateScreen).unwrap();
execute!(stdout, cursor::Show).unwrap();
terminal::disable_raw_mode().unwrap();
}
fn setup_ui_events() -> Receiver<Event> {
let (sender, receiver) = unbounded();
thread::spawn(move || loop {
sender.send(crossterm::event::read().unwrap()).unwrap();
});
receiver
}
fn setup_ctrl_c() -> Receiver<()> {
let (sender, receiver) = unbounded();
ctrlc::set_handler(move || {
sender.send(()).unwrap();
})
.unwrap();
receiver
}
// The log file isn't used for anything right now, but it helps when debugging,
// and we'll probably use it once we clean up the error handling.
fn setup_logfile(logfile_path: &Path) {
fs::create_dir_all(logfile_path.parent().unwrap()).unwrap();
let logfile = fs::OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(logfile_path)
.unwrap();
fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"{}[{}][{}]: {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
record.target(),
record.level(),
message
))
})
.chain(logfile)
.level_for("mio", log::LevelFilter::Debug)
.apply()
.unwrap();
}
// We need to catch panics since we need to close the UI and cleanup the terminal before logging any
// error messages to the screen.
fn setup_panic_hook() {
panic::set_hook(Box::new(|panic_info| {
cleanup_terminal();
better_panic::Settings::auto().create_panic_handler()(panic_info);
}));
}
fn main() {
better_panic::install();
let args = Args::from_args();
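// Never redraw less often than once per second, even if the update interval is longer.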
let draw_interval = Ratio::min(Ratio::from_integer(1), args.interval);
let app_dirs = AppDirs::new(Some(PROGRAM_NAME), AppUI::CommandLine).unwrap();
let logfile_path = app_dirs.state_dir.join("errors.log");
let colorscheme = read_colorscheme(&app_dirs.config_dir, &args.colorscheme);
let mut app = setup_app(&args, &colorscheme, PROGRAM_NAME);
setup_logfile(&logfile_path);
let backend = CrosstermBackend::new(io::stdout());
let mut terminal = Terminal::new(backend).unwrap();
setup_panic_hook();
setup_terminal();
let ticker = tick(Duration::from_secs_f64(
*draw_interval.numer() as f64 / *draw_interval.denom() as f64,
));
let ui_events_receiver = setup_ui_events();
let ctrl_c_events = setup_ctrl_c();
// Used to keep track of how many seconds have passed so we know which widgets to update.
// Resets to 0 every 60 seconds.
let mut update_seconds = Ratio::from_integer(0);
let mut show_help_menu = false;
let mut paused = false;
// Used to keep track of the previous key for actions that require two keypresses.
let mut previous_key_event: Option<KeyEvent> = None;
// If `skip_key` is set to true, we set the previous key to None instead of recording it.
let mut skip_key: bool;
// Used to keep track of whether we need to redraw the process or CPU/Mem widgets after they
// have been updated.
let mut proc_modified: bool;
let mut graphs_modified: bool;
update_widgets(&mut app.widgets, update_seconds);
draw(&mut terminal, &mut app);
loop {
select! {
recv(ctrl_c_events) -> _ => {
break;
}
recv(ticker) -> _ => {
if !paused {
update_seconds = (update_seconds + draw_interval) % Ratio::from_integer(60);
update_widgets(&mut app.widgets, update_seconds);
if !show_help_menu {
draw(&mut terminal, &mut app);
}
}
}
recv(ui_events_receiver) -> message => {
proc_modified = false;
graphs_modified = false;
skip_key = false;
match message.unwrap() {
Event::Key(key_event) => {
if key_event.modifiers.is_empty() {
match key_event.code {
KeyCode::Char('q') => {
break
},
KeyCode::Char('?') => {
show_help_menu = !show_help_menu;
if show_help_menu {
draw_help_menu(&mut terminal, &mut app);
} else {
draw(&mut terminal, &mut app);
}
},
KeyCode::Char(' ') => {
paused = !paused;
},
KeyCode::Char('j') | KeyCode::Down => {
app.widgets.proc.scroll_down();
proc_modified = true;
},
KeyCode::Char('k') | KeyCode::Up => {
app.widgets.proc.scroll_up();
proc_modified = true;
},
KeyCode::Char('g') => {
if previous_key_event == Some(KeyEvent::from(KeyCode::Char('g'))) {
app.widgets.proc.scroll_top();
proc_modified = true;
skip_key = true;
}
},
KeyCode::Home => {
app.widgets.proc.scroll_top();
proc_modified = true;
},
KeyCode::Char('G') | KeyCode::End => {
app.widgets.proc.scroll_bottom();
proc_modified = true;
},
KeyCode::Char('d') => {
if previous_key_event == Some(KeyEvent::from(KeyCode::Char('d'))) {
app.widgets.proc.kill_process();
skip_key = true;
}
},
KeyCode::Char('h') => {
app.widgets.cpu.scale_in();
app.widgets.mem.scale_in();
graphs_modified = true;
},
KeyCode::Char('l') => {
app.widgets.cpu.scale_out();
app.widgets.mem.scale_out();
graphs_modified = true;
},
KeyCode::Esc => {
if show_help_menu {
show_help_menu = false;
draw(&mut terminal, &mut app);
}
}
KeyCode::Tab => {
app.widgets.proc.toggle_grouping();
proc_modified = true;
},
KeyCode::Char('p') => {
app.widgets.proc.sort_by_num();
proc_modified = true;
},
KeyCode::Char('n') => {
app.widgets.proc.sort_by_command();
proc_modified = true;
},
KeyCode::Char('c') => {
app.widgets.proc.sort_by_cpu();
proc_modified = true;
},
KeyCode::Char('m') => {
app.widgets.proc.sort_by_mem();
proc_modified = true;
},
_ => {}
}
} else if key_event.modifiers == KeyModifiers::CONTROL {
match key_event.code {
KeyCode::Char('c') => {
break
},
KeyCode::Char('d') => {
app.widgets.proc.scroll_half_page_down();
proc_modified = true;
},
KeyCode::Char('u') => {
app.widgets.proc.scroll_half_page_up();
proc_modified = true;
},
KeyCode::Char('f') => {
app.widgets.proc.scroll_full_page_down();
proc_modified = true;
},
KeyCode::Char('b') => {
app.widgets.proc.scroll_full_page_up();
proc_modified = true;
},
_ => {}
}
}
previous_key_event = if skip_key {
None
} else {
Some(key_event)
};
}
// TODO: figure out why these aren't working
Event::Mouse(mouse_event) => match mouse_event {
MouseEvent::ScrollUp(_, _, _) => {
app.widgets.proc.scroll_up();
proc_modified = true;
},
MouseEvent::ScrollDown(_, _, _) => {
app.widgets.proc.scroll_down();
proc_modified = true;
},
_ => {}
}
Event::Resize(_width, _height) => {
if show_help_menu {
draw_help_menu(&mut terminal, &mut app);
} else {
draw(&mut terminal, &mut app);
}
}
}
if !show_help_menu {
if proc_modified {
draw_proc(&mut terminal, &mut app);
} else if graphs_modified {
draw_graphs(&mut terminal, &mut app);
}
}
}
}
}
cleanup_terminal();
}
| 27.98209 | 100 | 0.609132 |
03e362f8dcdb31908534e5eeebc97e95ca3826ab | 3,626 | #[cfg(not(feature = "loom"))]
mod loom {
pub use std::sync;
}
#[cfg(feature = "loom")]
use loom;
pub mod queue;
use std::num::NonZeroUsize;
use std::marker::PhantomData;
use cache_padded::CachePadded;
use per_thread_object::ThreadLocal;
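/// A typed wrapper around `queue::WfQueue` storing pointer-sized values. Each
/// calling thread lazily creates its own enqueue/dequeue context, so `push`
/// and `pop` never share per-operation state between threads.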
pub struct WfQueue<T: Queueable> {
queue: queue::WfQueue,
context: ThreadLocal<Context>,
_phantom: PhantomData<T>
}
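/// Conversion between a value and the non-zero machine word actually stored
/// in the queue slots; implementations below cover `NonZeroUsize`,
/// `&'static T`, `Box<T>`, `Arc<T>` and `NonNull<T>`.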
pub trait Queueable {
fn into_nonzero(self) -> NonZeroUsize;
/// # Safety
///
/// Unsafe conversion from `NonZeroUsize`.
unsafe fn from_nonzero(n: NonZeroUsize) -> Self;
}
struct Context {
enq: CachePadded<queue::EnqueueCtx>,
deq: CachePadded<queue::DequeueCtx>
}
impl<T: Queueable> WfQueue<T> {
pub fn new(cap: usize) -> WfQueue<T> {
WfQueue {
queue: queue::WfQueue::new(cap),
context: ThreadLocal::new(),
_phantom: PhantomData
}
}
#[inline]
pub fn len(&self) -> usize {
self.queue.len()
}
#[inline]
pub fn capacity(&self) -> usize {
self.queue.capacity()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.queue.is_empty()
}
#[inline]
pub fn is_full(&self) -> bool {
self.queue.is_full()
}
pub fn push(&self, val: T) -> Result<(), T> {
let ctx = self.context.get_or(Context::new);
let val = val.into_nonzero();
if self.queue.try_enqueue(&ctx.enq, val) {
Ok(())
} else {
unsafe {
Err(T::from_nonzero(val))
}
}
}
pub fn pop(&self) -> Option<T> {
let ctx = self.context.get_or(Context::new);
let val = self.queue.try_dequeue(&ctx.deq)?;
unsafe {
Some(T::from_nonzero(val))
}
}
}
impl<T: Queueable> Drop for WfQueue<T> {
#[inline]
fn drop(&mut self) {
while self.pop().is_some() {}
}
}
impl Context {
pub const fn new() -> Context {
Context {
enq: CachePadded::new(queue::EnqueueCtx::new()),
deq: CachePadded::new(queue::DequeueCtx::new())
}
}
}
// impl Queueable
impl Queueable for NonZeroUsize {
#[inline]
fn into_nonzero(self) -> NonZeroUsize {
self
}
#[inline]
unsafe fn from_nonzero(n: NonZeroUsize) -> Self {
n
}
}
impl<T> Queueable for &'static T {
#[inline]
fn into_nonzero(self) -> NonZeroUsize {
unsafe {
NonZeroUsize::new_unchecked(self as *const T as usize)
}
}
#[inline]
unsafe fn from_nonzero(n: NonZeroUsize) -> Self {
&*(n.get() as *const T)
}
}
impl<T> Queueable for Box<T> {
#[inline]
fn into_nonzero(self) -> NonZeroUsize {
unsafe {
NonZeroUsize::new_unchecked(Box::into_raw(self) as usize)
}
}
#[inline]
unsafe fn from_nonzero(n: NonZeroUsize) -> Self {
Box::from_raw(n.get() as *mut _)
}
}
use loom::sync::Arc;
impl<T> Queueable for Arc<T> {
#[inline]
fn into_nonzero(self) -> NonZeroUsize {
unsafe {
NonZeroUsize::new_unchecked(Arc::into_raw(self) as usize)
}
}
#[inline]
unsafe fn from_nonzero(n: NonZeroUsize) -> Self {
Arc::from_raw(n.get() as *mut _)
}
}
use std::ptr::NonNull;
impl<T> Queueable for NonNull<T> {
#[inline]
fn into_nonzero(self) -> NonZeroUsize {
unsafe {
NonZeroUsize::new_unchecked(self.as_ptr() as usize)
}
}
#[inline]
unsafe fn from_nonzero(n: NonZeroUsize) -> Self {
NonNull::new_unchecked(n.get() as *mut _)
}
}
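// Usage sketch (not part of the original crate): round-trips a boxed value
// through the queue. Assumes a capacity of 8 is acceptable to
// `queue::WfQueue::new` and that a single push cannot fail on an empty queue.
#[cfg(test)]
mod usage_sketch {
    use super::WfQueue;

    #[test]
    fn push_then_pop() {
        let queue: WfQueue<Box<u32>> = WfQueue::new(8);
        assert!(queue.push(Box::new(7)).is_ok());
        assert_eq!(queue.pop().map(|value| *value), Some(7));
        assert!(queue.pop().is_none());
    }
}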
| 20.370787 | 69 | 0.553778 |
e66b2bd905c49a9e7140afaabee24697ba62c6ae | 7,724 | use std::io::{BufRead, Error as IoError};
use std::str::FromStr;
use std::sync::Arc;
use failure::Fail;
use futures::{future::*, Future, Stream};
use hex::FromHex;
use parking_lot::Mutex;
use reqwest::r#async::{Chunk, Client, Response};
use url::Url;
use genesis::{NetworkId, NetworkInfo};
use keys::Signature;
use peer_address::address::peer_uri::{PeerUri, PeerUriError};
use peer_address::address::PeerAddress;
use utils::observer::Notifier;
use crate::network_config::{NetworkConfig, Seed};
pub enum PeerAddressSeederEvent {
Seeds(Vec<PeerAddress>),
End,
}
#[derive(Fail, Debug)]
pub enum PeerAddressSeederError {
#[fail(display = "The seed list file didn't contain any parseable seed peer address")]
EmptySeedAddresses,
#[fail(display = "The fetching of the seed list file failed with error '{}'", _0)]
FetchError(#[cause] reqwest::Error),
#[fail(display = "Failed while reading a line from the seed list with io::error '{}'", _0)]
IoError(IoError),
#[fail(display = "Seed node address parsing failed with error '{}'", _0)]
PeerUriParsingError(#[cause] PeerUriError),
#[fail(display = "The seed list file didn't contain any parseable signature")]
SignatureMissing,
#[fail(display = "The signature in the file was in a line other than the last one")]
SignatureNotInLastLine,
#[fail(display = "The signature verification for the seed list file failed")]
SignatureVerificationFailed,
#[fail(display = "The remote server responded with status code '{}'", _0)]
UnexpectedHttpStatus(reqwest::StatusCode),
}
impl From<reqwest::Error> for PeerAddressSeederError {
fn from(error: reqwest::Error) -> Self {
PeerAddressSeederError::FetchError(error)
}
}
impl From<PeerUriError> for PeerAddressSeederError {
fn from(error: PeerUriError) -> Self {
PeerAddressSeederError::PeerUriParsingError(error)
}
}
pub struct PeerAddressSeeder {
pub notifier: Arc<Mutex<Notifier<'static, PeerAddressSeederEvent>>>,
}
impl PeerAddressSeeder {
pub fn new() -> Self {
Self {
notifier: Arc::new(Mutex::new(Notifier::new())),
}
}
pub fn collect(&self, network_id: NetworkId, network_config: Arc<NetworkConfig>) {
let network_info = NetworkInfo::from_network_id(network_id);
// Get additional seed lists from the config file (in Iterator form)
let additional_seedlists = network_config.additional_seeds().iter().filter_map(|seed| match seed {
Seed::List(seed_list) => Some(&**seed_list),
Seed::Peer(_) => None,
});
// Create a new Iterator chaining the hardcoded seed lists with the seed lists from the config file
// TODO: Optimize this to use references instead of cloning
let seed_lists = network_info.seed_lists().iter().chain(additional_seedlists);
// Process all seed lists asynchronously
for seed_list in seed_lists.cloned() {
let notifier = Arc::clone(&self.notifier);
let seed_list_url = seed_list.url().clone();
trace!("Start processing remote seed list: {}", &seed_list.url());
let task = Self::fetch(seed_list.url().clone())
.and_then(move |response_body| {
let mut signature = None;
let mut seed_addresses = Vec::new();
// Process each line of the seed list
for line in response_body.lines() {
// Abort if the line can't be read properly
if let Err(e) = line {
return err(PeerAddressSeederError::IoError(e));
}
let line = line.expect("Validated this above");
// The signature should always be in the last line
if signature.is_some() {
return err(PeerAddressSeederError::SignatureNotInLastLine);
}
// Ignore comments and empty lines
let line = line.trim();
if line.is_empty() || line.starts_with('#') {
continue;
}
// Try to parse the line as a seed address; if that fails, fall back to parsing it as a signature.
// TODO: Should we fail here if a non-comment, non-empty line is neither a seed address
// nor a signature?
match PeerUri::from_str(line) {
Ok(seed_address) => match seed_address.as_seed_peer_address() {
Ok(peer_address) => seed_addresses.push(peer_address),
Err(e) => return err(PeerAddressSeederError::PeerUriParsingError(e)),
},
_ => signature = Signature::from_hex(line).ok(),
}
}
// Error out if we couldn't find any parseable seed address
if seed_addresses.is_empty() {
return err(PeerAddressSeederError::EmptySeedAddresses);
}
// Verify the signature if a public key was provided for this seed list
if let Some(public_key) = seed_list.public_key() {
if let Some(signature) = signature {
// Serialize the seed addresses for signature verification
let data = seed_addresses
.iter()
.filter_map(PeerAddress::to_seed_string)
.collect::<Vec<String>>()
.join("\n");
let data = data.as_bytes();
if !public_key.verify(&signature, data) {
return err(PeerAddressSeederError::SignatureVerificationFailed);
}
} else {
// No signature was found on the seed list file
return err(PeerAddressSeederError::SignatureMissing);
}
}
                    // Emit the Seeds event with the collected seed addresses
notifier.lock().notify(PeerAddressSeederEvent::Seeds(seed_addresses));
ok(())
})
.map_err(move |err| warn!("Failed to retrieve seed list from {}: {}", seed_list_url, err));
tokio::spawn(task);
}
        // Signal that all seed list fetch tasks have been spawned
self.notifier.lock().notify(PeerAddressSeederEvent::End);
}
// Asynchronously fetches a seed list from a remote location
fn fetch(url: Url) -> impl Future<Item = Chunk, Error = PeerAddressSeederError> {
Client::new()
.get(url)
.send()
.map_err(PeerAddressSeederError::from)
.and_then(Self::fetch_callback)
}
    // Note: this is a standalone function rather than a closure inside fetch() because the
    // compiler fails to infer the closure's types correctly there
fn fetch_callback(res: Response) -> Box<dyn Future<Item = Chunk, Error = PeerAddressSeederError> + Send> {
let status = res.status();
if status == 200 {
Box::new(res.into_body().concat2().map_err(PeerAddressSeederError::from))
} else {
Box::new(err(PeerAddressSeederError::UnexpectedHttpStatus(status)))
}
}
}
| 42.674033 | 125 | 0.566157 |
1a5ad4cb4678c09edc7a1ae882b788854872622c | 510 | use glommio::prelude::*;
use std::time::Instant;
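// Micro-benchmark for glommio's cooperative preemption check: pin a local executor
// to CPU 0, spin until `Local::need_preempt()` reports that the task should yield,
// and divide the elapsed time by the number of iterations to estimate the cost of a
// single check (the loop counter increment is included in that estimate).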
fn main() {
let local_ex = LocalExecutorBuilder::new()
.pin_to_cpu(0)
.spawn(|| async move {
let mut runs = 0;
let t = Instant::now();
while !Local::need_preempt() {
runs += 1;
}
println!(
"cost of checking for need_preempt: {:#?}",
t.elapsed() / runs,
);
})
.unwrap();
local_ex.join().unwrap();
}
| 22.173913 | 59 | 0.437255 |
f927e23cee60d0ce31d62ea4dbb8936ed4ec754b | 214,624 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct AssociateAssessmentReportEvidenceFolderError {
pub kind: AssociateAssessmentReportEvidenceFolderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum AssociateAssessmentReportEvidenceFolderErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for AssociateAssessmentReportEvidenceFolderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
AssociateAssessmentReportEvidenceFolderErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
AssociateAssessmentReportEvidenceFolderErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
AssociateAssessmentReportEvidenceFolderErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
AssociateAssessmentReportEvidenceFolderErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
AssociateAssessmentReportEvidenceFolderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for AssociateAssessmentReportEvidenceFolderError {
fn code(&self) -> Option<&str> {
AssociateAssessmentReportEvidenceFolderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl AssociateAssessmentReportEvidenceFolderError {
pub fn new(
kind: AssociateAssessmentReportEvidenceFolderErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: AssociateAssessmentReportEvidenceFolderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: AssociateAssessmentReportEvidenceFolderErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
AssociateAssessmentReportEvidenceFolderErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
AssociateAssessmentReportEvidenceFolderErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
AssociateAssessmentReportEvidenceFolderErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
AssociateAssessmentReportEvidenceFolderErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for AssociateAssessmentReportEvidenceFolderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
AssociateAssessmentReportEvidenceFolderErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
AssociateAssessmentReportEvidenceFolderErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
AssociateAssessmentReportEvidenceFolderErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
AssociateAssessmentReportEvidenceFolderErrorKind::ValidationException(_inner) => {
Some(_inner)
}
AssociateAssessmentReportEvidenceFolderErrorKind::Unhandled(_inner) => {
Some(_inner.as_ref())
}
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchAssociateAssessmentReportEvidenceError {
pub kind: BatchAssociateAssessmentReportEvidenceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchAssociateAssessmentReportEvidenceErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchAssociateAssessmentReportEvidenceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchAssociateAssessmentReportEvidenceErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
BatchAssociateAssessmentReportEvidenceErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
BatchAssociateAssessmentReportEvidenceErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
BatchAssociateAssessmentReportEvidenceErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
BatchAssociateAssessmentReportEvidenceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchAssociateAssessmentReportEvidenceError {
fn code(&self) -> Option<&str> {
BatchAssociateAssessmentReportEvidenceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchAssociateAssessmentReportEvidenceError {
pub fn new(
kind: BatchAssociateAssessmentReportEvidenceErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchAssociateAssessmentReportEvidenceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchAssociateAssessmentReportEvidenceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
BatchAssociateAssessmentReportEvidenceErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
BatchAssociateAssessmentReportEvidenceErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
BatchAssociateAssessmentReportEvidenceErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
BatchAssociateAssessmentReportEvidenceErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for BatchAssociateAssessmentReportEvidenceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchAssociateAssessmentReportEvidenceErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
BatchAssociateAssessmentReportEvidenceErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
BatchAssociateAssessmentReportEvidenceErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
BatchAssociateAssessmentReportEvidenceErrorKind::ValidationException(_inner) => {
Some(_inner)
}
BatchAssociateAssessmentReportEvidenceErrorKind::Unhandled(_inner) => {
Some(_inner.as_ref())
}
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchCreateDelegationByAssessmentError {
pub kind: BatchCreateDelegationByAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchCreateDelegationByAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchCreateDelegationByAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchCreateDelegationByAssessmentErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
BatchCreateDelegationByAssessmentErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
BatchCreateDelegationByAssessmentErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
BatchCreateDelegationByAssessmentErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
BatchCreateDelegationByAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchCreateDelegationByAssessmentError {
fn code(&self) -> Option<&str> {
BatchCreateDelegationByAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchCreateDelegationByAssessmentError {
pub fn new(
kind: BatchCreateDelegationByAssessmentErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchCreateDelegationByAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchCreateDelegationByAssessmentErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
BatchCreateDelegationByAssessmentErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
BatchCreateDelegationByAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
BatchCreateDelegationByAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
BatchCreateDelegationByAssessmentErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for BatchCreateDelegationByAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchCreateDelegationByAssessmentErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
BatchCreateDelegationByAssessmentErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
BatchCreateDelegationByAssessmentErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
BatchCreateDelegationByAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
BatchCreateDelegationByAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchDeleteDelegationByAssessmentError {
pub kind: BatchDeleteDelegationByAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchDeleteDelegationByAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchDeleteDelegationByAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchDeleteDelegationByAssessmentErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
BatchDeleteDelegationByAssessmentErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
BatchDeleteDelegationByAssessmentErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
BatchDeleteDelegationByAssessmentErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
BatchDeleteDelegationByAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchDeleteDelegationByAssessmentError {
fn code(&self) -> Option<&str> {
BatchDeleteDelegationByAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchDeleteDelegationByAssessmentError {
pub fn new(
kind: BatchDeleteDelegationByAssessmentErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchDeleteDelegationByAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchDeleteDelegationByAssessmentErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
BatchDeleteDelegationByAssessmentErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
BatchDeleteDelegationByAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
BatchDeleteDelegationByAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
BatchDeleteDelegationByAssessmentErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for BatchDeleteDelegationByAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchDeleteDelegationByAssessmentErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
BatchDeleteDelegationByAssessmentErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
BatchDeleteDelegationByAssessmentErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
BatchDeleteDelegationByAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
BatchDeleteDelegationByAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchDisassociateAssessmentReportEvidenceError {
pub kind: BatchDisassociateAssessmentReportEvidenceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchDisassociateAssessmentReportEvidenceErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchDisassociateAssessmentReportEvidenceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchDisassociateAssessmentReportEvidenceErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
BatchDisassociateAssessmentReportEvidenceErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
BatchDisassociateAssessmentReportEvidenceErrorKind::ResourceNotFoundException(
_inner,
) => _inner.fmt(f),
BatchDisassociateAssessmentReportEvidenceErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
BatchDisassociateAssessmentReportEvidenceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchDisassociateAssessmentReportEvidenceError {
fn code(&self) -> Option<&str> {
BatchDisassociateAssessmentReportEvidenceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchDisassociateAssessmentReportEvidenceError {
pub fn new(
kind: BatchDisassociateAssessmentReportEvidenceErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchDisassociateAssessmentReportEvidenceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchDisassociateAssessmentReportEvidenceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
BatchDisassociateAssessmentReportEvidenceErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
BatchDisassociateAssessmentReportEvidenceErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
BatchDisassociateAssessmentReportEvidenceErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
BatchDisassociateAssessmentReportEvidenceErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for BatchDisassociateAssessmentReportEvidenceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchDisassociateAssessmentReportEvidenceErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
BatchDisassociateAssessmentReportEvidenceErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
BatchDisassociateAssessmentReportEvidenceErrorKind::ResourceNotFoundException(
_inner,
) => Some(_inner),
BatchDisassociateAssessmentReportEvidenceErrorKind::ValidationException(_inner) => {
Some(_inner)
}
BatchDisassociateAssessmentReportEvidenceErrorKind::Unhandled(_inner) => {
Some(_inner.as_ref())
}
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchImportEvidenceToAssessmentControlError {
pub kind: BatchImportEvidenceToAssessmentControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchImportEvidenceToAssessmentControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchImportEvidenceToAssessmentControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchImportEvidenceToAssessmentControlErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
BatchImportEvidenceToAssessmentControlErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
BatchImportEvidenceToAssessmentControlErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
BatchImportEvidenceToAssessmentControlErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
BatchImportEvidenceToAssessmentControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchImportEvidenceToAssessmentControlError {
fn code(&self) -> Option<&str> {
BatchImportEvidenceToAssessmentControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchImportEvidenceToAssessmentControlError {
pub fn new(
kind: BatchImportEvidenceToAssessmentControlErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchImportEvidenceToAssessmentControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchImportEvidenceToAssessmentControlErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
BatchImportEvidenceToAssessmentControlErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
BatchImportEvidenceToAssessmentControlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
BatchImportEvidenceToAssessmentControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
BatchImportEvidenceToAssessmentControlErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for BatchImportEvidenceToAssessmentControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchImportEvidenceToAssessmentControlErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
BatchImportEvidenceToAssessmentControlErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
BatchImportEvidenceToAssessmentControlErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
BatchImportEvidenceToAssessmentControlErrorKind::ValidationException(_inner) => {
Some(_inner)
}
BatchImportEvidenceToAssessmentControlErrorKind::Unhandled(_inner) => {
Some(_inner.as_ref())
}
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateAssessmentError {
pub kind: CreateAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateAssessmentErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
CreateAssessmentErrorKind::InternalServerException(_inner) => _inner.fmt(f),
CreateAssessmentErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateAssessmentErrorKind::ValidationException(_inner) => _inner.fmt(f),
CreateAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateAssessmentError {
fn code(&self) -> Option<&str> {
CreateAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateAssessmentError {
pub fn new(kind: CreateAssessmentErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateAssessmentErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for CreateAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateAssessmentErrorKind::AccessDeniedException(_inner) => Some(_inner),
CreateAssessmentErrorKind::InternalServerException(_inner) => Some(_inner),
CreateAssessmentErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
CreateAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateAssessmentFrameworkError {
pub kind: CreateAssessmentFrameworkErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateAssessmentFrameworkErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateAssessmentFrameworkError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
CreateAssessmentFrameworkErrorKind::InternalServerException(_inner) => _inner.fmt(f),
CreateAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateAssessmentFrameworkErrorKind::ValidationException(_inner) => _inner.fmt(f),
CreateAssessmentFrameworkErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateAssessmentFrameworkError {
fn code(&self) -> Option<&str> {
CreateAssessmentFrameworkError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateAssessmentFrameworkError {
pub fn new(kind: CreateAssessmentFrameworkErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateAssessmentFrameworkErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateAssessmentFrameworkErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentFrameworkErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentFrameworkErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentFrameworkErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentFrameworkErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for CreateAssessmentFrameworkError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => Some(_inner),
CreateAssessmentFrameworkErrorKind::InternalServerException(_inner) => Some(_inner),
CreateAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateAssessmentFrameworkErrorKind::ValidationException(_inner) => Some(_inner),
CreateAssessmentFrameworkErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateAssessmentReportError {
pub kind: CreateAssessmentReportErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateAssessmentReportErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateAssessmentReportError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateAssessmentReportErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
CreateAssessmentReportErrorKind::InternalServerException(_inner) => _inner.fmt(f),
CreateAssessmentReportErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateAssessmentReportErrorKind::ValidationException(_inner) => _inner.fmt(f),
CreateAssessmentReportErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateAssessmentReportError {
fn code(&self) -> Option<&str> {
CreateAssessmentReportError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateAssessmentReportError {
pub fn new(kind: CreateAssessmentReportErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateAssessmentReportErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateAssessmentReportErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentReportErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentReportErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentReportErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
CreateAssessmentReportErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for CreateAssessmentReportError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateAssessmentReportErrorKind::AccessDeniedException(_inner) => Some(_inner),
CreateAssessmentReportErrorKind::InternalServerException(_inner) => Some(_inner),
CreateAssessmentReportErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateAssessmentReportErrorKind::ValidationException(_inner) => Some(_inner),
CreateAssessmentReportErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateControlError {
pub kind: CreateControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateControlErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
CreateControlErrorKind::InternalServerException(_inner) => _inner.fmt(f),
CreateControlErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateControlErrorKind::ValidationException(_inner) => _inner.fmt(f),
CreateControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateControlError {
fn code(&self) -> Option<&str> {
CreateControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateControlError {
pub fn new(kind: CreateControlErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateControlErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, CreateControlErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
CreateControlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, CreateControlErrorKind::ValidationException(_))
}
}
impl std::error::Error for CreateControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateControlErrorKind::AccessDeniedException(_inner) => Some(_inner),
CreateControlErrorKind::InternalServerException(_inner) => Some(_inner),
CreateControlErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateControlErrorKind::ValidationException(_inner) => Some(_inner),
CreateControlErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteAssessmentError {
pub kind: DeleteAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteAssessmentErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
DeleteAssessmentErrorKind::InternalServerException(_inner) => _inner.fmt(f),
DeleteAssessmentErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteAssessmentErrorKind::ValidationException(_inner) => _inner.fmt(f),
DeleteAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteAssessmentError {
fn code(&self) -> Option<&str> {
DeleteAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteAssessmentError {
pub fn new(kind: DeleteAssessmentErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteAssessmentErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for DeleteAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteAssessmentErrorKind::AccessDeniedException(_inner) => Some(_inner),
DeleteAssessmentErrorKind::InternalServerException(_inner) => Some(_inner),
DeleteAssessmentErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
DeleteAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteAssessmentFrameworkError {
pub kind: DeleteAssessmentFrameworkErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteAssessmentFrameworkErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteAssessmentFrameworkError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
DeleteAssessmentFrameworkErrorKind::InternalServerException(_inner) => _inner.fmt(f),
DeleteAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteAssessmentFrameworkErrorKind::ValidationException(_inner) => _inner.fmt(f),
DeleteAssessmentFrameworkErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteAssessmentFrameworkError {
fn code(&self) -> Option<&str> {
DeleteAssessmentFrameworkError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteAssessmentFrameworkError {
pub fn new(kind: DeleteAssessmentFrameworkErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteAssessmentFrameworkErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteAssessmentFrameworkErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentFrameworkErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentFrameworkErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentFrameworkErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentFrameworkErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for DeleteAssessmentFrameworkError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => Some(_inner),
DeleteAssessmentFrameworkErrorKind::InternalServerException(_inner) => Some(_inner),
DeleteAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteAssessmentFrameworkErrorKind::ValidationException(_inner) => Some(_inner),
DeleteAssessmentFrameworkErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteAssessmentReportError {
pub kind: DeleteAssessmentReportErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteAssessmentReportErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteAssessmentReportError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteAssessmentReportErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
DeleteAssessmentReportErrorKind::InternalServerException(_inner) => _inner.fmt(f),
DeleteAssessmentReportErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteAssessmentReportErrorKind::ValidationException(_inner) => _inner.fmt(f),
DeleteAssessmentReportErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteAssessmentReportError {
fn code(&self) -> Option<&str> {
DeleteAssessmentReportError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteAssessmentReportError {
pub fn new(kind: DeleteAssessmentReportErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteAssessmentReportErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteAssessmentReportErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentReportErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentReportErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentReportErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
DeleteAssessmentReportErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for DeleteAssessmentReportError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteAssessmentReportErrorKind::AccessDeniedException(_inner) => Some(_inner),
DeleteAssessmentReportErrorKind::InternalServerException(_inner) => Some(_inner),
DeleteAssessmentReportErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteAssessmentReportErrorKind::ValidationException(_inner) => Some(_inner),
DeleteAssessmentReportErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteControlError {
pub kind: DeleteControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteControlErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
DeleteControlErrorKind::InternalServerException(_inner) => _inner.fmt(f),
DeleteControlErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteControlErrorKind::ValidationException(_inner) => _inner.fmt(f),
DeleteControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteControlError {
fn code(&self) -> Option<&str> {
DeleteControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteControlError {
pub fn new(kind: DeleteControlErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteControlErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, DeleteControlErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DeleteControlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, DeleteControlErrorKind::ValidationException(_))
}
}
impl std::error::Error for DeleteControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteControlErrorKind::AccessDeniedException(_inner) => Some(_inner),
DeleteControlErrorKind::InternalServerException(_inner) => Some(_inner),
DeleteControlErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteControlErrorKind::ValidationException(_inner) => Some(_inner),
DeleteControlErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeregisterAccountError {
pub kind: DeregisterAccountErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeregisterAccountErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeregisterAccountError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeregisterAccountErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
DeregisterAccountErrorKind::InternalServerException(_inner) => _inner.fmt(f),
DeregisterAccountErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeregisterAccountErrorKind::ValidationException(_inner) => _inner.fmt(f),
DeregisterAccountErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeregisterAccountError {
fn code(&self) -> Option<&str> {
DeregisterAccountError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeregisterAccountError {
pub fn new(kind: DeregisterAccountErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeregisterAccountErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeregisterAccountErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterAccountErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterAccountErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterAccountErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterAccountErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for DeregisterAccountError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeregisterAccountErrorKind::AccessDeniedException(_inner) => Some(_inner),
DeregisterAccountErrorKind::InternalServerException(_inner) => Some(_inner),
DeregisterAccountErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeregisterAccountErrorKind::ValidationException(_inner) => Some(_inner),
DeregisterAccountErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeregisterOrganizationAdminAccountError {
pub kind: DeregisterOrganizationAdminAccountErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeregisterOrganizationAdminAccountErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeregisterOrganizationAdminAccountError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeregisterOrganizationAdminAccountErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
DeregisterOrganizationAdminAccountErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
DeregisterOrganizationAdminAccountErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
DeregisterOrganizationAdminAccountErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
DeregisterOrganizationAdminAccountErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeregisterOrganizationAdminAccountError {
fn code(&self) -> Option<&str> {
DeregisterOrganizationAdminAccountError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeregisterOrganizationAdminAccountError {
pub fn new(
kind: DeregisterOrganizationAdminAccountErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeregisterOrganizationAdminAccountErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeregisterOrganizationAdminAccountErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterOrganizationAdminAccountErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterOrganizationAdminAccountErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterOrganizationAdminAccountErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
DeregisterOrganizationAdminAccountErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for DeregisterOrganizationAdminAccountError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeregisterOrganizationAdminAccountErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
DeregisterOrganizationAdminAccountErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
DeregisterOrganizationAdminAccountErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
DeregisterOrganizationAdminAccountErrorKind::ValidationException(_inner) => {
Some(_inner)
}
DeregisterOrganizationAdminAccountErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
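// The message/Display question noted on each `message()` method applies to every error type in
// this file. The helper below is an illustrative sketch (its name is made up; it is not part of
// the generated surface) showing how a caller can fall back to the `Display` impl today when an
// unhandled error carries no modeled message.
#[allow(dead_code)]
fn deregister_admin_error_summary(err: &DeregisterOrganizationAdminAccountError) -> String {
    match err.message() {
        // Modeled exceptions usually carry a service-provided message.
        Some(msg) => msg.to_string(),
        // `Unhandled` errors have no `meta` message; `Display` still yields something useful.
        None => err.to_string(),
    }
}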
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DisassociateAssessmentReportEvidenceFolderError {
pub kind: DisassociateAssessmentReportEvidenceFolderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DisassociateAssessmentReportEvidenceFolderErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DisassociateAssessmentReportEvidenceFolderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DisassociateAssessmentReportEvidenceFolderErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
DisassociateAssessmentReportEvidenceFolderErrorKind::InternalServerException(
_inner,
) => _inner.fmt(f),
DisassociateAssessmentReportEvidenceFolderErrorKind::ResourceNotFoundException(
_inner,
) => _inner.fmt(f),
DisassociateAssessmentReportEvidenceFolderErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
DisassociateAssessmentReportEvidenceFolderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DisassociateAssessmentReportEvidenceFolderError {
fn code(&self) -> Option<&str> {
DisassociateAssessmentReportEvidenceFolderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DisassociateAssessmentReportEvidenceFolderError {
pub fn new(
kind: DisassociateAssessmentReportEvidenceFolderErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DisassociateAssessmentReportEvidenceFolderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DisassociateAssessmentReportEvidenceFolderErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateAssessmentReportEvidenceFolderErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateAssessmentReportEvidenceFolderErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateAssessmentReportEvidenceFolderErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateAssessmentReportEvidenceFolderErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for DisassociateAssessmentReportEvidenceFolderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DisassociateAssessmentReportEvidenceFolderErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
DisassociateAssessmentReportEvidenceFolderErrorKind::InternalServerException(
_inner,
) => Some(_inner),
DisassociateAssessmentReportEvidenceFolderErrorKind::ResourceNotFoundException(
_inner,
) => Some(_inner),
DisassociateAssessmentReportEvidenceFolderErrorKind::ValidationException(_inner) => {
Some(_inner)
}
DisassociateAssessmentReportEvidenceFolderErrorKind::Unhandled(_inner) => {
Some(_inner.as_ref())
}
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetAccountStatusError {
pub kind: GetAccountStatusErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetAccountStatusErrorKind {
InternalServerException(crate::error::InternalServerException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetAccountStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetAccountStatusErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetAccountStatusErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetAccountStatusError {
fn code(&self) -> Option<&str> {
GetAccountStatusError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetAccountStatusError {
pub fn new(kind: GetAccountStatusErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetAccountStatusErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetAccountStatusErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetAccountStatusErrorKind::InternalServerException(_)
)
}
}
impl std::error::Error for GetAccountStatusError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetAccountStatusErrorKind::InternalServerException(_inner) => Some(_inner),
GetAccountStatusErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
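// An illustrative sketch (the function name is made up, not generated code) of branching on the
// `is_*` predicates rather than matching `kind` directly; this keeps call sites compiling if new
// variants are later added to the `#[non_exhaustive]` kind enum.
#[allow(dead_code)]
fn log_get_account_status_error(err: &GetAccountStatusError) -> String {
    if err.is_internal_server_exception() {
        // Service-side failure: include the request id so it can be quoted to AWS support.
        format!(
            "GetAccountStatus failed server-side, request id: {:?}",
            err.request_id()
        )
    } else {
        // Unknown or unhandled error: fall back to the generic code and message.
        format!(
            "GetAccountStatus failed: code {:?}, message {:?}",
            err.code(),
            err.message()
        )
    }
}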
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetAssessmentError {
pub kind: GetAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetAssessmentErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetAssessmentErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetAssessmentErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetAssessmentErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetAssessmentError {
fn code(&self) -> Option<&str> {
GetAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetAssessmentError {
pub fn new(kind: GetAssessmentErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetAssessmentErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, GetAssessmentErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, GetAssessmentErrorKind::ValidationException(_))
}
}
impl std::error::Error for GetAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetAssessmentErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetAssessmentErrorKind::InternalServerException(_inner) => Some(_inner),
GetAssessmentErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
GetAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
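// `kind` is public, so callers can also match on it directly. Because the enum is
// `#[non_exhaustive]`, a wildcard arm is required; this sketch (an illustrative helper, not
// generated code) maps each variant to a coarse category string.
#[allow(dead_code)]
fn classify_get_assessment_error(err: &GetAssessmentError) -> &'static str {
    match &err.kind {
        GetAssessmentErrorKind::AccessDeniedException(_) => "permissions",
        GetAssessmentErrorKind::ResourceNotFoundException(_) => "not-found",
        GetAssessmentErrorKind::ValidationException(_) => "bad-request",
        GetAssessmentErrorKind::InternalServerException(_) => "server",
        // Covers `Unhandled` and any variants added in future releases of this generated code.
        _ => "other",
    }
}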
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetAssessmentFrameworkError {
pub kind: GetAssessmentFrameworkErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetAssessmentFrameworkErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetAssessmentFrameworkError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetAssessmentFrameworkErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetAssessmentFrameworkErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetAssessmentFrameworkErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetAssessmentFrameworkError {
fn code(&self) -> Option<&str> {
GetAssessmentFrameworkError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetAssessmentFrameworkError {
pub fn new(kind: GetAssessmentFrameworkErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetAssessmentFrameworkErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetAssessmentFrameworkErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentFrameworkErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentFrameworkErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentFrameworkErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentFrameworkErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetAssessmentFrameworkError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetAssessmentFrameworkErrorKind::InternalServerException(_inner) => Some(_inner),
GetAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetAssessmentFrameworkErrorKind::ValidationException(_inner) => Some(_inner),
GetAssessmentFrameworkErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetAssessmentReportUrlError {
pub kind: GetAssessmentReportUrlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetAssessmentReportUrlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetAssessmentReportUrlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetAssessmentReportUrlErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetAssessmentReportUrlErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetAssessmentReportUrlErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetAssessmentReportUrlErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetAssessmentReportUrlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetAssessmentReportUrlError {
fn code(&self) -> Option<&str> {
GetAssessmentReportUrlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetAssessmentReportUrlError {
pub fn new(kind: GetAssessmentReportUrlErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetAssessmentReportUrlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetAssessmentReportUrlErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentReportUrlErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentReportUrlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentReportUrlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetAssessmentReportUrlErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetAssessmentReportUrlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetAssessmentReportUrlErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetAssessmentReportUrlErrorKind::InternalServerException(_inner) => Some(_inner),
GetAssessmentReportUrlErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetAssessmentReportUrlErrorKind::ValidationException(_inner) => Some(_inner),
GetAssessmentReportUrlErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetChangeLogsError {
pub kind: GetChangeLogsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetChangeLogsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetChangeLogsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetChangeLogsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetChangeLogsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetChangeLogsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetChangeLogsErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetChangeLogsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetChangeLogsError {
fn code(&self) -> Option<&str> {
GetChangeLogsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetChangeLogsError {
pub fn new(kind: GetChangeLogsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetChangeLogsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetChangeLogsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, GetChangeLogsErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetChangeLogsErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetChangeLogsErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, GetChangeLogsErrorKind::ValidationException(_))
}
}
impl std::error::Error for GetChangeLogsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetChangeLogsErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetChangeLogsErrorKind::InternalServerException(_inner) => Some(_inner),
GetChangeLogsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetChangeLogsErrorKind::ValidationException(_inner) => Some(_inner),
GetChangeLogsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
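// Because `source()` above returns the concrete exception, the standard error-chain walk works on
// these types. This sketch (an illustrative helper, not generated code) collects the messages of
// the error and all of its causes, which can be handy when logging nested failures.
#[allow(dead_code)]
fn get_change_logs_error_chain(err: &GetChangeLogsError) -> Vec<String> {
    let mut messages = vec![err.to_string()];
    let mut source = std::error::Error::source(err);
    while let Some(cause) = source {
        messages.push(cause.to_string());
        source = cause.source();
    }
    messages
}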
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetControlError {
pub kind: GetControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetControlErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetControlErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetControlErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetControlErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetControlError {
fn code(&self) -> Option<&str> {
GetControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetControlError {
pub fn new(kind: GetControlErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetControlErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, GetControlErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(&self.kind, GetControlErrorKind::InternalServerException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, GetControlErrorKind::ValidationException(_))
}
}
impl std::error::Error for GetControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetControlErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetControlErrorKind::InternalServerException(_inner) => Some(_inner),
GetControlErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetControlErrorKind::ValidationException(_inner) => Some(_inner),
GetControlErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetDelegationsError {
pub kind: GetDelegationsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetDelegationsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetDelegationsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetDelegationsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetDelegationsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetDelegationsErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetDelegationsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetDelegationsError {
fn code(&self) -> Option<&str> {
GetDelegationsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetDelegationsError {
pub fn new(kind: GetDelegationsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetDelegationsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetDelegationsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetDelegationsErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetDelegationsErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, GetDelegationsErrorKind::ValidationException(_))
}
}
impl std::error::Error for GetDelegationsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetDelegationsErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetDelegationsErrorKind::InternalServerException(_inner) => Some(_inner),
GetDelegationsErrorKind::ValidationException(_inner) => Some(_inner),
GetDelegationsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetEvidenceError {
pub kind: GetEvidenceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetEvidenceErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetEvidenceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetEvidenceErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetEvidenceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetEvidenceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetEvidenceErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetEvidenceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetEvidenceError {
fn code(&self) -> Option<&str> {
GetEvidenceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetEvidenceError {
pub fn new(kind: GetEvidenceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetEvidenceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetEvidenceErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, GetEvidenceErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(&self.kind, GetEvidenceErrorKind::InternalServerException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, GetEvidenceErrorKind::ValidationException(_))
}
}
impl std::error::Error for GetEvidenceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetEvidenceErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetEvidenceErrorKind::InternalServerException(_inner) => Some(_inner),
GetEvidenceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetEvidenceErrorKind::ValidationException(_inner) => Some(_inner),
GetEvidenceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
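// Every error type in this file implements `smithy_types::retry::ProvideErrorKind`, and
// `retryable_error_kind` returns `None`, i.e. no modeled retry classification; presumably the
// retry layer then relies on other signals such as the HTTP status. The generic helper below is
// an illustrative sketch (not generated code) that works across all of these types via that trait.
#[allow(dead_code)]
fn error_code_or_unknown<E: smithy_types::retry::ProvideErrorKind>(err: &E) -> String {
    err.code().unwrap_or("Unknown").to_string()
}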
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetEvidenceByEvidenceFolderError {
pub kind: GetEvidenceByEvidenceFolderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetEvidenceByEvidenceFolderErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetEvidenceByEvidenceFolderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetEvidenceByEvidenceFolderErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetEvidenceByEvidenceFolderErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetEvidenceByEvidenceFolderErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
GetEvidenceByEvidenceFolderErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetEvidenceByEvidenceFolderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetEvidenceByEvidenceFolderError {
fn code(&self) -> Option<&str> {
GetEvidenceByEvidenceFolderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetEvidenceByEvidenceFolderError {
pub fn new(kind: GetEvidenceByEvidenceFolderErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetEvidenceByEvidenceFolderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetEvidenceByEvidenceFolderErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceByEvidenceFolderErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceByEvidenceFolderErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceByEvidenceFolderErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceByEvidenceFolderErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetEvidenceByEvidenceFolderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetEvidenceByEvidenceFolderErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetEvidenceByEvidenceFolderErrorKind::InternalServerException(_inner) => Some(_inner),
GetEvidenceByEvidenceFolderErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetEvidenceByEvidenceFolderErrorKind::ValidationException(_inner) => Some(_inner),
GetEvidenceByEvidenceFolderErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetEvidenceFolderError {
pub kind: GetEvidenceFolderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetEvidenceFolderErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetEvidenceFolderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetEvidenceFolderErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetEvidenceFolderErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetEvidenceFolderErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetEvidenceFolderErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetEvidenceFolderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetEvidenceFolderError {
fn code(&self) -> Option<&str> {
GetEvidenceFolderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetEvidenceFolderError {
pub fn new(kind: GetEvidenceFolderErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetEvidenceFolderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetEvidenceFolderErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFolderErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFolderErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFolderErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFolderErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetEvidenceFolderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetEvidenceFolderErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetEvidenceFolderErrorKind::InternalServerException(_inner) => Some(_inner),
GetEvidenceFolderErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetEvidenceFolderErrorKind::ValidationException(_inner) => Some(_inner),
GetEvidenceFolderErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetEvidenceFoldersByAssessmentError {
pub kind: GetEvidenceFoldersByAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetEvidenceFoldersByAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetEvidenceFoldersByAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetEvidenceFoldersByAssessmentErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetEvidenceFoldersByAssessmentErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
GetEvidenceFoldersByAssessmentErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
GetEvidenceFoldersByAssessmentErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetEvidenceFoldersByAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetEvidenceFoldersByAssessmentError {
fn code(&self) -> Option<&str> {
GetEvidenceFoldersByAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetEvidenceFoldersByAssessmentError {
pub fn new(kind: GetEvidenceFoldersByAssessmentErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetEvidenceFoldersByAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetEvidenceFoldersByAssessmentErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetEvidenceFoldersByAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetEvidenceFoldersByAssessmentErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetEvidenceFoldersByAssessmentErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
GetEvidenceFoldersByAssessmentErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
GetEvidenceFoldersByAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
GetEvidenceFoldersByAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetEvidenceFoldersByAssessmentControlError {
pub kind: GetEvidenceFoldersByAssessmentControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetEvidenceFoldersByAssessmentControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetEvidenceFoldersByAssessmentControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetEvidenceFoldersByAssessmentControlErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
GetEvidenceFoldersByAssessmentControlErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
GetEvidenceFoldersByAssessmentControlErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
GetEvidenceFoldersByAssessmentControlErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
GetEvidenceFoldersByAssessmentControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetEvidenceFoldersByAssessmentControlError {
fn code(&self) -> Option<&str> {
GetEvidenceFoldersByAssessmentControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetEvidenceFoldersByAssessmentControlError {
pub fn new(
kind: GetEvidenceFoldersByAssessmentControlErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetEvidenceFoldersByAssessmentControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetEvidenceFoldersByAssessmentControlErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentControlErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentControlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetEvidenceFoldersByAssessmentControlErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetEvidenceFoldersByAssessmentControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetEvidenceFoldersByAssessmentControlErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
GetEvidenceFoldersByAssessmentControlErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
GetEvidenceFoldersByAssessmentControlErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
GetEvidenceFoldersByAssessmentControlErrorKind::ValidationException(_inner) => {
Some(_inner)
}
GetEvidenceFoldersByAssessmentControlErrorKind::Unhandled(_inner) => {
Some(_inner.as_ref())
}
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetOrganizationAdminAccountError {
pub kind: GetOrganizationAdminAccountErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetOrganizationAdminAccountErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetOrganizationAdminAccountError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetOrganizationAdminAccountErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetOrganizationAdminAccountErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetOrganizationAdminAccountErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
GetOrganizationAdminAccountErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetOrganizationAdminAccountErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetOrganizationAdminAccountError {
fn code(&self) -> Option<&str> {
GetOrganizationAdminAccountError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetOrganizationAdminAccountError {
pub fn new(kind: GetOrganizationAdminAccountErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetOrganizationAdminAccountErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetOrganizationAdminAccountErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetOrganizationAdminAccountErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetOrganizationAdminAccountErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
GetOrganizationAdminAccountErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetOrganizationAdminAccountErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetOrganizationAdminAccountError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetOrganizationAdminAccountErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetOrganizationAdminAccountErrorKind::InternalServerException(_inner) => Some(_inner),
GetOrganizationAdminAccountErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetOrganizationAdminAccountErrorKind::ValidationException(_inner) => Some(_inner),
GetOrganizationAdminAccountErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetServicesInScopeError {
pub kind: GetServicesInScopeErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetServicesInScopeErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetServicesInScopeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetServicesInScopeErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetServicesInScopeErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetServicesInScopeErrorKind::ValidationException(_inner) => _inner.fmt(f),
GetServicesInScopeErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetServicesInScopeError {
fn code(&self) -> Option<&str> {
GetServicesInScopeError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetServicesInScopeError {
pub fn new(kind: GetServicesInScopeErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetServicesInScopeErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetServicesInScopeErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
GetServicesInScopeErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
GetServicesInScopeErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
GetServicesInScopeErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for GetServicesInScopeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetServicesInScopeErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetServicesInScopeErrorKind::InternalServerException(_inner) => Some(_inner),
GetServicesInScopeErrorKind::ValidationException(_inner) => Some(_inner),
GetServicesInScopeErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetSettingsError {
pub kind: GetSettingsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetSettingsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetSettingsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetSettingsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
GetSettingsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
GetSettingsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetSettingsError {
fn code(&self) -> Option<&str> {
GetSettingsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetSettingsError {
pub fn new(kind: GetSettingsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetSettingsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetSettingsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, GetSettingsErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(&self.kind, GetSettingsErrorKind::InternalServerException(_))
}
}
impl std::error::Error for GetSettingsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetSettingsErrorKind::AccessDeniedException(_inner) => Some(_inner),
GetSettingsErrorKind::InternalServerException(_inner) => Some(_inner),
GetSettingsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
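// `unhandled` accepts anything convertible into a boxed error, so test code can fabricate an
// error value without going through an HTTP response. A minimal illustrative sketch (the function
// name is made up, not generated code):
#[allow(dead_code)]
fn fake_get_settings_error_for_tests() -> GetSettingsError {
    // A `&str` converts into `Box<dyn Error + Send + Sync>` via the standard `From` impl.
    GetSettingsError::unhandled("simulated failure for unit tests")
}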
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListAssessmentFrameworksError {
pub kind: ListAssessmentFrameworksErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListAssessmentFrameworksErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListAssessmentFrameworksError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListAssessmentFrameworksErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
ListAssessmentFrameworksErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListAssessmentFrameworksErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListAssessmentFrameworksErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListAssessmentFrameworksError {
fn code(&self) -> Option<&str> {
ListAssessmentFrameworksError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListAssessmentFrameworksError {
pub fn new(kind: ListAssessmentFrameworksErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListAssessmentFrameworksErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListAssessmentFrameworksErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. That would let us fall back to the
    // `Display` impl (required by `std::error::Error`) to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentFrameworksErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentFrameworksErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentFrameworksErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for ListAssessmentFrameworksError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListAssessmentFrameworksErrorKind::AccessDeniedException(_inner) => Some(_inner),
ListAssessmentFrameworksErrorKind::InternalServerException(_inner) => Some(_inner),
ListAssessmentFrameworksErrorKind::ValidationException(_inner) => Some(_inner),
ListAssessmentFrameworksErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListAssessmentReportsError {
pub kind: ListAssessmentReportsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListAssessmentReportsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListAssessmentReportsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListAssessmentReportsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
ListAssessmentReportsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListAssessmentReportsErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListAssessmentReportsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListAssessmentReportsError {
fn code(&self) -> Option<&str> {
ListAssessmentReportsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListAssessmentReportsError {
pub fn new(kind: ListAssessmentReportsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListAssessmentReportsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListAssessmentReportsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentReportsErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentReportsErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentReportsErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for ListAssessmentReportsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListAssessmentReportsErrorKind::AccessDeniedException(_inner) => Some(_inner),
ListAssessmentReportsErrorKind::InternalServerException(_inner) => Some(_inner),
ListAssessmentReportsErrorKind::ValidationException(_inner) => Some(_inner),
ListAssessmentReportsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListAssessmentsError {
pub kind: ListAssessmentsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListAssessmentsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListAssessmentsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListAssessmentsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
ListAssessmentsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListAssessmentsErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListAssessmentsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListAssessmentsError {
fn code(&self) -> Option<&str> {
ListAssessmentsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListAssessmentsError {
pub fn new(kind: ListAssessmentsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListAssessmentsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListAssessmentsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentsErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListAssessmentsErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, ListAssessmentsErrorKind::ValidationException(_))
}
}
impl std::error::Error for ListAssessmentsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListAssessmentsErrorKind::AccessDeniedException(_inner) => Some(_inner),
ListAssessmentsErrorKind::InternalServerException(_inner) => Some(_inner),
ListAssessmentsErrorKind::ValidationException(_inner) => Some(_inner),
ListAssessmentsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListControlsError {
pub kind: ListControlsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListControlsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListControlsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListControlsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
ListControlsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListControlsErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListControlsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListControlsError {
fn code(&self) -> Option<&str> {
ListControlsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListControlsError {
pub fn new(kind: ListControlsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListControlsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListControlsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, ListControlsErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListControlsErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, ListControlsErrorKind::ValidationException(_))
}
}
impl std::error::Error for ListControlsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListControlsErrorKind::AccessDeniedException(_inner) => Some(_inner),
ListControlsErrorKind::InternalServerException(_inner) => Some(_inner),
ListControlsErrorKind::ValidationException(_inner) => Some(_inner),
ListControlsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListKeywordsForDataSourceError {
pub kind: ListKeywordsForDataSourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListKeywordsForDataSourceErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListKeywordsForDataSourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListKeywordsForDataSourceErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
ListKeywordsForDataSourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListKeywordsForDataSourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListKeywordsForDataSourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListKeywordsForDataSourceError {
fn code(&self) -> Option<&str> {
ListKeywordsForDataSourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListKeywordsForDataSourceError {
pub fn new(kind: ListKeywordsForDataSourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListKeywordsForDataSourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListKeywordsForDataSourceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
ListKeywordsForDataSourceErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListKeywordsForDataSourceErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
ListKeywordsForDataSourceErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for ListKeywordsForDataSourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListKeywordsForDataSourceErrorKind::AccessDeniedException(_inner) => Some(_inner),
ListKeywordsForDataSourceErrorKind::InternalServerException(_inner) => Some(_inner),
ListKeywordsForDataSourceErrorKind::ValidationException(_inner) => Some(_inner),
ListKeywordsForDataSourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListNotificationsError {
pub kind: ListNotificationsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListNotificationsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListNotificationsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListNotificationsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
ListNotificationsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListNotificationsErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListNotificationsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListNotificationsError {
fn code(&self) -> Option<&str> {
ListNotificationsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListNotificationsError {
pub fn new(kind: ListNotificationsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListNotificationsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListNotificationsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
ListNotificationsErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListNotificationsErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
ListNotificationsErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for ListNotificationsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListNotificationsErrorKind::AccessDeniedException(_inner) => Some(_inner),
ListNotificationsErrorKind::InternalServerException(_inner) => Some(_inner),
ListNotificationsErrorKind::ValidationException(_inner) => Some(_inner),
ListNotificationsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
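/// Error type returned by the `ListTagsForResource` operation.
///
/// A minimal sketch (the `describe` helper is hypothetical) of matching on the
/// error kind directly instead of using the boolean helpers:
///
/// ```ignore
/// use crate::error::{ListTagsForResourceError, ListTagsForResourceErrorKind};
///
/// fn describe(err: &ListTagsForResourceError) -> &'static str {
///     match &err.kind {
///         ListTagsForResourceErrorKind::InternalServerException(_) => "server-side failure",
///         ListTagsForResourceErrorKind::ResourceNotFoundException(_) => "no such resource",
///         ListTagsForResourceErrorKind::ValidationException(_) => "invalid request",
///         ListTagsForResourceErrorKind::Unhandled(_) => "unmodeled error",
///         // The enum is #[non_exhaustive], so downstream crates need a catch-all arm.
///         _ => "newly added error variant",
///     }
/// }
/// ```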
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTagsForResourceError {
pub kind: ListTagsForResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTagsForResourceErrorKind {
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTagsForResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListTagsForResourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
ListTagsForResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
ListTagsForResourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
ListTagsForResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListTagsForResourceError {
fn code(&self) -> Option<&str> {
ListTagsForResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListTagsForResourceError {
pub fn new(kind: ListTagsForResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListTagsForResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListTagsForResourceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ListTagsForResourceErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
ListTagsForResourceErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
ListTagsForResourceErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for ListTagsForResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListTagsForResourceErrorKind::InternalServerException(_inner) => Some(_inner),
ListTagsForResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
ListTagsForResourceErrorKind::ValidationException(_inner) => Some(_inner),
ListTagsForResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
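/// Error type returned by the `RegisterAccount` operation.
///
/// Illustrative sketch (the logging function is hypothetical) of reading the
/// generic error metadata carried in `meta`, which is available regardless of
/// which variant the service returned:
///
/// ```ignore
/// fn log_register_account_failure(err: &RegisterAccountError) {
///     eprintln!(
///         "RegisterAccount failed: code={:?} message={:?} request_id={:?}",
///         err.code(),
///         err.message(),
///         err.request_id(),
///     );
/// }
/// ```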
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct RegisterAccountError {
pub kind: RegisterAccountErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum RegisterAccountErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for RegisterAccountError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
RegisterAccountErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
RegisterAccountErrorKind::InternalServerException(_inner) => _inner.fmt(f),
RegisterAccountErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
RegisterAccountErrorKind::ValidationException(_inner) => _inner.fmt(f),
RegisterAccountErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for RegisterAccountError {
fn code(&self) -> Option<&str> {
RegisterAccountError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl RegisterAccountError {
pub fn new(kind: RegisterAccountErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: RegisterAccountErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: RegisterAccountErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
RegisterAccountErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
RegisterAccountErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
RegisterAccountErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, RegisterAccountErrorKind::ValidationException(_))
}
}
impl std::error::Error for RegisterAccountError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
RegisterAccountErrorKind::AccessDeniedException(_inner) => Some(_inner),
RegisterAccountErrorKind::InternalServerException(_inner) => Some(_inner),
RegisterAccountErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
RegisterAccountErrorKind::ValidationException(_inner) => Some(_inner),
RegisterAccountErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct RegisterOrganizationAdminAccountError {
pub kind: RegisterOrganizationAdminAccountErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum RegisterOrganizationAdminAccountErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for RegisterOrganizationAdminAccountError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
RegisterOrganizationAdminAccountErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
RegisterOrganizationAdminAccountErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
RegisterOrganizationAdminAccountErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
RegisterOrganizationAdminAccountErrorKind::ValidationException(_inner) => _inner.fmt(f),
RegisterOrganizationAdminAccountErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for RegisterOrganizationAdminAccountError {
fn code(&self) -> Option<&str> {
RegisterOrganizationAdminAccountError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl RegisterOrganizationAdminAccountError {
pub fn new(kind: RegisterOrganizationAdminAccountErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: RegisterOrganizationAdminAccountErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: RegisterOrganizationAdminAccountErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
RegisterOrganizationAdminAccountErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
RegisterOrganizationAdminAccountErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
RegisterOrganizationAdminAccountErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
RegisterOrganizationAdminAccountErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for RegisterOrganizationAdminAccountError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
RegisterOrganizationAdminAccountErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
RegisterOrganizationAdminAccountErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
RegisterOrganizationAdminAccountErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
RegisterOrganizationAdminAccountErrorKind::ValidationException(_inner) => Some(_inner),
RegisterOrganizationAdminAccountErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct TagResourceError {
pub kind: TagResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum TagResourceErrorKind {
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for TagResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
TagResourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
TagResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
TagResourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
TagResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for TagResourceError {
fn code(&self) -> Option<&str> {
TagResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl TagResourceError {
pub fn new(kind: TagResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: TagResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: TagResourceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(&self.kind, TagResourceErrorKind::InternalServerException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
TagResourceErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, TagResourceErrorKind::ValidationException(_))
}
}
impl std::error::Error for TagResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
TagResourceErrorKind::InternalServerException(_inner) => Some(_inner),
TagResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
TagResourceErrorKind::ValidationException(_inner) => Some(_inner),
TagResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
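/// Error type returned by the `UntagResource` operation.
///
/// A hedged sketch of one common pattern: treating `ResourceNotFoundException`
/// as a no-op so that untagging stays idempotent. The success type is
/// simplified to `()` here; the real operation returns an output struct.
///
/// ```ignore
/// fn ignore_missing_resource(
///     result: Result<(), UntagResourceError>,
/// ) -> Result<(), UntagResourceError> {
///     match result {
///         Err(err) if err.is_resource_not_found_exception() => Ok(()), // nothing to untag
///         other => other,
///     }
/// }
/// ```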
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UntagResourceError {
pub kind: UntagResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UntagResourceErrorKind {
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UntagResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UntagResourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UntagResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UntagResourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
UntagResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UntagResourceError {
fn code(&self) -> Option<&str> {
UntagResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UntagResourceError {
pub fn new(kind: UntagResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UntagResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UntagResourceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UntagResourceErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UntagResourceErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, UntagResourceErrorKind::ValidationException(_))
}
}
impl std::error::Error for UntagResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UntagResourceErrorKind::InternalServerException(_inner) => Some(_inner),
UntagResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UntagResourceErrorKind::ValidationException(_inner) => Some(_inner),
UntagResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
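/// Error type returned by the `UpdateAssessment` operation.
///
/// For illustration only: wrapping an arbitrary error into the `Unhandled`
/// variant with the `unhandled` constructor defined below, e.g. when adapting a
/// custom transport error in tests. The I/O error here is a placeholder.
///
/// ```ignore
/// let io_err = std::io::Error::new(std::io::ErrorKind::Other, "connection reset");
/// let err = UpdateAssessmentError::unhandled(io_err);
/// // The Unhandled variant carries the original error, not a modeled exception.
/// assert!(!err.is_validation_exception());
/// ```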
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateAssessmentError {
pub kind: UpdateAssessmentErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateAssessmentErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateAssessmentError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateAssessmentErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
UpdateAssessmentErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UpdateAssessmentErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateAssessmentErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateAssessmentErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateAssessmentError {
fn code(&self) -> Option<&str> {
UpdateAssessmentError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateAssessmentError {
pub fn new(kind: UpdateAssessmentErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateAssessmentErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateAssessmentErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for UpdateAssessmentError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateAssessmentErrorKind::AccessDeniedException(_inner) => Some(_inner),
UpdateAssessmentErrorKind::InternalServerException(_inner) => Some(_inner),
UpdateAssessmentErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateAssessmentErrorKind::ValidationException(_inner) => Some(_inner),
UpdateAssessmentErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateAssessmentControlError {
pub kind: UpdateAssessmentControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateAssessmentControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateAssessmentControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateAssessmentControlErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
UpdateAssessmentControlErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UpdateAssessmentControlErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateAssessmentControlErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateAssessmentControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateAssessmentControlError {
fn code(&self) -> Option<&str> {
UpdateAssessmentControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateAssessmentControlError {
pub fn new(kind: UpdateAssessmentControlErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateAssessmentControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateAssessmentControlErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for UpdateAssessmentControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateAssessmentControlErrorKind::AccessDeniedException(_inner) => Some(_inner),
UpdateAssessmentControlErrorKind::InternalServerException(_inner) => Some(_inner),
UpdateAssessmentControlErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateAssessmentControlErrorKind::ValidationException(_inner) => Some(_inner),
UpdateAssessmentControlErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateAssessmentControlSetStatusError {
pub kind: UpdateAssessmentControlSetStatusErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateAssessmentControlSetStatusErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateAssessmentControlSetStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateAssessmentControlSetStatusErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
UpdateAssessmentControlSetStatusErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
UpdateAssessmentControlSetStatusErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
UpdateAssessmentControlSetStatusErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateAssessmentControlSetStatusErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateAssessmentControlSetStatusError {
fn code(&self) -> Option<&str> {
UpdateAssessmentControlSetStatusError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateAssessmentControlSetStatusError {
pub fn new(kind: UpdateAssessmentControlSetStatusErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateAssessmentControlSetStatusErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateAssessmentControlSetStatusErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlSetStatusErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlSetStatusErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlSetStatusErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentControlSetStatusErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for UpdateAssessmentControlSetStatusError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateAssessmentControlSetStatusErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
UpdateAssessmentControlSetStatusErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
UpdateAssessmentControlSetStatusErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
UpdateAssessmentControlSetStatusErrorKind::ValidationException(_inner) => Some(_inner),
UpdateAssessmentControlSetStatusErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateAssessmentFrameworkError {
pub kind: UpdateAssessmentFrameworkErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateAssessmentFrameworkErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateAssessmentFrameworkError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
UpdateAssessmentFrameworkErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UpdateAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateAssessmentFrameworkErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateAssessmentFrameworkErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateAssessmentFrameworkError {
fn code(&self) -> Option<&str> {
UpdateAssessmentFrameworkError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateAssessmentFrameworkError {
pub fn new(kind: UpdateAssessmentFrameworkErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateAssessmentFrameworkErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateAssessmentFrameworkErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentFrameworkErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentFrameworkErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentFrameworkErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentFrameworkErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for UpdateAssessmentFrameworkError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateAssessmentFrameworkErrorKind::AccessDeniedException(_inner) => Some(_inner),
UpdateAssessmentFrameworkErrorKind::InternalServerException(_inner) => Some(_inner),
UpdateAssessmentFrameworkErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateAssessmentFrameworkErrorKind::ValidationException(_inner) => Some(_inner),
UpdateAssessmentFrameworkErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateAssessmentStatusError {
pub kind: UpdateAssessmentStatusErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateAssessmentStatusErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateAssessmentStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateAssessmentStatusErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
UpdateAssessmentStatusErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UpdateAssessmentStatusErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateAssessmentStatusErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateAssessmentStatusErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateAssessmentStatusError {
fn code(&self) -> Option<&str> {
UpdateAssessmentStatusError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateAssessmentStatusError {
pub fn new(kind: UpdateAssessmentStatusErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateAssessmentStatusErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateAssessmentStatusErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentStatusErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentStatusErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentStatusErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
UpdateAssessmentStatusErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for UpdateAssessmentStatusError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateAssessmentStatusErrorKind::AccessDeniedException(_inner) => Some(_inner),
UpdateAssessmentStatusErrorKind::InternalServerException(_inner) => Some(_inner),
UpdateAssessmentStatusErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateAssessmentStatusErrorKind::ValidationException(_inner) => Some(_inner),
UpdateAssessmentStatusErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateControlError {
pub kind: UpdateControlErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateControlErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateControlError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateControlErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
UpdateControlErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UpdateControlErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateControlErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateControlErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateControlError {
fn code(&self) -> Option<&str> {
UpdateControlError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateControlError {
pub fn new(kind: UpdateControlErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateControlErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateControlErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(&self.kind, UpdateControlErrorKind::AccessDeniedException(_))
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateControlErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateControlErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, UpdateControlErrorKind::ValidationException(_))
}
}
impl std::error::Error for UpdateControlError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateControlErrorKind::AccessDeniedException(_inner) => Some(_inner),
UpdateControlErrorKind::InternalServerException(_inner) => Some(_inner),
UpdateControlErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateControlErrorKind::ValidationException(_inner) => Some(_inner),
UpdateControlErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateSettingsError {
pub kind: UpdateSettingsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateSettingsErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateSettingsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateSettingsErrorKind::AccessDeniedException(_inner) => _inner.fmt(f),
UpdateSettingsErrorKind::InternalServerException(_inner) => _inner.fmt(f),
UpdateSettingsErrorKind::ValidationException(_inner) => _inner.fmt(f),
UpdateSettingsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateSettingsError {
fn code(&self) -> Option<&str> {
UpdateSettingsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateSettingsError {
pub fn new(kind: UpdateSettingsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateSettingsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateSettingsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
UpdateSettingsErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
UpdateSettingsErrorKind::InternalServerException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(&self.kind, UpdateSettingsErrorKind::ValidationException(_))
}
}
impl std::error::Error for UpdateSettingsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateSettingsErrorKind::AccessDeniedException(_inner) => Some(_inner),
UpdateSettingsErrorKind::InternalServerException(_inner) => Some(_inner),
UpdateSettingsErrorKind::ValidationException(_inner) => Some(_inner),
UpdateSettingsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ValidateAssessmentReportIntegrityError {
pub kind: ValidateAssessmentReportIntegrityErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ValidateAssessmentReportIntegrityErrorKind {
AccessDeniedException(crate::error::AccessDeniedException),
InternalServerException(crate::error::InternalServerException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
ValidationException(crate::error::ValidationException),
    /// An unexpected error, e.g., invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ValidateAssessmentReportIntegrityError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ValidateAssessmentReportIntegrityErrorKind::AccessDeniedException(_inner) => {
_inner.fmt(f)
}
ValidateAssessmentReportIntegrityErrorKind::InternalServerException(_inner) => {
_inner.fmt(f)
}
ValidateAssessmentReportIntegrityErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
ValidateAssessmentReportIntegrityErrorKind::ValidationException(_inner) => {
_inner.fmt(f)
}
ValidateAssessmentReportIntegrityErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ValidateAssessmentReportIntegrityError {
fn code(&self) -> Option<&str> {
ValidateAssessmentReportIntegrityError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ValidateAssessmentReportIntegrityError {
pub fn new(
kind: ValidateAssessmentReportIntegrityErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ValidateAssessmentReportIntegrityErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ValidateAssessmentReportIntegrityErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display
// as implemented by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_access_denied_exception(&self) -> bool {
matches!(
&self.kind,
ValidateAssessmentReportIntegrityErrorKind::AccessDeniedException(_)
)
}
pub fn is_internal_server_exception(&self) -> bool {
matches!(
&self.kind,
ValidateAssessmentReportIntegrityErrorKind::InternalServerException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
ValidateAssessmentReportIntegrityErrorKind::ResourceNotFoundException(_)
)
}
pub fn is_validation_exception(&self) -> bool {
matches!(
&self.kind,
ValidateAssessmentReportIntegrityErrorKind::ValidationException(_)
)
}
}
impl std::error::Error for ValidateAssessmentReportIntegrityError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ValidateAssessmentReportIntegrityErrorKind::AccessDeniedException(_inner) => {
Some(_inner)
}
ValidateAssessmentReportIntegrityErrorKind::InternalServerException(_inner) => {
Some(_inner)
}
ValidateAssessmentReportIntegrityErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
ValidateAssessmentReportIntegrityErrorKind::ValidationException(_inner) => Some(_inner),
ValidateAssessmentReportIntegrityErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// <p>
/// The request has invalid or missing parameters.
/// </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ValidationException {
pub message: std::option::Option<std::string::String>,
/// <p>
/// The reason the request failed validation.
/// </p>
pub reason: std::option::Option<crate::model::ValidationExceptionReason>,
/// <p>
/// The fields that caused the error, if applicable.
/// </p>
pub fields: std::option::Option<std::vec::Vec<crate::model::ValidationExceptionField>>,
}
impl std::fmt::Debug for ValidationException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ValidationException");
formatter.field("message", &self.message);
formatter.field("reason", &self.reason);
formatter.field("fields", &self.fields);
formatter.finish()
}
}
impl ValidationException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ValidationException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ValidationException")?;
if let Some(inner_1) = &self.message {
write!(f, ": {}", inner_1)?;
}
Ok(())
}
}
impl std::error::Error for ValidationException {}
/// See [`ValidationException`](crate::error::ValidationException)
pub mod validation_exception {
/// A builder for [`ValidationException`](crate::error::ValidationException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
pub(crate) reason: std::option::Option<crate::model::ValidationExceptionReason>,
pub(crate) fields:
std::option::Option<std::vec::Vec<crate::model::ValidationExceptionField>>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// <p>
/// The reason the request failed validation.
/// </p>
pub fn reason(mut self, input: crate::model::ValidationExceptionReason) -> Self {
self.reason = Some(input);
self
}
pub fn set_reason(
mut self,
input: std::option::Option<crate::model::ValidationExceptionReason>,
) -> Self {
self.reason = input;
self
}
pub fn fields(mut self, input: impl Into<crate::model::ValidationExceptionField>) -> Self {
let mut v = self.fields.unwrap_or_default();
v.push(input.into());
self.fields = Some(v);
self
}
pub fn set_fields(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::ValidationExceptionField>>,
) -> Self {
self.fields = input;
self
}
/// Consumes the builder and constructs a [`ValidationException`](crate::error::ValidationException)
pub fn build(self) -> crate::error::ValidationException {
crate::error::ValidationException {
message: self.message,
reason: self.reason,
fields: self.fields,
}
}
}
}
impl ValidationException {
/// Creates a new builder-style object to manufacture [`ValidationException`](crate::error::ValidationException)
pub fn builder() -> crate::error::validation_exception::Builder {
crate::error::validation_exception::Builder::default()
}
}
/// <p>
/// The resource specified in the request cannot be found.
/// </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceNotFoundException {
pub message: std::option::Option<std::string::String>,
/// <p>
/// The unique identifier for the specified resource.
/// </p>
pub resource_id: std::option::Option<std::string::String>,
/// <p>
/// The type of resource affected by the error.
/// </p>
pub resource_type: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ResourceNotFoundException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ResourceNotFoundException");
formatter.field("message", &self.message);
formatter.field("resource_id", &self.resource_id);
formatter.field("resource_type", &self.resource_type);
formatter.finish()
}
}
impl ResourceNotFoundException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ResourceNotFoundException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ResourceNotFoundException")?;
if let Some(inner_2) = &self.message {
write!(f, ": {}", inner_2)?;
}
Ok(())
}
}
impl std::error::Error for ResourceNotFoundException {}
/// See [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub mod resource_not_found_exception {
/// A builder for [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
pub(crate) resource_id: std::option::Option<std::string::String>,
pub(crate) resource_type: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// <p>
/// The unique identifier for the specified resource.
/// </p>
pub fn resource_id(mut self, input: impl Into<std::string::String>) -> Self {
self.resource_id = Some(input.into());
self
}
pub fn set_resource_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.resource_id = input;
self
}
/// <p>
/// The type of resource affected by the error.
/// </p>
pub fn resource_type(mut self, input: impl Into<std::string::String>) -> Self {
self.resource_type = Some(input.into());
self
}
pub fn set_resource_type(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.resource_type = input;
self
}
/// Consumes the builder and constructs a [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub fn build(self) -> crate::error::ResourceNotFoundException {
crate::error::ResourceNotFoundException {
message: self.message,
resource_id: self.resource_id,
resource_type: self.resource_type,
}
}
}
}
impl ResourceNotFoundException {
/// Creates a new builder-style object to manufacture [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub fn builder() -> crate::error::resource_not_found_exception::Builder {
crate::error::resource_not_found_exception::Builder::default()
}
}
/// <p>
/// An internal service error occurred during the processing of your request. Try again later.
/// </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InternalServerException {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InternalServerException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InternalServerException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InternalServerException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InternalServerException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "InternalServerException")?;
if let Some(inner_3) = &self.message {
write!(f, ": {}", inner_3)?;
}
Ok(())
}
}
impl std::error::Error for InternalServerException {}
/// See [`InternalServerException`](crate::error::InternalServerException)
pub mod internal_server_exception {
/// A builder for [`InternalServerException`](crate::error::InternalServerException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`InternalServerException`](crate::error::InternalServerException)
pub fn build(self) -> crate::error::InternalServerException {
crate::error::InternalServerException {
message: self.message,
}
}
}
}
impl InternalServerException {
/// Creates a new builder-style object to manufacture [`InternalServerException`](crate::error::InternalServerException)
pub fn builder() -> crate::error::internal_server_exception::Builder {
crate::error::internal_server_exception::Builder::default()
}
}
/// <p>
/// Your account is not registered with Audit Manager. Check the delegated administrator setup on the Audit Manager settings page, and try again.
/// </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AccessDeniedException {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for AccessDeniedException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("AccessDeniedException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl AccessDeniedException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for AccessDeniedException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "AccessDeniedException")?;
if let Some(inner_4) = &self.message {
write!(f, ": {}", inner_4)?;
}
Ok(())
}
}
impl std::error::Error for AccessDeniedException {}
/// See [`AccessDeniedException`](crate::error::AccessDeniedException)
pub mod access_denied_exception {
/// A builder for [`AccessDeniedException`](crate::error::AccessDeniedException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`AccessDeniedException`](crate::error::AccessDeniedException)
pub fn build(self) -> crate::error::AccessDeniedException {
crate::error::AccessDeniedException {
message: self.message,
}
}
}
}
impl AccessDeniedException {
/// Creates a new builder-style object to manufacture [`AccessDeniedException`](crate::error::AccessDeniedException)
pub fn builder() -> crate::error::access_denied_exception::Builder {
crate::error::access_denied_exception::Builder::default()
}
}
| 36.775874 | 145 | 0.644732 |
e4cca3a4c1ad10fcfd79cc4f4e7da894a76a81ea | 1,399 | use futures::Future;
use tokio::reactor::Handle;
use {RamStore, sexp_proto, TailService, ServerService, Replicator, ReplicaClient};
use replica::HostConfig;
use view_management::{EtcdHeartbeater, ViewManager};
use std::net::SocketAddr;
use std::io;
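/// Builds the head and tail services for a single chain-replication host, registers the
/// host with the etcd-backed view manager, spawns the view-management and replication
/// tasks on the given reactor handle, and returns the resulting `HostConfig`
/// (the head and tail listen addresses).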
pub fn build_server(handle: &Handle,
head_addr: SocketAddr,
tail_addr: SocketAddr)
-> Result<HostConfig, io::Error> {
let store = RamStore::new();
let head = ServerService::new(store.clone());
let tail = TailService::new(store.clone());
let head_host = try!(sexp_proto::server::serve(handle, head_addr, head));
let tail_host = try!(sexp_proto::server::serve(handle, tail_addr, tail));
let host_config = HostConfig {
head: head_host.local_addr().clone(),
tail: tail_host.local_addr().clone(),
};
let etcd = EtcdHeartbeater::new("", "/my-chain", host_config.clone());
let view_manager = ViewManager::new(store.clone(), &host_config, etcd);
handle.spawn(view_manager.map_err(|e| panic!("ViewManager failed!: {:?}", e)));
let downstream = {
let handle = handle.clone();
move |addr| ReplicaClient::connect(handle.clone(), &addr)
};
let replica = Replicator::new(store.clone(), &host_config, downstream);
handle.spawn(replica.map_err(|e| panic!("Replicator failed!: {:?}", e)));
Ok(host_config)
}
| 35.871795 | 83 | 0.647605 |
e97ca1b8585fae4311a409a00ee942adeceaa95a | 2,465 | //! This crate provides Serde's two derive macros.
//!
//! ```edition2018
//! # use serde_derive::{Serialize, Deserialize};
//! #
//! #[derive(Serialize, Deserialize)]
//! # struct S;
//! #
//! # fn main() {}
//! ```
//!
//! Please refer to [https://serde.rs/derive.html] for how to set this up.
//!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html
#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.98")]
#![allow(unknown_lints, bare_trait_objects)]
#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Ignored clippy lints
#![cfg_attr(
feature = "cargo-clippy",
allow(
cognitive_complexity,
enum_variant_names,
needless_pass_by_value,
redundant_field_names,
too_many_arguments,
trivially_copy_pass_by_ref,
used_underscore_binding,
)
)]
// Ignored clippy_pedantic lints
#![cfg_attr(
feature = "cargo-clippy",
allow(
cast_possible_truncation,
checked_conversions,
doc_markdown,
enum_glob_use,
filter_map,
indexing_slicing,
items_after_statements,
match_same_arms,
module_name_repetitions,
similar_names,
single_match_else,
unseparated_literal_suffix,
use_self,
)
)]
// The `quote!` macro requires deep recursion.
#![recursion_limit = "512"]
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
extern crate proc_macro;
extern crate proc_macro2;
mod internals;
use proc_macro::TokenStream;
use syn::DeriveInput;
#[macro_use]
mod bound;
#[macro_use]
mod fragment;
mod de;
mod dummy;
mod pretend;
mod ser;
mod try;
#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
ser::expand_derive_serialize(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
de::expand_derive_deserialize(&input)
.unwrap_or_else(to_compile_errors)
.into()
}
fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
let compile_errors = errors.iter().map(syn::Error::to_compile_error);
quote!(#(#compile_errors)*)
}
| 24.89899 | 75 | 0.68073 |
0ecb3df6d87a5b7d339bffa5a4a4067022e76bfc | 169 | mod day1;
mod day10;
mod day11;
mod day2;
mod day3;
mod day4;
mod day5;
mod day6;
mod day7;
mod day8;
mod day9;
mod utils;
fn main() {
println!("Hello, world!");
}
| 9.941176 | 30 | 0.650888 |
1ae78c7de35f260ed879b900dcaaef41494406e1 | 4,725 | use io::{Read, Write};
use std::env::args;
use std::fs::File;
use std::io;
use log::{error, info, warn};
use simple_logger::SimpleLogger;
use Raz_lang::backends::js::codegen::JSCodeGen;
use Raz_lang::backends::CodeGen;
use Raz_lang::Raz_compiler::{Compiler, CompilerVariableMap};
use Raz_lang::Raz_parser::parse;
use Raz_lang::Raz_vm::value::ValueType;
use Raz_lang::Raz_vm::vm::{Heap, VariableMap, VM};
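/// Prints a `> ` prompt, reads one line from stdin and returns it with the trailing
/// newline trimmed. Read errors are ignored and simply yield an empty string.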
fn get_input() -> String {
let mut input = String::new();
print!("> ");
io::stdout().flush().expect("flush failed!");
match io::stdin().read_line(&mut input) {
Ok(_goes_into_input_above) => {}
Err(_no_updates_is_fine) => {}
}
input.trim_end().to_string()
}
fn cli() {
SimpleLogger::new().init().unwrap();
let argv: Vec<String> = args().collect();
if argv.len() < 2 {
let mut heap = Heap::default();
let mut vars = VariableMap::default();
let mut insts = vec![];
let mut compvars = CompilerVariableMap::default();
compvars.new_frame();
vars.new_frame();
println!("Welcome to Raz repl. Type :quit to quit.",);
loop {
let ip = get_input();
if ip.trim() == ":quit" {
break;
}
let ast = parse(&ip);
if let Ok(ast) = ast {
let mut compiler = Compiler::new(&ip);
compiler.variable_map = compvars.clone();
compiler.compile_no_new_frame(ast);
compvars = compiler.variable_map;
let mut insts_copy = insts.clone();
let inst = compiler.result.clone();
insts_copy.extend(inst.clone());
let mut vm = VM::default();
vm.heap = heap.clone();
vm.variables = vars.clone();
let l = insts.len();
vm.run_with_start(insts_copy, l, l as isize);
if let Some(x) = &vm.error {
error!("Runtime Error:\n{}", x.to_string(&heap));
} else if let Some(l) = &vm.last_popped {
heap = vm.heap;
vars = vm.variables;
insts.extend(inst);
if l.value_type() != ValueType::Null {
println!("{}", l.to_debug_string(&heap));
}
} else {
heap = vm.heap;
vars = vm.variables;
insts.extend(inst);
}
} else if let Err(e) = ast {
error!("Parsing Error:\n{}", e);
}
}
return;
}
let filename = &argv[1];
let mut file = File::open(filename).expect("Unable to open the file");
let mut contents = vec![];
file.read_to_end(&mut contents)
.expect("Unable to read the file");
let code = String::from_utf8(contents).unwrap();
let ast = parse(&*code);
if let Ok(ast) = ast {
if let Some(mode) = argv.get(2) {
if mode.starts_with("mode=") {
let opt = mode.split("mode=").collect::<Vec<&str>>()[1].trim();
match opt {
"vm" => {}
"js" => {
let mut jsgen = JSCodeGen::default();
let res = jsgen.generate(&ast, ());
println!("{}", res);
return;
}
_ => {
warn!("WARNING: invalid mode: {}", opt);
}
}
}
}
let mut compiler = Compiler::new(&code);
compiler.compile(ast);
let inst = compiler.result;
let mut vm = VM::default();
let mut index = 2;
loop {
if let Some(option) = argv.get(index) {
match option.as_str() {
"no_gc" => {
info!("INFO: Using NoGC Mode");
vm.use_gc = false;
}
_ => {
warn!("WARNING: no such option: {}", option)
}
}
} else {
break;
}
index += 1;
}
vm.run(inst);
if let Some(x) = &vm.error {
error!("At Line {}, ", vm.line + 1);
error!("Runtime Error:\n{}", x.to_string(&vm.heap));
}
} else if let Err(e) = ast {
error!("Parsing Error:\n{}", e);
}
}
fn main() {
cli();
}
| 31.291391 | 80 | 0.433439 |
deead6ee3ef68beaa0a36a1acdab6509e6dc15cf | 9,861 | //! Generic label independent partition abstraction.
//! Provides a label-independent abstraction. The partitions are not directly connected with
//! partition table (label) data. Any change to fdisk_partition does not affect in-memory
//! or on-disk label data. The fdisk_partition can be used as a
//! template for fdisk_add_partition() or fdisk_set_partition() operations.
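//!
//! A minimal usage sketch (illustrative only, not compiled as a doctest; it assumes a
//! `Context` named `ctx` has been opened on some device elsewhere):
//!
//! ```ignore
//! let pa = Partition::new();
//! pa.set_partno(0)?;
//! pa.set_start(2048)?;        // start sector
//! pa.set_size(1_048_576)?;    // size in sectors
//! ctx.set_partition(0, &pa)?; // apply the template to the first partition
//! ```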
use crate::context::Context;
use crate::errors::*;
use fdisk_sys;
use std::ffi::{CStr, CString};
/// Generic label independent partition abstraction
pub struct Partition {
pub(crate) ptr: *mut fdisk_sys::fdisk_partition,
}
impl Partition {
/// Return newly allocated Partition
pub fn new() -> Partition {
Partition {
ptr: unsafe { fdisk_sys::fdisk_new_partition() },
}
}
/// Increment reference counter.
pub fn ref_partition(&self) {
unsafe { fdisk_sys::fdisk_ref_partition(self.ptr) }
}
/// Reset partition content.
pub fn reset_partition(&self) {
unsafe { fdisk_sys::fdisk_reset_partition(self.ptr) }
}
/// Return partition attributes in string format
pub fn attrs(&self) -> Option<String> {
unsafe {
let ptr = fdisk_sys::fdisk_partition_get_attrs(self.ptr);
if ptr.is_null() {
return None;
}
Some(CStr::from_ptr(ptr).to_str().unwrap().to_string())
}
}
/// Return last partition sector LBA.
pub fn end(&self) -> Option<u64> {
match unsafe { fdisk_sys::fdisk_partition_has_end(self.ptr) } {
0 => None,
_ => Some(unsafe { fdisk_sys::fdisk_partition_get_end(self.ptr) }),
}
}
    /// Return partition name as string
pub fn name(&self) -> Result<String> {
unsafe {
let src = fdisk_sys::fdisk_partition_get_name(self.ptr);
if src.is_null() {
return Err("no valid Name".into());
}
match CStr::from_ptr(src).to_str() {
Ok(v) => Ok(v.to_string()),
Err(e) => Err(e.into()),
}
}
}
/// Return devno of the parent
pub fn parent(&self) -> Result<usize> {
let mut p: usize = 0;
match unsafe { fdisk_sys::fdisk_partition_get_parent(self.ptr, &mut p) } {
0 => Ok(p),
            v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
/// Return partition number (0 is the first partition)
pub fn partno(&self) -> Option<usize> {
match unsafe { fdisk_sys::fdisk_partition_has_partno(self.ptr) } {
0 => None,
_ => Some(unsafe { fdisk_sys::fdisk_partition_get_partno(self.ptr) }),
}
}
    /// Return partition size in sectors
pub fn size(&self) -> Option<u64> {
match unsafe { fdisk_sys::fdisk_partition_has_size(self.ptr) } {
0 => None,
_ => Some(unsafe { fdisk_sys::fdisk_partition_get_size(self.ptr) }),
}
}
/// Return start offset in sectors
pub fn start(&self) -> Option<u64> {
match unsafe { fdisk_sys::fdisk_partition_has_start(self.ptr) } {
0 => None,
_ => Some(unsafe { fdisk_sys::fdisk_partition_get_start(self.ptr) }),
}
}
/// Return partition UUID as string
pub fn uuid(&self) -> Result<String> {
unsafe {
let src = fdisk_sys::fdisk_partition_get_uuid(self.ptr);
if src.is_null() {
return Err("no valid UUID".into());
}
match CStr::from_ptr(src).to_str() {
Ok(v) => Ok(v.to_string()),
Err(e) => Err(e.into()),
}
}
}
/// Return true if the partition has enabled boot flag
pub fn is_bootable(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_is_bootable(self.ptr) } {
1 => true,
_ => false,
}
}
    /// Return true if the partition is a container (e.g. MBR extended partition)
pub fn is_container(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_is_container(self.ptr) } {
1 => true,
_ => false,
}
}
/// Return true if points to freespace
pub fn is_freespace(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_is_freespace(self.ptr) } {
1 => true,
_ => false,
}
}
/// Return true if the partition is nested (e.g. MBR logical partition)
pub fn is_nested(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_is_nested(self.ptr) } {
1 => true,
_ => false,
}
}
/// Return true if the partition points to some area
pub fn is_used(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_is_used(self.ptr) } {
1 => true,
_ => false,
}
}
    /// Return true if the partition is a special whole-disk (e.g. SUN) partition
pub fn is_wholedisk(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_is_wholedisk(self.ptr) } {
1 => true,
_ => false,
}
}
pub fn set_partno(&self, partno: usize) -> Result<()> {
match unsafe { fdisk_sys::fdisk_partition_set_partno(self.ptr, partno) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
pub fn set_size(&self, size: u64) -> Result<()> {
match unsafe { fdisk_sys::fdisk_partition_set_size(self.ptr, size) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
pub fn set_start(&self, start: u64) -> Result<()> {
match unsafe { fdisk_sys::fdisk_partition_set_start(self.ptr, start) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
pub fn set_attrs(&self, attrs: &str) -> Result<()> {
let attrs = CString::new(attrs.as_bytes())?;
match unsafe { fdisk_sys::fdisk_partition_set_attrs(self.ptr, attrs.as_ptr()) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
pub fn set_name(&self, name: &str) -> Result<()> {
let name = CString::new(name.as_bytes())?;
match unsafe { fdisk_sys::fdisk_partition_set_name(self.ptr, name.as_ptr()) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
pub fn set_uuid(&self, uuid: &str) -> Result<()> {
let uuid = CString::new(uuid.as_bytes())?;
match unsafe { fdisk_sys::fdisk_partition_set_uuid(self.ptr, uuid.as_ptr()) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
    /// By default libfdisk aligns the size when adding a new partition (via add_partition()).
    /// To disable this behaviour, use enable = true.
pub fn size_explicit(&self, enable: bool) -> Result<()> {
match unsafe {
fdisk_sys::fdisk_partition_size_explicit(self.ptr, if enable { 1 } else { 0 })
} {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
    /// When the partition is used as a template for add_partition(), force the label
    /// driver to use the default (first possible) start offset for the new partition.
pub fn start_follow_default(&self, enable: bool) -> Result<()> {
match unsafe {
fdisk_sys::fdisk_partition_start_follow_default(self.ptr, if enable { 1 } else { 0 })
} {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
/// Return true if the partition follows default
pub fn start_is_default(&self) -> bool {
match unsafe { fdisk_sys::fdisk_partition_start_is_default(self.ptr) } {
1 => true,
_ => false,
}
}
/// Sets the partno as undefined.
pub fn unset_partno(&self) -> Result<()> {
match unsafe { fdisk_sys::fdisk_partition_unset_partno(self.ptr) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
/// Sets the size as undefined
pub fn unset_size(&self) -> Result<()> {
match unsafe { fdisk_sys::fdisk_partition_unset_size(self.ptr) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
/// Sets the start as undefined
pub fn unset_start(&self) -> Result<()> {
match unsafe { fdisk_sys::fdisk_partition_unset_start(self.ptr) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
}
impl Drop for Partition {
fn drop(&mut self) {
unsafe { fdisk_sys::fdisk_unref_partition(self.ptr) }
}
}
impl Default for Partition {
fn default() -> Self {
Self::new()
}
}
impl Context {
    /// Modifies the disklabel according to the settings within `pt`.
    /// # Arguments
    /// * `no` - partition number (0 is the first partition)
    /// * `pt` - new partition settings
pub fn set_partition(&self, no: usize, pt: &Partition) -> Result<()> {
match unsafe { fdisk_sys::fdisk_set_partition(self.ptr, no, pt.ptr) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
/// Delete all used partitions from disklabel
pub fn delete_all_partitions(&self) -> Result<()> {
match unsafe { fdisk_sys::fdisk_delete_all_partitions(self.ptr) } {
0 => Ok(()),
v => Err(nix::Error::from_errno(nix::errno::from_i32(-v)).into()),
}
}
}
| 33.540816 | 97 | 0.556536 |
8a22ceb5aa901ddec49d81798014b575b6710d6a | 8,785 | use std::collections::{HashMap, HashSet};
use crate::{
core::types::{SpecializationGroup, SpecializationGroups},
resources::course::CourseId,
};
use super::BankRuleHandler;
// Abbreviations used throughout this file:
// sg = specialization_group
// sgs = specialization_groups
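// Returns the distinct specialization-group indices that appear in the course assignment
// (duplicates removed).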
fn get_groups_indices(course_id_to_sg_index: &HashMap<CourseId, usize>) -> Vec<usize> {
let mut uniques = HashSet::new();
let mut indices = course_id_to_sg_index
.clone()
.into_values()
.collect::<Vec<_>>();
indices.retain(|e| uniques.insert(*e));
indices
}
fn get_complete_sgs_indices(
sgs: &[SpecializationGroup],
course_id_to_sg_index: &HashMap<CourseId, usize>,
) -> Vec<usize> {
let groups_indices = get_groups_indices(course_id_to_sg_index);
let mut complete_sgs_indices = Vec::new();
for sg_index in groups_indices {
// check there are enough courses in this specialization group
if (course_id_to_sg_index
.values()
.filter(|&&group| group == sg_index)
.count())
< sgs[sg_index].courses_sum
{
// There are not enough courses in this sg to complete the requirement
continue;
}
// check if the user completed the mandatory courses in sg
if let Some(mandatory) = &sgs[sg_index].mandatory {
let mut complete_mandatory = true;
for courses in mandatory {
let mut completed_current_demand = false;
for (course_id, group) in course_id_to_sg_index {
// check if the user completed one of courses
if *group == sg_index && courses.contains(course_id) {
completed_current_demand = true;
break;
}
}
if !completed_current_demand {
complete_mandatory = false;
}
}
if complete_mandatory {
complete_sgs_indices.push(sg_index);
}
}
}
complete_sgs_indices
}
// This function looks for a valid assignment of the courses which fulfills the sgs requirements.
// If an assignment is found it is returned, None otherwise.
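// Illustrative example (hypothetical course ids, assuming courses_sum = 1 and no mandatory
// lists for both groups): with sgs[0] = {c1, c2}, sgs[1] = {c2, c3}, groups_indices = [0, 1]
// and completed courses [c1, c3], the assignment {c1 -> 0, c3 -> 1} completes both groups
// and is returned.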
fn find_valid_assignment_for_courses(
sgs: &[SpecializationGroup],
groups_indices: &[usize],
optional_sgs_for_course: &HashMap<CourseId, Vec<usize>>, // list of all optional sgs for each course
current_best_match: &mut HashMap<CourseId, usize>, // the best match of sgs
course_id_to_sg_index: &mut HashMap<CourseId, usize>,
course_index: usize, // course_index-th element in optional_sgs_for_course
) -> Option<HashMap<CourseId, usize>> {
if course_index >= optional_sgs_for_course.len() {
let complete_sgs_indices = get_complete_sgs_indices(sgs, course_id_to_sg_index);
if complete_sgs_indices.len() >= groups_indices.len() {
return Some(course_id_to_sg_index.clone());
}
let complete_sgs_for_current_best_match = get_complete_sgs_indices(sgs, current_best_match);
if complete_sgs_indices.len() > complete_sgs_for_current_best_match.len() {
current_best_match.clear();
current_best_match.extend(course_id_to_sg_index.to_owned());
}
return None;
}
if let Some((course_id, optional_groups)) = optional_sgs_for_course.iter().nth(course_index) {
for sg_index in optional_groups {
course_id_to_sg_index.insert(course_id.clone(), *sg_index);
if let Some(valid_assignment) = find_valid_assignment_for_courses(
sgs,
groups_indices,
optional_sgs_for_course,
current_best_match,
course_id_to_sg_index,
course_index + 1,
) {
return Some(valid_assignment);
}
}
}
None
}
fn get_sgs_courses_assignment(
sgs: &[SpecializationGroup],
groups_indices: &[usize],
courses: &[CourseId],
best_match: &mut HashMap<CourseId, usize>,
) -> Option<HashMap<CourseId, usize>> {
let mut optional_sgs_for_course = HashMap::<CourseId, Vec<usize>>::new();
for course_id in courses {
let mut relevant_groups_for_course = Vec::new();
for sg_index in groups_indices {
if sgs[*sg_index].course_list.contains(course_id) {
relevant_groups_for_course.push(*sg_index);
}
}
if !relevant_groups_for_course.is_empty() {
// only this subset specialization groups consist course_id
optional_sgs_for_course.insert(course_id.clone(), relevant_groups_for_course);
}
}
let mut courses_assignment = HashMap::new();
find_valid_assignment_for_courses(
sgs,
groups_indices,
&optional_sgs_for_course,
best_match,
&mut courses_assignment,
0,
)
}
// generates all subsets of size specialization_groups.groups_number and checks if one of them is fulfilled
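// For example (illustrative only), with four groups and groups_number = 2 the index subsets
// tried are {0,1}, {0,2}, {0,3}, {1,2}, {1,3}, {2,3}.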
fn generate_sgs_subsets(
sgs: &[SpecializationGroup],
required_number_of_groups: usize,
sg_index: usize,
groups_indices: &mut Vec<usize>,
courses: &[CourseId],
best_match: &mut HashMap<CourseId, usize>,
) -> Option<HashMap<CourseId, usize>> {
if groups_indices.len() == required_number_of_groups {
return get_sgs_courses_assignment(sgs, groups_indices, courses, best_match);
}
if sg_index >= sgs.len() {
return None;
}
// current group is included
groups_indices.push(sg_index);
if let Some(valid_assignment) = generate_sgs_subsets(
sgs,
required_number_of_groups,
sg_index + 1,
groups_indices,
courses,
best_match,
) {
return Some(valid_assignment);
}
// current group is excluded
groups_indices.pop();
generate_sgs_subsets(
sgs,
required_number_of_groups,
sg_index + 1,
groups_indices,
courses,
best_match,
)
}
fn run_exhaustive_search(
sgs: &SpecializationGroups,
courses: Vec<CourseId>, // list of all courses the user completed in specialization groups bank
) -> HashMap<CourseId, usize> {
let mut best_match = HashMap::new();
generate_sgs_subsets(
&sgs.groups_list,
sgs.groups_number,
0,
&mut Vec::new(),
&courses,
&mut best_match,
)
    .unwrap_or(best_match)
}
impl<'a> BankRuleHandler<'a> {
pub fn specialization_group(
mut self,
sgs: &SpecializationGroups,
completed_groups: &mut Vec<String>,
) -> f32 {
// All courses which might be in SOME specialization group should get its name assigned to them
// later on, if we find a valid assignment for said courses with a DIFFERENT specialization group,
// we will simply re-assign the specialization group name.
for sg in sgs.groups_list.iter() {
for course_id in sg.course_list.iter() {
if let Some(course_status) =
self.degree_status.get_mut_course_status(course_id.as_str())
{
course_status.set_specialization_group_name(&sg.name);
}
}
}
let credit_info = self.iterate_course_list();
let mut completed_courses = Vec::new();
for (course_id_in_list, course_id_done_by_user) in credit_info.handled_courses {
if let Some(course_status) = self
.degree_status
.get_course_status(&course_id_done_by_user)
{
if course_status.completed() {
completed_courses.push(course_id_in_list);
}
}
}
let valid_assignment_for_courses = run_exhaustive_search(sgs, completed_courses);
let complete_sgs_indices =
get_complete_sgs_indices(&sgs.groups_list, &valid_assignment_for_courses);
// The set is to prevent duplications
let mut sgs_names = HashSet::new();
for (course_id, sg_index) in valid_assignment_for_courses {
if let Some(course_status) = self.degree_status.get_mut_course_status(&course_id) {
if complete_sgs_indices.contains(&sg_index) {
course_status.set_specialization_group_name(&sgs.groups_list[sg_index].name);
sgs_names.insert(&sgs.groups_list[sg_index].name);
}
}
}
for sg_name in sgs_names {
completed_groups.push(sg_name.clone());
}
credit_info.sum_credit
}
}
| 35.566802 | 107 | 0.624587 |
ebd3313177d68a8638277dc92255ea38a6f46c1e | 14,413 | use log::{debug, error, info};
use nom::bytes::complete::take;
use nom::combinator::cond;
use nom::multi::count;
use nom::number::complete::{le_u16, le_u32, le_u8};
use nom::IResult;
use std::fmt::{Display, Formatter, Result};
use crate::disk_format::stx::sector::{
stx_sector_data_parser, stx_sector_header_parser, stx_sector_parser_plain, STXSectorHeader,
};
use crate::disk_format::stx::SanityCheck;
/// The STXTrackHeader structure contains information about a single track in a STX disk image
/// 16 bytes
#[derive(Debug)]
pub struct STXTrackHeader {
/// The block size of this track, in bytes
/// byte 0 in the track header
pub block_size: u32,
/// The fuzzy sector mask size, in bytes
/// byte 3 in the track header
/// The fuzzy sector mask is used for copy protection.
/// It has bits set for every bit in the sector that are not random
/// (the bits that are real data)
pub fuzzy_size: u32,
/// The number of sectors in this track
    /// This includes sector address blocks and sector data blocks
/// byte 7 in the header
pub sectors_count: u16,
/// Flags for this track
/// byte 9 in the header
    /// bit 0: if bit 0 is set, the track contains sector blocks and
    ///        the track is protected with a custom sector-size
    ///        (one of the "custom" sizes is still 512 bytes long)
    ///        if bit 0 is not set, a standard sector size of 512 bytes is used;
    ///        this is like a .ST disk image dump, data is just after the header
/// bit 5: the track is protected
/// bit 6: the track contains a track image
/// bit 7: the track image has a sync position
/// the sync position is a word at the start of the track image
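    /// For example, the common value 0x61 = 0b0110_0001 decodes as: sector blocks with a
    /// custom sector size present (bit 0), a protected track (bit 5) and a track image
    /// present (bit 6), with no sync position (bit 7 clear).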
pub flags: u16,
/// The MFM size, also known as the track length
/// byte 11 in the header
pub mfm_size: u16,
/// The track number
/// byte 13 in the header
/// bit 7 determines the side of the floppy (0 is side A, 1 is side B)
pub track_number: u8,
/// The record type or track type
/// byte 14 in the header
/// 0 == WDC track dump, 0xCC == DC type track dump
/// bits 0-6 are
/// bit 7 is
pub record_type: u8,
}
/// Perform sanity checks for a track header
/// For now, these are done post-parsing of the section
/// These are generally less strict than things like magic number identification
/// but are good indicators the data may be corrupted
impl SanityCheck for STXTrackHeader {
fn check(&self) -> bool {
if (self.flags != 0x21) && (self.flags != 0x61) && (self.flags != 0xc1) {
debug!("Disk flags are a nonstandard value: 0x{:X}", self.flags);
return false;
}
if ((self.flags & 0x40) == 0) && (self.sectors_count > 0) {
debug!("If flags bit 6 is not set, the sector count should be zero");
return false;
}
true
}
}
/// Format a STXTrackHeader for display
impl Display for STXTrackHeader {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"block_size: {}, fuzzy_size: {}, ",
self.block_size, self.fuzzy_size
)?;
writeln!(f, "sectors_count: {}", self.sectors_count)?;
writeln!(f, " Flags: 0x{:X} {:b}", self.flags, self.flags)?;
writeln!(
f,
" bit0(custom-size-byte-sector): {}",
// bit 0
if (self.flags & 0x01) == 0x01 {
"T"
} else {
"F"
}
)?;
writeln!(
f,
" bit5(track-is-proteced): {}",
// bit 5
if (self.flags & 0x20) == 0x20 {
"T"
} else {
"F"
}
)?;
writeln!(
f,
" bit6(has-track-image): {}",
// bit 6
if (self.flags & 0x40) == 0x40 {
"T"
} else {
"F"
}
)?;
writeln!(
f,
" bit7(track-image-has-sync-pos): {}",
// bit 7
if (self.flags & 0x80) == 0x80 {
"T"
} else {
"F"
}
)?;
write!(
f,
" mfm_size: {}, track_number: {}, ",
self.mfm_size, self.track_number
)?;
write!(f, "record_type: {}", self.record_type)
}
}
/// Parse a single track on the disk
/// Returns the remaining bytes an STXTrackHeader filled out with the track information
pub fn stx_track_header_parser(i: &[u8]) -> IResult<&[u8], STXTrackHeader> {
// The track header is 16 bytes long
let (i, block_size) = le_u32(i)?;
let (i, fuzzy_size) = le_u32(i)?;
let (i, sectors_count) = le_u16(i)?;
let (i, flags) = le_u16(i)?;
let (i, mfm_size) = le_u16(i)?;
let (i, track_number) = le_u8(i)?;
let (i, record_type) = le_u8(i)?;
let stx_track_header = STXTrackHeader {
block_size,
fuzzy_size,
sectors_count,
flags,
mfm_size,
track_number,
record_type,
};
Ok((i, stx_track_header))
}
/// A STXTrack contains a STXTrackHeader
#[derive(Debug)]
pub struct STXTrack<'a> {
    /// The header for this track
pub header: STXTrackHeader,
/// The sector headers in this track
pub sector_headers: Option<Vec<STXSectorHeader>>,
/// The sector data for this track
pub sector_data: Option<Vec<&'a [u8]>>,
}
/// Display a single track
impl Display for STXTrack<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(f, "header: {}", self.header)
}
}
/// Parse the track data, including sector headers in the track
/// TODO: Implement full parsing
/// This currently doesn't parse track data, just the headers
/// TODO: Simplify this parser
pub fn stx_track_parser(i: &[u8]) -> IResult<&[u8], STXTrack> {
// Record the starting position so we can figure out how much was missed
let starting_position = i;
let stx_track_header_result = stx_track_header_parser(i)?;
let stx_track_header = stx_track_header_result.1;
let i = stx_track_header_result.0;
if !stx_track_header.check() {
error!("Invalid data");
panic!("Invalid data");
}
let (_, sector_headers, sector_data) = if (stx_track_header.flags & 0x01) != 0x01 {
// Parse a plain data track
if stx_track_header.sectors_count > 0 {
let stx_sector = stx_sector_parser_plain(stx_track_header.sectors_count as usize)(i)?;
(stx_sector.0, None, None)
} else {
(i, None, None)
}
} else {
// Parse a set of sector headers
// Fuzzy byte reading is not implemented
if stx_track_header.fuzzy_size > 0 {
error!("Fuzzy bytes reading not implemented");
panic!("Fuzzy bytes reading not implemented");
}
// Find out how many sector headers to parse
info!("Track header: {}", stx_track_header);
// Parse the STX sector headers
// The last track has issues parsing in some cases, we hit EOF
// The last tracks are sometimes flag 0x21 and not 0x61, we need to
// deal with each track image data separately
let (i, sector_headers, sector_data) = if stx_track_header.sectors_count > 0 {
let stx_sector_headers_result = count(
stx_sector_header_parser,
stx_track_header.sectors_count as usize,
)(stx_track_header_result.0)?;
let stx_sector_headers = stx_sector_headers_result.1;
let sector_header_iter = stx_sector_headers.iter();
for header in sector_header_iter {
info!("stx_sector_header: {}", header);
}
// Skip past the fuzzy mask record
let (i, _) = take(stx_track_header.fuzzy_size)(stx_sector_headers_result.0)?;
// The track image data
// First the header, two or four bytes depending on the flags
// If track flags bit six (starting from bit zero) is set
// Then also test bit seven.
// If bit seven is set, read in two bytes, the first sync offset
// Then read read in the track image size, two bytes
// If bit seven is not set, the first sync offset is zero, size is
// calculated from other data
// just read in the track image data
let stx_track_image_header_result =
stx_track_image_header_parser(stx_track_header.flags)(i)?;
info!(
"stx_track_image_header: {}",
stx_track_image_header_result.1
);
let stx_sector_data_parser_result =
//stx_sector_data_parser(&stx_track_header, &stx_sector_headers)(stx_track_image_header_result.0)?;
stx_sector_data_parser(&stx_sector_headers)(i)?;
(
stx_track_image_header_result.0,
Some(stx_sector_headers),
Some(stx_sector_data_parser_result.1),
)
} else {
(i, None, None)
};
(i, sector_headers, sector_data)
};
// TODO: Fix up the other track image data parsing
// We don't use the i returned from the sector headers parsing block above, because
// currently the image track data parsing is unfinished. So the parser is left in
// an unfinished state after parsing track and sector headers.
// But we know the total length of the tracks, so we can skip to the next block
let (i, _) = take(stx_track_header.block_size)(starting_position)?;
Ok((
i,
STXTrack {
header: stx_track_header,
sector_headers,
sector_data,
},
))
}
/// Get n tracks from the disk
/// Returns a vector of the tracks
pub fn stx_tracks_parser(n: usize) -> impl Fn(&[u8]) -> IResult<&[u8], Vec<STXTrack>> {
move |i| count(stx_track_parser, n)(i)
}
/// The track image data on the disk, appears in each track,
/// after the sector headers if they exist, or just after the track headers
pub struct STXTrackImageHeader {
/// The first sync offset
    /// This field exists if the track flags bits 6 and 7 are set
pub first_sync_offset: u16,
/// The track image size
/// This field exists if the track flags bit 6 is set
pub track_image_size: u16,
}
/// Display a track image header
impl Display for STXTrackImageHeader {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(f, "track image header: ")?;
write!(
f,
"first_sync_offset: {}, track_image_size: {}",
self.first_sync_offset, self.track_image_size
)
}
}
/// Parse a STX track image header
pub fn stx_track_image_header_parser(
flags: u16,
) -> impl Fn(&[u8]) -> IResult<&[u8], STXTrackImageHeader> {
// Create and return a closure as the main result of this function
// i is not a simple value, even though it may appear to operate as one
// so it doesn't get copied by default
// This is why we need the move to capture ownership of the values it uses in the
// environment
move |i| {
// If flag bit 6 and 7 are set, get the first sync offset
let (i, first_sync_offset) =
cond(((flags & 0x40) != 0) && ((flags & 0x80) != 0), le_u16)(i)?;
// If flag bit 6 is set, get the track image size
let (i, track_image_size) = cond((flags & 0x40) != 0, le_u16)(i)?;
let stx_track_image_header = STXTrackImageHeader {
first_sync_offset: first_sync_offset.unwrap_or(0),
track_image_size: track_image_size.unwrap_or(0),
};
Ok((i, stx_track_image_header))
}
}
/// The actual track data
pub struct STXTrackData<'a> {
/// The track image data
data: &'a [u8],
}
/// Display metadata for the track data
impl Display for STXTrackData<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(f, "size of contents: {}", self.data.len())
}
}
#[cfg(test)]
mod tests {
use super::SanityCheck;
use super::stx_track_header_parser;
/// Test parsing a STX track header
#[test]
fn stx_valid_track_header_parser_works() {
// image_rider::disk_format::stx] Track header: block_size: 11022, fuzzy_size: 0, sectors_count: 9
let stx_track_header: [u8; 16] = [
0x43, 0x2b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x61, 0x00, 0x74, 0x18,
0x00, 0x00,
];
let stx_track_header_parser_result = stx_track_header_parser(&stx_track_header);
match stx_track_header_parser_result {
Ok((_, res)) => {
assert_eq!(res.block_size, 0x2b43);
assert_eq!(res.fuzzy_size, 0x00);
assert_eq!(res.sectors_count, 0x09);
assert_eq!(res.flags, 0x61);
assert_eq!(res.mfm_size, 0x1874);
assert_eq!(res.track_number, 0x00);
assert_eq!(res.record_type, 0x00);
}
Err(e) => panic!("Parsing failed on the STX disk header: {}", e),
}
}
/// Test parsing a STX track header with an unknown flags field
#[test]
fn stx_unknown_track_header_parser_works() {
// image_rider::disk_format::stx] Track header: block_size: 11022, fuzzy_size: 0, sectors_count: 9
let stx_track_header: [u8; 16] = [
0x43, 0x2b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x62, 0x00, 0x74, 0x18,
0x00, 0x00,
];
let stx_track_header_parser_result = stx_track_header_parser(&stx_track_header);
match stx_track_header_parser_result {
Ok((_, res)) => {
assert_eq!(res.block_size, 0x2b43);
assert_eq!(res.fuzzy_size, 0x00);
assert_eq!(res.sectors_count, 0x09);
assert_eq!(res.flags, 0x62);
assert_eq!(res.mfm_size, 0x1874);
assert_eq!(res.track_number, 0x00);
assert_eq!(res.record_type, 0x00);
// Should fail because of the flags
assert_eq!(false, res.check());
}
Err(e) => panic!("Parsing failed on the STX disk header: {}", e),
}
}
}
| 34.98301 | 115 | 0.584611 |
fb166a6404e8d0bcb93edb57e44f8458abd840d6 | 2,342 | #[macro_use]
extern crate serde_derive;
use crate::config::LogConfig;
use crate::file::FileLayer;
use crate::term::TermLayer;
use std::fmt::Debug;
use tracing_subscriber::layer::SubscriberExt;
pub mod config;
mod file;
mod term;
#[derive(Copy, Clone, Debug, Hash, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Level {
Trace = 0,
Debug = 1,
Info = 2,
Warn = 3,
Error = 4,
Critical = 5,
}
impl std::fmt::Display for Level {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match self {
Level::Trace => f.write_str("Trace"),
Level::Debug => f.write_str("Debug"),
Level::Info => f.write_str("Info"),
Level::Warn => f.write_str("Warn"),
Level::Error => f.write_str("Error"),
Level::Critical => f.write_str("Critical"),
}
}
}
impl PartialEq for Level {
#[inline]
fn eq(&self, other: &Level) -> bool {
*self as usize == *other as usize
}
}
impl Into<slog::Level> for Level {
fn into(self) -> slog::Level {
match self {
Level::Trace => slog::Level::Trace,
Level::Debug => slog::Level::Debug,
Level::Info => slog::Level::Info,
Level::Warn => slog::Level::Warning,
Level::Error => slog::Level::Error,
Level::Critical => slog::Level::Critical,
}
}
}
impl Into<tracing::Level> for Level {
fn into(self) -> tracing::Level {
match self {
Level::Trace => tracing::Level::TRACE,
Level::Debug => tracing::Level::DEBUG,
Level::Info => tracing::Level::INFO,
Level::Warn => tracing::Level::WARN,
Level::Error => tracing::Level::ERROR,
Level::Critical => tracing::Level::ERROR,
}
}
}
pub fn init_tracing(verbosity: u8, cfg: &LogConfig) {
let mut file_layer = FileLayer::new(cfg.level(), cfg.log_path());
file_layer.init();
let level: tracing::Level = cfg.level().into();
let subscriber = tracing_subscriber::registry()
// tracing_subscriber::fmt()
// .with_max_level(level)
// .finish()
.with(TermLayer::new(verbosity))
.with(file_layer);
tracing::subscriber::set_global_default(subscriber).unwrap()
}
| 27.232558 | 83 | 0.567464 |
fffd96aaf4ffcb252e2e6d96cb9647c31faec46a | 1,082 | #[macro_use]
mod circular_unit;
#[macro_use]
mod living_unit;
#[macro_use]
mod unit;
mod action_type;
mod bonus;
mod building;
mod faction;
mod game;
mod lane_type;
mod message;
mod minion;
mod move_;
mod player;
mod player_context;
mod projectile;
mod skill_type;
mod status;
mod tree;
mod wizard;
mod world;
pub use self::action_type::ActionType;
pub use self::bonus::{Bonus, Type as BonusType};
pub use self::building::{Building, Type as BuildingType};
pub use self::circular_unit::CircularUnit;
pub use self::faction::Faction;
pub use self::game::Game;
pub use self::lane_type::LaneType;
pub use self::living_unit::LivingUnit;
pub use self::message::Message;
pub use self::minion::{Minion, Type as MinionType};
pub use self::move_::Move;
pub use self::player::Player;
pub use self::player_context::PlayerContext;
pub use self::projectile::{Projectile, Type as ProjectileType};
pub use self::skill_type::SkillType;
pub use self::status::{Status, Type as StatusType};
pub use self::tree::Tree;
pub use self::unit::Unit;
pub use self::wizard::Wizard;
pub use self::world::World;
| 23.521739 | 63 | 0.756007 |
f5a41e3f152ec73cb3da7f01c1476de3caec88a9 | 2,038 | // Copyright 2019 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! qp2p enables communication within a peer to peer network over the QUIC protocol.
// For explanation of lint checks, run `rustc -W help`
#![forbid(
arithmetic_overflow,
mutable_transmutes,
no_mangle_const_items,
unknown_crate_types
)]
#![deny(
bad_style,
deprecated,
improper_ctypes,
missing_docs,
non_shorthand_field_patterns,
overflowing_literals,
stable_features,
unconditional_recursion,
unknown_lints,
unsafe_code,
unused_allocation,
unused_attributes,
unused_comparisons,
unused_features,
unused_parens,
while_true,
clippy::unicode_not_nfc,
warnings
)]
#![warn(
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
pub mod config;
mod connection_deduplicator;
mod connection_pool;
mod connections;
mod endpoint;
mod error;
#[cfg(feature = "igd")]
mod igd;
mod utils;
mod wire_msg;
pub use config::{Config, ConfigError, RetryConfig};
pub use connection_pool::ConnId;
pub use connections::{DisconnectionEvents, RecvStream, SendStream};
pub use endpoint::{Endpoint, IncomingConnections, IncomingMessages};
#[cfg(feature = "igd")]
pub use error::UpnpError;
pub use error::{
ClientEndpointError, Close, ConnectionError, EndpointError, InternalConfigError, RecvError,
RpcError, SendError, SerializationError, StreamError, TransportErrorCode,
UnsupportedStreamOperation,
};
#[cfg(test)]
mod tests;
| 27.173333 | 95 | 0.750736 |
efa5c725c954d41c6e8ad8572fde153917efcdcd | 942 | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma version(1)
#pragma rs java_package_name(com.example.android.rs.hellocomputendk)
const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
void root(const uchar4 *v_in, uchar4 *v_out) {
float4 f4 = rsUnpackColor8888(*v_in);
float3 mono = dot(f4.rgb, gMonoMult);
*v_out = rsPackColorTo8888(mono);
}
| 32.482759 | 75 | 0.729299 |
f71bb266126a283e1225372554e1433b1092f323 | 2,484 | use std::time::{Duration, Instant};
use tokio::sync::Mutex;
use trust_dns_proto::op::Message;
use crate::doh::config::CacheConfiguration;
use crate::doh::request_key::RequestKey;
#[derive(Clone)]
pub struct CacheObject {
message: Message,
cache_time: Instant,
expiration_time: Instant,
}
impl CacheObject {
pub fn new(message: Message, cache_time: Instant, cache_duration: Duration) -> Self {
let expiration_time = cache_time + cache_duration;
CacheObject {
message,
cache_time,
expiration_time,
}
}
pub fn message(self) -> Message {
self.message
}
pub fn message_mut(&mut self) -> &mut Message {
&mut self.message
}
pub fn expired(&self, now: Instant) -> bool {
now > self.expiration_time
}
pub fn duration_in_cache(&self, now: Instant) -> Duration {
now - self.cache_time
}
}
pub struct Cache {
cache_configuration: CacheConfiguration,
cache: Mutex<lru::LruCache<RequestKey, CacheObject>>,
}
impl Cache {
pub fn new(cache_configuration: CacheConfiguration) -> Self {
let max_size = cache_configuration.max_size();
Cache {
cache_configuration,
cache: Mutex::new(lru::LruCache::new(max_size)),
}
}
    pub async fn get(&self, key: &RequestKey) -> Option<CacheObject> {
        let mut mut_cache = self.cache.lock().await;
        mut_cache.get(key).cloned()
    }
pub async fn put(&self, key: RequestKey, cache_object: CacheObject) {
let mut mut_cache = self.cache.lock().await;
mut_cache.put(key, cache_object);
}
pub async fn periodic_purge(&self) -> (usize, usize) {
let mut mut_cache = self.cache.lock().await;
let mut items_purged = 0;
let now = Instant::now();
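        // Evict expired entries starting from the least-recently-used end, capped at
        // max_purges_per_timer_pop per call; stop early at the first unexpired entry.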
while items_purged < self.cache_configuration.max_purges_per_timer_pop() {
let lru_key_and_value = match mut_cache.peek_lru() {
None => break,
Some(lru_key_and_value) => lru_key_and_value,
};
if lru_key_and_value.1.expired(now) {
let key_clone = lru_key_and_value.0.clone();
mut_cache.pop(&key_clone);
items_purged += 1;
} else {
break;
}
}
(mut_cache.len(), items_purged)
}
}
| 25.346939 | 89 | 0.585346 |
1e3f56a3508be0befb0919b5e97e44bdc2ed6e18 | 13,362 | use libipld::cache::Cache;
use libipld::cache::IpldCache;
use libipld::cbor::DagCbor;
use libipld::cbor::DagCborCodec;
use libipld::cid::Cid;
use libipld::error::Result;
use libipld::ipld::Ipld;
use libipld::prelude::{Decode, Encode, References};
use libipld::store::Store;
use libipld::store::StoreParams;
use libipld::DagCbor;
pub struct ListConfig<S>
where
S: Store,
<S::Params as StoreParams>::Codecs: Into<DagCborCodec>,
DagCborCodec: Into<<S::Params as StoreParams>::Codecs>,
Ipld: References<<S::Params as StoreParams>::Codecs>,
{
store: S,
cache_size: usize,
hash: <S::Params as StoreParams>::Hashes,
width: Option<usize>,
}
impl<S> ListConfig<S>
where
S: Store,
<S::Params as StoreParams>::Codecs: Into<DagCborCodec>,
DagCborCodec: Into<<S::Params as StoreParams>::Codecs>,
Ipld: References<<S::Params as StoreParams>::Codecs>,
{
pub fn new(store: S, hash: <S::Params as StoreParams>::Hashes) -> Self {
Self {
store,
cache_size: 64,
hash,
width: None,
}
}
pub fn set_cache_size(&mut self, cache_size: usize) {
self.cache_size = cache_size;
}
pub fn set_width(&mut self, width: usize) {
self.width = Some(width);
}
fn width<T>(&self) -> usize {
if let Some(width) = self.width {
width
} else {
let elem_size = usize::max(std::mem::size_of::<T>(), std::mem::size_of::<Cid>());
<S::Params as StoreParams>::MAX_BLOCK_SIZE / elem_size
}
}
fn cache<T>(self) -> IpldCache<S, DagCborCodec, Node<T>>
where
T: DagCbor + Clone + Send + Sync,
{
IpldCache::new(self.store, DagCborCodec, self.hash, self.cache_size)
}
}
pub struct List<S: Store, T: DagCbor> {
cache: IpldCache<S, DagCborCodec, Node<T>>,
root: Cid,
tmp: S::TempPin,
}
impl<S, T> List<S, T>
where
S: Store,
<S::Params as StoreParams>::Codecs: Into<DagCborCodec>,
DagCborCodec: Into<<S::Params as StoreParams>::Codecs>,
Ipld: References<<S::Params as StoreParams>::Codecs>,
T: DagCbor + Clone + Send + Sync,
{
pub async fn new(config: ListConfig<S>) -> Result<Self> {
let width = config.width::<T>();
let cache = config.cache();
let tmp = cache.temp_pin().await?;
let root = cache
.insert(Node::new(width as _, 0, vec![]), Some(&tmp))
.await?;
Ok(Self { cache, root, tmp })
}
pub async fn open(config: ListConfig<S>, root: Cid) -> Result<Self> {
let cache = config.cache();
let tmp = cache.temp_pin().await?;
// warm up the cache and make sure it's available
cache.get(&root, Some(&tmp)).await?;
Ok(Self { cache, root, tmp })
}
pub fn root(&self) -> &Cid {
&self.root
}
pub async fn from(config: ListConfig<S>, items: impl Iterator<Item = T>) -> Result<Self> {
let width = config.width::<T>();
let cache = config.cache();
let tmp = cache.temp_pin().await?;
let mut items: Vec<Data<T>> = items.map(Data::Value).collect();
let mut height = 0;
let mut cid = cache
.insert(Node::new(width as _, height, vec![]), Some(&tmp))
.await?;
loop {
let n_items = items.len() / width + 1;
let mut items_next = Vec::with_capacity(n_items);
for chunk in items.chunks(width) {
let node = Node::new(width as u32, height, chunk.to_vec());
cid = cache.insert(node, Some(&tmp)).await?;
items_next.push(Data::Link(cid));
}
if items_next.len() == 1 {
return Ok(Self {
cache,
root: cid,
tmp,
});
}
items = items_next;
height += 1;
}
}
pub async fn push(&mut self, value: T) -> Result<()> {
let mut value = Data::Value(value);
let root = self.cache.get(&self.root, Some(&self.tmp)).await?;
let height = root.height();
let width = root.width();
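        // Walk down the right-most edge of the tree, collecting the node at every level
        // (root first); the new value will be appended along this spine.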
let chain = {
let mut height = root.height();
let mut chain = Vec::with_capacity(height as usize + 1);
chain.push(root);
while height > 0 {
let cid = chain
.last()
.expect("at least one block")
.data()
.last()
.expect("at least one link")
.cid()
.expect("height > 0, payload must be a cid");
let node = self.cache.get(cid, Some(&self.tmp)).await?;
height = node.height();
chain.push(node);
}
chain
};
let mut mutated = false;
let mut last = self
.cache
.insert(Node::new(width as u32, height, vec![]), Some(&self.tmp))
.await?;
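        // Rebuild the spine from the leaf upwards: once a level has absorbed
        // the change, each parent swaps its last link for the rebuilt child;
        // a full node is replaced by a fresh sibling, which may ultimately
        // require a new, taller root.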
for mut node in chain.into_iter().rev() {
if mutated {
let data = node.data_mut();
data.pop();
data.push(value);
last = self.cache.insert(node, Some(&self.tmp)).await?;
value = Data::Link(last);
} else {
let data = node.data_mut();
if data.len() < width {
data.push(value);
last = self.cache.insert(node, Some(&self.tmp)).await?;
value = Data::Link(last);
mutated = true;
} else {
let node = Node::new(width as u32, node.height(), vec![value]);
last = self.cache.insert(node, Some(&self.tmp)).await?;
value = Data::Link(last);
mutated = false;
}
}
}
if !mutated {
let children = vec![Data::Link(*self.root()), value];
let node = Node::new(width as u32, height + 1, children);
last = self.cache.insert(node, Some(&self.tmp)).await?;
}
self.root = last;
Ok(())
}
pub async fn pop(&mut self) -> Result<Option<T>> {
// TODO
Ok(None)
}
pub async fn get(&mut self, mut index: usize) -> Result<Option<T>> {
let node = self.cache.get(&self.root, Some(&self.tmp)).await?;
let mut node_ref = &node;
let width = node.width();
let mut height = node.height();
let mut node;
if index > width.pow(height + 1) {
return Ok(None);
}
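        // Descend from the root: at each level the child slot is
        // `index / width^height` and the remainder is the index within that
        // subtree.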
loop {
let data_index = index / width.pow(height);
if let Some(data) = node_ref.data().get(data_index) {
if height == 0 {
return Ok(Some(data.value().unwrap().clone()));
}
let cid = data.cid().unwrap();
node = self.cache.get(cid, Some(&self.tmp)).await?;
node_ref = &node;
index %= width.pow(height);
height = node.height();
} else {
return Ok(None);
}
}
}
pub async fn set(&mut self, _index: usize, _value: T) -> Result<()> {
// TODO
Ok(())
}
pub async fn len(&mut self) -> Result<usize> {
let root = self.cache.get(&self.root, Some(&self.tmp)).await?;
let width = root.width();
let mut height = root.height();
let mut size = width.pow(height + 1);
let mut node = root;
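        // Start from the maximum capacity of a tree of this height and
        // subtract the unused slots along the right-most spine.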
loop {
let data = node.data();
size -= width.pow(height) * (width - data.len());
if height == 0 {
return Ok(size);
}
let cid = data.last().unwrap().cid().unwrap();
node = self.cache.get(cid, Some(&self.tmp)).await?;
height = node.height();
}
}
pub async fn is_empty(&mut self) -> Result<bool> {
let root = self.cache.get(&self.root, Some(&self.tmp)).await?;
Ok(root.data().is_empty())
}
pub fn iter(&mut self) -> ListIter<'_, S, T> {
ListIter {
list: self,
index: 0,
}
}
pub async fn flush<A: AsRef<[u8]> + Send + Sync>(&mut self, alias: A) -> Result<()> {
self.cache.alias(alias, Some(self.root())).await?;
self.tmp = self.cache.temp_pin().await?;
self.cache.flush().await?;
Ok(())
}
}
pub struct ListIter<'a, S: Store, T: DagCbor> {
list: &'a mut List<S, T>,
index: usize,
}
impl<'a, S, T: DagCbor> ListIter<'a, S, T>
where
S: Store,
<S::Params as StoreParams>::Codecs: Into<DagCborCodec>,
DagCborCodec: Into<<S::Params as StoreParams>::Codecs>,
Ipld: References<<S::Params as StoreParams>::Codecs>,
T: Decode<DagCborCodec> + Encode<DagCborCodec> + Clone + Send + Sync,
{
#[allow(clippy::should_implement_trait)]
pub async fn next(&mut self) -> Result<Option<T>> {
let elem = self.list.get(self.index).await?;
self.index += 1;
Ok(elem)
}
}
#[derive(Clone, Debug, DagCbor)]
struct Node<T: DagCbor> {
width: u32,
height: u32,
data: Vec<Data<T>>,
}
impl<T: DagCbor> Node<T> {
fn new(width: u32, height: u32, data: Vec<Data<T>>) -> Self {
Node {
width,
height,
data,
}
}
fn width(&self) -> usize {
self.width as usize
}
fn height(&self) -> u32 {
self.height
}
fn data(&self) -> &[Data<T>] {
&self.data
}
fn data_mut(&mut self) -> &mut Vec<Data<T>> {
&mut self.data
}
}
#[derive(Clone, Debug, DagCbor)]
enum Data<T: DagCbor> {
Value(T),
Link(Cid),
}
impl<T: DagCbor> Data<T> {
fn value(&self) -> Option<&T> {
if let Self::Value(value) = self {
Some(value)
} else {
None
}
}
fn cid(&self) -> Option<&Cid> {
if let Self::Link(cid) = self {
Some(cid)
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use async_std::task;
use libipld::mem::MemStore;
use libipld::multihash::Code;
use libipld::store::DefaultParams;
use model::*;
#[async_std::test]
async fn test_list() -> Result<()> {
let store = MemStore::<DefaultParams>::default();
let mut config = ListConfig::new(store, Code::Blake2b256);
config.set_width(3);
let mut list = List::new(config).await?;
for i in 0..13 {
assert_eq!(list.get(i).await?, None);
assert_eq!(list.len().await?, i);
list.push(i as i64).await?;
for j in 0..i {
assert_eq!(list.get(j).await?, Some(j as i64));
}
}
/*for i in 0..13 {
list.set(i, (i as i128 + 1).into())?;
assert_eq!(list.get(i)?, int(i + 1));
}*/
/*for i in (0..13).rev() {
assert_eq!(vec.len()?, i + 1);
assert_eq!(vec.pop()?, int(i));
}*/
Ok(())
}
#[async_std::test]
async fn test_list_from() -> Result<()> {
let store = MemStore::<DefaultParams>::default();
let mut config = ListConfig::new(store, Code::Blake2b256);
config.set_width(3);
let data: Vec<_> = (0..13).map(|i| i as i64).collect();
let mut list = List::from(config, data.clone().into_iter()).await?;
let mut data2 = vec![];
let mut iter = list.iter();
while let Some(elem) = iter.next().await? {
data2.push(elem)
}
assert_eq!(data, data2);
Ok(())
}
#[test]
fn list_vec_eqv() {
const LEN: usize = 25;
model! {
Model => let mut vec = Vec::new(),
Implementation => let mut list = {
let store = MemStore::<DefaultParams>::default();
let mut config = ListConfig::new(store, Code::Blake2b256);
config.set_width(3);
let fut = List::new(config);
task::block_on(fut).unwrap()
},
Push(usize)(i in 0..LEN) => {
vec.push(i as i64);
task::block_on(list.push(i as i64)).unwrap();
},
Get(usize)(i in 0..LEN) => {
let r1 = vec.get(i).cloned();
let r2 = task::block_on(list.get(i)).unwrap();
assert_eq!(r1, r2);
},
Len(usize)(_ in 0..LEN) => {
let r1 = vec.len();
let r2 = task::block_on(list.len()).unwrap();
assert_eq!(r1, r2);
},
IsEmpty(usize)(_ in 0..LEN) => {
let r1 = vec.is_empty();
let r2 = task::block_on(list.is_empty()).unwrap();
assert_eq!(r1, r2);
}
}
}
#[async_std::test]
async fn test_width() -> Result<()> {
let store = MemStore::<DefaultParams>::default();
let config = ListConfig::new(store, Code::Blake2b256);
let n = DefaultParams::MAX_BLOCK_SIZE / 8 * 10;
let _list = List::from(config, (0..n).map(|n| n as u64)).await?;
Ok(())
}
}
| 29.959641 | 94 | 0.49304 |
8fc7de1f082dbb1bfd7c399aaf296861e50da50b | 5,430 | //! Rust wrapper for darknet convolutional neural networks.
extern crate darknet_sys as ffi;
extern crate libc;
mod errors;
use crate::errors::Error;
use std::ffi::CString;
use std::path::Path;
use std::ptr;
pub struct Metadata(pub ffi::metadata);
/// Load metadata from a file
pub fn load_metadata<P: AsRef<Path>>(metadata_file_path: P) -> Result<Metadata, Error> {
let metadata_file_path_c =
CString::new(metadata_file_path.as_ref().to_string_lossy().as_bytes()).map_err(|_| {
Error::new(
"Failed to convert config file path to CString when loading metadata.".to_owned(),
)
})?;
let md = unsafe { ffi::get_metadata(metadata_file_path_c.as_ptr() as *mut _) };
Ok(Metadata(md))
}
/// A wrapper for a darknet network
pub struct Network(*mut ffi::network);
/// Free the underlying network when it goes out of scope
impl Drop for Network {
fn drop(&mut self) {
unsafe { ffi::free_network(self.0) }
}
}
pub fn set_batch_network(network: &mut Network, batch: i32) {
unsafe { ffi::set_batch_network(network.0, batch) }
}
/// Load the network from a configuration file
pub fn load_network<P: AsRef<Path>>(
config_path: P,
weights_path: Option<P>,
clear: bool,
) -> Result<Network, Error> {
let config_path_c =
CString::new(config_path.as_ref().to_string_lossy().as_bytes()).map_err(|_| {
Error::new(
"Failed to convert config file path to CString when loading network.".to_owned(),
)
})?;
let network = match weights_path {
Some(w_path) => {
let weights_path_c = CString::new(w_path.as_ref().to_string_lossy().as_bytes())
.map_err(|_| {
Error::new(
"Failed to convert weight file path to CString when loading network."
.to_string(),
)
})?;
unsafe {
ffi::load_network(
config_path_c.as_ptr() as *mut _,
weights_path_c.as_ptr() as *mut _,
clear as i32,
)
}
}
None => unsafe {
ffi::load_network(
config_path_c.as_ptr() as *mut _,
ptr::null_mut() as *mut _,
clear as i32,
)
},
};
Ok(Network(network))
}
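// For illustration only (the config, weight and image paths below are
// placeholders): a typical flow loads a network once and then runs
// predictions against images.
//
//     let mut net = load_network("cfg/yolov3.cfg", Some("yolov3.weights"), false)?;
//     set_batch_network(&mut net, 1);
//     let image = load_image_color("dog.jpg", 416, 416)?;
//     predict_image(&mut net, &image);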
//pub fn load
pub fn predict_image(network: &mut Network, image: &Image) {
unsafe { ffi::network_predict_image(network.0, image.0) };
}
pub fn forward_network(network: &mut Network) {
unsafe { ffi::forward_network(network.0) }
}
pub fn backward_network(network: &mut Network) {
unsafe { ffi::backward_network(network.0) }
}
pub fn update_network(network: &mut Network) {
unsafe { ffi::update_network(network.0) }
}
/// A wrapper for a darknet image
pub struct Image(pub ffi::image);
impl Drop for Image {
fn drop(&mut self) {
unsafe { ffi::free_image(self.0) }
}
}
pub fn load_image_color<P: AsRef<Path>>(
image_filepath: P,
width: i32,
height: i32,
) -> Result<Image, Error> {
let image_filepath_c = CString::new(image_filepath.as_ref().to_string_lossy().as_bytes())
.map_err(|_| Error::new("Error converting image_filepath into a CString".to_string()))?;
let image =
unsafe { ffi::load_image_color(image_filepath_c.as_ptr() as *mut _, width, height) };
Ok(Image(image))
}
pub fn resize_image(im: &Image, w: i32, h: i32) -> Image {
let resize_image = unsafe { ffi::resize_image(im.0, w, h) };
Image(resize_image)
}
pub fn save_image(im: &Image, image_file_name: &str) {
let image_fn_c = CString::new(image_file_name.to_string().as_bytes())
.map_err(|_| Error::new("Error converting image name into a CString".to_string()))
.unwrap();
unsafe { ffi::save_image(im.0, image_fn_c.as_ptr()) }
}
pub struct Detection(*mut ffi::detection);
pub fn do_nms_obj(dets: &mut Detection, total: i32, classes: i32, thresh: f32) {
unsafe { ffi::do_nms_obj(dets.0, total, classes, thresh) }
}
pub fn get_network_boxes(
    net: &Network,
w: i32,
h: i32,
thresh: f32,
hier: f32,
map: &mut i32,
relative: i32,
num: &mut i32,
) -> Detection {
let m = map as *mut i32;
let n = num as *mut i32;
let det = unsafe { ffi::get_network_boxes(net.0, w, h, thresh, hier, m, relative, n) };
Detection(det)
}
pub struct Alphabet(*mut *mut ffi::image);
pub fn load_alphabet() -> Alphabet {
Alphabet(unsafe { ffi::load_alphabet() })
}
pub struct Names(*mut *mut ::std::os::raw::c_char);
pub fn load_names(names_arr: Vec<&str>) -> Result<Names, Error> {
    // Leak each CString, and the vector of pointers itself, so that the
    // memory stays valid for as long as the darknet C code holds on to it.
    let mut names_c: Vec<*mut std::os::raw::c_char> = Vec::new();
    for s in names_arr {
        let name_c = CString::new(s)
            .map_err(|_| Error::new("Error converting name into a CString".to_string()))?;
        names_c.push(name_c.into_raw());
    }
    let ptr = names_c.as_mut_ptr();
    std::mem::forget(names_c);
    Ok(Names(ptr))
}
pub fn draw_detections(
img: &Image,
dets: Detection,
num: i32,
thresh: f32,
names: Names,
alphabet: Alphabet,
classes: i32,
) {
unsafe { ffi::draw_detections(img.0, dets.0, num, thresh, names.0, alphabet.0, classes) }
}
pub fn free_detections(dets: &mut Detection, n: i32) {
unsafe { ffi::free_detections(dets.0, n) }
}
| 26.881188 | 98 | 0.604604 |
9c371380bea2815e8eaacdc4e48e3c107c31c8fb | 608 | use crate::y86_defines::*;
use kaze::*;
pub fn pc_sel<'a>(c: &'a Context<'a>) -> &'a Module {
let f = c.module("SelectPC");
let pred_pc = f.input("f_predPC", QWORD);
let m_icode = f.input("f_M_icode", NIBBLE);
let m_val_a = f.input("f_M_valA", QWORD);
let w_icode = f.input("f_W_icode", NIBBLE);
let w_val_m = f.input("f_W_valM", QWORD);
let cnd = f.input("f_cnd", BIT);
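    // Next-PC selection: a conditional jump in Memory whose condition turned
    // out false falls back to valA (which carries the fall-through address in
    // this design), a RET in Writeback jumps to the value read from memory
    // (valM), and otherwise the predicted PC is used.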
let mux_1 = f.mux(f.lit(IJXX, NIBBLE).eq(m_icode) & f.low().eq(cnd), m_val_a, pred_pc);
let mux_2 = f.mux(f.lit(IRET, NIBBLE).eq(w_icode), w_val_m, mux_1);
f.output("f_pc", mux_2);
f
} | 30.4 | 91 | 0.600329 |
acb954bf1d2ef86893dd103b9d9cf006b07c3b05 | 1,012 | use spongemock::mock;
use std::io::{self, BufRead};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
/// Outputs a MoCkInG version of its input
///
/// Takes each INPUT_TO_MOCK argument, converts it to a MoCkEd version and outputs it on its own
/// line.
struct Opt {
/// Each argument will be MoCkEd on a new line in the output.
///
/// If INPUT_TO_MOCK is not provided, input will instead be read from stdin.
#[structopt(name = "INPUT_TO_MOCK")]
mock_vals: Vec<String>,
}
fn main() {
std::process::exit(match run_app() {
Ok(_) => 0,
Err(error) => {
eprintln!("{:?}", error);
1
}
});
}
fn run_app() -> Result<(), io::Error> {
let opt: Opt = Opt::from_args();
if !opt.mock_vals.is_empty() {
for mock_val in opt.mock_vals {
println!("{}", mock(&mock_val));
}
} else {
for line in io::stdin().lock().lines() {
println!("{}", mock(&line?));
}
}
Ok(())
}
| 24.095238 | 96 | 0.552372 |
76d372fee2376bc7bf26d09fece653e55e043bea | 125 | //! Highly optimized non-blocking communication channels.
#![warn(missing_docs)]
pub mod mpsc;
pub mod oneshot;
mod queue;
| 15.625 | 57 | 0.752 |
d9a1bb10e04a509a92cd54488cbae87085f9ea28 | 5,822 | use core::convert::TryInto;
use contract::{
contract_api::{runtime, storage, system},
unwrap_or_revert::UnwrapOrRevert,
};
use types::{
account::PublicKey,
bytesrepr::{FromBytes, ToBytes},
CLTyped, URef, U512,
};
use crate::{api::Api, error::Error};
use vesting_logic::{VestingError, VestingTrait};
pub const INIT_FLAG_KEY: &str = "is_initialized";
pub const ADMIN_KEY: &str = "admin_account";
pub const RECIPIENT_KEY: &str = "recipient_account";
pub const PURSE_NAME: &str = "vesting_main_purse";
type Amount = U512;
type Time = U512;
struct VestingContract;
impl VestingTrait<Amount, Time> for VestingContract {
fn set_amount(&mut self, name: &str, value: Amount) {
set_key(name, value)
}
fn amount(&self, name: &str) -> Amount {
key(name)
}
fn set_time(&mut self, name: &str, value: Time) {
set_key(name, value);
}
fn time(&self, name: &str) -> Time {
key(name)
}
fn set_boolean(&mut self, name: &str, value: bool) {
set_key(name, value)
}
fn boolean(&self, name: &str) -> bool {
key(name)
}
fn current_timestamp(&self) -> Time {
let time: u64 = runtime::get_blocktime().into();
time.into()
}
}
fn construct() {
let mut vault = VestingContract;
match Api::from_args() {
Api::Init(admin, recipient, vesting_config) => {
set_admin_account(admin);
set_recipient_account(recipient);
vault.init(
vesting_config.cliff_time,
vesting_config.cliff_amount,
vesting_config.drip_period,
vesting_config.drip_amount,
vesting_config.total_amount,
vesting_config.admin_release_period,
);
}
_ => runtime::revert(Error::UnknownConstructorCommand),
}
}
fn entry_point() {
let mut vault = VestingContract;
match Api::from_args() {
Api::Pause => {
verify_admin_account();
match vault.pause() {
Ok(()) => {}
Err(VestingError::AlreadyPaused) => runtime::revert(Error::AlreadyPaused),
_ => runtime::revert(Error::UnexpectedVestingError),
}
}
Api::Unpause => {
verify_admin_account();
match vault.unpause() {
Ok(()) => {}
Err(VestingError::AlreadyUnpaused) => runtime::revert(Error::AlreadyUnpaused),
_ => runtime::revert(Error::UnexpectedVestingError),
}
}
Api::Withdraw(purse, amount) => {
verify_recipient_account();
match vault.withdraw(amount) {
Ok(()) => transfer_out_clx_to_purse(purse, amount),
Err(VestingError::NotEnoughBalance) => runtime::revert(Error::NotEnoughBalance),
_ => runtime::revert(Error::UnexpectedVestingError),
}
}
Api::AdminRelease(purse) => {
verify_admin_account();
match vault.admin_release() {
Ok(amount) => transfer_out_clx_to_purse(purse, amount),
Err(VestingError::AdminReleaseErrorNotPaused) => runtime::revert(Error::NotPaused),
Err(VestingError::AdminReleaseErrorNothingToWithdraw) => {
runtime::revert(Error::NothingToWithdraw)
}
Err(VestingError::AdminReleaseErrorNotEnoughTimeElapsed) => {
runtime::revert(Error::NotEnoughTimeElapsed)
}
_ => runtime::revert(Error::UnexpectedVestingError),
}
}
_ => runtime::revert(Error::UnknownVestingCallCommand),
}
}
fn is_initialized() -> bool {
runtime::has_key(INIT_FLAG_KEY)
}
fn mark_as_initialized() {
set_key(INIT_FLAG_KEY, 1);
}
fn set_admin_account(admin: PublicKey) {
set_key(ADMIN_KEY, admin);
}
fn admin_account() -> PublicKey {
key(ADMIN_KEY)
}
fn set_recipient_account(recipient: PublicKey) {
set_key(RECIPIENT_KEY, recipient);
}
fn recipient_account() -> PublicKey {
key(RECIPIENT_KEY)
}
fn verify_admin_account() {
let admin = admin_account();
let caller = runtime::get_caller();
if admin != caller {
runtime::revert(Error::NotTheAdminAccount);
}
}
fn verify_recipient_account() {
let recipient = recipient_account();
let caller = runtime::get_caller();
if recipient != caller {
runtime::revert(Error::NotTheRecipientAccount);
}
}
fn transfer_out_clx_to_purse(purse: URef, amount: U512) {
let local_purse = local_purse();
system::transfer_from_purse_to_purse(local_purse, purse, amount)
.unwrap_or_revert_with(Error::PurseTransferError);
}
fn local_purse() -> URef {
let key = runtime::get_key(PURSE_NAME).unwrap_or_revert_with(Error::LocalPurseKeyMissing);
key.into_uref().unwrap_or_revert_with(Error::UnexpectedType)
}
fn key<T: FromBytes + CLTyped>(name: &str) -> T {
let key = runtime::get_key(name)
.unwrap_or_revert_with(Error::MissingKey)
.try_into()
.unwrap_or_revert_with(Error::UnexpectedType);
storage::read(key)
.unwrap_or_revert_with(Error::MissingKey)
.unwrap_or_revert_with(Error::UnexpectedType)
}
fn set_key<T: ToBytes + CLTyped>(name: &str, value: T) {
match runtime::get_key(name) {
Some(key) => {
let key_ref = key.try_into().unwrap_or_revert();
storage::write(key_ref, value);
}
None => {
let key = storage::new_uref(value).into();
runtime::put_key(name, key);
}
}
}
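// Entry point of the contract: the first call runs the constructor and marks
// the vault as initialized; every later call is dispatched to the regular
// vesting API.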
#[no_mangle]
pub extern "C" fn vesting() {
if !is_initialized() {
construct();
mark_as_initialized();
} else {
entry_point();
}
}
| 29.40404 | 99 | 0.600653 |
e68055d34d3acc0fae2eed2efb42486d901d85e2 | 6,486 | //! The `Slot` and `Epoch` types are defined as new types over u64 to enforce type-safety between
//! the two types.
//!
//! `Slot` and `Epoch` have implementations which permit conversion, comparison and math operations
//! between each and `u64`, however specifically not between each other.
//!
//! All math operations on `Slot` and `Epoch` are saturating, they never wrap.
//!
//! It would be easy to define `PartialOrd` and other traits generically across all types which
//! implement `Into<u64>`, however this would allow operations between `Slots` and `Epochs` which
//! may lead to programming errors which are not detected by the compiler.
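//
// For illustration, assuming 32 slots per epoch: slot 70 belongs to epoch 2,
// which spans slots 64..=95.
//
//     let epoch = Slot::new(70).epoch(32);               // Epoch(2)
//     assert_eq!(epoch.start_slot(32), Slot::new(64));
//     assert_eq!(epoch.end_slot(32), Slot::new(95));
//     assert_eq!(epoch.position(Slot::new(70), 32), Some(6));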
use crate::test_utils::TestRandom;
use crate::{ChainSpec, SignedRoot};
use rand::RngCore;
use safe_arith::{ArithError, SafeArith};
use serde_derive::{Deserialize, Serialize};
use ssz::{ssz_encode, Decode, DecodeError, Encode};
use std::fmt;
use std::hash::Hash;
use std::iter::Iterator;
#[cfg(feature = "legacy-arith")]
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Rem, Sub, SubAssign};
#[cfg_attr(feature = "arbitrary-fuzz", derive(arbitrary::Arbitrary))]
#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Slot(#[serde(with = "eth2_serde_utils::quoted_u64")] u64);
#[cfg_attr(feature = "arbitrary-fuzz", derive(arbitrary::Arbitrary))]
#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Epoch(#[serde(with = "eth2_serde_utils::quoted_u64")] u64);
impl_common!(Slot);
impl_common!(Epoch);
impl Slot {
pub const fn new(slot: u64) -> Slot {
Slot(slot)
}
pub fn epoch(self, slots_per_epoch: u64) -> Epoch {
Epoch::new(self.0)
.safe_div(slots_per_epoch)
.expect("slots_per_epoch is not 0")
}
pub fn max_value() -> Slot {
Slot(u64::max_value())
}
}
impl Epoch {
pub const fn new(slot: u64) -> Epoch {
Epoch(slot)
}
pub fn max_value() -> Epoch {
Epoch(u64::max_value())
}
/// The first slot in the epoch.
pub fn start_slot(self, slots_per_epoch: u64) -> Slot {
Slot::from(self.0.saturating_mul(slots_per_epoch))
}
/// The last slot in the epoch.
pub fn end_slot(self, slots_per_epoch: u64) -> Slot {
Slot::from(
self.0
.saturating_mul(slots_per_epoch)
.saturating_add(slots_per_epoch.saturating_sub(1)),
)
}
/// Position of some slot inside an epoch, if any.
///
/// E.g., the first `slot` in `epoch` is at position `0`.
pub fn position(self, slot: Slot, slots_per_epoch: u64) -> Option<usize> {
let start = self.start_slot(slots_per_epoch);
let end = self.end_slot(slots_per_epoch);
if slot >= start && slot <= end {
slot.as_usize().checked_sub(start.as_usize())
} else {
None
}
}
/// Compute the sync committee period for an epoch.
pub fn sync_committee_period(&self, spec: &ChainSpec) -> Result<u64, ArithError> {
Ok(self
.safe_div(spec.epochs_per_sync_committee_period)?
.as_u64())
}
pub fn slot_iter(&self, slots_per_epoch: u64) -> SlotIter {
SlotIter {
current_iteration: 0,
epoch: self,
slots_per_epoch,
}
}
}
pub struct SlotIter<'a> {
current_iteration: u64,
epoch: &'a Epoch,
slots_per_epoch: u64,
}
impl<'a> Iterator for SlotIter<'a> {
type Item = Slot;
fn next(&mut self) -> Option<Slot> {
if self.current_iteration >= self.slots_per_epoch {
None
} else {
let start_slot = self.epoch.start_slot(self.slots_per_epoch);
let previous = self.current_iteration;
self.current_iteration = self.current_iteration.checked_add(1)?;
start_slot.safe_add(previous).ok()
}
}
}
#[cfg(test)]
mod slot_tests {
use super::*;
all_tests!(Slot);
}
#[cfg(test)]
mod epoch_tests {
use super::*;
all_tests!(Epoch);
#[test]
fn epoch_start_end() {
let slots_per_epoch = 8;
let epoch = Epoch::new(0);
assert_eq!(epoch.start_slot(slots_per_epoch), Slot::new(0));
assert_eq!(epoch.end_slot(slots_per_epoch), Slot::new(7));
}
#[test]
fn end_slot_boundary_test() {
let slots_per_epoch = 32;
// The last epoch which can be represented by u64.
let epoch = Epoch::new(u64::max_value() / slots_per_epoch);
        // The last slot in this epoch should equal u64::max_value().
assert_eq!(epoch.end_slot(slots_per_epoch), Slot::new(u64::max_value()));
}
#[test]
fn position() {
let slots_per_epoch = 8;
let epoch = Epoch::new(0);
assert_eq!(epoch.position(Slot::new(0), slots_per_epoch), Some(0));
assert_eq!(epoch.position(Slot::new(1), slots_per_epoch), Some(1));
assert_eq!(epoch.position(Slot::new(2), slots_per_epoch), Some(2));
assert_eq!(epoch.position(Slot::new(3), slots_per_epoch), Some(3));
assert_eq!(epoch.position(Slot::new(4), slots_per_epoch), Some(4));
assert_eq!(epoch.position(Slot::new(5), slots_per_epoch), Some(5));
assert_eq!(epoch.position(Slot::new(6), slots_per_epoch), Some(6));
assert_eq!(epoch.position(Slot::new(7), slots_per_epoch), Some(7));
assert_eq!(epoch.position(Slot::new(8), slots_per_epoch), None);
let epoch = Epoch::new(1);
assert_eq!(epoch.position(Slot::new(7), slots_per_epoch), None);
assert_eq!(epoch.position(Slot::new(8), slots_per_epoch), Some(0));
}
#[test]
fn slot_iter() {
let slots_per_epoch = 8;
let epoch = Epoch::new(0);
let mut slots = vec![];
for slot in epoch.slot_iter(slots_per_epoch) {
slots.push(slot);
}
assert_eq!(slots.len(), slots_per_epoch as usize);
for i in 0..slots_per_epoch {
assert_eq!(Slot::from(i), slots[i as usize])
}
}
#[test]
fn max_epoch_ssz() {
let max_epoch = Epoch::max_value();
assert_eq!(
&max_epoch.as_ssz_bytes(),
&[255, 255, 255, 255, 255, 255, 255, 255]
);
assert_eq!(
max_epoch,
Epoch::from_ssz_bytes(&max_epoch.as_ssz_bytes()).unwrap()
);
}
}
| 30.167442 | 99 | 0.615634 |
f9052495a099f5f6311baab85eeda07913a87a4b | 5,199 | use crate::context::Context;
use crate::window::FlutterEvent;
use copypasta::{ClipboardContext, ClipboardProvider};
use flutter_engine::tasks::TaskRunnerHandler;
use flutter_engine::FlutterOpenGLHandler;
use flutter_plugins::platform::{AppSwitcherDescription, MimeError, PlatformHandler};
use flutter_plugins::textinput::TextInputHandler;
use flutter_plugins::window::{PositionParams, WindowHandler};
use glutin::event_loop::EventLoopProxy;
use parking_lot::Mutex;
use std::ffi::CStr;
use std::os::raw::{c_char, c_void};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use thiserror::Error;
// TODO: Investigate removing mutex
pub struct WinitPlatformTaskHandler {
proxy: Mutex<EventLoopProxy<FlutterEvent>>,
}
impl WinitPlatformTaskHandler {
pub fn new(proxy: EventLoopProxy<FlutterEvent>) -> Self {
Self {
proxy: Mutex::new(proxy),
}
}
}
impl TaskRunnerHandler for WinitPlatformTaskHandler {
fn wake(&self) {
self.proxy
.lock()
.send_event(FlutterEvent::WakePlatformThread)
.ok();
}
}
pub struct WinitOpenGLHandler {
context: Arc<Mutex<Context>>,
resource_context: Arc<Mutex<Context>>,
}
impl WinitOpenGLHandler {
pub fn new(context: Arc<Mutex<Context>>, resource_context: Arc<Mutex<Context>>) -> Self {
Self {
context,
resource_context,
}
}
}
impl FlutterOpenGLHandler for WinitOpenGLHandler {
fn swap_buffers(&self) -> bool {
self.context.lock().present()
}
fn make_current(&self) -> bool {
unsafe { self.context.lock().make_current() }
}
fn clear_current(&self) -> bool {
unsafe { self.context.lock().make_not_current() }
}
fn fbo_callback(&self) -> u32 {
0
}
fn make_resource_current(&self) -> bool {
unsafe { self.resource_context.lock().make_current() }
}
fn gl_proc_resolver(&self, proc: *const c_char) -> *mut c_void {
unsafe {
if let Ok(proc) = CStr::from_ptr(proc).to_str() {
return self.context.lock().get_proc_address(proc) as _;
}
std::ptr::null_mut()
}
}
}
pub struct WinitPlatformHandler {
clipboard: ClipboardContext,
context: Arc<Mutex<Context>>,
}
#[derive(Error, Debug)]
pub enum ClipboardError {
#[error("Clipboard not available!")]
ClipboardUnavailable,
}
impl WinitPlatformHandler {
pub fn new(context: Arc<Mutex<Context>>) -> Result<Self, ClipboardError> {
if let Ok(cb) = ClipboardContext::new() {
Ok(Self {
clipboard: cb,
context,
})
} else {
Err(ClipboardError::ClipboardUnavailable)
}
}
}
impl PlatformHandler for WinitPlatformHandler {
fn set_application_switcher_description(&mut self, description: AppSwitcherDescription) {
self.context.lock().window().set_title(&description.label);
}
fn set_clipboard_data(&mut self, text: String) {
if let Err(err) = self.clipboard.set_contents(text) {
log::error!("{}", err);
}
}
fn get_clipboard_data(&mut self, mime: &str) -> Result<String, MimeError> {
if mime != "text/plain" {
return Err(MimeError);
}
let result = self.clipboard.get_contents();
if let Err(err) = &result {
log::error!("{}", err);
}
Ok(result.unwrap_or_default())
}
}
pub struct WinitWindowHandler {
context: Arc<Mutex<Context>>,
maximized: bool,
visible: bool,
close: Arc<AtomicBool>,
}
impl WinitWindowHandler {
pub fn new(context: Arc<Mutex<Context>>, close: Arc<AtomicBool>) -> Self {
Self {
context,
maximized: false,
visible: false,
close,
}
}
}
impl WindowHandler for WinitWindowHandler {
fn close(&mut self) {
self.close.store(true, Ordering::Relaxed);
}
fn show(&mut self) {
self.visible = true;
self.context.lock().window().set_visible(self.visible);
}
fn hide(&mut self) {
self.visible = false;
self.context.lock().window().set_visible(self.visible);
}
fn is_visible(&mut self) -> bool {
self.visible
}
fn maximize(&mut self) {
self.maximized = true;
self.context.lock().window().set_maximized(self.maximized);
}
fn restore(&mut self) {
self.maximized = false;
self.context.lock().window().set_maximized(self.maximized);
}
fn is_maximized(&mut self) -> bool {
self.maximized
}
fn iconify(&mut self) {}
fn is_iconified(&mut self) -> bool {
false
}
fn set_pos(&mut self, _pos: PositionParams) {}
fn get_pos(&mut self) -> PositionParams {
PositionParams { x: 0.0, y: 0.0 }
}
fn start_drag(&mut self) {}
fn end_drag(&mut self) {}
}
pub struct WinitTextInputHandler {}
impl Default for WinitTextInputHandler {
fn default() -> Self {
Self {}
}
}
impl TextInputHandler for WinitTextInputHandler {
fn show(&mut self) {}
fn hide(&mut self) {}
}
| 24.523585 | 93 | 0.610117 |
29b4a24fc34e2a9c976487d3c84ac78c64bcf6ab | 22,497 | use crate::uuid::{convert_str_to_array, UUID};
use lazy_static::lazy_static;
use std::collections::HashMap;
lazy_static! {
pub static ref TYPE_MAP: HashMap<&'static str, HashMap<[u8; 16], &'static str>> = {
let mut cat = HashMap::new();
let mut m = HashMap::new();
m.insert(
convert_str_to_array("00000000-0000-0000-0000-000000000000").unwrap(),
"Unused entry",
);
m.insert(
convert_str_to_array("024DEE41-33E7-11D3-9D69-0008C781F39F").unwrap(),
"MBR partition scheme",
);
m.insert(
convert_str_to_array("C12A7328-F81F-11D2-BA4B-00A0C93EC93B").unwrap(),
"EFI System partition",
);
m.insert(
convert_str_to_array("21686148-6449-6E6F-744E-656564454649").unwrap(),
"BIOS boot partition",
);
m.insert(
convert_str_to_array("D3BFE2DE-3DAF-11DF-BA40-E3A556D89593").unwrap(),
"Intel Fast Flash (iFFS) partition (for Intel Rapid Start technology)",
);
m.insert(
convert_str_to_array("F4019732-066E-4E12-8273-346C5641494F").unwrap(),
"Sony boot partition",
);
m.insert(
convert_str_to_array("BFBFAFE7-A34F-448A-9A5B-6213EB736C22").unwrap(),
"Lenovo boot partition",
);
cat.insert("_", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("E3C9E316-0B5C-4DB8-817D-F92DF00215AE").unwrap(),
"Microsoft Reserved Partition (MSR)",
);
m.insert(
convert_str_to_array("EBD0A0A2-B9E5-4433-87C0-68B6B72699C7").unwrap(),
"Basic data partition",
);
m.insert(
convert_str_to_array("5808C8AA-7E8F-42E0-85D2-E1E90434CFB3").unwrap(),
"Logical Disk Manager (LDM) metadata partition",
);
m.insert(
convert_str_to_array("AF9B60A0-1431-4F62-BC68-3311714A69AD").unwrap(),
"Logical Disk Manager data partition",
);
m.insert(
convert_str_to_array("DE94BBA4-06D1-4D40-A16A-BFD50179D6AC").unwrap(),
"Windows Recovery Environment",
);
m.insert(
convert_str_to_array("37AFFC90-EF7D-4E96-91C3-2D7AE055B174").unwrap(),
"IBM General Parallel File System (GPFS) partition",
);
m.insert(
convert_str_to_array("E75CAF8F-F680-4CEE-AFA3-B001E56EFC2D").unwrap(),
"Storage Spaces partition",
);
cat.insert("Windows", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("75894C1E-3AEB-11D3-B7C1-7B03A0000000").unwrap(),
"Data partition",
);
m.insert(
convert_str_to_array("E2A1E728-32E3-11D6-A682-7B03A0000000").unwrap(),
"Service Partition",
);
cat.insert("HP-UX", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("0FC63DAF-8483-4772-8E79-3D69D8477DE4").unwrap(),
"Linux filesystem data",
);
m.insert(
convert_str_to_array("A19D880F-05FC-4D3B-A006-743F0F84911E").unwrap(),
"RAID partition",
);
m.insert(
convert_str_to_array("44479540-F297-41B2-9AF7-D131D5F0458A").unwrap(),
"Root partition (x86)",
);
m.insert(
convert_str_to_array("4F68BCE3-E8CD-4DB1-96E7-FBCAF984B709").unwrap(),
"Root partition (x86-64)",
);
m.insert(
convert_str_to_array("69DAD710-2CE4-4E3C-B16C-21A1D49ABED3").unwrap(),
"Root partition (32-bit ARM)",
);
m.insert(
convert_str_to_array("B921B045-1DF0-41C3-AF44-4C6F280D3FAE").unwrap(),
"Root partition (64-bit ARM/AArch64)",
);
m.insert(
convert_str_to_array("A2A0D0EB-E5B9-3344-87C0-68B6B72699C7").unwrap(),
"Data partition",
);
m.insert(
convert_str_to_array("AF3DC60F-8384-7247-8E79-3D69D8477DE4").unwrap(),
"Data partition",
);
m.insert(
convert_str_to_array("0657FD6D-A4AB-43C4-84E5-0933C84B4F4F").unwrap(),
"Swap partition",
);
m.insert(
convert_str_to_array("E6D6D379-F507-44C2-A23C-238F2A3DF928").unwrap(),
"Logical Volume Manager (LVM) partition",
);
m.insert(
convert_str_to_array("933AC7E1-2EB4-4F13-B844-0E14E2AEF915").unwrap(),
"/home partition",
);
m.insert(
convert_str_to_array("3B8F8425-20E0-4F3B-907F-1A25A76F98E8").unwrap(),
"/srv (server data) partition",
);
m.insert(
convert_str_to_array("7FFEC5C9-2D00-49B7-8941-3EA10A5586B7").unwrap(),
"Plain dm-crypt partition",
);
m.insert(
convert_str_to_array("CA7D7CCB-63ED-4C53-861C-1742536059CC").unwrap(),
"LUKS partition",
);
m.insert(
convert_str_to_array("8DA63339-0007-60C0-C436-083AC8230908").unwrap(),
"Reserved",
);
cat.insert("Linux", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("83BD6B9D-7F41-11DC-BE0B-001560B84F0F").unwrap(),
"Boot partition",
);
m.insert(
convert_str_to_array("516E7CB4-6ECF-11D6-8FF8-00022D09712B").unwrap(),
"Data partition",
);
m.insert(
convert_str_to_array("516E7CB5-6ECF-11D6-8FF8-00022D09712B").unwrap(),
"Swap partition",
);
m.insert(
convert_str_to_array("516E7CB6-6ECF-11D6-8FF8-00022D09712B").unwrap(),
"Unix File System (UFS) partition",
);
m.insert(
convert_str_to_array("516E7CB8-6ECF-11D6-8FF8-00022D09712B").unwrap(),
"Vinum volume manager partition",
);
m.insert(
convert_str_to_array("516E7CBA-6ECF-11D6-8FF8-00022D09712B").unwrap(),
"ZFS partition",
);
cat.insert("FreeBSD", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("48465300-0000-11AA-AA11-00306543ECAC").unwrap(),
"Hierarchical File System Plus (HFS+) partition",
);
m.insert(
convert_str_to_array("7C3457EF-0000-11AA-AA11-00306543ECAC").unwrap(),
"Apple APFS",
);
m.insert(
convert_str_to_array("55465300-0000-11AA-AA11-00306543ECAC").unwrap(),
"Apple UFS container",
);
m.insert(
convert_str_to_array("6A898CC3-1DD2-11B2-99A6-080020736631").unwrap(),
"ZFS",
);
m.insert(
convert_str_to_array("52414944-0000-11AA-AA11-00306543ECAC").unwrap(),
"Apple RAID partition",
);
m.insert(
convert_str_to_array("52414944-5F4F-11AA-AA11-00306543ECAC").unwrap(),
"Apple RAID partition, offline",
);
m.insert(
convert_str_to_array("426F6F74-0000-11AA-AA11-00306543ECAC").unwrap(),
"Apple Boot partition (Recovery HD)",
);
m.insert(
convert_str_to_array("4C616265-6C00-11AA-AA11-00306543ECAC").unwrap(),
"Apple Label",
);
m.insert(
convert_str_to_array("5265636F-7665-11AA-AA11-00306543ECAC").unwrap(),
"Apple TV Recovery partition",
);
m.insert(
convert_str_to_array("53746F72-6167-11AA-AA11-00306543ECAC").unwrap(),
"Apple Core Storage (i.e. Lion FileVault) partition",
);
m.insert(
convert_str_to_array("B6FA30DA-92D2-4A9A-96F1-871EC6486200").unwrap(),
"SoftRAID_Status",
);
m.insert(
convert_str_to_array("2E313465-19B9-463F-8126-8A7993773801").unwrap(),
"SoftRAID_Scratch",
);
m.insert(
convert_str_to_array("FA709C7E-65B1-4593-BFD5-E71D61DE9B02").unwrap(),
"SoftRAID_Volume",
);
m.insert(
convert_str_to_array("BBBA6DF5-F46F-4A89-8F59-8765B2727503").unwrap(),
"SoftRAID_Cache",
);
cat.insert("macOS / Darwin", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("6A82CB45-1DD2-11B2-99A6-080020736631").unwrap(),
"Boot partition",
);
m.insert(
convert_str_to_array("6A85CF4D-1DD2-11B2-99A6-080020736631").unwrap(),
"Root partition",
);
m.insert(
convert_str_to_array("6A87C46F-1DD2-11B2-99A6-080020736631").unwrap(),
"Swap partition",
);
m.insert(
convert_str_to_array("6A8B642B-1DD2-11B2-99A6-080020736631").unwrap(),
"Backup partition",
);
m.insert(
convert_str_to_array("6A898CC3-1DD2-11B2-99A6-080020736631").unwrap(),
"/usr partition",
);
m.insert(
convert_str_to_array("6A8EF2E9-1DD2-11B2-99A6-080020736631").unwrap(),
"/var partition",
);
m.insert(
convert_str_to_array("6A90BA39-1DD2-11B2-99A6-080020736631").unwrap(),
"/home partition",
);
m.insert(
convert_str_to_array("6A9283A5-1DD2-11B2-99A6-080020736631").unwrap(),
"Alternate sector",
);
m.insert(
convert_str_to_array("6A945A3B-1DD2-11B2-99A6-080020736631").unwrap(),
"Reserved partition",
);
m.insert(
convert_str_to_array("6A9630D1-1DD2-11B2-99A6-080020736631").unwrap(),
"Reserved partition",
);
m.insert(
convert_str_to_array("6A980767-1DD2-11B2-99A6-080020736631").unwrap(),
"Reserved partition",
);
m.insert(
convert_str_to_array("6A96237F-1DD2-11B2-99A6-080020736631").unwrap(),
"Reserved partition",
);
m.insert(
convert_str_to_array("6A8D2AC7-1DD2-11B2-99A6-080020736631").unwrap(),
"Reserved partition",
);
cat.insert("Solaris / illumos", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("49F48D32-B10E-11DC-B99B-0019D1879648").unwrap(),
"Swap partition",
);
m.insert(
convert_str_to_array("49F48D5A-B10E-11DC-B99B-0019D1879648").unwrap(),
"FFS partition",
);
m.insert(
convert_str_to_array("49F48D82-B10E-11DC-B99B-0019D1879648").unwrap(),
"LFS partition",
);
m.insert(
convert_str_to_array("49F48DAA-B10E-11DC-B99B-0019D1879648").unwrap(),
"RAID partition",
);
m.insert(
convert_str_to_array("2DB519C4-B10F-11DC-B99B-0019D1879648").unwrap(),
"Concatenated partition",
);
m.insert(
convert_str_to_array("2DB519EC-B10F-11DC-B99B-0019D1879648").unwrap(),
"Encrypted partition",
);
cat.insert("NetBSD", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("FE3A2A5D-4F32-41A7-B725-ACCC3285A309").unwrap(),
"Chrome OS kernel",
);
m.insert(
convert_str_to_array("3CB8E202-3B7E-47DD-8A3C-7FF2A13CFCEC").unwrap(),
"Chrome OS rootfs",
);
m.insert(
convert_str_to_array("2E0A753D-9E48-43B0-8337-B15192CB1B5E").unwrap(),
"Chrome OS future use",
);
cat.insert("Chrome OS", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("5DFBF5F4-2848-4BAC-AA5E-0D9A20B745A6").unwrap(),
"/usr partition (coreos-usr)",
);
m.insert(
convert_str_to_array("3884DD41-8582-4404-B9A8-E9B84F2DF50E").unwrap(),
"Resizable rootfs (coreos-resize)",
);
m.insert(
convert_str_to_array("C95DC21A-DF0E-4340-8D7B-26CBFA9A03E0").unwrap(),
"OEM customizations (coreos-reserved)",
);
m.insert(
convert_str_to_array("BE9067B9-EA49-4F15-B4F6-F36F8C9E1818").unwrap(),
"Root filesystem on RAID (coreos-root-raid)",
);
cat.insert("Container Linux by CoreOS", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("42465331-3BA3-10F1-802A-4861696B7521").unwrap(),
"Haiku BFS",
);
cat.insert("Haiku", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("85D5E45E-237C-11E1-B4B3-E89A8F7FC3A7").unwrap(),
"Boot partition",
);
m.insert(
convert_str_to_array("85D5E45A-237C-11E1-B4B3-E89A8F7FC3A7").unwrap(),
"Data partition",
);
m.insert(
convert_str_to_array("85D5E45B-237C-11E1-B4B3-E89A8F7FC3A7").unwrap(),
"Swap partition",
);
m.insert(
convert_str_to_array("0394EF8B-237E-11E1-B4B3-E89A8F7FC3A7").unwrap(),
"Unix File System (UFS) partition",
);
m.insert(
convert_str_to_array("85D5E45C-237C-11E1-B4B3-E89A8F7FC3A7").unwrap(),
"Vinum volume manager partition",
);
m.insert(
convert_str_to_array("85D5E45D-237C-11E1-B4B3-E89A8F7FC3A7").unwrap(),
"ZFS partition",
);
cat.insert("MidnightBSD", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("45B0969E-9B03-4F30-B4C6-B4B80CEFF106").unwrap(),
"Journal",
);
m.insert(
convert_str_to_array("45B0969E-9B03-4F30-B4C6-5EC00CEFF106").unwrap(),
"dm-crypt journal",
);
m.insert(
convert_str_to_array("4FBD7E29-9D25-41B8-AFD0-062C0CEFF05D").unwrap(),
"OSD",
);
m.insert(
convert_str_to_array("4FBD7E29-9D25-41B8-AFD0-5EC00CEFF05D").unwrap(),
"dm-crypt OSD",
);
m.insert(
convert_str_to_array("89C57F98-2FE5-4DC0-89C1-F3AD0CEFF2BE").unwrap(),
"Disk in creation",
);
m.insert(
convert_str_to_array("89C57F98-2FE5-4DC0-89C1-5EC00CEFF2BE").unwrap(),
"dm-crypt disk in creation",
);
m.insert(
convert_str_to_array("CAFECAFE-9B03-4F30-B4C6-B4B80CEFF106").unwrap(),
"Block",
);
m.insert(
convert_str_to_array("30CD0809-C2B2-499C-8879-2D6B78529876").unwrap(),
"Block DB",
);
m.insert(
convert_str_to_array("5CE17FCE-4087-4169-B7FF-056CC58473F9").unwrap(),
"Block write-ahead log",
);
m.insert(
convert_str_to_array("FB3AABF9-D25F-47CC-BF5E-721D1816496B").unwrap(),
"Lockbox for dm-crypt keys",
);
m.insert(
convert_str_to_array("4FBD7E29-8AE0-4982-BF9D-5A8D867AF560").unwrap(),
"Multipath OSD",
);
m.insert(
convert_str_to_array("45B0969E-8AE0-4982-BF9D-5A8D867AF560").unwrap(),
"Multipath journal",
);
m.insert(
convert_str_to_array("CAFECAFE-8AE0-4982-BF9D-5A8D867AF560").unwrap(),
"Multipath block",
);
m.insert(
convert_str_to_array("7F4A666A-16F3-47A2-8445-152EF4D03F6C").unwrap(),
"Multipath block",
);
m.insert(
convert_str_to_array("EC6D6385-E346-45DC-BE91-DA2A7C8B3261").unwrap(),
"Multipath block DB",
);
m.insert(
convert_str_to_array("01B41E1B-002A-453C-9F17-88793989FF8F").unwrap(),
"Multipath block write-ahead log",
);
m.insert(
convert_str_to_array("CAFECAFE-9B03-4F30-B4C6-5EC00CEFF106").unwrap(),
"dm-crypt block",
);
m.insert(
convert_str_to_array("93B0052D-02D9-4D8A-A43B-33A3EE4DFBC3").unwrap(),
"dm-crypt block DB",
);
m.insert(
convert_str_to_array("306E8683-4FE2-4330-B7C0-00A917C16966").unwrap(),
"dm-crypt block write-ahead log",
);
m.insert(
convert_str_to_array("45B0969E-9B03-4F30-B4C6-35865CEFF106").unwrap(),
"dm-crypt LUKS journal",
);
m.insert(
convert_str_to_array("CAFECAFE-9B03-4F30-B4C6-35865CEFF106").unwrap(),
"dm-crypt LUKS block",
);
m.insert(
convert_str_to_array("166418DA-C469-4022-ADF4-B30AFD37F176").unwrap(),
"dm-crypt LUKS block DB",
);
m.insert(
convert_str_to_array("86A32090-3647-40B9-BBBD-38D8C573AA86").unwrap(),
"dm-crypt LUKS block write-ahead log",
);
m.insert(
convert_str_to_array("4FBD7E29-9D25-41B8-AFD0-35865CEFF05D").unwrap(),
"dm-crypt LUKS OSD",
);
cat.insert("Ceph", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("824CC7A0-36A8-11E3-890A-952519AD3F61").unwrap(),
"Data partition",
);
cat.insert("OpenBSD", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("CEF5A9AD-73BC-4601-89F3-CDEEEEE321A1").unwrap(),
"Power-safe (QNX6) file system",
);
cat.insert("QNX", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("C91818F9-8025-47AF-89D2-F030D7000C2C").unwrap(),
"Plan 9 partition",
);
cat.insert("Plan 9", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("9D275380-40AD-11DB-BF97-000C2911D1B8").unwrap(),
"vmkcore (coredump partition)",
);
m.insert(
convert_str_to_array("AA31E02A-400F-11DB-9590-000C2911D1B8").unwrap(),
"VMFS filesystem partition",
);
m.insert(
convert_str_to_array("9198EFFC-31C0-11DB-8F78-000C2911D1B8").unwrap(),
"VMware Reserved",
);
cat.insert("VMware ESX", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("2568845D-2332-4675-BC39-8FA5A4748D15").unwrap(),
"Bootloader",
);
m.insert(
convert_str_to_array("114EAFFE-1552-4022-B26E-9B053604CF84").unwrap(),
"Bootloader2",
);
m.insert(
convert_str_to_array("49A4D17F-93A3-45C1-A0DE-F50B2EBE2599").unwrap(),
"Boot",
);
m.insert(
convert_str_to_array("4177C722-9E92-4AAB-8644-43502BFD5506").unwrap(),
"Recovery",
);
m.insert(
convert_str_to_array("EF32A33B-A409-486C-9141-9FFB711F6266").unwrap(),
"Misc",
);
m.insert(
convert_str_to_array("20AC26BE-20B7-11E3-84C5-6CFDB94711E9").unwrap(),
"Metadata",
);
m.insert(
convert_str_to_array("38F428E6-D326-425D-9140-6E0EA133647C").unwrap(),
"System",
);
m.insert(
convert_str_to_array("A893EF21-E428-470A-9E55-0668FD91A2D9").unwrap(),
"Cache",
);
m.insert(
convert_str_to_array("DC76DDA9-5AC1-491C-AF42-A82591580C0D").unwrap(),
"Data",
);
m.insert(
convert_str_to_array("EBC597D0-2053-4B15-8B64-E0AAC75F4DB1").unwrap(),
"Persistent",
);
m.insert(
convert_str_to_array("C5A0AEEC-13EA-11E5-A1B1-001E67CA0C3C").unwrap(),
"Vendor",
);
m.insert(
convert_str_to_array("BD59408B-4514-490D-BF12-9878D963F378").unwrap(),
"Config",
);
m.insert(
convert_str_to_array("8F68CC74-C5E5-48DA-BE91-A0C8C15E9C80").unwrap(),
"Factory",
);
m.insert(
convert_str_to_array("9FDAA6EF-4B3F-40D2-BA8D-BFF16BFB887B").unwrap(),
"Factory (alt)",
);
m.insert(
convert_str_to_array("767941D0-2085-11E3-AD3B-6CFDB94711E9").unwrap(),
"Fastboot / Tertiary",
);
m.insert(
convert_str_to_array("AC6D7924-EB71-4DF8-B48D-E267B27148FF").unwrap(),
"OEM",
);
cat.insert("Android-IA", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("19A710A2-B3CA-11E4-B026-10604B889DCF").unwrap(),
"Android Meta",
);
m.insert(
convert_str_to_array("193D1EA4-B3CA-11E4-B075-10604B889DCF").unwrap(),
"Android EXT",
);
cat.insert("Android 6.0+ ARM", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("7412F7D5-A156-4B13-81DC-867174929325").unwrap(),
"Boot",
);
m.insert(
convert_str_to_array("D4E6E2CD-4469-46F3-B5CB-1BFF57AFC149").unwrap(),
"Config",
);
cat.insert("Open Network Install Environment (ONIE)", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("9E1A2D38-C612-4316-AA26-8B49521E5A8B").unwrap(),
"PReP boot",
);
cat.insert("PowerPC", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("BC13C2FF-59E6-4262-A352-B275FD6F7172").unwrap(),
"Shared boot loader configuration",
);
cat.insert("freedesktop.org OSes (Linux, etc.)", m);
let mut m = HashMap::new();
m.insert(
convert_str_to_array("734E5AFE-F61A-11E6-BC64-92361F002671").unwrap(),
"Basic data partition (GEM, BGM, F32)",
);
cat.insert("Atari TOS", m);
cat
};
}
pub trait PartitionTypeGUID {
fn display_partition_type_guid(&self) -> String;
}
impl PartitionTypeGUID for [u8; 16] {
fn display_partition_type_guid(&self) -> String {
TYPE_MAP
.iter()
.filter_map(|(cat, m)| m.get(self).map(|x| format!("{} / {}", cat, x)))
.next()
.unwrap_or_else(|| self.display_uuid())
}
}
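// For illustration: looking up the EFI System Partition GUID from the table
// above yields "_ / EFI System partition", since that GUID lives in the
// catch-all "_" category.
//
//     let guid = convert_str_to_array("C12A7328-F81F-11D2-BA4B-00A0C93EC93B").unwrap();
//     assert_eq!(guid.display_partition_type_guid(), "_ / EFI System partition");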
| 34.987558 | 87 | 0.551362 |
3a92f5f9cabd5d78d4fba88f2120dbb448666223 | 663 | use core::convert::{AsMut, AsRef};
use core::ops::{Deref, DerefMut};
/// Type used to access `!Sync` storages.
#[cfg_attr(docsrs, doc(cfg(feature = "thread_local")))]
pub struct NonSync<T: ?Sized>(pub(crate) T);
impl<T: ?Sized> AsRef<T> for NonSync<T> {
fn as_ref(&self) -> &T {
&self.0
}
}
impl<T: ?Sized> AsMut<T> for NonSync<T> {
fn as_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl<T: ?Sized> Deref for NonSync<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: ?Sized> DerefMut for NonSync<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
| 20.71875 | 55 | 0.565611 |
901e3435aa460781797771743cf18734c0b54c53 | 453 | use bigint::{H256, U256};
#[derive(Clone, Serialize, Deserialize, PartialEq, Default, Debug)]
pub struct BlockExt {
pub received_at: u64,
pub total_difficulty: U256,
pub total_uncles_count: u64,
}
#[derive(Clone, Serialize, Deserialize, Eq, PartialEq, Debug)]
pub struct TransactionAddress {
// Block hash
pub block_hash: H256,
// Offset of block transaction in serialized bytes
pub offset: usize,
pub length: usize,
}
| 25.166667 | 67 | 0.704194 |
910a10a652d0efb38b50b73cc10c2e39e40d8de5 | 68 | pub mod data;
pub mod helper;
pub mod x_actix_web;
pub mod x_tonic;
| 13.6 | 20 | 0.764706 |
b9da64e12b65b7761da2e8684a170d3f30c6c1ff | 3,294 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
#[cfg(web_sys_unstable_apis)]
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(extends = ::js_sys::Object, js_name = GPUImageBitmapCopyView)]
#[derive(Debug, Clone, PartialEq, Eq)]
#[doc = "The `GpuImageBitmapCopyView` dictionary."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GpuImageBitmapCopyView`*"]
#[doc = ""]
#[doc = "*This API is unstable and requires `--cfg=web_sys_unstable_apis` to be activated, as"]
#[doc = "[described in the `wasm-bindgen` guide](https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html)*"]
pub type GpuImageBitmapCopyView;
}
#[cfg(web_sys_unstable_apis)]
impl GpuImageBitmapCopyView {
#[cfg(feature = "ImageBitmap")]
#[doc = "Construct a new `GpuImageBitmapCopyView`."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GpuImageBitmapCopyView`, `ImageBitmap`*"]
#[doc = ""]
#[doc = "*This API is unstable and requires `--cfg=web_sys_unstable_apis` to be activated, as"]
#[doc = "[described in the `wasm-bindgen` guide](https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html)*"]
pub fn new(image_bitmap: &ImageBitmap) -> Self {
#[allow(unused_mut)]
let mut ret: Self = ::wasm_bindgen::JsCast::unchecked_into(::js_sys::Object::new());
ret.image_bitmap(image_bitmap);
ret
}
#[cfg(web_sys_unstable_apis)]
#[cfg(feature = "ImageBitmap")]
#[doc = "Change the `imageBitmap` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GpuImageBitmapCopyView`, `ImageBitmap`*"]
#[doc = ""]
#[doc = "*This API is unstable and requires `--cfg=web_sys_unstable_apis` to be activated, as"]
#[doc = "[described in the `wasm-bindgen` guide](https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html)*"]
pub fn image_bitmap(&mut self, val: &ImageBitmap) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("imageBitmap"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[cfg(web_sys_unstable_apis)]
#[doc = "Change the `origin` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GpuImageBitmapCopyView`*"]
#[doc = ""]
#[doc = "*This API is unstable and requires `--cfg=web_sys_unstable_apis` to be activated, as"]
#[doc = "[described in the `wasm-bindgen` guide](https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html)*"]
pub fn origin(&mut self, val: &::wasm_bindgen::JsValue) -> &mut Self {
use wasm_bindgen::JsValue;
let r =
::js_sys::Reflect::set(self.as_ref(), &JsValue::from("origin"), &JsValue::from(val));
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
}
| 45.123288 | 128 | 0.627201 |
cc2a855e6c069ad23152439a668766e15e4f913c | 564 |
In each box, write one of the given digits so that the subtraction is correct:
@repeat(2)@
@vspace@
@center@ @task[1]@, @hspace@ @task[2]@, @hspace@ @task[3]@, @hspace@ @task[4]@, @hspace@ @task[5]@, @hspace@ @task[6]@, @hspace@ @task[7]@
@center@ @lib.check_number(term[1],15)@ @lib.check_number(term[2],15)@ @lib.check_number(term[3],15)@ @hspacept(3)@ - @hspacept(3)@ @lib.check_number(value,15)@ @hspacept(3)@ = @hspacept(3)@ @lib.check_number(result[1],15)@ @lib.check_number(result[2],15)@ @lib.check_number(result[3],15)@
@vspace@
@/repeat@
| 33.176471 | 295 | 0.652482 |
e56a1951a1ba63361ef21fe738523d9a7bee3263 | 8,239 | #![deny(warnings, rust_2018_idioms)]
use futures::prelude::*;
use linkerd_stack::{layer, NewService};
use parking_lot::RwLock;
use std::{
collections::{hash_map::Entry, HashMap},
hash::Hash,
sync::{Arc, Weak},
task::{Context, Poll},
};
use tokio::{sync::Notify, time};
use tracing::{debug, instrument, trace};
#[derive(Clone)]
pub struct Cache<T, N>
where
T: Eq + Hash,
N: NewService<T>,
{
inner: N,
services: Arc<Services<T, N::Service>>,
idle: time::Duration,
}
#[derive(Clone, Debug)]
pub struct Cached<S>
where
S: Send + Sync + 'static,
{
inner: S,
// Notifies entry's eviction task that a drop has occurred.
handle: Arc<Notify>,
}
type Services<T, S> = RwLock<HashMap<T, (S, Weak<Notify>)>>;
// === impl Cache ===
impl<T, N> Cache<T, N>
where
T: Clone + std::fmt::Debug + Eq + Hash + Send + Sync + 'static,
N: NewService<T> + 'static,
N::Service: Send + Sync + 'static,
{
pub fn layer(idle: time::Duration) -> impl layer::Layer<N, Service = Self> + Clone {
layer::mk(move |inner| Self::new(idle, inner))
}
fn new(idle: time::Duration, inner: N) -> Self {
let services = Arc::new(Services::default());
Self {
inner,
services,
idle,
}
}
fn spawn_idle(
target: T,
idle: time::Duration,
cache: &Arc<Services<T, N::Service>>,
) -> Arc<Notify> {
// Spawn a background task that holds the handle. Every time the handle
        // is notified, it resets the idle timeout. Every time the idle timeout
// expires, the handle is checked and the service is dropped if there
// are no active handles.
let handle = Arc::new(Notify::new());
tokio::spawn(Self::evict(
target,
idle,
handle.clone(),
Arc::downgrade(&cache),
));
handle
}
#[instrument(level = "debug", skip(idle, reset, cache))]
async fn evict(
target: T,
idle: time::Duration,
mut reset: Arc<Notify>,
cache: Weak<Services<T, N::Service>>,
) {
// Wait for the handle to be notified before starting to track idleness.
reset.notified().await;
debug!("Awaiting idleness");
// Wait for either the reset to be notified or the idle timeout to
// elapse.
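        // `select_biased!` polls the reset branch first, so a wake-up that
        // races with the timer restarts the idle countdown instead of letting
        // the entry be evicted.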
loop {
futures::select_biased! {
// If the reset was notified, restart the timer.
_ = reset.notified().fuse() => {
trace!("Reset");
}
_ = time::sleep(idle).fuse() => match cache.upgrade() {
Some(cache) => match Arc::try_unwrap(reset) {
// If this is the last reference to the handle after the
// idle timeout, remove the cache entry.
Ok(_) => {
let removed = cache.write().remove(&target).is_some();
debug_assert!(removed, "Cache item must exist: {:?}", target);
debug!("Cache entry dropped");
return;
}
// Otherwise, another handle has been acquired, so
// restore our reset reference for the next iteration.
Err(r) => {
trace!("The handle is still active");
reset = r;
}
},
None => {
trace!("Cache already dropped");
return;
}
},
}
}
}
}
impl<T, N> NewService<T> for Cache<T, N>
where
T: Clone + std::fmt::Debug + Eq + Hash + Send + Sync + 'static,
N: NewService<T> + 'static,
N::Service: Clone + Send + Sync + 'static,
{
type Service = Cached<N::Service>;
fn new_service(&mut self, target: T) -> Cached<N::Service> {
// We expect the item to be available in most cases, so initially obtain
// only a read lock.
if let Some((svc, weak)) = self.services.read().get(&target) {
if let Some(handle) = weak.upgrade() {
trace!("Using cached service");
return Cached {
inner: svc.clone(),
handle,
};
}
}
// Otherwise, obtain a write lock to insert a new service.
match self.services.write().entry(target.clone()) {
Entry::Occupied(mut entry) => {
// Another thread raced us to create a service for this target.
// Try to use it.
let (svc, weak) = entry.get();
match weak.upgrade() {
Some(handle) => {
trace!(?target, "Using cached service");
Cached {
inner: svc.clone(),
handle,
}
}
None => {
debug!(?target, "Replacing defunct service");
let handle = Self::spawn_idle(target.clone(), self.idle, &self.services);
let inner = self.inner.new_service(target);
entry.insert((inner.clone(), Arc::downgrade(&handle)));
Cached { inner, handle }
}
}
}
Entry::Vacant(entry) => {
debug!(?target, "Caching new service");
let handle = Self::spawn_idle(target.clone(), self.idle, &self.services);
let inner = self.inner.new_service(target);
entry.insert((inner.clone(), Arc::downgrade(&handle)));
Cached { inner, handle }
}
}
}
}
// === impl Cached ===
impl<Req, S> tower::Service<Req> for Cached<S>
where
S: tower::Service<Req> + Send + Sync + 'static,
{
type Response = S::Response;
type Error = S::Error;
type Future = S::Future;
#[inline]
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), S::Error>> {
self.inner.poll_ready(cx)
}
#[inline]
fn call(&mut self, req: Req) -> Self::Future {
self.inner.call(req)
}
}
impl<S> Drop for Cached<S>
where
S: Send + Sync + 'static,
{
fn drop(&mut self) {
self.handle.notify_one();
}
}
#[cfg(test)]
#[tokio::test]
async fn test_idle_retain() {
let _ = tracing_subscriber::fmt::try_init();
time::pause();
let idle = time::Duration::from_secs(10);
let cache = Arc::new(Services::default());
let handle = Cache::<(), fn(()) -> ()>::spawn_idle((), idle, &cache);
cache.write().insert((), ((), Arc::downgrade(&handle)));
let c0 = Cached { inner: (), handle };
let handle = Arc::downgrade(&c0.handle);
    // Let an idle timeout elapse and ensure the held service has not been
// evicted.
time::sleep(idle * 2).await;
assert!(handle.upgrade().is_some());
assert!(cache.read().contains_key(&()));
    // Drop the original cached instance and let only half of the idle timeout
    // elapse.
drop(c0);
time::sleep(time::Duration::from_secs(5)).await;
assert!(handle.upgrade().is_some());
assert!(cache.read().contains_key(&()));
// Ensure that the handle hasn't been dropped yet and revive it to create a
// new cached instance.
let c1 = Cached {
inner: (),
// Retain the handle from the first instance.
handle: handle.upgrade().unwrap(),
};
// Drop the new cache instance. Wait the remainder of the first idle timeout
    // and ensure that the handle is still retained.
drop(c1);
time::sleep(time::Duration::from_secs(5)).await;
assert!(handle.upgrade().is_some());
assert!(cache.read().contains_key(&()));
    // Wait the remainder of the second idle timeout and ensure the handle has
// been dropped.
time::sleep(time::Duration::from_secs(5)).await;
assert!(handle.upgrade().is_none());
assert!(!cache.read().contains_key(&()));
}
| 31.934109 | 97 | 0.514626 |
ef0a595f9b67fece8a618cce24bf476068ec8bfe | 13,229 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type resolution: the phase that finds all the types in the AST with
// unresolved type variables and replaces "ty_var" types with their
// substitutions.
use middle::pat_util;
use middle::ty;
use middle::typeck::astconv::AstConv;
use middle::typeck::check::{FnCtxt, SelfInfo};
use middle::typeck::infer::{force_all, resolve_all, resolve_region};
use middle::typeck::infer::resolve_type;
use middle::typeck::infer;
use middle::typeck::{vtable_res, vtable_origin};
use middle::typeck::{vtable_static, vtable_param};
use middle::typeck::method_map_entry;
use middle::typeck::write_substs_to_tcx;
use middle::typeck::write_ty_to_tcx;
use util::ppaux;
use util::ppaux::Repr;
use syntax::ast;
use syntax::codemap::Span;
use syntax::print::pprust::pat_to_str;
use syntax::visit;
use syntax::visit::Visitor;
fn resolve_type_vars_in_type(fcx: @FnCtxt, sp: Span, typ: ty::t)
-> Option<ty::t> {
if !ty::type_needs_infer(typ) { return Some(typ); }
match resolve_type(fcx.infcx(), typ, resolve_all | force_all) {
Ok(new_type) => return Some(new_type),
Err(e) => {
if !fcx.ccx.tcx.sess.has_errors() {
fcx.ccx.tcx.sess.span_err(
sp,
format!("cannot determine a type \
for this expression: {}",
infer::fixup_err_to_str(e)))
}
return None;
}
}
}
fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t])
-> ~[ty::t] {
tys.map(|t| {
match resolve_type_vars_in_type(fcx, sp, *t) {
Some(t1) => t1,
None => ty::mk_err()
}
})
}
fn resolve_method_map_entry(fcx: @FnCtxt, sp: Span, id: ast::NodeId) {
// Resolve any method map entry
let method_map_entry_opt = {
let method_map = fcx.inh.method_map.borrow();
method_map.get().find_copy(&id)
};
match method_map_entry_opt {
None => {}
Some(mme) => {
{
let r = resolve_type_vars_in_type(fcx, sp, mme.self_ty);
for t in r.iter() {
let method_map = fcx.ccx.method_map;
let new_entry = method_map_entry { self_ty: *t, ..mme };
debug!("writeback::resolve_method_map_entry(id={:?}, \
new_entry={:?})",
id, new_entry);
let mut method_map = method_map.borrow_mut();
method_map.get().insert(id, new_entry);
}
}
}
}
}
fn resolve_vtable_map_entry(fcx: @FnCtxt, sp: Span, id: ast::NodeId) {
    // Resolve any vtable map entry
{
let origins_opt = {
let vtable_map = fcx.inh.vtable_map.borrow();
vtable_map.get().find_copy(&id)
};
match origins_opt {
None => {}
Some(origins) => {
let r_origins = resolve_origins(fcx, sp, origins);
let mut vtable_map = fcx.ccx.vtable_map.borrow_mut();
vtable_map.get().insert(id, r_origins);
debug!("writeback::resolve_vtable_map_entry(id={}, vtables={:?})",
id, r_origins.repr(fcx.tcx()));
}
}
}
fn resolve_origins(fcx: @FnCtxt, sp: Span,
vtbls: vtable_res) -> vtable_res {
@vtbls.map(|os| @os.map(|o| resolve_origin(fcx, sp, o)))
}
fn resolve_origin(fcx: @FnCtxt,
sp: Span,
origin: &vtable_origin) -> vtable_origin {
match origin {
&vtable_static(def_id, ref tys, origins) => {
let r_tys = resolve_type_vars_in_types(fcx, sp, *tys);
let r_origins = resolve_origins(fcx, sp, origins);
vtable_static(def_id, r_tys, r_origins)
}
&vtable_param(n, b) => {
vtable_param(n, b)
}
}
}
}
fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
-> Option<ty::t> {
let fcx = wbcx.fcx;
let tcx = fcx.ccx.tcx;
// Resolve any borrowings for the node with id `id`
let adjustment = {
let adjustments = fcx.inh.adjustments.borrow();
adjustments.get().find_copy(&id)
};
match adjustment {
None => (),
Some(@ty::AutoAddEnv(r, s)) => {
match resolve_region(fcx.infcx(), r, resolve_all | force_all) {
Err(e) => {
// This should not, I think, happen:
fcx.ccx.tcx.sess.span_err(
sp, format!("cannot resolve bound for closure: {}",
infer::fixup_err_to_str(e)));
}
Ok(r1) => {
let resolved_adj = @ty::AutoAddEnv(r1, s);
debug!("Adjustments for node {}: {:?}", id, resolved_adj);
let mut adjustments = fcx.tcx().adjustments.borrow_mut();
adjustments.get().insert(id, resolved_adj);
}
}
}
Some(@ty::AutoDerefRef(adj)) => {
let fixup_region = |r| {
match resolve_region(fcx.infcx(), r, resolve_all | force_all) {
Ok(r1) => r1,
Err(e) => {
// This should not, I think, happen.
fcx.ccx.tcx.sess.span_err(
sp, format!("cannot resolve scope of borrow: {}",
infer::fixup_err_to_str(e)));
r
}
}
};
let resolved_autoref = match adj.autoref {
None => None,
Some(ref r) => Some(r.map_region(fixup_region))
};
let resolved_adj = @ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: adj.autoderefs,
autoref: resolved_autoref,
});
debug!("Adjustments for node {}: {:?}", id, resolved_adj);
let mut adjustments = fcx.tcx().adjustments.borrow_mut();
adjustments.get().insert(id, resolved_adj);
}
Some(adjustment @ @ty::AutoObject(..)) => {
debug!("Adjustments for node {}: {:?}", id, adjustment);
let mut adjustments = fcx.tcx().adjustments.borrow_mut();
adjustments.get().insert(id, adjustment);
}
}
// Resolve the type of the node with id `id`
let n_ty = fcx.node_ty(id);
match resolve_type_vars_in_type(fcx, sp, n_ty) {
None => {
wbcx.success = false;
return None;
}
Some(t) => {
debug!("resolve_type_vars_for_node(id={}, n_ty={}, t={})",
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
write_ty_to_tcx(tcx, id, t);
let mut ret = Some(t);
fcx.opt_node_ty_substs(id, |substs| {
let mut new_tps = ~[];
for subst in substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, *subst) {
Some(t) => new_tps.push(t),
None => { wbcx.success = false; ret = None; break }
}
}
write_substs_to_tcx(tcx, id, new_tps);
ret.is_some()
});
ret
}
}
}
fn maybe_resolve_type_vars_for_node(wbcx: &mut WbCtxt,
sp: Span,
id: ast::NodeId)
-> Option<ty::t> {
let contained = {
let node_types = wbcx.fcx.inh.node_types.borrow();
node_types.get().contains_key(&id)
};
if contained {
resolve_type_vars_for_node(wbcx, sp, id)
} else {
None
}
}
struct WbCtxt {
fcx: @FnCtxt,
// As soon as we hit an error we have to stop resolving
// the entire function.
success: bool,
}
fn visit_stmt(s: &ast::Stmt, wbcx: &mut WbCtxt) {
if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s));
visit::walk_stmt(wbcx, s, ());
}
fn visit_expr(e: &ast::Expr, wbcx: &mut WbCtxt) {
if !wbcx.success {
return;
}
resolve_type_vars_for_node(wbcx, e.span, e.id);
resolve_method_map_entry(wbcx.fcx, e.span, e.id);
{
let r = e.get_callee_id();
for callee_id in r.iter() {
resolve_method_map_entry(wbcx.fcx, e.span, *callee_id);
}
}
resolve_vtable_map_entry(wbcx.fcx, e.span, e.id);
{
let r = e.get_callee_id();
for callee_id in r.iter() {
resolve_vtable_map_entry(wbcx.fcx, e.span, *callee_id);
}
}
match e.node {
ast::ExprFnBlock(ref decl, _) | ast::ExprProc(ref decl, _) => {
for input in decl.inputs.iter() {
let _ = resolve_type_vars_for_node(wbcx, e.span, input.id);
}
}
ast::ExprBinary(callee_id, _, _, _) |
ast::ExprUnary(callee_id, _, _) |
ast::ExprAssignOp(callee_id, _, _, _) |
ast::ExprIndex(callee_id, _, _) => {
maybe_resolve_type_vars_for_node(wbcx, e.span, callee_id);
}
ast::ExprMethodCall(callee_id, _, _, _, _, _) => {
// We must always have written in a callee ID type for these.
resolve_type_vars_for_node(wbcx, e.span, callee_id);
}
_ => ()
}
visit::walk_expr(wbcx, e, ());
}
fn visit_block(b: &ast::Block, wbcx: &mut WbCtxt) {
if !wbcx.success {
return;
}
resolve_type_vars_for_node(wbcx, b.span, b.id);
visit::walk_block(wbcx, b, ());
}
fn visit_pat(p: &ast::Pat, wbcx: &mut WbCtxt) {
if !wbcx.success {
return;
}
resolve_type_vars_for_node(wbcx, p.span, p.id);
debug!("Type for pattern binding {} (id {}) resolved to {}",
pat_to_str(p, wbcx.fcx.ccx.tcx.sess.intr()), p.id,
wbcx.fcx.infcx().ty_to_str(
ty::node_id_to_type(wbcx.fcx.ccx.tcx,
p.id)));
visit::walk_pat(wbcx, p, ());
}
fn visit_local(l: &ast::Local, wbcx: &mut WbCtxt) {
if !wbcx.success { return; }
let var_ty = wbcx.fcx.local_ty(l.span, l.id);
match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) {
Ok(lty) => {
debug!("Type for local {} (id {}) resolved to {}",
pat_to_str(l.pat, wbcx.fcx.tcx().sess.intr()),
l.id,
wbcx.fcx.infcx().ty_to_str(lty));
write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.id, lty);
}
Err(e) => {
wbcx.fcx.ccx.tcx.sess.span_err(
l.span,
format!("cannot determine a type \
for this local variable: {}",
infer::fixup_err_to_str(e)));
wbcx.success = false;
}
}
visit::walk_local(wbcx, l, ());
}
fn visit_item(_item: &ast::Item, _wbcx: &mut WbCtxt) {
// Ignore items
}
impl Visitor<()> for WbCtxt {
fn visit_item(&mut self, i: &ast::Item, _: ()) { visit_item(i, self); }
fn visit_stmt(&mut self, s: &ast::Stmt, _: ()) { visit_stmt(s, self); }
fn visit_expr(&mut self, ex:&ast::Expr, _: ()) { visit_expr(ex, self); }
fn visit_block(&mut self, b: &ast::Block, _: ()) { visit_block(b, self); }
fn visit_pat(&mut self, p: &ast::Pat, _: ()) { visit_pat(p, self); }
fn visit_local(&mut self, l: &ast::Local, _: ()) { visit_local(l, self); }
// FIXME(#10894) should continue recursing
fn visit_ty(&mut self, _t: &ast::Ty, _: ()) {}
}
pub fn resolve_type_vars_in_expr(fcx: @FnCtxt, e: &ast::Expr) -> bool {
let mut wbcx = WbCtxt { fcx: fcx, success: true };
let wbcx = &mut wbcx;
wbcx.visit_expr(e, ());
return wbcx.success;
}
pub fn resolve_type_vars_in_fn(fcx: @FnCtxt,
decl: &ast::FnDecl,
blk: &ast::Block,
self_info: Option<SelfInfo>) -> bool {
let mut wbcx = WbCtxt { fcx: fcx, success: true };
let wbcx = &mut wbcx;
wbcx.visit_block(blk, ());
for self_info in self_info.iter() {
resolve_type_vars_for_node(wbcx,
self_info.span,
self_info.self_id);
}
for arg in decl.inputs.iter() {
wbcx.visit_pat(arg.pat, ());
// Privacy needs the type for the whole pattern, not just each binding
if !pat_util::pat_is_binding(fcx.tcx().def_map, arg.pat) {
resolve_type_vars_for_node(wbcx, arg.pat.span, arg.pat.id);
}
}
return wbcx.success;
}
| 34.007712 | 82 | 0.52846 |
9106e296eae26dd25b8c0cd9edef630ed5b38682 | 1,572 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
//! This crate defines [`trait StateView`](StateView).
use anyhow::Result;
use diem_crypto::HashValue;
use diem_types::{access_path::AccessPath, transaction::Version};
/// `StateView` is a trait that defines a read-only snapshot of the global state. It is passed to
/// the VM for transaction execution, during which the VM is guaranteed to read anything at the
/// given state.
pub trait StateView: Sync {
/// For logging and debugging purpose, identifies what this view is for.
fn id(&self) -> StateViewId {
StateViewId::Miscellaneous
}
/// Gets the state for a single access path.
fn get(&self, access_path: &AccessPath) -> Result<Option<Vec<u8>>>;
/// Gets states for a list of access paths.
fn multi_get(&self, access_paths: &[AccessPath]) -> Result<Vec<Option<Vec<u8>>>>;
/// VM needs this method to know whether the current state view is for genesis state creation.
/// Currently TransactionPayload::WriteSet is only valid for genesis state creation.
fn is_genesis(&self) -> bool;
}
#[derive(Copy, Clone)]
pub enum StateViewId {
/// State-sync applying a chunk of transactions.
ChunkExecution { first_version: Version },
/// LEC applying a block.
BlockExecution { block_id: HashValue },
/// VmValidator verifying incoming transaction.
TransactionValidation { base_version: Version },
/// For test, db-bootstrapper, etc. Usually not aimed to pass to VM.
Miscellaneous,
}
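// A minimal sketch of an in-memory `StateView` for tests. This module is not
// part of the upstream crate; the struct and field names are illustrative
// assumptions, and it relies on `AccessPath` implementing `Hash + Eq`.
#[cfg(test)]
mod in_memory_sketch {
    use super::*;
    use std::collections::HashMap;
    struct InMemoryStateView {
        data: HashMap<AccessPath, Vec<u8>>,
    }
    impl StateView for InMemoryStateView {
        fn get(&self, access_path: &AccessPath) -> Result<Option<Vec<u8>>> {
            // A read-only lookup; a missing key is reported as `None`, not an error.
            Ok(self.data.get(access_path).cloned())
        }
        fn multi_get(&self, access_paths: &[AccessPath]) -> Result<Vec<Option<Vec<u8>>>> {
            access_paths.iter().map(|ap| self.get(ap)).collect()
        }
        fn is_genesis(&self) -> bool {
            // Treat an empty backing store as the genesis state.
            self.data.is_empty()
        }
    }
}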
| 36.55814 | 98 | 0.705471 |
87a5e25b7c20f010534175068f9144e2932d2021 | 1,065 | use crate::audio::SoundInstance;
use crate::sync::Consumer;
use alloc::vec::Vec;
pub struct Mixer {
receiver: Consumer<SoundInstance>,
active: Vec<SoundInstance>,
sample_interval: f32,
}
impl Mixer {
pub fn new(sample_rate: u32, receiver: Consumer<SoundInstance>) -> Mixer {
Mixer {
receiver,
active: Vec::with_capacity(32),
sample_interval: 1.0 / sample_rate as f32,
}
}
pub fn sample(&mut self, out: &mut [[f32; 2]]) {
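        // Drain any newly queued sound instances from the channel before mixing this buffer.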
while let Some(instance) = self.receiver.try_pop() {
self.active.push(instance);
}
for target in out.iter_mut() {
*target = [0.0, 0.0];
}
let mut index = 0;
while index < self.active.len() {
let instance = &mut self.active[index];
if instance.mix(self.sample_interval, out) {
let mut instance = self.active.swap_remove(index);
instance.control().stop();
} else {
index += 1;
}
}
}
}
| 25.97561 | 78 | 0.532394 |
3a5b07edaf0ca9e84e5fc64e42c27928aa4c7519 | 600 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Symbolic Math
#![warn(missing_docs)]
#![deny(trivial_numeric_casts,
unsafe_code, unstable_features,
unused_import_braces, unused_qualifications)]
extern crate num;
pub mod ops;
mod terms;
pub use self::terms::*;
#[cfg(test)]
mod tests {
#[test]
fn it_works() {}
}
| 23.076923 | 68 | 0.7 |
23f027a514ce022a8ee1d35a8c77bce0ba100a5a | 3,966 | use std::collections::HashSet;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Read;
use itertools::Itertools;
use itertools::MinMaxResult;
use regex::Regex;
use common::Solution;
type Coordinate = (usize, usize);
#[derive(Default)]
pub struct Day17 {
clays: HashSet<Coordinate>,
flowing: HashSet<Coordinate>,
contained: HashSet<Coordinate>,
ymin: usize,
ymax: usize,
}
impl Day17 {
pub fn new() -> Self {
Default::default()
}
fn read_input(&mut self, input: &mut dyn Read) {
let matcher = Regex::new(r"(.)=(\d+), (.)=(\d+)\.\.(\d+)").unwrap();
let reader = BufReader::new(input);
for line in reader.lines() {
let line = line.unwrap();
let captures = matcher.captures(&line).unwrap();
let fixed: usize = captures[2].parse().unwrap();
let a: usize = captures[4].parse().unwrap();
let b: usize = captures[5].parse().unwrap();
match &captures[1] {
"x" => {
for y in a..=b {
self.clays.insert((fixed, y));
}
}
"y" => {
for x in a..=b {
self.clays.insert((x, fixed));
}
}
_ => panic!(),
}
}
match self.clays.iter().map(|(_, y)| y).minmax() {
MinMaxResult::MinMax(a, b) => {
self.ymin = *a;
self.ymax = *b;
}
_ => panic!(),
};
}
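    /// Scans horizontally from `center` along `range` until either clay is hit
    /// (this side of the water is contained) or a tile with nothing supporting
    /// it below is found (the water spills over). Returns the last x reached
    /// and whether this side is contained.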
fn support_end<T>(&mut self, center: usize, range: T, y: usize) -> (usize, bool)
where
T: Iterator<Item = usize>,
{
let mut prev = center;
for x in range {
let pos = (x, y);
if self.clays.contains(&pos) {
return (prev, true);
}
prev = x;
let below = (x, y + 1);
self.descend(below);
if !self.is_supported(&below) {
return (x, false);
}
}
unreachable!();
}
fn is_supported(&self, pos: &Coordinate) -> bool {
self.clays.contains(pos) || self.contained.contains(pos)
}
fn descend(&mut self, pos: Coordinate) {
let (x, y) = pos;
if y > self.ymax
|| self.clays.contains(&pos)
|| self.flowing.contains(&pos)
|| self.contained.contains(&pos)
{
return;
}
let below = (x, y + 1);
self.descend(below);
if self.is_supported(&below) {
let (right, right_contained) = self.support_end(x, (x + 1).., y);
let (left, left_contained) = self.support_end(x, (0..x).rev(), y);
let range = (left..=right).map(|x| (x, y));
if left_contained && right_contained {
self.contained.extend(range);
} else {
self.flowing.extend(range);
}
} else {
self.flowing.insert(pos);
}
}
}
impl Solution for Day17 {
fn part1(&mut self, input: &mut dyn Read) -> String {
self.read_input(input);
self.descend((500, 0));
let result = self.contained.len() + self.flowing.len() - self.ymin;
result.to_string()
}
fn part2(&mut self, input: &mut dyn Read) -> String {
self.read_input(input);
self.descend((500, 0));
self.contained.len().to_string()
}
}
#[cfg(test)]
mod tests {
use common::Solution;
use day17::Day17;
const SAMPLE_INPUT: &[u8] = include_bytes!("samples/17.txt");
#[test]
fn sample_part1() {
let mut instance = Day17::new();
assert_eq!("57", instance.part1(&mut SAMPLE_INPUT));
}
#[test]
fn sample_part2() {
let mut instance = Day17::new();
assert_eq!("29", instance.part2(&mut SAMPLE_INPUT));
}
}
| 24.943396 | 84 | 0.480081 |
22b6d3af71b29f10da40c11b8e268a4368240b23 | 2,184 | /*
Based on the Python suffix_trees module by Peter Us
https://github.com/ptrus/suffix-trees
Published under the MIT License
*/
use std::collections::HashMap;
use super::utils::{compute_slink, create_leaf, create_node};
use super::{Node, NodeRef};
pub fn build_mc_creight<'a>(slab: &mut Vec<Node>, /*x*/ word: &'a [String], root: NodeRef) {
/*
Builds a Suffix tree using McCreight O(n) algorithm.
Algorithm based on:
McCreight, Edward M. "A space-economical suffix tree construction algorithm." - ACM, 1976.
Implementation based on:
UH CS - 58093 String Processing Algorithms Lecture Notes
*/
let mut transition_links: Vec<HashMap<&'a [String], NodeRef>> = vec![HashMap::new()];
let mut suffix_links: Vec<NodeRef> = vec![root];
let mut u_ref: NodeRef = root;
let mut d: usize = 0;
for i in 0..word.len() {
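        // Match suffix `i` against the tree: the outer loop follows whole
        // transitions out of `u_ref`, the inner loop matches characters along an
        // edge label, leaving `d` as the length of the longest already-present
        // prefix of this suffix.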
let mut u = &slab[u_ref];
while u.depth == d
&& transition_links[u_ref].contains_key(std::slice::from_ref(&word[d + i]))
{
u_ref = transition_links[u_ref][std::slice::from_ref(&word[d + i])];
d += 1;
u = &slab[u_ref];
while d < u.depth && word[u.index + d] == word[i + d] {
d += 1;
}
}
if d < u.depth {
u_ref = create_node(
slab,
&mut transition_links,
&mut suffix_links,
word,
u_ref,
d,
);
}
create_leaf(
slab,
&mut transition_links,
&mut suffix_links,
word,
i,
u_ref,
d,
);
        // `usize::MAX` marks a missing suffix link (the HashMap-based original checked `!contains_key(u_ref)`)
if suffix_links[u_ref] == std::usize::MAX {
compute_slink(slab, &mut transition_links, &mut suffix_links, word, u_ref);
}
u_ref = suffix_links[u_ref];
d = d.saturating_sub(1);
}
for (node, transition_links) in slab.iter_mut().zip(transition_links.into_iter()) {
for link in transition_links.values() {
node.transition_links.insert(*link);
}
}
}
| 27.3 | 94 | 0.543498 |
76b2babd29e60384449913f37d6784c36535384f | 8,998 | // Copyright 2018 Parity Technologies (UK) Ltd.
// Copyright 2020 Netwarps Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use futures::AsyncWriteExt;
use prost::Message;
use std::{error::Error, fmt, io};
use crate::{rpc_proto, Topic, FLOOD_SUB_ID};
use async_trait::async_trait;
use futures::{channel::mpsc, SinkExt};
use libp2prs_core::upgrade::UpgradeInfo;
use libp2prs_core::{PeerId, ProtocolId, ReadEx};
use libp2prs_swarm::protocol_handler::Notifiee;
use libp2prs_swarm::{
connection::Connection,
protocol_handler::{IProtocolHandler, ProtocolHandler},
substream::Substream,
};
pub(crate) enum PeerEvent {
NewPeer(PeerId),
DeadPeer(PeerId),
}
#[derive(Clone)]
pub struct Handler {
incoming_tx: mpsc::UnboundedSender<RPC>,
peer_tx: mpsc::UnboundedSender<PeerEvent>,
}
impl Handler {
pub(crate) fn new(incoming_tx: mpsc::UnboundedSender<RPC>, peer_tx: mpsc::UnboundedSender<PeerEvent>) -> Self {
Handler { incoming_tx, peer_tx }
}
}
impl UpgradeInfo for Handler {
type Info = ProtocolId;
fn protocol_info(&self) -> Vec<Self::Info> {
vec![ProtocolId::new(FLOOD_SUB_ID, 0)]
}
}
impl Notifiee for Handler {
fn connected(&mut self, conn: &mut Connection) {
let peer_id = conn.remote_peer();
let _ = self.peer_tx.unbounded_send(PeerEvent::NewPeer(peer_id));
}
fn disconnected(&mut self, conn: &mut Connection) {
let peer_id = conn.remote_peer();
let _ = self.peer_tx.unbounded_send(PeerEvent::DeadPeer(peer_id));
}
}
#[async_trait]
impl ProtocolHandler for Handler {
async fn handle(&mut self, mut stream: Substream, _info: <Self as UpgradeInfo>::Info) -> Result<(), Box<dyn Error>> {
log::trace!("Handle stream from {}", stream.remote_peer());
loop {
let packet = match stream.read_one(2048).await {
Ok(p) => p,
Err(e) => {
if e.kind() == io::ErrorKind::UnexpectedEof {
stream.close().await?;
}
return Err(Box::new(e));
}
};
let rpc = rpc_proto::Rpc::decode(&packet[..])?;
log::trace!("recv rpc msg: {:?}", rpc);
let mut messages = Vec::with_capacity(rpc.publish.len());
for publish in rpc.publish.into_iter() {
messages.push(FloodsubMessage {
source: PeerId::from_bytes(&publish.from.unwrap_or_default()).map_err(|_| FloodsubDecodeError::InvalidPeerId)?,
data: publish.data.unwrap_or_default(),
sequence_number: publish.seqno.unwrap_or_default(),
topics: publish.topic_ids.into_iter().map(Topic::new).collect(),
});
}
let rpc = RPC {
rpc: FloodsubRpc {
messages,
subscriptions: rpc
.subscriptions
.into_iter()
.map(|sub| FloodsubSubscription {
action: if Some(true) == sub.subscribe {
FloodsubSubscriptionAction::Subscribe
} else {
FloodsubSubscriptionAction::Unsubscribe
},
topic: Topic::new(sub.topic_id.unwrap_or_default()),
})
.collect(),
},
from: stream.remote_peer(),
};
self.incoming_tx.send(rpc).await.map_err(|_| FloodsubDecodeError::ProtocolExit)?;
}
}
fn box_clone(&self) -> IProtocolHandler {
Box::new(self.clone())
}
}
/// Errors that can occur while decoding an incoming floodsub message.
#[derive(Debug)]
pub enum FloodsubDecodeError {
/// Error when reading the packet from the socket.
ReadError(io::Error),
/// Error when decoding the raw buffer into a protobuf.
ProtobufError(prost::DecodeError),
/// Error when parsing the `PeerId` in the message.
InvalidPeerId,
/// Protocol message process mainloop exit
ProtocolExit,
}
impl From<prost::DecodeError> for FloodsubDecodeError {
fn from(err: prost::DecodeError) -> Self {
FloodsubDecodeError::ProtobufError(err)
}
}
impl fmt::Display for FloodsubDecodeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
FloodsubDecodeError::ReadError(ref err) => write!(f, "Error while reading from socket: {}", err),
FloodsubDecodeError::ProtobufError(ref err) => write!(f, "Error while decoding protobuf: {}", err),
FloodsubDecodeError::InvalidPeerId => write!(f, "Error while decoding PeerId from message"),
FloodsubDecodeError::ProtocolExit => write!(f, "Error while send message to message process mainloop"),
}
}
}
impl Error for FloodsubDecodeError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match *self {
FloodsubDecodeError::ReadError(ref err) => Some(err),
FloodsubDecodeError::ProtobufError(ref err) => Some(err),
FloodsubDecodeError::InvalidPeerId => None,
FloodsubDecodeError::ProtocolExit => None,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RPC {
pub rpc: FloodsubRpc,
// unexported on purpose, not sending this over the wire
pub from: PeerId,
}
/// An RPC received by the floodsub system.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FloodsubRpc {
/// List of messages that were part of this RPC query.
pub messages: Vec<FloodsubMessage>,
/// List of subscriptions.
pub subscriptions: Vec<FloodsubSubscription>,
}
impl FloodsubRpc {
/// Turns this `FloodsubRpc` into a message that can be sent to a substream.
pub fn into_bytes(self) -> Vec<u8> {
let rpc = rpc_proto::Rpc {
publish: self
.messages
.into_iter()
.map(|msg| rpc_proto::Message {
from: Some(msg.source.to_bytes()),
data: Some(msg.data),
seqno: Some(msg.sequence_number),
topic_ids: msg.topics.into_iter().map(|topic| topic.into()).collect(),
})
.collect(),
subscriptions: self
.subscriptions
.into_iter()
.map(|topic| rpc_proto::rpc::SubOpts {
subscribe: Some(topic.action == FloodsubSubscriptionAction::Subscribe),
topic_id: Some(topic.topic.into()),
})
.collect(),
};
let mut buf = Vec::with_capacity(rpc.encoded_len());
rpc.encode(&mut buf).expect("Vec<u8> provides capacity as needed");
buf
}
}
/// A message received by the floodsub system.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FloodsubMessage {
/// Id of the peer that published this message.
pub source: PeerId,
/// Content of the message. Its meaning is out of scope of this library.
pub data: Vec<u8>,
/// An incrementing sequence number.
pub sequence_number: Vec<u8>,
/// List of topics this message belongs to.
///
/// Each message can belong to multiple topics at once.
pub topics: Vec<Topic>,
}
/// A subscription received by the floodsub system.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FloodsubSubscription {
/// Action to perform.
pub action: FloodsubSubscriptionAction,
/// The topic from which to subscribe or unsubscribe.
pub topic: Topic,
}
/// Action that a subscription wants to perform.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FloodsubSubscriptionAction {
/// The remote wants to subscribe to the given topic.
Subscribe,
/// The remote wants to unsubscribe from the given topic.
Unsubscribe,
}
| 35.565217 | 131 | 0.612247 |
acd95bc51504443451e6d30b0d7190b2e3721f15 | 173 | //! Module implements emulation of sound chip AY, Spectrum Beeper and Mixer
#[cfg(feature = "ay")]
pub mod ay;
pub mod sample;
pub(crate) mod beeper;
pub(crate) mod mixer;
| 21.625 | 75 | 0.722543 |
6a486476c70b4d77a4ef53d2c53b0490077cb7ba | 20,403 | use quickcheck::{Arbitrary, Gen};
use crate::{
account::Ledger as AccountLedger,
block::Block,
certificate::PoolId,
chaintypes::{ChainLength, ConsensusType, ConsensusVersion, HeaderId},
config::{Block0Date, ConfigParam, RewardParams},
date::BlockDate,
fee::{LinearFee, PerCertificateFee, PerVoteCertificateFee},
fragment::{config::ConfigParams, Fragment, FragmentId},
key::BftLeaderId,
leadership::genesis::LeadershipData,
ledger::{
Error, LeadersParticipationRecord, Ledger, LedgerParameters, Pots, RewardsInfoParameters,
},
milli::Milli,
rewards::{Ratio, TaxType},
stake::PoolsState,
testing::{
builders::GenesisPraosBlockBuilder,
data::{AddressData, AddressDataValue, StakePool, Wallet},
},
transaction::{Output, TxBuilder},
utxo::{Entry, Iter},
value::Value,
vote::CommitteeId,
};
use chain_addr::{Address, Discrimination};
use chain_crypto::*;
use chain_time::TimeEra;
use std::{
collections::HashMap,
num::{NonZeroU32, NonZeroU64},
};
#[derive(Clone)]
pub struct ConfigBuilder {
slot_duration: u8,
slots_per_epoch: u32,
active_slots_coeff: Milli,
discrimination: Discrimination,
linear_fee: Option<LinearFee>,
per_certificate_fee: Option<PerCertificateFee>,
per_vote_certificate_fee: Option<PerVoteCertificateFee>,
leaders: Vec<BftLeaderId>,
seed: u64,
committees_ids: Vec<CommitteeId>,
rewards: Value,
treasury: Value,
fees_in_treasury: bool,
treasury_params: TaxType,
reward_params: RewardParams,
block_content_max_size: Option<u32>,
kes_update_speed: u32,
block0_date: Block0Date,
consensus_version: ConsensusVersion,
pool_capping_ratio: Ratio,
}
impl ConfigBuilder {
pub fn new(seed: u64) -> Self {
ConfigBuilder {
slot_duration: 20,
slots_per_epoch: 21600,
active_slots_coeff: Milli::HALF,
discrimination: Discrimination::Test,
leaders: Vec::new(),
linear_fee: None,
per_certificate_fee: None,
per_vote_certificate_fee: None,
committees_ids: Vec::new(),
pool_capping_ratio: Ratio {
numerator: 0,
denominator: NonZeroU64::new(1).unwrap(),
},
fees_in_treasury: false,
seed,
rewards: Value(1_000_000),
reward_params: RewardParams::Linear {
constant: 100,
ratio: Ratio {
numerator: 1,
denominator: NonZeroU64::new(100).unwrap(),
},
epoch_start: 0,
epoch_rate: NonZeroU32::new(1).unwrap(),
},
treasury_params: TaxType::zero(),
treasury: Value(1_000),
block_content_max_size: None,
kes_update_speed: 3600 * 12,
block0_date: Block0Date(0),
consensus_version: ConsensusVersion::Bft,
}
}
pub fn with_committee_id(mut self, committee_id: CommitteeId) -> Self {
self.committees_ids.push(committee_id);
self
}
pub fn with_rewards(mut self, value: Value) -> Self {
self.rewards = value;
self
}
pub fn with_treasury(mut self, value: Value) -> Self {
self.treasury = value;
self
}
pub fn with_fees_in_treasury(mut self) -> Self {
self.fees_in_treasury = true;
self
}
pub fn with_pool_reward_participation_caping(
mut self,
numerator: u64,
denominator: u64,
) -> Self {
self.pool_capping_ratio = Ratio {
numerator,
denominator: NonZeroU64::new(denominator).unwrap(),
};
self
}
pub fn with_treasury_params(mut self, tax_type: TaxType) -> Self {
self.treasury_params = tax_type;
self
}
pub fn with_rewards_params(mut self, reward_params: RewardParams) -> Self {
self.reward_params = reward_params;
self
}
pub fn with_discrimination(mut self, discrimination: Discrimination) -> Self {
self.discrimination = discrimination;
self
}
pub fn with_slot_duration(mut self, slot_duration: u8) -> Self {
self.slot_duration = slot_duration;
self
}
pub fn with_leaders(mut self, leaders: &[BftLeaderId]) -> Self {
self.leaders.extend(leaders.iter().cloned());
self
}
pub fn with_fee(mut self, linear_fee: LinearFee) -> Self {
self.linear_fee = Some(linear_fee);
self
}
pub fn with_per_certificate_fee(mut self, per_certificate_fee: PerCertificateFee) -> Self {
self.per_certificate_fee = Some(per_certificate_fee);
self
}
pub fn with_per_vote_certificate_fee(
mut self,
per_vote_certificate_fee: PerVoteCertificateFee,
) -> Self {
self.per_vote_certificate_fee = Some(per_vote_certificate_fee);
self
}
pub fn with_slots_per_epoch(mut self, slots_per_epoch: u32) -> Self {
self.slots_per_epoch = slots_per_epoch;
self
}
pub fn with_active_slots_coeff(mut self, active_slots_coeff: Milli) -> Self {
self.active_slots_coeff = active_slots_coeff;
self
}
pub fn with_block_content_max_size(mut self, block_content_max_size: u32) -> Self {
self.block_content_max_size = Some(block_content_max_size);
self
}
pub fn with_kes_update_speed(mut self, kes_update_speed: u32) -> Self {
self.kes_update_speed = kes_update_speed;
self
}
pub fn with_block0_date(mut self, block0_date: Block0Date) -> Self {
self.block0_date = block0_date;
self
}
pub fn with_consensus_version(mut self, consensus_version: ConsensusType) -> Self {
self.consensus_version = consensus_version;
self
}
fn create_single_bft_leader() -> BftLeaderId {
let leader_prv_key: SecretKey<Ed25519Extended> = SecretKey::generate(rand_core::OsRng);
let leader_pub_key = leader_prv_key.to_public();
leader_pub_key.into()
}
pub fn normalize(&mut self) {
// TODO remove rng: make this creation deterministic
if self.leaders.is_empty() {
self.leaders.push(Self::create_single_bft_leader());
}
}
pub fn build(self) -> ConfigParams {
let mut ie = ConfigParams::new();
ie.push(ConfigParam::Discrimination(self.discrimination));
ie.push(ConfigParam::ConsensusVersion(self.consensus_version));
for leader_id in self.leaders.iter().cloned() {
ie.push(ConfigParam::AddBftLeader(leader_id));
}
ie.push(ConfigParam::RewardPot(self.rewards));
ie.push(ConfigParam::TreasuryAdd(self.treasury));
ie.push(ConfigParam::TreasuryParams(self.treasury_params));
ie.push(ConfigParam::RewardParams(self.reward_params.clone()));
ie.push(ConfigParam::FeesInTreasury(self.fees_in_treasury));
if self.pool_capping_ratio.numerator >= 1 {
ie.push(ConfigParam::PoolRewardParticipationCapping((
NonZeroU32::new(self.pool_capping_ratio.numerator as u32).unwrap(),
                NonZeroU32::new(self.pool_capping_ratio.denominator.get() as u32).unwrap(),
)));
}
if let Some(linear_fee) = self.linear_fee {
ie.push(ConfigParam::LinearFee(linear_fee));
}
if let Some(block_content_max_size) = self.block_content_max_size {
ie.push(ConfigParam::BlockContentMaxSize(block_content_max_size));
}
if self.per_certificate_fee.is_some() {
ie.push(ConfigParam::PerCertificateFees(
self.per_certificate_fee.clone().unwrap(),
));
}
if self.per_vote_certificate_fee.is_some() {
ie.push(ConfigParam::PerVoteCertificateFees(
self.per_vote_certificate_fee.clone().unwrap(),
));
}
for committee_id in self.committees_ids {
ie.push(ConfigParam::AddCommitteeId(committee_id.clone()));
}
ie.push(ConfigParam::Block0Date(self.block0_date));
ie.push(ConfigParam::SlotDuration(self.slot_duration));
ie.push(ConfigParam::ConsensusGenesisPraosActiveSlotsCoeff(
self.active_slots_coeff,
));
ie.push(ConfigParam::SlotsPerEpoch(self.slots_per_epoch));
ie.push(ConfigParam::KESUpdateSpeed(self.kes_update_speed));
ie
}
}
#[derive(Clone)]
pub struct LedgerBuilder {
cfg_builder: ConfigBuilder,
cfg_params: ConfigParams,
fragments: Vec<Fragment>,
certs: Vec<Fragment>,
faucets: Vec<AddressDataValue>,
utxo_declaration: Vec<UtxoDeclaration>,
}
pub type UtxoDeclaration = Output<Address>;
#[derive(Clone, Debug)]
pub struct UtxoDb {
db: HashMap<(FragmentId, u8), UtxoDeclaration>,
}
impl UtxoDb {
pub fn find_fragments(&self, decl: &UtxoDeclaration) -> Vec<(FragmentId, u8)> {
self.db
.iter()
.filter_map(|(k, v)| if v == decl { Some(k) } else { None })
.copied()
.collect()
}
pub fn get(&self, key: &(FragmentId, u8)) -> Option<&UtxoDeclaration> {
self.db.get(key)
}
}
impl LedgerBuilder {
pub fn from_config(mut cfg_builder: ConfigBuilder) -> Self {
cfg_builder.normalize();
let cfg_params = cfg_builder.clone().build();
Self {
cfg_builder,
cfg_params,
faucets: Vec::new(),
utxo_declaration: Vec::new(),
fragments: Vec::new(),
certs: Vec::new(),
}
}
pub fn fragment(mut self, f: Fragment) -> Self {
self.fragments.push(f);
self
}
pub fn fragments(mut self, f: &[Fragment]) -> Self {
self.fragments.extend_from_slice(f);
self
}
pub fn certs(mut self, f: &[Fragment]) -> Self {
self.certs.extend_from_slice(f);
self
}
    // Add a fragment that pre-fills the address with a specific value at ledger start
pub fn prefill_address(self, address: Address, value: Value) -> Self {
self.prefill_output(Output { address, value })
}
pub fn prefill_output(self, output: Output<Address>) -> Self {
let tx = TxBuilder::new()
.set_nopayload()
.set_ios(&[], &[output])
.set_witnesses(&[])
.set_payload_auth(&());
self.fragment(Fragment::Transaction(tx))
}
pub fn prefill_outputs(self, outputs: &[Output<Address>]) -> Self {
let tx = TxBuilder::new()
.set_nopayload()
.set_ios(&[], outputs)
.set_witnesses(&[])
.set_payload_auth(&());
self.fragment(Fragment::Transaction(tx))
}
pub fn faucet_value(mut self, value: Value) -> Self {
self.faucets.push(AddressDataValue::account(
self.cfg_builder.discrimination,
value,
));
self
}
pub fn initial_fund(mut self, fund: &AddressDataValue) -> Self {
if fund.is_utxo() {
self = self.utxos(&[fund.make_output()]);
} else {
self = self.faucet(&fund);
}
self
}
pub fn initial_funds(mut self, funds: &[AddressDataValue]) -> Self {
for fund in funds {
self = self.initial_fund(fund);
}
self
}
pub fn faucet(mut self, faucet: &AddressDataValue) -> Self {
self.faucets.push(faucet.clone());
self
}
pub fn faucets_wallets(mut self, faucets: Vec<&Wallet>) -> Self {
self.faucets
.extend(faucets.iter().cloned().map(|x| x.as_account()));
self
}
pub fn faucets(mut self, faucets: &[AddressDataValue]) -> Self {
self.faucets.extend(faucets.iter().cloned());
self
}
pub fn utxos(mut self, decls: &[UtxoDeclaration]) -> Self {
self.utxo_declaration.extend_from_slice(decls);
self
}
pub fn build(mut self) -> Result<TestLedger, Error> {
let block0_hash = HeaderId::hash_bytes(&[1, 2, 3]);
let outputs: Vec<Output<Address>> = self.faucets.iter().map(|x| x.make_output()).collect();
self = self.prefill_outputs(&outputs);
let utxodb = if !self.utxo_declaration.is_empty() {
let mut db = HashMap::new();
            // TODO: subdivide utxo_declaration into groups of 254 elements
            // and repeatedly create fragments
assert!(self.utxo_declaration.len() < 254);
let group = self.utxo_declaration;
{
let tx = TxBuilder::new()
.set_nopayload()
.set_ios(&[], &group)
.set_witnesses(&[])
.set_payload_auth(&());
let fragment = Fragment::Transaction(tx);
let fragment_id = fragment.hash();
for (idx, o) in group.iter().enumerate() {
let m = db.insert((fragment_id, idx as u8), o.clone());
assert!(m.is_none());
}
self.fragments.push(fragment);
}
UtxoDb { db }
} else {
UtxoDb { db: HashMap::new() }
};
let cfg = self.cfg_params.clone();
let mut fragments = Vec::new();
fragments.push(Fragment::Initial(self.cfg_params));
fragments.extend_from_slice(&self.fragments);
fragments.extend_from_slice(&self.certs);
let faucets = self.faucets;
Ledger::new(block0_hash, &fragments).map(|ledger| {
let parameters = ledger.get_ledger_parameters();
TestLedger {
cfg,
faucets,
ledger,
block0_hash,
utxodb,
parameters,
}
})
}
}
#[derive(Clone, Debug)]
pub struct TestLedger {
pub block0_hash: HeaderId,
pub cfg: ConfigParams,
pub faucets: Vec<AddressDataValue>,
pub ledger: Ledger,
pub parameters: LedgerParameters,
pub utxodb: UtxoDb,
}
impl TestLedger {
pub fn apply_transaction(&mut self, fragment: Fragment) -> Result<(), Error> {
let fragment_id = fragment.hash();
match fragment {
Fragment::Transaction(tx) => {
match self.ledger.clone().apply_transaction(
&fragment_id,
&tx.as_slice(),
&self.parameters,
) {
Err(err) => Err(err),
Ok((ledger, _)) => {
// TODO more bookkeeping for accounts and utxos
self.ledger = ledger;
Ok(())
}
}
}
_ => panic!("test ledger apply transaction only supports transaction type for now"),
}
}
pub fn apply_fragment(&mut self, fragment: &Fragment, date: BlockDate) -> Result<(), Error> {
self.ledger = self
.ledger
.clone()
.apply_fragment(&self.parameters, fragment, date)?;
Ok(())
}
pub fn apply_block(&mut self, block: Block) -> Result<(), Error> {
let header_meta = block.header.to_content_eval_context();
self.ledger = self.ledger.clone().apply_block(
&self.ledger.get_ledger_parameters(),
&block.contents,
&header_meta,
)?;
Ok(())
}
pub fn apply_protocol_changes(&mut self) -> Result<(), Error> {
Ok(self.ledger = self.ledger.apply_protocol_changes()?)
}
pub fn total_funds(&self) -> Value {
self.ledger
.get_total_value()
.expect("total ledger funds are too big")
}
pub fn find_utxo_for_address<'a>(
&'a self,
address_data: &AddressData,
) -> Option<Entry<'a, Address>> {
self.utxos()
.find(|x| x.output.address == address_data.address)
}
pub fn accounts(&self) -> &AccountLedger {
&self.ledger.accounts()
}
pub fn block0_hash(&self) -> &HeaderId {
&self.block0_hash
}
pub fn faucets(&self) -> Vec<AddressDataValue> {
self.faucets.clone()
}
pub fn utxos(&self) -> Iter<'_, Address> {
self.ledger.utxos()
}
pub fn fee(&self) -> LinearFee {
self.parameters.fees
}
pub fn chain_length(&self) -> ChainLength {
self.ledger.chain_length()
}
pub fn era(&self) -> &TimeEra {
self.ledger.era()
}
pub fn delegation(&self) -> PoolsState {
self.ledger.delegation().clone()
}
pub fn date(&self) -> BlockDate {
self.ledger.date()
}
    // Use it only for negative testing since it introduces bad state in the ledger
pub fn set_date(&mut self, date: BlockDate) {
self.ledger.date = date;
}
pub fn leaders_log(&self) -> LeadersParticipationRecord {
self.ledger.leaders_log.clone()
}
pub fn leaders_log_for(&self, pool_id: &PoolId) -> u32 {
*self
.leaders_log()
.iter()
.find(|record| *record.0 == *pool_id)
.unwrap()
.1
}
    // Use it only for negative testing since it introduces bad state in the ledger
pub fn increase_leader_log(&mut self, pool_id: &PoolId) {
self.ledger.leaders_log.increase_for(pool_id);
}
pub fn distribute_rewards(&mut self) -> Result<(), Error> {
match self.ledger.distribute_rewards(
&self.ledger.get_stake_distribution(),
&self.ledger.get_ledger_parameters(),
RewardsInfoParameters::default(),
) {
Err(err) => Err(err),
Ok((ledger, _)) => {
self.ledger = ledger;
Ok(())
}
}
}
pub fn forge_empty_block(&self, stake_pool: &StakePool) -> Block {
self.forge_block_with_fragments(stake_pool, Vec::new())
}
pub fn produce_empty_block(&mut self, stake_pool: &StakePool) -> Result<(), Error> {
self.produce_block(stake_pool, vec![])
}
pub fn produce_block(
&mut self,
stake_pool: &StakePool,
fragments: Vec<Fragment>,
) -> Result<(), Error> {
let block = self.forge_block_with_fragments(stake_pool, fragments);
self.apply_block(block)
}
pub fn forge_block_with_fragments(
&self,
stake_pool: &StakePool,
fragments: Vec<Fragment>,
) -> Block {
GenesisPraosBlockBuilder::new()
.with_date(self.date())
.with_fragments(fragments)
.with_chain_length(self.ledger.chain_length())
.with_parent_id(self.block0_hash)
.build(stake_pool, self.ledger.era())
}
pub fn forward_date(&mut self) {
self.ledger.date = self.ledger.date.next(self.ledger.era());
}
pub fn can_distribute_reward(&self) -> bool {
self.ledger.can_distribute_reward()
}
pub fn fast_forward_to(&mut self, date: BlockDate) {
self.set_date(date);
}
pub fn fire_leadership_event(
&mut self,
stake_pools: Vec<StakePool>,
fragments: Vec<Fragment>,
) -> Result<bool, Error> {
let selection = LeadershipData::new(self.date().epoch, &self.ledger);
for stake_pool in stake_pools {
if selection
.leader(
&stake_pool.id(),
&stake_pool.vrf().private_key(),
self.ledger.date(),
)
.expect("cannot calculate leader")
.is_some()
{
self.produce_block(&stake_pool, fragments)?;
return Ok(true);
}
}
self.forward_date();
Ok(false)
}
pub fn pots(&self) -> Pots {
self.ledger.pots.clone()
}
}
impl Into<Ledger> for TestLedger {
fn into(self) -> Ledger {
self.ledger
}
}
impl Arbitrary for TestLedger {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
LedgerBuilder::arbitrary(g).build().unwrap()
}
}
impl Arbitrary for Ledger {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
TestLedger::arbitrary(g).into()
}
}
| 29.828947 | 99 | 0.578346 |
efb131a37b1cb62fe7a3a0f2a25bcff58fceaeb8 | 2,064 | //! Delays
use cortex_m::peripheral::syst::SystClkSource;
use cortex_m::peripheral::SYST;
use crate::clock::GenericClockController;
use crate::ehal::blocking::delay::{DelayMs, DelayUs};
use crate::time::Hertz;
/// System timer (SysTick) as a delay provider
pub struct Delay {
sysclock: Hertz,
syst: SYST,
}
impl Delay {
/// Configures the system timer (SysTick) as a delay provider
pub fn new(mut syst: SYST, clocks: &mut GenericClockController) -> Self {
syst.set_clock_source(SystClkSource::Core);
Delay {
syst,
sysclock: clocks.gclk0().into(),
}
}
/// Releases the system timer (SysTick) resource
pub fn free(self) -> SYST {
self.syst
}
}
impl DelayMs<u32> for Delay {
fn delay_ms(&mut self, ms: u32) {
self.delay_us(ms * 1_000);
}
}
impl DelayMs<u16> for Delay {
fn delay_ms(&mut self, ms: u16) {
self.delay_ms(ms as u32);
}
}
impl DelayMs<u8> for Delay {
fn delay_ms(&mut self, ms: u8) {
self.delay_ms(ms as u32);
}
}
impl DelayUs<u32> for Delay {
fn delay_us(&mut self, us: u32) {
// The SysTick Reload Value register supports values between 1 and 0x00FFFFFF.
const MAX_RVR: u32 = 0x00FF_FFFF;
let mut total_rvr = us * (self.sysclock.0 / 1_000_000);
while total_rvr != 0 {
let current_rvr = if total_rvr <= MAX_RVR {
total_rvr
} else {
MAX_RVR
};
self.syst.set_reload(current_rvr);
self.syst.clear_current();
self.syst.enable_counter();
// Update the tracking variable while we are waiting...
total_rvr -= current_rvr;
while !self.syst.has_wrapped() {}
self.syst.disable_counter();
}
}
}
impl DelayUs<u16> for Delay {
fn delay_us(&mut self, us: u16) {
self.delay_us(us as u32)
}
}
impl DelayUs<u8> for Delay {
fn delay_us(&mut self, us: u8) {
self.delay_us(us as u32)
}
}
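// Hedged usage sketch (not taken from the crate's own examples): assumes `syst`
// is the `SYST` core peripheral and `clocks` is an already-configured
// `GenericClockController`, both obtained during board setup.
//
//     let mut delay = Delay::new(syst, &mut clocks);
//     delay.delay_ms(500u32); // busy-waits for roughly half a second
//     let syst = delay.free(); // release SysTick when the delay is no longer needed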
| 22.933333 | 86 | 0.582364 |
6798c0b75fe1c11527b0255d9d144bc54d09fd3e | 2,195 | use super::{common_loan_args, parse_common_loan_args};
use clap::{App, Arg, ArgMatches, SubCommand};
use loan::Loan;
use prettytable::row::Row;
pub const SUB_LOAN_TABLE: &str = "table";
const ARG_EVERY_PERIOD: &str = "every-period";
/// Returns the loan table sub command
pub fn loan_table_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUB_LOAN_TABLE)
.about("print the amotization table for a loan")
.arg(
Arg::with_name(ARG_EVERY_PERIOD)
.takes_value(true)
.required(true)
.index(1),
).args(common_loan_args().as_slice())
}
/// Execute the work and print results for the table sub command
///
/// # Arguments
/// * `matches` - The command matches to retrieve the paramters
pub fn execute_loan_table<'a>(matches: &ArgMatches<'a>) {
let loan = parse_common_loan_args(matches);
let every = matches
.value_of(ARG_EVERY_PERIOD)
.unwrap()
.parse::<u32>()
.unwrap();
println!(
"*** Information for a loan of {} during {} years with period of {} at {}% ***\n",
loan.capital,
loan.years,
loan.period,
loan.interest_rate_year * 100_f32
);
let mut loan_table = table!([
"At (periods)",
"At (~years)",
"Ending balance",
"Capital paid",
"Total interest",
"~Interest overhead ratio",
]);
loan_table.add_row(get_row(&loan, 1));
for at in (every..(loan.years as u32 * loan.period as u32)).step_by(every as usize) {
loan_table.add_row(get_row(&loan, at));
}
loan_table.add_row(get_row(&loan, loan.years as u32 * loan.period as u32));
loan_table.printstd();
}
fn get_row(loan: &Loan, at: u32) -> Row {
let years_round = format!("{:.1}", at as f32 / loan.period as f32);
let capital_paid = loan.capital_at(at);
let interest_paid = loan.interest_at(at);
row![
at,
years_round,
format!("{:.2}", loan.capital as f64 - capital_paid),
format!("{:.2}", capital_paid),
format!("{:.2}", interest_paid),
format!("{:.2}%", (interest_paid / capital_paid) * 100_f64),
]
}
| 31.357143 | 90 | 0.594989 |
89d0f04576af9739bed9310b4a2fd7eee77c0ecb | 2,051 | //! Run asynchronous actions.
mod action;
pub use action::Action;
use iced_futures::MaybeSend;
use std::fmt;
use std::future::Future;
/// A set of asynchronous actions to be performed by some runtime.
pub struct Command<T>(iced_futures::Command<Action<T>>);
impl<T> Command<T> {
/// Creates an empty [`Command`].
///
/// In other words, a [`Command`] that does nothing.
pub const fn none() -> Self {
Self(iced_futures::Command::none())
}
/// Creates a [`Command`] that performs a single [`Action`].
pub const fn single(action: Action<T>) -> Self {
Self(iced_futures::Command::single(action))
}
/// Creates a [`Command`] that performs the action of the given future.
pub fn perform<A>(
future: impl Future<Output = T> + 'static + MaybeSend,
f: impl Fn(T) -> A + 'static + MaybeSend,
) -> Command<A> {
use iced_futures::futures::FutureExt;
Command::single(Action::Future(Box::pin(future.map(f))))
}
/// Creates a [`Command`] that performs the actions of all the given
/// commands.
///
/// Once this command is run, all the commands will be executed at once.
pub fn batch(commands: impl IntoIterator<Item = Command<T>>) -> Self {
Self(iced_futures::Command::batch(
commands.into_iter().map(|Command(command)| command),
))
}
/// Applies a transformation to the result of a [`Command`].
pub fn map<A>(
self,
f: impl Fn(T) -> A + 'static + MaybeSend + Sync + Clone,
) -> Command<A>
where
T: 'static,
{
let Command(command) = self;
Command(command.map(move |action| action.map(f.clone())))
}
/// Returns all of the actions of the [`Command`].
pub fn actions(self) -> Vec<Action<T>> {
let Command(command) = self;
command.actions()
}
}
impl<T> fmt::Debug for Command<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Command(command) = self;
command.fmt(f)
}
}
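// Hedged usage sketch: `fetch_data` and `Message::DataFetched` are illustrative
// application-side names, not items defined by this module.
//
//     let cmd: Command<Message> = Command::perform(fetch_data(), Message::DataFetched);
//     // Several commands can be merged so the runtime executes them together:
//     let all = Command::batch(vec![cmd, Command::none()]);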
| 27.346667 | 76 | 0.590931 |
bfa236923b8a10feb627e9f71c6f2fed3e975304 | 2,451 | use std::fmt;
use std::io;
pub(crate) enum EncryptError {
Age(age::EncryptError),
InvalidRecipient(String),
Io(io::Error),
PassphraseMissing,
UnsupportedKey(String, age::ssh::UnsupportedKey),
}
impl From<age::EncryptError> for EncryptError {
fn from(e: age::EncryptError) -> Self {
match e {
age::EncryptError::Io(e) => EncryptError::Io(e),
_ => EncryptError::Age(e),
}
}
}
impl From<io::Error> for EncryptError {
fn from(e: io::Error) -> Self {
EncryptError::Io(e)
}
}
impl fmt::Display for EncryptError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
EncryptError::Age(e) => write!(f, "{}", e),
EncryptError::InvalidRecipient(recipient) => write!(
f, "Invalid recipient: {}", recipient.as_str()
),
EncryptError::Io(e) => write!(f, "{}", e),
EncryptError::PassphraseMissing => write!(f, "Passphrase not provided"),
EncryptError::UnsupportedKey(filename, k) => k.display(f, Some(filename.as_str())),
}
}
}
pub(crate) enum DecryptError {
Age(age::DecryptError),
IdentityNotFound(String),
Io(io::Error),
MissingIdentities,
UnsupportedKey(String, age::ssh::UnsupportedKey),
}
impl From<age::DecryptError> for DecryptError {
fn from(e: age::DecryptError) -> Self {
DecryptError::Age(e)
}
}
impl From<io::Error> for DecryptError {
fn from(e: io::Error) -> Self {
DecryptError::Io(e)
}
}
impl fmt::Display for DecryptError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DecryptError::Age(e) => write!(f, "{}", e),
DecryptError::IdentityNotFound(filename) => write!(
f, "Identity file not found: {}", filename.as_str()
),
DecryptError::Io(e) => write!(f, "{}", e),
DecryptError::MissingIdentities => write!(f, "Missing identities."),
DecryptError::UnsupportedKey(filename, k) => k.display(f, Some(filename.as_str())),
}
}
}
pub(crate) enum Error {
Decryption(DecryptError),
Encryption(EncryptError),
}
impl From<DecryptError> for Error {
fn from(e: DecryptError) -> Self {
Error::Decryption(e)
}
}
impl From<EncryptError> for Error {
fn from(e: EncryptError) -> Self {
Error::Encryption(e)
}
}
| 26.934066 | 95 | 0.577723 |
14e2e85625e793080ecd0f99952f6be3fd07ff74 | 1,579 | use crate::surface::Surface;
use std::error::Error;
pub trait Canvas {
fn get_surface<'a>(&'a mut self) -> &'a mut dyn Surface;
fn update(&mut self) -> Result<(), Box<dyn Error>>;
}
#[cfg(test)]
mod canvas_test {
use super::*;
use crate::color::{Color, ColorU8};
use crate::test::SurfaceMock;
struct CanvasMock {
pub fill_color: ColorU8,
pub canvas_color: ColorU8,
pub canvas_surface: SurfaceMock,
}
impl CanvasMock {
pub fn new() -> CanvasMock {
CanvasMock {
fill_color: ColorU8::new_gray(0),
canvas_color: ColorU8::new_gray(0),
canvas_surface: SurfaceMock::new(),
}
}
fn fill(&mut self, color: &dyn Color) {
self.fill_color.set(color);
}
}
impl Canvas for CanvasMock {
fn get_surface<'a>(&'a mut self) -> &'a mut dyn Surface {
&mut self.canvas_surface
}
fn update(&mut self) -> Result<(), Box<dyn Error>> {
self.canvas_color = self.fill_color.clone();
Ok(())
}
}
#[test]
fn test_fill() {
let mut canvas = CanvasMock::new();
canvas.fill(&ColorU8::new_rgb(128, 32, 64));
assert_eq!(ColorU8::new(128, 32, 64, 255), canvas.fill_color);
}
#[test]
fn test_update() {
let mut canvas = CanvasMock::new();
canvas.fill(&ColorU8::new_rgb(128, 32, 64));
canvas.update().unwrap();
assert_eq!(ColorU8::new(128, 32, 64, 255), canvas.canvas_color);
}
}
| 26.316667 | 72 | 0.543382 |
eb51cb01686aec1145a4009788729335b7f2eac4 | 3,134 | //! A singleton global object that provides support for shared memory. clients can
//! create wl_shm_pool objects using the create_pool request. at connection setup
//! time, the wl_shm object emits one or more format events to inform clients about
//! the valid pixel formats that can be used for buffers.
//
//
// GENERATED BY OUR WAYLAND-SCANNER. DO NOT EDIT!
//
//
#![allow(unused)]
#![allow(clippy::from_over_into)]
#![allow(clippy::match_single_binding)]
use crate::wl::{
Array, Connection, Handle, Interface, Message, NewId, ObjectId, Opcode, Payload, PayloadType,
RawMessage, SendError,
};
use alloc::rc::Rc;
use alloc::string::String;
use core::cell::RefCell;
use smallvec::smallvec;
use crate::wl::protocols::common::wl_buffer::WlBuffer;
use crate::wl::protocols::common::wl_callback::WlCallback;
use crate::wl::protocols::common::wl_compositor::WlCompositor;
use crate::wl::protocols::common::wl_data_device::WlDataDevice;
use crate::wl::protocols::common::wl_data_device_manager::WlDataDeviceManager;
use crate::wl::protocols::common::wl_data_offer::WlDataOffer;
use crate::wl::protocols::common::wl_data_source::WlDataSource;
use crate::wl::protocols::common::wl_display::WlDisplay;
use crate::wl::protocols::common::wl_keyboard::WlKeyboard;
use crate::wl::protocols::common::wl_output::WlOutput;
use crate::wl::protocols::common::wl_pointer::WlPointer;
use crate::wl::protocols::common::wl_region::WlRegion;
use crate::wl::protocols::common::wl_registry::WlRegistry;
use crate::wl::protocols::common::wl_seat::WlSeat;
use crate::wl::protocols::common::wl_shell::WlShell;
use crate::wl::protocols::common::wl_shell_surface::WlShellSurface;
use crate::wl::protocols::common::wl_shm::*;
use crate::wl::protocols::common::wl_shm_pool::WlShmPool;
use crate::wl::protocols::common::wl_subcompositor::WlSubcompositor;
use crate::wl::protocols::common::wl_subsurface::WlSubsurface;
use crate::wl::protocols::common::wl_surface::WlSurface;
use crate::wl::protocols::common::wl_touch::WlTouch;
use crate::wl::protocols::common::xdg_popup::XdgPopup;
use crate::wl::protocols::common::xdg_positioner::XdgPositioner;
use crate::wl::protocols::common::xdg_surface::XdgSurface;
use crate::wl::protocols::common::xdg_toplevel::XdgToplevel;
use crate::wl::protocols::common::xdg_wm_base::XdgWmBase;
pub trait WlShmExt {
/// Create a new wl_shm_pool object. the pool can be used to create shared memory
/// based buffer objects. the server will mmap size bytes of the passed file
/// descriptor, to use as backing memory for the pool.
fn create_pool(&self, id: NewId, fd: Handle, size: i32) -> Result<(), SendError>;
}
impl WlShmExt for WlShm {
/// Create a new wl_shm_pool object. the pool can be used to create shared memory
/// based buffer objects. the server will mmap size bytes of the passed file
/// descriptor, to use as backing memory for the pool.
fn create_pool(&self, id: NewId, fd: Handle, size: i32) -> Result<(), SendError> {
self.connection()
.borrow_mut()
.send(Request::CreatePool { id, fd, size }.into_raw(self.id()))
}
}
| 44.140845 | 97 | 0.734844 |
7a217055f54740d1b2f786b9d292b5b8e219fa31 | 232 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod core;
mod legacy;
pub(crate) mod stream;
pub(crate) mod types;
| 25.777778 | 73 | 0.74569 |
2f87f71a798792cba8b626eef45675863e3485c2 | 26,152 | extern crate byteorder;
extern crate cgmath;
extern crate disjoint_sets;
extern crate itertools;
extern crate nom;
use byteorder::*;
use cgmath::prelude::*;
use cgmath::{
AbsDiffEq, Basis3, ElementWise, InnerSpace, Point2, Point3, Quaternion, Rotation, Rotation3,
Vector3,
};
use disjoint_sets::*;
use itertools::Itertools;
use nom::character::streaming::*;
use nom::error::VerboseError;
use nom::multi::count;
use nom::number::streaming::*;
use nom::sequence::*;
use nom::*;
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::io::{BufWriter, Write};
use std::iter::FromIterator;
use std::path::Path;
use std::str::FromStr;
/// Possible errors in generating and reading bundle adjustment problems.
#[derive(Debug)]
pub enum Error {
/// Failure to parse problem from a file.
ParseError(String),
/// Problem contains no cameras or points.
EmptyProblem(String),
/// Failure read/write a problem.
IOError(std::io::Error),
}
impl From<std::io::Error> for Error {
fn from(e: std::io::Error) -> Self {
Error::IOError(e)
}
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Error::ParseError(s) => write!(f, "{}", s),
Error::EmptyProblem(s) => write!(f, "{}", s),
Error::IOError(s) => write!(f, "{}", s),
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
// Generic error, underlying cause isn't tracked.
None
}
}
#[test]
fn rodrigues_idempotent() {
let vecs = [
Vector3::new(1., 2., 3.),
Vector3::new(0., 0., 0.),
Vector3::new(-1.2, 0., 1.7),
];
for v in vecs.iter() {
let v_ = to_rodrigues(from_rodrigues(*v));
assert!((v_ - v).magnitude() < 1e-10, "{:?} != {:?}", v, v_);
}
}
/// Convert Rodrigues vector to a rotation.
fn from_rodrigues(x: Vector3<f64>) -> Basis3<f64> {
let theta2 = x.dot(x);
if theta2 > cgmath::Rad::<f64>::default_epsilon() {
let angle = cgmath::Rad(x.magnitude());
let axis = x.normalize();
cgmath::Basis3::from_axis_angle(axis, angle)
} else {
// taylor series approximation from ceres-solver
Basis3::from(Quaternion::from(cgmath::Matrix3::new(
1.0, x[2], -x[1], -x[2], 1.0, x[0], x[1], -x[0], 1.0,
)))
}
}
/// Convert rotation to Rodrigues vector.
fn to_rodrigues(x: Basis3<f64>) -> Vector3<f64> {
let q = Quaternion::from(x);
let angle = 2.0 * q.s.acos();
if (1. - q.s * q.s) < std::f64::EPSILON {
Vector3::zero()
} else {
let axis = q.v / (1.0 - q.s * q.s).sqrt();
axis.normalize() * angle
}
}
/// A projective camera.
///
/// The camera must have a center and a way to project points to and from the camera frame.
pub trait Camera {
/// Project a point from the world into the camera coordinate system
fn project_world(&self, p: &Point3<f64>) -> Point3<f64>;
/// Project a point from camera space into pixel coordinates
fn project(&self, p: cgmath::Point3<f64>) -> cgmath::Point2<f64>;
/// Create a camera from a position and direction.
fn from_position_direction(position: Point3<f64>, dir: Basis3<f64>) -> Self;
/// Center of the camera.
fn center(&self) -> Point3<f64>;
/// Transform a camera with a rotational and translational modification.
fn transform(self, delta_dir: Basis3<f64>, delta_loc: Vector3<f64>) -> Self;
/// Project a point in this camera's coordinate system into the world.
fn to_world(&self, p: Point3<f64>) -> Point3<f64>;
}
/// Camera expressed as Rx+t with intrinsics.
///
/// The camera points down the negative z axis. Up is the positive y axis.
#[derive(Debug, Clone)]
pub struct SnavelyCamera {
/// Translational parameter `t`
pub loc: Vector3<f64>,
/// Rotational parameter `R`
pub dir: Basis3<f64>,
    /// Intrinsics. `intrin[0]` is the focal length, `intrin[1]` is the second-order (quadratic) radial distortion coefficient, and `intrin[2]` is the fourth-order (quartic) coefficient.
pub intrin: Vector3<f64>,
}
impl Camera for SnavelyCamera {
fn project_world(&self, p: &Point3<f64>) -> cgmath::Point3<f64> {
self.dir.rotate_point(*p) + self.loc
}
fn project(&self, p: cgmath::Point3<f64>) -> cgmath::Point2<f64> {
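        // Perspective-divide onto the image plane (the camera looks down -z),
        // apply the polynomial radial distortion r = 1 + k1*|p|^2 + k2*|p|^4,
        // then scale by the focal length.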
let p_ = cgmath::Vector2::new(-p.x / p.z, -p.y / p.z);
let r = 1.0
+ self.distortion().0 * p_.magnitude2()
+ self.distortion().1 * p_.magnitude().powf(4.0);
Point2::from_vec(self.focal_length() * r * p_)
}
fn from_position_direction(position: Point3<f64>, dir: Basis3<f64>) -> Self {
SnavelyCamera {
loc: -1.0 * (dir.rotate_point(position)).to_vec(),
dir,
intrin: Vector3::new(1., 0., 0.),
}
}
fn center(&self) -> Point3<f64> {
Point3::from_vec(-(self.dir.invert().rotate_vector(self.loc)))
}
fn transform(self, delta_dir: Basis3<f64>, delta_loc: Vector3<f64>) -> Self {
SnavelyCamera {
dir: self.dir * delta_dir,
loc: -1.0 * self.dir.rotate_point(self.center() + delta_loc).to_vec(),
intrin: self.intrin,
}
}
fn to_world(&self, p: Point3<f64>) -> Point3<f64> {
self.dir.invert().rotate_point(p - self.loc)
}
}
impl SnavelyCamera {
/// Parse a camera from a vector of parameters. Order is rotation as a 3 element Rodrigues vector, translation, intrinsics.
pub fn from_vec(x: Vec<f64>) -> Self {
SnavelyCamera {
dir: from_rodrigues(Vector3::new(x[0], x[1], x[2])),
loc: Vector3::new(x[3], x[4], x[5]),
intrin: Vector3::new(x[6], x[7], x[8]),
}
}
    /// Serialize a camera to a vector of parameters. Order is rotation as a 3 element Rodrigues vector, translation, intrinsics.
pub fn to_vec(&self) -> Vec<f64> {
let r = to_rodrigues(self.dir);
vec![
r.x,
r.y,
r.z,
self.loc.x,
self.loc.y,
self.loc.z,
self.intrin.x,
self.intrin.y,
self.intrin.z,
]
}
/// R parameter of camera.
pub fn rotation(&self) -> Basis3<f64> {
self.dir
}
    /// Focal length of the camera (`intrin[0]`).
    pub fn focal_length(&self) -> f64 {
self.intrin[0]
}
    /// Radial distortion coefficients `(k1, k2)` of the camera (`intrin[1]`, `intrin[2]`).
    pub fn distortion(&self) -> (f64, f64) {
(self.intrin[1], self.intrin[2])
}
/// Adjust the intrinsics of the camera as `intrin + delta`.
pub fn modify_intrin(self, delta: Vector3<f64>) -> Self {
SnavelyCamera {
dir: self.dir,
loc: self.loc,
intrin: self.intrin + delta,
}
}
}
#[test]
fn test_project_world() {
let p = Point3::new(0.0, 0.0, -1.0);
let c = SnavelyCamera::from_vec(vec![0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]);
let p_camera = c.project_world(&p);
assert!(p_camera.z < 0.0);
assert!(p_camera.x == 0.0 && p_camera.y == 0.0);
}
#[test]
fn test_project() {
let p = Point3::new(0.0, 0.0, -1.0);
let c = SnavelyCamera::from_vec(vec![0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]);
let uv = c.project(c.project_world(&p));
assert!(uv.x == 0.0 && uv.y == 0.0);
}
#[test]
fn test_project_isomorphic() {
let p = Point3::new(1.0, 3.0, -1.0);
let c = SnavelyCamera::from_vec(vec![3.0, 5.0, -2.0, 0.5, -0.2, 0.1, 1.0, 0.0, 0.0]);
assert!(c.to_world(c.project_world(&p)).abs_diff_eq(&p, 1e-8));
}
/// Bundle adjustment problem composed of cameras, points, and observations of points by cameras.
///
/// Observations are stored as an array of arrays where `v[i][j] = (k, (u, v))` indicates that camera
/// `i` sees point `k` at `(u, v)` in the camera frame.
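///
/// For instance (purely illustrative values), if camera `0` observes point `2` at pixel
/// coordinates `(1.5, -0.25)`, then `vis_graph[0]` contains the entry `(2, (1.5, -0.25))`.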
#[derive(Debug, Clone)]
pub struct BAProblem<C: Camera> {
pub cameras: Vec<C>,
pub points: Vec<Point3<f64>>,
pub vis_graph: Vec<Vec<(usize, (f64, f64))>>,
}
impl<C: Camera> BAProblem<C> {
    /// Total reprojection error of the problem, computed as the `norm`-norm (e.g. `norm = 2.0`
    /// for the usual L2 error) of the residuals between each observed pixel location and the
    /// projection of the corresponding point.
pub fn total_reprojection_error(&self, norm: f64) -> f64 {
self.cameras
.iter()
.zip(&self.vis_graph)
.map(|(camera, adj)| {
adj.iter()
.map(|(o, (u, v))| {
let p = camera.project(camera.project_world(&self.points[*o]));
(p.x - u).abs().powf(norm) + (p.y - v).abs().powf(norm)
})
.sum::<f64>()
})
.sum::<f64>()
.powf(1. / norm)
}
/// Center of mass of cameras and points.
pub fn mean(&self) -> Vector3<f64> {
let num = (self.cameras.len() + self.points.len()) as f64;
self.cameras
.iter()
.map(|x| x.center())
.chain(self.points.clone().into_iter())
.fold(Vector3::new(0.0, 0.0, 0.0), |a, b| a + b.to_vec() / num)
}
/// Standard deviation of cameras and points from the center of mass.
pub fn std(&self) -> Vector3<f64> {
let num = (self.cameras.len() + self.points.len()) as f64;
let mean = self.mean();
(self
.cameras
.iter()
.map(|x| x.center())
.chain(self.points.clone().into_iter())
.map(|x| (x.to_vec() - mean).mul_element_wise(x.to_vec() - mean))
.sum::<Vector3<f64>>()
/ num)
.map(|x| x.sqrt())
}
/// Smallest and largest coordinates of the problem.
pub fn extent(&self) -> (Vector3<f64>, Vector3<f64>) {
let min = self
.cameras
.iter()
.map(|x| x.center())
.chain(self.points.clone().into_iter())
.fold(
Vector3::new(std::f64::INFINITY, std::f64::INFINITY, std::f64::INFINITY),
|x, y| Vector3::new(x.x.min(y.x), x.y.min(y.y), x.z.min(y.z)),
);
let max = self
.cameras
.iter()
.map(|x| x.center())
.chain(self.points.clone().into_iter())
.fold(
Vector3::new(
-std::f64::INFINITY,
-std::f64::INFINITY,
-std::f64::INFINITY,
),
|x, y| Vector3::new(x.x.max(y.x), x.y.max(y.y), x.z.max(y.z)),
);
(min, max)
}
/// Dimensions in x,y,z of the problem.
pub fn dimensions(&self) -> Vector3<f64> {
let (min, max) = self.extent();
max - min
}
    /// Create a new bundle adjustment problem from a set of cameras, points, and observations.
    /// Observations are tuples `(camera index, point index, u, v)`, meaning that the given camera
    /// sees the given point at `(u, v)` in its image.
pub fn new(cams: Vec<C>, points: Vec<Point3<f64>>, obs: Vec<(usize, usize, f64, f64)>) -> Self {
let mut vis_graph = vec![Vec::new(); cams.len()];
for (cam_i, p_i, obs_x, obs_y) in obs {
assert!(cam_i < cams.len());
assert!(p_i < points.len());
vis_graph[cam_i].push((p_i, (obs_x, obs_y)));
}
BAProblem {
cameras: cams,
points,
vis_graph,
}
}
    /// Create a new bundle adjustment problem from a set of cameras, points, and observations.
    /// Observations are given per camera: `obs[i]` lists the `(point index, (u, v))` pairs seen by
    /// camera `i`.
pub fn from_visibility(
cams: Vec<C>,
points: Vec<Point3<f64>>,
obs: Vec<Vec<(usize, (f64, f64))>>,
) -> Self {
assert!(cams.len() == obs.len());
for o in &obs {
for (ci, _) in o {
assert!(ci < &points.len());
}
}
BAProblem {
cameras: cams,
points,
vis_graph: obs,
}
}
pub fn num_points(&self) -> usize {
self.points.len()
}
pub fn num_cameras(&self) -> usize {
self.cameras.len()
}
/// Number of camera-point observations.
pub fn num_observations(&self) -> usize {
self.vis_graph.iter().map(|x| x.len()).sum()
}
}
impl<C: Camera + Clone> BAProblem<C> {
/// Select a subset of the problem with camera indices in `ci` and point indices in `pi`.
pub fn subset(self, ci: &[usize], pi: &[usize]) -> Self {
let cameras = ci
.iter()
.map(|i| self.cameras[*i].clone())
.collect::<Vec<_>>();
let points = pi.iter().map(|i| self.points[*i]).collect::<Vec<_>>();
// use i64 here so we can mark points that aren't in the final set
let mut point_indices: Vec<i64> = vec![-1; self.points.len()];
for (i, p) in pi.iter().enumerate() {
point_indices[*p] = i as i64;
}
let obs = ci
.iter()
.map(|i| self.vis_graph[*i].clone())
.map(|obs| {
obs.iter()
.filter(|(i, _)| point_indices[*i] >= 0)
.map(|(i, uv)| (point_indices[*i] as usize, *uv))
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
BAProblem {
cameras,
points,
vis_graph: obs,
}
}
    /// Remove cameras that see fewer than 4 points and points seen fewer than twice.
pub fn remove_singletons(self) -> Self {
// remove cameras that see less than 4 points
let ci = self
.vis_graph
.iter()
.enumerate()
.filter(|(_, v)| v.len() > 3)
.map(|(i, _)| i)
.collect::<Vec<_>>();
let mut point_count: Vec<i64> = vec![0; self.points.len()];
// TODO: skip cameras that we have already removed
for obs in self.vis_graph.iter() {
for (i, _) in obs.iter() {
point_count[*i] += 1;
}
}
// remove points seen less than twice
let pi = point_count
.iter()
.enumerate()
.filter(|(_, c)| **c > 1)
.map(|(i, _)| i)
.collect::<Vec<_>>();
self.subset(ci.as_slice(), pi.as_slice())
}
/// Get the largest connected component of cameras and points.
pub fn largest_connected_component(self) -> Self {
if self.num_cameras() == 0 {
return self;
}
let num_cameras = self.num_cameras();
let num_points = self.num_points();
let mut uf = UnionFind::new(self.num_points() + self.num_cameras());
        // the union-find index for point `j` is `num_cameras + j`
for (i, obs) in self.vis_graph.iter().enumerate() {
for (j, _) in obs {
let p = j + self.num_cameras();
if !uf.equiv(i, p) {
uf.union(i, p);
}
}
}
// find largest set
let sets = uf.to_vec();
let mut hm = HashMap::new();
for s in sets.iter() {
let x = hm.entry(*s).or_insert(0);
*x += 1;
}
let lcc_id = *(hm
.iter()
.sorted_by(|a, b| Ord::cmp(&b.1, &a.1))
.next()
.unwrap()
.0);
// compute component
// new cameras and points
let cameras = self
.cameras
.into_iter()
.zip(sets.iter())
.filter(|x| *x.1 == lcc_id)
.map(|x| x.0)
.collect::<Vec<_>>();
let points = self
.points
.into_iter()
.zip(sets[num_cameras..].iter())
.filter(|x| *x.1 == lcc_id)
.map(|x| x.0)
.collect::<Vec<_>>();
// map from old id to new
let point_ids = sets[num_cameras..(num_cameras + num_points)]
.iter()
.enumerate()
.filter(|x| *x.1 == lcc_id)
.map(|x| x.0);
let point_map =
HashMap::<usize, usize>::from_iter(point_ids.enumerate().map(|(x, y)| (y, x)));
// new camera id is implicitly handled by filtering
let vis_graph = self
.vis_graph
.into_iter()
.enumerate()
.filter(|x| sets[x.0] == lcc_id)
.map(|(_, obs)| {
obs.into_iter()
                    // point component ids live at offset `num_cameras` in `sets`
                    .filter(|x| sets[x.0 + num_cameras] == lcc_id)
.map(|(i, p)| (point_map[&i], p))
.collect()
})
.collect();
BAProblem {
cameras,
points,
vis_graph,
}
}
    /// Repeatedly take the largest connected component and drop under-constrained entities until
    /// a fixed point is reached, so the result contains only cameras viewing 4 or more points and
    /// points viewed at least twice.
pub fn cull(self) -> Self {
let mut nc = self.num_cameras();
let mut np = self.num_points();
let mut culled = self.largest_connected_component().remove_singletons();
while culled.num_cameras() != nc || culled.num_points() != np {
nc = culled.num_cameras();
np = culled.num_points();
culled = culled.largest_connected_component().remove_singletons();
}
culled
}
}
impl BAProblem<SnavelyCamera> {
/// Parse a bundle adjustment problem from a file in the Bundle Adjustment in the Large text
/// file format.
///
/// ```txt
/// <num_cameras> <num_points> <num_observations>
/// <camera_index_1> <point_index_1> <x_1> <y_1>
/// ...
/// <camera_index_num_observations> <point_index_num_observations> <x_num_observations> <y_num_observations>
/// <camera_1>
/// ...
/// <camera_num_cameras>
/// <point_1>
/// ...
/// <point_num_points>
/// ```
/// where cameras are:
/// ```txt
/// <R_1>
/// <R_2>
/// <R_3>
/// <t_1>
/// <t_2>
/// <t_3>
/// <focal length>
/// <distortion^2>
/// <distortion^4>
/// ```
pub fn from_file_text(filepath: &Path) -> Result<Self, Error> {
fn parse_internal(
input: &str,
) -> IResult<&str, BAProblem<SnavelyCamera>, VerboseError<&str>> {
fn unsigned(input: &str) -> IResult<&str, usize, VerboseError<&str>> {
nom::combinator::map_res(digit1, usize::from_str)(input)
}
let (input, num_cameras) = unsigned(input)?;
let (input, _) = multispace0(input)?;
let (input, num_points) = unsigned(input)?;
let (input, _) = multispace0(input)?;
let (input, num_observations) = unsigned(input)?;
let (input, _) = multispace0(input)?;
let (input, observations) = count(
tuple((
preceded(multispace0, unsigned),
preceded(multispace0, unsigned),
preceded(multispace0, double),
preceded(multispace0, double),
)),
num_observations,
)(input)?;
let camera = nom::combinator::map(count(preceded(multispace0, double), 9), |x| {
SnavelyCamera::from_vec(x)
});
let (input, cameras) = count(camera, num_cameras)(input)?;
let point = nom::combinator::map(count(preceded(multispace0, double), 3), |x| {
Point3::new(x[0], x[1], x[2])
});
let (input, points) = count(point, num_points)(input)?;
Ok((input, BAProblem::new(cameras, points, observations)))
}
let mut file = File::open(filepath)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
parse_internal(contents.as_ref())
.map(|x| x.1)
.map_err(|x| match x {
nom::Err::Error(e) | nom::Err::Failure(e) => {
Error::ParseError(nom::error::convert_error(contents.as_ref(), e))
}
nom::Err::Incomplete(x) => Error::ParseError(format!("{:?}", x)),
})
}
/// Parse a bundle adjustment problem from a file in the Bundle Adjustment in the Large binary
/// file format.
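    ///
    /// The binary layout mirrors the parser below: three big-endian `u64` counts (cameras,
    /// points, observations), then for each camera a `u64` observation count followed by
    /// `(point index: u64, u: f64, v: f64)` entries, then 9 `f64` parameters per camera and
    /// 3 `f64` coordinates per point.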
pub fn from_file_binary(filepath: &Path) -> Result<Self, Error> {
fn parse_internal(
input: &[u8],
) -> IResult<&[u8], BAProblem<SnavelyCamera>, VerboseError<&[u8]>> {
let (input, num_cameras) = be_u64(input)?;
let (input, num_points) = be_u64(input)?;
let (input, _num_observations) = be_u64(input)?;
let (input, observations) = count(
|input| {
let (input, num_obs) = be_u64(input)?;
let (input, obs) = count(
tuple((
nom::combinator::map(be_u64, |x| x as usize),
tuple((be_f64, be_f64)),
)),
num_obs as usize,
)(input)?;
Ok((input, obs))
},
num_cameras as usize,
)(input)?;
let (input, cameras) = count(
|input| {
let (input, v) = count(be_f64, 9)(input)?;
Ok((input, SnavelyCamera::from_vec(v)))
},
num_cameras as usize,
)(input)?;
let (input, points) = count(
|input| {
let (input, p) = count(be_f64, 3)(input)?;
Ok((input, Point3::new(p[0], p[1], p[2])))
},
num_points as usize,
)(input)?;
Ok((
input,
BAProblem {
cameras,
points,
vis_graph: observations,
},
))
}
let mut file = File::open(filepath)?;
let mut contents = Vec::new();
file.read_to_end(&mut contents)?;
parse_internal(contents.as_slice())
.map(|x| x.1)
.map_err(|x| match x {
nom::Err::Error(_) | nom::Err::Failure(_) => {
Error::ParseError("Binary parse error".to_string())
}
nom::Err::Incomplete(x) => Error::ParseError(format!("{:?}", x)),
})
}
/// Parse a bundle adjustment problem from a file in the Bundle Adjustment in the Large format.
/// Supports both binary and text formats.
pub fn from_file(path: &Path) -> Result<Self, Error> {
        match path
            .extension()
            .and_then(|ext| ext.to_str())
            .ok_or_else(|| {
                Error::IOError(std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "file does not have a recognizable extension",
                ))
            })? {
"bal" => Self::from_file_text(path),
"bbal" => Self::from_file_binary(path),
ext => Err(Error::IOError(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
format!("unknown file extension {}", ext),
))),
}
}
/// Write problem in Bundle Adjustment in the Large text format.
pub fn write_text(&self, path: &std::path::Path) -> Result<(), std::io::Error> {
        let mut file = BufWriter::new(File::create(path)?);
writeln!(
&mut file,
"{} {} {}",
self.cameras.len(),
self.points.len(),
self.vis_graph.iter().map(|x| x.len()).sum::<usize>()
)?;
for (i, obs) in self.vis_graph.iter().enumerate() {
for (p, (u, v)) in obs {
writeln!(&mut file, "{} {} {} {}", i, p, u, v)?;
}
}
for camera in &self.cameras {
writeln!(&mut file, "{}", camera.to_vec().iter().join(" "))?;
}
for point in &self.points {
writeln!(&mut file, "{} {} {}", point[0], point[1], point[2])?;
}
Ok(())
}
/// Write problem in Bundle Adjustment in the Large binary format.
pub fn write_binary(&self, path: &std::path::Path) -> Result<(), std::io::Error> {
        let mut file = BufWriter::new(File::create(path)?);
file.write_u64::<BigEndian>(self.cameras.len() as u64)?;
file.write_u64::<BigEndian>(self.points.len() as u64)?;
file.write_u64::<BigEndian>(self.vis_graph.iter().map(|x| x.len()).sum::<usize>() as u64)?;
for obs in self.vis_graph.iter() {
file.write_u64::<BigEndian>(obs.len() as u64)?;
for (p, (u, v)) in obs {
file.write_u64::<BigEndian>(*p as u64)?;
file.write_f64::<BigEndian>(*u as f64)?;
file.write_f64::<BigEndian>(*v as f64)?;
}
}
for camera in &self.cameras {
for x in camera.to_vec().into_iter() {
file.write_f64::<BigEndian>(x)?;
}
}
for point in &self.points {
file.write_f64::<BigEndian>(point[0] as f64)?;
file.write_f64::<BigEndian>(point[1] as f64)?;
file.write_f64::<BigEndian>(point[2] as f64)?;
}
Ok(())
}
/// Write BAProblem to a file in BAL format. Text or binary format is automatically chosen from
/// the filename extension. `.bal` -> text, `.bbal` -> binary.
pub fn write(&self, path: &std::path::Path) -> Result<(), std::io::Error> {
        match path
            .extension()
            .and_then(|ext| ext.to_str())
            .ok_or_else(|| {
                std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "file does not have a recognizable extension",
                )
            })? {
"bal" => self.write_text(path),
"bbal" => self.write_binary(path),
ext => Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
format!("unknown file extension {}", ext),
)),
}
}
}
impl<C> std::fmt::Display for BAProblem<C>
where
C: Camera,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Bundle Adjustment Problem with {} cameras, {} points, and {} observations",
self.num_cameras(),
self.num_points(),
self.num_observations()
)
}
}
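// A hedged end-to-end sketch (not part of the original API surface): load a BAL problem, keep
// only its well-constrained core, and report the usual L2 reprojection error. The path and
// function name here are illustrative only.
#[allow(dead_code)]
fn example_load_and_cull(path: &std::path::Path) -> Result<(), Error> {
    let problem: BAProblem<SnavelyCamera> = BAProblem::from_file(path)?;
    let culled = problem.cull();
    println!(
        "{} cameras, {} points, L2 reprojection error {}",
        culled.num_cameras(),
        culled.num_points(),
        culled.total_reprojection_error(2.0)
    );
    Ok(())
}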
| 32.937028 | 147 | 0.510592 |
d5ad78dc59e77e688ead9892efe3bcef1d353c06 | 2,246 | use crate::relayer::Relayer;
use ckb_core::transaction::Transaction;
use ckb_network::{CKBProtocolContext, PeerIndex};
use ckb_protocol::{cast, BlockTransactions, FlatbuffersVectorIterator};
use ckb_store::ChainStore;
use failure::Error as FailureError;
use std::collections::hash_map::Entry;
use std::convert::TryInto;
use std::sync::Arc;
pub struct BlockTransactionsProcess<'a, CS> {
message: &'a BlockTransactions<'a>,
relayer: &'a Relayer<CS>,
nc: Arc<dyn CKBProtocolContext>,
peer: PeerIndex,
}
impl<'a, CS: ChainStore + 'static> BlockTransactionsProcess<'a, CS> {
pub fn new(
message: &'a BlockTransactions,
relayer: &'a Relayer<CS>,
nc: Arc<dyn CKBProtocolContext>,
peer: PeerIndex,
) -> Self {
BlockTransactionsProcess {
message,
relayer,
nc,
peer,
}
}
pub fn execute(self) -> Result<(), FailureError> {
let block_hash = cast!(self.message.block_hash())?.try_into()?;
if let Entry::Occupied(mut pending) = self
.relayer
.shared()
.pending_compact_blocks()
.entry(block_hash)
{
let (compact_block, peers_set) = pending.get_mut();
if peers_set.remove(&self.peer) {
let transactions: Vec<Transaction> =
FlatbuffersVectorIterator::new(cast!(self.message.transactions())?)
.map(TryInto::try_into)
.collect::<Result<_, FailureError>>()?;
let ret = {
let chain_state = self.relayer.shared.lock_chain_state();
self.relayer
.reconstruct_block(&chain_state, compact_block, transactions)
};
// TODO Add this (compact_block, peer) into RecentRejects if reconstruct_block failed?
// TODO Add this block into RecentRejects if accept_block failed?
if let Ok(block) = ret {
pending.remove();
self.relayer
.accept_block(self.nc.as_ref(), self.peer, &Arc::new(block));
}
}
}
Ok(())
}
}
| 33.522388 | 102 | 0.558326 |
1e26ea3c68b5e566b4f216ee832d6e81c6ca37c3 | 7,245 | use abstutil::Tags;
use map_model::{Direction, EditRoad, LaneSpec, LaneType};
/// Returns the index where the new lane was inserted
pub fn add_new_lane(road: &mut EditRoad, lt: LaneType, osm_tags: &Tags) -> usize {
let mut dir = Direction::Fwd;
let mut idx = 0;
match lt {
LaneType::Driving => {
dir = determine_lane_dir(road, lt, true);
// In the middle (where the direction changes)
idx = road
.lanes_ltr
.windows(2)
.position(|pair| pair[0].dir != pair[1].dir)
.map(|x| x + 1)
.unwrap_or(road.lanes_ltr.len());
}
LaneType::Biking | LaneType::Bus | LaneType::Parking | LaneType::Construction => {
let relevant_lanes: Vec<&LaneSpec> =
road.lanes_ltr.iter().filter(|x| x.lt == lt).collect();
dir = if !relevant_lanes.is_empty() {
// When a lane already exists, then default to the direction on the other side of
// the road
relevant_lanes[0].dir.opposite()
} else {
// If no lanes exist, then default to the majority direction, to help deal with
// one-way streets
determine_lane_dir(road, lt, false)
};
// Place on the dir side, before any sidewalk
idx = default_outside_lane_placement(road, dir);
}
LaneType::Sidewalk => {
// Place where it's missing
if !road.lanes_ltr[0].lt.is_walkable() {
dir = road.lanes_ltr[0].dir;
idx = 0;
} else {
dir = road.lanes_ltr.last().unwrap().dir;
idx = road.lanes_ltr.len();
}
}
LaneType::Buffer(_) => {
// Look for the bike lane that's missing a buffer
let mut fwd_bike = None;
let mut back_bike = None;
for (idx, spec) in road.lanes_ltr.iter().enumerate() {
if spec.lt == LaneType::Biking {
if spec.dir == Direction::Fwd {
fwd_bike = Some(idx);
} else {
back_bike = Some(idx);
}
}
}
// TODO This is US-centric, since it assumes the Fwd direction is on the right. We
// should probably decompose into sides like maybe_add_bike_lanes.
if let Some(i) = fwd_bike {
                // Add the buffer just to the left of the bike lane, unless a buffer is already
                // there (or the bike lane is unexpectedly the leftmost lane).
                if i
                    .checked_sub(1)
                    .and_then(|left| road.lanes_ltr.get(left))
                    .map(|spec| !matches!(spec.lt, LaneType::Buffer(_)))
                    .unwrap_or(false)
{
dir = Direction::Fwd;
idx = i;
}
}
if let Some(i) = back_bike {
if road
.lanes_ltr
.get(i + 1)
.map(|spec| !matches!(spec.lt, LaneType::Buffer(_)))
.unwrap_or(false)
{
dir = Direction::Back;
idx = i + 1;
}
}
}
_ => unreachable!(),
};
road.lanes_ltr.insert(
idx,
LaneSpec {
lt,
dir,
width: LaneSpec::typical_lane_widths(lt, osm_tags)[0].0,
},
);
idx
}
/// Place the new lane on the outside of the road for its direction, unless the outermost lane is
/// walkable, in which case place it just inside that walkable lane.
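/// (Illustrative example: for a `sidewalk | driving back | driving fwd | sidewalk` road, a new
/// forward lane lands at index 3, just inside the right-hand sidewalk.)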
fn default_outside_lane_placement(road: &mut EditRoad, dir: Direction) -> usize {
if road.lanes_ltr[0].dir == dir {
if road.lanes_ltr[0].lt.is_walkable() {
1
} else {
0
}
} else if road.lanes_ltr.last().unwrap().lt.is_walkable() {
road.lanes_ltr.len() - 1
} else {
road.lanes_ltr.len()
}
}
/// Decide the direction for a new lane of type `lt`: when `minority` is true, return the
/// direction that currently has fewer `lt` lanes; otherwise return the direction with more.
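/// (Illustrative example: with three existing driving lanes of which two point forward, a call
/// with `minority = true` returns `Direction::Back`, and `minority = false` returns
/// `Direction::Fwd`.)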
fn determine_lane_dir(road: &mut EditRoad, lt: LaneType, minority: bool) -> Direction {
if (road
.lanes_ltr
.iter()
.filter(|x| x.dir == Direction::Fwd && x.lt == lt)
.count() as f64
/ road.lanes_ltr.iter().filter(|x| x.lt == lt).count() as f64)
<= 0.5
{
if minority {
Direction::Fwd
} else {
Direction::Back
}
} else if minority {
Direction::Back
} else {
Direction::Fwd
}
}
#[cfg(test)]
mod tests {
use super::*;
use map_model::BufferType;
#[test]
fn test_add_new_lane() {
let mut ok = true;
for (description, input_lt, input_dir, new_lt, expected_lt, expected_dir) in vec![
(
"Two-way with parking, adding bike lane to first side",
"spddps",
"vvv^^^",
LaneType::Biking,
// TODO Current heuristics put it between parking and sidewalk, but this isn't
// right
"spddpbs",
"vvv^^^^",
),
(
"Two-way with parking, adding bike lane to second side",
"spddpbs",
"vvv^^^^",
LaneType::Biking,
// TODO Current heuristics put it between parking and sidewalk, but this isn't
// right
"sbpddpbs",
"vvvv^^^^",
),
(
"Add driving lane, balanced numbers",
"sdds",
"vv^^",
LaneType::Driving,
"sddds",
"vv^^^",
),
(
"Add driving lane, imbalanced",
"sddds",
"vv^^^",
LaneType::Driving,
"sdddds",
"vvv^^^",
),
(
"Add buffer, one bike lane fwd",
"sddbs",
"vv^^^",
LaneType::Buffer(BufferType::Stripes),
"sdd|bs",
"vv^^^^",
),
(
"Add buffer, one bike lane back",
"sbdds",
"vvv^^",
LaneType::Buffer(BufferType::Stripes),
"sb|dds",
"vvvv^^",
),
(
"Add second buffer",
"sbdd|bs",
"vvv^^^^",
LaneType::Buffer(BufferType::Stripes),
"sb|dd|bs",
"vvvv^^^^",
),
] {
let input = EditRoad::create_for_test(input_lt, input_dir);
let mut actual_output = input.clone();
add_new_lane(&mut actual_output, new_lt, &Tags::empty());
actual_output.check_lanes_ltr(
description.to_string(),
input_lt,
input_dir,
expected_lt,
expected_dir,
&mut ok,
);
}
assert!(ok);
}
}
| 32.488789 | 98 | 0.446101 |
4a559b3cf325a239fe6fbe2136d7e2f50284993f | 3,622 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::Result;
use mvhashmap::{MVHashMapView, Version};
use std::{fmt::Debug, hash::Hash};
/// The execution result of a transaction
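///
/// A scheduler typically treats `Success` and `SkipRest` as committed output, `Abort` as a
/// hard failure, and `Retry` as a signal to re-queue the transaction once the blocking
/// version has committed. A rough sketch (the `commit`, `scheduler` and `requeue` names are
/// illustrative, not part of this crate):
///
/// ```ignore
/// match status {
///     ExecutionStatus::Success(out) | ExecutionStatus::SkipRest(out) => commit(out),
///     ExecutionStatus::Abort(err) => return Err(err),
///     ExecutionStatus::Retry(blocked_by) => scheduler.requeue(txn, blocked_by),
/// }
/// ```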
#[derive(Debug)]
pub enum ExecutionStatus<T, E> {
/// Transaction was executed successfully.
Success(T),
    /// Transaction hit a non-recoverable error during execution; halt the execution and propagate
/// the error back to the caller.
Abort(E),
/// Transaction was executed successfully, but will skip the execution of the trailing
/// transactions in the list
SkipRest(T),
/// Transaction has an unexpected read dependency that is blocked by `Version` transaction.
/// Put the transaction back to the scheduler.
Retry(/*blocked_by*/ Version),
}
/// Trait that defines a transaction that could be parallel executed by the scheduler. Each
/// transaction will write to a key value storage as their side effect.
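///
/// A minimal sketch of an implementor (the concrete types below are purely illustrative):
///
/// ```ignore
/// #[derive(Clone)]
/// struct Transfer {
///     from: u64,
///     to: u64,
///     amount: u64,
/// }
///
/// impl Transaction for Transfer {
///     type Key = u64;   // account id
///     type Value = u64; // balance
/// }
/// ```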
pub trait Transaction: Clone + Sync + Send + 'static {
type Key: PartialOrd + Send + Sync + Clone + Hash + Eq;
type Value: Send + Sync;
}
/// Trait for inferencing the read and write set of a transaction.
pub trait ReadWriteSetInferencer: Sync {
/// Type of transaction and its associated key.
type T: Transaction;
/// Get the read set of a transaction. Read set estimation is used simply to improve the
/// performance by exposing the read dependencies. Imprecise estimation won't cause execution
/// failure.
fn infer_reads(&self, txn: &Self::T) -> Result<Vec<<Self::T as Transaction>::Key>>;
/// Get the write set of a transaction. Write set estimation is crucial to the execution
/// correctness as there's no way to resolve read-after-write conflict where a write is
    /// unexpected. Thus we require the write set to be an over-approximation for now.
fn infer_writes(&self, txn: &Self::T) -> Result<Vec<<Self::T as Transaction>::Key>>;
}
/// Trait for single threaded transaction executor.
// TODO: Sync should not be required. Sync is only introduced because this trait occurs as a phantom type of executor struct.
pub trait ExecutorTask: Sync {
/// Type of transaction and its associated key and value.
type T: Transaction;
/// The output of a transaction. This should contain the side effect of this transaction.
type Output: TransactionOutput<T = Self::T>;
/// Type of error when the executor failed to process a transaction and needs to abort.
type Error: Clone + Send + Sync;
    /// Type used to initialize the single-threaded transaction executor. `Copy` and `Sync` are
    /// required because we will create an instance of the executor on each individual thread.
type Argument: Sync + Copy;
/// Create an instance of the transaction executor.
fn init(args: Self::Argument) -> Self;
/// Execute one single transaction given the view of the current state.
fn execute_transaction(
&self,
view: MVHashMapView<<Self::T as Transaction>::Key, <Self::T as Transaction>::Value>,
txn: &Self::T,
) -> ExecutionStatus<Self::Output, Self::Error>;
}
/// Trait for execution result of a transaction.
pub trait TransactionOutput: Send + Sync {
/// Type of transaction and its associated key and value.
type T: Transaction;
/// Get the side effect of a transaction from its output.
fn get_writes(
&self,
) -> Vec<(
<Self::T as Transaction>::Key,
<Self::T as Transaction>::Value,
)>;
/// Execution output for transactions that comes after SkipRest signal.
fn skip_output() -> Self;
}
| 40.244444 | 125 | 0.695748 |
110fa5a5ce1bd350e8566c89cd13eb629a702ddb | 5,463 | use std::io::Write;
use std::sync::Arc;
use anyhow::Result;
use nom::multi::count;
use nom::number::complete::le_i64;
use nom::IResult;
use crate::fst_impls::vector_fst::VectorFstState;
use crate::fst_impls::VectorFst;
use crate::fst_properties::FstProperties;
use crate::fst_traits::{CoreFst, ExpandedFst, Fst, MutableFst, SerializableFst};
use crate::parsers::bin_fst::fst_header::{FstFlags, FstHeader, OpenFstString, FST_MAGIC_NUMBER};
use crate::parsers::bin_fst::utils_parsing::{
parse_bin_fst_tr, parse_final_weight, parse_start_state,
};
use crate::parsers::bin_fst::utils_serialization::write_bin_fst_tr;
use crate::parsers::nom_utils::NomCustomError;
use crate::parsers::text_fst::ParsedTextFst;
use crate::parsers::write_bin_i64;
use crate::semirings::SerializableSemiring;
use crate::{StateId, Tr, Trs, TrsVec, EPS_LABEL};
use crate::prelude::SerializeBinary;
impl<W: SerializableSemiring> SerializeBinary for VectorFst<W> {
fn parse_binary(i: &[u8]) -> IResult<&[u8], Self, NomCustomError<&[u8]>> {
let (i, header) = FstHeader::parse(
i,
VECTOR_MIN_FILE_VERSION,
Some(VectorFst::<W>::fst_type()),
Tr::<W>::tr_type(),
)?;
let (i, states) = count(parse_vector_fst_state, header.num_states as usize)(i)?;
Ok((
i,
VectorFst {
start_state: parse_start_state(header.start),
states,
isymt: header.isymt,
osymt: header.osymt,
properties: FstProperties::from_bits_truncate(header.properties),
},
))
}
fn write_binary<WB: Write>(&self, writer: &mut WB) -> Result<()> {
let num_trs: usize = (0..self.num_states())
.map(|s: usize| unsafe { self.num_trs_unchecked(s as StateId) })
.sum();
let mut flags = FstFlags::empty();
if self.input_symbols().is_some() {
flags |= FstFlags::HAS_ISYMBOLS;
}
if self.output_symbols().is_some() {
flags |= FstFlags::HAS_OSYMBOLS;
}
let hdr = FstHeader {
magic_number: FST_MAGIC_NUMBER,
fst_type: OpenFstString::new(Self::fst_type()),
tr_type: OpenFstString::new(Tr::<W>::tr_type()),
version: 2i32,
// TODO: Set flags if the content is aligned
flags,
// Static properties are added to the property bits to be compliant with OpenFst format.
properties: self.properties.bits() | VectorFst::<W>::static_properties(),
start: self.start_state.map(|v| v as i64).unwrap_or(-1),
num_states: self.num_states() as i64,
num_trs: num_trs as i64,
isymt: self.input_symbols().cloned(),
osymt: self.output_symbols().cloned(),
};
hdr.write(writer)?;
// FstBody
for state in 0..self.num_states() {
let state = state as StateId;
let f_weight = unsafe { self.final_weight_unchecked(state).unwrap_or_else(W::zero) };
f_weight.write_binary(writer)?;
write_bin_i64(writer, unsafe { self.num_trs_unchecked(state) } as i64)?;
for tr in unsafe { self.get_trs_unchecked(state).trs() } {
write_bin_fst_tr(writer, tr)?;
}
}
Ok(())
}
}
impl<W: SerializableSemiring> SerializableFst<W> for VectorFst<W> {
fn fst_type() -> String {
"vector".to_string()
}
fn from_parsed_fst_text(parsed_fst_text: ParsedTextFst<W>) -> Result<Self> {
let start_state = parsed_fst_text.start();
let num_states = parsed_fst_text.num_states();
let states = vec![VectorFstState::<W>::new(); num_states];
let mut fst = VectorFst {
states,
start_state,
isymt: None,
osymt: None,
properties: FstProperties::empty(),
};
for transition in parsed_fst_text.transitions.into_iter() {
let weight = transition.weight.unwrap_or_else(W::one);
let tr = Tr::new(
transition.ilabel,
transition.olabel,
weight,
transition.nextstate,
);
fst.add_tr(transition.state, tr)?;
}
for final_state in parsed_fst_text.final_states.into_iter() {
let weight = final_state.weight.unwrap_or_else(W::one);
fst.set_final(final_state.state, weight)?;
}
// Compute properties. Should be moved elsewhere
fst.compute_and_update_properties_all()?;
Ok(fst)
}
}
static VECTOR_MIN_FILE_VERSION: i32 = 2;
#[derive(Debug, PartialEq)]
struct Transition {
ilabel: i32,
olabel: i32,
weight: f32,
nextstate: i32,
}
fn parse_vector_fst_state<W: SerializableSemiring>(
i: &[u8],
) -> IResult<&[u8], VectorFstState<W>, NomCustomError<&[u8]>> {
let (i, final_weight) = W::parse_binary(i)?;
let (i, num_trs) = le_i64(i)?;
let (i, trs) = count(parse_bin_fst_tr, num_trs as usize)(i)?;
let niepsilons = trs.iter().filter(|t| t.ilabel == EPS_LABEL).count();
let noepsilons = trs.iter().filter(|t| t.olabel == EPS_LABEL).count();
Ok((
i,
VectorFstState {
final_weight: parse_final_weight(final_weight),
trs: TrsVec(Arc::new(trs)),
niepsilons,
noepsilons,
},
))
}
| 33.722222 | 100 | 0.59546 |
0ac0dc396cc1a304c566ca8fe41bb56c3696649c | 32,714 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Atomic types
//!
//! Atomic types provide primitive shared-memory communication between
//! threads, and are the building blocks of other concurrent
//! types.
//!
//! This module defines atomic versions of a select number of primitive
//! types, including `AtomicBool`, `AtomicInt`, `AtomicUint`, and `AtomicOption`.
//! Atomic types present operations that, when used correctly, synchronize
//! updates between threads.
//!
//! Each method takes an `Ordering` which represents the strength of
//! the memory barrier for that operation. These orderings are the
//! same as [C++11 atomic orderings][1].
//!
//! [1]: http://gcc.gnu.org/wiki/Atomic/GCCMM/AtomicSync
//!
//! Atomic variables are safe to share between threads (they implement `Sync`)
//! but they do not themselves provide the mechanism for sharing. The most
//! common way to share an atomic variable is to put it into an `Arc` (an
//! atomically-reference-counted shared pointer).
//!
//! Most atomic types may be stored in static variables, initialized using
//! the provided static initializers like `INIT_ATOMIC_BOOL`. Atomic statics
//! are often used for lazy global initialization.
//!
//!
//! # Examples
//!
//! A simple spinlock:
//!
//! ```
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicUint, Ordering};
//! use std::thread::Thread;
//!
//! fn main() {
//! let spinlock = Arc::new(AtomicUint::new(1));
//!
//! let spinlock_clone = spinlock.clone();
//! Thread::spawn(move|| {
//! spinlock_clone.store(0, Ordering::SeqCst);
//! }).detach();
//!
//! // Wait for the other task to release the lock
//! while spinlock.load(Ordering::SeqCst) != 0 {}
//! }
//! ```
//!
//! Keep a global count of live tasks:
//!
//! ```
//! use std::sync::atomic::{AtomicUint, Ordering, ATOMIC_UINT_INIT};
//!
//! static GLOBAL_TASK_COUNT: AtomicUint = ATOMIC_UINT_INIT;
//!
//! let old_task_count = GLOBAL_TASK_COUNT.fetch_add(1, Ordering::SeqCst);
//! println!("live tasks: {}", old_task_count + 1);
//! ```
#![stable]
use self::Ordering::*;
use kinds::Sync;
use intrinsics;
use cell::UnsafeCell;
/// A boolean type which can be safely shared between threads.
#[stable]
pub struct AtomicBool {
v: UnsafeCell<uint>,
}
unsafe impl Sync for AtomicBool {}
/// A signed integer type which can be safely shared between threads.
#[stable]
pub struct AtomicInt {
v: UnsafeCell<int>,
}
unsafe impl Sync for AtomicInt {}
/// An unsigned integer type which can be safely shared between threads.
#[stable]
pub struct AtomicUint {
v: UnsafeCell<uint>,
}
unsafe impl Sync for AtomicUint {}
/// A raw pointer type which can be safely shared between threads.
#[stable]
pub struct AtomicPtr<T> {
p: UnsafeCell<uint>,
}
unsafe impl<T> Sync for AtomicPtr<T> {}
/// Atomic memory orderings
///
/// Memory orderings limit the ways that both the compiler and CPU may reorder
/// instructions around atomic operations. At its most restrictive,
/// "sequentially consistent" atomics allow neither reads nor writes
/// to be moved either before or after the atomic operation; on the other end
/// "relaxed" atomics allow all reorderings.
///
/// Rust's memory orderings are [the same as
/// C++'s](http://gcc.gnu.org/wiki/Atomic/GCCMM/AtomicSync).
#[stable]
#[derive(Copy)]
pub enum Ordering {
/// No ordering constraints, only atomic operations.
#[stable]
Relaxed,
/// When coupled with a store, all previous writes become visible
/// to another thread that performs a load with `Acquire` ordering
/// on the same value.
#[stable]
Release,
/// When coupled with a load, all subsequent loads will see data
/// written before a store with `Release` ordering on the same value
/// in another thread.
#[stable]
Acquire,
/// When coupled with a load, uses `Acquire` ordering, and with a store
/// `Release` ordering.
#[stable]
AcqRel,
/// Like `AcqRel` with the additional guarantee that all threads see all
/// sequentially consistent operations in the same order.
#[stable]
SeqCst,
}
/// An `AtomicBool` initialized to `false`.
#[unstable = "may be renamed, pending conventions for static initializers"]
pub const ATOMIC_BOOL_INIT: AtomicBool =
AtomicBool { v: UnsafeCell { value: 0 } };
/// An `AtomicInt` initialized to `0`.
#[unstable = "may be renamed, pending conventions for static initializers"]
pub const ATOMIC_INT_INIT: AtomicInt =
AtomicInt { v: UnsafeCell { value: 0 } };
/// An `AtomicUint` initialized to `0`.
#[unstable = "may be renamed, pending conventions for static initializers"]
pub const ATOMIC_UINT_INIT: AtomicUint =
AtomicUint { v: UnsafeCell { value: 0, } };
/// Deprecated
#[deprecated = "renamed to ATOMIC_BOOL_INIT"]
pub const INIT_ATOMIC_BOOL: AtomicBool = ATOMIC_BOOL_INIT;
/// Deprecated
#[deprecated = "renamed to ATOMIC_INT_INIT"]
pub const INIT_ATOMIC_INT: AtomicInt = ATOMIC_INT_INIT;
/// Deprecated
#[deprecated = "renamed to ATOMIC_UINT_INIT"]
pub const INIT_ATOMIC_UINT: AtomicUint = ATOMIC_UINT_INIT;
// NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
const UINT_TRUE: uint = -1;
impl AtomicBool {
/// Creates a new `AtomicBool`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::AtomicBool;
///
/// let atomic_true = AtomicBool::new(true);
/// let atomic_false = AtomicBool::new(false);
/// ```
#[inline]
#[stable]
pub fn new(v: bool) -> AtomicBool {
let val = if v { UINT_TRUE } else { 0 };
AtomicBool { v: UnsafeCell::new(val) }
}
/// Loads a value from the bool.
///
/// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Panics
///
/// Panics if `order` is `Release` or `AcqRel`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let some_bool = AtomicBool::new(true);
///
/// let value = some_bool.load(Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> bool {
unsafe { atomic_load(self.v.get() as *const uint, order) > 0 }
}
/// Stores a value into the bool.
///
/// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let some_bool = AtomicBool::new(true);
///
/// some_bool.store(false, Ordering::Relaxed);
/// ```
///
/// # Panics
///
/// Panics if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, val: bool, order: Ordering) {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Stores a value into the bool, returning the old value.
///
/// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let some_bool = AtomicBool::new(true);
///
/// let value = some_bool.swap(false, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn swap(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_swap(self.v.get(), val, order) > 0 }
}
/// Stores a value into the bool if the current value is the same as the expected value.
///
/// If the return value is equal to `old` then the value was updated.
///
    /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of this
/// operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let some_bool = AtomicBool::new(true);
///
    /// let value = some_bool.compare_and_swap(true, false, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: bool, new: bool, order: Ordering) -> bool {
let old = if old { UINT_TRUE } else { 0 };
let new = if new { UINT_TRUE } else { 0 };
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) > 0 }
}
/// Logical "and" with a boolean value.
///
/// Performs a logical "and" operation on the current value and the argument `val`, and sets
/// the new value to the result.
///
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_and(false, Ordering::SeqCst));
/// assert_eq!(false, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_and(true, Ordering::SeqCst));
/// assert_eq!(true, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_and(false, Ordering::SeqCst));
/// assert_eq!(false, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_and(self.v.get(), val, order) > 0 }
}
/// Logical "nand" with a boolean value.
///
/// Performs a logical "nand" operation on the current value and the argument `val`, and sets
/// the new value to the result.
///
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_nand(false, Ordering::SeqCst));
/// assert_eq!(true, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_nand(true, Ordering::SeqCst));
/// assert_eq!(0, foo.load(Ordering::SeqCst) as int);
/// assert_eq!(false, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_nand(false, Ordering::SeqCst));
/// assert_eq!(true, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_nand(self.v.get(), val, order) > 0 }
}
/// Logical "or" with a boolean value.
///
/// Performs a logical "or" operation on the current value and the argument `val`, and sets the
/// new value to the result.
///
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_or(false, Ordering::SeqCst));
/// assert_eq!(true, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_or(true, Ordering::SeqCst));
/// assert_eq!(true, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_or(false, Ordering::SeqCst));
/// assert_eq!(false, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_or(self.v.get(), val, order) > 0 }
}
/// Logical "xor" with a boolean value.
///
/// Performs a logical "xor" operation on the current value and the argument `val`, and sets
/// the new value to the result.
///
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_xor(false, Ordering::SeqCst));
/// assert_eq!(true, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_xor(true, Ordering::SeqCst));
/// assert_eq!(false, foo.load(Ordering::SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_xor(false, Ordering::SeqCst));
/// assert_eq!(false, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_xor(self.v.get(), val, order) > 0 }
}
}
impl AtomicInt {
/// Creates a new `AtomicInt`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::AtomicInt;
///
/// let atomic_forty_two = AtomicInt::new(42);
/// ```
#[inline]
#[stable]
pub fn new(v: int) -> AtomicInt {
AtomicInt {v: UnsafeCell::new(v)}
}
/// Loads a value from the int.
///
/// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Panics
///
/// Panics if `order` is `Release` or `AcqRel`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let some_int = AtomicInt::new(5);
///
/// let value = some_int.load(Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> int {
unsafe { atomic_load(self.v.get() as *const int, order) }
}
/// Stores a value into the int.
///
/// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let some_int = AtomicInt::new(5);
///
/// some_int.store(10, Ordering::Relaxed);
/// ```
///
/// # Panics
///
/// Panics if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, val: int, order: Ordering) {
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Stores a value into the int, returning the old value.
///
/// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let some_int = AtomicInt::new(5);
///
/// let value = some_int.swap(10, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn swap(&self, val: int, order: Ordering) -> int {
unsafe { atomic_swap(self.v.get(), val, order) }
}
/// Stores a value into the int if the current value is the same as the expected value.
///
/// If the return value is equal to `old` then the value was updated.
///
/// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
/// this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let some_int = AtomicInt::new(5);
///
/// let value = some_int.compare_and_swap(5, 10, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int {
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
/// Add an int to the current value, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let foo = AtomicInt::new(0);
/// assert_eq!(0, foo.fetch_add(10, Ordering::SeqCst));
/// assert_eq!(10, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_add(&self, val: int, order: Ordering) -> int {
unsafe { atomic_add(self.v.get(), val, order) }
}
/// Subtract an int from the current value, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let foo = AtomicInt::new(0);
/// assert_eq!(0, foo.fetch_sub(10, Ordering::SeqCst));
/// assert_eq!(-10, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_sub(&self, val: int, order: Ordering) -> int {
unsafe { atomic_sub(self.v.get(), val, order) }
}
/// Bitwise and with the current int, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let foo = AtomicInt::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_and(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b100001, foo.load(Ordering::SeqCst));
    /// ```
#[inline]
#[stable]
pub fn fetch_and(&self, val: int, order: Ordering) -> int {
unsafe { atomic_and(self.v.get(), val, order) }
}
/// Bitwise or with the current int, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let foo = AtomicInt::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_or(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b111111, foo.load(Ordering::SeqCst));
    /// ```
#[inline]
#[stable]
pub fn fetch_or(&self, val: int, order: Ordering) -> int {
unsafe { atomic_or(self.v.get(), val, order) }
}
/// Bitwise xor with the current int, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicInt, Ordering};
///
/// let foo = AtomicInt::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_xor(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b011110, foo.load(Ordering::SeqCst));
    /// ```
#[inline]
#[stable]
pub fn fetch_xor(&self, val: int, order: Ordering) -> int {
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
impl AtomicUint {
/// Creates a new `AtomicUint`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::AtomicUint;
///
/// let atomic_forty_two = AtomicUint::new(42u);
/// ```
#[inline]
#[stable]
pub fn new(v: uint) -> AtomicUint {
AtomicUint { v: UnsafeCell::new(v) }
}
/// Loads a value from the uint.
///
/// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Panics
///
/// Panics if `order` is `Release` or `AcqRel`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let some_uint = AtomicUint::new(5);
///
/// let value = some_uint.load(Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> uint {
unsafe { atomic_load(self.v.get() as *const uint, order) }
}
/// Stores a value into the uint.
///
/// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let some_uint = AtomicUint::new(5);
///
/// some_uint.store(10, Ordering::Relaxed);
/// ```
///
/// # Panics
///
/// Panics if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, val: uint, order: Ordering) {
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Stores a value into the uint, returning the old value.
///
/// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let some_uint = AtomicUint::new(5);
///
/// let value = some_uint.swap(10, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn swap(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_swap(self.v.get(), val, order) }
}
/// Stores a value into the uint if the current value is the same as the expected value.
///
/// If the return value is equal to `old` then the value was updated.
///
/// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
/// this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let some_uint = AtomicUint::new(5);
///
/// let value = some_uint.compare_and_swap(5, 10, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint {
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
/// Add to the current uint, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let foo = AtomicUint::new(0);
/// assert_eq!(0, foo.fetch_add(10, Ordering::SeqCst));
/// assert_eq!(10, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_add(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_add(self.v.get(), val, order) }
}
/// Subtract from the current uint, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let foo = AtomicUint::new(10);
/// assert_eq!(10, foo.fetch_sub(10, Ordering::SeqCst));
/// assert_eq!(0, foo.load(Ordering::SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_sub(self.v.get(), val, order) }
}
/// Bitwise and with the current uint, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let foo = AtomicUint::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_and(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b100001, foo.load(Ordering::SeqCst));
    /// ```
#[inline]
#[stable]
pub fn fetch_and(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_and(self.v.get(), val, order) }
}
/// Bitwise or with the current uint, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let foo = AtomicUint::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_or(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b111111, foo.load(Ordering::SeqCst));
    /// ```
#[inline]
#[stable]
pub fn fetch_or(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_or(self.v.get(), val, order) }
}
/// Bitwise xor with the current uint, returning the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicUint, Ordering};
///
/// let foo = AtomicUint::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_xor(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b011110, foo.load(Ordering::SeqCst));
    /// ```
#[inline]
#[stable]
pub fn fetch_xor(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
impl<T> AtomicPtr<T> {
/// Creates a new `AtomicPtr`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::AtomicPtr;
///
/// let ptr = &mut 5i;
/// let atomic_ptr = AtomicPtr::new(ptr);
/// ```
#[inline]
#[stable]
pub fn new(p: *mut T) -> AtomicPtr<T> {
AtomicPtr { p: UnsafeCell::new(p as uint) }
}
/// Loads a value from the pointer.
///
/// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Panics
///
/// Panics if `order` is `Release` or `AcqRel`.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicPtr, Ordering};
///
/// let ptr = &mut 5i;
/// let some_ptr = AtomicPtr::new(ptr);
///
/// let value = some_ptr.load(Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe {
atomic_load(self.p.get() as *const *mut T, order) as *mut T
}
}
/// Stores a value into the pointer.
///
/// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicPtr, Ordering};
///
/// let ptr = &mut 5i;
/// let some_ptr = AtomicPtr::new(ptr);
///
/// let other_ptr = &mut 10i;
///
/// some_ptr.store(other_ptr, Ordering::Relaxed);
/// ```
///
/// # Panics
///
/// Panics if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, ptr: *mut T, order: Ordering) {
unsafe { atomic_store(self.p.get(), ptr as uint, order); }
}
/// Stores a value into the pointer, returning the old value.
///
/// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicPtr, Ordering};
///
/// let ptr = &mut 5i;
/// let some_ptr = AtomicPtr::new(ptr);
///
/// let other_ptr = &mut 10i;
///
/// let value = some_ptr.swap(other_ptr, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T }
}
/// Stores a value into the pointer if the current value is the same as the expected value.
///
/// If the return value is equal to `old` then the value was updated.
///
/// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
/// this operation.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicPtr, Ordering};
///
/// let ptr = &mut 5i;
/// let some_ptr = AtomicPtr::new(ptr);
///
/// let other_ptr = &mut 10i;
/// let another_ptr = &mut 10i;
///
/// let value = some_ptr.compare_and_swap(other_ptr, another_ptr, Ordering::Relaxed);
/// ```
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
unsafe {
atomic_compare_and_swap(self.p.get(), old as uint,
new as uint, order) as *mut T
}
}
}
#[inline]
unsafe fn atomic_store<T>(dst: *mut T, val: T, order:Ordering) {
match order {
Release => intrinsics::atomic_store_rel(dst, val),
Relaxed => intrinsics::atomic_store_relaxed(dst, val),
SeqCst => intrinsics::atomic_store(dst, val),
Acquire => panic!("there is no such thing as an acquire store"),
AcqRel => panic!("there is no such thing as an acquire/release store"),
}
}
#[inline]
#[stable]
unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
match order {
Acquire => intrinsics::atomic_load_acq(dst),
Relaxed => intrinsics::atomic_load_relaxed(dst),
SeqCst => intrinsics::atomic_load(dst),
Release => panic!("there is no such thing as a release load"),
AcqRel => panic!("there is no such thing as an acquire/release load"),
}
}
#[inline]
#[stable]
unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xchg_acq(dst, val),
Release => intrinsics::atomic_xchg_rel(dst, val),
AcqRel => intrinsics::atomic_xchg_acqrel(dst, val),
Relaxed => intrinsics::atomic_xchg_relaxed(dst, val),
SeqCst => intrinsics::atomic_xchg(dst, val)
}
}
/// Returns the old value (like __sync_fetch_and_add).
#[inline]
#[stable]
unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xadd_acq(dst, val),
Release => intrinsics::atomic_xadd_rel(dst, val),
AcqRel => intrinsics::atomic_xadd_acqrel(dst, val),
Relaxed => intrinsics::atomic_xadd_relaxed(dst, val),
SeqCst => intrinsics::atomic_xadd(dst, val)
}
}
/// Returns the old value (like __sync_fetch_and_sub).
#[inline]
#[stable]
unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xsub_acq(dst, val),
Release => intrinsics::atomic_xsub_rel(dst, val),
AcqRel => intrinsics::atomic_xsub_acqrel(dst, val),
Relaxed => intrinsics::atomic_xsub_relaxed(dst, val),
SeqCst => intrinsics::atomic_xsub(dst, val)
}
}
#[inline]
#[stable]
unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
Release => intrinsics::atomic_cxchg_rel(dst, old, new),
AcqRel => intrinsics::atomic_cxchg_acqrel(dst, old, new),
Relaxed => intrinsics::atomic_cxchg_relaxed(dst, old, new),
SeqCst => intrinsics::atomic_cxchg(dst, old, new),
}
}
#[inline]
#[stable]
unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_and_acq(dst, val),
Release => intrinsics::atomic_and_rel(dst, val),
AcqRel => intrinsics::atomic_and_acqrel(dst, val),
Relaxed => intrinsics::atomic_and_relaxed(dst, val),
SeqCst => intrinsics::atomic_and(dst, val)
}
}
#[inline]
#[stable]
unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_nand_acq(dst, val),
Release => intrinsics::atomic_nand_rel(dst, val),
AcqRel => intrinsics::atomic_nand_acqrel(dst, val),
Relaxed => intrinsics::atomic_nand_relaxed(dst, val),
SeqCst => intrinsics::atomic_nand(dst, val)
}
}
#[inline]
#[stable]
unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_or_acq(dst, val),
Release => intrinsics::atomic_or_rel(dst, val),
AcqRel => intrinsics::atomic_or_acqrel(dst, val),
Relaxed => intrinsics::atomic_or_relaxed(dst, val),
SeqCst => intrinsics::atomic_or(dst, val)
}
}
#[inline]
#[stable]
unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xor_acq(dst, val),
Release => intrinsics::atomic_xor_rel(dst, val),
AcqRel => intrinsics::atomic_xor_acqrel(dst, val),
Relaxed => intrinsics::atomic_xor_relaxed(dst, val),
SeqCst => intrinsics::atomic_xor(dst, val)
}
}
/// An atomic fence.
///
/// A fence 'A' which has `Release` ordering semantics, synchronizes with a
/// fence 'B' with (at least) `Acquire` semantics, if and only if there exist
/// atomic operations X and Y, both operating on some atomic object 'M' such
/// that A is sequenced before X, Y is sequenced before B and Y observes
/// the change to M. This provides a happens-before dependence between A and B.
///
/// Atomic operations with `Release` or `Acquire` semantics can also synchronize
/// with a fence.
///
/// A fence which has `SeqCst` ordering, in addition to having both `Acquire`
/// and `Release` semantics, participates in the global program order of the
/// other `SeqCst` operations and/or fences.
///
/// Accepts `Acquire`, `Release`, `AcqRel` and `SeqCst` orderings.
///
/// # Panics
///
/// Panics if `order` is `Relaxed`.
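///
/// # Examples
///
/// A minimal, illustrative sketch (not part of the original documentation;
/// the flag and surrounding setup are assumed, so the snippet is not
/// compiled): a `Release` fence before publishing a flag pairs with an
/// `Acquire` fence after the flag is observed, ordering the writer's earlier
/// writes before the reader's later reads.
///
/// ```ignore
/// use std::sync::atomic::{fence, AtomicBool, Ordering};
///
/// // Writer: make earlier writes visible, then publish the flag.
/// fn publish(ready: &AtomicBool) {
///     fence(Ordering::Release);
///     ready.store(true, Ordering::Relaxed);
/// }
///
/// // Reader: once the flag is seen, the fence orders subsequent reads.
/// fn wait_for(ready: &AtomicBool) {
///     while !ready.load(Ordering::Relaxed) {}
///     fence(Ordering::Acquire);
/// }
/// ```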
#[inline]
#[stable]
pub fn fence(order: Ordering) {
unsafe {
match order {
Acquire => intrinsics::atomic_fence_acq(),
Release => intrinsics::atomic_fence_rel(),
AcqRel => intrinsics::atomic_fence_acqrel(),
SeqCst => intrinsics::atomic_fence(),
Relaxed => panic!("there is no such thing as a relaxed fence")
}
}
}
| 30.688555 | 99 | 0.586049 |
e60808ceb5d1927c0c49a51e3450127ce43202d3 | 3,920 | use serde_derive::{Deserialize, Serialize};
pub mod client;
pub mod errors;
pub use errors::*;
pub type Result<T = ()> = std::result::Result<T, Box<dyn std::error::Error>>;
#[derive(Clone, Deserialize, Debug)]
pub struct Device {
app_eui: String,
app_key: String,
dev_eui: String,
id: String,
name: String,
organization_id: String,
oui: usize,
}
impl Device {
pub fn id(&self) -> &String {
&self.id
}
pub fn app_eui(&self) -> &String {
&self.app_eui
}
pub fn dev_eui(&self) -> &String {
&self.dev_eui
}
pub fn app_key(&self) -> &String {
&self.app_key
}
}
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct GetDevice {
app_eui: String,
app_key: String,
dev_eui: String,
}
impl GetDevice {
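    /// Validates user-supplied hex strings and builds a `GetDevice`.
    ///
    /// An illustrative sketch (the EUI/key values below are made up):
    /// `app_eui` and `dev_eui` must decode to 8 bytes and `app_key` to
    /// 16 bytes, otherwise the corresponding error is returned.
    ///
    /// ```ignore
    /// let device = GetDevice::from_user_input(
    ///     "0011223344556677".to_string(),                  // 8-byte app_eui
    ///     "00112233445566778899aabbccddeeff".to_string(),  // 16-byte app_key
    ///     "7766554433221100".to_string(),                  // 8-byte dev_eui
    /// )?;
    /// ```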
pub fn from_user_input(app_eui: String, app_key: String, dev_eui: String) -> Result<GetDevice> {
let app_eui_decoded = hex::decode(app_eui.clone())?;
if app_eui_decoded.len() != 8 {
return Err(Error::InvalidAppEui.into());
}
let app_key_decoded = hex::decode(app_key.clone())?;
if app_key_decoded.len() != 16 {
return Err(Error::InvalidAppKey.into());
}
let dev_eui_decoded = hex::decode(dev_eui.clone())?;
if dev_eui_decoded.len() != 8 {
return Err(Error::InvalidDevEui.into());
}
Ok(GetDevice {
app_eui,
app_key,
dev_eui,
})
}
pub fn app_eui(&self) -> &String {
&self.app_eui
}
pub fn dev_eui(&self) -> &String {
&self.dev_eui
}
pub fn app_key(&self) -> &String {
&self.app_key
}
}
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct NewDevice {
app_eui: String,
app_key: String,
dev_eui: String,
name: String,
}
impl NewDevice {
pub fn from_user_input(
app_eui: String,
app_key: String,
dev_eui: String,
name: String,
) -> Result<NewDevice> {
let app_eui_decoded = hex::decode(app_eui.clone())?;
if app_eui_decoded.len() != 8 {
return Err(Error::InvalidAppEui.into());
}
let app_key_decoded = hex::decode(app_key.clone())?;
if app_key_decoded.len() != 16 {
return Err(Error::InvalidAppKey.into());
}
let dev_eui_decoded = hex::decode(dev_eui.clone())?;
if dev_eui_decoded.len() != 8 {
return Err(Error::InvalidDevEui.into());
}
Ok(NewDevice {
app_eui,
app_key,
dev_eui,
name,
})
}
pub fn app_eui(&self) -> &String {
&self.app_eui
}
pub fn app_key(&self) -> &String {
&self.app_key
}
pub fn dev_eui(&self) -> &String {
&self.dev_eui
}
}
impl NewLabel {
pub fn from_string(string: &str) -> NewLabel {
NewLabel {
name: string.to_owned(),
}
}
}
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct NewLabel {
name: String,
}
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct Label {
id: String,
name: String,
}
impl Label {
pub fn id(&self) -> &String {
&self.id
}
pub fn name(&self) -> &String {
&self.name
}
}
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct DeviceLabel {
label: String,
}
impl DeviceLabel {
pub fn from_uuid(label: String) -> Result<DeviceLabel> {
validate_uuid_input(&label)?;
Ok(DeviceLabel { label })
}
pub fn get_uuid(&self) -> &String {
&self.label
}
}
/// Throws an error if UUID isn't properly input
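///
/// An illustrative check (the UUID literal below is made up):
///
/// ```ignore
/// assert!(validate_uuid_input("c6897f3a-3b56-4cb2-9a2f-2fdbb0a95f3e").is_ok());
/// assert!(validate_uuid_input("not-a-uuid").is_err());
/// ```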
pub fn validate_uuid_input(id: &str) -> Result {
if let Err(err) = uuid::Uuid::parse_str(id) {
println!("{} [input: {}]", err, id);
return Err(Error::InvalidUuid.into());
}
Ok(())
}
| 21.189189 | 100 | 0.556633 |
fc11b74d3f455472a3cd76ce0082775c2ebca593 | 3,561 | use std::sync::Arc;
use super::grid::Grid;
use crate::{model::{record::Record, schema::GridSchema}, Context, utils::{self, csv::CsvReaders}};
const UNMATCHED: &str = "0"; // status column value for unmatched records (ASCII '0').
const COL_STATUS: usize = 0;
///
/// Iterator that allows iterating over the records (by reader index) in the grid.
///
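/// An illustrative usage sketch (assuming a populated `Context` and `Grid`
/// from the surrounding crate; not compiled here):
///
/// ```ignore
/// for record in GridIterator::new(&ctx, &grid) {
///     // each yielded `Record` pairs a data row with its derived row (if any)
/// }
/// ```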
pub struct GridIterator {
pos: usize, // reader index.
schema: Arc<GridSchema>,
data_readers: CsvReaders,
derived_readers: Option<CsvReaders>,
}
impl GridIterator {
pub fn new(ctx: &Context, grid: &Grid) -> Self {
let data_readers = grid.schema().files()
.iter()
.map(|file| utils::csv::reader(file.path(), true))
.collect();
let derived_readers = match ctx.phase() {
crate::Phase::MatchAndGroup |
crate::Phase::ComleteAndArchive =>
Some(grid.schema().files()
.iter()
.map(|file| utils::csv::reader(file.derived_path(), true))
.collect()),
_ => None,
};
Self {
pos: 0,
schema: Arc::new(grid.schema().clone()),
data_readers,
derived_readers,
}
}
}
impl Iterator for GridIterator {
type Item = Record;
fn next(&mut self) -> Option<Self::Item> {
loop {
// If we've reached the end of the last file, return None.
if self.pos == self.data_readers.len() {
return None
}
// Read a row from the csv file.
match read_next(self.pos, &mut self.data_readers, &mut self.derived_readers, true) {
Ok(result) => {
if let Some((data, derived)) = result {
return Some(Record::new(self.pos, self.schema.clone(), data, derived))
}
// If there was no data in the file, move onto the next file.
self.pos += 1;
},
Err(err) => panic!("Failed to read next record for group: {}", err),
}
}
}
}
///
/// Advances the data file reader (and if present, the derived file reader) by one record and returns the record(s) to the caller.
///
/// If the readers are at the end of file then returns None.
///
/// If the data reader encounters a matched record and `filter_status` is true, the record is skipped and the reader advances to the next one.
///
/// If an error is returned from either reader, then it is returned to the caller.
///
fn read_next(pos: usize, data_readers: &mut CsvReaders, derived_readers: &mut Option<CsvReaders>, filter_status: bool)
-> Result<Option<(csv::ByteRecord /* record data */, csv::ByteRecord /* derived data */)>, csv::Error> {
let mut data_buffer = csv::ByteRecord::new();
let mut derived_buffer = csv::ByteRecord::new();
loop {
match data_readers[pos].read_byte_record(&mut data_buffer) {
Ok(result) => {
if let Some(derived_readers) = derived_readers {
let _ = derived_readers[pos].read_byte_record(&mut derived_buffer);
}
match result {
true => {
if !filter_status || String::from_utf8_lossy(data_buffer.get(COL_STATUS).expect("no status")) == UNMATCHED {
return Ok(Some((data_buffer, derived_buffer)))
}
},
false => return Ok(None),
}
},
Err(err) => return Err(err),
}
}
} | 34.240385 | 132 | 0.53412 |
09df15f754538a7b4d776ca095e009b5e6e2f6f5 | 1,222 | // Test that `binding @ subpat` acts as a product context with respect to duplicate binding names.
// The code that is tested here lives in resolve (see `resolve_pattern_inner`).
#![feature(bindings_after_at)]
fn main() {
fn f(a @ a @ a: ()) {}
//~^ ERROR identifier `a` is bound more than once in this parameter list
//~| ERROR identifier `a` is bound more than once in this parameter list
match Ok(0) {
Ok(a @ b @ a)
//~^ ERROR identifier `a` is bound more than once in the same pattern
| Err(a @ b @ a)
//~^ ERROR identifier `a` is bound more than once in the same pattern
=> {}
}
let a @ a @ a = ();
//~^ ERROR identifier `a` is bound more than once in the same pattern
//~| ERROR identifier `a` is bound more than once in the same pattern
let ref a @ ref a = ();
//~^ ERROR identifier `a` is bound more than once in the same pattern
let ref mut a @ ref mut a = ();
//~^ ERROR identifier `a` is bound more than once in the same pattern
let a @ (Ok(a) | Err(a)) = Ok(());
//~^ ERROR identifier `a` is bound more than once in the same pattern
//~| ERROR identifier `a` is bound more than once in the same pattern
}
| 39.419355 | 98 | 0.61784 |
722ae5509ebf564dbad3f59dad27998609366779 | 1,280 | use crate::{config::Config, get};
use serenity::{
framework::standard::{
macros::{command, group},
Args, CommandResult,
},
model::{channel::Message, id::ChannelId},
prelude::*,
};
#[group]
#[commands(log_channel, log_channel_set)]
#[prefixes("log")]
struct LogChannel;
#[command("get")]
#[description("Check the current log channel")]
#[usage("")]
pub async fn log_channel(ctx: &Context, msg: &Message) -> CommandResult {
match get!(ctx, Config, read).log_channel() {
Some(ch) => {
msg.channel_id
.say(&ctx, format!("Log channel: {}", ch.mention()))
.await?
}
None => msg.channel_id.say(&ctx, "No log channel").await?,
};
Ok(())
}
#[command("set")]
#[description("Set the logging channel")]
#[usage("#channel_mention")]
pub async fn log_channel_set(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
let channel_id = args.single::<ChannelId>().ok();
get!(ctx, Config, write).set_log_channel(channel_id)?;
msg.channel_id
.say(
&ctx,
if channel_id.is_some() {
"Log channel set"
} else {
"Log channel disabled"
},
)
.await?;
Ok(())
}
| 26.122449 | 93 | 0.553906 |
bfa92ff6890bafc42124b743586c69a1f8df869f | 7,214 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::Msr {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
}
#[doc = r" Value of the field"]
pub struct RxR {
bits: u8,
}
impl RxR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct SampR {
bits: u8,
}
impl SampR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct RxmR {
bits: u8,
}
impl RxmR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct TxmR {
bits: u8,
}
impl TxmR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct SlakiR {
bits: u8,
}
impl SlakiR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct WkuiR {
bits: u8,
}
impl WkuiR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct ErriR {
bits: u8,
}
impl ErriR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct SlakR {
bits: u8,
}
impl SlakR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct InakR {
bits: u8,
}
impl InakR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _SlakiW<'a> {
w: &'a mut W,
}
impl<'a> _SlakiW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _WkuiW<'a> {
w: &'a mut W,
}
impl<'a> _WkuiW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _ErriW<'a> {
w: &'a mut W,
}
impl<'a> _ErriW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 11 - RX"]
#[inline(always)]
pub fn rx(&self) -> RxR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) as u8
};
RxR { bits }
}
#[doc = "Bit 10 - SAMP"]
#[inline(always)]
pub fn samp(&self) -> SampR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SampR { bits }
}
#[doc = "Bit 9 - RXM"]
#[inline(always)]
pub fn rxm(&self) -> RxmR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) as u8
};
RxmR { bits }
}
#[doc = "Bit 8 - TXM"]
#[inline(always)]
pub fn txm(&self) -> TxmR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) as u8
};
TxmR { bits }
}
#[doc = "Bit 4 - SLAKI"]
#[inline(always)]
pub fn slaki(&self) -> SlakiR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SlakiR { bits }
}
#[doc = "Bit 3 - WKUI"]
#[inline(always)]
pub fn wkui(&self) -> WkuiR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) as u8
};
WkuiR { bits }
}
#[doc = "Bit 2 - ERRI"]
#[inline(always)]
pub fn erri(&self) -> ErriR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) as u8
};
ErriR { bits }
}
#[doc = "Bit 1 - SLAK"]
#[inline(always)]
pub fn slak(&self) -> SlakR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SlakR { bits }
}
#[doc = "Bit 0 - INAK"]
#[inline(always)]
pub fn inak(&self) -> InakR {
let bits = {
const MASK: u8 = 1;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
InakR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 3074 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 4 - SLAKI"]
#[inline(always)]
pub fn slaki(&mut self) -> _SlakiW {
_SlakiW { w: self }
}
#[doc = "Bit 3 - WKUI"]
#[inline(always)]
pub fn wkui(&mut self) -> _WkuiW {
_WkuiW { w: self }
}
#[doc = "Bit 2 - ERRI"]
#[inline(always)]
pub fn erri(&mut self) -> _ErriW {
_ErriW { w: self }
}
}
| 23.346278 | 59 | 0.476573 |
eb20037fad273a2f846b7d1d64dd6f23c6beb474 | 10,800 | //! Configures and executes the proxy
#![deny(warnings, rust_2018_idioms)]
pub mod admin;
pub mod dst;
pub mod env;
pub mod identity;
pub mod oc_collector;
pub mod tap;
pub use self::metrics::Metrics;
use futures::{future, FutureExt, TryFutureExt};
pub use linkerd_app_core::{self as core, metrics, trace};
use linkerd_app_core::{control::ControlAddr, dns, drain, proxy::http, svc, Error, ProxyRuntime};
use linkerd_app_gateway as gateway;
use linkerd_app_inbound::{self as inbound, Inbound};
use linkerd_app_outbound::{self as outbound, Outbound};
use linkerd_channel::into_stream::IntoStream;
use std::{net::SocketAddr, pin::Pin};
use tokio::{sync::mpsc, time::Duration};
use tracing::instrument::Instrument;
use tracing::{debug, error, info, info_span};
/// Spawns a sidecar proxy.
///
/// The proxy binds two listeners:
///
/// - a private socket (TCP or UNIX) for outbound requests to other instances;
/// - and a public socket (TCP and optionally TLS) for inbound requests from other
/// instances.
///
/// The public listener forwards requests to a local socket (TCP or UNIX).
///
/// The private listener routes requests to a service-discovery-aware load-balancer.
///
#[derive(Clone, Debug)]
pub struct Config {
pub outbound: outbound::Config,
pub inbound: inbound::Config,
pub gateway: gateway::Config,
pub dns: dns::Config,
pub identity: identity::Config,
pub dst: dst::Config,
pub admin: admin::Config,
pub tap: tap::Config,
pub oc_collector: oc_collector::Config,
}
pub struct App {
admin: admin::Admin,
drain: drain::Signal,
dst: ControlAddr,
identity: identity::Identity,
inbound_addr: SocketAddr,
oc_collector: oc_collector::OcCollector,
outbound_addr: SocketAddr,
start_proxy: Pin<Box<dyn std::future::Future<Output = ()> + Send + 'static>>,
tap: tap::Tap,
}
impl Config {
pub fn try_from_env() -> Result<Self, env::EnvError> {
env::Env.try_config()
}
/// Build an application.
///
/// It is currently required that this be run on a Tokio runtime, since some
/// services are created eagerly and must spawn tasks to do so.
pub async fn build(
self,
shutdown_tx: mpsc::UnboundedSender<()>,
log_level: trace::Handle,
) -> Result<App, Error> {
use metrics::FmtMetrics;
let Config {
admin,
dns,
dst,
identity,
inbound,
oc_collector,
outbound,
gateway,
tap,
} = self;
debug!("building app");
let (metrics, report) = Metrics::new(admin.metrics_retain_idle);
let dns = dns.build();
let identity = info_span!("identity")
.in_scope(|| identity.build(dns.resolver.clone(), metrics.control.clone()))?;
let report = identity.metrics().and_then(report);
let (drain_tx, drain_rx) = drain::channel();
let tap = info_span!("tap").in_scope(|| tap.build(identity.local(), drain_rx.clone()))?;
let dst = {
let metrics = metrics.control.clone();
let dns = dns.resolver.clone();
info_span!("dst").in_scope(|| dst.build(dns, metrics, identity.local()))
}?;
let oc_collector = {
let identity = identity.local();
let dns = dns.resolver;
let client_metrics = metrics.control;
let metrics = metrics.opencensus;
info_span!("opencensus")
.in_scope(|| oc_collector.build(identity, dns, metrics, client_metrics))
}?;
let admin = {
let identity = identity.local();
let drain = drain_rx.clone();
let metrics = metrics.inbound.clone();
info_span!("admin").in_scope(move || {
admin.build(identity, report, metrics, log_level, drain, shutdown_tx)
})?
};
let dst_addr = dst.addr.clone();
let inbound = Inbound::new(
inbound,
ProxyRuntime {
identity: identity.local(),
metrics: metrics.inbound,
tap: tap.registry(),
span_sink: oc_collector.span_sink(),
drain: drain_rx.clone(),
},
);
let outbound = Outbound::new(
outbound,
ProxyRuntime {
identity: identity.local(),
metrics: metrics.outbound,
tap: tap.registry(),
span_sink: oc_collector.span_sink(),
drain: drain_rx,
},
);
let gateway_stack = gateway::stack(
gateway,
inbound.clone(),
outbound.to_tcp_connect(),
dst.profiles.clone(),
dst.resolve.clone(),
);
let (inbound_addr, inbound_serve) = inbound.serve(dst.profiles.clone(), gateway_stack);
let (outbound_addr, outbound_serve) = outbound.serve(dst.profiles, dst.resolve);
let start_proxy = Box::pin(async move {
tokio::spawn(outbound_serve.instrument(info_span!("outbound")));
tokio::spawn(inbound_serve.instrument(info_span!("inbound")));
});
Ok(App {
admin,
dst: dst_addr,
drain: drain_tx,
identity,
inbound_addr,
oc_collector,
outbound_addr,
start_proxy,
tap,
})
}
}
impl App {
pub fn admin_addr(&self) -> SocketAddr {
self.admin.listen_addr
}
pub fn inbound_addr(&self) -> SocketAddr {
self.inbound_addr
}
pub fn outbound_addr(&self) -> SocketAddr {
self.outbound_addr
}
pub fn tap_addr(&self) -> Option<SocketAddr> {
match self.tap {
tap::Tap::Disabled { .. } => None,
tap::Tap::Enabled { listen_addr, .. } => Some(listen_addr),
}
}
pub fn dst_addr(&self) -> &ControlAddr {
&self.dst
}
pub fn local_identity(&self) -> Option<&identity::LocalCrtKey> {
match self.identity {
identity::Identity::Disabled => None,
identity::Identity::Enabled { ref local, .. } => Some(local),
}
}
pub fn identity_addr(&self) -> Option<&ControlAddr> {
match self.identity {
identity::Identity::Disabled => None,
identity::Identity::Enabled { ref addr, .. } => Some(addr),
}
}
pub fn opencensus_addr(&self) -> Option<&ControlAddr> {
match self.oc_collector {
oc_collector::OcCollector::Disabled { .. } => None,
oc_collector::OcCollector::Enabled(ref oc) => Some(&oc.addr),
}
}
pub fn spawn(self) -> drain::Signal {
let App {
admin,
drain,
identity,
oc_collector,
start_proxy,
tap,
..
} = self;
// Run a daemon thread for all administrative tasks.
//
// The main reactor holds `admin_shutdown_tx` until the reactor drops
// the task. This causes the daemon reactor to stop.
let (admin_shutdown_tx, admin_shutdown_rx) = tokio::sync::oneshot::channel::<()>();
debug!("spawning daemon thread");
tokio::spawn(future::pending().map(|()| drop(admin_shutdown_tx)));
std::thread::Builder::new()
.name("admin".into())
.spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("building admin runtime must succeed");
rt.block_on(
async move {
debug!("running admin thread");
// Start the admin server to serve the readiness endpoint.
tokio::spawn(
admin
.serve
.map_err(|e| panic!("admin server died: {}", e))
.instrument(info_span!("admin", listen.addr = %admin.listen_addr)),
);
// Kick off the identity so that the process can become ready.
if let identity::Identity::Enabled { local, task, .. } = identity {
tokio::spawn(task.instrument(info_span!("identity")));
let latch = admin.latch;
tokio::spawn(
local
.await_crt()
.map_ok(move |id| {
latch.release();
info!("Certified identity: {}", id.name().as_ref());
})
.map_err(|_| {
// The daemon task was lost?!
panic!("Failed to certify identity!");
})
.instrument(info_span!("identity")),
);
} else {
admin.latch.release()
}
if let tap::Tap::Enabled {
registry, serve, ..
} = tap
{
tokio::spawn(
registry
.clean(
tokio::time::interval(Duration::from_secs(60))
.into_stream(),
)
.instrument(info_span!("tap_clean")),
);
tokio::spawn(
serve
.map_err(|error| error!(%error, "server died"))
.instrument(info_span!("tap")),
);
}
if let oc_collector::OcCollector::Enabled(oc) = oc_collector {
tokio::spawn(oc.task.instrument(info_span!("opencensus")));
}
// we don't care if the admin shutdown channel is
// dropped or actually triggered.
let _ = admin_shutdown_rx.await;
}
.instrument(info_span!("daemon")),
)
})
.expect("admin");
tokio::spawn(start_proxy);
drain
}
}
| 33.64486 | 99 | 0.494074 |
e518a20227d333de09cfbbb2c1e6f75cdaed0983 | 2,570 | pub use analog_ledger::blockstore_processor::CacheBlockMetaSender;
use {
crossbeam_channel::{Receiver, RecvTimeoutError},
analog_ledger::blockstore::Blockstore,
analog_measure::measure::Measure,
analog_runtime::bank::Bank,
std::{
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread::{self, Builder, JoinHandle},
time::Duration,
},
};
pub type CacheBlockMetaReceiver = Receiver<Arc<Bank>>;
pub struct CacheBlockMetaService {
thread_hdl: JoinHandle<()>,
}
const CACHE_BLOCK_TIME_WARNING_MS: u64 = 150;
impl CacheBlockMetaService {
#[allow(clippy::new_ret_no_self)]
pub fn new(
cache_block_meta_receiver: CacheBlockMetaReceiver,
blockstore: Arc<Blockstore>,
exit: &Arc<AtomicBool>,
) -> Self {
let exit = exit.clone();
let thread_hdl = Builder::new()
.name("analog-cache-block-time".to_string())
.spawn(move || loop {
if exit.load(Ordering::Relaxed) {
break;
}
let recv_result = cache_block_meta_receiver.recv_timeout(Duration::from_secs(1));
match recv_result {
Err(RecvTimeoutError::Disconnected) => {
break;
}
Ok(bank) => {
let mut cache_block_meta_timer = Measure::start("cache_block_meta_timer");
Self::cache_block_meta(bank, &blockstore);
cache_block_meta_timer.stop();
if cache_block_meta_timer.as_ms() > CACHE_BLOCK_TIME_WARNING_MS {
warn!(
"cache_block_meta operation took: {}ms",
cache_block_meta_timer.as_ms()
);
}
}
_ => {}
}
})
.unwrap();
Self { thread_hdl }
}
fn cache_block_meta(bank: Arc<Bank>, blockstore: &Arc<Blockstore>) {
if let Err(e) = blockstore.cache_block_time(bank.slot(), bank.clock().unix_timestamp) {
error!("cache_block_time failed: slot {:?} {:?}", bank.slot(), e);
}
if let Err(e) = blockstore.cache_block_height(bank.slot(), bank.block_height()) {
error!("cache_block_height failed: slot {:?} {:?}", bank.slot(), e);
}
}
pub fn join(self) -> thread::Result<()> {
self.thread_hdl.join()
}
}
| 34.266667 | 98 | 0.524514 |
bb039cd6b91d758943a1a867e80ba05d6c07e180 | 3,451 | #![cfg(feature = "velodyne-test")]
use anyhow::{ensure, Result};
use itertools::izip;
use lidar_utils::velodyne::{
Config, DataPacket, FrameConverter, PointCloudConverter, PositionPacket,
Vlp16_Strongest_FrameConverter, Vlp16_Strongest_PcdConverter, Vlp32_Strongest_FrameConverter,
Vlp32_Strongest_PcdConverter,
};
use pcap::Capture;
use std::mem;
const UDP_HEADER_SIZE: usize = 42;
#[test]
#[cfg(feature = "pcap")]
fn velodyne_vlp_16_pcap_file() -> Result<()> {
let mut cap = Capture::from_file("test_files/velodyne_vlp16.pcap")?;
cap.filter("udp")?;
let mut data_packets = vec![];
let mut position_packets = vec![];
while let Ok(packet) = cap.next() {
if packet.data.len() == mem::size_of::<DataPacket>() + UDP_HEADER_SIZE {
data_packets.push(DataPacket::from_pcap(&packet)?);
} else if packet.data.len() == mem::size_of::<PositionPacket>() + UDP_HEADER_SIZE {
position_packets.push(PositionPacket::from_pcap(&packet)?);
}
}
// timestamp test
{
let is_timestamp_valid = izip!(data_packets.iter(), data_packets.iter().skip(1))
.all(|(former, latter)| former.timestamp < latter.timestamp);
ensure!(is_timestamp_valid, "invalid timestamp detected");
}
// convert to point cloud
{
let config = Config::vlp_16_strongest_return();
let mut converter = Vlp16_Strongest_PcdConverter::from_config(config);
data_packets.iter().try_for_each(|packet| -> Result<_> {
converter.convert(packet)?;
Ok(())
})?;
}
// convert to frames
{
let config = Config::vlp_16_strongest_return();
let mut converter = Vlp16_Strongest_FrameConverter::from_config(config);
data_packets.iter().try_for_each(|packet| -> Result<_> {
converter.convert(packet)?;
Ok(())
})?;
}
Ok(())
}
#[test]
#[cfg(feature = "pcap")]
fn velodyne_vlp_32_pcap_file() -> Result<()> {
let mut cap = Capture::from_file("test_files/velodyne_vlp32.pcap")?;
cap.filter("udp")?;
let mut data_packets = vec![];
let mut position_packets = vec![];
while let Ok(packet) = cap.next() {
if packet.data.len() == mem::size_of::<DataPacket>() + UDP_HEADER_SIZE {
data_packets.push(DataPacket::from_pcap(&packet)?);
} else if packet.data.len() == mem::size_of::<PositionPacket>() + UDP_HEADER_SIZE {
position_packets.push(PositionPacket::from_pcap(&packet)?);
}
}
// timestamp test
{
let is_timestamp_valid = izip!(data_packets.iter(), data_packets.iter().skip(1))
.all(|(former, latter)| former.timestamp < latter.timestamp);
ensure!(is_timestamp_valid, "invalid timestamp detected");
}
// convert to point cloud
{
let config = Config::vlp_32c_strongest_return();
let mut converter = Vlp32_Strongest_PcdConverter::from_config(config);
data_packets.iter().try_for_each(|packet| -> Result<_> {
converter.convert(packet)?;
Ok(())
})?;
}
// convert to frames
{
let config = Config::vlp_32c_strongest_return();
let mut converter = Vlp32_Strongest_FrameConverter::from_config(config);
data_packets.iter().try_for_each(|packet| -> Result<_> {
converter.convert(packet)?;
Ok(())
})?;
}
Ok(())
}
| 31.372727 | 97 | 0.62069 |
2306bfd83ef4f3cfc1b7f51805a8f606dc52fe64 | 17,463 | // std
use std::collections::{BTreeMap, VecDeque};
// Internal
use crate::{
build::AppSettings as AS,
build::{Arg, ArgSettings},
parse::{ArgMatcher, Parser},
util::Id,
INTERNAL_ERROR_MSG,
};
pub(crate) struct Usage<'b, 'c, 'z>
where
'b: 'c,
'c: 'z,
{
p: &'z Parser<'b, 'c>,
}
impl<'b, 'c, 'z> Usage<'b, 'c, 'z> {
pub(crate) fn new(p: &'z Parser<'b, 'c>) -> Self {
Usage { p }
}
// Creates a usage string for display. This happens just after all arguments were parsed, but before
// any subcommands have been parsed (so as to give subcommands their own usage recursively)
pub(crate) fn create_usage_with_title(&self, used: &[Id]) -> String {
debug!("Usage::create_usage_with_title");
let mut usage = String::with_capacity(75);
usage.push_str("USAGE:\n ");
usage.push_str(&*self.create_usage_no_title(used));
usage
}
// Creates a usage string (*without title*) if one was not provided by the user manually.
pub(crate) fn create_usage_no_title(&self, used: &[Id]) -> String {
debug!("Usage::create_usage_no_title");
if let Some(u) = self.p.app.usage_str {
String::from(&*u)
} else if used.is_empty() {
self.create_help_usage(true)
} else {
self.create_smart_usage(used)
}
}
// Creates a usage string for display in help messages (i.e. not for errors)
pub(crate) fn create_help_usage(&self, incl_reqs: bool) -> String {
debug!("Usage::create_help_usage; incl_reqs={:?}", incl_reqs);
let mut usage = String::with_capacity(75);
let name = self
.p
.app
.usage
.as_ref()
.unwrap_or_else(|| self.p.app.bin_name.as_ref().unwrap_or(&self.p.app.name));
usage.push_str(&*name);
let req_string = if incl_reqs {
self.get_required_usage_from(&[], None, false)
.iter()
.fold(String::new(), |a, s| a + &format!(" {}", s)[..])
} else {
String::new()
};
let flags = self.needs_flags_tag();
if flags && !self.p.is_set(AS::UnifiedHelpMessage) {
usage.push_str(" [FLAGS]");
} else if flags {
usage.push_str(" [OPTIONS]");
}
if !self.p.is_set(AS::UnifiedHelpMessage)
&& self
.p
.app
.get_opts_no_heading()
.any(|o| !o.is_set(ArgSettings::Required) && !o.is_set(ArgSettings::Hidden))
{
usage.push_str(" [OPTIONS]");
}
usage.push_str(&req_string[..]);
let has_last = self
.p
.app
.get_positionals()
.any(|p| p.is_set(ArgSettings::Last));
// places a '--' in the usage string if there are args and options
// supporting multiple values
if self
.p
.app
.get_opts_no_heading()
.any(|o| o.is_set(ArgSettings::MultipleValues))
&& self
.p
.app
.get_positionals()
.any(|p| !p.is_set(ArgSettings::Required))
&& !(self.p.app.has_visible_subcommands()
|| self.p.is_set(AS::AllowExternalSubcommands))
&& !has_last
{
usage.push_str(" [--]");
}
let not_req_or_hidden = |p: &Arg| {
(!p.is_set(ArgSettings::Required) || p.is_set(ArgSettings::Last))
&& !p.is_set(ArgSettings::Hidden)
};
if self.p.app.get_positionals().any(not_req_or_hidden) {
if let Some(args_tag) = self.get_args_tag(incl_reqs) {
usage.push_str(&*args_tag);
} else {
usage.push_str(" [ARGS]");
}
if has_last && incl_reqs {
let pos = self
.p
.app
.get_positionals()
.find(|p| p.is_set(ArgSettings::Last))
.expect(INTERNAL_ERROR_MSG);
debug!("Usage::create_help_usage: '{}' has .last(true)", pos.name);
let req = pos.is_set(ArgSettings::Required);
if req
&& self
.p
.app
.get_positionals()
.any(|p| !p.is_set(ArgSettings::Required))
{
usage.push_str(" -- <");
} else if req {
usage.push_str(" [--] <");
} else {
usage.push_str(" [-- <");
}
usage.push_str(&*pos.name_no_brackets());
usage.push_str(">");
usage.push_str(pos.multiple_str());
if !req {
usage.push_str("]");
}
}
}
// incl_reqs is only false when this function is called recursively
if self.p.app.has_visible_subcommands() && incl_reqs
|| self.p.is_set(AS::AllowExternalSubcommands)
{
if self.p.is_set(AS::SubcommandsNegateReqs) || self.p.is_set(AS::ArgsNegateSubcommands)
{
if !self.p.is_set(AS::ArgsNegateSubcommands) {
usage.push_str("\n ");
usage.push_str(&*self.create_help_usage(false));
usage.push_str(" <");
usage.push_str(self.p.app.subcommand_placeholder.unwrap_or("SUBCOMMAND"));
usage.push_str(">");
} else {
usage.push_str("\n ");
usage.push_str(&*name);
usage.push_str(" <");
usage.push_str(self.p.app.subcommand_placeholder.unwrap_or("SUBCOMMAND"));
usage.push_str(">");
}
} else if self.p.is_set(AS::SubcommandRequired)
|| self.p.is_set(AS::SubcommandRequiredElseHelp)
{
usage.push_str(" <");
usage.push_str(self.p.app.subcommand_placeholder.unwrap_or("SUBCOMMAND"));
usage.push_str(">");
} else {
usage.push_str(" [");
usage.push_str(self.p.app.subcommand_placeholder.unwrap_or("SUBCOMMAND"));
usage.push_str("]");
}
}
usage.shrink_to_fit();
debug!("Usage::create_help_usage: usage={}", usage);
usage
}
// Creates a context aware usage string, or "smart usage" from currently used
// args, and requirements
fn create_smart_usage(&self, used: &[Id]) -> String {
debug!("Usage::create_smart_usage");
let mut usage = String::with_capacity(75);
let r_string = self
.get_required_usage_from(used, None, true)
.iter()
.fold(String::new(), |acc, s| acc + &format!(" {}", s)[..]);
usage.push_str(
&self
.p
.app
.usage
.as_ref()
.unwrap_or_else(|| self.p.app.bin_name.as_ref().unwrap_or(&self.p.app.name))[..],
);
usage.push_str(&*r_string);
if self.p.is_set(AS::SubcommandRequired) {
usage.push_str(" <");
usage.push_str(self.p.app.subcommand_placeholder.unwrap_or("SUBCOMMAND"));
usage.push_str(">");
}
usage.shrink_to_fit();
usage
}
// Gets the `[ARGS]` tag for the usage string
fn get_args_tag(&self, incl_reqs: bool) -> Option<String> {
debug!("Usage::get_args_tag; incl_reqs = {:?}", incl_reqs);
let mut count = 0;
'outer: for pos in self
.p
.app
.get_positionals()
.filter(|pos| !pos.is_set(ArgSettings::Required))
.filter(|pos| !pos.is_set(ArgSettings::Hidden))
.filter(|pos| !pos.is_set(ArgSettings::Last))
{
debug!("Usage::get_args_tag:iter:{}", pos.name);
for grp_s in self.p.app.groups_for_arg(&pos.id) {
debug!("Usage::get_args_tag:iter:{:?}:iter:{:?}", pos.name, grp_s);
// if it's part of a required group we don't want to count it
if self
.p
.app
.groups
.iter()
.any(|g| g.required && (g.id == grp_s))
{
continue 'outer;
}
}
count += 1;
debug!(
"Usage::get_args_tag:iter: {} Args not required or hidden",
count
);
}
if !self.p.is_set(AS::DontCollapseArgsInUsage) && count > 1 {
debug!("Usage::get_args_tag:iter: More than one, returning [ARGS]");
return None; // [ARGS]
} else if count == 1 && incl_reqs {
let pos = self
.p
.app
.get_positionals()
.find(|pos| {
!pos.is_set(ArgSettings::Required)
&& !pos.is_set(ArgSettings::Hidden)
&& !pos.is_set(ArgSettings::Last)
})
.expect(INTERNAL_ERROR_MSG);
debug!(
"Usage::get_args_tag:iter: Exactly one, returning '{}'",
pos.name
);
return Some(format!(
" [{}]{}",
pos.name_no_brackets(),
pos.multiple_str()
));
} else if self.p.is_set(AS::DontCollapseArgsInUsage)
&& self.p.has_positionals()
&& incl_reqs
{
debug!("Usage::get_args_tag:iter: Don't collapse returning all");
return Some(
self.p
.app
.get_positionals()
.filter(|pos| !pos.is_set(ArgSettings::Required))
.filter(|pos| !pos.is_set(ArgSettings::Hidden))
.filter(|pos| !pos.is_set(ArgSettings::Last))
.map(|pos| format!(" [{}]{}", pos.name_no_brackets(), pos.multiple_str()))
.collect::<Vec<_>>()
.join(""),
);
} else if !incl_reqs {
debug!("Usage::get_args_tag:iter: incl_reqs=false, building secondary usage string");
let highest_req_pos = self
.p
.app
.get_positionals()
.filter_map(|pos| {
if pos.is_set(ArgSettings::Required) && !pos.is_set(ArgSettings::Last) {
Some(pos.index)
} else {
None
}
})
.max()
.unwrap_or_else(|| Some(self.p.app.get_positionals().count() as u64));
return Some(
self.p
.app
.get_positionals()
.filter(|pos| pos.index <= highest_req_pos)
.filter(|pos| !pos.is_set(ArgSettings::Required))
.filter(|pos| !pos.is_set(ArgSettings::Hidden))
.filter(|pos| !pos.is_set(ArgSettings::Last))
.map(|pos| format!(" [{}]{}", pos.name_no_brackets(), pos.multiple_str()))
.collect::<Vec<_>>()
.join(""),
);
}
Some("".into())
}
// Determines if we need the `[FLAGS]` tag in the usage string
fn needs_flags_tag(&self) -> bool {
debug!("Usage::needs_flags_tag");
'outer: for f in self.p.app.get_flags_no_heading() {
debug!("Usage::needs_flags_tag:iter: f={}", f.name);
if let Some(l) = f.long {
if l == "help" || l == "version" {
// Don't print `[FLAGS]` just for help or version
continue;
}
}
for grp_s in self.p.app.groups_for_arg(&f.id) {
debug!("Usage::needs_flags_tag:iter:iter: grp_s={:?}", grp_s);
if self
.p
.app
.groups
.iter()
.any(|g| g.id == grp_s && g.required)
{
debug!("Usage::needs_flags_tag:iter:iter: Group is required");
continue 'outer;
}
}
if f.is_set(ArgSettings::Hidden) {
continue;
}
debug!("Usage::needs_flags_tag:iter: [FLAGS] required");
return true;
}
debug!("Usage::needs_flags_tag: [FLAGS] not required");
false
}
// Returns the required args in usage string form by fully unrolling all groups
    // `incl_last`: should we include args that are Arg::Last? (i.e. `prog [foo] -- [last]`). We
// can't do that for required usages being built for subcommands because it would look like:
// `prog [foo] -- [last] <subcommand>` which is totally wrong.
pub(crate) fn get_required_usage_from(
&self,
incls: &[Id],
matcher: Option<&ArgMatcher>,
incl_last: bool,
) -> VecDeque<String> {
debug!(
"Usage::get_required_usage_from: incls={:?}, matcher={:?}, incl_last={:?}",
incls,
matcher.is_some(),
incl_last
);
let mut ret_val = VecDeque::new();
let mut unrolled_reqs = vec![];
for a in self.p.required.iter() {
if let Some(ref m) = matcher {
for aa in self.p.app.unroll_requirements_for_arg(a, m) {
unrolled_reqs.push(aa);
}
}
// always include the required arg itself. it will not be enumerated
// by unroll_requirements_for_arg.
unrolled_reqs.push(a.clone());
}
debug!(
"Usage::get_required_usage_from: unrolled_reqs={:?}",
unrolled_reqs
);
let args_in_groups = self
.p
.app
.groups
.iter()
.filter(|gn| self.p.required.contains(&gn.id))
.flat_map(|g| self.p.app.unroll_args_in_group(&g.id))
.collect::<Vec<_>>();
let pmap = if let Some(m) = matcher {
unrolled_reqs
.iter()
.chain(incls.iter())
.filter(|a| self.p.app.get_positionals().any(|p| &&p.id == a))
.filter(|&pos| !m.contains(pos))
.filter_map(|pos| self.p.app.find(pos))
.filter(|&pos| incl_last || !pos.is_set(ArgSettings::Last))
.filter(|pos| !args_in_groups.contains(&pos.id))
.map(|pos| (pos.index.unwrap(), pos))
.collect::<BTreeMap<u64, &Arg>>() // sort by index
} else {
unrolled_reqs
.iter()
.chain(incls.iter())
.filter(|a| self.p.app.get_positionals().any(|p| &&p.id == a))
.filter_map(|pos| self.p.app.find(pos))
.filter(|&pos| incl_last || !pos.is_set(ArgSettings::Last))
.filter(|pos| !args_in_groups.contains(&pos.id))
.map(|pos| (pos.index.unwrap(), pos))
.collect::<BTreeMap<u64, &Arg>>() // sort by index
};
for p in pmap.values() {
debug!("Usage::get_required_usage_from:iter:{:?}", p.id);
if args_in_groups.is_empty() || !args_in_groups.contains(&p.id) {
ret_val.push_back(p.to_string());
}
}
for a in unrolled_reqs
.iter()
.chain(incls.iter())
.filter(|name| !self.p.app.get_positionals().any(|p| &&p.id == name))
.filter(|name| !self.p.app.groups.iter().any(|g| &&g.id == name))
.filter(|name| !args_in_groups.contains(name))
.filter(|name| !(matcher.is_some() && matcher.as_ref().unwrap().contains(name)))
{
debug!("Usage::get_required_usage_from:iter:{:?}", a);
let arg = self
.p
.app
.find(&a)
.map(ToString::to_string)
.expect(INTERNAL_ERROR_MSG);
ret_val.push_back(arg);
}
let mut g_vec: Vec<String> = vec![];
for g in unrolled_reqs
.iter()
.filter(|n| self.p.app.groups.iter().any(|g| g.id == **n))
{
// don't print requirement for required groups that have an arg.
if let Some(m) = matcher {
let have_group_entry = self
.p
.app
.unroll_args_in_group(&g)
.iter()
.any(|arg| m.contains(&arg));
if have_group_entry {
continue;
}
}
let elem = self.p.app.format_group(g);
if !g_vec.contains(&elem) {
g_vec.push(elem);
}
}
for g in g_vec {
ret_val.push_back(g);
}
debug!("Usage::get_required_usage_from: ret_val={:?}", ret_val);
ret_val
}
}
| 36.841772 | 104 | 0.470996 |
015685218bd5ba2ee036b210995de5541393604f | 5,726 | use super::UdpSocket;
use bytes::{BufMut, BytesMut};
use core::task::{Context, Poll};
use futures_core::{ready, Stream};
use futures_sink::Sink;
use log::trace;
use std::io;
use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4};
use std::pin::Pin;
use tokio_codec::{Decoder, Encoder};
/// A unified `Stream` and `Sink` interface to an underlying `UdpSocket`, using
/// the `Encoder` and `Decoder` traits to encode and decode frames.
///
/// Raw UDP sockets work with datagrams, but higher-level code usually wants to
/// batch these into meaningful chunks, called "frames". This method layers
/// framing on top of this socket by using the `Encoder` and `Decoder` traits to
/// handle encoding and decoding of messages frames. Note that the incoming and
/// outgoing frame types may be distinct.
///
/// This function returns a *single* object that is both `Stream` and `Sink`;
/// grouping this into a single object is often useful for layering things which
/// require both read and write access to the underlying object.
///
/// If you want to work more directly with the streams and sink, consider
/// calling `split` on the `UdpFramed` returned by this method, which will break
/// them into separate objects, allowing them to interact more easily.
#[must_use = "sinks do nothing unless polled"]
#[derive(Debug)]
pub struct UdpFramed<C> {
socket: UdpSocket,
codec: C,
rd: BytesMut,
wr: BytesMut,
out_addr: SocketAddr,
flushed: bool,
}
impl<C: Decoder + Unpin> Stream for UdpFramed<C> {
type Item = Result<(C::Item, SocketAddr), C::Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let pin = self.get_mut();
pin.rd.reserve(INITIAL_RD_CAPACITY);
let (n, addr) = unsafe {
// Read into the buffer without having to initialize the memory.
let res = ready!(Pin::new(&mut pin.socket).poll_recv_from_priv(cx, pin.rd.bytes_mut()));
let (n, addr) = res?;
pin.rd.advance_mut(n);
(n, addr)
};
trace!("received {} bytes, decoding", n);
let frame_res = pin.codec.decode(&mut pin.rd);
pin.rd.clear();
let frame = frame_res?;
let result = frame.map(|frame| Ok((frame, addr))); // frame -> (frame, addr)
trace!("frame decoded from buffer");
Poll::Ready(result)
}
}
impl<C: Encoder + Unpin> Sink<(C::Item, SocketAddr)> for UdpFramed<C> {
type Error = C::Error;
fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
if !self.flushed {
match self.poll_flush(cx)? {
Poll::Ready(()) => {}
Poll::Pending => return Poll::Pending,
}
}
Poll::Ready(Ok(()))
}
fn start_send(self: Pin<&mut Self>, item: (C::Item, SocketAddr)) -> Result<(), Self::Error> {
trace!("sending frame");
let (frame, out_addr) = item;
let pin = self.get_mut();
pin.codec.encode(frame, &mut pin.wr)?;
pin.out_addr = out_addr;
pin.flushed = false;
trace!("frame encoded; length={}", pin.wr.len());
Ok(())
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
if self.flushed {
return Poll::Ready(Ok(()));
}
trace!("flushing frame; length={}", self.wr.len());
let Self {
ref mut socket,
ref mut out_addr,
ref mut wr,
..
} = *self;
let n = ready!(socket.poll_send_to_priv(cx, &wr, &out_addr))?;
trace!("written {}", n);
let wrote_all = n == self.wr.len();
self.wr.clear();
self.flushed = true;
let res = if wrote_all {
Ok(())
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"failed to write entire datagram to socket",
)
.into())
};
Poll::Ready(res)
}
fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
ready!(self.poll_flush(cx))?;
Poll::Ready(Ok(()))
}
}
const INITIAL_RD_CAPACITY: usize = 64 * 1024;
const INITIAL_WR_CAPACITY: usize = 8 * 1024;
impl<C> UdpFramed<C> {
/// Create a new `UdpFramed` backed by the given socket and codec.
///
/// See struct level documentation for more details.
pub fn new(socket: UdpSocket, codec: C) -> UdpFramed<C> {
UdpFramed {
socket,
codec,
out_addr: SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 0)),
rd: BytesMut::with_capacity(INITIAL_RD_CAPACITY),
wr: BytesMut::with_capacity(INITIAL_WR_CAPACITY),
flushed: true,
}
}
/// Returns a reference to the underlying I/O stream wrapped by `Framed`.
///
/// # Note
///
/// Care should be taken to not tamper with the underlying stream of data
/// coming in as it may corrupt the stream of frames otherwise being worked
/// with.
pub fn get_ref(&self) -> &UdpSocket {
&self.socket
}
/// Returns a mutable reference to the underlying I/O stream wrapped by
/// `Framed`.
///
/// # Note
///
/// Care should be taken to not tamper with the underlying stream of data
/// coming in as it may corrupt the stream of frames otherwise being worked
/// with.
pub fn get_mut(&mut self) -> &mut UdpSocket {
&mut self.socket
}
/// Consumes the `Framed`, returning its underlying I/O stream.
pub fn into_inner(self) -> UdpSocket {
self.socket
}
}
| 31.811111 | 100 | 0.586622 |
71932f2d6e2300c7864fb33b746be9a07a3345a0 | 9,424 | use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use std::os::unix::prelude::*;
use tempfile::tempfile;
use libc::{_exit, STDOUT_FILENO};
use nix::fcntl::{OFlag, open};
use nix::pty::*;
use nix::sys::stat;
use nix::sys::termios::*;
use nix::unistd::{write, close, pause};
/// Regression test for Issue #659
/// This is the correct way to explicitly close a `PtyMaster`
#[test]
fn test_explicit_close() {
let mut f = {
let m = posix_openpt(OFlag::O_RDWR).unwrap();
close(m.into_raw_fd()).unwrap();
tempfile().unwrap()
};
// This should work. But if there's been a double close, then it will
// return EBADF
f.write_all(b"whatever").unwrap();
}
/// Test equivalence of `ptsname` and `ptsname_r`
#[test]
#[cfg(any(target_os = "android", target_os = "linux"))]
fn test_ptsname_equivalence() {
let _m = crate::PTSNAME_MTX.lock();
// Open a new PTTY master
let master_fd = posix_openpt(OFlag::O_RDWR).unwrap();
assert!(master_fd.as_raw_fd() > 0);
// Get the name of the slave
let slave_name = unsafe { ptsname(&master_fd) }.unwrap() ;
let slave_name_r = ptsname_r(&master_fd).unwrap();
assert_eq!(slave_name, slave_name_r);
}
/// Test data copying of `ptsname`
// TODO need to run in a subprocess, since ptsname is non-reentrant
#[test]
#[cfg(any(target_os = "android", target_os = "linux"))]
fn test_ptsname_copy() {
let _m = crate::PTSNAME_MTX.lock();
// Open a new PTTY master
let master_fd = posix_openpt(OFlag::O_RDWR).unwrap();
assert!(master_fd.as_raw_fd() > 0);
// Get the name of the slave
let slave_name1 = unsafe { ptsname(&master_fd) }.unwrap();
let slave_name2 = unsafe { ptsname(&master_fd) }.unwrap();
assert_eq!(slave_name1, slave_name2);
// Also make sure that the string was actually copied and they point to different parts of
// memory.
assert!(slave_name1.as_ptr() != slave_name2.as_ptr());
}
/// Test data copying of `ptsname_r`
#[test]
#[cfg(any(target_os = "android", target_os = "linux"))]
fn test_ptsname_r_copy() {
// Open a new PTTY master
let master_fd = posix_openpt(OFlag::O_RDWR).unwrap();
assert!(master_fd.as_raw_fd() > 0);
// Get the name of the slave
let slave_name1 = ptsname_r(&master_fd).unwrap();
let slave_name2 = ptsname_r(&master_fd).unwrap();
assert_eq!(slave_name1, slave_name2);
assert!(slave_name1.as_ptr() != slave_name2.as_ptr());
}
/// Test that `ptsname` returns different names for different devices
#[test]
#[cfg(any(target_os = "android", target_os = "linux"))]
fn test_ptsname_unique() {
let _m = crate::PTSNAME_MTX.lock();
// Open a new PTTY master
let master1_fd = posix_openpt(OFlag::O_RDWR).unwrap();
assert!(master1_fd.as_raw_fd() > 0);
// Open a second PTTY master
let master2_fd = posix_openpt(OFlag::O_RDWR).unwrap();
assert!(master2_fd.as_raw_fd() > 0);
// Get the name of the slave
let slave_name1 = unsafe { ptsname(&master1_fd) }.unwrap();
let slave_name2 = unsafe { ptsname(&master2_fd) }.unwrap();
assert!(slave_name1 != slave_name2);
}
/// Common setup for testing PTTY pairs
fn open_ptty_pair() -> (PtyMaster, File) {
let _m = crate::PTSNAME_MTX.lock();
// Open a new PTTY master
let master = posix_openpt(OFlag::O_RDWR).expect("posix_openpt failed");
// Allow a slave to be generated for it
grantpt(&master).expect("grantpt failed");
unlockpt(&master).expect("unlockpt failed");
// Get the name of the slave
let slave_name = unsafe { ptsname(&master) }.expect("ptsname failed");
// Open the slave device
let slave_fd = open(Path::new(&slave_name), OFlag::O_RDWR, stat::Mode::empty()).unwrap();
#[cfg(target_os = "illumos")]
// TODO: rewrite using ioctl!
#[allow(clippy::comparison_chain)]
{
use libc::{ioctl, I_FIND, I_PUSH};
// On illumos systems, as per pts(7D), one must push STREAMS modules
// after opening a device path returned from ptsname().
let ptem = b"ptem\0";
let ldterm = b"ldterm\0";
let r = unsafe { ioctl(slave_fd, I_FIND, ldterm.as_ptr()) };
if r < 0 {
panic!("I_FIND failure");
} else if r == 0 {
if unsafe { ioctl(slave_fd, I_PUSH, ptem.as_ptr()) } < 0 {
panic!("I_PUSH ptem failure");
}
if unsafe { ioctl(slave_fd, I_PUSH, ldterm.as_ptr()) } < 0 {
panic!("I_PUSH ldterm failure");
}
}
}
let slave = unsafe { File::from_raw_fd(slave_fd) };
(master, slave)
}
/// Test opening a master/slave PTTY pair
///
/// This uses a common `open_ptty_pair` because many of these functions aren't useful by
/// themselves. So for this test we perform the basic act of getting a file handle for a
/// master/slave PTTY pair, then just sanity-check the raw values.
#[test]
fn test_open_ptty_pair() {
let (master, slave) = open_ptty_pair();
assert!(master.as_raw_fd() > 0);
assert!(slave.as_raw_fd() > 0);
}
/// Put the terminal in raw mode.
fn make_raw(fd: RawFd) {
let mut termios = tcgetattr(fd).unwrap();
cfmakeraw(&mut termios);
tcsetattr(fd, SetArg::TCSANOW, &termios).unwrap();
}
/// Test `io::Read` on the PTTY master
#[test]
fn test_read_ptty_pair() {
let (mut master, mut slave) = open_ptty_pair();
make_raw(slave.as_raw_fd());
let mut buf = [0u8; 5];
slave.write_all(b"hello").unwrap();
master.read_exact(&mut buf).unwrap();
assert_eq!(&buf, b"hello");
}
/// Test `io::Write` on the PTTY master
#[test]
fn test_write_ptty_pair() {
let (mut master, mut slave) = open_ptty_pair();
make_raw(slave.as_raw_fd());
let mut buf = [0u8; 5];
master.write_all(b"adios").unwrap();
slave.read_exact(&mut buf).unwrap();
assert_eq!(&buf, b"adios");
}
#[test]
fn test_openpty() {
    // openpty uses ptsname(3) internally
let _m = crate::PTSNAME_MTX.lock();
let pty = openpty(None, None).unwrap();
assert!(pty.master > 0);
assert!(pty.slave > 0);
// Writing to one should be readable on the other one
let string = "foofoofoo\n";
let mut buf = [0u8; 10];
write(pty.master, string.as_bytes()).unwrap();
crate::read_exact(pty.slave, &mut buf);
assert_eq!(&buf, string.as_bytes());
// Read the echo as well
let echoed_string = "foofoofoo\r\n";
let mut buf = [0u8; 11];
crate::read_exact(pty.master, &mut buf);
assert_eq!(&buf, echoed_string.as_bytes());
let string2 = "barbarbarbar\n";
let echoed_string2 = "barbarbarbar\r\n";
let mut buf = [0u8; 14];
write(pty.slave, string2.as_bytes()).unwrap();
crate::read_exact(pty.master, &mut buf);
assert_eq!(&buf, echoed_string2.as_bytes());
close(pty.master).unwrap();
close(pty.slave).unwrap();
}
#[test]
fn test_openpty_with_termios() {
    // openpty uses ptsname(3) internally
let _m = crate::PTSNAME_MTX.lock();
// Open one pty to get attributes for the second one
let mut termios = {
let pty = openpty(None, None).unwrap();
assert!(pty.master > 0);
assert!(pty.slave > 0);
let termios = tcgetattr(pty.slave).unwrap();
close(pty.master).unwrap();
close(pty.slave).unwrap();
termios
};
// Make sure newlines are not transformed so the data is preserved when sent.
termios.output_flags.remove(OutputFlags::ONLCR);
let pty = openpty(None, &termios).unwrap();
// Must be valid file descriptors
assert!(pty.master > 0);
assert!(pty.slave > 0);
// Writing to one should be readable on the other one
let string = "foofoofoo\n";
let mut buf = [0u8; 10];
write(pty.master, string.as_bytes()).unwrap();
crate::read_exact(pty.slave, &mut buf);
assert_eq!(&buf, string.as_bytes());
// read the echo as well
let echoed_string = "foofoofoo\n";
crate::read_exact(pty.master, &mut buf);
assert_eq!(&buf, echoed_string.as_bytes());
let string2 = "barbarbarbar\n";
let echoed_string2 = "barbarbarbar\n";
let mut buf = [0u8; 13];
write(pty.slave, string2.as_bytes()).unwrap();
crate::read_exact(pty.master, &mut buf);
assert_eq!(&buf, echoed_string2.as_bytes());
close(pty.master).unwrap();
close(pty.slave).unwrap();
}
#[test]
fn test_forkpty() {
use nix::unistd::ForkResult::*;
use nix::sys::signal::*;
use nix::sys::wait::wait;
    // forkpty calls openpty which uses ptsname(3) internally.
let _m0 = crate::PTSNAME_MTX.lock();
// forkpty spawns a child process
let _m1 = crate::FORK_MTX.lock();
let string = "naninani\n";
let echoed_string = "naninani\r\n";
let pty = unsafe {
forkpty(None, None).unwrap()
};
match pty.fork_result {
Child => {
write(STDOUT_FILENO, string.as_bytes()).unwrap();
pause(); // we need the child to stay alive until the parent calls read
unsafe { _exit(0); }
},
Parent { child } => {
let mut buf = [0u8; 10];
assert!(child.as_raw() > 0);
crate::read_exact(pty.master, &mut buf);
kill(child, SIGTERM).unwrap();
wait().unwrap(); // keep other tests using generic wait from getting our child
assert_eq!(&buf, echoed_string.as_bytes());
close(pty.master).unwrap();
},
}
}
| 31.205298 | 94 | 0.628077 |
fffa0902c500722caf63f55c8bc570d2231c3935 | 674 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod connection_constants;
mod connection_interface;
mod connection_util;
pub use connection_constants::ConnectionConstants;
pub use connection_interface::ConnectionInterface;
pub use connection_util::{
assert_connection_selections, build_connection_metadata,
build_connection_metadata_as_directive, build_edge_selections, build_page_info_selections,
extract_connection_directive, extract_connection_metadata_from_directive, get_default_filters,
ConnectionMetadata,
};
| 33.7 | 98 | 0.821958 |
4800a1473c61cb91836e1e671cdc5ddd489a5f8d | 10,849 | use super::ServiceConfig;
use crate::{
auth_service::Session,
http_service::{
util as http_util,
HttpError,
HttpResult,
},
util::get_timestamp_string,
};
use hyper::{
body::{
self,
Body,
Buf,
},
header::{
self,
HeaderMap,
},
StatusCode,
};
use lazy_static::lazy_static;
use log::{
error,
info,
warn,
};
use regex::Regex;
use serde_json::json;
use std::fmt::Display;
use tokio::{
fs::{
create_dir_all,
remove_dir_all,
File,
},
io::{
AsyncReadExt,
AsyncWriteExt,
},
process::Command,
};
pub struct Service {
config: ServiceConfig,
}
impl Service {
/// Creates a new instance of the Service struct
pub fn new(config: ServiceConfig) -> Self {
Self { config }
}
    /// Handles an upload-api `POST` request
pub async fn handle_post_request(
&self,
path: &str,
header_map: &HeaderMap,
body: Body,
session: &Session,
) -> HttpResult {
// Get the user information.
let (user_id, user_name) = match session {
Session::Valid(claims) | Session::Expired(claims) => {
(claims.sub(), claims.screen_name())
}
_ => {
return Err(HttpError::Unauthorized(None));
}
};
// Make sure the uploaded file is within the size limit.
let length = Self::get_content_length(&header_map)?;
if length > self.config.video_size_limit() {
warn!(
"User {}:{} tried to upload a large file of size {}",
user_name, user_id, length
);
return Err(HttpError::BadRequest("File is too big".into()));
}
info!(
"User {}:{} is uploading a file of size {}",
user_name, user_id, length
);
// Route the request
let (root_path, _) =
Self::extract_paths(path).map_err(|message| HttpError::BadRequest(message.into()))?;
match root_path {
"video" => Self::handle_video_upload(body, &self.config).await,
_ => {
let message = format!(
"Invalid upload-api path '{path}' for the specified method",
path = path
);
Err(HttpError::BadRequest(message.into()))
}
}
}
/// Handles video upload requests
async fn handle_video_upload(body: Body, config: &ServiceConfig) -> HttpResult {
let target_file_name = get_timestamp_string();
let target_file_path = format!("{}/{}", config.publish_dir(), target_file_name);
// Create the temporary directory
let tmp_dir = format!("{}/{}", config.video_tmp_dir(), get_timestamp_string());
create_dir_all(&tmp_dir).await.map_err(|error| {
error!("Could not create temporary directory: {}", error);
HttpError::InternalError(None)
})?;
async fn remove_temp_dir(dir: &str) {
if let Err(error) = remove_dir_all(dir).await {
error!(
"Failed to clean up temporary directory '{}': {}",
&dir, error
);
}
}
// Save the uploaded file there
let content = body::aggregate(body)
.await
.map(|mut buf| buf.copy_to_bytes(buf.remaining()))
.map_err(|error| {
error!("Unexpected error while collecting body: {}", error);
HttpError::InternalError(None)
})?;
let saved_file = {
let result = Self::save_content(content.as_ref(), &tmp_dir).await;
if result.is_err() {
remove_temp_dir(&tmp_dir).await;
}
result?
};
// Get the video duration
let duration = {
let result = Self::get_video_duration(&saved_file).await;
if result.is_err() {
remove_temp_dir(&tmp_dir).await;
}
result?
};
// Generate the web media
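        // The preview frame is taken from the midpoint of the video (duration / 2).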
let preview_data = {
let result =
Self::generate_web_media(&saved_file, duration / 2, target_file_path, &tmp_dir)
.await;
remove_temp_dir(&tmp_dir).await;
result?
};
let resp_body = json!({
"fileName": target_file_name,
"previewData": preview_data
});
Ok(http_util::build_json_response(&resp_body, StatusCode::OK))
}
/// Saves the specified content to disk for processing
async fn save_content(content: &[u8], out_dir: &str) -> Result<String, HttpError> {
let out_file = format!("{}/file", out_dir);
let mut file = File::create(&out_file).await.map_err(|error| {
error!("Could not create file: {}", error);
HttpError::InternalError(None)
})?;
file.write_all(content)
.await
.map(|_| out_file)
.map_err(|error| {
error!("Could not save content to file: {}", error);
HttpError::InternalError(None)
})
}
/// Gets the duration of the video
async fn get_video_duration(in_file: &str) -> Result<u64, HttpError> {
lazy_static! {
static ref DUR_REGEX: Regex = Regex::new(r"^(?P<duration>\d+)(?s).*$").unwrap();
}
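        // With these flags ffprobe is expected to print just the duration as a decimal
        // number of seconds (e.g. "12.345678"); DUR_REGEX keeps only the whole-seconds part.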
let result = Command::new("ffprobe")
.args(&["-v", "error"])
.args(&["-show_entries", "format=duration"])
.args(&["-print_format", "default=noprint_wrappers=1:nokey=1"])
.arg(in_file)
.output()
.await
.map_err(|error| {
error!("Failed to run ffprobe process: {}", error);
HttpError::InternalError(None)
})?;
if !result.status.success() {
error!("ffprobe execution failed: {}", result.status);
return Err(HttpError::InternalError(None));
}
let duration_str = String::from_utf8_lossy(&result.stdout);
let captures = DUR_REGEX.captures(&duration_str).ok_or_else(|| {
error!("Unable to parse duration: {}", duration_str);
HttpError::InternalError(None)
})?;
Ok(captures["duration"].parse::<u64>().unwrap())
}
/// Processes a video file for web embedding into the same dir as the input
/// file. See https://developers.google.com/media/vp9/settings/vod/ and https://trac.ffmpeg.org/wiki/Encode/H.264 for reference.
async fn generate_web_media(
in_file: &str,
preview_point_secs: u64,
target_file: String,
tmp_dir: &str,
) -> Result<String, HttpError> {
let target_webm = format!("{}.webm", target_file);
let target_mp4 = format!("{}.mp4", target_file);
let tmp_image = format!("{}/file.jpg", tmp_dir);
// Configure arguments for scaling and removing audio
let mut command = Command::new("ffmpeg");
let command = command
.arg("-an")
.args(&["-r", "25"])
.args(&["-i", &in_file])
.args(&[
"-filter_complex",
"[0:v]scale=320:-1,pad=320:240:(ow-iw)/2:(oh-ih)/2,split=2[webm][mp4]",
]);
// Configure arguments for webm output
let command = command
.args(&["-map", "[webm]"])
.args(&["-c:v", "libvpx-vp9"])
.args(&["-crf", "37"])
.args(&["-tile-columns", "0"])
.args(&["-threads", "2"])
.arg("-an")
.arg(&target_webm);
// Configure arguments for mp4 output
let command = command
.args(&["-map", "[mp4]"])
.args(&["-c:v", "h264"])
.args(&["-threads", "2"])
.arg("-an")
.arg(&target_mp4);
// Configure arguments for creating a preview image
let seek_arg = format!("00:00:{:02}", preview_point_secs);
let command = command
.args(&["-ss", &seek_arg])
.args(&["-frames:v", "1"])
.args(&["-filter:v", "scale=8:-1,pad=8:6:(ow-iw)/2:(oh-ih)/2"])
.arg(&tmp_image);
// Execute the command and read the resulting preview file
let result = command.output().await.map_err(|error| {
error!("Failed to run ffmpeg process: {}", error);
HttpError::InternalError(None)
})?;
if !result.status.success() {
error!("ffmpeg processing failed: {}", result.status);
error!("{}", String::from_utf8_lossy(&result.stderr));
return Err(HttpError::InternalError(None));
}
let mut file = File::open(tmp_image).await.map_err(|error| {
error!("Failed to open preview image: {}", error);
HttpError::InternalError(None)
})?;
let mut content = Vec::<u8>::new();
file.read_to_end(&mut content)
.await
.map(move |_| base64::encode(&content))
.map_err(|error| {
error!("Failed to read preview image: {}", error);
HttpError::InternalError(None)
})
}
/// Gets the content length header value
fn get_content_length(header_map: &HeaderMap) -> Result<u32, HttpError> {
let entry = header_map
.get(header::CONTENT_LENGTH)
.ok_or_else(|| HttpError::BadRequest("Missing Content-Length header".into()))?;
fn convert_error<T: Display>(error: T) -> HttpError {
let message = format!("Invalid Content-Length value: {}", error);
HttpError::BadRequest(message.into())
}
let length = entry.to_str().map_err(convert_error)?;
length.parse::<u32>().map_err(convert_error)
}
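    /// Splits an upload-api path into its root and relative components.
    /// For example, a path like "v1/video/clips/1.mp4" would yield
    /// ("video", "clips/1.mp4"); only API version 1 is currently accepted.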
fn extract_paths<'a>(path: &'a str) -> Result<(&'a str, &'a str), String> {
// Pull out the 'version', 'root path' and 'relative path' from the path.
lazy_static! {
static ref PATH_REGEX: Regex =
Regex::new(r"^v(?P<ver>\d+)/+(?P<root_path>\w+)(?:/+(?P<relative_path>.+)?)?$")
.unwrap();
}
let captures = PATH_REGEX
.captures(path)
.ok_or("Invalid upload-api path format".to_string())?;
// Validate the version (currently only version #1)
if !captures["ver"].eq("1") {
return Err("Unsupported upload-api version".to_string());
}
let root_path = captures.name("root_path").unwrap().as_str();
let relative_path = captures.name("relative_path").map_or("", |m| m.as_str());
Ok((root_path, relative_path))
}
}
| 32.28869 | 132 | 0.525671 |
9c4246e079b749074bb154e64badc4bca42f7f0f | 27,798 | // Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::{self, ValueRef, Integer, Pointer, Float, Double, Struct, Array, Vector, AttributePlace};
use base;
use common::{type_is_fat_ptr, BlockAndBuilder, C_uint};
use context::CrateContext;
use cabi_x86;
use cabi_x86_64;
use cabi_x86_win64;
use cabi_arm;
use cabi_aarch64;
use cabi_powerpc;
use cabi_powerpc64;
use cabi_s390x;
use cabi_mips;
use cabi_mips64;
use cabi_asmjs;
use cabi_msp430;
use cabi_sparc;
use machine::{llalign_of_min, llsize_of, llsize_of_alloc};
use type_::Type;
use type_of;
use rustc::hir;
use rustc::ty::{self, Ty};
use libc::c_uint;
use std::cmp;
pub use syntax::abi::Abi;
pub use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
use rustc::ty::layout::Layout;
#[derive(Clone, Copy, PartialEq, Debug)]
enum ArgKind {
/// Pass the argument directly using the normal converted
/// LLVM type or by coercing to another specified type
Direct,
/// Pass the argument indirectly via a hidden pointer
Indirect,
/// Ignore the argument (useful for empty struct)
Ignore,
}
// Hack to disable non_upper_case_globals only for the bitflags! and not for the rest
// of this module
pub use self::attr_impl::ArgAttribute;
#[allow(non_upper_case_globals)]
mod attr_impl {
// The subset of llvm::Attribute needed for arguments, packed into a bitfield.
bitflags! {
#[derive(Default, Debug)]
flags ArgAttribute : u16 {
const ByVal = 1 << 0,
const NoAlias = 1 << 1,
const NoCapture = 1 << 2,
const NonNull = 1 << 3,
const ReadOnly = 1 << 4,
const SExt = 1 << 5,
const StructRet = 1 << 6,
const ZExt = 1 << 7,
const InReg = 1 << 8,
}
}
}
macro_rules! for_each_kind {
($flags: ident, $f: ident, $($kind: ident),+) => ({
$(if $flags.contains(ArgAttribute::$kind) { $f(llvm::Attribute::$kind) })+
})
}
impl ArgAttribute {
fn for_each_kind<F>(&self, mut f: F) where F: FnMut(llvm::Attribute) {
for_each_kind!(self, f,
ByVal, NoAlias, NoCapture, NonNull, ReadOnly, SExt, StructRet, ZExt, InReg)
}
}
/// A compact representation of LLVM attributes (at least those relevant for this module)
/// that can be manipulated without interacting with LLVM's Attribute machinery.
#[derive(Copy, Clone, Debug, Default)]
pub struct ArgAttributes {
regular: ArgAttribute,
dereferenceable_bytes: u64,
}
impl ArgAttributes {
pub fn set(&mut self, attr: ArgAttribute) -> &mut Self {
self.regular = self.regular | attr;
self
}
pub fn set_dereferenceable(&mut self, bytes: u64) -> &mut Self {
self.dereferenceable_bytes = bytes;
self
}
pub fn apply_llfn(&self, idx: AttributePlace, llfn: ValueRef) {
unsafe {
self.regular.for_each_kind(|attr| attr.apply_llfn(idx, llfn));
if self.dereferenceable_bytes != 0 {
llvm::LLVMRustAddDereferenceableAttr(llfn,
idx.as_uint(),
self.dereferenceable_bytes);
}
}
}
pub fn apply_callsite(&self, idx: AttributePlace, callsite: ValueRef) {
unsafe {
self.regular.for_each_kind(|attr| attr.apply_callsite(idx, callsite));
if self.dereferenceable_bytes != 0 {
llvm::LLVMRustAddDereferenceableCallSiteAttr(callsite,
idx.as_uint(),
self.dereferenceable_bytes);
}
}
}
}
/// Information about how a specific C type
/// should be passed to or returned from a function
///
/// This is borrowed from clang's ABIInfo.h
#[derive(Clone, Copy, Debug)]
pub struct ArgType {
kind: ArgKind,
/// Original LLVM type
pub original_ty: Type,
/// Sizing LLVM type (pointers are opaque).
/// Unlike original_ty, this is guaranteed to be complete.
///
/// For example, while we're computing the function pointer type in
/// `struct Foo(fn(Foo));`, `original_ty` is still LLVM's `%Foo = {}`.
/// The field type will likely end up being `void(%Foo)*`, but we cannot
/// use `%Foo` to compute properties (e.g. size and alignment) of `Foo`,
/// until `%Foo` is completed by having all of its field types inserted,
/// so `ty` holds the "sizing type" of `Foo`, which replaces all pointers
/// with opaque ones, resulting in `{i8*}` for `Foo`.
/// ABI-specific logic can then look at the size, alignment and fields of
/// `{i8*}` in order to determine how the argument will be passed.
/// Only later will `original_ty` aka `%Foo` be used in the LLVM function
/// pointer type, without ever having introspected it.
pub ty: Type,
/// Signedness for integer types, None for other types
pub signedness: Option<bool>,
/// Coerced LLVM Type
pub cast: Option<Type>,
/// Dummy argument, which is emitted before the real argument
pub pad: Option<Type>,
/// LLVM attributes of argument
pub attrs: ArgAttributes
}
impl ArgType {
fn new(original_ty: Type, ty: Type) -> ArgType {
ArgType {
kind: ArgKind::Direct,
original_ty: original_ty,
ty: ty,
signedness: None,
cast: None,
pad: None,
attrs: ArgAttributes::default()
}
}
pub fn make_indirect(&mut self, ccx: &CrateContext) {
assert_eq!(self.kind, ArgKind::Direct);
// Wipe old attributes, likely not valid through indirection.
self.attrs = ArgAttributes::default();
let llarg_sz = llsize_of_alloc(ccx, self.ty);
// For non-immediate arguments the callee gets its own copy of
// the value on the stack, so there are no aliases. It's also
// program-invisible so can't possibly capture
self.attrs.set(ArgAttribute::NoAlias)
.set(ArgAttribute::NoCapture)
.set_dereferenceable(llarg_sz);
self.kind = ArgKind::Indirect;
}
pub fn ignore(&mut self) {
assert_eq!(self.kind, ArgKind::Direct);
self.kind = ArgKind::Ignore;
}
pub fn extend_integer_width_to(&mut self, bits: u64) {
// Only integers have signedness
if let Some(signed) = self.signedness {
if self.ty.int_width() < bits {
self.attrs.set(if signed {
ArgAttribute::SExt
} else {
ArgAttribute::ZExt
});
}
}
}
pub fn is_indirect(&self) -> bool {
self.kind == ArgKind::Indirect
}
pub fn is_ignore(&self) -> bool {
self.kind == ArgKind::Ignore
}
/// Get the LLVM type for an lvalue of the original Rust type of
/// this argument/return, i.e. the result of `type_of::type_of`.
pub fn memory_ty(&self, ccx: &CrateContext) -> Type {
if self.original_ty == Type::i1(ccx) {
Type::i8(ccx)
} else {
self.original_ty
}
}
/// Store a direct/indirect value described by this ArgType into a
/// lvalue for the original Rust type of this argument/return.
/// Can be used for both storing formal arguments into Rust variables
/// or results of call/invoke instructions into their destinations.
pub fn store(&self, bcx: &BlockAndBuilder, mut val: ValueRef, dst: ValueRef) {
if self.is_ignore() {
return;
}
let ccx = bcx.ccx;
if self.is_indirect() {
let llsz = llsize_of(ccx, self.ty);
let llalign = llalign_of_min(ccx, self.ty);
base::call_memcpy(bcx, dst, val, llsz, llalign as u32);
} else if let Some(ty) = self.cast {
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
let can_store_through_cast_ptr = false;
if can_store_through_cast_ptr {
let cast_dst = bcx.pointercast(dst, ty.ptr_to());
let store = bcx.store(val, cast_dst);
let llalign = llalign_of_min(ccx, self.ty);
unsafe {
llvm::LLVMSetAlignment(store, llalign);
}
} else {
// The actual return type is a struct, but the ABI
// adaptation code has cast it into some scalar type. The
// code that follows is the only reliable way I have
// found to do a transform like i64 -> {i32,i32}.
// Basically we dump the data onto the stack then memcpy it.
//
// Other approaches I tried:
// - Casting rust ret pointer to the foreign type and using Store
// is (a) unsafe if size of foreign type > size of rust type and
// (b) runs afoul of strict aliasing rules, yielding invalid
// assembly under -O (specifically, the store gets removed).
// - Truncating foreign type to correct integral type and then
// bitcasting to the struct type yields invalid cast errors.
                // Instead, we allocate some scratch space...
let llscratch = bcx.fcx().alloca(ty, "abi_cast");
base::Lifetime::Start.call(bcx, llscratch);
// ...where we first store the value...
bcx.store(val, llscratch);
// ...and then memcpy it to the intended destination.
base::call_memcpy(bcx,
bcx.pointercast(dst, Type::i8p(ccx)),
bcx.pointercast(llscratch, Type::i8p(ccx)),
C_uint(ccx, llsize_of_alloc(ccx, self.ty)),
cmp::min(llalign_of_min(ccx, self.ty),
llalign_of_min(ccx, ty)) as u32);
base::Lifetime::End.call(bcx, llscratch);
}
} else {
if self.original_ty == Type::i1(ccx) {
val = bcx.zext(val, Type::i8(ccx));
}
bcx.store(val, dst);
}
}
pub fn store_fn_arg(&self, bcx: &BlockAndBuilder, idx: &mut usize, dst: ValueRef) {
if self.pad.is_some() {
*idx += 1;
}
if self.is_ignore() {
return;
}
let val = llvm::get_param(bcx.fcx().llfn, *idx as c_uint);
*idx += 1;
self.store(bcx, val, dst);
}
}
/// Metadata describing how the arguments to a native function
/// should be passed in order to respect the native ABI.
///
/// I will do my best to describe this structure, but these
/// comments are reverse-engineered and may be inaccurate. -NDM
#[derive(Clone)]
pub struct FnType {
/// The LLVM types of each argument.
pub args: Vec<ArgType>,
/// LLVM return type.
pub ret: ArgType,
pub variadic: bool,
pub cconv: llvm::CallConv
}
impl FnType {
pub fn new<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
abi: Abi,
sig: &ty::FnSig<'tcx>,
extra_args: &[Ty<'tcx>]) -> FnType {
let mut fn_ty = FnType::unadjusted(ccx, abi, sig, extra_args);
fn_ty.adjust_for_abi(ccx, abi, sig);
fn_ty
}
pub fn unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
abi: Abi,
sig: &ty::FnSig<'tcx>,
extra_args: &[Ty<'tcx>]) -> FnType {
use self::Abi::*;
let cconv = match ccx.sess().target.target.adjust_abi(abi) {
RustIntrinsic | PlatformIntrinsic |
Rust | RustCall => llvm::CCallConv,
// It's the ABI's job to select this, not us.
System => bug!("system abi should be selected elsewhere"),
Stdcall => llvm::X86StdcallCallConv,
Fastcall => llvm::X86FastcallCallConv,
Vectorcall => llvm::X86_VectorCall,
C => llvm::CCallConv,
Win64 => llvm::X86_64_Win64,
SysV64 => llvm::X86_64_SysV,
Aapcs => llvm::ArmAapcsCallConv,
// These API constants ought to be more specific...
Cdecl => llvm::CCallConv,
};
let mut inputs = sig.inputs();
let extra_args = if abi == RustCall {
assert!(!sig.variadic && extra_args.is_empty());
match sig.inputs().last().unwrap().sty {
ty::TyTuple(ref tupled_arguments) => {
inputs = &sig.inputs()[0..sig.inputs().len() - 1];
&tupled_arguments[..]
}
_ => {
bug!("argument to function with \"rust-call\" ABI \
is not a tuple");
}
}
} else {
assert!(sig.variadic || extra_args.is_empty());
extra_args
};
let target = &ccx.sess().target.target;
let win_x64_gnu = target.target_os == "windows"
&& target.arch == "x86_64"
&& target.target_env == "gnu";
let linux_s390x = target.target_os == "linux"
&& target.arch == "s390x"
&& target.target_env == "gnu";
let rust_abi = match abi {
RustIntrinsic | PlatformIntrinsic | Rust | RustCall => true,
_ => false
};
let arg_of = |ty: Ty<'tcx>, is_return: bool| {
if ty.is_bool() {
let llty = Type::i1(ccx);
let mut arg = ArgType::new(llty, llty);
arg.attrs.set(ArgAttribute::ZExt);
arg
} else {
let mut arg = ArgType::new(type_of::type_of(ccx, ty),
type_of::sizing_type_of(ccx, ty));
if ty.is_integral() {
arg.signedness = Some(ty.is_signed());
}
// Rust enum types that map onto C enums also need to follow
// the target ABI zero-/sign-extension rules.
if let Layout::CEnum { signed, .. } = *ccx.layout_of(ty) {
arg.signedness = Some(signed);
}
if llsize_of_alloc(ccx, arg.ty) == 0 {
// For some forsaken reason, x86_64-pc-windows-gnu
// doesn't ignore zero-sized struct arguments.
// The same is true for s390x-unknown-linux-gnu.
if is_return || rust_abi ||
(!win_x64_gnu && !linux_s390x) {
arg.ignore();
}
}
arg
}
};
let ret_ty = sig.output();
let mut ret = arg_of(ret_ty, true);
if !type_is_fat_ptr(ccx, ret_ty) {
// The `noalias` attribute on the return value is useful to a
// function ptr caller.
if let ty::TyBox(_) = ret_ty.sty {
// `Box` pointer return values never alias because ownership
// is transferred
ret.attrs.set(ArgAttribute::NoAlias);
}
// We can also mark the return value as `dereferenceable` in certain cases
match ret_ty.sty {
// These are not really pointers but pairs, (pointer, len)
ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
ty::TyBox(ty) => {
let llty = type_of::sizing_type_of(ccx, ty);
let llsz = llsize_of_alloc(ccx, llty);
ret.attrs.set_dereferenceable(llsz);
}
_ => {}
}
}
let mut args = Vec::with_capacity(inputs.len() + extra_args.len());
// Handle safe Rust thin and fat pointers.
let rust_ptr_attrs = |ty: Ty<'tcx>, arg: &mut ArgType| match ty.sty {
// `Box` pointer parameters never alias because ownership is transferred
ty::TyBox(inner) => {
arg.attrs.set(ArgAttribute::NoAlias);
Some(inner)
}
ty::TyRef(b, mt) => {
use rustc::ty::{BrAnon, ReLateBound};
// `&mut` pointer parameters never alias other parameters, or mutable global data
//
// `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as
// both `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely
// on memory dependencies rather than pointer equality
let interior_unsafe = mt.ty.type_contents(ccx.tcx()).interior_unsafe();
if mt.mutbl != hir::MutMutable && !interior_unsafe {
arg.attrs.set(ArgAttribute::NoAlias);
}
if mt.mutbl == hir::MutImmutable && !interior_unsafe {
arg.attrs.set(ArgAttribute::ReadOnly);
}
// When a reference in an argument has no named lifetime, it's
// impossible for that reference to escape this function
// (returned or stored beyond the call by a closure).
if let ReLateBound(_, BrAnon(_)) = *b {
arg.attrs.set(ArgAttribute::NoCapture);
}
Some(mt.ty)
}
_ => None
};
for ty in inputs.iter().chain(extra_args.iter()) {
let mut arg = arg_of(ty, false);
if type_is_fat_ptr(ccx, ty) {
let original_tys = arg.original_ty.field_types();
let sizing_tys = arg.ty.field_types();
assert_eq!((original_tys.len(), sizing_tys.len()), (2, 2));
let mut data = ArgType::new(original_tys[0], sizing_tys[0]);
let mut info = ArgType::new(original_tys[1], sizing_tys[1]);
if let Some(inner) = rust_ptr_attrs(ty, &mut data) {
data.attrs.set(ArgAttribute::NonNull);
if ccx.tcx().struct_tail(inner).is_trait() {
info.attrs.set(ArgAttribute::NonNull);
}
}
args.push(data);
args.push(info);
} else {
if let Some(inner) = rust_ptr_attrs(ty, &mut arg) {
let llty = type_of::sizing_type_of(ccx, inner);
let llsz = llsize_of_alloc(ccx, llty);
arg.attrs.set_dereferenceable(llsz);
}
args.push(arg);
}
}
FnType {
args: args,
ret: ret,
variadic: sig.variadic,
cconv: cconv
}
}
pub fn adjust_for_abi<'a, 'tcx>(&mut self,
ccx: &CrateContext<'a, 'tcx>,
abi: Abi,
sig: &ty::FnSig<'tcx>) {
if abi == Abi::Rust || abi == Abi::RustCall ||
abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic {
let fixup = |arg: &mut ArgType| {
let mut llty = arg.ty;
// Replace newtypes with their inner-most type.
while llty.kind() == llvm::TypeKind::Struct {
let inner = llty.field_types();
if inner.len() != 1 {
break;
}
llty = inner[0];
}
if !llty.is_aggregate() {
// Scalars and vectors, always immediate.
if llty != arg.ty {
// Needs a cast as we've unpacked a newtype.
arg.cast = Some(llty);
}
return;
}
let size = llsize_of_alloc(ccx, llty);
if size > llsize_of_alloc(ccx, ccx.int_type()) {
arg.make_indirect(ccx);
} else if size > 0 {
// We want to pass small aggregates as immediates, but using
// a LLVM aggregate type for this leads to bad optimizations,
// so we pick an appropriately sized integer type instead.
arg.cast = Some(Type::ix(ccx, size * 8));
}
};
// Fat pointers are returned by-value.
if !self.ret.is_ignore() {
if !type_is_fat_ptr(ccx, sig.output()) {
fixup(&mut self.ret);
}
}
for arg in &mut self.args {
if arg.is_ignore() { continue; }
fixup(arg);
}
if self.ret.is_indirect() {
self.ret.attrs.set(ArgAttribute::StructRet);
}
return;
}
match &ccx.sess().target.target.arch[..] {
"x86" => {
let flavor = if abi == Abi::Fastcall {
cabi_x86::Flavor::Fastcall
} else {
cabi_x86::Flavor::General
};
cabi_x86::compute_abi_info(ccx, self, flavor);
},
"x86_64" => if abi == Abi::SysV64 {
cabi_x86_64::compute_abi_info(ccx, self);
} else if abi == Abi::Win64 || ccx.sess().target.target.options.is_like_windows {
cabi_x86_win64::compute_abi_info(ccx, self);
} else {
cabi_x86_64::compute_abi_info(ccx, self);
},
"aarch64" => cabi_aarch64::compute_abi_info(ccx, self),
"arm" => {
let flavor = if ccx.sess().target.target.target_os == "ios" {
cabi_arm::Flavor::Ios
} else {
cabi_arm::Flavor::General
};
cabi_arm::compute_abi_info(ccx, self, flavor);
},
"mips" => cabi_mips::compute_abi_info(ccx, self),
"mips64" => cabi_mips64::compute_abi_info(ccx, self),
"powerpc" => cabi_powerpc::compute_abi_info(ccx, self),
"powerpc64" => cabi_powerpc64::compute_abi_info(ccx, self),
"s390x" => cabi_s390x::compute_abi_info(ccx, self),
"asmjs" => cabi_asmjs::compute_abi_info(ccx, self),
"wasm32" => cabi_asmjs::compute_abi_info(ccx, self),
"msp430" => cabi_msp430::compute_abi_info(ccx, self),
"sparc" => cabi_sparc::compute_abi_info(ccx, self),
a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a))
}
if self.ret.is_indirect() {
self.ret.attrs.set(ArgAttribute::StructRet);
}
}
pub fn llvm_type(&self, ccx: &CrateContext) -> Type {
let mut llargument_tys = Vec::new();
let llreturn_ty = if self.ret.is_ignore() {
Type::void(ccx)
} else if self.ret.is_indirect() {
llargument_tys.push(self.ret.original_ty.ptr_to());
Type::void(ccx)
} else {
self.ret.cast.unwrap_or(self.ret.original_ty)
};
for arg in &self.args {
if arg.is_ignore() {
continue;
}
// add padding
if let Some(ty) = arg.pad {
llargument_tys.push(ty);
}
let llarg_ty = if arg.is_indirect() {
arg.original_ty.ptr_to()
} else {
arg.cast.unwrap_or(arg.original_ty)
};
llargument_tys.push(llarg_ty);
}
if self.variadic {
Type::variadic_func(&llargument_tys, &llreturn_ty)
} else {
Type::func(&llargument_tys, &llreturn_ty)
}
}
pub fn apply_attrs_llfn(&self, llfn: ValueRef) {
let mut i = if self.ret.is_indirect() { 1 } else { 0 };
if !self.ret.is_ignore() {
self.ret.attrs.apply_llfn(llvm::AttributePlace::Argument(i), llfn);
}
i += 1;
for arg in &self.args {
if !arg.is_ignore() {
if arg.pad.is_some() { i += 1; }
arg.attrs.apply_llfn(llvm::AttributePlace::Argument(i), llfn);
i += 1;
}
}
}
pub fn apply_attrs_callsite(&self, callsite: ValueRef) {
let mut i = if self.ret.is_indirect() { 1 } else { 0 };
if !self.ret.is_ignore() {
self.ret.attrs.apply_callsite(llvm::AttributePlace::Argument(i), callsite);
}
i += 1;
for arg in &self.args {
if !arg.is_ignore() {
if arg.pad.is_some() { i += 1; }
arg.attrs.apply_callsite(llvm::AttributePlace::Argument(i), callsite);
i += 1;
}
}
if self.cconv != llvm::CCallConv {
llvm::SetInstructionCallConv(callsite, self.cconv);
}
}
}
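/// Rounds `off` up to the next multiple of `a`,
/// e.g. `align_up_to(10, 8) == 16` and `align_up_to(16, 8) == 16`.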
pub fn align_up_to(off: usize, a: usize) -> usize {
return (off + a - 1) / a * a;
}
fn align(off: usize, ty: Type, pointer: usize) -> usize {
let a = ty_align(ty, pointer);
return align_up_to(off, a);
}
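/// Computes the ABI alignment in bytes of the LLVM type `ty`, where `pointer` is the
/// target's pointer size (and alignment) in bytes.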
pub fn ty_align(ty: Type, pointer: usize) -> usize {
match ty.kind() {
Integer => ((ty.int_width() as usize) + 7) / 8,
Pointer => pointer,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t, pointer)))
}
}
Array => {
let elt = ty.element_type();
ty_align(elt, pointer)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
ty_align(elt, pointer) * len
}
_ => bug!("ty_align: unhandled type")
}
}
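/// Computes the ABI size in bytes of the LLVM type `ty`, using `pointer` as the pointer
/// size in bytes; non-packed struct sizes include padding required by field alignment.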
pub fn ty_size(ty: Type, pointer: usize) -> usize {
match ty.kind() {
Integer => ((ty.int_width() as usize) + 7) / 8,
Pointer => pointer,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
let str_tys = ty.field_types();
str_tys.iter().fold(0, |s, t| s + ty_size(*t, pointer))
} else {
let str_tys = ty.field_types();
let size = str_tys.iter().fold(0, |s, t| {
align(s, *t, pointer) + ty_size(*t, pointer)
});
align(size, ty, pointer)
}
}
Array => {
let len = ty.array_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, pointer);
len * eltsz
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, pointer);
len * eltsz
},
_ => bug!("ty_size: unhandled type")
}
}
| 36.576316 | 99 | 0.515685 |
71f7c3d90adcf2f3f2047281b4c32986a4c35203 | 49,771 | // Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//! Enables pre-boot setup, instantiation and booting of a Firecracker VMM.
use std::convert::TryFrom;
use std::fmt::{Display, Formatter};
use std::io::{self, Read, Seek, SeekFrom};
use std::os::unix::io::{AsRawFd, RawFd};
use std::sync::{Arc, Mutex};
#[cfg(target_arch = "aarch64")]
use crate::construct_kvm_mpidrs;
#[cfg(target_arch = "x86_64")]
use crate::device_manager::legacy::PortIODeviceManager;
use crate::device_manager::mmio::MMIODeviceManager;
use crate::device_manager::persist::MMIODevManagerConstructorArgs;
use crate::persist::{MicrovmState, MicrovmStateError};
use crate::vmm_config::boot_source::BootConfig;
use crate::vstate::{
system::KvmContext,
vcpu::{Vcpu, VcpuConfig},
vm::Vm,
};
use crate::{device_manager, Error, Vmm, VmmEventsObserver};
use arch::InitrdConfig;
use devices::legacy::Serial;
use devices::virtio::{Balloon, Block, MmioTransport, Net, VirtioDevice, Vsock, VsockUnixBackend};
use kernel::cmdline::Cmdline as KernelCmdline;
use logger::warn;
use polly::event_manager::{Error as EventManagerError, EventManager, Subscriber};
use seccomp::{BpfProgramRef, SeccompFilter};
use snapshot::Persist;
use utils::eventfd::EventFd;
use utils::terminal::Terminal;
use utils::time::TimestampUs;
use vm_memory::{GuestAddress, GuestMemoryMmap};
/// Errors associated with starting the instance.
#[derive(Debug)]
pub enum StartMicrovmError {
/// Unable to attach block device to Vmm.
AttachBlockDevice(io::Error),
/// This error is thrown by the minimal boot loader implementation.
ConfigureSystem(arch::Error),
/// Internal errors are due to resource exhaustion.
CreateNetDevice(devices::virtio::net::Error),
/// Failed to create a `RateLimiter` object.
CreateRateLimiter(io::Error),
/// Memory regions are overlapping or mmap fails.
GuestMemoryMmap(vm_memory::Error),
/// Cannot load initrd due to an invalid memory configuration.
InitrdLoad,
/// Cannot load initrd due to an invalid image.
InitrdRead(io::Error),
/// Internal error encountered while starting a microVM.
Internal(Error),
/// The kernel command line is invalid.
KernelCmdline(String),
/// Cannot load kernel due to invalid memory configuration or invalid kernel image.
KernelLoader(kernel::loader::Error),
/// Cannot load command line string.
LoadCommandline(kernel::cmdline::Error),
/// Cannot start the VM because the kernel was not configured.
MissingKernelConfig,
/// Cannot start the VM because the size of the guest memory was not specified.
MissingMemSizeConfig,
/// The net device configuration is missing the tap device.
NetDeviceNotConfigured,
/// Cannot open the block device backing file.
OpenBlockDevice(io::Error),
/// Cannot register an EventHandler.
RegisterEvent(EventManagerError),
/// Cannot initialize a MMIO Device or add a device to the MMIO Bus or cmdline.
RegisterMmioDevice(device_manager::mmio::Error),
/// Cannot restore microvm state.
RestoreMicrovmState(MicrovmStateError),
}
/// It's convenient to automatically convert `kernel::cmdline::Error`s
/// to `StartMicrovmError`s.
impl std::convert::From<kernel::cmdline::Error> for StartMicrovmError {
fn from(e: kernel::cmdline::Error) -> StartMicrovmError {
StartMicrovmError::KernelCmdline(e.to_string())
}
}
impl Display for StartMicrovmError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
use self::StartMicrovmError::*;
match self {
AttachBlockDevice(err) => {
write!(f, "Unable to attach block device to Vmm. Error: {}", err)
}
ConfigureSystem(e) => write!(f, "System configuration error: {:?}", e),
CreateRateLimiter(err) => write!(f, "Cannot create RateLimiter: {}", err),
CreateNetDevice(err) => {
let mut err_msg = format!("{:?}", err);
err_msg = err_msg.replace("\"", "");
write!(f, "Cannot create network device. {}", err_msg)
}
GuestMemoryMmap(err) => {
// Remove imbricated quotes from error message.
let mut err_msg = format!("{:?}", err);
err_msg = err_msg.replace("\"", "");
write!(f, "Invalid Memory Configuration: {}", err_msg)
}
InitrdLoad => write!(
f,
"Cannot load initrd due to an invalid memory configuration."
),
InitrdRead(err) => write!(f, "Cannot load initrd due to an invalid image: {}", err),
Internal(err) => write!(f, "Internal error while starting microVM: {:?}", err),
KernelCmdline(err) => write!(f, "Invalid kernel command line: {}", err),
KernelLoader(err) => {
let mut err_msg = format!("{}", err);
err_msg = err_msg.replace("\"", "");
write!(
f,
"Cannot load kernel due to invalid memory configuration or invalid kernel \
image. {}",
err_msg
)
}
LoadCommandline(err) => {
let mut err_msg = format!("{}", err);
err_msg = err_msg.replace("\"", "");
write!(f, "Cannot load command line string. {}", err_msg)
}
MissingKernelConfig => write!(f, "Cannot start microvm without kernel configuration."),
MissingMemSizeConfig => {
write!(f, "Cannot start microvm without guest mem_size config.")
}
NetDeviceNotConfigured => {
write!(f, "The net device configuration is missing the tap device.")
}
OpenBlockDevice(err) => {
let mut err_msg = format!("{:?}", err);
err_msg = err_msg.replace("\"", "");
write!(f, "Cannot open the block device backing file. {}", err_msg)
}
RegisterEvent(err) => write!(f, "Cannot register EventHandler. {:?}", err),
RegisterMmioDevice(err) => {
let mut err_msg = format!("{}", err);
err_msg = err_msg.replace("\"", "");
write!(
f,
"Cannot initialize a MMIO Device or add a device to the MMIO Bus or cmdline. {}",
err_msg
)
}
RestoreMicrovmState(err) => write!(f, "Cannot restore microvm state. Error: {}", err),
}
}
}
// Wrapper over io::Stdin that implements `Serial::ReadableFd` and `vmm::VmmEventsObserver`.
pub(crate) struct SerialStdin(io::Stdin);
impl SerialStdin {
/// Returns a `SerialStdin` wrapper over `io::stdin`.
pub fn get() -> Self {
SerialStdin(io::stdin())
}
}
impl io::Read for SerialStdin {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.0.read(buf)
}
}
impl AsRawFd for SerialStdin {
fn as_raw_fd(&self) -> RawFd {
self.0.as_raw_fd()
}
}
impl devices::legacy::ReadableFd for SerialStdin {}
impl VmmEventsObserver for SerialStdin {
fn on_vmm_boot(&mut self) -> std::result::Result<(), utils::errno::Error> {
// Set raw mode for stdin.
self.0.lock().set_raw_mode().map_err(|e| {
warn!("Cannot set raw mode for the terminal. {:?}", e);
e
})?;
// Set non blocking stdin.
self.0.lock().set_non_block(true).map_err(|e| {
warn!("Cannot set non block for the terminal. {:?}", e);
e
})
}
fn on_vmm_stop(&mut self) -> std::result::Result<(), utils::errno::Error> {
self.0.lock().set_canon_mode().map_err(|e| {
warn!("Cannot set canonical mode for the terminal. {:?}", e);
e
})
}
}
#[cfg_attr(target_arch = "aarch64", allow(unused))]
fn create_vmm_and_vcpus(
event_manager: &mut EventManager,
guest_memory: GuestMemoryMmap,
track_dirty_pages: bool,
vcpu_count: u8,
) -> std::result::Result<(Vmm, Vec<Vcpu>), StartMicrovmError> {
use self::StartMicrovmError::*;
// Set up Kvm Vm and register memory regions.
let mut vm = setup_kvm_vm(&guest_memory, track_dirty_pages)?;
// Vmm exit event.
let exit_evt = EventFd::new(libc::EFD_NONBLOCK)
.map_err(Error::EventFd)
.map_err(Internal)?;
// Instantiate the MMIO device manager.
// 'mmio_base' address has to be an address which is protected by the kernel
    // and is architecture-specific.
let mmio_device_manager =
MMIODeviceManager::new(arch::MMIO_MEM_START, (arch::IRQ_BASE, arch::IRQ_MAX));
let vcpus;
    // For x86_64 we need to create the interrupt controller before calling `KVM_CREATE_VCPU`
// while on aarch64 we need to do it the other way around.
#[cfg(target_arch = "x86_64")]
let pio_device_manager = {
setup_interrupt_controller(&mut vm)?;
vcpus = create_vcpus(&vm, vcpu_count, &exit_evt).map_err(Internal)?;
// Serial device setup.
let serial_device = setup_serial_device(
event_manager,
Box::new(SerialStdin::get()),
Box::new(io::stdout()),
)
.map_err(Internal)?;
// x86_64 uses the i8042 reset event as the Vmm exit event.
let reset_evt = exit_evt
.try_clone()
.map_err(Error::EventFd)
.map_err(Internal)?;
create_pio_dev_manager_with_legacy_devices(&vm, serial_device, reset_evt)
.map_err(Internal)?
};
    // On aarch64, the vCPUs need to be created (i.e. call KVM_CREATE_VCPU) before setting up the
    // IRQ chip, because the `KVM_CREATE_VCPU` ioctl will return an error if the IRQCHIP
// was already initialized.
// Search for `kvm_arch_vcpu_create` in arch/arm/kvm/arm.c.
#[cfg(target_arch = "aarch64")]
{
vcpus = create_vcpus(&vm, vcpu_count, &exit_evt).map_err(Internal)?;
setup_interrupt_controller(&mut vm, vcpu_count)?;
}
let vmm = Vmm {
events_observer: Some(Box::new(SerialStdin::get())),
guest_memory,
vcpus_handles: Vec::new(),
exit_evt,
vm,
mmio_device_manager,
#[cfg(target_arch = "x86_64")]
pio_device_manager,
};
Ok((vmm, vcpus))
}
/// Builds and starts a microVM based on the current Firecracker VmResources configuration.
///
/// This is the default build recipe, one could build other microVM flavors by using the
/// independent functions in this module instead of calling this recipe.
///
/// An `Arc` reference of the built `Vmm` is also plugged in the `EventManager`, while another
/// is returned.
pub fn build_microvm_for_boot(
vm_resources: &super::resources::VmResources,
event_manager: &mut EventManager,
seccomp_filter: BpfProgramRef,
) -> std::result::Result<Arc<Mutex<Vmm>>, StartMicrovmError> {
use self::StartMicrovmError::*;
let boot_config = vm_resources.boot_source().ok_or(MissingKernelConfig)?;
let track_dirty_pages = vm_resources.track_dirty_pages();
let guest_memory = create_guest_memory(
vm_resources
.vm_config()
.mem_size_mib
.ok_or(MissingMemSizeConfig)?,
track_dirty_pages,
)?;
let vcpu_config = vm_resources.vcpu_config();
let entry_addr = load_kernel(boot_config, &guest_memory)?;
let initrd = load_initrd_from_config(boot_config, &guest_memory)?;
// Clone the command-line so that a failed boot doesn't pollute the original.
#[allow(unused_mut)]
let mut boot_cmdline = boot_config.cmdline.clone();
// Timestamp for measuring microVM boot duration.
let request_ts = TimestampUs::default();
let (mut vmm, mut vcpus) = create_vmm_and_vcpus(
event_manager,
guest_memory,
track_dirty_pages,
vcpu_config.vcpu_count,
)?;
// The boot timer device needs to be the first device attached in order
// to maintain the same MMIO address referenced in the documentation
// and tests.
if vm_resources.boot_timer {
attach_boot_timer_device(&mut vmm, request_ts)?;
}
if let Some(balloon) = vm_resources.balloon.get() {
attach_balloon_device(&mut vmm, &mut boot_cmdline, balloon, event_manager)?;
}
attach_block_devices(
&mut vmm,
&mut boot_cmdline,
vm_resources.block.list.iter(),
event_manager,
)?;
attach_net_devices(
&mut vmm,
&mut boot_cmdline,
vm_resources.net_builder.iter(),
event_manager,
)?;
if let Some(unix_vsock) = vm_resources.vsock.get() {
attach_unixsock_vsock_device(&mut vmm, &mut boot_cmdline, unix_vsock, event_manager)?;
}
#[cfg(target_arch = "aarch64")]
attach_legacy_devices_aarch64(event_manager, &mut vmm, &mut boot_cmdline).map_err(Internal)?;
configure_system_for_boot(
&vmm,
vcpus.as_mut(),
vcpu_config,
entry_addr,
&initrd,
boot_cmdline,
)?;
// Move vcpus to their own threads and start their state machine in the 'Paused' state.
vmm.start_vcpus(vcpus, seccomp_filter).map_err(Internal)?;
// Load seccomp filters for the VMM thread.
// Execution panics if filters cannot be loaded, use --seccomp-level=0 if skipping filters
// altogether is the desired behaviour.
// Keep this as the last step before resuming vcpus.
SeccompFilter::apply(seccomp_filter.to_vec())
.map_err(Error::SeccompFilters)
.map_err(Internal)?;
// The vcpus start off in the `Paused` state, let them run.
vmm.resume_vm().map_err(Internal)?;
let vmm = Arc::new(Mutex::new(vmm));
event_manager
.add_subscriber(vmm.clone())
.map_err(RegisterEvent)?;
Ok(vmm)
}
/// Builds and starts a microVM based on the provided MicrovmState.
///
/// An `Arc` reference of the built `Vmm` is also plugged in the `EventManager`, while another
/// is returned.
pub fn build_microvm_from_snapshot(
event_manager: &mut EventManager,
microvm_state: MicrovmState,
guest_memory: GuestMemoryMmap,
track_dirty_pages: bool,
seccomp_filter: BpfProgramRef,
) -> std::result::Result<Arc<Mutex<Vmm>>, StartMicrovmError> {
use self::StartMicrovmError::*;
let vcpu_count = u8::try_from(microvm_state.vcpu_states.len())
.map_err(|_| MicrovmStateError::InvalidInput)
.map_err(RestoreMicrovmState)?;
// Build Vmm.
let (mut vmm, vcpus) = create_vmm_and_vcpus(
event_manager,
guest_memory.clone(),
track_dirty_pages,
vcpu_count,
)?;
#[cfg(target_arch = "aarch64")]
{
let mpidrs = construct_kvm_mpidrs(µvm_state.vcpu_states);
// Restore kvm vm state.
vmm.vm
.restore_state(&mpidrs, µvm_state.vm_state)
.map_err(MicrovmStateError::RestoreVmState)
.map_err(RestoreMicrovmState)?;
}
// Restore kvm vm state.
#[cfg(target_arch = "x86_64")]
vmm.vm
.restore_state(µvm_state.vm_state)
.map_err(MicrovmStateError::RestoreVmState)
.map_err(RestoreMicrovmState)?;
// Restore devices states.
let mmio_ctor_args = MMIODevManagerConstructorArgs {
mem: guest_memory,
vm: vmm.vm.fd(),
event_manager,
};
vmm.mmio_device_manager =
MMIODeviceManager::restore(mmio_ctor_args, µvm_state.device_states)
.map_err(MicrovmStateError::RestoreDevices)
.map_err(RestoreMicrovmState)?;
// Move vcpus to their own threads and start their state machine in the 'Paused' state.
vmm.start_vcpus(vcpus, seccomp_filter)
.map_err(StartMicrovmError::Internal)?;
// Restore vcpus kvm state.
vmm.restore_vcpu_states(microvm_state.vcpu_states)
.map_err(RestoreMicrovmState)?;
let vmm = Arc::new(Mutex::new(vmm));
event_manager
.add_subscriber(vmm.clone())
.map_err(StartMicrovmError::RegisterEvent)?;
// Load seccomp filters for the VMM thread.
// Keep this as the last step of the building process.
SeccompFilter::apply(seccomp_filter.to_vec())
.map_err(Error::SeccompFilters)
.map_err(StartMicrovmError::Internal)?;
Ok(vmm)
}
/// Creates GuestMemory of `mem_size_mib` MiB in size.
pub fn create_guest_memory(
mem_size_mib: usize,
track_dirty_pages: bool,
) -> std::result::Result<GuestMemoryMmap, StartMicrovmError> {
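    // Convert MiB to bytes (1 MiB = 2^20 bytes).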
let mem_size = mem_size_mib << 20;
let arch_mem_regions = arch::arch_memory_regions(mem_size);
Ok(
GuestMemoryMmap::from_ranges_guarded(&arch_mem_regions, track_dirty_pages)
.map_err(StartMicrovmError::GuestMemoryMmap)?,
)
}
fn load_kernel(
boot_config: &BootConfig,
guest_memory: &GuestMemoryMmap,
) -> std::result::Result<GuestAddress, StartMicrovmError> {
let mut kernel_file = boot_config
.kernel_file
.try_clone()
.map_err(|e| StartMicrovmError::Internal(Error::KernelFile(e)))?;
let entry_addr =
kernel::loader::load_kernel(guest_memory, &mut kernel_file, arch::get_kernel_start())
.map_err(StartMicrovmError::KernelLoader)?;
Ok(entry_addr)
}
fn load_initrd_from_config(
boot_cfg: &BootConfig,
vm_memory: &GuestMemoryMmap,
) -> std::result::Result<Option<InitrdConfig>, StartMicrovmError> {
use self::StartMicrovmError::InitrdRead;
Ok(match &boot_cfg.initrd_file {
Some(f) => Some(load_initrd(
vm_memory,
&mut f.try_clone().map_err(InitrdRead)?,
)?),
None => None,
})
}
/// Loads the initrd from a file into the given memory slice.
///
/// * `vm_memory` - The guest memory the initrd is written to.
/// * `image` - The initrd image.
///
/// Returns the result of initrd loading
fn load_initrd<F>(
vm_memory: &GuestMemoryMmap,
image: &mut F,
) -> std::result::Result<InitrdConfig, StartMicrovmError>
where
F: Read + Seek,
{
use self::StartMicrovmError::{InitrdLoad, InitrdRead};
let size: usize;
// Get the image size
match image.seek(SeekFrom::End(0)) {
Err(e) => return Err(InitrdRead(e)),
Ok(0) => {
return Err(InitrdRead(io::Error::new(
io::ErrorKind::InvalidData,
"Initrd image seek returned a size of zero",
)))
}
Ok(s) => size = s as usize,
};
// Go back to the image start
image.seek(SeekFrom::Start(0)).map_err(InitrdRead)?;
// Get the target address
let address = arch::initrd_load_addr(vm_memory, size).map_err(|_| InitrdLoad)?;
// Load the image into memory
vm_memory
.read_from(GuestAddress(address), image, size)
.map_err(|_| InitrdLoad)?;
Ok(InitrdConfig {
address: GuestAddress(address),
size,
})
}
pub(crate) fn setup_kvm_vm(
guest_memory: &GuestMemoryMmap,
track_dirty_pages: bool,
) -> std::result::Result<Vm, StartMicrovmError> {
use self::StartMicrovmError::Internal;
let kvm = KvmContext::new()
.map_err(Error::KvmContext)
.map_err(Internal)?;
let mut vm = Vm::new(kvm.fd()).map_err(Error::Vm).map_err(Internal)?;
vm.memory_init(&guest_memory, kvm.max_memslots(), track_dirty_pages)
.map_err(Error::Vm)
.map_err(Internal)?;
Ok(vm)
}
/// Sets up the irqchip for a x86_64 microVM.
#[cfg(target_arch = "x86_64")]
pub fn setup_interrupt_controller(vm: &mut Vm) -> std::result::Result<(), StartMicrovmError> {
vm.setup_irqchip()
.map_err(Error::Vm)
.map_err(StartMicrovmError::Internal)
}
/// Sets up the irqchip for a aarch64 microVM.
#[cfg(target_arch = "aarch64")]
pub fn setup_interrupt_controller(
vm: &mut Vm,
vcpu_count: u8,
) -> std::result::Result<(), StartMicrovmError> {
vm.setup_irqchip(vcpu_count)
.map_err(Error::Vm)
.map_err(StartMicrovmError::Internal)
}
/// Sets up the serial device.
pub fn setup_serial_device(
event_manager: &mut EventManager,
input: Box<dyn devices::legacy::ReadableFd + Send>,
out: Box<dyn io::Write + Send>,
) -> super::Result<Arc<Mutex<Serial>>> {
let interrupt_evt = EventFd::new(libc::EFD_NONBLOCK).map_err(Error::EventFd)?;
let kick_stdin_read_evt = EventFd::new(libc::EFD_NONBLOCK).map_err(Error::EventFd)?;
let serial = Arc::new(Mutex::new(Serial::new_in_out(
interrupt_evt,
input,
out,
Some(kick_stdin_read_evt),
)));
if let Err(e) = event_manager.add_subscriber(serial.clone()) {
// TODO: We just log this message, and immediately return Ok, instead of returning the
// actual error because this operation always fails with EPERM when adding a fd which
// has been redirected to /dev/null via dup2 (this may happen inside the jailer).
// Find a better solution to this (and think about the state of the serial device
// while we're at it).
warn!("Could not add serial input event to epoll: {:?}", e);
}
Ok(serial)
}
#[cfg(target_arch = "aarch64")]
/// Sets up the RTC device.
pub fn setup_rtc_device() -> super::Result<Arc<Mutex<devices::legacy::RTC>>> {
let rtc_evt = EventFd::new(libc::EFD_NONBLOCK).map_err(Error::EventFd)?;
let rtc = Arc::new(Mutex::new(devices::legacy::RTC::new(rtc_evt)));
Ok(rtc)
}
#[cfg(target_arch = "x86_64")]
fn create_pio_dev_manager_with_legacy_devices(
vm: &Vm,
serial: Arc<Mutex<devices::legacy::Serial>>,
i8042_reset_evfd: EventFd,
) -> std::result::Result<PortIODeviceManager, super::Error> {
let mut pio_dev_mgr =
PortIODeviceManager::new(serial, i8042_reset_evfd).map_err(Error::CreateLegacyDevice)?;
pio_dev_mgr
.register_devices(vm.fd())
.map_err(Error::LegacyIOBus)?;
Ok(pio_dev_mgr)
}
#[cfg(target_arch = "aarch64")]
fn attach_legacy_devices_aarch64(
event_manager: &mut EventManager,
vmm: &mut Vmm,
cmdline: &mut KernelCmdline,
) -> super::Result<()> {
// Serial device setup.
if cmdline.as_str().contains("console=") {
let serial = setup_serial_device(
event_manager,
Box::new(SerialStdin::get()),
Box::new(io::stdout()),
)?;
vmm.mmio_device_manager
.register_mmio_serial(vmm.vm.fd(), serial, None)
.map_err(Error::RegisterMMIODevice)?;
vmm.mmio_device_manager
.add_mmio_serial_to_cmdline(cmdline)
.map_err(Error::RegisterMMIODevice)?;
}
let rtc = setup_rtc_device()?;
vmm.mmio_device_manager
.register_mmio_rtc(vmm.vm.fd(), rtc, None)
.map_err(Error::RegisterMMIODevice)
}
fn create_vcpus(vm: &Vm, vcpu_count: u8, exit_evt: &EventFd) -> super::Result<Vec<Vcpu>> {
let mut vcpus = Vec::with_capacity(vcpu_count as usize);
for cpu_idx in 0..vcpu_count {
let exit_evt = exit_evt.try_clone().map_err(Error::EventFd)?;
let vcpu = Vcpu::new(cpu_idx, vm, exit_evt).map_err(Error::VcpuCreate)?;
#[cfg(target_arch = "aarch64")]
vcpu.kvm_vcpu.init(vm.fd()).map_err(Error::VcpuInit)?;
vcpus.push(vcpu);
}
Ok(vcpus)
}
/// Configures the system for booting Linux.
#[cfg_attr(target_arch = "aarch64", allow(unused))]
pub fn configure_system_for_boot(
vmm: &Vmm,
vcpus: &mut [Vcpu],
vcpu_config: VcpuConfig,
entry_addr: GuestAddress,
initrd: &Option<InitrdConfig>,
boot_cmdline: KernelCmdline,
) -> std::result::Result<(), StartMicrovmError> {
use self::StartMicrovmError::*;
#[cfg(target_arch = "x86_64")]
{
for vcpu in vcpus.iter_mut() {
vcpu.kvm_vcpu
.configure(
vmm.guest_memory(),
entry_addr,
&vcpu_config,
vmm.vm.supported_cpuid().clone(),
)
.map_err(Error::VcpuConfigure)
.map_err(Internal)?;
}
// Write the kernel command line to guest memory. This is x86_64 specific, since on
// aarch64 the command line will be specified through the FDT.
kernel::loader::load_cmdline(
vmm.guest_memory(),
GuestAddress(arch::x86_64::layout::CMDLINE_START),
&boot_cmdline.as_cstring().map_err(LoadCommandline)?,
)
.map_err(LoadCommandline)?;
arch::x86_64::configure_system(
&vmm.guest_memory,
vm_memory::GuestAddress(arch::x86_64::layout::CMDLINE_START),
boot_cmdline.len() + 1,
initrd,
vcpus.len() as u8,
)
.map_err(ConfigureSystem)?;
}
#[cfg(target_arch = "aarch64")]
{
for vcpu in vcpus.iter_mut() {
vcpu.kvm_vcpu
.configure(vmm.guest_memory(), entry_addr)
.map_err(Error::VcpuConfigure)
.map_err(Internal)?;
}
let vcpu_mpidr = vcpus
.iter_mut()
.map(|cpu| cpu.kvm_vcpu.get_mpidr())
.collect();
arch::aarch64::configure_system(
&vmm.guest_memory,
&boot_cmdline.as_cstring().map_err(LoadCommandline)?,
vcpu_mpidr,
vmm.mmio_device_manager.get_device_info(),
vmm.vm.get_irqchip(),
initrd,
)
.map_err(ConfigureSystem)?;
}
Ok(())
}
/// Attaches a VirtioDevice device to the device manager and event manager.
fn attach_virtio_device<T: 'static + VirtioDevice + Subscriber>(
event_manager: &mut EventManager,
vmm: &mut Vmm,
id: String,
device: Arc<Mutex<T>>,
cmdline: &mut KernelCmdline,
) -> std::result::Result<(), StartMicrovmError> {
use self::StartMicrovmError::*;
event_manager
.add_subscriber(device.clone())
.map_err(RegisterEvent)?;
// The device mutex mustn't be locked here otherwise it will deadlock.
let device = MmioTransport::new(vmm.guest_memory().clone(), device);
vmm.mmio_device_manager
.register_mmio_virtio_for_boot(vmm.vm.fd(), id, device, cmdline)
.map_err(RegisterMmioDevice)
.map(|_| ())
}
pub(crate) fn attach_boot_timer_device(
vmm: &mut Vmm,
request_ts: TimestampUs,
) -> std::result::Result<(), StartMicrovmError> {
use self::StartMicrovmError::*;
let boot_timer = devices::pseudo::BootTimer::new(request_ts);
vmm.mmio_device_manager
.register_mmio_boot_timer(boot_timer)
.map_err(RegisterMmioDevice)?;
Ok(())
}
fn attach_block_devices<'a>(
vmm: &mut Vmm,
cmdline: &mut KernelCmdline,
blocks: impl Iterator<Item = &'a Arc<Mutex<Block>>>,
event_manager: &mut EventManager,
) -> std::result::Result<(), StartMicrovmError> {
for block in blocks {
let id = {
let locked = block.lock().expect("Poisoned lock");
if locked.is_root_device() {
cmdline.insert_str(if let Some(partuuid) = locked.partuuid() {
format!("root=PARTUUID={}", partuuid)
} else {
                    // If no PARTUUID was specified for the root device, fall back to /dev/vda.
"root=/dev/vda".to_string()
})?;
let flags = if locked.is_read_only() { "ro" } else { "rw" };
cmdline.insert_str(flags)?;
}
locked.id().clone()
};
// The device mutex mustn't be locked here otherwise it will deadlock.
attach_virtio_device(event_manager, vmm, id, block.clone(), cmdline)?;
}
Ok(())
}
fn attach_net_devices<'a>(
vmm: &mut Vmm,
cmdline: &mut KernelCmdline,
net_devices: impl Iterator<Item = &'a Arc<Mutex<Net>>>,
event_manager: &mut EventManager,
) -> std::result::Result<(), StartMicrovmError> {
for net_device in net_devices {
let id = net_device.lock().expect("Poisoned lock").id().clone();
// The device mutex mustn't be locked here otherwise it will deadlock.
attach_virtio_device(event_manager, vmm, id, net_device.clone(), cmdline)?;
}
Ok(())
}
fn attach_unixsock_vsock_device(
vmm: &mut Vmm,
cmdline: &mut KernelCmdline,
unix_vsock: &Arc<Mutex<Vsock<VsockUnixBackend>>>,
event_manager: &mut EventManager,
) -> std::result::Result<(), StartMicrovmError> {
let id = String::from(unix_vsock.lock().expect("Poisoned lock").id());
// The device mutex mustn't be locked here otherwise it will deadlock.
attach_virtio_device(event_manager, vmm, id, unix_vsock.clone(), cmdline)
}
fn attach_balloon_device(
vmm: &mut Vmm,
cmdline: &mut KernelCmdline,
balloon: &Arc<Mutex<Balloon>>,
event_manager: &mut EventManager,
) -> std::result::Result<(), StartMicrovmError> {
let id = String::from(balloon.lock().expect("Poisoned lock").id());
// The device mutex mustn't be locked here otherwise it will deadlock.
attach_virtio_device(event_manager, vmm, id, balloon.clone(), cmdline)
}
#[cfg(test)]
pub mod tests {
use std::io::Cursor;
use super::*;
use crate::vmm_config::balloon::{BalloonBuilder, BalloonDeviceConfig, BALLOON_DEV_ID};
use crate::vmm_config::boot_source::DEFAULT_KERNEL_CMDLINE;
use crate::vmm_config::drive::{BlockBuilder, BlockDeviceConfig, CacheType};
use crate::vmm_config::net::{NetBuilder, NetworkInterfaceConfig};
use crate::vmm_config::vsock::tests::default_config;
use crate::vmm_config::vsock::{VsockBuilder, VsockDeviceConfig};
use arch::DeviceType;
use devices::virtio::{TYPE_BALLOON, TYPE_BLOCK, TYPE_VSOCK};
use kernel::cmdline::Cmdline;
use polly::event_manager::EventManager;
use utils::tempfile::TempFile;
pub(crate) struct CustomBlockConfig {
drive_id: String,
is_root_device: bool,
partuuid: Option<String>,
is_read_only: bool,
cache_type: CacheType,
}
impl CustomBlockConfig {
pub(crate) fn new(
drive_id: String,
is_root_device: bool,
partuuid: Option<String>,
is_read_only: bool,
cache_type: CacheType,
) -> Self {
CustomBlockConfig {
drive_id,
is_root_device,
partuuid,
is_read_only,
cache_type,
}
}
}
fn default_mmio_device_manager() -> MMIODeviceManager {
MMIODeviceManager::new(arch::MMIO_MEM_START, (arch::IRQ_BASE, arch::IRQ_MAX))
}
#[cfg(target_arch = "x86_64")]
fn default_portio_device_manager() -> PortIODeviceManager {
PortIODeviceManager::new(
Arc::new(Mutex::new(Serial::new_sink(
EventFd::new(libc::EFD_NONBLOCK).unwrap(),
))),
EventFd::new(libc::EFD_NONBLOCK).unwrap(),
)
.unwrap()
}
pub(crate) fn default_kernel_cmdline() -> Cmdline {
let mut kernel_cmdline = kernel::cmdline::Cmdline::new(4096);
kernel_cmdline.insert_str(DEFAULT_KERNEL_CMDLINE).unwrap();
kernel_cmdline
}
pub(crate) fn default_vmm() -> Vmm {
let guest_memory = create_guest_memory(128, false).unwrap();
let exit_evt = EventFd::new(libc::EFD_NONBLOCK)
.map_err(Error::EventFd)
.map_err(StartMicrovmError::Internal)
.unwrap();
let mut vm = setup_kvm_vm(&guest_memory, false).unwrap();
let mmio_device_manager = default_mmio_device_manager();
#[cfg(target_arch = "x86_64")]
let pio_device_manager = default_portio_device_manager();
#[cfg(target_arch = "x86_64")]
setup_interrupt_controller(&mut vm).unwrap();
#[cfg(target_arch = "aarch64")]
{
let exit_evt = EventFd::new(libc::EFD_NONBLOCK).unwrap();
let _vcpu = Vcpu::new(1, &vm, exit_evt).unwrap();
setup_interrupt_controller(&mut vm, 1).unwrap();
}
Vmm {
events_observer: Some(Box::new(SerialStdin::get())),
guest_memory,
vcpus_handles: Vec::new(),
exit_evt,
vm,
mmio_device_manager,
#[cfg(target_arch = "x86_64")]
pio_device_manager,
}
}
pub(crate) fn insert_block_devices(
vmm: &mut Vmm,
cmdline: &mut Cmdline,
event_manager: &mut EventManager,
custom_block_cfgs: Vec<CustomBlockConfig>,
) -> Vec<TempFile> {
let mut block_dev_configs = BlockBuilder::new();
let mut block_files = Vec::new();
for custom_block_cfg in &custom_block_cfgs {
block_files.push(TempFile::new().unwrap());
let block_device_config = BlockDeviceConfig {
drive_id: String::from(&custom_block_cfg.drive_id),
path_on_host: block_files
.last()
.unwrap()
.as_path()
.to_str()
.unwrap()
.to_string(),
is_root_device: custom_block_cfg.is_root_device,
partuuid: custom_block_cfg.partuuid.clone(),
is_read_only: custom_block_cfg.is_read_only,
cache_type: custom_block_cfg.cache_type,
rate_limiter: None,
};
block_dev_configs.insert(block_device_config).unwrap();
}
attach_block_devices(vmm, cmdline, block_dev_configs.list.iter(), event_manager).unwrap();
block_files
}
pub(crate) fn insert_net_device(
vmm: &mut Vmm,
cmdline: &mut Cmdline,
event_manager: &mut EventManager,
net_config: NetworkInterfaceConfig,
) {
let mut net_builder = NetBuilder::new();
net_builder.build(net_config).unwrap();
let res = attach_net_devices(vmm, cmdline, net_builder.iter(), event_manager);
assert!(res.is_ok());
}
pub(crate) fn insert_vsock_device(
vmm: &mut Vmm,
cmdline: &mut Cmdline,
event_manager: &mut EventManager,
vsock_config: VsockDeviceConfig,
) {
let vsock_dev_id = vsock_config.vsock_id.clone();
let vsock = VsockBuilder::create_unixsock_vsock(vsock_config).unwrap();
let vsock = Arc::new(Mutex::new(vsock));
assert!(attach_unixsock_vsock_device(vmm, cmdline, &vsock, event_manager).is_ok());
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_VSOCK), &vsock_dev_id)
.is_some());
}
pub(crate) fn insert_balloon_device(
vmm: &mut Vmm,
cmdline: &mut Cmdline,
event_manager: &mut EventManager,
balloon_config: BalloonDeviceConfig,
) {
let mut builder = BalloonBuilder::new();
assert!(builder.set(balloon_config).is_ok());
let balloon = builder.get().unwrap();
assert!(attach_balloon_device(vmm, cmdline, balloon, event_manager).is_ok());
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BALLOON), BALLOON_DEV_ID)
.is_some());
}
fn make_test_bin() -> Vec<u8> {
let mut fake_bin = Vec::new();
fake_bin.resize(1_000_000, 0xAA);
fake_bin
}
fn create_guest_mem_at(at: GuestAddress, size: usize) -> GuestMemoryMmap {
GuestMemoryMmap::from_ranges(&[(at, size)]).unwrap()
}
pub(crate) fn create_guest_mem_with_size(size: usize) -> GuestMemoryMmap {
create_guest_mem_at(GuestAddress(0x0), size)
}
#[test]
// Test that loading the initrd is successful on different archs.
fn test_load_initrd() {
use vm_memory::GuestMemory;
let image = make_test_bin();
let mem_size: usize = image.len() * 2 + arch::PAGE_SIZE;
#[cfg(target_arch = "x86_64")]
let gm = create_guest_mem_with_size(mem_size);
#[cfg(target_arch = "aarch64")]
let gm = create_guest_mem_with_size(mem_size + arch::aarch64::layout::FDT_MAX_SIZE);
let res = load_initrd(&gm, &mut Cursor::new(&image));
assert!(res.is_ok());
let initrd = res.unwrap();
assert!(gm.address_in_range(initrd.address));
assert_eq!(initrd.size, image.len());
}
#[test]
fn test_load_initrd_no_memory() {
let gm = create_guest_mem_with_size(79);
let image = make_test_bin();
let res = load_initrd(&gm, &mut Cursor::new(&image));
assert!(res.is_err());
assert_eq!(
StartMicrovmError::InitrdLoad.to_string(),
res.err().unwrap().to_string()
);
}
#[test]
fn test_load_initrd_unaligned() {
let image = vec![1, 2, 3, 4];
let gm = create_guest_mem_at(GuestAddress(arch::PAGE_SIZE as u64 + 1), image.len() * 2);
let res = load_initrd(&gm, &mut Cursor::new(&image));
assert!(res.is_err());
assert_eq!(
StartMicrovmError::InitrdLoad.to_string(),
res.err().unwrap().to_string()
);
}
#[test]
fn test_stdin_wrapper() {
let wrapper = SerialStdin::get();
assert_eq!(wrapper.as_raw_fd(), io::stdin().as_raw_fd())
}
#[test]
fn test_create_guest_memory() {
let mem_size = 4096 * 2;
// Case 1: create guest memory without dirty page tracking
{
let guest_memory = create_guest_memory(mem_size, false).unwrap();
assert!(!guest_memory.is_dirty_tracking_enabled());
}
// Case 2: create guest memory with dirty page tracking
{
let guest_memory = create_guest_memory(mem_size, true).unwrap();
assert!(guest_memory.is_dirty_tracking_enabled());
}
}
#[test]
fn test_create_vcpus() {
let vcpu_count = 2;
let guest_memory = create_guest_memory(128, false).unwrap();
#[allow(unused_mut)]
let mut vm = setup_kvm_vm(&guest_memory, false).unwrap();
let evfd = EventFd::new(libc::EFD_NONBLOCK).unwrap();
#[cfg(target_arch = "x86_64")]
setup_interrupt_controller(&mut vm).unwrap();
let vcpu_vec = create_vcpus(&vm, vcpu_count, &evfd).unwrap();
assert_eq!(vcpu_vec.len(), vcpu_count as usize);
}
#[test]
fn test_attach_net_devices() {
let mut event_manager = EventManager::new().expect("Unable to create EventManager");
let mut vmm = default_vmm();
let network_interface = NetworkInterfaceConfig {
iface_id: String::from("netif"),
host_dev_name: String::from("hostname"),
guest_mac: None,
rx_rate_limiter: None,
tx_rate_limiter: None,
allow_mmds_requests: true,
};
let mut cmdline = default_kernel_cmdline();
insert_net_device(
&mut vmm,
&mut cmdline,
&mut event_manager,
network_interface.clone(),
);
// We can not attach it once more.
let mut net_builder = NetBuilder::new();
assert!(net_builder.build(network_interface).is_err());
}
#[test]
fn test_attach_block_devices() {
let mut event_manager = EventManager::new().expect("Unable to create EventManager");
// Use case 1: root block device is not specified through PARTUUID.
{
let drive_id = String::from("root");
let block_configs = vec![CustomBlockConfig::new(
drive_id.clone(),
true,
None,
true,
CacheType::Unsafe,
)];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(cmdline.as_str().contains("root=/dev/vda ro"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), drive_id.as_str())
.is_some());
}
// Use case 2: root block device is specified through PARTUUID.
{
let drive_id = String::from("root");
let block_configs = vec![CustomBlockConfig::new(
drive_id.clone(),
true,
Some("0eaa91a0-01".to_string()),
false,
CacheType::Unsafe,
)];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(cmdline.as_str().contains("root=PARTUUID=0eaa91a0-01 rw"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), drive_id.as_str())
.is_some());
}
// Use case 3: root block device is not added at all.
{
let drive_id = String::from("non_root");
let block_configs = vec![CustomBlockConfig::new(
drive_id.clone(),
false,
Some("0eaa91a0-01".to_string()),
false,
CacheType::Unsafe,
)];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(!cmdline.as_str().contains("root=PARTUUID="));
assert!(!cmdline.as_str().contains("root=/dev/vda"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), drive_id.as_str())
.is_some());
}
// Use case 4: rw root block device and other rw and ro drives.
{
let block_configs = vec![
CustomBlockConfig::new(
String::from("root"),
true,
Some("0eaa91a0-01".to_string()),
false,
CacheType::Unsafe,
),
CustomBlockConfig::new(
String::from("secondary"),
false,
None,
true,
CacheType::Unsafe,
),
CustomBlockConfig::new(
String::from("third"),
false,
None,
false,
CacheType::Unsafe,
),
];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(cmdline.as_str().contains("root=PARTUUID=0eaa91a0-01 rw"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), "root")
.is_some());
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), "secondary")
.is_some());
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), "third")
.is_some());
// Check if these three block devices are inserted in kernel_cmdline.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
assert!(cmdline
.as_str()
.contains("virtio_mmio.device=4K@0xd0000000:5 virtio_mmio.device=4K@0xd0001000:6 virtio_mmio.device=4K@0xd0002000:7"));
}
// Use case 5: root block device is rw.
{
let drive_id = String::from("root");
let block_configs = vec![CustomBlockConfig::new(
drive_id.clone(),
true,
None,
false,
CacheType::Unsafe,
)];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(cmdline.as_str().contains("root=/dev/vda rw"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), drive_id.as_str())
.is_some());
}
// Use case 6: root block device is ro, with PARTUUID.
{
let drive_id = String::from("root");
let block_configs = vec![CustomBlockConfig::new(
drive_id.clone(),
true,
Some("0eaa91a0-01".to_string()),
true,
CacheType::Unsafe,
)];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(cmdline.as_str().contains("root=PARTUUID=0eaa91a0-01 ro"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), drive_id.as_str())
.is_some());
}
// Use case 7: root block device is rw with flush enabled
{
let drive_id = String::from("root");
let block_configs = vec![CustomBlockConfig::new(
drive_id.clone(),
true,
None,
false,
CacheType::Writeback,
)];
let mut vmm = default_vmm();
let mut cmdline = default_kernel_cmdline();
insert_block_devices(&mut vmm, &mut cmdline, &mut event_manager, block_configs);
assert!(cmdline.as_str().contains("root=/dev/vda rw"));
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::Virtio(TYPE_BLOCK), drive_id.as_str())
.is_some());
}
}
#[test]
fn test_attach_boot_timer_device() {
let mut vmm = default_vmm();
let request_ts = TimestampUs::default();
let res = attach_boot_timer_device(&mut vmm, request_ts);
assert!(res.is_ok());
assert!(vmm
.mmio_device_manager
.get_device(DeviceType::BootTimer, &DeviceType::BootTimer.to_string())
.is_some());
}
#[test]
fn test_attach_balloon_device() {
let mut event_manager = EventManager::new().expect("Unable to create EventManager");
let mut vmm = default_vmm();
let balloon_config = BalloonDeviceConfig {
amount_mb: 0,
deflate_on_oom: false,
stats_polling_interval_s: 0,
};
let mut cmdline = default_kernel_cmdline();
insert_balloon_device(&mut vmm, &mut cmdline, &mut event_manager, balloon_config);
        // Check if the balloon device is described in kernel_cmdline.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
assert!(cmdline
.as_str()
.contains("virtio_mmio.device=4K@0xd0000000:5"));
}
#[test]
fn test_attach_vsock_device() {
let mut event_manager = EventManager::new().expect("Unable to create EventManager");
let mut vmm = default_vmm();
let mut tmp_sock_file = TempFile::new().unwrap();
tmp_sock_file.remove().unwrap();
let vsock_config = default_config(&tmp_sock_file);
let mut cmdline = default_kernel_cmdline();
insert_vsock_device(&mut vmm, &mut cmdline, &mut event_manager, vsock_config);
// Check if the vsock device is described in kernel_cmdline.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
assert!(cmdline
.as_str()
.contains("virtio_mmio.device=4K@0xd0000000:5"));
}
#[test]
fn test_error_messages() {
use crate::builder::StartMicrovmError::*;
let err = AttachBlockDevice(io::Error::from_raw_os_error(0));
let _ = format!("{}{:?}", err, err);
let err = CreateNetDevice(devices::virtio::net::Error::EventFd(
io::Error::from_raw_os_error(0),
));
let _ = format!("{}{:?}", err, err);
let err = CreateRateLimiter(io::Error::from_raw_os_error(0));
let _ = format!("{}{:?}", err, err);
let err = Internal(Error::Serial(io::Error::from_raw_os_error(0)));
let _ = format!("{}{:?}", err, err);
let err = KernelCmdline(String::from("dummy --cmdline"));
let _ = format!("{}{:?}", err, err);
let err = KernelLoader(kernel::loader::Error::InvalidElfMagicNumber);
let _ = format!("{}{:?}", err, err);
let err = LoadCommandline(kernel::cmdline::Error::TooLarge);
let _ = format!("{}{:?}", err, err);
let err = MissingKernelConfig;
let _ = format!("{}{:?}", err, err);
let err = MissingMemSizeConfig;
let _ = format!("{}{:?}", err, err);
let err = NetDeviceNotConfigured;
let _ = format!("{}{:?}", err, err);
let err = OpenBlockDevice(io::Error::from_raw_os_error(0));
let _ = format!("{}{:?}", err, err);
let err = RegisterEvent(EventManagerError::EpollCreate(
io::Error::from_raw_os_error(0),
));
let _ = format!("{}{:?}", err, err);
}
#[test]
fn test_kernel_cmdline_err_to_startuvm_err() {
let err = StartMicrovmError::from(kernel::cmdline::Error::HasSpace);
let _ = format!("{}{:?}", err, err);
}
}
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15883
// FIXME: cover these topics:
// path, reader, writer, stream, raii (close not needed),
// stdio, print!, println!, file access, process spawning,
// error handling
/*! I/O, including files, networking, timers, and processes
`std::io` provides Rust's basic I/O types,
for reading and writing to files, TCP, UDP,
and other types of sockets and pipes,
manipulating the file system, spawning processes and signal handling.
# Examples
Some examples of obvious things you might want to do
* Read lines from stdin
```rust
use std::io;
for line in io::stdin().lines() {
print!("{}", line.unwrap());
}
```
* Read a complete file
```rust
use std::io::File;
let contents = File::open(&Path::new("message.txt")).read_to_end();
```
* Write a line to a file
```rust
# #![allow(unused_must_use)]
use std::io::File;
let mut file = File::create(&Path::new("message.txt"));
file.write(b"hello, file!\n");
# drop(file);
# ::std::io::fs::unlink(&Path::new("message.txt"));
```
* Iterate over the lines of a file
```rust,no_run
use std::io::BufferedReader;
use std::io::File;
let path = Path::new("message.txt");
let mut file = BufferedReader::new(File::open(&path));
for line in file.lines() {
print!("{}", line.unwrap());
}
```
* Pull the lines of a file into a vector of strings
```rust,no_run
use std::io::BufferedReader;
use std::io::File;
let path = Path::new("message.txt");
let mut file = BufferedReader::new(File::open(&path));
let lines: Vec<String> = file.lines().map(|x| x.unwrap()).collect();
```
* Make a simple TCP client connection and request
```rust
# #![allow(unused_must_use)]
use std::io::TcpStream;
# // connection doesn't fail if a server is running on 8080
# // locally, we still want to be type checking this code, so let's
# // just stop it running (#11576)
# if false {
let mut socket = TcpStream::connect("127.0.0.1", 8080).unwrap();
socket.write(b"GET / HTTP/1.0\n\n");
let response = socket.read_to_end();
# }
```
* Make a simple TCP server
```rust
# fn main() { }
# fn foo() {
# #![allow(dead_code)]
use std::io::{TcpListener, TcpStream};
use std::io::{Acceptor, Listener};
let listener = TcpListener::bind("127.0.0.1", 80);
// bind the listener to the specified address
let mut acceptor = listener.listen();
fn handle_client(mut stream: TcpStream) {
// ...
# &mut stream; // silence unused mutability/variable warning
}
// accept connections and process them, spawning a new task for each one
for stream in acceptor.incoming() {
match stream {
Err(e) => { /* connection failed */ }
Ok(stream) => spawn(proc() {
// connection succeeded
handle_client(stream)
})
}
}
// close the socket server
drop(acceptor);
# }
```
# Error Handling
I/O is an area where nearly every operation can result in unexpected
errors. Errors should be painfully visible when they happen, and handling them
should be straightforward. It should be convenient to handle specific I/O
errors, and it should also be convenient to not deal with I/O errors.
Rust's I/O employs a combination of techniques to reduce boilerplate
while still providing feedback about errors. The basic strategy:
* All I/O operations return `IoResult<T>` which is equivalent to
`Result<T, IoError>`. The `Result` type is defined in the `std::result`
module.
* If the `Result` type goes unused, then the compiler will by default emit a
warning about the unused result. This is because `Result` has the
`#[must_use]` attribute.
* Common traits are implemented for `IoResult`, e.g.
`impl<R: Reader> Reader for IoResult<R>`, so that error values do not have
to be 'unwrapped' before use.
These features combine in the API to allow for expressions like
`File::create(&Path::new("diary.txt")).write(b"Met a girl.\n")`
without having to worry about whether "diary.txt" exists or whether
the write succeeds. As written, if either `create` or `write`
encounters an error then the result of the entire expression will
be an error.
If you wanted to handle the error though you might write:
```rust
# #![allow(unused_must_use)]
use std::io::File;
match File::create(&Path::new("diary.txt")).write(b"Met a girl.\n") {
Ok(()) => (), // succeeded
Err(e) => println!("failed to write to my diary: {}", e),
}
# ::std::io::fs::unlink(&Path::new("diary.txt"));
```
So what actually happens if `create` encounters an error?
It's important to know that what `create` returns is not a `File`
but an `IoResult<File>`. If the file does not open, then `create` will simply
return `Err(..)`. Because `Writer` is also implemented for `IoResult<File>` (as
described above), there is no need to inspect or unwrap the `IoResult<File>`;
we simply call `write` on it. If `create` returned an `Err(..)` then the
follow-up call to `write` will also return an error.
## `try!`
Explicit pattern matching on `IoResult`s can get quite verbose, especially
when performing many I/O operations. Some examples (like those above) are
alleviated with extra methods implemented on `IoResult`, but others have more
complex interdependencies among each I/O operation.
The `try!` macro from `std::macros` is provided as a method of early-return
inside `Result`-returning functions. It expands to an early-return on `Err`
and otherwise unwraps the contained `Ok` value.
If you wanted to read several `u32`s from a file and return their product:
```rust
use std::io::{File, IoResult};
fn file_product(p: &Path) -> IoResult<u32> {
let mut f = File::open(p);
let x1 = try!(f.read_le_u32());
let x2 = try!(f.read_le_u32());
Ok(x1 * x2)
}
match file_product(&Path::new("numbers.bin")) {
Ok(x) => println!("{}", x),
Err(e) => println!("Failed to read numbers!")
}
```
With `try!` in `file_product`, each `read_le_u32` need not be directly
concerned with error handling; instead its caller is responsible for
responding to errors that may occur while attempting to read the numbers.
*/
#![experimental]
#![deny(unused_must_use)]
use char::Char;
use collections::Collection;
use default::Default;
use fmt;
use int;
use iter::Iterator;
use libc;
use mem::transmute;
use ops::{BitOr, BitAnd, Sub, Not};
use option::{Option, Some, None};
use os;
use boxed::Box;
use result::{Ok, Err, Result};
use rt::rtio;
use slice::{Slice, MutableSlice, ImmutableSlice};
use str::{Str, StrSlice};
use str;
use string::String;
use uint;
use unicode::char::UnicodeChar;
use vec::Vec;
// Reexports
pub use self::stdio::stdin;
pub use self::stdio::stdout;
pub use self::stdio::stderr;
pub use self::stdio::print;
pub use self::stdio::println;
pub use self::fs::File;
pub use self::timer::Timer;
pub use self::net::ip::IpAddr;
pub use self::net::tcp::TcpListener;
pub use self::net::tcp::TcpStream;
pub use self::net::udp::UdpStream;
pub use self::pipe::PipeStream;
pub use self::process::{Process, Command};
pub use self::tempfile::TempDir;
pub use self::mem::{MemReader, BufReader, MemWriter, BufWriter};
pub use self::buffered::{BufferedReader, BufferedWriter, BufferedStream,
LineBufferedWriter};
pub use self::comm_adapters::{ChanReader, ChanWriter};
// this comes first to get the iotest! macro
pub mod test;
mod buffered;
mod comm_adapters;
mod mem;
mod result;
mod tempfile;
pub mod extensions;
pub mod fs;
pub mod net;
pub mod pipe;
pub mod process;
pub mod signal;
pub mod stdio;
pub mod timer;
pub mod util;
/// The default buffer size for various I/O operations
// libuv recommends 64k buffers to maximize throughput
// https://groups.google.com/forum/#!topic/libuv/oQO1HJAIDdA
static DEFAULT_BUF_SIZE: uint = 1024 * 64;
/// A convenient typedef of the return value of any I/O action.
pub type IoResult<T> = Result<T, IoError>;
/// The type passed to I/O condition handlers to indicate error
///
/// # FIXME
///
/// Is something like this sufficient? It's kind of archaic
#[deriving(PartialEq, Eq, Clone)]
pub struct IoError {
/// An enumeration which can be matched against for determining the flavor
/// of error.
pub kind: IoErrorKind,
/// A human-readable description about the error
pub desc: &'static str,
/// Detailed information about this error, not always available
pub detail: Option<String>
}
impl IoError {
/// Convert an `errno` value into an `IoError`.
///
/// If `detail` is `true`, the `detail` field of the `IoError`
/// struct is filled with an allocated string describing the error
/// in more detail, retrieved from the operating system.
pub fn from_errno(errno: uint, detail: bool) -> IoError {
#[cfg(windows)]
fn get_err(errno: i32) -> (IoErrorKind, &'static str) {
match errno {
libc::EOF => (EndOfFile, "end of file"),
libc::ERROR_NO_DATA => (BrokenPipe, "the pipe is being closed"),
libc::ERROR_FILE_NOT_FOUND => (FileNotFound, "file not found"),
libc::ERROR_INVALID_NAME => (InvalidInput, "invalid file name"),
libc::WSAECONNREFUSED => (ConnectionRefused, "connection refused"),
libc::WSAECONNRESET => (ConnectionReset, "connection reset"),
libc::ERROR_ACCESS_DENIED | libc::WSAEACCES =>
(PermissionDenied, "permission denied"),
libc::WSAEWOULDBLOCK => {
(ResourceUnavailable, "resource temporarily unavailable")
}
libc::WSAENOTCONN => (NotConnected, "not connected"),
libc::WSAECONNABORTED => (ConnectionAborted, "connection aborted"),
libc::WSAEADDRNOTAVAIL => (ConnectionRefused, "address not available"),
libc::WSAEADDRINUSE => (ConnectionRefused, "address in use"),
libc::ERROR_BROKEN_PIPE => (EndOfFile, "the pipe has ended"),
libc::ERROR_OPERATION_ABORTED =>
(TimedOut, "operation timed out"),
libc::WSAEINVAL => (InvalidInput, "invalid argument"),
libc::ERROR_CALL_NOT_IMPLEMENTED =>
(IoUnavailable, "function not implemented"),
libc::ERROR_INVALID_HANDLE =>
(MismatchedFileTypeForOperation,
"invalid handle provided to function"),
libc::ERROR_NOTHING_TO_TERMINATE =>
(InvalidInput, "no process to kill"),
// libuv maps this error code to EISDIR. we do too. if it is found
// to be incorrect, we can add in some more machinery to only
// return this message when ERROR_INVALID_FUNCTION after certain
// Windows calls.
libc::ERROR_INVALID_FUNCTION => (InvalidInput,
"illegal operation on a directory"),
_ => (OtherIoError, "unknown error")
}
}
#[cfg(not(windows))]
fn get_err(errno: i32) -> (IoErrorKind, &'static str) {
// FIXME: this should probably be a bit more descriptive...
match errno {
libc::EOF => (EndOfFile, "end of file"),
libc::ECONNREFUSED => (ConnectionRefused, "connection refused"),
libc::ECONNRESET => (ConnectionReset, "connection reset"),
libc::EPERM | libc::EACCES =>
(PermissionDenied, "permission denied"),
libc::EPIPE => (BrokenPipe, "broken pipe"),
libc::ENOTCONN => (NotConnected, "not connected"),
libc::ECONNABORTED => (ConnectionAborted, "connection aborted"),
libc::EADDRNOTAVAIL => (ConnectionRefused, "address not available"),
libc::EADDRINUSE => (ConnectionRefused, "address in use"),
libc::ENOENT => (FileNotFound, "no such file or directory"),
libc::EISDIR => (InvalidInput, "illegal operation on a directory"),
libc::ENOSYS => (IoUnavailable, "function not implemented"),
libc::EINVAL => (InvalidInput, "invalid argument"),
libc::ENOTTY =>
(MismatchedFileTypeForOperation,
"file descriptor is not a TTY"),
libc::ETIMEDOUT => (TimedOut, "operation timed out"),
libc::ECANCELED => (TimedOut, "operation aborted"),
// These two constants can have the same value on some systems,
// but different values on others, so we can't use a match
// clause
x if x == libc::EAGAIN || x == libc::EWOULDBLOCK =>
(ResourceUnavailable, "resource temporarily unavailable"),
_ => (OtherIoError, "unknown error")
}
}
let (kind, desc) = get_err(errno as i32);
IoError {
kind: kind,
desc: desc,
detail: if detail && kind == OtherIoError {
Some(os::error_string(errno).as_slice().chars().map(|c| c.to_lowercase()).collect())
} else {
None
},
}
}
/// Retrieve the last error to occur as a (detailed) IoError.
///
/// This uses the OS `errno`, and so there should not be any task
/// descheduling or migration (other than that performed by the
/// operating system) between the call(s) for which errors are
/// being checked and the call of this function.
pub fn last_error() -> IoError {
IoError::from_errno(os::errno() as uint, true)
}
fn from_rtio_error(err: rtio::IoError) -> IoError {
let rtio::IoError { code, extra, detail } = err;
let mut ioerr = IoError::from_errno(code, false);
ioerr.detail = detail;
ioerr.kind = match ioerr.kind {
TimedOut if extra > 0 => ShortWrite(extra),
k => k,
};
return ioerr;
}
}
impl fmt::Show for IoError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
IoError { kind: OtherIoError, desc: "unknown error", detail: Some(ref detail) } =>
write!(fmt, "{}", detail),
IoError { detail: None, desc, .. } =>
write!(fmt, "{}", desc),
IoError { detail: Some(ref detail), desc, .. } =>
write!(fmt, "{} ({})", desc, detail)
}
}
}
/// A list specifying general categories of I/O error.
#[deriving(PartialEq, Eq, Clone, Show)]
pub enum IoErrorKind {
/// Any I/O error not part of this list.
OtherIoError,
/// The operation could not complete because end of file was reached.
EndOfFile,
/// The file was not found.
FileNotFound,
/// The file permissions disallowed access to this file.
PermissionDenied,
/// A network connection failed for some reason not specified in this list.
ConnectionFailed,
/// The network operation failed because the network connection was closed.
Closed,
/// The connection was refused by the remote server.
ConnectionRefused,
/// The connection was reset by the remote server.
ConnectionReset,
/// The connection was aborted (terminated) by the remote server.
ConnectionAborted,
/// The network operation failed because it was not connected yet.
NotConnected,
/// The operation failed because a pipe was closed.
BrokenPipe,
/// A file already existed with that name.
PathAlreadyExists,
/// No file exists at that location.
PathDoesntExist,
/// The path did not specify the type of file that this operation required. For example,
/// attempting to copy a directory with the `fs::copy()` operation will fail with this error.
MismatchedFileTypeForOperation,
/// The operation temporarily failed (for example, because a signal was received), and retrying
/// may succeed.
ResourceUnavailable,
/// No I/O functionality is available for this task.
IoUnavailable,
/// A parameter was incorrect in a way that caused an I/O error not part of this list.
InvalidInput,
/// The I/O operation's timeout expired, causing it to be canceled.
TimedOut,
/// This write operation failed to write all of its data.
///
/// Normally the write() method on a Writer guarantees that all of its data
/// has been written, but some operations may be terminated after only
/// partially writing some data. An example of this is a timed out write
/// which successfully wrote a known number of bytes, but bailed out after
/// doing so.
///
/// The payload contained as part of this variant is the number of bytes
/// which are known to have been successfully written.
ShortWrite(uint),
/// The Reader returned 0 bytes from `read()` too many times.
NoProgress,
}
/// A trait that lets you add a `detail` to an IoError easily
trait UpdateIoError<T> {
/// Returns an IoError with updated description and detail
fn update_err(self, desc: &'static str, detail: |&IoError| -> String) -> Self;
/// Returns an IoError with updated detail
fn update_detail(self, detail: |&IoError| -> String) -> Self;
    /// Returns an IoError with an updated description
fn update_desc(self, desc: &'static str) -> Self;
}
impl<T> UpdateIoError<T> for IoResult<T> {
fn update_err(self, desc: &'static str, detail: |&IoError| -> String) -> IoResult<T> {
self.map_err(|mut e| {
let detail = detail(&e);
e.desc = desc;
e.detail = Some(detail);
e
})
}
fn update_detail(self, detail: |&IoError| -> String) -> IoResult<T> {
self.map_err(|mut e| { e.detail = Some(detail(&e)); e })
}
fn update_desc(self, desc: &'static str) -> IoResult<T> {
self.map_err(|mut e| { e.desc = desc; e })
}
}
static NO_PROGRESS_LIMIT: uint = 1000;
/// A trait for objects which are byte-oriented streams. Readers are defined by
/// one method, `read`. This function will block until data is available,
/// filling in the provided buffer with any data read.
///
/// Readers are intended to be composable with one another. Many objects
/// throughout the I/O and related libraries take and provide types which
/// implement the `Reader` trait.
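///
/// # Example
///
/// For instance, reading from an in-memory buffer (a sketch using the
/// `MemReader` type from this module) might look like this:
///
/// ```rust
/// use std::io::MemReader;
///
/// let mut reader = MemReader::new(vec![1u8, 2, 3, 4]);
/// let mut buf = [0u8, ..4];
/// // `read` returns how many bytes were placed in `buf`; in general it may be
/// // fewer than `buf.len()`, although `MemReader` fills as much as it can.
/// let nread = reader.read(buf).unwrap();
/// assert!(nread <= 4);
/// ```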
pub trait Reader {
    // The only method which needs to be implemented for this trait
/// Read bytes, up to the length of `buf` and place them in `buf`.
/// Returns the number of bytes read. The number of bytes read may
/// be less than the number requested, even 0. Returns `Err` on EOF.
///
/// # Error
///
/// If an error occurs during this I/O operation, then it is returned as
/// `Err(IoError)`. Note that end-of-file is considered an error, and can be
/// inspected for in the error's `kind` field. Also note that reading 0
    /// bytes is not considered an error in all circumstances.
///
/// # Implementation Note
///
/// When implementing this method on a new Reader, you are strongly encouraged
/// not to return 0 if you can avoid it.
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint>;
// Convenient helper methods based on the above methods
/// Reads at least `min` bytes and places them in `buf`.
/// Returns the number of bytes read.
///
/// This will continue to call `read` until at least `min` bytes have been
/// read. If `read` returns 0 too many times, `NoProgress` will be
/// returned.
///
/// # Error
///
/// If an error occurs at any point, that error is returned, and no further
/// bytes are read.
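    ///
    /// # Example
    ///
    /// For example, reading at least three bytes from an in-memory reader
    /// (a sketch using `MemReader`):
    ///
    /// ```rust
    /// use std::io::MemReader;
    ///
    /// let mut reader = MemReader::new(vec![1u8, 2, 3, 4, 5]);
    /// let mut buf = [0u8, ..5];
    /// // Blocks until at least 3 bytes have been read; up to `buf.len()`
    /// // bytes may be read in total.
    /// let nread = reader.read_at_least(3, buf).unwrap();
    /// assert!(nread >= 3);
    /// ```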
fn read_at_least(&mut self, min: uint, buf: &mut [u8]) -> IoResult<uint> {
if min > buf.len() {
return Err(IoError {
detail: Some(String::from_str("the buffer is too short")),
..standard_error(InvalidInput)
});
}
let mut read = 0;
while read < min {
let mut zeroes = 0;
loop {
match self.read(buf.mut_slice_from(read)) {
Ok(0) => {
zeroes += 1;
if zeroes >= NO_PROGRESS_LIMIT {
return Err(standard_error(NoProgress));
}
}
Ok(n) => {
read += n;
break;
}
err@Err(_) => return err
}
}
}
Ok(read)
}
/// Reads a single byte. Returns `Err` on EOF.
fn read_byte(&mut self) -> IoResult<u8> {
let mut buf = [0];
try!(self.read_at_least(1, buf));
Ok(buf[0])
}
/// Reads up to `len` bytes and appends them to a vector.
/// Returns the number of bytes read. The number of bytes read may be
/// less than the number requested, even 0. Returns Err on EOF.
///
/// # Error
///
/// If an error occurs during this I/O operation, then it is returned
/// as `Err(IoError)`. See `read()` for more details.
fn push(&mut self, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
let start_len = buf.len();
buf.reserve_additional(len);
let n = {
let s = unsafe { slice_vec_capacity(buf, start_len, start_len + len) };
try!(self.read(s))
};
unsafe { buf.set_len(start_len + n) };
Ok(n)
}
/// Reads at least `min` bytes, but no more than `len`, and appends them to
/// a vector.
/// Returns the number of bytes read.
///
/// This will continue to call `read` until at least `min` bytes have been
/// read. If `read` returns 0 too many times, `NoProgress` will be
/// returned.
///
/// # Error
///
/// If an error occurs at any point, that error is returned, and no further
/// bytes are read.
fn push_at_least(&mut self, min: uint, len: uint, buf: &mut Vec<u8>) -> IoResult<uint> {
if min > len {
return Err(IoError {
detail: Some(String::from_str("the buffer is too short")),
..standard_error(InvalidInput)
});
}
let start_len = buf.len();
buf.reserve_additional(len);
// we can't just use self.read_at_least(min, slice) because we need to push
// successful reads onto the vector before any returned errors.
let mut read = 0;
while read < min {
read += {
let s = unsafe { slice_vec_capacity(buf, start_len + read, start_len + len) };
try!(self.read_at_least(1, s))
};
unsafe { buf.set_len(start_len + read) };
}
Ok(read)
}
/// Reads exactly `len` bytes and gives you back a new vector of length
/// `len`
///
/// # Error
///
/// Fails with the same conditions as `read`. Additionally returns error
/// on EOF. Note that if an error is returned, then some number of bytes may
/// have already been consumed from the underlying reader, and they are lost
/// (not returned as part of the error). If this is unacceptable, then it is
/// recommended to use the `push_at_least` or `read` methods.
fn read_exact(&mut self, len: uint) -> IoResult<Vec<u8>> {
let mut buf = Vec::with_capacity(len);
match self.push_at_least(len, len, &mut buf) {
Ok(_) => Ok(buf),
Err(e) => Err(e),
}
}
/// Reads all remaining bytes from the stream.
///
/// # Error
///
/// Returns any non-EOF error immediately. Previously read bytes are
/// discarded when an error is returned.
///
/// When EOF is encountered, all bytes read up to that point are returned.
fn read_to_end(&mut self) -> IoResult<Vec<u8>> {
let mut buf = Vec::with_capacity(DEFAULT_BUF_SIZE);
loop {
match self.push_at_least(1, DEFAULT_BUF_SIZE, &mut buf) {
Ok(_) => {}
Err(ref e) if e.kind == EndOfFile => break,
Err(e) => return Err(e)
}
}
return Ok(buf);
}
/// Reads all of the remaining bytes of this stream, interpreting them as a
/// UTF-8 encoded stream. The corresponding string is returned.
///
/// # Error
///
/// This function returns all of the same errors as `read_to_end` with an
/// additional error if the reader's contents are not a valid sequence of
/// UTF-8 bytes.
fn read_to_string(&mut self) -> IoResult<String> {
self.read_to_end().and_then(|s| {
match String::from_utf8(s) {
Ok(s) => Ok(s),
Err(_) => Err(standard_error(InvalidInput)),
}
})
}
/// Create an iterator that reads a single byte on
/// each iteration, until EOF.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
fn bytes<'r>(&'r mut self) -> extensions::Bytes<'r, Self> {
extensions::Bytes::new(self)
}
// Byte conversion helpers
/// Reads `n` little-endian unsigned integer bytes.
///
/// `n` must be between 1 and 8, inclusive.
fn read_le_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
assert!(nbytes > 0 && nbytes <= 8);
let mut val = 0u64;
let mut pos = 0;
let mut i = nbytes;
while i > 0 {
val += (try!(self.read_u8()) as u64) << pos;
pos += 8;
i -= 1;
}
Ok(val)
}
/// Reads `n` little-endian signed integer bytes.
///
/// `n` must be between 1 and 8, inclusive.
fn read_le_int_n(&mut self, nbytes: uint) -> IoResult<i64> {
self.read_le_uint_n(nbytes).map(|i| extend_sign(i, nbytes))
}
/// Reads `n` big-endian unsigned integer bytes.
///
/// `n` must be between 1 and 8, inclusive.
fn read_be_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
assert!(nbytes > 0 && nbytes <= 8);
let mut val = 0u64;
let mut i = nbytes;
while i > 0 {
i -= 1;
val += (try!(self.read_u8()) as u64) << i * 8;
}
Ok(val)
}
/// Reads `n` big-endian signed integer bytes.
///
/// `n` must be between 1 and 8, inclusive.
fn read_be_int_n(&mut self, nbytes: uint) -> IoResult<i64> {
self.read_be_uint_n(nbytes).map(|i| extend_sign(i, nbytes))
}
/// Reads a little-endian unsigned integer.
///
/// The number of bytes returned is system-dependent.
fn read_le_uint(&mut self) -> IoResult<uint> {
self.read_le_uint_n(uint::BYTES).map(|i| i as uint)
}
/// Reads a little-endian integer.
///
/// The number of bytes returned is system-dependent.
fn read_le_int(&mut self) -> IoResult<int> {
self.read_le_int_n(int::BYTES).map(|i| i as int)
}
/// Reads a big-endian unsigned integer.
///
/// The number of bytes returned is system-dependent.
fn read_be_uint(&mut self) -> IoResult<uint> {
self.read_be_uint_n(uint::BYTES).map(|i| i as uint)
}
/// Reads a big-endian integer.
///
/// The number of bytes returned is system-dependent.
fn read_be_int(&mut self) -> IoResult<int> {
self.read_be_int_n(int::BYTES).map(|i| i as int)
}
/// Reads a big-endian `u64`.
///
/// `u64`s are 8 bytes long.
fn read_be_u64(&mut self) -> IoResult<u64> {
self.read_be_uint_n(8)
}
/// Reads a big-endian `u32`.
///
/// `u32`s are 4 bytes long.
fn read_be_u32(&mut self) -> IoResult<u32> {
self.read_be_uint_n(4).map(|i| i as u32)
}
/// Reads a big-endian `u16`.
///
/// `u16`s are 2 bytes long.
fn read_be_u16(&mut self) -> IoResult<u16> {
self.read_be_uint_n(2).map(|i| i as u16)
}
/// Reads a big-endian `i64`.
///
/// `i64`s are 8 bytes long.
fn read_be_i64(&mut self) -> IoResult<i64> {
self.read_be_int_n(8)
}
/// Reads a big-endian `i32`.
///
/// `i32`s are 4 bytes long.
fn read_be_i32(&mut self) -> IoResult<i32> {
self.read_be_int_n(4).map(|i| i as i32)
}
/// Reads a big-endian `i16`.
///
/// `i16`s are 2 bytes long.
fn read_be_i16(&mut self) -> IoResult<i16> {
self.read_be_int_n(2).map(|i| i as i16)
}
/// Reads a big-endian `f64`.
///
/// `f64`s are 8 byte, IEEE754 double-precision floating point numbers.
fn read_be_f64(&mut self) -> IoResult<f64> {
self.read_be_u64().map(|i| unsafe {
transmute::<u64, f64>(i)
})
}
/// Reads a big-endian `f32`.
///
/// `f32`s are 4 byte, IEEE754 single-precision floating point numbers.
fn read_be_f32(&mut self) -> IoResult<f32> {
self.read_be_u32().map(|i| unsafe {
transmute::<u32, f32>(i)
})
}
/// Reads a little-endian `u64`.
///
/// `u64`s are 8 bytes long.
fn read_le_u64(&mut self) -> IoResult<u64> {
self.read_le_uint_n(8)
}
/// Reads a little-endian `u32`.
///
/// `u32`s are 4 bytes long.
fn read_le_u32(&mut self) -> IoResult<u32> {
self.read_le_uint_n(4).map(|i| i as u32)
}
/// Reads a little-endian `u16`.
///
/// `u16`s are 2 bytes long.
fn read_le_u16(&mut self) -> IoResult<u16> {
self.read_le_uint_n(2).map(|i| i as u16)
}
/// Reads a little-endian `i64`.
///
/// `i64`s are 8 bytes long.
fn read_le_i64(&mut self) -> IoResult<i64> {
self.read_le_int_n(8)
}
/// Reads a little-endian `i32`.
///
/// `i32`s are 4 bytes long.
fn read_le_i32(&mut self) -> IoResult<i32> {
self.read_le_int_n(4).map(|i| i as i32)
}
/// Reads a little-endian `i16`.
///
/// `i16`s are 2 bytes long.
fn read_le_i16(&mut self) -> IoResult<i16> {
self.read_le_int_n(2).map(|i| i as i16)
}
/// Reads a little-endian `f64`.
///
/// `f64`s are 8 byte, IEEE754 double-precision floating point numbers.
fn read_le_f64(&mut self) -> IoResult<f64> {
self.read_le_u64().map(|i| unsafe {
transmute::<u64, f64>(i)
})
}
/// Reads a little-endian `f32`.
///
/// `f32`s are 4 byte, IEEE754 single-precision floating point numbers.
fn read_le_f32(&mut self) -> IoResult<f32> {
self.read_le_u32().map(|i| unsafe {
transmute::<u32, f32>(i)
})
}
/// Read a u8.
///
/// `u8`s are 1 byte.
fn read_u8(&mut self) -> IoResult<u8> {
self.read_byte()
}
/// Read an i8.
///
/// `i8`s are 1 byte.
fn read_i8(&mut self) -> IoResult<i8> {
self.read_byte().map(|i| i as i8)
}
/// Creates a wrapper around a mutable reference to the reader.
///
/// This is useful to allow applying adaptors while still
/// retaining ownership of the original value.
fn by_ref<'a>(&'a mut self) -> RefReader<'a, Self> {
RefReader { inner: self }
}
}
impl<'a> Reader for Box<Reader+'a> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { self.read(buf) }
}
impl<'a> Reader for &'a mut Reader+'a {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { self.read(buf) }
}
/// Returns a slice of `v` between `start` and `end`.
///
/// Similar to `slice()` except this function only bounds the slice on the
/// capacity of `v`, not the length.
///
/// # Failure
///
/// Fails when `start` or `end` point outside the capacity of `v`, or when
/// `start` > `end`.
// Private function here because we aren't sure if we want to expose this as
// API yet. If so, it should be a method on Vec.
unsafe fn slice_vec_capacity<'a, T>(v: &'a mut Vec<T>, start: uint, end: uint) -> &'a mut [T] {
use raw::Slice;
use ptr::RawPtr;
assert!(start <= end);
assert!(end <= v.capacity());
transmute(Slice {
data: v.as_ptr().offset(start as int),
len: end - start
})
}
/// A `RefReader` is a struct implementing `Reader` which contains a reference
/// to another reader. This is often useful when composing streams.
///
/// # Example
///
/// ```
/// # fn main() {}
/// # fn process_input<R: Reader>(r: R) {}
/// # fn foo() {
/// use std::io;
/// use std::io::util::LimitReader;
///
/// let mut stream = io::stdin();
///
/// // Only allow the function to process at most one kilobyte of input
/// {
/// let stream = LimitReader::new(stream.by_ref(), 1024);
/// process_input(stream);
/// }
///
/// // 'stream' is still available for use here
///
/// # }
/// ```
pub struct RefReader<'a, R:'a> {
/// The underlying reader which this is referencing
inner: &'a mut R
}
impl<'a, R: Reader> Reader for RefReader<'a, R> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { self.inner.read(buf) }
}
impl<'a, R: Buffer> Buffer for RefReader<'a, R> {
fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> { self.inner.fill_buf() }
fn consume(&mut self, amt: uint) { self.inner.consume(amt) }
}
fn extend_sign(val: u64, nbytes: uint) -> i64 {
let shift = (8 - nbytes) * 8;
(val << shift) as i64 >> shift
}
/// A trait for objects which are byte-oriented streams. Writers are defined by
/// one method, `write`. This function will block until the provided buffer of
/// bytes has been entirely written, and it will return any failures which occur.
///
/// Another commonly overridden method is the `flush` method for writers such as
/// buffered writers.
///
/// Writers are intended to be composable with one another. Many objects
/// throughout the I/O and related libraries take and provide types which
/// implement the `Writer` trait.
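///
/// # Example
///
/// For instance, writing to an in-memory buffer (a sketch using the
/// `MemWriter` type from this module) might look like this:
///
/// ```rust
/// use std::io::MemWriter;
///
/// let mut writer = MemWriter::new();
/// // `write` blocks until the entire buffer has been written.
/// writer.write(b"hello ").unwrap();
/// writer.write_str("world").unwrap();
/// assert_eq!(writer.unwrap().as_slice(), b"hello world");
/// ```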
pub trait Writer {
/// Write the entirety of a given buffer
///
/// # Errors
///
/// If an error happens during the I/O operation, the error is returned as
/// `Err`. Note that it is considered an error if the entire buffer could
/// not be written, and if an error is returned then it is unknown how much
/// data (if any) was actually written.
fn write(&mut self, buf: &[u8]) -> IoResult<()>;
/// Flush this output stream, ensuring that all intermediately buffered
/// contents reach their destination.
///
/// This is by default a no-op and implementers of the `Writer` trait should
/// decide whether their stream needs to be buffered or not.
fn flush(&mut self) -> IoResult<()> { Ok(()) }
/// Writes a formatted string into this writer, returning any error
/// encountered.
///
/// This method is primarily used to interface with the `format_args!`
/// macro, but it is rare that this should explicitly be called. The
/// `write!` macro should be favored to invoke this method instead.
///
/// # Errors
///
/// This function will return any I/O error reported while formatting.
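    ///
    /// # Example
    ///
    /// The usual way to reach this method is through the `write!` macro,
    /// for example (a sketch using `MemWriter`):
    ///
    /// ```rust
    /// use std::io::MemWriter;
    ///
    /// let mut writer = MemWriter::new();
    /// // `write!` forwards its formatted output to `write_fmt`.
    /// write!(writer, "{} world", "hello").unwrap();
    /// assert_eq!(writer.unwrap().as_slice(), b"hello world");
    /// ```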
fn write_fmt(&mut self, fmt: &fmt::Arguments) -> IoResult<()> {
        // Create a shim which translates a Writer to a FormatWriter and saves
        // off I/O errors instead of discarding them.
struct Adaptor<'a, T:'a> {
inner: &'a mut T,
error: IoResult<()>,
}
impl<'a, T: Writer> fmt::FormatWriter for Adaptor<'a, T> {
fn write(&mut self, bytes: &[u8]) -> fmt::Result {
match self.inner.write(bytes) {
Ok(()) => Ok(()),
Err(e) => {
self.error = Err(e);
Err(fmt::WriteError)
}
}
}
}
let mut output = Adaptor { inner: self, error: Ok(()) };
match fmt::write(&mut output, fmt) {
Ok(()) => Ok(()),
Err(..) => output.error
}
}
/// Write a rust string into this sink.
///
/// The bytes written will be the UTF-8 encoded version of the input string.
/// If other encodings are desired, it is recommended to compose this stream
/// with another performing the conversion, or to use `write` with a
/// converted byte-array instead.
#[inline]
fn write_str(&mut self, s: &str) -> IoResult<()> {
self.write(s.as_bytes())
}
/// Writes a string into this sink, and then writes a literal newline (`\n`)
/// byte afterwards. Note that the writing of the newline is *not* atomic in
/// the sense that the call to `write` is invoked twice (once with the
/// string and once with a newline character).
///
/// If other encodings or line ending flavors are desired, it is recommended
/// that the `write` method is used specifically instead.
#[inline]
fn write_line(&mut self, s: &str) -> IoResult<()> {
self.write_str(s).and_then(|()| self.write([b'\n']))
}
/// Write a single char, encoded as UTF-8.
#[inline]
fn write_char(&mut self, c: char) -> IoResult<()> {
let mut buf = [0u8, ..4];
let n = c.encode_utf8(buf.as_mut_slice()).unwrap_or(0);
self.write(buf.slice_to(n))
}
/// Write the result of passing n through `int::to_str_bytes`.
#[inline]
fn write_int(&mut self, n: int) -> IoResult<()> {
write!(self, "{:d}", n)
}
/// Write the result of passing n through `uint::to_str_bytes`.
#[inline]
fn write_uint(&mut self, n: uint) -> IoResult<()> {
write!(self, "{:u}", n)
}
/// Write a little-endian uint (number of bytes depends on system).
#[inline]
fn write_le_uint(&mut self, n: uint) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, uint::BYTES, |v| self.write(v))
}
/// Write a little-endian int (number of bytes depends on system).
#[inline]
fn write_le_int(&mut self, n: int) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, int::BYTES, |v| self.write(v))
}
/// Write a big-endian uint (number of bytes depends on system).
#[inline]
fn write_be_uint(&mut self, n: uint) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, uint::BYTES, |v| self.write(v))
}
/// Write a big-endian int (number of bytes depends on system).
#[inline]
fn write_be_int(&mut self, n: int) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, int::BYTES, |v| self.write(v))
}
/// Write a big-endian u64 (8 bytes).
#[inline]
fn write_be_u64(&mut self, n: u64) -> IoResult<()> {
extensions::u64_to_be_bytes(n, 8u, |v| self.write(v))
}
/// Write a big-endian u32 (4 bytes).
#[inline]
fn write_be_u32(&mut self, n: u32) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, 4u, |v| self.write(v))
}
/// Write a big-endian u16 (2 bytes).
#[inline]
fn write_be_u16(&mut self, n: u16) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, 2u, |v| self.write(v))
}
/// Write a big-endian i64 (8 bytes).
#[inline]
fn write_be_i64(&mut self, n: i64) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, 8u, |v| self.write(v))
}
/// Write a big-endian i32 (4 bytes).
#[inline]
fn write_be_i32(&mut self, n: i32) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, 4u, |v| self.write(v))
}
/// Write a big-endian i16 (2 bytes).
#[inline]
fn write_be_i16(&mut self, n: i16) -> IoResult<()> {
extensions::u64_to_be_bytes(n as u64, 2u, |v| self.write(v))
}
/// Write a big-endian IEEE754 double-precision floating-point (8 bytes).
#[inline]
fn write_be_f64(&mut self, f: f64) -> IoResult<()> {
unsafe {
self.write_be_u64(transmute(f))
}
}
/// Write a big-endian IEEE754 single-precision floating-point (4 bytes).
#[inline]
fn write_be_f32(&mut self, f: f32) -> IoResult<()> {
unsafe {
self.write_be_u32(transmute(f))
}
}
/// Write a little-endian u64 (8 bytes).
#[inline]
fn write_le_u64(&mut self, n: u64) -> IoResult<()> {
extensions::u64_to_le_bytes(n, 8u, |v| self.write(v))
}
/// Write a little-endian u32 (4 bytes).
#[inline]
fn write_le_u32(&mut self, n: u32) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, 4u, |v| self.write(v))
}
/// Write a little-endian u16 (2 bytes).
#[inline]
fn write_le_u16(&mut self, n: u16) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, 2u, |v| self.write(v))
}
/// Write a little-endian i64 (8 bytes).
#[inline]
fn write_le_i64(&mut self, n: i64) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, 8u, |v| self.write(v))
}
/// Write a little-endian i32 (4 bytes).
#[inline]
fn write_le_i32(&mut self, n: i32) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, 4u, |v| self.write(v))
}
/// Write a little-endian i16 (2 bytes).
#[inline]
fn write_le_i16(&mut self, n: i16) -> IoResult<()> {
extensions::u64_to_le_bytes(n as u64, 2u, |v| self.write(v))
}
/// Write a little-endian IEEE754 double-precision floating-point
/// (8 bytes).
#[inline]
fn write_le_f64(&mut self, f: f64) -> IoResult<()> {
unsafe {
self.write_le_u64(transmute(f))
}
}
/// Write a little-endian IEEE754 single-precision floating-point
/// (4 bytes).
#[inline]
fn write_le_f32(&mut self, f: f32) -> IoResult<()> {
unsafe {
self.write_le_u32(transmute(f))
}
}
/// Write a u8 (1 byte).
#[inline]
fn write_u8(&mut self, n: u8) -> IoResult<()> {
self.write([n])
}
/// Write an i8 (1 byte).
#[inline]
fn write_i8(&mut self, n: i8) -> IoResult<()> {
self.write([n as u8])
}
/// Creates a wrapper around a mutable reference to the writer.
///
/// This is useful to allow applying wrappers while still
/// retaining ownership of the original value.
#[inline]
fn by_ref<'a>(&'a mut self) -> RefWriter<'a, Self> {
RefWriter { inner: self }
}
}
impl<'a> Writer for Box<Writer+'a> {
#[inline]
fn write(&mut self, buf: &[u8]) -> IoResult<()> { self.write(buf) }
#[inline]
fn flush(&mut self) -> IoResult<()> { self.flush() }
}
impl<'a> Writer for &'a mut Writer+'a {
#[inline]
fn write(&mut self, buf: &[u8]) -> IoResult<()> { self.write(buf) }
#[inline]
fn flush(&mut self) -> IoResult<()> { self.flush() }
}
/// A `RefWriter` is a struct implementing `Writer` which contains a reference
/// to another writer. This is often useful when composing streams.
///
/// # Example
///
/// ```
/// # fn main() {}
/// # fn process_input<R: Reader>(r: R) {}
/// # fn foo () {
/// use std::io::util::TeeReader;
/// use std::io::{stdin, MemWriter};
///
/// let mut output = MemWriter::new();
///
/// {
/// // Don't give ownership of 'output' to the 'tee'. Instead we keep a
/// // handle to it in the outer scope
/// let mut tee = TeeReader::new(stdin(), output.by_ref());
/// process_input(tee);
/// }
///
/// println!("input processed: {}", output.unwrap());
/// # }
/// ```
pub struct RefWriter<'a, W:'a> {
/// The underlying writer which this is referencing
inner: &'a mut W
}
impl<'a, W: Writer> Writer for RefWriter<'a, W> {
#[inline]
fn write(&mut self, buf: &[u8]) -> IoResult<()> { self.inner.write(buf) }
#[inline]
fn flush(&mut self) -> IoResult<()> { self.inner.flush() }
}
/// A Stream is an object which is both readable and writable. Data written to a
/// stream is typically received by whatever object reads from the other end of it.
pub trait Stream: Reader + Writer { }
impl<T: Reader + Writer> Stream for T {}
/// An iterator that reads a line on each iteration,
/// until `.read_line()` encounters `EndOfFile`.
///
/// # Notes about the Iteration Protocol
///
/// The `Lines` may yield `None` and thus terminate
/// an iteration, but continue to yield elements if iteration
/// is attempted again.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
pub struct Lines<'r, T:'r> {
buffer: &'r mut T,
}
impl<'r, T: Buffer> Iterator<IoResult<String>> for Lines<'r, T> {
fn next(&mut self) -> Option<IoResult<String>> {
match self.buffer.read_line() {
Ok(x) => Some(Ok(x)),
Err(IoError { kind: EndOfFile, ..}) => None,
Err(y) => Some(Err(y))
}
}
}
/// An iterator that reads a utf8-encoded character on each iteration,
/// until `.read_char()` encounters `EndOfFile`.
///
/// # Notes about the Iteration Protocol
///
/// The `Chars` may yield `None` and thus terminate
/// an iteration, but continue to yield elements if iteration
/// is attempted again.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
pub struct Chars<'r, T:'r> {
buffer: &'r mut T
}
impl<'r, T: Buffer> Iterator<IoResult<char>> for Chars<'r, T> {
fn next(&mut self) -> Option<IoResult<char>> {
match self.buffer.read_char() {
Ok(x) => Some(Ok(x)),
Err(IoError { kind: EndOfFile, ..}) => None,
Err(y) => Some(Err(y))
}
}
}
/// A Buffer is a type of reader which has some form of internal buffering to
/// allow certain kinds of reading operations to be more optimized than others.
/// This type extends the `Reader` trait with a few methods that are not
/// possible to reasonably implement with purely a read interface.
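///
/// # Example
///
/// `BufReader`, for instance, implements `Buffer` on top of an in-memory
/// byte slice, so buffered helpers such as `read_line` can be used:
///
/// ```rust
/// use std::io::BufReader;
///
/// let mut reader = BufReader::new(b"hello\nworld\n");
/// // `read_line` is built on top of `fill_buf` and `consume`.
/// assert_eq!(reader.read_line().unwrap().as_slice(), "hello\n");
/// assert_eq!(reader.read_line().unwrap().as_slice(), "world\n");
/// ```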
pub trait Buffer: Reader {
/// Fills the internal buffer of this object, returning the buffer contents.
/// Note that none of the contents will be "read" in the sense that later
/// calling `read` may return the same contents.
///
    /// The `consume` function must be called with the number of bytes that are
    /// consumed from this buffer, to ensure that those bytes are never
    /// returned twice.
///
/// # Error
///
/// This function will return an I/O error if the underlying reader was
/// read, but returned an error. Note that it is not an error to return a
/// 0-length buffer.
fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]>;
/// Tells this buffer that `amt` bytes have been consumed from the buffer,
/// so they should no longer be returned in calls to `read`.
fn consume(&mut self, amt: uint);
/// Reads the next line of input, interpreted as a sequence of UTF-8
/// encoded Unicode codepoints. If a newline is encountered, then the
/// newline is contained in the returned string.
///
/// # Example
///
/// ```rust
/// use std::io;
///
/// let mut reader = io::stdin();
/// let input = reader.read_line().ok().unwrap_or("nothing".to_string());
/// ```
///
/// # Error
///
/// This function has the same error semantics as `read_until`:
///
/// * All non-EOF errors will be returned immediately
    /// * If an error is returned, previously consumed bytes are lost
    /// * EOF is only returned if no bytes have been read
    /// * Reaching EOF may mean that the delimiter is not present in the return
/// value
///
/// Additionally, this function can fail if the line of input read is not a
/// valid UTF-8 sequence of bytes.
fn read_line(&mut self) -> IoResult<String> {
self.read_until(b'\n').and_then(|line|
match String::from_utf8(line) {
Ok(s) => Ok(s),
Err(_) => Err(standard_error(InvalidInput)),
}
)
}
/// Create an iterator that reads a line on each iteration until EOF.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
fn lines<'r>(&'r mut self) -> Lines<'r, Self> {
Lines { buffer: self }
}
/// Reads a sequence of bytes leading up to a specified delimiter. Once the
/// specified byte is encountered, reading ceases and the bytes up to and
/// including the delimiter are returned.
///
/// # Error
///
/// If any I/O error is encountered other than EOF, the error is immediately
/// returned. Note that this may discard bytes which have already been read,
/// and those bytes will *not* be returned. It is recommended to use other
/// methods if this case is worrying.
///
/// If EOF is encountered, then this function will return EOF if 0 bytes
/// have been read, otherwise the pending byte buffer is returned. This
/// is the reason that the byte buffer returned may not always contain the
/// delimiter.
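    ///
    /// # Example
    ///
    /// For example, splitting an in-memory buffer on a delimiter byte
    /// (a sketch using `BufReader`):
    ///
    /// ```rust
    /// use std::io::BufReader;
    ///
    /// let mut reader = BufReader::new(b"key=value");
    /// // Reads up to and including the first `=`.
    /// assert_eq!(reader.read_until(b'=').unwrap().as_slice(), b"key=");
    /// ```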
fn read_until(&mut self, byte: u8) -> IoResult<Vec<u8>> {
let mut res = Vec::new();
let mut used;
loop {
{
let available = match self.fill_buf() {
Ok(n) => n,
Err(ref e) if res.len() > 0 && e.kind == EndOfFile => {
used = 0;
break
}
Err(e) => return Err(e)
};
match available.iter().position(|&b| b == byte) {
Some(i) => {
res.push_all(available.slice_to(i + 1));
used = i + 1;
break
}
None => {
res.push_all(available);
used = available.len();
}
}
}
self.consume(used);
}
self.consume(used);
Ok(res)
}
/// Reads the next utf8-encoded character from the underlying stream.
///
/// # Error
///
/// If an I/O error occurs, or EOF, then this function will return `Err`.
/// This function will also return error if the stream does not contain a
/// valid utf-8 encoded codepoint as the next few bytes in the stream.
fn read_char(&mut self) -> IoResult<char> {
let first_byte = try!(self.read_byte());
let width = str::utf8_char_width(first_byte);
if width == 1 { return Ok(first_byte as char) }
if width == 0 { return Err(standard_error(InvalidInput)) } // not utf8
let mut buf = [first_byte, 0, 0, 0];
{
let mut start = 1;
while start < width {
match try!(self.read(buf.mut_slice(start, width))) {
n if n == width - start => break,
n if n < width - start => { start += n; }
_ => return Err(standard_error(InvalidInput)),
}
}
}
match str::from_utf8(buf.slice_to(width)) {
Some(s) => Ok(s.char_at(0)),
None => Err(standard_error(InvalidInput))
}
}
/// Create an iterator that reads a utf8-encoded character on each iteration
/// until EOF.
///
/// # Error
///
/// Any error other than `EndOfFile` that is produced by the underlying Reader
/// is returned by the iterator and should be handled by the caller.
fn chars<'r>(&'r mut self) -> Chars<'r, Self> {
Chars { buffer: self }
}
}
/// When seeking, the resulting cursor is offset from a base by the offset given
/// to the `seek` function. The base used is specified by this enumeration.
pub enum SeekStyle {
/// Seek from the beginning of the stream
SeekSet,
/// Seek from the end of the stream
SeekEnd,
/// Seek from the current position
SeekCur,
}
/// An object implementing `Seek` internally has some form of cursor which can
/// be moved within a stream of bytes. The stream typically has a fixed size,
/// allowing seeking relative to either end.
pub trait Seek {
/// Return position of file cursor in the stream
fn tell(&self) -> IoResult<u64>;
/// Seek to an offset in a stream
///
/// A successful seek clears the EOF indicator. Seeking beyond EOF is
/// allowed, but seeking before position 0 is not allowed.
///
/// # Errors
///
/// * Seeking to a negative offset is considered an error
/// * Seeking past the end of the stream does not modify the underlying
/// stream, but the next write may cause the previous data to be filled in
/// with a bit pattern.
fn seek(&mut self, pos: i64, style: SeekStyle) -> IoResult<()>;
}
/// A listener is a value that can consume itself to start listening for
/// connections.
///
/// Doing so produces some sort of Acceptor.
pub trait Listener<T, A: Acceptor<T>> {
/// Spin up the listener and start queuing incoming connections
///
/// # Error
///
/// Returns `Err` if this listener could not be bound to listen for
/// connections. In all cases, this listener is consumed.
fn listen(self) -> IoResult<A>;
}
/// An acceptor is a value that presents incoming connections
pub trait Acceptor<T> {
/// Wait for and accept an incoming connection
///
/// # Error
///
/// Returns `Err` if an I/O error is encountered.
fn accept(&mut self) -> IoResult<T>;
/// Create an iterator over incoming connection attempts.
///
/// Note that I/O errors will be yielded by the iterator itself.
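    ///
    /// # Example
    ///
    /// A hedged sketch; `acceptor` stands for any value implementing
    /// `Acceptor`, e.g. the result of calling `listen()` on a listener:
    ///
    /// ```ignore
    /// for conn in acceptor.incoming() {
    ///     match conn {
    ///         Ok(stream) => { /* handle the new connection */ }
    ///         Err(e) => println!("accept failed: {}", e),
    ///     }
    /// }
    /// ```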
fn incoming<'r>(&'r mut self) -> IncomingConnections<'r, Self> {
IncomingConnections { inc: self }
}
}
/// An infinite iterator over incoming connection attempts.
/// Calling `next` will block the task until a connection is attempted.
///
/// Since connection attempts can continue forever, this iterator always returns
/// `Some`. The `Some` contains the `IoResult` representing whether the
/// connection attempt was successful. A successful connection will be wrapped
/// in `Ok`. A failed connection is represented as an `Err`.
pub struct IncomingConnections<'a, A:'a> {
inc: &'a mut A,
}
impl<'a, T, A: Acceptor<T>> Iterator<IoResult<T>> for IncomingConnections<'a, A> {
fn next(&mut self) -> Option<IoResult<T>> {
Some(self.inc.accept())
}
}
/// Creates a standard error for a commonly used flavor of error. The `detail`
/// field of the returned error will always be `None`.
///
/// # Example
///
/// ```
/// use std::io;
///
/// let eof = io::standard_error(io::EndOfFile);
/// let einval = io::standard_error(io::InvalidInput);
/// ```
pub fn standard_error(kind: IoErrorKind) -> IoError {
let desc = match kind {
EndOfFile => "end of file",
IoUnavailable => "I/O is unavailable",
InvalidInput => "invalid input",
OtherIoError => "unknown I/O error",
FileNotFound => "file not found",
PermissionDenied => "permission denied",
ConnectionFailed => "connection failed",
Closed => "stream is closed",
ConnectionRefused => "connection refused",
ConnectionReset => "connection reset",
ConnectionAborted => "connection aborted",
NotConnected => "not connected",
BrokenPipe => "broken pipe",
PathAlreadyExists => "file already exists",
PathDoesntExist => "no such file",
MismatchedFileTypeForOperation => "mismatched file type",
ResourceUnavailable => "resource unavailable",
TimedOut => "operation timed out",
ShortWrite(..) => "short write",
NoProgress => "no progress",
};
IoError {
kind: kind,
desc: desc,
detail: None,
}
}
/// A mode specifies how a file should be opened or created. These modes are
/// passed to `File::open_mode` and are used to control where the file is
/// positioned when it is initially opened.
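///
/// # Example
///
/// A minimal sketch (the path and the chosen mode/access pair are
/// illustrative only):
///
/// ```
/// # fn main() {}
/// # fn foo() {
/// use std::io::{File, Append, Write};
///
/// // Open for writing, positioned at EOF so new data is appended.
/// let mut log = File::open_mode(&Path::new("app.log"), Append, Write).unwrap();
/// # }
/// ```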
pub enum FileMode {
/// Opens a file positioned at the beginning.
Open,
/// Opens a file positioned at EOF.
Append,
/// Opens a file, truncating it if it already exists.
Truncate,
}
/// Access permissions with which the file should be opened. `File`s
/// opened with `Read` will return an error if written to.
pub enum FileAccess {
/// Read-only access, requests to write will result in an error
Read,
/// Write-only access, requests to read will result in an error
Write,
/// Read-write access, no requests are denied by default
ReadWrite,
}
/// Different kinds of files which can be identified by a call to stat
#[deriving(PartialEq, Show, Hash)]
pub enum FileType {
/// This is a normal file, corresponding to `S_IFREG`
TypeFile,
/// This file is a directory, corresponding to `S_IFDIR`
TypeDirectory,
/// This file is a named pipe, corresponding to `S_IFIFO`
TypeNamedPipe,
/// This file is a block device, corresponding to `S_IFBLK`
TypeBlockSpecial,
/// This file is a symbolic link to another file, corresponding to `S_IFLNK`
TypeSymlink,
/// The type of this file is not recognized as one of the other categories
TypeUnknown,
}
/// A structure used to describe metadata information about a file. This
/// structure is created through the `stat` method on a `Path`.
///
/// # Example
///
/// ```
/// # use std::io::fs::PathExtensions;
/// # fn main() {}
/// # fn foo() {
/// let info = match Path::new("foo.txt").stat() {
/// Ok(stat) => stat,
/// Err(e) => fail!("couldn't read foo.txt: {}", e),
/// };
///
/// println!("byte size: {}", info.size);
/// # }
/// ```
#[deriving(Hash)]
pub struct FileStat {
/// The size of the file, in bytes
pub size: u64,
/// The kind of file this path points to (directory, file, pipe, etc.)
pub kind: FileType,
/// The file permissions currently on the file
pub perm: FilePermission,
// FIXME(#10301): These time fields are pretty useless without an actual
// time representation, what are the milliseconds relative
// to?
/// The time that the file was created at, in platform-dependent
/// milliseconds
pub created: u64,
/// The time that this file was last modified, in platform-dependent
/// milliseconds
pub modified: u64,
/// The time that this file was last accessed, in platform-dependent
/// milliseconds
pub accessed: u64,
/// Information returned by stat() which is not guaranteed to be
/// platform-independent. This information may be useful on some platforms,
/// but it may have different meanings or no meaning at all on other
/// platforms.
///
/// Usage of this field is discouraged, but if access is desired then the
/// fields are located here.
#[unstable]
pub unstable: UnstableFileStat,
}
/// This structure contains all of the extra information returned by a `stat`
/// syscall which is not contained in the `FileStat` structure. This
/// information is not necessarily platform independent, and may have
/// different meanings or no meaning at all on some platforms.
#[unstable]
#[deriving(Hash)]
pub struct UnstableFileStat {
/// The ID of the device containing the file.
pub device: u64,
/// The file serial number.
pub inode: u64,
/// The device ID.
pub rdev: u64,
/// The number of hard links to this file.
pub nlink: u64,
/// The user ID of the file.
pub uid: u64,
/// The group ID of the file.
pub gid: u64,
/// The optimal block size for I/O.
pub blksize: u64,
/// The blocks allocated for this file.
pub blocks: u64,
/// User-defined flags for the file.
pub flags: u64,
/// The file generation number.
pub gen: u64,
}
bitflags! {
#[doc = "A set of permissions for a file or directory is represented"]
#[doc = "by a set of flags which are or'd together."]
flags FilePermission: u32 {
static UserRead = 0o400,
static UserWrite = 0o200,
static UserExecute = 0o100,
static GroupRead = 0o040,
static GroupWrite = 0o020,
static GroupExecute = 0o010,
static OtherRead = 0o004,
static OtherWrite = 0o002,
static OtherExecute = 0o001,
static UserRWX = UserRead.bits | UserWrite.bits | UserExecute.bits,
static GroupRWX = GroupRead.bits | GroupWrite.bits | GroupExecute.bits,
static OtherRWX = OtherRead.bits | OtherWrite.bits | OtherExecute.bits,
#[doc = "Permissions for user owned files, equivalent to 0644 on"]
#[doc = "unix-like systems."]
static UserFile = UserRead.bits | UserWrite.bits | GroupRead.bits | OtherRead.bits,
#[doc = "Permissions for user owned directories, equivalent to 0755 on"]
#[doc = "unix-like systems."]
static UserDir = UserRWX.bits | GroupRead.bits | GroupExecute.bits |
OtherRead.bits | OtherExecute.bits,
#[doc = "Permissions for user owned executables, equivalent to 0755"]
#[doc = "on unix-like systems."]
static UserExec = UserDir.bits,
#[doc = "All possible permissions enabled."]
static AllPermissions = UserRWX.bits | GroupRWX.bits | OtherRWX.bits,
}
}
impl Default for FilePermission {
#[inline]
fn default() -> FilePermission { FilePermission::empty() }
}
impl fmt::Show for FilePermission {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.fill = '0';
formatter.width = Some(4);
(&self.bits as &fmt::Octal).fmt(formatter)
}
}
#[cfg(test)]
mod tests {
use super::{IoResult, Reader, MemReader, NoProgress, InvalidInput};
use prelude::*;
use uint;
#[deriving(Clone, PartialEq, Show)]
enum BadReaderBehavior {
GoodBehavior(uint),
BadBehavior(uint)
}
struct BadReader<T> {
r: T,
behavior: Vec<BadReaderBehavior>,
}
impl<T: Reader> BadReader<T> {
fn new(r: T, behavior: Vec<BadReaderBehavior>) -> BadReader<T> {
BadReader { behavior: behavior, r: r }
}
}
impl<T: Reader> Reader for BadReader<T> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let BadReader { ref mut behavior, ref mut r } = *self;
loop {
if behavior.is_empty() {
// fall back on good
return r.read(buf);
}
match behavior.as_mut_slice()[0] {
GoodBehavior(0) => (),
GoodBehavior(ref mut x) => {
*x -= 1;
return r.read(buf);
}
BadBehavior(0) => (),
BadBehavior(ref mut x) => {
*x -= 1;
return Ok(0);
}
};
behavior.shift();
}
}
}
#[test]
fn test_read_at_least() {
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([GoodBehavior(uint::MAX)]));
let mut buf = [0u8, ..5];
assert!(r.read_at_least(1, buf).unwrap() >= 1);
assert!(r.read_exact(5).unwrap().len() == 5); // read_exact uses read_at_least
assert!(r.read_at_least(0, buf).is_ok());
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([BadBehavior(50), GoodBehavior(uint::MAX)]));
assert!(r.read_at_least(1, buf).unwrap() >= 1);
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([BadBehavior(1), GoodBehavior(1),
BadBehavior(50), GoodBehavior(uint::MAX)]));
assert!(r.read_at_least(1, buf).unwrap() >= 1);
assert!(r.read_at_least(1, buf).unwrap() >= 1);
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([BadBehavior(uint::MAX)]));
assert_eq!(r.read_at_least(1, buf).unwrap_err().kind, NoProgress);
let mut r = MemReader::new(Vec::from_slice(b"hello, world!"));
assert_eq!(r.read_at_least(5, buf).unwrap(), 5);
assert_eq!(r.read_at_least(6, buf).unwrap_err().kind, InvalidInput);
}
#[test]
fn test_push_at_least() {
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([GoodBehavior(uint::MAX)]));
let mut buf = Vec::new();
assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
assert!(r.push_at_least(0, 5, &mut buf).is_ok());
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([BadBehavior(50), GoodBehavior(uint::MAX)]));
assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([BadBehavior(1), GoodBehavior(1),
BadBehavior(50), GoodBehavior(uint::MAX)]));
assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
assert!(r.push_at_least(1, 5, &mut buf).unwrap() >= 1);
let mut r = BadReader::new(MemReader::new(Vec::from_slice(b"hello, world!")),
Vec::from_slice([BadBehavior(uint::MAX)]));
assert_eq!(r.push_at_least(1, 5, &mut buf).unwrap_err().kind, NoProgress);
let mut r = MemReader::new(Vec::from_slice(b"hello, world!"));
assert_eq!(r.push_at_least(5, 1, &mut buf).unwrap_err().kind, InvalidInput);
}
#[test]
fn test_show() {
use super::*;
assert_eq!(format!("{}", UserRead), "0400".to_string());
assert_eq!(format!("{}", UserFile), "0644".to_string());
assert_eq!(format!("{}", UserExec), "0755".to_string());
assert_eq!(format!("{}", UserRWX), "0700".to_string());
assert_eq!(format!("{}", GroupRWX), "0070".to_string());
assert_eq!(format!("{}", OtherRWX), "0007".to_string());
assert_eq!(format!("{}", AllPermissions), "0777".to_string());
assert_eq!(format!("{}", UserRead | UserWrite | OtherWrite), "0602".to_string());
}
}
| 34.351503 | 100 | 0.592552 |
e880eb7f200113eab8abb159d538a0234f002057 | 19,726 | use super::KernelFormat;
use crate::ops::cnn::*;
use crate::ops::nn::*;
use crate::setup_test_logger;
use proptest::collection::vec;
use proptest::prelude::*;
use tract_itertools::izip;
use tract_ndarray::prelude::*;
#[derive(Debug)]
struct ConvProblem {
shape_in: DataShape,
shape_out: DataShape,
kernel_format: KernelFormat,
group: usize,
data: ArrayD<f32>,
kernel: ArrayD<f32>,
bias: Option<ArrayD<f32>>,
}
impl ConvProblem {
fn geo_ker(&self) -> &[usize] {
&self.kernel.shape()[self.kernel_format.h_axis()..][..self.shape_in.hw_rank()]
}
fn reference(&self) -> ArrayD<f32> {
setup_test_logger();
assert_eq!(self.data.shape(), &*self.shape_in.shape);
let mut out = ArrayD::zeros(&*self.shape_out.shape);
let n = *self.shape_in.n().clone().unwrap_or(&1);
let ci_per_g = self.shape_in.c() / self.group;
let co_per_g = self.shape_out.c() / self.group;
for n in 0..n {
for g in 0..self.group {
for geo_out in tract_ndarray::indices(self.shape_out.hw_dims()) {
let mut output_coords: TVec<usize> = geo_out.slice().into();
if self.shape_in.fmt.has_n() {
output_coords.insert(0, n);
}
output_coords.insert(self.shape_out.c_axis(), 0);
for geo_ker in tract_ndarray::indices(self.geo_ker()) {
let mut input_coords: TVec<usize> =
izip!(geo_out.slice(), geo_ker.slice()).map(|(a, b)| a + b).collect();
if self.shape_in.fmt.has_n() {
input_coords.insert(0, n);
}
input_coords.insert(self.shape_in.c_axis(), 0);
for ci in 0..ci_per_g {
input_coords[self.shape_in.c_axis()] = ci + g * ci_per_g;
let i = self.data[&*input_coords];
for co in 0..co_per_g {
output_coords[self.shape_out.c_axis()] = co + g * co_per_g;
let mut kernel_coords: TVec<usize> = geo_ker.slice().into();
match self.kernel_format {
KernelFormat::OIHW => {
kernel_coords.insert(0, ci);
kernel_coords.insert(0, co + g * co_per_g);
}
KernelFormat::HWIO => {
kernel_coords.push(ci + g * ci_per_g);
kernel_coords.push(co);
}
}
let k = self.kernel[&*kernel_coords];
out[&*output_coords] += k * i;
}
}
}
}
}
}
if let Some(bias) = &self.bias {
let mut shape = vec![1; out.ndim()];
shape[self.shape_out.c_axis()] = bias.len();
out += &bias.clone().into_shape(shape).unwrap();
}
out
}
fn tract(&self) -> anyhow::Result<ArrayD<f32>> {
setup_test_logger();
assert_eq!(self.data.shape(), &*self.shape_in.shape);
let mut model = TypedModel::default();
let wire = model
.add_source("input", TypedFact::dt_shape(f32::datum_type(), &self.shape_in.shape))?;
let op = ConvUnary::new(
PoolSpec::new(
self.shape_in.fmt,
self.geo_ker().into(),
PaddingSpec::Valid,
None,
None,
Some(*self.shape_out.c()),
),
self.kernel_format.clone(),
self.kernel.clone().into_arc_tensor(),
self.group,
self.bias.clone().map(|a| a.into_arc_tensor()),
None,
);
let wire = model.wire_node("conv", op, &[wire])?[0];
model.set_output_outlets(&[wire])?;
let mut output =
model.into_optimized()?.into_runnable()?.run(tvec![self.data.clone().into_tensor()])?;
Ok(output.remove(0).into_tensor().into_array::<f32>()?)
}
}
impl Arbitrary for ConvProblem {
type Parameters = ();
type Strategy = BoxedStrategy<ConvProblem>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
(
any::<DataFormat>(),
any::<KernelFormat>(),
1usize..=3,
1usize..=4,
1usize..=4,
1usize..=3,
(1usize..=3).prop_flat_map(|r| shapes(r)),
)
.prop_flat_map(|(df, kf, n, mut ci0, co0, group, (mut ker_shape, data_shape))| {
// FIXME in HWIO order, only regular and depthwise are supported
if kf == KernelFormat::HWIO && group > 1 {
ci0 = 1;
}
let shape_in = df.from_n_c_hw(n, ci0 * group, &data_shape).unwrap();
let shape_out: TVec<_> =
izip!(&ker_shape, data_shape).map(|(k, d)| d - k + 1).collect();
let shape_out = df.from_n_c_hw(n, co0 * group, &shape_out).unwrap();
let data_in = tensor(shape_in.shape.iter().cloned().collect());
match kf {
KernelFormat::HWIO => {
ker_shape.push(ci0 * group);
ker_shape.push(co0)
}
KernelFormat::OIHW => {
ker_shape.insert(0, ci0);
ker_shape.insert(0, co0 * group)
}
};
let kernel = tensor(ker_shape);
let bias = proptest::option::of(tensor(vec![co0 * group]));
(Just((kf, shape_in, shape_out, group)), data_in, kernel, bias)
})
.prop_map(|((kernel_format, shape_in, shape_out, group), data, kernel, bias)| {
ConvProblem { shape_in, shape_out, kernel_format, group, data, kernel, bias }
})
.boxed()
}
}
impl Arbitrary for KernelFormat {
type Parameters = ();
type Strategy = BoxedStrategy<KernelFormat>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
prop_oneof!(Just(KernelFormat::OIHW), Just(KernelFormat::HWIO)).boxed()
}
}
impl Arbitrary for DataFormat {
type Parameters = ();
type Strategy = BoxedStrategy<DataFormat>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
prop_oneof!(
Just(DataFormat::HWC),
Just(DataFormat::CHW),
Just(DataFormat::NHWC),
Just(DataFormat::NCHW)
)
.boxed()
}
}
pub fn tensor(shape: Vec<usize>) -> BoxedStrategy<ArrayD<f32>> {
let len = shape.iter().product::<usize>();
vec(any::<i8>().prop_map(|i| i as f32), len..=len)
.prop_map(move |vec| ArrayD::from_shape_vec(shape.clone(), vec).unwrap())
.boxed()
}
pub fn shapes(rank: usize) -> BoxedStrategy<(Vec<usize>, Vec<usize>)> {
vec((1usize..3, 0usize..3).prop_map(|(k, exceed)| (k, k + exceed)), rank..=rank)
.prop_map(|v| v.into_iter().unzip())
.boxed()
}
proptest::proptest! {
#[test]
fn prop(pb in any::<ConvProblem>()) {
prop_assert_eq!(pb.tract().unwrap(), pb.reference());
}
}
#[test]
fn trivial_0() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 1, &[1, 1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 1, &[1, 1])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::arr3(&[[[0.0f32]]]).into_dyn(),
kernel: arr4(&[[[[0.0f32]]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract()?, pb.reference());
Ok(())
}
#[test]
fn trivial_1() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NHWC.from_n_c_hw(1, 1, &[1])?,
shape_out: DataFormat::NHWC.from_n_c_hw(1, 1, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::arr3(&[[[1.0f32]]]).into_dyn(),
kernel: ndarray::arr3(&[[[1.0f32]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract()?, pb.reference());
Ok(())
}
#[test]
fn trivial_2() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NHWC.from_n_c_hw(1, 1, &[2])?,
shape_out: DataFormat::NHWC.from_n_c_hw(1, 1, &[2])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::arr3(&[[[1.0f32], [0.0]]]).into_dyn(),
kernel: ndarray::arr3(&[[[1.0f32]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract()?, pb.reference());
Ok(())
}
#[test]
fn trivial_3() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NHWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::NHWC.from_n_c_hw(1, 1, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::arr3(&[[[0.0f32, 1.0]]]).into_dyn(),
kernel: ndarray::arr3(&[[[0.0f32], [1.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract()?, pb.reference());
Ok(())
}
#[test]
fn nchw_0() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NCHW.from_n_c_hw(1, 1, &[2])?,
shape_out: DataFormat::NCHW.from_n_c_hw(1, 1, &[2])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::arr3(&[[[0f32, 1.0]]]).into_dyn(),
kernel: ndarray::arr3(&[[[1f32]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_1() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr2(&[[0.0f32, 1.0]]).into_dyn(),
kernel: ndarray::arr3(&[[[0.0f32]], [[1.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract()?, pb.reference());
Ok(())
}
/*
#[test]
fn group_2() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 4, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
kernel_format: KernelFormat::HWIO,
group: 2,
data: ndarray::arr2(&[[0.0f32, 0.0, 1.0, 0.0]]).into_dyn(),
kernel: ndarray::arr3(&[[[0.0f32], [0.0], [1.0], [0.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract()?, pb.reference());
Ok(())
}
*/
#[test]
fn group_3() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr2(&[[0.0f32, 1.0]]).into_dyn(),
kernel: ndarray::arr3(&[[[0.0f32]], [[1.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_4() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 4, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr2(&[[0.0f32, 1.0]]).into_dyn(),
kernel: ndarray::arr3(&[[[0.0f32]], [[0.0]], [[0.0]], [[1.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_5() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1, 1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 4, &[1, 1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr3(&[[[0.0f32, 1.0]]]).into_dyn(),
kernel: tensor4(&[[[[0.0f32]]], [[[0.0]]], [[[0.0]]], [[[0.0]]]])
.into_array::<f32>()
.unwrap()
.into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_6() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NHWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::NHWC.from_n_c_hw(1, 4, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr3(&[[[0.0f32, 1.0]]]).into_dyn(),
kernel: tensor3(&[[[0.0f32]], [[0.0]], [[0.0]], [[0.0]]])
.into_array::<f32>()
.unwrap()
.into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_7() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NCHW.from_n_c_hw(1, 2, &[2])?,
shape_out: DataFormat::NCHW.from_n_c_hw(1, 4, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr3(&[[[0.0f32, 0.0], [0.0, 1.0]]]).into_dyn(),
kernel: tensor3(&[[[0.0f32, 0.0]], [[0.0, 0.0]], [[0.0, 0.0]], [[0.0, 1.0]]])
.into_array::<f32>()
.unwrap()
.into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_8() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 4, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr2(&[[0.0f32, 0.0, 0.0, 1.0]]).into_dyn(),
kernel: tensor3(&[[[0.0f32], [0.0]], [[0.0], [0.0]]])
.into_array::<f32>()
.unwrap()
.into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_9() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[2])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 4, &[2])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::arr2(&[[0.0f32, 0.0], [0.0, 1.0]]).into_dyn(),
kernel: tensor3(&[[[0.0f32]], [[0.0]], [[0.0]], [[1.0]]])
.into_array::<f32>()
.unwrap()
.into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_10() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::CHW.from_n_c_hw(1, 2, &[2, 1, 4])?,
shape_out: DataFormat::CHW.from_n_c_hw(1, 4, &[2, 1, 3])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::ArrayD::<f32>::zeros(vec![2, 2, 1, 4]),
kernel: ndarray::ArrayD::from_elem(vec![4, 1, 1, 1, 2], 1.0f32),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_11() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 8, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: tract_ndarray::arr2(&[[0.0, 1.0]]).into_dyn(),
kernel: tract_ndarray::arr3(&[
[[0.0]],
[[0.0]],
[[0.0]],
[[0.0]],
[[0.0]],
[[0.0]],
[[0.0]],
[[1.0]],
])
.into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_12() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
kernel_format: KernelFormat::HWIO,
group: 2,
data: tract_ndarray::arr2(&[[0.0, 0.0]]).into_dyn(),
kernel: tract_ndarray::arr3(&[[[0.0], [0.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_13() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 4, &[1])?,
kernel_format: KernelFormat::HWIO,
group: 2,
data: tract_ndarray::arr2(&[[0.0, 1.0]]).into_dyn(),
kernel: tract_ndarray::arr3(&[[[0.0, 0.0], [1.0, 0.0]]]).into_dyn(),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn group_bias_0() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NHWC.from_n_c_hw(1, 2, &[1])?,
shape_out: DataFormat::NHWC.from_n_c_hw(1, 4, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 2,
data: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 2]),
kernel: ndarray::ArrayD::<f32>::zeros(vec![4, 1, 1]),
bias: Some(ndarray::ArrayD::<f32>::zeros(vec![4])),
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn bias_0() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::HWC.from_n_c_hw(1, 1, &[2])?,
shape_out: DataFormat::HWC.from_n_c_hw(1, 1, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::ArrayD::<f32>::zeros(vec![2, 1]),
kernel: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 2]),
bias: Some(ndarray::ArrayD::<f32>::zeros(vec![1])),
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn bias_chw_0() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::CHW.from_n_c_hw(1, 1, &[3])?,
shape_out: DataFormat::CHW.from_n_c_hw(1, 3, &[3])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::arr2(&[[0f32, 0., 0.]]).into_dyn(),
kernel: ndarray::arr3(&[[[0f32]], [[0.]], [[0.]]]).into_dyn(),
bias: Some(ndarray::arr1(&[0f32, 0., 1.]).into_dyn()),
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn batch_0() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NHWC.from_n_c_hw(2, 1, &[2])?,
shape_out: DataFormat::NHWC.from_n_c_hw(2, 1, &[1])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::ArrayD::<f32>::zeros(vec![2, 2, 1]),
kernel: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 2]),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn bias_3d_1() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::CHW.from_n_c_hw(1, 1, &[1, 1, 2])?,
shape_out: DataFormat::CHW.from_n_c_hw(1, 1, &[1, 1, 2])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 1, 2]),
kernel: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 1, 1, 1]),
bias: Some(ndarray::ArrayD::<f32>::ones(vec![1])),
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
#[test]
fn batch_3d() -> anyhow::Result<()> {
let pb = ConvProblem {
shape_in: DataFormat::NCHW.from_n_c_hw(1, 1, &[2, 2, 1])?,
shape_out: DataFormat::NCHW.from_n_c_hw(1, 1, &[2, 2, 1])?,
kernel_format: KernelFormat::OIHW,
group: 1,
data: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 2, 2, 1]),
kernel: ndarray::ArrayD::<f32>::zeros(vec![1, 1, 1, 1, 1]),
bias: None,
};
assert_eq!(pb.tract().unwrap(), pb.reference());
Ok(())
}
| 33.951807 | 98 | 0.508872 |
e2b98a496bb96c441563976012420063ca889049 | 477 | use serde::Deserialize;
use serde::Serialize;
/// This struct represents buy-side and sell-side
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Side {
Buy,
Sell,
}
// FIXME: Should be changed to TryFrom.
impl From<String> for Side {
fn from(side: String) -> Self {
if side.to_lowercase() == "buy" {
Side::Buy
} else {
Side::Sell
}
}
}
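// A minimal sketch exercising the conversion above. Note the fallback
// behaviour: anything that is not a case-insensitive "buy" becomes
// `Side::Sell`.
#[cfg(test)]
mod side_conversion_tests {
    use super::Side;

    #[test]
    fn converts_case_insensitively() {
        assert_eq!(Side::from("BUY".to_string()), Side::Buy);
        assert_eq!(Side::from("sell".to_string()), Side::Sell);
        assert_eq!(Side::from("hold".to_string()), Side::Sell);
    }
}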
| 21.681818 | 68 | 0.603774 |
c1ff1de21fc39069484ae91c519216ddb1eebebe | 2,786 | use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};
#[cfg(unix)]
use std::ptr;
use tempfile::TempDir;
use url::Url;
use crate::{Branch, Oid, Repository, RepositoryInitOptions};
macro_rules! t {
($e:expr) => {
match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {}", stringify!($e), e),
}
};
}
pub fn repo_init() -> (TempDir, Repository) {
let td = TempDir::new().unwrap();
let mut opts = RepositoryInitOptions::new();
opts.initial_head("main");
let repo = Repository::init_opts(td.path(), &opts).unwrap();
{
let mut config = repo.config().unwrap();
config.set_str("user.name", "name").unwrap();
config.set_str("user.email", "email").unwrap();
let mut index = repo.index().unwrap();
let id = index.write_tree().unwrap();
let tree = repo.find_tree(id).unwrap();
let sig = repo.signature().unwrap();
repo.commit(Some("HEAD"), &sig, &sig, "initial\n\nbody", &tree, &[])
.unwrap();
}
(td, repo)
}
pub fn commit(repo: &Repository) -> (Oid, Oid) {
let mut index = t!(repo.index());
let root = repo.path().parent().unwrap();
t!(File::create(&root.join("foo")));
t!(index.add_path(Path::new("foo")));
let tree_id = t!(index.write_tree());
let tree = t!(repo.find_tree(tree_id));
let sig = t!(repo.signature());
let head_id = t!(repo.refname_to_id("HEAD"));
let parent = t!(repo.find_commit(head_id));
let commit = t!(repo.commit(Some("HEAD"), &sig, &sig, "commit", &tree, &[&parent]));
(commit, tree_id)
}
pub fn path2url(path: &Path) -> String {
Url::from_file_path(path).unwrap().to_string()
}
pub fn worktrees_env_init(repo: &Repository) -> (TempDir, Branch<'_>) {
let oid = repo.head().unwrap().target().unwrap();
let commit = repo.find_commit(oid).unwrap();
let branch = repo.branch("wt-branch", &commit, true).unwrap();
let wtdir = TempDir::new().unwrap();
(wtdir, branch)
}
#[cfg(windows)]
pub fn realpath(original: &Path) -> io::Result<PathBuf> {
Ok(original.to_path_buf())
}
#[cfg(unix)]
pub fn realpath(original: &Path) -> io::Result<PathBuf> {
use libc::c_char;
use std::ffi::{CStr, CString, OsString};
use std::os::unix::prelude::*;
extern "C" {
fn realpath(name: *const c_char, resolved: *mut c_char) -> *mut c_char;
}
unsafe {
let cstr = CString::new(original.as_os_str().as_bytes())?;
let ptr = realpath(cstr.as_ptr(), ptr::null_mut());
if ptr.is_null() {
return Err(io::Error::last_os_error());
}
let bytes = CStr::from_ptr(ptr).to_bytes().to_vec();
libc::free(ptr as *mut _);
Ok(PathBuf::from(OsString::from_vec(bytes)))
}
}
| 30.955556 | 88 | 0.580761 |
332260d88234b405987195023291709f6cccc547 | 853 | //
//! Copyright 2020 Alibaba Group Holding Limited.
//!
//! Licensed under the Apache License, Version 2.0 (the "License");
//! you may not use this file except in compliance with the License.
//! You may obtain a copy of the License at
//!
//! http://www.apache.org/licenses/LICENSE-2.0
//!
//! Unless required by applicable law or agreed to in writing, software
//! distributed under the License is distributed on an "AS IS" BASIS,
//! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//! See the License for the specific language governing permissions and
//! limitations under the License.
use crate::api::concise::reduce::Range;
use crate::errors::BuildJobError;
use crate::stream::Stream;
use crate::Data;
pub trait Limit<D: Data> {
fn limit(&self, range: Range, size: u32) -> Result<Stream<D>, BuildJobError>;
}
| 35.541667 | 81 | 0.720985 |
1a13335a0e49d34bfc44b62f02b7011ecd4126b1 | 48,398 | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Validates all used crates and extern libraries and loads their metadata
use cstore::{self, CStore, CrateSource, MetadataBlob};
use locator::{self, CratePaths};
use schema::CrateRoot;
use rustc_data_structures::sync::{Lrc, RwLock, Lock};
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX};
use rustc::hir::svh::Svh;
use rustc::middle::allocator::AllocatorKind;
use rustc::middle::cstore::DepKind;
use rustc::mir::interpret::AllocDecodingState;
use rustc::session::{Session, CrateDisambiguator};
use rustc::session::config::{Sanitizer, self};
use rustc_target::spec::{PanicStrategy, TargetTriple};
use rustc::session::search_paths::PathKind;
use rustc::middle;
use rustc::middle::cstore::{validate_crate_name, ExternCrate, ExternCrateSource};
use rustc::util::common::record_time;
use rustc::util::nodemap::FxHashSet;
use rustc::hir::map::Definitions;
use std::ops::Deref;
use std::path::PathBuf;
use std::{cmp, fs};
use syntax::ast;
use syntax::attr;
use syntax::edition::Edition;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
use syntax::visit;
use syntax_pos::{Span, DUMMY_SP};
use log;
pub struct Library {
pub dylib: Option<(PathBuf, PathKind)>,
pub rlib: Option<(PathBuf, PathKind)>,
pub rmeta: Option<(PathBuf, PathKind)>,
pub metadata: MetadataBlob,
}
pub struct CrateLoader<'a> {
pub sess: &'a Session,
cstore: &'a CStore,
local_crate_name: Symbol,
}
fn dump_crates(cstore: &CStore) {
info!("resolved crates:");
cstore.iter_crate_data(|_, data| {
info!(" name: {}", data.root.name);
info!(" cnum: {}", data.cnum);
info!(" hash: {}", data.root.hash);
info!(" reqd: {:?}", *data.dep_kind.lock());
let CrateSource { dylib, rlib, rmeta } = data.source.clone();
dylib.map(|dl| info!(" dylib: {}", dl.0.display()));
rlib.map(|rl| info!(" rlib: {}", rl.0.display()));
rmeta.map(|rl| info!(" rmeta: {}", rl.0.display()));
});
}
// Extra info about a crate loaded for plugins or exported macros.
struct ExtensionCrate {
metadata: PMDSource,
dylib: Option<PathBuf>,
target_only: bool,
}
enum PMDSource {
Registered(Lrc<cstore::CrateMetadata>),
Owned(Library),
}
impl Deref for PMDSource {
type Target = MetadataBlob;
fn deref(&self) -> &MetadataBlob {
match *self {
PMDSource::Registered(ref cmd) => &cmd.blob,
PMDSource::Owned(ref lib) => &lib.metadata
}
}
}
enum LoadResult {
Previous(CrateNum),
Loaded(Library),
}
impl<'a> CrateLoader<'a> {
pub fn new(sess: &'a Session, cstore: &'a CStore, local_crate_name: &str) -> Self {
CrateLoader {
sess,
cstore,
local_crate_name: Symbol::intern(local_crate_name),
}
}
fn existing_match(&self, name: Symbol, hash: Option<&Svh>, kind: PathKind)
-> Option<CrateNum> {
let mut ret = None;
self.cstore.iter_crate_data(|cnum, data| {
if data.name != name { return }
match hash {
Some(hash) if *hash == data.root.hash => { ret = Some(cnum); return }
Some(..) => return,
None => {}
}
// When the hash is None we're dealing with a top-level dependency
// in which case we may have a specification on the command line for
// this library. Even though an upstream library may have loaded
// something of the same name, we have to make sure it was loaded
// from the exact same location as well.
//
// We're also sure to compare *paths*, not actual byte slices. The
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = &self.cstore.get_crate_data(cnum).source;
if let Some(locs) = self.sess.opts.externs.get(&*name.as_str()) {
let found = locs.iter().any(|l| {
let l = fs::canonicalize(l).ok();
source.dylib.as_ref().map(|p| &p.0) == l.as_ref() ||
source.rlib.as_ref().map(|p| &p.0) == l.as_ref()
});
if found {
ret = Some(cnum);
}
return
}
// Alright, so we've gotten this far which means that `data` has the
            // right name, we don't have a hash, and we don't have a `--extern`
            // flag pointing at this crate. We're still not quite done because we
// have to make sure that this crate was found in the crate lookup
// path (this is a top-level dependency) as we don't want to
// implicitly load anything inside the dependency lookup path.
let prev_kind = source.dylib.as_ref().or(source.rlib.as_ref())
.or(source.rmeta.as_ref())
.expect("No sources for crate").1;
if ret.is_none() && (prev_kind == kind || prev_kind == PathKind::All) {
ret = Some(cnum);
}
});
return ret;
}
fn verify_no_symbol_conflicts(&self,
span: Span,
root: &CrateRoot) {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == root.name &&
self.sess.local_crate_disambiguator() == root.disambiguator {
span_fatal!(self.sess, span, E0519,
"the current crate is indistinguishable from one of its \
dependencies: it has the same crate-name `{}` and was \
compiled with the same `-C metadata` arguments. This \
will result in symbol conflicts between the two.",
root.name)
}
// Check for conflicts with any crate loaded so far
self.cstore.iter_crate_data(|_, other| {
if other.root.name == root.name && // same crate-name
other.root.disambiguator == root.disambiguator && // same crate-disambiguator
other.root.hash != root.hash { // but different SVH
span_fatal!(self.sess, span, E0523,
"found two different crates with name `{}` that are \
not distinguished by differing `-C metadata`. This \
will result in symbol conflicts between the two.",
root.name)
}
});
}
fn register_crate(&mut self,
root: &Option<CratePaths>,
ident: Symbol,
name: Symbol,
span: Span,
lib: Library,
dep_kind: DepKind)
-> (CrateNum, Lrc<cstore::CrateMetadata>) {
info!("register crate `extern crate {} as {}`", name, ident);
let crate_root = lib.metadata.get_root();
self.verify_no_symbol_conflicts(span, &crate_root);
// Claim this crate number and cache it
let cnum = self.cstore.alloc_new_crate_num();
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_string(),
dylib: lib.dylib.clone().map(|p| p.0),
rlib: lib.rlib.clone().map(|p| p.0),
rmeta: lib.rmeta.clone().map(|p| p.0),
})
} else {
None
};
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
let Library { dylib, rlib, rmeta, metadata } = lib;
let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind);
let dependencies: Vec<CrateNum> = cnum_map.iter().cloned().collect();
let def_path_table = record_time(&self.sess.perf_stats.decode_def_path_tables_time, || {
crate_root.def_path_table.decode((&metadata, self.sess))
});
let interpret_alloc_index: Vec<u32> = crate_root.interpret_alloc_index
.decode(&metadata)
.collect();
let trait_impls = crate_root
.impls
.decode((&metadata, self.sess))
.map(|trait_impls| (trait_impls.trait_id, trait_impls.impls))
.collect();
let cmeta = cstore::CrateMetadata {
name,
extern_crate: Lock::new(None),
def_path_table: Lrc::new(def_path_table),
trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
}),
root: crate_root,
blob: metadata,
cnum_map,
cnum,
dependencies: Lock::new(dependencies),
codemap_import_info: RwLock::new(vec![]),
alloc_decoding_state: AllocDecodingState::new(interpret_alloc_index),
dep_kind: Lock::new(dep_kind),
source: cstore::CrateSource {
dylib,
rlib,
rmeta,
}
};
let cmeta = Lrc::new(cmeta);
self.cstore.set_crate_data(cnum, cmeta.clone());
(cnum, cmeta)
}
fn resolve_crate(&mut self,
root: &Option<CratePaths>,
ident: Symbol,
name: Symbol,
hash: Option<&Svh>,
extra_filename: Option<&str>,
span: Span,
path_kind: PathKind,
mut dep_kind: DepKind)
-> (CrateNum, Lrc<cstore::CrateMetadata>) {
info!("resolving crate `extern crate {} as {}`", name, ident);
let result = if let Some(cnum) = self.existing_match(name, hash, path_kind) {
LoadResult::Previous(cnum)
} else {
info!("falling back to a load");
let mut locate_ctxt = locator::Context {
sess: self.sess,
span,
ident,
crate_name: name,
hash: hash.map(|a| &*a),
extra_filename: extra_filename,
filesearch: self.sess.target_filesearch(path_kind),
target: &self.sess.target.target,
triple: &self.sess.opts.target_triple,
root,
rejected_via_hash: vec![],
rejected_via_triple: vec![],
rejected_via_kind: vec![],
rejected_via_version: vec![],
rejected_via_filename: vec![],
should_match_name: true,
is_proc_macro: Some(false),
metadata_loader: &*self.cstore.metadata_loader,
};
self.load(&mut locate_ctxt).or_else(|| {
dep_kind = DepKind::UnexportedMacrosOnly;
let mut proc_macro_locator = locator::Context {
target: &self.sess.host,
triple: &TargetTriple::from_triple(config::host_triple()),
filesearch: self.sess.host_filesearch(path_kind),
rejected_via_hash: vec![],
rejected_via_triple: vec![],
rejected_via_kind: vec![],
rejected_via_version: vec![],
rejected_via_filename: vec![],
is_proc_macro: Some(true),
..locate_ctxt
};
self.load(&mut proc_macro_locator)
}).unwrap_or_else(|| locate_ctxt.report_errs())
};
match result {
LoadResult::Previous(cnum) => {
let data = self.cstore.get_crate_data(cnum);
if data.root.macro_derive_registrar.is_some() {
dep_kind = DepKind::UnexportedMacrosOnly;
}
data.dep_kind.with_lock(|data_dep_kind| {
*data_dep_kind = cmp::max(*data_dep_kind, dep_kind);
});
(cnum, data)
}
LoadResult::Loaded(library) => {
self.register_crate(root, ident, name, span, library, dep_kind)
}
}
}
fn load(&mut self, locate_ctxt: &mut locator::Context) -> Option<LoadResult> {
let library = locate_ctxt.maybe_load_library_crate()?;
// In the case that we're loading a crate, but not matching
// against a hash, we could load a crate which has the same hash
// as an already loaded crate. If this is the case prevent
// duplicates by just using the first crate.
//
// Note that we only do this for target triple crates, though, as we
// don't want to match a host crate against an equivalent target one
// already loaded.
let root = library.metadata.get_root();
if locate_ctxt.triple == &self.sess.opts.target_triple {
let mut result = LoadResult::Loaded(library);
self.cstore.iter_crate_data(|cnum, data| {
if data.root.name == root.name && root.hash == data.root.hash {
assert!(locate_ctxt.hash.is_none());
info!("load success, going to previous cnum: {}", cnum);
result = LoadResult::Previous(cnum);
}
});
Some(result)
} else {
Some(LoadResult::Loaded(library))
}
}
fn update_extern_crate(&mut self,
cnum: CrateNum,
mut extern_crate: ExternCrate,
visited: &mut FxHashSet<(CrateNum, bool)>)
{
if !visited.insert((cnum, extern_crate.direct)) { return }
let cmeta = self.cstore.get_crate_data(cnum);
let mut old_extern_crate = cmeta.extern_crate.borrow_mut();
// Prefer:
// - something over nothing (tuple.0);
// - direct extern crate to indirect (tuple.1);
// - shorter paths to longer (tuple.2).
let new_rank = (
true,
extern_crate.direct,
cmp::Reverse(extern_crate.path_len),
);
let old_rank = match *old_extern_crate {
None => (false, false, cmp::Reverse(usize::max_value())),
Some(ref c) => (
true,
c.direct,
cmp::Reverse(c.path_len),
),
};
if old_rank >= new_rank {
return; // no change needed
}
*old_extern_crate = Some(extern_crate);
drop(old_extern_crate);
// Propagate the extern crate info to dependencies.
extern_crate.direct = false;
for &dep_cnum in cmeta.dependencies.borrow().iter() {
self.update_extern_crate(dep_cnum, extern_crate, visited);
}
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(&mut self,
root: &Option<CratePaths>,
crate_root: &CrateRoot,
metadata: &MetadataBlob,
krate: CrateNum,
span: Span,
dep_kind: DepKind)
-> cstore::CrateNumMap {
debug!("resolving deps of external crate");
if crate_root.macro_derive_registrar.is_some() {
return cstore::CrateNumMap::new();
}
// The map from crate numbers in the crate we're resolving to local crate numbers.
// We map 0 and all other holes in the map to our parent crate. The "additional"
// self-dependencies should be harmless.
::std::iter::once(krate).chain(crate_root.crate_deps
.decode(metadata)
.map(|dep| {
info!("resolving dep crate {} hash: `{}` extra filename: `{}`", dep.name, dep.hash,
dep.extra_filename);
if dep.kind == DepKind::UnexportedMacrosOnly {
return krate;
}
let dep_kind = match dep_kind {
DepKind::MacrosOnly => DepKind::MacrosOnly,
_ => dep.kind,
};
let (local_cnum, ..) = self.resolve_crate(
root, dep.name, dep.name, Some(&dep.hash), Some(&dep.extra_filename), span,
PathKind::Dependency, dep_kind,
);
local_cnum
})).collect()
}
fn read_extension_crate(&mut self, span: Span, orig_name: Symbol, rename: Symbol)
-> ExtensionCrate {
info!("read extension crate `extern crate {} as {}`", orig_name, rename);
let target_triple = &self.sess.opts.target_triple;
let host_triple = TargetTriple::from_triple(config::host_triple());
let is_cross = target_triple != &host_triple;
let mut target_only = false;
let mut locate_ctxt = locator::Context {
sess: self.sess,
span,
ident: orig_name,
crate_name: rename,
hash: None,
extra_filename: None,
filesearch: self.sess.host_filesearch(PathKind::Crate),
target: &self.sess.host,
triple: &host_triple,
root: &None,
rejected_via_hash: vec![],
rejected_via_triple: vec![],
rejected_via_kind: vec![],
rejected_via_version: vec![],
rejected_via_filename: vec![],
should_match_name: true,
is_proc_macro: None,
metadata_loader: &*self.cstore.metadata_loader,
};
let library = self.load(&mut locate_ctxt).or_else(|| {
if !is_cross {
return None
}
            // Try loading from target crates. This will abort later if we
            // try to load a plugin registrar function.
target_only = true;
locate_ctxt.target = &self.sess.target.target;
locate_ctxt.triple = target_triple;
locate_ctxt.filesearch = self.sess.target_filesearch(PathKind::Crate);
self.load(&mut locate_ctxt)
});
let library = match library {
Some(l) => l,
None => locate_ctxt.report_errs(),
};
let (dylib, metadata) = match library {
LoadResult::Previous(cnum) => {
let data = self.cstore.get_crate_data(cnum);
(data.source.dylib.clone(), PMDSource::Registered(data))
}
LoadResult::Loaded(library) => {
let dylib = library.dylib.clone();
let metadata = PMDSource::Owned(library);
(dylib, metadata)
}
};
ExtensionCrate {
metadata,
dylib: dylib.map(|p| p.0),
target_only,
}
}
/// Load custom derive macros.
///
/// Note that this is intentionally similar to how we load plugins today,
/// but also intentionally separate. Plugins are likely always going to be
/// implemented as dynamic libraries, but we have a possible future where
/// custom derive (and other macro-1.1 style features) are implemented via
/// executables and custom IPC.
fn load_derive_macros(&mut self, root: &CrateRoot, dylib: Option<PathBuf>, span: Span)
-> Vec<(ast::Name, Lrc<SyntaxExtension>)> {
use std::{env, mem};
use proc_macro::TokenStream;
use proc_macro::__internal::Registry;
use dynamic_lib::DynamicLibrary;
use syntax_ext::deriving::custom::ProcMacroDerive;
use syntax_ext::proc_macro_impl::{AttrProcMacro, BangProcMacro};
let path = match dylib {
Some(dylib) => dylib,
None => span_bug!(span, "proc-macro crate not dylib"),
};
// Make sure the path contains a / or the linker will search for it.
let path = env::current_dir().unwrap().join(path);
let lib = match DynamicLibrary::open(Some(&path)) {
Ok(lib) => lib,
Err(err) => self.sess.span_fatal(span, &err),
};
let sym = self.sess.generate_derive_registrar_symbol(root.disambiguator);
let registrar = unsafe {
let sym = match lib.symbol(&sym) {
Ok(f) => f,
Err(err) => self.sess.span_fatal(span, &err),
};
mem::transmute::<*mut u8, fn(&mut Registry)>(sym)
};
struct MyRegistrar {
extensions: Vec<(ast::Name, Lrc<SyntaxExtension>)>,
edition: Edition,
}
impl Registry for MyRegistrar {
fn register_custom_derive(&mut self,
trait_name: &str,
expand: fn(TokenStream) -> TokenStream,
attributes: &[&'static str]) {
let attrs = attributes.iter().cloned().map(Symbol::intern).collect::<Vec<_>>();
let derive = ProcMacroDerive::new(expand, attrs.clone());
let derive = SyntaxExtension::ProcMacroDerive(
Box::new(derive), attrs, self.edition
);
self.extensions.push((Symbol::intern(trait_name), Lrc::new(derive)));
}
fn register_attr_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream, TokenStream) -> TokenStream) {
let expand = SyntaxExtension::AttrProcMacro(
Box::new(AttrProcMacro { inner: expand }), self.edition
);
self.extensions.push((Symbol::intern(name), Lrc::new(expand)));
}
fn register_bang_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream) -> TokenStream) {
let expand = SyntaxExtension::ProcMacro(
Box::new(BangProcMacro { inner: expand }), false, self.edition
);
self.extensions.push((Symbol::intern(name), Lrc::new(expand)));
}
}
let mut my_registrar = MyRegistrar { extensions: Vec::new(), edition: root.edition };
registrar(&mut my_registrar);
// Intentionally leak the dynamic library. We can't ever unload it
// since the library can make things that will live arbitrarily long.
mem::forget(lib);
my_registrar.extensions
}
    /// Look for a plugin registrar. Returns the library path and the crate
    /// disambiguator.
pub fn find_plugin_registrar(&mut self,
span: Span,
name: &str)
-> Option<(PathBuf, CrateDisambiguator)> {
let name = Symbol::intern(name);
let ekrate = self.read_extension_crate(span, name, name);
if ekrate.target_only {
// Need to abort before syntax expansion.
let message = format!("plugin `{}` is not available for triple `{}` \
(only found {})",
name,
config::host_triple(),
self.sess.opts.target_triple);
span_fatal!(self.sess, span, E0456, "{}", &message);
}
let root = ekrate.metadata.get_root();
match ekrate.dylib.as_ref() {
Some(dylib) => {
Some((dylib.to_path_buf(), root.disambiguator))
}
None => {
span_err!(self.sess, span, E0457,
"plugin `{}` only found in rlib format, but must be available \
in dylib format",
name);
// No need to abort because the loading code will just ignore this
// empty dylib.
None
}
}
}
fn inject_panic_runtime(&mut self, krate: &ast::Crate) {
// If we're only compiling an rlib, then there's no need to select a
// panic runtime, so we just skip this section entirely.
let any_non_rlib = self.sess.crate_types.borrow().iter().any(|ct| {
*ct != config::CrateTypeRlib
});
if !any_non_rlib {
info!("panic runtime injection skipped, only generating rlib");
self.sess.injected_panic_runtime.set(None);
return
}
// If we need a panic runtime, we try to find an existing one here. At
// the same time we perform some general validation of the DAG we've got
// going such as ensuring everything has a compatible panic strategy.
//
// The logic for finding the panic runtime here is pretty much the same
// as the allocator case with the only addition that the panic strategy
// compilation mode also comes into play.
let desired_strategy = self.sess.panic_strategy();
let mut runtime_found = false;
let mut needs_panic_runtime = attr::contains_name(&krate.attrs,
"needs_panic_runtime");
self.cstore.iter_crate_data(|cnum, data| {
needs_panic_runtime = needs_panic_runtime ||
data.root.needs_panic_runtime;
if data.root.panic_runtime {
// Inject a dependency from all #![needs_panic_runtime] to this
// #![panic_runtime] crate.
self.inject_dependency_if(cnum, "a panic runtime",
&|data| data.root.needs_panic_runtime);
runtime_found = runtime_found || *data.dep_kind.lock() == DepKind::Explicit;
}
});
// If an explicitly linked and matching panic runtime was found, or if
// we just don't need one at all, then we're done here and there's
// nothing else to do.
if !needs_panic_runtime || runtime_found {
self.sess.injected_panic_runtime.set(None);
return
}
// By this point we know that we (a) need a panic runtime and (b) no
// panic runtime was explicitly linked. Here we just load an appropriate
// default runtime for our panic strategy and then inject the
// dependencies.
//
// We may resolve to an already loaded crate (as the crate may not have
// been explicitly linked prior to this) and we may re-inject
// dependencies again, but both of those situations are fine.
//
// Also note that we have yet to perform validation of the crate graph
// in terms of everyone has a compatible panic runtime format, that's
// performed later as part of the `dependency_format` module.
let name = match desired_strategy {
PanicStrategy::Unwind => Symbol::intern("panic_unwind"),
PanicStrategy::Abort => Symbol::intern("panic_abort"),
};
info!("panic runtime not found -- loading {}", name);
let dep_kind = DepKind::Implicit;
let (cnum, data) =
self.resolve_crate(&None, name, name, None, None, DUMMY_SP, PathKind::Crate, dep_kind);
// Sanity check the loaded crate to ensure it is indeed a panic runtime
// and the panic strategy is indeed what we thought it was.
if !data.root.panic_runtime {
self.sess.err(&format!("the crate `{}` is not a panic runtime",
name));
}
if data.root.panic_strategy != desired_strategy {
self.sess.err(&format!("the crate `{}` does not have the panic \
strategy `{}`",
name, desired_strategy.desc()));
}
self.sess.injected_panic_runtime.set(Some(cnum));
self.inject_dependency_if(cnum, "a panic runtime",
&|data| data.root.needs_panic_runtime);
}
fn inject_sanitizer_runtime(&mut self) {
if let Some(ref sanitizer) = self.sess.opts.debugging_opts.sanitizer {
// Sanitizers can only be used on some tested platforms with
// executables linked to `std`
const ASAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu",
"x86_64-apple-darwin"];
const TSAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu",
"x86_64-apple-darwin"];
const LSAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu"];
const MSAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu"];
let supported_targets = match *sanitizer {
Sanitizer::Address => ASAN_SUPPORTED_TARGETS,
Sanitizer::Thread => TSAN_SUPPORTED_TARGETS,
Sanitizer::Leak => LSAN_SUPPORTED_TARGETS,
Sanitizer::Memory => MSAN_SUPPORTED_TARGETS,
};
if !supported_targets.contains(&&*self.sess.target.target.llvm_target) {
self.sess.err(&format!("{:?}Sanitizer only works with the `{}` target",
sanitizer,
supported_targets.join("` or `")
));
return
}
// firstyear 2017 - during testing I was unable to access an OSX machine
// to make this work on different crate types. As a result, today I have
// only been able to test and support linux as a target.
if self.sess.target.target.llvm_target == "x86_64-unknown-linux-gnu" {
if !self.sess.crate_types.borrow().iter().all(|ct| {
match *ct {
// Link the runtime
config::CrateTypeStaticlib |
config::CrateTypeExecutable => true,
// This crate will be compiled with the required
// instrumentation pass
config::CrateTypeRlib |
config::CrateTypeDylib |
config::CrateTypeCdylib =>
false,
_ => {
self.sess.err(&format!("Only executables, staticlibs, \
cdylibs, dylibs and rlibs can be compiled with \
`-Z sanitizer`"));
false
}
}
}) {
return
}
} else {
if !self.sess.crate_types.borrow().iter().all(|ct| {
match *ct {
// Link the runtime
config::CrateTypeExecutable => true,
// This crate will be compiled with the required
// instrumentation pass
config::CrateTypeRlib => false,
_ => {
self.sess.err(&format!("Only executables and rlibs can be \
compiled with `-Z sanitizer`"));
false
}
}
}) {
return
}
}
let mut uses_std = false;
self.cstore.iter_crate_data(|_, data| {
if data.name == "std" {
uses_std = true;
}
});
if uses_std {
let name = match *sanitizer {
Sanitizer::Address => "rustc_asan",
Sanitizer::Leak => "rustc_lsan",
Sanitizer::Memory => "rustc_msan",
Sanitizer::Thread => "rustc_tsan",
};
info!("loading sanitizer: {}", name);
let symbol = Symbol::intern(name);
let dep_kind = DepKind::Explicit;
let (_, data) =
self.resolve_crate(&None, symbol, symbol, None, None, DUMMY_SP,
PathKind::Crate, dep_kind);
// Sanity check the loaded crate to ensure it is indeed a sanitizer runtime
if !data.root.sanitizer_runtime {
self.sess.err(&format!("the crate `{}` is not a sanitizer runtime",
name));
}
} else {
self.sess.err(&format!("Must link std to be compiled with `-Z sanitizer`"));
}
}
}
fn inject_profiler_runtime(&mut self) {
if self.sess.opts.debugging_opts.profile ||
self.sess.opts.debugging_opts.pgo_gen.is_some()
{
info!("loading profiler");
let symbol = Symbol::intern("profiler_builtins");
let dep_kind = DepKind::Implicit;
let (_, data) =
self.resolve_crate(&None, symbol, symbol, None, None, DUMMY_SP,
PathKind::Crate, dep_kind);
// Sanity check the loaded crate to ensure it is indeed a profiler runtime
if !data.root.profiler_runtime {
self.sess.err(&format!("the crate `profiler_builtins` is not \
a profiler runtime"));
}
}
}
fn inject_allocator_crate(&mut self, krate: &ast::Crate) {
let has_global_allocator = has_global_allocator(krate);
self.sess.has_global_allocator.set(has_global_allocator);
// Check to see if we actually need an allocator. This desire comes
// about through the `#![needs_allocator]` attribute and is typically
// written down in liballoc.
let mut needs_allocator = attr::contains_name(&krate.attrs,
"needs_allocator");
self.cstore.iter_crate_data(|_, data| {
needs_allocator = needs_allocator || data.root.needs_allocator;
});
if !needs_allocator {
self.sess.injected_allocator.set(None);
self.sess.allocator_kind.set(None);
return
}
// At this point we've determined that we need an allocator. Let's see
// if our compilation session actually needs an allocator based on what
// we're emitting.
let mut need_lib_alloc = false;
let mut need_exe_alloc = false;
for ct in self.sess.crate_types.borrow().iter() {
match *ct {
config::CrateTypeExecutable => need_exe_alloc = true,
config::CrateTypeDylib |
config::CrateTypeProcMacro |
config::CrateTypeCdylib |
config::CrateTypeStaticlib => need_lib_alloc = true,
config::CrateTypeRlib => {}
}
}
if !need_lib_alloc && !need_exe_alloc {
self.sess.injected_allocator.set(None);
self.sess.allocator_kind.set(None);
return
}
// Ok, we need an allocator. Not only that but we're actually going to
// create an artifact that needs one linked in. Let's go find the one
// that we're going to link in.
//
// First up we check for global allocators. Look at the crate graph here
// and see what's a global allocator, including if we ourselves are a
// global allocator.
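// The nested `Option` below tracks what has been seen so far: `None` means no
// global allocator yet, `Some(None)` means this crate itself defines one, and
// `Some(Some(name))` means the upstream crate `name` defines one.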
let mut global_allocator = if has_global_allocator {
Some(None)
} else {
None
};
self.cstore.iter_crate_data(|_, data| {
if !data.root.has_global_allocator {
return
}
match global_allocator {
Some(Some(other_crate)) => {
self.sess.err(&format!("the #[global_allocator] in {} \
conflicts with this global \
allocator in: {}",
other_crate,
data.root.name));
}
Some(None) => {
self.sess.err(&format!("the #[global_allocator] in this \
crate conflicts with global \
allocator in: {}", data.root.name));
}
None => global_allocator = Some(Some(data.root.name)),
}
});
if global_allocator.is_some() {
self.sess.allocator_kind.set(Some(AllocatorKind::Global));
self.sess.injected_allocator.set(None);
return
}
// Ok we haven't found a global allocator but we still need an
// allocator. At this point we'll either fall back to the "library
// allocator" or the "exe allocator" depending on a few variables. Let's
// figure out which one.
//
// Note that here we favor linking to the "library allocator" as much as
// possible. If we're not creating rustc's version of libstd
// (need_lib_alloc and prefer_dynamic) then we select `None`, and if the
// exe allocation crate doesn't exist for this target then we also
// select `None`.
let exe_allocation_crate_data =
if need_lib_alloc && !self.sess.opts.cg.prefer_dynamic {
None
} else {
self.sess
.target
.target
.options
.exe_allocation_crate
.as_ref()
.map(|name| {
// We've determined that we're injecting an "exe allocator" which means
// that we're going to load up a whole new crate. An example of this is
// that we're producing a normal binary on Linux which means we need to
// load the `alloc_jemalloc` crate to link as an allocator.
let name = Symbol::intern(name);
let (cnum, data) = self.resolve_crate(&None,
name,
name,
None,
None,
DUMMY_SP,
PathKind::Crate,
DepKind::Implicit);
self.sess.injected_allocator.set(Some(cnum));
data
})
};
let allocation_crate_data = exe_allocation_crate_data.or_else(|| {
// No allocator was injected
self.sess.injected_allocator.set(None);
if attr::contains_name(&krate.attrs, "default_lib_allocator") {
// Prefer self as the allocator if there's a collision
return None;
}
// We're not actually going to inject an allocator, we're going to
// require that something in our crate graph is the default lib
// allocator. This is typically libstd, so this'll rarely be an
// error.
let mut allocator = None;
self.cstore.iter_crate_data(|_, data| {
if allocator.is_none() && data.root.has_default_lib_allocator {
allocator = Some(data.clone());
}
});
allocator
});
match allocation_crate_data {
Some(data) => {
// We have an allocator. We detect separately what kind it is, to allow for some
// flexibility in misconfiguration.
let attrs = data.get_item_attrs(CRATE_DEF_INDEX, self.sess);
let kind_interned = attr::first_attr_value_str_by_name(&attrs, "rustc_alloc_kind")
.map(Symbol::as_str);
let kind_str = kind_interned
.as_ref()
.map(|s| s as &str);
let alloc_kind = match kind_str {
None |
Some("lib") => AllocatorKind::DefaultLib,
Some("exe") => AllocatorKind::DefaultExe,
Some(other) => {
self.sess.err(&format!("Allocator kind {} not known", other));
return;
}
};
self.sess.allocator_kind.set(Some(alloc_kind));
},
None => {
if !attr::contains_name(&krate.attrs, "default_lib_allocator") {
self.sess.err("no global memory allocator found but one is \
required; link to std or \
add #[global_allocator] to a static item \
that implements the GlobalAlloc trait.");
return;
}
self.sess.allocator_kind.set(Some(AllocatorKind::DefaultLib));
}
}
fn has_global_allocator(krate: &ast::Crate) -> bool {
struct Finder(bool);
let mut f = Finder(false);
visit::walk_crate(&mut f, krate);
return f.0;
impl<'ast> visit::Visitor<'ast> for Finder {
fn visit_item(&mut self, i: &'ast ast::Item) {
if attr::contains_name(&i.attrs, "global_allocator") {
self.0 = true;
}
visit::walk_item(self, i)
}
}
}
}
fn inject_dependency_if(&self,
krate: CrateNum,
what: &str,
needs_dep: &Fn(&cstore::CrateMetadata) -> bool) {
// don't perform this validation if the session has errors, as one of
// those errors may indicate a circular dependency which could cause
// this to stack overflow.
if self.sess.has_errors() {
return
}
// Before we inject any dependencies, make sure we don't inject a
// circular dependency by validating that this crate doesn't
// transitively depend on any crates satisfying `needs_dep`.
for dep in self.cstore.crate_dependencies_in_rpo(krate) {
let data = self.cstore.get_crate_data(dep);
if needs_dep(&data) {
self.sess.err(&format!("the crate `{}` cannot depend \
on a crate that needs {}, but \
it depends on `{}`",
self.cstore.get_crate_data(krate).root.name,
what,
data.root.name));
}
}
// All crates satisfying `needs_dep` do not explicitly depend on the
// crate provided for this compile, but in order for this compilation to
// be successfully linked we need to inject a dependency (to order the
// crates on the command line correctly).
self.cstore.iter_crate_data(|cnum, data| {
if !needs_dep(data) {
return
}
info!("injecting a dep from {} to {}", cnum, krate);
data.dependencies.borrow_mut().push(krate);
});
}
}
impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {
fn postprocess(&mut self, krate: &ast::Crate) {
// inject the sanitizer runtime before the allocator runtime because all
// sanitizers force the use of the `alloc_system` allocator
self.inject_sanitizer_runtime();
self.inject_profiler_runtime();
self.inject_allocator_crate(krate);
self.inject_panic_runtime(krate);
if log_enabled!(log::Level::Info) {
dump_crates(&self.cstore);
}
}
fn process_extern_crate(&mut self, item: &ast::Item, definitions: &Definitions) -> CrateNum {
match item.node {
ast::ItemKind::ExternCrate(orig_name) => {
debug!("resolving extern crate stmt. ident: {} orig_name: {:?}",
item.ident, orig_name);
let orig_name = match orig_name {
Some(orig_name) => {
validate_crate_name(Some(self.sess), &orig_name.as_str(),
Some(item.span));
orig_name
}
None => item.ident.name,
};
let dep_kind = if attr::contains_name(&item.attrs, "no_link") {
DepKind::UnexportedMacrosOnly
} else {
DepKind::Explicit
};
let (cnum, ..) = self.resolve_crate(
&None, item.ident.name, orig_name, None, None,
item.span, PathKind::Crate, dep_kind,
);
let def_id = definitions.opt_local_def_id(item.id).unwrap();
let path_len = definitions.def_path(def_id.index).data.len();
self.update_extern_crate(
cnum,
ExternCrate {
src: ExternCrateSource::Extern(def_id),
span: item.span,
path_len,
direct: true,
},
&mut FxHashSet(),
);
self.cstore.add_extern_mod_stmt_cnum(item.id, cnum);
cnum
}
_ => bug!(),
}
}
fn process_path_extern(
&mut self,
name: Symbol,
span: Span,
) -> CrateNum {
let cnum = self.resolve_crate(
&None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
).0;
self.update_extern_crate(
cnum,
ExternCrate {
src: ExternCrateSource::Path,
span,
// to have the least priority in `update_extern_crate`
path_len: usize::max_value(),
direct: true,
},
&mut FxHashSet(),
);
cnum
}
fn process_use_extern(
&mut self,
name: Symbol,
span: Span,
id: ast::NodeId,
definitions: &Definitions,
) -> CrateNum {
let cnum = self.resolve_crate(
&None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
).0;
let def_id = definitions.opt_local_def_id(id).unwrap();
let path_len = definitions.def_path(def_id.index).data.len();
self.update_extern_crate(
cnum,
ExternCrate {
src: ExternCrateSource::Use,
span,
path_len,
direct: true,
},
&mut FxHashSet(),
);
cnum
}
}
| 41.472151 | 99 | 0.514009 |
7a9eaf2b4fecd2581e5caf99f46742d2936455e7 | 14,991 | use crate::test;
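// Test suite for jq-style builtin filters. Each `test!` invocation below appears
// to take four arguments: a test name, a filter program, a JSON input, and the
// expected output (one JSON value per line).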
test!(
length1,
r#"
.[] | length
"#,
r#"
[[1,2], "string", {"a":2}, null]
"#,
r#"
2
6
1
0
"#
);
test!(
utf8bytelength1,
r#"
utf8bytelength
"#,
"
\"\u{03bc}\"
",
r#"
2
"#
);
test!(
keys1,
r#"
keys
"#,
r#"
{"abc": 1, "abcd": 2, "Foo": 3}
"#,
r#"
["Foo", "abc", "abcd"]
"#
);
test!(
keys2,
r#"
keys
"#,
r#"
[42,3,35]
"#,
r#"
[0,1,2]
"#
);
test!(
has1,
r#"
map(has("foo"))
"#,
r#"
[{"foo": 42}, {}]
"#,
r#"
[true, false]
"#
);
test!(
has2,
r#"
map(has(2))
"#,
r#"
[[0,1], ["a","b","c"]]
"#,
r#"
[false, true]
"#
);
test!(
in1,
r#"
.[] | in({"foo": 42})
"#,
r#"
["foo", "bar"]
"#,
r#"
true
false
"#
);
test!(
in2,
r#"
map(in([0,1]))
"#,
r#"
[2, 0]
"#,
r#"
[false, true]
"#
);
test!(
map1,
r#"
map(.+1)
"#,
r#"
[1,2,3]
"#,
r#"
[2,3,4]
"#
);
test!(
map2,
r#"
map_values(.+1)
"#,
r#"
{"a": 1, "b": 2, "c": 3}
"#,
r#"
{"a": 2, "b": 3, "c": 4}
"#
);
test!(
path1,
r#"
path(.a[0].b)
"#,
r#"
null
"#,
r#"
["a",0,"b"]
"#
);
test!(
path2,
r#"
[path(..)]
"#,
r#"
{"a":[{"b":1}]}
"#,
r#"
[[],["a"],["a",0],["a",0,"b"]]
"#
);
test!(
del1,
r#"
del(.foo)
"#,
r#"
{"foo": 42, "bar": 9001, "baz": 42}
"#,
r#"
{"bar": 9001, "baz": 42}
"#
);
test!(
del2,
r#"
del(.[1, 2])
"#,
r#"
["foo", "bar", "baz"]
"#,
r#"
["foo"]
"#
);
test!(
getpath1,
r#"
getpath(["a","b"])
"#,
r#"
null
"#,
r#"
null
"#
);
test!(
getpath2,
r#"
[getpath(["a","b"], ["a","c"])]
"#,
r#"
{"a":{"b":0, "c":1}}
"#,
r#"
[0, 1]
"#
);
test!(
setpath1,
r#"
setpath(["a","b"]; 1)
"#,
r#"
null
"#,
r#"
{"a": {"b": 1}}
"#
);
test!(
setpath2,
r#"
setpath(["a","b"]; 1)
"#,
r#"
{"a":{"b":0}}
"#,
r#"
{"a": {"b": 1}}
"#
);
test!(
setpath3,
r#"
setpath([0,"a"]; 1)
"#,
r#"
null
"#,
r#"
[{"a":1}]
"#
);
test!(
delpaths1,
r#"
delpaths([["a","b"]])
"#,
r#"
{"a":{"b":1},"x":{"y":2}}
"#,
r#"
{"a":{},"x":{"y":2}}
"#
);
test!(
entries1,
r#"
to_entries
"#,
r#"
{"a": 1, "b": 2}
"#,
r#"
[{"key":"a", "value":1}, {"key":"b", "value":2}]
"#
);
test!(
entries2,
r#"
from_entries
"#,
r#"
[{"key":"a", "value":1}, {"key":"b", "value":2}]
"#,
r#"
{"a": 1, "b": 2}
"#
);
test!(
entries3,
r#"
with_entries(.key |= "KEY_" + .)
"#,
r#"
{"a": 1, "b": 2}
"#,
r#"
{"KEY_a": 1, "KEY_b": 2}
"#
);
test!(
select1,
r#"
map(select(. >= 2))
"#,
r#"
[1,5,3,0,7]
"#,
r#"
[5,3,7]
"#
);
test!(
select2,
r#"
.[] | select(.id == "second")
"#,
r#"
[{"id": "first", "val": 1}, {"id": "second", "val": 2}]
"#,
r#"
{"id": "second", "val": 2}
"#
);
test!(
typefilter1,
r#"
.[]|numbers
"#,
r#"
[[],{},1,"foo",null,true,false]
"#,
r#"
1
"#
);
test!(
empty1,
r#"
1, empty, 2
"#,
r#"
null
"#,
r#"
1
2
"#
);
test!(
empty2,
r#"
[1,2,empty,3]
"#,
r#"
null
"#,
r#"
[1,2,3]
"#
);
/*
TODO: Support $__loc__
test!(
loc1,
r#"
try error("\($__loc__)") catch .
"#,
r#"
null
"#,
r#"
"{\"file\":\"<top-level>\",\"line\":1}"
"#
);
*/
test!(
paths1,
r#"
[paths]
"#,
r#"
[1,[[],{"a":2}]]
"#,
r#"
[[0],[1],[1,0],[1,1],[1,1,"a"]]
"#
);
test!(
paths2,
r#"
[paths(scalars)]
"#,
r#"
[1,[[],{"a":2}]]
"#,
r#"
[[0],[1,1,"a"]]
"#
);
test!(
add1,
r#"
add
"#,
r#"
["a","b","c"]
"#,
r#"
"abc"
"#
);
test!(
add2,
r#"
add
"#,
r#"
[1, 2, 3]
"#,
r#"
6
"#
);
test!(
add3,
r#"
add
"#,
r#"
[]
"#,
r#"
null
"#
);
test!(
any1,
r#"
any
"#,
r#"
[true, false]
"#,
r#"
true
"#
);
test!(
any2,
r#"
any
"#,
r#"
[false, false]
"#,
r#"
false
"#
);
test!(
any3,
r#"
any
"#,
r#"
[]
"#,
r#"
false
"#
);
test!(
all1,
r#"
all
"#,
r#"
[true, false]
"#,
r#"
false
"#
);
test!(
all2,
r#"
all
"#,
r#"
[true, true]
"#,
r#"
true
"#
);
test!(
all3,
r#"
all
"#,
r#"
[]
"#,
r#"
true
"#
);
test!(
flatten1,
r#"
flatten
"#,
r#"
[1, [2], [[3]]]
"#,
r#"
[1, 2, 3]
"#
);
test!(
flatten2,
r#"
flatten(1)
"#,
r#"
[1, [2], [[3]]]
"#,
r#"
[1, 2, [3]]
"#
);
test!(
flatten3,
r#"
flatten
"#,
r#"
[[]]
"#,
r#"
[]
"#
);
test!(
flatten4,
r#"
flatten
"#,
r#"
[{"foo": "bar"}, [{"foo": "baz"}]]
"#,
r#"
[{"foo": "bar"}, {"foo": "baz"}]
"#
);
test!(
range1,
r#"
range(2;4)
"#,
r#"
null
"#,
r#"
2
3
"#
);
test!(
range2,
r#"
[range(2;4)]
"#,
r#"
null
"#,
r#"
[2,3]
"#
);
test!(
range3,
r#"
[range(4)]
"#,
r#"
null
"#,
r#"
[0,1,2,3]
"#
);
test!(
range4,
r#"
[range(0;10;3)]
"#,
r#"
null
"#,
r#"
[0,3,6,9]
"#
);
test!(
range5,
r#"
[range(0;10;-1)]
"#,
r#"
null
"#,
r#"
[]
"#
);
test!(
range6,
r#"
[range(0;-5;-1)]
"#,
r#"
null
"#,
r#"
[0,-1,-2,-3,-4]
"#
);
test!(
floor1,
r#"
floor
"#,
r#"
3.14159
"#,
r#"
3
"#
);
test!(
sqrt1,
r#"
sqrt
"#,
r#"
9
"#,
r#"
3
"#
);
test!(
tonumber1,
r#"
.[] | tonumber
"#,
r#"
[1, "1"]
"#,
r#"
1
1
"#
);
test!(
tostring1,
r#"
.[] | tostring
"#,
r#"
[1, "1", [1]]
"#,
r#"
"1"
"1"
"[1]"
"#
);
test!(
type1,
r#"
map(type)
"#,
r#"
[0, false, [], {}, null, "hello"]
"#,
r#"
["number", "boolean", "array", "object", "null", "string"]
"#
);
test!(
floating_point1,
r#"
.[] | (infinite * .) < 0
"#,
r#"
[-1, 1]
"#,
r#"
true
false
"#
);
test!(
floating_point2,
r#"
infinite, nan | type
"#,
r#"
null
"#,
r#"
"number"
"number"
"#
);
test!(
sort1,
r#"
sort
"#,
r#"
[8,3,null,6]
"#,
r#"
[null,3,6,8]
"#
);
test!(
sort2,
r#"
sort_by(.foo)
"#,
r#"
[{"foo":4, "bar":10}, {"foo":3, "bar":100}, {"foo":2, "bar":1}]
"#,
r#"
[{"foo":2, "bar":1}, {"foo":3, "bar":100}, {"foo":4, "bar":10}]
"#
);
test!(
group_by1,
r#"
group_by(.foo)
"#,
r#"
[{"foo":1, "bar":10}, {"foo":3, "bar":100}, {"foo":1, "bar":1}]
"#,
r#"
[[{"foo":1, "bar":10}, {"foo":1, "bar":1}], [{"foo":3, "bar":100}]]
"#
);
test!(
min_max1,
r#"
min
"#,
r#"
[5,4,2,7]
"#,
r#"
2
"#
);
test!(
min_max2,
r#"
max_by(.foo)
"#,
r#"
[{"foo":1, "bar":14}, {"foo":2, "bar":3}]
"#,
r#"
{"foo":2, "bar":3}
"#
);
test!(
unique1,
r#"
unique
"#,
r#"
[1,2,5,3,5,3,1,3]
"#,
r#"
[1,2,3,5]
"#
);
test!(
unique2,
r#"
unique_by(.foo)
"#,
r#"
[{"foo": 1, "bar": 2}, {"foo": 1, "bar": 3}, {"foo": 4, "bar": 5}]
"#,
r#"
[{"foo": 1, "bar": 2}, {"foo": 4, "bar": 5}]
"#
);
test!(
unique3,
r#"
unique_by(length)
"#,
r#"
["chunky", "bacon", "kitten", "cicada", "asparagus"]
"#,
r#"
["bacon", "chunky", "asparagus"]
"#
);
test!(
reverse1,
r#"
reverse
"#,
r#"
[1,2,3,4]
"#,
r#"
[4,3,2,1]
"#
);
test!(
contains1,
r#"
contains("bar")
"#,
r#"
"foobar"
"#,
r#"
true
"#
);
test!(
contains2,
r#"
contains(["baz", "bar"])
"#,
r#"
["foobar", "foobaz", "blarp"]
"#,
r#"
true
"#
);
test!(
contains3,
r#"
contains(["bazzzzz", "bar"])
"#,
r#"
["foobar", "foobaz", "blarp"]
"#,
r#"
false
"#
);
test!(
contains4,
r#"
contains({foo: 12, bar: [{barp: 12}]})
"#,
r#"
{"foo": 12, "bar":[1,2,{"barp":12, "blip":13}]}
"#,
r#"
true
"#
);
test!(
contains5,
r#"
contains({foo: 12, bar: [{barp: 15}]})
"#,
r#"
{"foo": 12, "bar":[1,2,{"barp":12, "blip":13}]}
"#,
r#"
false
"#
);
test!(
indices1,
r#"
indices(", ")
"#,
r#"
"a,b, cd, efg, hijk"
"#,
r#"
[3,7,12]
"#
);
test!(
indices2,
r#"
indices(1)
"#,
r#"
[0,1,2,1,3,1,4]
"#,
r#"
[1,3,5]
"#
);
test!(
indices3,
r#"
indices([1,2])
"#,
r#"
[0,1,2,3,1,4,2,5,1,2,6,7]
"#,
r#"
[1,8]
"#
);
test!(
index1,
r#"
index(", ")
"#,
r#"
"a,b, cd, efg, hijk"
"#,
r#"
3
"#
);
test!(
index2,
r#"
rindex(", ")
"#,
r#"
"a,b, cd, efg, hijk"
"#,
r#"
12
"#
);
test!(
inside1,
r#"
inside("foobar")
"#,
r#"
"bar"
"#,
r#"
true
"#
);
test!(
inside2,
r#"
inside(["foobar", "foobaz", "blarp"])
"#,
r#"
["baz", "bar"]
"#,
r#"
true
"#
);
test!(
inside3,
r#"
inside(["foobar", "foobaz", "blarp"])
"#,
r#"
["bazzzzz", "bar"]
"#,
r#"
false
"#
);
test!(
inside4,
r#"
inside({"foo": 12, "bar":[1,2,{"barp":12, "blip":13}]})
"#,
r#"
{"foo": 12, "bar": [{"barp": 12}]}
"#,
r#"
true
"#
);
test!(
inside5,
r#"
inside({"foo": 12, "bar":[1,2,{"barp":12, "blip":13}]})
"#,
r#"
{"foo": 12, "bar": [{"barp": 15}]}
"#,
r#"
false
"#
);
test!(
startswith1,
r#"
[.[]|startswith("foo")]
"#,
r#"
["fo", "foo", "barfoo", "foobar", "barfoob"]
"#,
r#"
[false, true, false, true, false]
"#
);
test!(
endswith1,
r#"
[.[]|endswith("foo")]
"#,
r#"
["foobar", "barfoo"]
"#,
r#"
[false, true]
"#
);
test!(
combinations1,
r#"
combinations
"#,
r#"
[[1,2], [3, 4]]
"#,
r#"
[1, 3]
[1, 4]
[2, 3]
[2, 4]
"#
);
test!(
combinations2,
r#"
combinations(2)
"#,
r#"
[0,1]
"#,
r#"
[0, 0]
[0, 1]
[1, 0]
[1, 1]
"#
);
test!(
ltrimstr1,
r#"
[.[]|ltrimstr("foo")]
"#,
r#"
["fo", "foo", "barfoo", "foobar", "afoo"]
"#,
r#"
["fo","","barfoo","bar","afoo"]
"#
);
test!(
rtrimstr1,
r#"
[.[]|rtrimstr("foo")]
"#,
r#"
["fo", "foo", "barfoo", "foobar", "foob"]
"#,
r#"
["fo","","bar","foobar","foob"]
"#
);
test!(
explode1,
r#"
explode
"#,
r#"
"foobar"
"#,
r#"
[102,111,111,98,97,114]
"#
);
test!(
implode1,
r#"
implode
"#,
r#"
[65, 66, 67]
"#,
r#"
"ABC"
"#
);
test!(
split1,
r#"
split(", ")
"#,
r#"
"a, b,c,d, e, "
"#,
r#"
["a","b,c,d","e",""]
"#
);
test!(
join1,
r#"
join(", ")
"#,
r#"
["a","b,c,d","e",""]
"#,
r#"
"a, b,c,d, e, "
"#
);
test!(
join2,
r#"
join(" ")
"#,
r#"
["a",1,2.3,true,null,false]
"#,
r#"
"a 1 2.3 true false"
"#
);
test!(
while1,
r#"
[while(.<100; .*2)]
"#,
r#"
1
"#,
r#"
[1,2,4,8,16,32,64]
"#
);
test!(
until1,
r#"
[.,1]|until(.[0] < 1; [.[0] - 1, .[1] * .[0]])|.[1]
"#,
r#"
4
"#,
r#"
24
"#
);
test!(
recurse1,
r#"
recurse(.foo[])
"#,
r#"
{"foo":[{"foo": []}, {"foo":[{"foo":[]}]}]}
"#,
r#"
{"foo":[{"foo":[]},{"foo":[{"foo":[]}]}]}
{"foo":[]}
{"foo":[{"foo":[]}]}
{"foo":[]}
"#
);
test!(
recurse2,
r#"
recurse
"#,
r#"
{"a":0,"b":[1]}
"#,
r#"
{"a":0,"b":[1]}
0
[1]
1
"#
);
test!(
recurse3,
r#"
recurse(. * .; . < 20)
"#,
r#"
2
"#,
r#"
2
4
16
"#
);
test!(
walk1,
r#"
walk(if type == "array" then sort else . end)
"#,
r#"
[[4, 1, 7], [8, 5, 2], [3, 6, 9]]
"#,
r#"
[[1,4,7],[2,5,8],[3,6,9]]
"#
);
/*
TODO: Implement `sub`
test!(
walk2,
r#"
walk( if type == "object" then with_entries( .key |= sub( "^_+"; "") ) else . end )
"#,
r#"
[ { "_a": { "__b": 2 } } ]
"#,
r#"
[{"a":{"b":2}}]
"#
);
*/
/*
TODO: Implement `env`
test!(
env1,
r#"
$ENV.PAGER
"#,
r#"
null
"#,
r#"
"less"
"#
);
test!(
env2,
r#"
env.PAGER
"#,
r#"
null
"#,
r#"
"less"
"#
);
*/
test!(
transpose1,
r#"
transpose
"#,
r#"
[[1], [2,3]]
"#,
r#"
[[1,2],[null,3]]
"#
);
test!(
bsearch1,
r#"
bsearch(0)
"#,
r#"
[0, 1]
"#,
r#"
0
"#
);
test!(
bsearch2,
r#"
bsearch(0)
"#,
r#"
[1,2,3]
"#,
r#"
-1
"#
);
test!(
bsearch3,
r#"
bsearch(4) as $ix | if $ix < 0 then .[-(1+$ix)] = 4 else . end
"#,
r#"
[1,2,3]
"#,
r#"
[1,2,3,4]
"#
);
test!(
string_interpolation1,
r#"
"The input was \(.), which is one less than \(.+1)"
"#,
r#"
42
"#,
r#"
"The input was 42, which is one less than 43"
"#
);
test!(
to_from_json1,
r#"
[.[]|tostring]
"#,
r#"
[1, "foo", ["foo"]]
"#,
r#"
["1","foo","[\"foo\"]"]
"#
);
test!(
to_from_json2,
r#"
[.[]|tojson]
"#,
r#"
[1, "foo", ["foo"]]
"#,
r#"
["1","\"foo\"","[\"foo\"]"]
"#
);
test!(
to_from_json3,
r#"
[.[]|tojson|fromjson]
"#,
r#"
[1, "foo", ["foo"]]
"#,
r#"
[1,"foo",["foo"]]
"#
);
// TODO: Add more
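// A possible further case in the same style, assuming a `min_by` builtin exists
// alongside the `max_by` exercised above (hypothetical; the expected output mirrors
// the behaviour documented in the jq manual for `min_by`).
test!(
min_max3,
r#"
min_by(.foo)
"#,
r#"
[{"foo":1, "bar":14}, {"foo":2, "bar":3}]
"#,
r#"
{"foo":1, "bar":14}
"#
);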
| 10.303093 | 87 | 0.28944 |