hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
16e59ad9320fd9e1a46324c47659e2bc14f0154c | 2,421 | use std::collections::HashMap;
use zcash_primitives::{
consensus::{self, BlockHeight},
memo::MemoBytes,
sapling::{
note_encryption::{try_sapling_note_decryption, try_sapling_output_recovery},
Note, PaymentAddress,
},
transaction::Transaction,
zip32::{AccountId, ExtendedFullViewingKey},
};
/// A decrypted shielded output.
pub struct DecryptedOutput {
    /// The index of the output within [`shielded_outputs`].
    ///
    /// [`shielded_outputs`]: zcash_primitives::transaction::TransactionData
    pub index: usize,
    /// The note within the output.
    pub note: Note,
    /// The account that decrypted the note.
    pub account: AccountId,
    /// The address the note was sent to.
    pub to: PaymentAddress,
    /// The memo bytes included with the note.
    pub memo: MemoBytes,
    /// True if this output was recovered using an [`OutgoingViewingKey`], meaning that
    /// this is a logical output of the transaction.
    ///
    /// `false` means the note was decrypted with the account's incoming viewing key,
    /// i.e. the note was received by the account rather than sent by it.
    ///
    /// [`OutgoingViewingKey`]: zcash_primitives::keys::OutgoingViewingKey
    pub outgoing: bool,
}
/// Scans a [`Transaction`] for any information that can be decrypted by the set of
/// [`ExtendedFullViewingKey`]s.
///
/// For every (account, key) pair, each Sapling shielded output is first tried with
/// the incoming viewing key, then (on failure) with the outgoing viewing key; outputs
/// that match neither are skipped.
pub fn decrypt_transaction<P: consensus::Parameters>(
    params: &P,
    height: BlockHeight,
    tx: &Transaction,
    extfvks: &HashMap<AccountId, ExtendedFullViewingKey>,
) -> Vec<DecryptedOutput> {
    let mut decrypted = Vec::new();
    let bundle = match tx.sapling_bundle() {
        Some(bundle) => bundle,
        // No Sapling component: nothing can be decrypted.
        None => return decrypted,
    };
    for (account, extfvk) in extfvks {
        let ivk = extfvk.fvk.vk.ivk();
        let ovk = extfvk.fvk.ovk;
        for (index, output) in bundle.shielded_outputs.iter().enumerate() {
            let (plaintext, outgoing) =
                if let Some(ret) = try_sapling_note_decryption(params, height, &ivk, output) {
                    (ret, false)
                } else if let Some(ret) = try_sapling_output_recovery(params, height, &ovk, output)
                {
                    (ret, true)
                } else {
                    continue;
                };
            let (note, to, memo) = plaintext;
            decrypted.push(DecryptedOutput {
                index,
                note,
                account: *account,
                to,
                memo,
                outgoing,
            })
        }
    }
    decrypted
}
| 32.716216 | 97 | 0.569599 |
2f14716acd0da0f360594cd83ca95e3e56fb7760 | 2,450 | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::{
idhasher::BuildIdHasher,
string::{self, IntoUtf8Bytes, StringId},
};
use indexmap::IndexMap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::{
collections::{HashMap, HashSet},
fmt::{self, Formatter},
str::FromStr,
};
// StringKey is a small impedance matcher around StringId.
// NOTE in particular that it does NOT do de-duplicating serde.
// and that its ordering is arbitrary (rather than string order).
/// An interned string, represented as a compact id.
///
/// Ordering is by id (arbitrary with respect to string content), and serde
/// round-trips the full string each time — see the module notes above.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
#[repr(transparent)]
pub struct StringKey(StringId);

/// Hash map keyed by [`StringKey`], using the id-based hasher.
pub type StringKeyMap<V> = HashMap<StringKey, V, BuildIdHasher<u32>>;
/// Hash set of [`StringKey`]s, using the id-based hasher.
pub type StringKeySet = HashSet<StringKey, BuildIdHasher<u32>>;
/// Insertion-ordered map keyed by [`StringKey`].
pub type StringKeyIndexMap<V> = IndexMap<StringKey, V, BuildIdHasher<u32>>;
/// Interns a value, producing a [`StringKey`].
///
/// Blanket-implemented for every type that can convert itself to UTF-8 bytes
/// (see [`IntoUtf8Bytes`]), so e.g. `"foo".intern()` works directly.
pub trait Intern: IntoUtf8Bytes {
    fn intern(self) -> StringKey {
        StringKey(string::intern(self))
    }
}

impl<T: IntoUtf8Bytes> Intern for T {}
impl StringKey {
pub fn lookup(self) -> &'static str {
self.0.as_str()
}
}
impl fmt::Display for StringKey {
    /// Writes the interned string verbatim.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Equivalent to `write!(f, "{}", ...)`: default formatting of a str
        // with no width/precision is a plain write.
        f.write_str(self.lookup())
    }
}

impl fmt::Debug for StringKey {
    /// Writes the interned string in quoted, escaped form.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let text = self.lookup();
        write!(f, "{:?}", text)
    }
}
impl Serialize for StringKey {
    /// Serializes as the full string (the id is process-local and never
    /// written out).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.lookup())
    }
}

impl<'de> Deserialize<'de> for StringKey {
    /// Deserializes a string and interns it.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let text = String::deserialize(deserializer)?;
        Ok(text.intern())
    }
}
impl FromStr for StringKey {
    /// Parsing never fails: any string can be interned.
    type Err = std::convert::Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(s.intern())
    }
}
/// Interns a string literal once per call site.
///
/// Expands to a lazily-initialized static, so repeated executions of the
/// same call site reuse the first interned key. Any non-literal argument is
/// rejected at compile time by the second arm.
#[macro_export]
macro_rules! intern {
    ($value:literal) => {{
        use $crate::{string::Lazy, string_key::Intern};
        static INSTANCE: Lazy<$crate::string_key::StringKey> = Lazy::new(|| $value.intern());
        *INSTANCE
    }};
    ($_:expr) => {
        compile_error!("intern! macro can only be used with string literals.")
    };
}
| 26.06383 | 93 | 0.638367 |
e4633022bc320e71f6b32c4312c73c00d5c4a13b | 6,158 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
// NOTE(review): this file carries a smithy-rs "DO NOT EDIT" header; changes
// below are comments only.
/// All possible error types for this service.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum Error {
    /// <p>You do not have permission to perform an action.</p>
    AccessForbidden(crate::error::AccessForbidden),
    /// <p>An internal failure occurred. Try your request again. If the problem persists, contact AWS customer support.</p>
    InternalFailure(crate::error::InternalFailure),
    /// <p>A resource that is required to perform an action was not found.</p>
    ResourceNotFound(crate::error::ResourceNotFound),
    /// <p>The service is currently unavailable.</p>
    ServiceUnavailable(crate::error::ServiceUnavailable),
    /// <p>There was an error validating your request.</p>
    ValidationError(crate::error::ValidationError),
    /// An unhandled error occurred.
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for Error {
    // Delegates to the wrapped operation error's own `Display` impl.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::AccessForbidden(inner) => inner.fmt(f),
            Error::InternalFailure(inner) => inner.fmt(f),
            Error::ResourceNotFound(inner) => inner.fmt(f),
            Error::ServiceUnavailable(inner) => inner.fmt(f),
            Error::ValidationError(inner) => inner.fmt(f),
            Error::Unhandled(inner) => inner.fmt(f),
        }
    }
}
// Collapses a `BatchGetRecord` SDK error into the service-wide `Error`.
// Service errors map variant-for-variant; everything else (dispatch,
// timeout, response-parse failures) becomes `Unhandled`.
impl<R> From<aws_smithy_http::result::SdkError<crate::error::BatchGetRecordError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::BatchGetRecordError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::BatchGetRecordErrorKind::AccessForbidden(inner) => {
                    Error::AccessForbidden(inner)
                }
                crate::error::BatchGetRecordErrorKind::InternalFailure(inner) => {
                    Error::InternalFailure(inner)
                }
                crate::error::BatchGetRecordErrorKind::ServiceUnavailable(inner) => {
                    Error::ServiceUnavailable(inner)
                }
                crate::error::BatchGetRecordErrorKind::ValidationError(inner) => {
                    Error::ValidationError(inner)
                }
                crate::error::BatchGetRecordErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
// Collapses a `DeleteRecord` SDK error into the service-wide `Error`;
// same shape as the `BatchGetRecord` conversion above.
impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeleteRecordError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::DeleteRecordError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::DeleteRecordErrorKind::AccessForbidden(inner) => {
                    Error::AccessForbidden(inner)
                }
                crate::error::DeleteRecordErrorKind::InternalFailure(inner) => {
                    Error::InternalFailure(inner)
                }
                crate::error::DeleteRecordErrorKind::ServiceUnavailable(inner) => {
                    Error::ServiceUnavailable(inner)
                }
                crate::error::DeleteRecordErrorKind::ValidationError(inner) => {
                    Error::ValidationError(inner)
                }
                crate::error::DeleteRecordErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
// Collapses a `GetRecord` SDK error into the service-wide `Error`. Unlike
// the other operations here, `GetRecord` can also return `ResourceNotFound`.
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetRecordError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::GetRecordError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetRecordErrorKind::AccessForbidden(inner) => {
                    Error::AccessForbidden(inner)
                }
                crate::error::GetRecordErrorKind::InternalFailure(inner) => {
                    Error::InternalFailure(inner)
                }
                crate::error::GetRecordErrorKind::ResourceNotFound(inner) => {
                    Error::ResourceNotFound(inner)
                }
                crate::error::GetRecordErrorKind::ServiceUnavailable(inner) => {
                    Error::ServiceUnavailable(inner)
                }
                crate::error::GetRecordErrorKind::ValidationError(inner) => {
                    Error::ValidationError(inner)
                }
                crate::error::GetRecordErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
// Collapses a `PutRecord` SDK error into the service-wide `Error`;
// same shape as the conversions above.
impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutRecordError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::PutRecordError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::PutRecordErrorKind::AccessForbidden(inner) => {
                    Error::AccessForbidden(inner)
                }
                crate::error::PutRecordErrorKind::InternalFailure(inner) => {
                    Error::InternalFailure(inner)
                }
                crate::error::PutRecordErrorKind::ServiceUnavailable(inner) => {
                    Error::ServiceUnavailable(inner)
                }
                crate::error::PutRecordErrorKind::ValidationError(inner) => {
                    Error::ValidationError(inner)
                }
                crate::error::PutRecordErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
// Marker impl so `Error` can be used as a `std::error::Error` trait object.
impl std::error::Error for Error {}
| 45.614815 | 123 | 0.578272 |
1dd2411b45297a194202331d4bc3d81c4dd13842 | 1,973 | /*
Copyright The containerd Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use containerd_shim as shim;
use log::info;
use shim::{api, ExitSignal, TtrpcContext, TtrpcResult};
/// Minimal example shim service: the only state is the signal used to block
/// `wait` until `shutdown` is requested.
#[derive(Clone)]
struct Service {
    exit: ExitSignal,
}
impl shim::Shim for Service {
    type Error = shim::Error;
    type T = Service;

    /// Constructs the service; id, namespace, publisher and config are all
    /// unused by this minimal example.
    fn new(
        _id: &str,
        _namespace: &str,
        _publisher: shim::RemotePublisher,
        _config: &mut shim::Config,
    ) -> Self {
        Service {
            exit: ExitSignal::default(),
        }
    }

    /// Spawns the shim process (no extra args) and returns the address it
    /// listens on.
    fn start_shim(&mut self, opts: shim::StartOpts) -> Result<String, shim::Error> {
        let address = shim::spawn(opts, Vec::new())?;
        Ok(address)
    }

    /// Blocks until `shutdown` fires the exit signal.
    fn wait(&mut self) {
        self.exit.wait();
    }

    /// The task service is just a clone of this same object.
    fn get_task_service(&self) -> Self::T {
        self.clone()
    }
}
impl shim::Task for Service {
    /// Answers a connect request with a fixed version string; all other
    /// response fields are left at their defaults.
    fn connect(
        &self,
        _ctx: &TtrpcContext,
        _req: api::ConnectRequest,
    ) -> TtrpcResult<api::ConnectResponse> {
        info!("Connect request");
        Ok(api::ConnectResponse {
            version: String::from("example"),
            ..Default::default()
        })
    }

    /// Fires the exit signal so `Shim::wait` returns and the process exits.
    fn shutdown(&self, _ctx: &TtrpcContext, _req: api::ShutdownRequest) -> TtrpcResult<api::Empty> {
        info!("Shutdown request");
        self.exit.signal();
        Ok(api::Empty::default())
    }
}
/// Entry point: runs the shim event loop under the runtime id
/// "io.containerd.empty.v1".
fn main() {
    shim::run::<Service>("io.containerd.empty.v1")
}
| 24.974684 | 100 | 0.613279 |
d5a729438991db8fab96d6c4a68b9be92edaa038 | 11,229 | //! Types for the `m.room.power_levels` event.
use std::collections::BTreeMap;
use js_int::{int, Int};
use ruma_common::power_levels::default_power_level;
use ruma_events_macros::EventContent;
use ruma_identifiers::UserId;
use serde::{Deserialize, Serialize};
use crate::EventType;
use ruma_common::power_levels::NotificationPowerLevels;
/// The content of an `m.room.power_levels` event.
///
/// Defines the power levels (privileges) of users in the room.
///
/// NOTE(review): every moderation-related field defaults to 50 and is
/// omitted from serialization when equal to that default; `events_default`
/// and `users_default` default to 0. Fields carrying
/// `#[ruma_event(skip_redaction)]` survive event redaction — observe that
/// `invite` and `notifications` do NOT carry it.
#[derive(Clone, Debug, Deserialize, Serialize, EventContent)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
#[ruma_event(type = "m.room.power_levels", kind = State)]
pub struct RoomPowerLevelsEventContent {
    /// The level required to ban a user.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default = "default_power_level", skip_serializing_if = "is_default_power_level")]
    #[ruma_event(skip_redaction)]
    pub ban: Int,

    /// The level required to send specific event types.
    ///
    /// This is a mapping from event type to power level required.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(
        feature = "compat",
        serde(deserialize_with = "ruma_serde::btreemap_int_or_string_to_int_values")
    )]
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    #[ruma_event(skip_redaction)]
    pub events: BTreeMap<EventType, Int>,

    /// The default level required to send message events.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default, skip_serializing_if = "ruma_serde::is_default")]
    #[ruma_event(skip_redaction)]
    pub events_default: Int,

    /// The level required to invite a user.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    // NOTE: no `skip_redaction` here — this field does not survive redaction.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default = "default_power_level", skip_serializing_if = "is_default_power_level")]
    pub invite: Int,

    /// The level required to kick a user.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default = "default_power_level", skip_serializing_if = "is_default_power_level")]
    #[ruma_event(skip_redaction)]
    pub kick: Int,

    /// The level required to redact an event.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default = "default_power_level", skip_serializing_if = "is_default_power_level")]
    #[ruma_event(skip_redaction)]
    pub redact: Int,

    /// The default level required to send state events.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default = "default_power_level", skip_serializing_if = "is_default_power_level")]
    #[ruma_event(skip_redaction)]
    pub state_default: Int,

    /// The power levels for specific users.
    ///
    /// This is a mapping from `user_id` to power level for that user.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(
        feature = "compat",
        serde(deserialize_with = "ruma_serde::btreemap_int_or_string_to_int_values")
    )]
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    #[ruma_event(skip_redaction)]
    pub users: BTreeMap<Box<UserId>, Int>,

    /// The default power level for every user in the room.
    ///
    /// If you activate the `compat` feature, deserialization will work for stringified
    /// integers too.
    #[cfg_attr(feature = "compat", serde(deserialize_with = "ruma_serde::int_or_string_to_int"))]
    #[serde(default, skip_serializing_if = "ruma_serde::is_default")]
    #[ruma_event(skip_redaction)]
    pub users_default: Int,

    /// The power level requirements for specific notification types.
    ///
    /// This is a mapping from `key` to power level for that notifications key.
    // NOTE: no `skip_redaction` here — this field does not survive redaction.
    #[serde(default, skip_serializing_if = "ruma_serde::is_default")]
    pub notifications: NotificationPowerLevels,
}
impl RoomPowerLevelsEventContent {
    /// Creates a new `RoomPowerLevelsEventContent` with all-default values.
    pub fn new() -> Self {
        // The 50/0 split is deliberate and comes from the Matrix
        // specification: moderation actions default to 50, while sending
        // ordinary events (`events_default`) and user levels
        // (`users_default`) default to 0.
        let moderation = default_power_level();
        Self {
            ban: moderation,
            invite: moderation,
            kick: moderation,
            redact: moderation,
            state_default: moderation,
            events: BTreeMap::new(),
            events_default: Int::default(),
            users: BTreeMap::new(),
            users_default: Int::default(),
            notifications: NotificationPowerLevels::default(),
        }
    }
}
impl Default for RoomPowerLevelsEventContent {
    /// Equivalent to [`RoomPowerLevelsEventContent::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Used with `#[serde(skip_serializing_if)]` to omit power levels equal to
/// the spec default of 50.
#[allow(clippy::trivially_copy_pass_by_ref)]
fn is_default_power_level(l: &Int) -> bool {
    int!(50) == *l
}
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use assign::assign;
use js_int::{int, uint};
use maplit::btreemap;
use ruma_common::MilliSecondsSinceUnixEpoch;
use ruma_identifiers::{event_id, room_id, user_id};
use serde_json::{json, to_value as to_json_value};
use super::{default_power_level, NotificationPowerLevels, RoomPowerLevelsEventContent};
use crate::{EventType, StateEvent, Unsigned};
/// All-default content must serialize to an empty `content` object: every
/// field is covered by a `skip_serializing_if` attribute that fires on its
/// default value.
#[test]
fn serialization_with_optional_fields_as_none() {
    let default = default_power_level();
    let power_levels_event = StateEvent {
        content: RoomPowerLevelsEventContent {
            ban: default,
            events: BTreeMap::new(),
            events_default: int!(0),
            invite: default,
            kick: default,
            redact: default,
            state_default: default,
            users: BTreeMap::new(),
            users_default: int!(0),
            notifications: NotificationPowerLevels::default(),
        },
        event_id: event_id!("$h29iv0s8:example.com").to_owned(),
        origin_server_ts: MilliSecondsSinceUnixEpoch(uint!(1)),
        prev_content: None,
        room_id: room_id!("!n8f893n9:example.com").to_owned(),
        unsigned: Unsigned::default(),
        sender: user_id!("@carl:example.com").to_owned(),
        state_key: "".into(),
    };
    let actual = to_json_value(&power_levels_event).unwrap();
    // Note: no "unsigned" and no "prev_content" keys, and "content" is {}.
    let expected = json!({
        "content": {},
        "event_id": "$h29iv0s8:example.com",
        "origin_server_ts": 1,
        "room_id": "!n8f893n9:example.com",
        "sender": "@carl:example.com",
        "state_key": "",
        "type": "m.room.power_levels"
    });
    assert_eq!(actual, expected);
}
/// With every field set to a non-default value, serialization must emit all
/// of them, including `prev_content` and `unsigned`.
#[test]
fn serialization_with_all_fields() {
    let user = user_id!("@carl:example.com");
    let power_levels_event = StateEvent {
        content: RoomPowerLevelsEventContent {
            ban: int!(23),
            events: btreemap! {
                EventType::Dummy => int!(23)
            },
            events_default: int!(23),
            invite: int!(23),
            kick: int!(23),
            redact: int!(23),
            state_default: int!(23),
            users: btreemap! {
                user.to_owned() => int!(23)
            },
            users_default: int!(23),
            notifications: assign!(NotificationPowerLevels::new(), { room: int!(23) }),
        },
        event_id: event_id!("$h29iv0s8:example.com").to_owned(),
        origin_server_ts: MilliSecondsSinceUnixEpoch(uint!(1)),
        prev_content: Some(RoomPowerLevelsEventContent {
            // Make just one field different so we at least know they're two different objects.
            ban: int!(42),
            events: btreemap! {
                EventType::Dummy => int!(42)
            },
            events_default: int!(42),
            invite: int!(42),
            kick: int!(42),
            redact: int!(42),
            state_default: int!(42),
            users: btreemap! {
                user.to_owned() => int!(42)
            },
            users_default: int!(42),
            notifications: assign!(NotificationPowerLevels::new(), { room: int!(42) }),
        }),
        room_id: room_id!("!n8f893n9:example.com").to_owned(),
        unsigned: Unsigned { age: Some(int!(100)), ..Unsigned::default() },
        sender: user.to_owned(),
        state_key: "".into(),
    };
    let actual = to_json_value(&power_levels_event).unwrap();
    // `content` uses 23 everywhere, `prev_content` 42 everywhere.
    let expected = json!({
        "content": {
            "ban": 23,
            "events": {
                "m.dummy": 23
            },
            "events_default": 23,
            "invite": 23,
            "kick": 23,
            "redact": 23,
            "state_default": 23,
            "users": {
                "@carl:example.com": 23
            },
            "users_default": 23,
            "notifications": {
                "room": 23
            }
        },
        "event_id": "$h29iv0s8:example.com",
        "origin_server_ts": 1,
        "prev_content": {
            "ban": 42,
            "events": {
                "m.dummy": 42
            },
            "events_default": 42,
            "invite": 42,
            "kick": 42,
            "redact": 42,
            "state_default": 42,
            "users": {
                "@carl:example.com": 42
            },
            "users_default": 42,
            "notifications": {
                "room": 42
            }
        },
        "room_id": "!n8f893n9:example.com",
        "sender": "@carl:example.com",
        "state_key": "",
        "type": "m.room.power_levels",
        "unsigned": {
            "age": 100
        }
    });
    assert_eq!(actual, expected);
}
}
| 37.182119 | 99 | 0.579749 |
0392636bf708bebc686b1838df2f3c4a97f4b1be | 3,106 | pub mod day_12 {
use std::collections::HashMap;
use std::collections::HashSet;
use std::hash::Hash;
/// A graph node: a program id plus the ids it is directly linked to.
pub struct Node {
    number: u32,
    friends: Vec<u32>,
}

/// Parses one input line of the form `2 <-> 0, 3, 4`.
///
/// # Panics
///
/// Panics if the line does not have the expected shape: a numeric id,
/// the `<->` separator, then comma-separated numeric neighbour ids.
pub fn parse(s: &str) -> Node {
    let mut tokens = s.split_whitespace();
    let number: u32 = tokens
        .next()
        .expect("empty input line")
        .parse()
        .expect("node id is not a number");
    match tokens.next() {
        Some("<->") => {}
        other => panic!("expected `<->` separator, found {:?}", other),
    }
    // Neighbour tokens look like "0," except for the last one; strip the
    // trailing comma and parse strictly. (The previous hand-rolled digit
    // accumulator silently turned a malformed token into 0 instead of
    // failing.)
    let friends = tokens
        .map(|tok| {
            tok.trim_end_matches(',')
                .parse()
                .expect("neighbour id is not a number")
        })
        .collect();
    Node { number, friends }
}
/// Reads the puzzle input embedded at compile time (`../input.txt`,
/// relative to this source file) and parses one [`Node`] per line.
pub fn input() -> Vec<Node> {
    let input = include_str!("../input.txt");
    input.lines().map(|l| parse(l)).collect::<Vec<_>>()
}
/// Returns the set of ids reachable from `component` (inclusive) via an
/// iterative depth-first walk over the adjacency map.
///
/// Panics if any visited id has no entry in `relations`, matching the
/// original behaviour.
fn connected_component<T>(relations: &HashMap<T, &[T]>, component: T) -> HashSet<T>
where
    T: Eq + Hash + Clone,
{
    let mut connected = HashSet::new();
    let mut pending = vec![component];
    while let Some(node) = pending.pop() {
        // `insert` returning true means this id is newly discovered and
        // its neighbours still need to be queued.
        if connected.insert(node.clone()) {
            for neighbour in relations.get(&node).unwrap().iter() {
                if !connected.contains(neighbour) {
                    pending.push(neighbour.clone());
                }
            }
        }
    }
    connected
}
/// Puzzle part 1: size of the connected component containing node 0.
pub fn part_1(input: &[Node]) -> usize {
    let mut relations: HashMap<u32, &[u32]> = HashMap::new();
    for node in input {
        relations.insert(node.number, node.friends.as_slice());
    }
    connected_component(&relations, 0).len()
}
/// Puzzle part 2: number of distinct connected components in the graph.
pub fn part_2(input: &[Node]) -> u32 {
    let relations: HashMap<u32, &[u32]> = input
        .iter()
        .map(|node| (node.number, node.friends.as_slice()))
        .collect();
    let mut assigned = HashSet::new();
    let mut groups = 0;
    for node in input {
        // Each node either already belongs to a discovered component or
        // seeds a brand-new one.
        if !assigned.contains(&node.number) {
            assigned.extend(connected_component(&relations, node.number));
            groups += 1;
        }
    }
    groups
}
}
#[cfg(test)]
mod tests {
    use super::day_12::*;

    /// The worked example graph from the puzzle statement.
    fn data() -> Vec<Node> {
        vec![
            "0 <-> 2",
            "1 <-> 1",
            "2 <-> 0, 3, 4",
            "3 <-> 2, 4",
            "4 <-> 2, 3, 6",
            "5 <-> 6",
            "6 <-> 4, 5",
        ]
        .iter()
        .map(|s| parse(s))
        .collect()
    }

    #[test]
    fn part1_known() {
        assert_eq!(part_1(&data()), 6);
    }

    #[test]
    fn part2_known() {
        assert_eq!(part_2(&data()), 2);
    }

    /// Regression test against the author's real puzzle input (requires
    /// `../input.txt` to be present at compile time).
    #[test]
    fn test_day_12() {
        let input = input();
        assert_eq!(part_1(&input), 378);
        assert_eq!(part_2(&input), 204);
    }
}
| 25.459016 | 87 | 0.444301 |
0afc3ee87e0f47edcf9e1b62dfc9bc1e13d88380 | 569 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE(review): pre-1.0 Rust (circa 2012) from the rustc test suite.
// `~"hello"` is the removed owned-string literal syntax and `io::println`
// predates `std::io`/`println!`; this will not compile on any modern
// toolchain and is kept verbatim as a historical artifact.
fn f() {
    let a = ~"hello";
    let b: &str = a;
    io::println(b);
}

pub fn main() {
    f();
}
| 28.45 | 68 | 0.681898 |
567ddcbf5bbe88d4f944d74bc19adcfbb688214d | 3,458 | #[doc = "Register `PDMA_DSCT8_SA` reader"]
// Generated register-access boilerplate (svd2rust); comments only below.
// Newtype reader handle that derefs to the generic register reader.
pub struct R(crate::R<PDMA_DSCT8_SA_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<PDMA_DSCT8_SA_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<PDMA_DSCT8_SA_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<PDMA_DSCT8_SA_SPEC>) -> Self {
        R(reader)
    }
}
// Newtype writer handle that derefs (mutably) to the generic register writer.
#[doc = "Register `PDMA_DSCT8_SA` writer"]
pub struct W(crate::W<PDMA_DSCT8_SA_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<PDMA_DSCT8_SA_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<PDMA_DSCT8_SA_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<PDMA_DSCT8_SA_SPEC>) -> Self {
        W(writer)
    }
}
// Reader proxy for the 32-bit SA field.
#[doc = "Field `SA` reader - PDMA Transfer Source Address\nThis field indicates a 32-bit source address of PDMA controller."]
pub struct SA_R(crate::FieldReader<u32, u32>);
impl SA_R {
    pub(crate) fn new(bits: u32) -> Self {
        SA_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for SA_R {
    type Target = crate::FieldReader<u32, u32>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Writer proxy for the 32-bit SA field; the mask covers the whole register.
#[doc = "Field `SA` writer - PDMA Transfer Source Address\nThis field indicates a 32-bit source address of PDMA controller."]
pub struct SA_W<'a> {
    w: &'a mut W,
}
impl<'a> SA_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xffff_ffff) | (value as u32 & 0xffff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:31 - PDMA Transfer Source Address This field indicates a 32-bit source address of PDMA controller."]
    #[inline(always)]
    pub fn sa(&self) -> SA_R {
        SA_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:31 - PDMA Transfer Source Address This field indicates a 32-bit source address of PDMA controller."]
    #[inline(always)]
    pub fn sa(&mut self) -> SA_W {
        SA_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
// Zero-sized marker type tying together the reader/writer/reset behaviour
// of this register for the generic access API.
#[doc = "Source Address Register of PDMA Channel n\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pdma_dsct8_sa](index.html) module"]
pub struct PDMA_DSCT8_SA_SPEC;
impl crate::RegisterSpec for PDMA_DSCT8_SA_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [pdma_dsct8_sa::R](R) reader structure"]
impl crate::Readable for PDMA_DSCT8_SA_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [pdma_dsct8_sa::W](W) writer structure"]
impl crate::Writable for PDMA_DSCT8_SA_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets PDMA_DSCT8_SA to value 0"]
impl crate::Resettable for PDMA_DSCT8_SA_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 33.572816 | 435 | 0.632157 |
26590b22526c6fa763ece1fef21b43943be69690 | 24,746 | #[allow(deprecated)]
use crate::sysvar::recent_blockhashes;
use crate::{
decode_error::DecodeError,
instruction::{AccountMeta, Instruction, InstructionError},
nonce,
pubkey::Pubkey,
system_program,
sysvar::rent,
};
use num_derive::{FromPrimitive, ToPrimitive};
use thiserror::Error;
/// Errors returned by the system program's instructions.
///
/// The `#[error]` strings are the user-facing messages; the
/// `FromPrimitive`/`ToPrimitive` derives give each variant the numeric
/// discriminant used for `InstructionError::Custom` round-trips (see
/// `DecodeError` below).
#[derive(Error, Debug, Serialize, Clone, PartialEq, FromPrimitive, ToPrimitive)]
pub enum SystemError {
    #[error("an account with the same address already exists")]
    AccountAlreadyInUse,
    #[error("account does not have enough SOL to perform the operation")]
    ResultWithNegativeLamports,
    #[error("cannot assign account to this program id")]
    InvalidProgramId,
    #[error("cannot allocate account data of this length")]
    InvalidAccountDataLength,
    #[error("length of requested seed is too long")]
    MaxSeedLengthExceeded,
    #[error("provided address does not match addressed derived from seed")]
    AddressWithSeedMismatch,
    #[error("advancing stored nonce requires a populated RecentBlockhashes sysvar")]
    NonceNoRecentBlockhashes,
    #[error("stored nonce is still in recent_blockhashes")]
    NonceBlockhashNotExpired,
    #[error("specified nonce does not match stored nonce")]
    NonceUnexpectedBlockhashValue,
}
impl<T> DecodeError<T> for SystemError {
    /// Type tag used when decoding custom instruction-error discriminants.
    fn type_of() -> &'static str {
        "SystemError"
    }
}
/// Errors specific to durable-nonce account operations.
#[derive(Error, Debug, Clone, PartialEq, FromPrimitive, ToPrimitive)]
pub enum NonceError {
    #[error("recent blockhash list is empty")]
    NoRecentBlockhashes,
    #[error("stored nonce is still in recent_blockhashes")]
    NotExpired,
    #[error("specified nonce does not match stored nonce")]
    UnexpectedValue,
    #[error("cannot handle request in current account state")]
    BadAccountState,
}
impl<E> DecodeError<E> for NonceError {
    /// Type tag used when decoding custom instruction-error discriminants.
    fn type_of() -> &'static str {
        "NonceError"
    }
}
/// Crate-private mirror of [`NonceError`], variant for variant.
///
/// NOTE(review): appears to exist so that, when `use_system_variant` is
/// false, nonce errors are encoded/decoded under their own `type_of()` tag
/// ("NonceErrorAdapter") instead of `SystemError`'s — confirm against the
/// conversion functions below before changing.
#[derive(Error, Debug, Clone, PartialEq, FromPrimitive, ToPrimitive)]
enum NonceErrorAdapter {
    #[error("recent blockhash list is empty")]
    NoRecentBlockhashes,
    #[error("stored nonce is still in recent_blockhashes")]
    NotExpired,
    #[error("specified nonce does not match stored nonce")]
    UnexpectedValue,
    #[error("cannot handle request in current account state")]
    BadAccountState,
}
impl<E> DecodeError<E> for NonceErrorAdapter {
    /// Type tag used when decoding custom instruction-error discriminants.
    fn type_of() -> &'static str {
        "NonceErrorAdapter"
    }
}
impl From<NonceErrorAdapter> for NonceError {
    /// The adapter mirrors `NonceError` one-to-one, so conversion is a
    /// direct variant-for-variant mapping.
    fn from(e: NonceErrorAdapter) -> Self {
        match e {
            NonceErrorAdapter::NoRecentBlockhashes => Self::NoRecentBlockhashes,
            NonceErrorAdapter::NotExpired => Self::NotExpired,
            NonceErrorAdapter::UnexpectedValue => Self::UnexpectedValue,
            NonceErrorAdapter::BadAccountState => Self::BadAccountState,
        }
    }
}
/// Maps a [`NonceError`] onto an [`InstructionError`].
///
/// With `use_system_variant` set, the error is reported through the
/// equivalent [`SystemError`] variants (with `BadAccountState` becoming
/// `InvalidAccountData`); otherwise it is encoded via the private
/// [`NonceErrorAdapter`] discriminants.
pub fn nonce_to_instruction_error(error: NonceError, use_system_variant: bool) -> InstructionError {
    match (use_system_variant, error) {
        (true, NonceError::NoRecentBlockhashes) => SystemError::NonceNoRecentBlockhashes.into(),
        (true, NonceError::NotExpired) => SystemError::NonceBlockhashNotExpired.into(),
        (true, NonceError::UnexpectedValue) => SystemError::NonceUnexpectedBlockhashValue.into(),
        (true, NonceError::BadAccountState) => InstructionError::InvalidAccountData,
        (false, NonceError::NoRecentBlockhashes) => NonceErrorAdapter::NoRecentBlockhashes.into(),
        (false, NonceError::NotExpired) => NonceErrorAdapter::NotExpired.into(),
        (false, NonceError::UnexpectedValue) => NonceErrorAdapter::UnexpectedValue.into(),
        (false, NonceError::BadAccountState) => NonceErrorAdapter::BadAccountState.into(),
    }
}
/// Inverse of [`nonce_to_instruction_error`]: recovers a [`NonceError`] from
/// an [`InstructionError`], or `None` if it does not encode a nonce error.
///
/// `use_system_variant` must match the flag used when the error was
/// produced, because the two paths use different custom discriminant spaces.
pub fn instruction_to_nonce_error(
    error: &InstructionError,
    use_system_variant: bool,
) -> Option<NonceError> {
    if use_system_variant {
        match error {
            InstructionError::Custom(discriminant) => {
                // Only the three nonce-related SystemError variants map back.
                match SystemError::decode_custom_error_to_enum(*discriminant) {
                    Some(SystemError::NonceNoRecentBlockhashes) => {
                        Some(NonceError::NoRecentBlockhashes)
                    }
                    Some(SystemError::NonceBlockhashNotExpired) => Some(NonceError::NotExpired),
                    Some(SystemError::NonceUnexpectedBlockhashValue) => {
                        Some(NonceError::UnexpectedValue)
                    }
                    _ => None,
                }
            }
            InstructionError::InvalidAccountData => Some(NonceError::BadAccountState),
            _ => None,
        }
    } else if let InstructionError::Custom(discriminant) = error {
        let maybe: Option<NonceErrorAdapter> =
            NonceErrorAdapter::decode_custom_error_to_enum(*discriminant);
        maybe.map(NonceError::from)
    } else {
        None
    }
}
/// Maximum permitted size of data: 10 MiB (10 * 1024 * 1024 bytes).
pub const MAX_PERMITTED_DATA_LENGTH: u64 = 10 * 1024 * 1024;
/// Instructions understood by the system program.
///
/// NOTE: the `frozen_abi` digest pins this enum's serialized layout; variant
/// order and payloads must not change without updating the digest.
#[frozen_abi(digest = "2xnDcizcPKKR7b624FeuuPd1zj5bmnkmVsBWgoKPTh4w")]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, AbiExample, AbiEnumVisitor)]
pub enum SystemInstruction {
    /// Create a new account
    ///
    /// # Account references
    ///   0. `[WRITE, SIGNER]` Funding account
    ///   1. `[WRITE, SIGNER]` New account
    CreateAccount {
        /// Number of lamports to transfer to the new account
        lamports: u64,
        /// Number of bytes of memory to allocate
        space: u64,
        /// Address of program that will own the new account
        owner: Pubkey,
    },
    /// Assign account to a program
    ///
    /// # Account references
    ///   0. `[WRITE, SIGNER]` Assigned account public key
    Assign {
        /// Owner program account
        owner: Pubkey,
    },
    /// Transfer lamports
    ///
    /// # Account references
    ///   0. `[WRITE, SIGNER]` Funding account
    ///   1. `[WRITE]` Recipient account
    Transfer { lamports: u64 },
    /// Create a new account at an address derived from a base pubkey and a seed
    ///
    /// # Account references
    ///   0. `[WRITE, SIGNER]` Funding account
    ///   1. `[WRITE]` Created account
    ///   2. `[SIGNER]` (optional) Base account; the account matching the base Pubkey below must be
    ///                          provided as a signer, but may be the same as the funding account
    ///                          and provided as account 0
    CreateAccountWithSeed {
        /// Base public key
        base: Pubkey,
        /// String of ASCII chars, no longer than `Pubkey::MAX_SEED_LEN`
        seed: String,
        /// Number of lamports to transfer to the new account
        lamports: u64,
        /// Number of bytes of memory to allocate
        space: u64,
        /// Owner program account address
        owner: Pubkey,
    },
    /// Consumes a stored nonce, replacing it with a successor
    ///
    /// # Account references
    ///   0. `[WRITE]` Nonce account
    ///   1. `[]` RecentBlockhashes sysvar
    ///   2. `[SIGNER]` Nonce authority
    AdvanceNonceAccount,
    /// Withdraw funds from a nonce account
    ///
    /// # Account references
    ///   0. `[WRITE]` Nonce account
    ///   1. `[WRITE]` Recipient account
    ///   2. `[]` RecentBlockhashes sysvar
    ///   3. `[]` Rent sysvar
    ///   4. `[SIGNER]` Nonce authority
    ///
    /// The `u64` parameter is the lamports to withdraw, which must leave the
    /// account balance above the rent exempt reserve or at zero.
    WithdrawNonceAccount(u64),
    /// Drive state of Uninitialized nonce account to Initialized, setting the nonce value
    ///
    /// # Account references
    ///   0. `[WRITE]` Nonce account
    ///   1. `[]` RecentBlockhashes sysvar
    ///   2. `[]` Rent sysvar
    ///
    /// The `Pubkey` parameter specifies the entity authorized to execute nonce
    /// instruction on the account
    ///
    /// No signatures are required to execute this instruction, enabling derived
    /// nonce account addresses
    InitializeNonceAccount(Pubkey),
    /// Change the entity authorized to execute nonce instructions on the account
    ///
    /// # Account references
    ///   0. `[WRITE]` Nonce account
    ///   1. `[SIGNER]` Nonce authority
    ///
    /// The `Pubkey` parameter identifies the entity to authorize
    AuthorizeNonceAccount(Pubkey),
    /// Allocate space in a (possibly new) account without funding
    ///
    /// # Account references
    ///   0. `[WRITE, SIGNER]` New account
    Allocate {
        /// Number of bytes of memory to allocate
        space: u64,
    },
    /// Allocate space for and assign an account at an address
    /// derived from a base public key and a seed
    ///
    /// # Account references
    ///   0. `[WRITE]` Allocated account
    ///   1. `[SIGNER]` Base account
    AllocateWithSeed {
        /// Base public key
        base: Pubkey,
        /// String of ASCII chars, no longer than `Pubkey::MAX_SEED_LEN`
        seed: String,
        /// Number of bytes of memory to allocate
        space: u64,
        /// Owner program account
        owner: Pubkey,
    },
    /// Assign account to a program based on a seed
    ///
    /// # Account references
    ///   0. `[WRITE]` Assigned account
    ///   1. `[SIGNER]` Base account
    AssignWithSeed {
        /// Base public key
        base: Pubkey,
        /// String of ASCII chars, no longer than `Pubkey::MAX_SEED_LEN`
        seed: String,
        /// Owner program account
        owner: Pubkey,
    },
    /// Transfer lamports from a derived address
    ///
    /// # Account references
    ///   0. `[WRITE]` Funding account
    ///   1. `[SIGNER]` Base for funding account
    ///   2. `[WRITE]` Recipient account
    TransferWithSeed {
        /// Amount to transfer
        lamports: u64,
        /// Seed to use to derive the funding account address
        from_seed: String,
        /// Owner to use to derive the funding account address
        from_owner: Pubkey,
    },
}
/// Builds a `SystemInstruction::CreateAccount` instruction.
///
/// `from_pubkey` funds the account at `to_pubkey` with `lamports`, allocating
/// `space` bytes and assigning ownership to `owner`. Both accounts must sign.
pub fn create_account(
    from_pubkey: &Pubkey,
    to_pubkey: &Pubkey,
    lamports: u64,
    space: u64,
    owner: &Pubkey,
) -> Instruction {
    let data = SystemInstruction::CreateAccount {
        lamports,
        space,
        owner: *owner,
    };
    Instruction::new_with_bincode(
        system_program::id(),
        &data,
        vec![
            AccountMeta::new(*from_pubkey, true),
            AccountMeta::new(*to_pubkey, true),
        ],
    )
}
// we accept `to` as a parameter so that callers do their own error handling when
// calling create_with_seed()
/// Builds a `SystemInstruction::CreateAccountWithSeed` instruction.
///
/// `to_pubkey` must equal `create_with_seed(base, seed, owner)`; the base key
/// signs (and may itself be the funding account).
pub fn create_account_with_seed(
    from_pubkey: &Pubkey,
    to_pubkey: &Pubkey, // must match create_with_seed(base, seed, owner)
    base: &Pubkey,
    seed: &str,
    lamports: u64,
    space: u64,
    owner: &Pubkey,
) -> Instruction {
    let data = SystemInstruction::CreateAccountWithSeed {
        base: *base,
        seed: seed.to_string(),
        lamports,
        space,
        owner: *owner,
    };
    Instruction::new_with_bincode(
        system_program::id(),
        &data,
        vec![
            AccountMeta::new(*from_pubkey, true),
            AccountMeta::new(*to_pubkey, false),
            AccountMeta::new_readonly(*base, true),
        ],
    )
}
/// Builds a `SystemInstruction::Assign` handing ownership of the signing
/// account `pubkey` over to the `owner` program.
pub fn assign(pubkey: &Pubkey, owner: &Pubkey) -> Instruction {
    Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::Assign { owner: *owner },
        vec![AccountMeta::new(*pubkey, true)],
    )
}
/// Builds a `SystemInstruction::AssignWithSeed` for an account at a derived
/// address.
///
/// `address` must equal `create_with_seed(base, seed, owner)`; only `base` signs.
pub fn assign_with_seed(
    address: &Pubkey, // must match create_with_seed(base, seed, owner)
    base: &Pubkey,
    seed: &str,
    owner: &Pubkey,
) -> Instruction {
    let data = SystemInstruction::AssignWithSeed {
        base: *base,
        seed: seed.to_string(),
        owner: *owner,
    };
    Instruction::new_with_bincode(
        system_program::id(),
        &data,
        vec![
            AccountMeta::new(*address, false),
            AccountMeta::new_readonly(*base, true),
        ],
    )
}
/// Builds a `SystemInstruction::Transfer` moving `lamports` from the signing
/// `from_pubkey` to `to_pubkey`.
pub fn transfer(from_pubkey: &Pubkey, to_pubkey: &Pubkey, lamports: u64) -> Instruction {
    Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::Transfer { lamports },
        vec![
            AccountMeta::new(*from_pubkey, true),
            AccountMeta::new(*to_pubkey, false),
        ],
    )
}
/// Builds a `SystemInstruction::TransferWithSeed` moving `lamports` out of a
/// derived funding address.
///
/// `from_pubkey` must equal `create_with_seed(from_base, from_seed, from_owner)`;
/// only `from_base` signs.
pub fn transfer_with_seed(
    from_pubkey: &Pubkey, // must match create_with_seed(base, seed, owner)
    from_base: &Pubkey,
    from_seed: String,
    from_owner: &Pubkey,
    to_pubkey: &Pubkey,
    lamports: u64,
) -> Instruction {
    let data = SystemInstruction::TransferWithSeed {
        lamports,
        from_seed,
        from_owner: *from_owner,
    };
    Instruction::new_with_bincode(
        system_program::id(),
        &data,
        vec![
            AccountMeta::new(*from_pubkey, false),
            AccountMeta::new_readonly(*from_base, true),
            AccountMeta::new(*to_pubkey, false),
        ],
    )
}
/// Builds a `SystemInstruction::Allocate` reserving `space` bytes in the
/// signing account `pubkey`.
pub fn allocate(pubkey: &Pubkey, space: u64) -> Instruction {
    Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::Allocate { space },
        vec![AccountMeta::new(*pubkey, true)],
    )
}
/// Builds a `SystemInstruction::AllocateWithSeed` reserving `space` bytes in an
/// account at a derived address.
///
/// `address` must equal `create_with_seed(base, seed, owner)`; only `base` signs.
pub fn allocate_with_seed(
    address: &Pubkey, // must match create_with_seed(base, seed, owner)
    base: &Pubkey,
    seed: &str,
    space: u64,
    owner: &Pubkey,
) -> Instruction {
    let data = SystemInstruction::AllocateWithSeed {
        base: *base,
        seed: seed.to_string(),
        space,
        owner: *owner,
    };
    Instruction::new_with_bincode(
        system_program::id(),
        &data,
        vec![
            AccountMeta::new(*address, false),
            AccountMeta::new_readonly(*base, true),
        ],
    )
}
/// Create and sign new SystemInstruction::Transfer transaction to many destinations
pub fn transfer_many(from_pubkey: &Pubkey, to_lamports: &[(Pubkey, u64)]) -> Vec<Instruction> {
    // One Transfer instruction per (destination, amount) pair, all funded by
    // `from_pubkey`.
    let mut instructions = Vec::with_capacity(to_lamports.len());
    for (to_pubkey, lamports) in to_lamports {
        instructions.push(transfer(from_pubkey, to_pubkey, *lamports));
    }
    instructions
}
/// Builds the two instructions that create and initialize a durable nonce
/// account at an address derived from `base` and `seed`.
pub fn create_nonce_account_with_seed(
    from_pubkey: &Pubkey,
    nonce_pubkey: &Pubkey,
    base: &Pubkey,
    seed: &str,
    authority: &Pubkey,
    lamports: u64,
) -> Vec<Instruction> {
    // Fund and allocate the nonce account, owned by the system program.
    let create = create_account_with_seed(
        from_pubkey,
        nonce_pubkey,
        base,
        seed,
        lamports,
        nonce::State::size() as u64,
        &system_program::id(),
    );
    // Initialize it with `authority` as the nonce authority.
    let initialize = Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::InitializeNonceAccount(*authority),
        vec![
            AccountMeta::new(*nonce_pubkey, false),
            #[allow(deprecated)]
            AccountMeta::new_readonly(recent_blockhashes::id(), false),
            AccountMeta::new_readonly(rent::id(), false),
        ],
    );
    vec![create, initialize]
}
/// Builds the two instructions that create and initialize a durable nonce
/// account owned by the system program.
pub fn create_nonce_account(
    from_pubkey: &Pubkey,
    nonce_pubkey: &Pubkey,
    authority: &Pubkey,
    lamports: u64,
) -> Vec<Instruction> {
    // Fund and allocate the nonce account.
    let create = create_account(
        from_pubkey,
        nonce_pubkey,
        lamports,
        nonce::State::size() as u64,
        &system_program::id(),
    );
    // Initialize it with `authority` as the nonce authority.
    let initialize = Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::InitializeNonceAccount(*authority),
        vec![
            AccountMeta::new(*nonce_pubkey, false),
            #[allow(deprecated)]
            AccountMeta::new_readonly(recent_blockhashes::id(), false),
            AccountMeta::new_readonly(rent::id(), false),
        ],
    );
    vec![create, initialize]
}
/// Builds a `SystemInstruction::AdvanceNonceAccount` instruction, signed by
/// the nonce authority.
pub fn advance_nonce_account(nonce_pubkey: &Pubkey, authorized_pubkey: &Pubkey) -> Instruction {
    Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::AdvanceNonceAccount,
        vec![
            AccountMeta::new(*nonce_pubkey, false),
            #[allow(deprecated)]
            AccountMeta::new_readonly(recent_blockhashes::id(), false),
            AccountMeta::new_readonly(*authorized_pubkey, true),
        ],
    )
}
/// Builds a `SystemInstruction::WithdrawNonceAccount` instruction moving
/// `lamports` from the nonce account to `to_pubkey`, signed by the authority.
pub fn withdraw_nonce_account(
    nonce_pubkey: &Pubkey,
    authorized_pubkey: &Pubkey,
    to_pubkey: &Pubkey,
    lamports: u64,
) -> Instruction {
    Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::WithdrawNonceAccount(lamports),
        vec![
            AccountMeta::new(*nonce_pubkey, false),
            AccountMeta::new(*to_pubkey, false),
            #[allow(deprecated)]
            AccountMeta::new_readonly(recent_blockhashes::id(), false),
            AccountMeta::new_readonly(rent::id(), false),
            AccountMeta::new_readonly(*authorized_pubkey, true),
        ],
    )
}
/// Builds a `SystemInstruction::AuthorizeNonceAccount` instruction handing the
/// nonce authority over to `new_authority`, signed by the current authority.
pub fn authorize_nonce_account(
    nonce_pubkey: &Pubkey,
    authorized_pubkey: &Pubkey,
    new_authority: &Pubkey,
) -> Instruction {
    Instruction::new_with_bincode(
        system_program::id(),
        &SystemInstruction::AuthorizeNonceAccount(*new_authority),
        vec![
            AccountMeta::new(*nonce_pubkey, false),
            AccountMeta::new_readonly(*authorized_pubkey, true),
        ],
    )
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::instruction::{Instruction, InstructionError};
    use num_traits::ToPrimitive;
    // Helper: collects the pubkeys referenced by an instruction, in account order.
    fn get_keys(instruction: &Instruction) -> Vec<Pubkey> {
        instruction.accounts.iter().map(|x| x.pubkey).collect()
    }
    // `transfer_many` emits one Transfer per destination, each funded by alice.
    #[test]
    fn test_move_many() {
        let alice_pubkey = Pubkey::new_unique();
        let bob_pubkey = Pubkey::new_unique();
        let carol_pubkey = Pubkey::new_unique();
        let to_lamports = vec![(bob_pubkey, 1), (carol_pubkey, 2)];
        let instructions = transfer_many(&alice_pubkey, &to_lamports);
        assert_eq!(instructions.len(), 2);
        assert_eq!(get_keys(&instructions[0]), vec![alice_pubkey, bob_pubkey]);
        assert_eq!(get_keys(&instructions[1]), vec![alice_pubkey, carol_pubkey]);
    }
    // `create_nonce_account` produces a create + initialize pair referencing
    // both the funding and the nonce account.
    #[test]
    fn test_create_nonce_account() {
        let from_pubkey = Pubkey::new_unique();
        let nonce_pubkey = Pubkey::new_unique();
        let authorized = nonce_pubkey;
        let ixs = create_nonce_account(&from_pubkey, &nonce_pubkey, &authorized, 42);
        assert_eq!(ixs.len(), 2);
        let ix = &ixs[0];
        assert_eq!(ix.program_id, system_program::id());
        let pubkeys: Vec<_> = ix.accounts.iter().map(|am| am.pubkey).collect();
        assert!(pubkeys.contains(&from_pubkey));
        assert!(pubkeys.contains(&nonce_pubkey));
    }
    // Round-trips each NonceError through its custom-error discriminant and
    // checks the human-readable rendering.
    #[test]
    fn test_nonce_error_decode() {
        use num_traits::FromPrimitive;
        fn pretty_err<T>(err: InstructionError) -> String
        where
            T: 'static + std::error::Error + DecodeError<T> + FromPrimitive,
        {
            if let InstructionError::Custom(code) = err {
                let specific_error: T = T::decode_custom_error_to_enum(code).unwrap();
                format!(
                    "{:?}: {}::{:?} - {}",
                    err,
                    T::type_of(),
                    specific_error,
                    specific_error,
                )
            } else {
                "".to_string()
            }
        }
        assert_eq!(
            "Custom(0): NonceError::NoRecentBlockhashes - recent blockhash list is empty",
            pretty_err::<NonceError>(NonceError::NoRecentBlockhashes.into())
        );
        assert_eq!(
            "Custom(1): NonceError::NotExpired - stored nonce is still in recent_blockhashes",
            pretty_err::<NonceError>(NonceError::NotExpired.into())
        );
        assert_eq!(
            "Custom(2): NonceError::UnexpectedValue - specified nonce does not match stored nonce",
            pretty_err::<NonceError>(NonceError::UnexpectedValue.into())
        );
        assert_eq!(
            "Custom(3): NonceError::BadAccountState - cannot handle request in current account state",
            pretty_err::<NonceError>(NonceError::BadAccountState.into())
        );
    }
    // nonce_to_instruction_error: the bool selects legacy (false) vs
    // SystemError-mapped (true) representations.
    #[test]
    fn test_nonce_to_instruction_error() {
        assert_eq!(
            nonce_to_instruction_error(NonceError::NoRecentBlockhashes, false),
            NonceError::NoRecentBlockhashes.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::NotExpired, false),
            NonceError::NotExpired.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::UnexpectedValue, false),
            NonceError::UnexpectedValue.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::BadAccountState, false),
            NonceError::BadAccountState.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::NoRecentBlockhashes, true),
            SystemError::NonceNoRecentBlockhashes.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::NotExpired, true),
            SystemError::NonceBlockhashNotExpired.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::UnexpectedValue, true),
            SystemError::NonceUnexpectedBlockhashValue.into(),
        );
        assert_eq!(
            nonce_to_instruction_error(NonceError::BadAccountState, true),
            InstructionError::InvalidAccountData,
        );
    }
    // instruction_to_nonce_error: inverse mapping of the above, including the
    // unknown-discriminant (None) cases.
    #[test]
    fn test_instruction_to_nonce_error() {
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(NonceErrorAdapter::NoRecentBlockhashes.to_u32().unwrap(),),
                false,
            ),
            Some(NonceError::NoRecentBlockhashes),
        );
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(NonceErrorAdapter::NotExpired.to_u32().unwrap(),),
                false,
            ),
            Some(NonceError::NotExpired),
        );
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(NonceErrorAdapter::UnexpectedValue.to_u32().unwrap(),),
                false,
            ),
            Some(NonceError::UnexpectedValue),
        );
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(NonceErrorAdapter::BadAccountState.to_u32().unwrap(),),
                false,
            ),
            Some(NonceError::BadAccountState),
        );
        assert_eq!(
            instruction_to_nonce_error(&InstructionError::Custom(u32::MAX), false),
            None,
        );
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(SystemError::NonceNoRecentBlockhashes.to_u32().unwrap(),),
                true,
            ),
            Some(NonceError::NoRecentBlockhashes),
        );
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(SystemError::NonceBlockhashNotExpired.to_u32().unwrap(),),
                true,
            ),
            Some(NonceError::NotExpired),
        );
        assert_eq!(
            instruction_to_nonce_error(
                &InstructionError::Custom(
                    SystemError::NonceUnexpectedBlockhashValue.to_u32().unwrap(),
                ),
                true,
            ),
            Some(NonceError::UnexpectedValue),
        );
        assert_eq!(
            instruction_to_nonce_error(&InstructionError::InvalidAccountData, true),
            Some(NonceError::BadAccountState),
        );
        assert_eq!(
            instruction_to_nonce_error(&InstructionError::Custom(u32::MAX), true),
            None,
        );
    }
    // The adapter enum must keep the same discriminants as NonceError itself.
    #[test]
    fn test_nonce_error_adapter_compat() {
        assert_eq!(
            NonceError::NoRecentBlockhashes.to_u32(),
            NonceErrorAdapter::NoRecentBlockhashes.to_u32(),
        );
        assert_eq!(
            NonceError::NotExpired.to_u32(),
            NonceErrorAdapter::NotExpired.to_u32(),
        );
        assert_eq!(
            NonceError::UnexpectedValue.to_u32(),
            NonceErrorAdapter::UnexpectedValue.to_u32(),
        );
        assert_eq!(
            NonceError::BadAccountState.to_u32(),
            NonceErrorAdapter::BadAccountState.to_u32(),
        );
    }
}
| 32.054404 | 102 | 0.601835 |
912499bf96b94457d603aff4131c8791f22a2b0d | 4,442 | use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{match_def_path, meets_msrv, msrvs, path_to_local_id, paths, peel_blocks};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_semver::RustcVersion;
use rustc_span::sym;
use super::OPTION_AS_REF_DEREF;
/// lint use of `_.as_ref().map(Deref::deref)` for `Option`s
///
/// Suggests `as_deref()` / `as_deref_mut()` when the `map` argument is either
/// a path to a deref-equivalent method or a closure that merely dereferences
/// its argument.
pub(super) fn check<'tcx>(
    cx: &LateContext<'tcx>,
    expr: &hir::Expr<'_>,
    as_ref_recv: &hir::Expr<'_>,
    map_arg: &hir::Expr<'_>,
    is_mut: bool,
    msrv: Option<RustcVersion>,
) {
    // `as_deref`/`as_deref_mut` only exist from the configured MSRV onwards.
    if !meets_msrv(msrv, msrvs::OPTION_AS_DEREF) {
        return;
    }
    // A `&mut`-taking closure only matches `as_mut().map(..)`, and `&` only
    // matches `as_ref().map(..)`.
    let same_mutability = |m| (is_mut && m == &hir::Mutability::Mut) || (!is_mut && m == &hir::Mutability::Not);
    // The lint only applies when the receiver of `as_ref`/`as_mut` is an Option.
    let option_ty = cx.typeck_results().expr_ty(as_ref_recv);
    if !is_type_diagnostic_item(cx, option_ty, sym::Option) {
        return;
    }
    // Methods that are equivalent to a plain deref for this lint's purposes.
    let deref_aliases: [&[&str]; 9] = [
        &paths::DEREF_TRAIT_METHOD,
        &paths::DEREF_MUT_TRAIT_METHOD,
        &paths::CSTRING_AS_C_STR,
        &paths::OS_STRING_AS_OS_STR,
        &paths::PATH_BUF_AS_PATH,
        &paths::STRING_AS_STR,
        &paths::STRING_AS_MUT_STR,
        &paths::VEC_AS_SLICE,
        &paths::VEC_AS_MUT_SLICE,
    ];
    let is_deref = match map_arg.kind {
        // `map(Deref::deref)` / `map(String::as_str)` etc.: a bare path to one
        // of the deref aliases.
        hir::ExprKind::Path(ref expr_qpath) => cx
            .qpath_res(expr_qpath, map_arg.hir_id)
            .opt_def_id()
            .map_or(false, |fun_def_id| {
                deref_aliases.iter().any(|path| match_def_path(cx, fun_def_id, path))
            }),
        hir::ExprKind::Closure { body, .. } => {
            let closure_body = cx.tcx.hir().body(body);
            let closure_expr = peel_blocks(&closure_body.value);
            match &closure_expr.kind {
                // `|x| x.deref()`-style closure: sole argument must be the
                // closure parameter, auto-adjusted by exactly deref + borrow,
                // and the called method must be a deref alias.
                hir::ExprKind::MethodCall(_, args, _) => {
                    if_chain! {
                        if args.len() == 1;
                        if path_to_local_id(&args[0], closure_body.params[0].pat.hir_id);
                        let adj = cx
                            .typeck_results()
                            .expr_adjustments(&args[0])
                            .iter()
                            .map(|x| &x.kind)
                            .collect::<Box<[_]>>();
                        if let [ty::adjustment::Adjust::Deref(None), ty::adjustment::Adjust::Borrow(_)] = *adj;
                        then {
                            let method_did = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id).unwrap();
                            deref_aliases.iter().any(|path| match_def_path(cx, method_did, path))
                        } else {
                            false
                        }
                    }
                },
                // `|x| &**x` (or `&mut **x`) with borrow mutability matching
                // the `as_ref`/`as_mut` call.
                hir::ExprKind::AddrOf(hir::BorrowKind::Ref, m, inner) if same_mutability(m) => {
                    if_chain! {
                        if let hir::ExprKind::Unary(hir::UnOp::Deref, inner1) = inner.kind;
                        if let hir::ExprKind::Unary(hir::UnOp::Deref, inner2) = inner1.kind;
                        then {
                            path_to_local_id(inner2, closure_body.params[0].pat.hir_id)
                        } else {
                            false
                        }
                    }
                },
                _ => false,
            }
        },
        _ => false,
    };
    if is_deref {
        let current_method = if is_mut {
            format!(".as_mut().map({})", snippet(cx, map_arg.span, ".."))
        } else {
            format!(".as_ref().map({})", snippet(cx, map_arg.span, ".."))
        };
        let method_hint = if is_mut { "as_deref_mut" } else { "as_deref" };
        let hint = format!("{}.{}()", snippet(cx, as_ref_recv.span, ".."), method_hint);
        let suggestion = format!("try using {} instead", method_hint);
        let msg = format!(
            "called `{0}` on an Option value. This can be done more directly \
            by calling `{1}` instead",
            current_method, hint
        );
        span_lint_and_sugg(
            cx,
            OPTION_AS_REF_DEREF,
            expr.span,
            &msg,
            &suggestion,
            hint,
            Applicability::MachineApplicable,
        );
    }
}
| 36.710744 | 117 | 0.50878 |
7aa05706ffcfc53f1e3551f753852a5be4cf8738 | 668 | pub(crate) mod fdentry_impl;
pub(crate) mod host_impl;
pub(crate) mod hostcalls_impl;
mod filetime;
#[cfg(any(
target_os = "macos",
target_os = "netbsd",
target_os = "freebsd",
target_os = "openbsd",
target_os = "ios",
target_os = "dragonfly"
))]
mod bsd;
#[cfg(target_os = "linux")]
mod linux;
use crate::Result;
use std::fs::{File, OpenOptions};
use std::path::Path;
/// Returns a handle to `/dev/null` opened for both reading and writing.
pub(crate) fn dev_null() -> Result<File> {
    let file = OpenOptions::new().read(true).write(true).open("/dev/null");
    file.map_err(Into::into)
}
/// Opens `path` (expected to be a directory) read-only for use as a preopen.
pub fn preopen_dir<P: AsRef<Path>>(path: P) -> Result<File> {
    match File::open(path) {
        Ok(dir) => Ok(dir),
        Err(e) => Err(e.into()),
    }
}
| 19.647059 | 61 | 0.615269 |
714ef53e9753b54f881fef14aaaed653d1e8d4eb | 8,911 | // Generated from definition io.k8s.api.apps.v1.DeploymentSpec
/// DeploymentSpec is the specification of the desired behavior of the Deployment.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DeploymentSpec {
/// Minimum number of seconds for which a newly created pod should be ready without any of its container crashing, for it to be considered available. Defaults to 0 (pod will be considered available as soon as it is ready)
pub min_ready_seconds: Option<i32>,
/// Indicates that the deployment is paused.
pub paused: Option<bool>,
/// The maximum time in seconds for a deployment to make progress before it is considered to be failed. The deployment controller will continue to process failed deployments and a condition with a ProgressDeadlineExceeded reason will be surfaced in the deployment status. Note that progress will not be estimated during the time a deployment is paused. Defaults to 600s.
pub progress_deadline_seconds: Option<i32>,
/// Number of desired pods. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1.
pub replicas: Option<i32>,
/// The number of old ReplicaSets to retain to allow rollback. This is a pointer to distinguish between explicit zero and not specified. Defaults to 10.
pub revision_history_limit: Option<i32>,
/// Label selector for pods. Existing ReplicaSets whose pods are selected by this will be the ones affected by this deployment. It must match the pod template's labels.
pub selector: crate::v1_13::apimachinery::pkg::apis::meta::v1::LabelSelector,
/// The deployment strategy to use to replace existing pods with new ones.
pub strategy: Option<crate::v1_13::api::apps::v1::DeploymentStrategy>,
/// Template describes the pods that will be created.
pub template: crate::v1_13::api::core::v1::PodTemplateSpec,
}
impl<'de> serde::Deserialize<'de> for DeploymentSpec {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
        // Identifiers for the known JSON keys; anything else maps to `Other`
        // and is skipped during deserialization.
        #[allow(non_camel_case_types)]
        enum Field {
            Key_min_ready_seconds,
            Key_paused,
            Key_progress_deadline_seconds,
            Key_replicas,
            Key_revision_history_limit,
            Key_selector,
            Key_strategy,
            Key_template,
            Other,
        }

        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
                struct Visitor;

                impl<'de> serde::de::Visitor<'de> for Visitor {
                    type Value = Field;

                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(f, "field identifier")
                    }

                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
                        // Map the wire (camelCase) names onto field keys.
                        Ok(match v {
                            "minReadySeconds" => Field::Key_min_ready_seconds,
                            "paused" => Field::Key_paused,
                            "progressDeadlineSeconds" => Field::Key_progress_deadline_seconds,
                            "replicas" => Field::Key_replicas,
                            "revisionHistoryLimit" => Field::Key_revision_history_limit,
                            "selector" => Field::Key_selector,
                            "strategy" => Field::Key_strategy,
                            "template" => Field::Key_template,
                            _ => Field::Other,
                        })
                    }
                }

                deserializer.deserialize_identifier(Visitor)
            }
        }

        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = DeploymentSpec;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "struct DeploymentSpec")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
                let mut value_min_ready_seconds: Option<i32> = None;
                let mut value_paused: Option<bool> = None;
                let mut value_progress_deadline_seconds: Option<i32> = None;
                let mut value_replicas: Option<i32> = None;
                let mut value_revision_history_limit: Option<i32> = None;
                let mut value_selector: Option<crate::v1_13::apimachinery::pkg::apis::meta::v1::LabelSelector> = None;
                let mut value_strategy: Option<crate::v1_13::api::apps::v1::DeploymentStrategy> = None;
                let mut value_template: Option<crate::v1_13::api::core::v1::PodTemplateSpec> = None;

                while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        Field::Key_min_ready_seconds => value_min_ready_seconds = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_paused => value_paused = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_progress_deadline_seconds => value_progress_deadline_seconds = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_replicas => value_replicas = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_revision_history_limit => value_revision_history_limit = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_selector => value_selector = Some(serde::de::MapAccess::next_value(&mut map)?),
                        Field::Key_strategy => value_strategy = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_template => value_template = Some(serde::de::MapAccess::next_value(&mut map)?),
                        Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }

                // `selector` and `template` are the only required fields;
                // missing either is a deserialization error.
                Ok(DeploymentSpec {
                    min_ready_seconds: value_min_ready_seconds,
                    paused: value_paused,
                    progress_deadline_seconds: value_progress_deadline_seconds,
                    replicas: value_replicas,
                    revision_history_limit: value_revision_history_limit,
                    selector: value_selector.ok_or_else(|| serde::de::Error::missing_field("selector"))?,
                    strategy: value_strategy,
                    template: value_template.ok_or_else(|| serde::de::Error::missing_field("template"))?,
                })
            }
        }

        deserializer.deserialize_struct(
            "DeploymentSpec",
            &[
                "minReadySeconds",
                "paused",
                "progressDeadlineSeconds",
                "replicas",
                "revisionHistoryLimit",
                "selector",
                "strategy",
                "template",
            ],
            Visitor,
        )
    }
}
impl serde::Serialize for DeploymentSpec {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
        let mut state = serializer.serialize_struct(
            "DeploymentSpec",
            // Field count: 2 required fields (selector, template) plus one for
            // each optional field that is present.
            2 +
            self.min_ready_seconds.as_ref().map_or(0, |_| 1) +
            self.paused.as_ref().map_or(0, |_| 1) +
            self.progress_deadline_seconds.as_ref().map_or(0, |_| 1) +
            self.replicas.as_ref().map_or(0, |_| 1) +
            self.revision_history_limit.as_ref().map_or(0, |_| 1) +
            self.strategy.as_ref().map_or(0, |_| 1),
        )?;
        // Optional fields are only emitted when set, so absent fields are
        // omitted from the JSON rather than serialized as null.
        if let Some(value) = &self.min_ready_seconds {
            serde::ser::SerializeStruct::serialize_field(&mut state, "minReadySeconds", value)?;
        }
        if let Some(value) = &self.paused {
            serde::ser::SerializeStruct::serialize_field(&mut state, "paused", value)?;
        }
        if let Some(value) = &self.progress_deadline_seconds {
            serde::ser::SerializeStruct::serialize_field(&mut state, "progressDeadlineSeconds", value)?;
        }
        if let Some(value) = &self.replicas {
            serde::ser::SerializeStruct::serialize_field(&mut state, "replicas", value)?;
        }
        if let Some(value) = &self.revision_history_limit {
            serde::ser::SerializeStruct::serialize_field(&mut state, "revisionHistoryLimit", value)?;
        }
        serde::ser::SerializeStruct::serialize_field(&mut state, "selector", &self.selector)?;
        if let Some(value) = &self.strategy {
            serde::ser::SerializeStruct::serialize_field(&mut state, "strategy", value)?;
        }
        serde::ser::SerializeStruct::serialize_field(&mut state, "template", &self.template)?;
        serde::ser::SerializeStruct::end(state)
    }
}
1447ffa549bd92f8ff859258e6d68c241b04dd50 | 18,600 | //! A platform agnostic driver to interface the MFRC522 (RFID reader/writer)
//!
//! This driver was built using [`embedded-hal`] traits.
//!
//! [`embedded-hal`]: https://docs.rs/embedded-hal/~0.1
//!
//! # Examples
//!
//! You'll find an example for the Raspeberry Pi in the `examples` directory. You should find an
//! example for ARM Cortex-M microcontrollers on the [`blue-pill`] repository. If that branch is
//! gone, check the master branch.
//!
//! [`blue-pill`]: https://github.com/japaric/blue-pill/tree/singletons/examples
//!
//! # References
//!
//! - [Identification cards - Contactless integrated circuit(s) cards - Proximity cards - Part 3:
//! Initialization and anticollision][1]
//! - [MFRC522 data sheet][2]
//!
//! [1]: http://wg8.de/wg8n1496_17n3613_Ballot_FCD14443-3.pdf
//! [2]: https://www.nxp.com/docs/en/data-sheet/MFRC522.pdf
#![allow(dead_code)]
//#![deny(missing_docs)]
#![deny(warnings)]
#![no_std]
extern crate embedded_hal as hal;
extern crate generic_array;
use core::mem;
use generic_array::typenum::consts::*;
use generic_array::{ArrayLength, GenericArray};
use hal::blocking::spi;
use hal::digital::v2::OutputPin;
use hal::spi::{Mode, Phase, Polarity};
pub mod mifare;
mod picc;
/// Errors that can occur while talking to the MFRC522 or a PICC.
#[derive(Debug)]
pub enum Error<E, OPE> {
    /// Wrong Block Character Check (BCC)
    Bcc,
    /// FIFO buffer overflow
    BufferOverflow,
    /// Collision
    Collision,
    /// Wrong CRC
    Crc,
    /// Incomplete RX frame
    IncompleteFrame,
    /// Internal temperature sensor detects overheating
    Overheating,
    /// Parity check failed
    Parity,
    /// Error during MFAuthent operation
    Protocol,
    /// SPI bus error
    Spi(E),
    /// Timeout
    Timeout,
    /// Write error reported by the chip
    /// (NOTE(review): exact semantics undocumented here — confirm against the
    /// MFRC522 ErrorReg description)
    Wr,
    /// Output pin operation failed
    OutputPin(OPE),
}
// XXX coherence :-(
// impl<SPI> From<SPI::Error> for Error<SPI>
// where
// SPI: spi::FullDuplex<u8>,
// {
// fn from(e: SPI::Error) -> Error<SPI> {
// Error::Spi(e)
// }
// }
/// MFRC522 driver
pub struct Mfrc522<SPI, NSS> {
    // SPI bus the chip is attached to.
    spi: SPI,
    // Chip-select (NSS) output pin.
    nss: NSS,
}
// Interrupt-flag bit masks, tested against reads of the interrupt registers
// (ERR/IDLE/RX/TIMER against ComIrq; CRC against DivIrq).
const ERR_IRQ: u8 = 1 << 1;
const IDLE_IRQ: u8 = 1 << 4;
const RX_IRQ: u8 = 1 << 5;
const TIMER_IRQ: u8 = 1 << 0;
const CRC_IRQ: u8 = 1 << 2;
impl<E, OPE, NSS, SPI> Mfrc522<SPI, NSS>
where
SPI: spi::Transfer<u8, Error = E> + spi::Write<u8, Error = E>,
NSS: OutputPin<Error = OPE>,
{
    /// Creates a new driver from a SPI driver and a NSS pin
    ///
    /// Performs a soft reset, then configures the timeout timer, ASK
    /// modulation, the CRC preset and finally enables the antenna.
    /// SPI and pin errors are propagated as [`Error`].
    pub fn new(spi: SPI, nss: NSS) -> Result<Self, Error<E, OPE>> {
        let mut mfrc522 = Mfrc522 { spi, nss };

        // soft reset; then block until bit 4 of the command register clears,
        // which the reset loop below uses as the "reset done" condition
        mfrc522.command(Command::SoftReset)?;
        while mfrc522.read(Register::Command)? & (1 << 4) != 0 {}

        // configure timer to operate at 10 KHz.
        // f_timer = 13.56 MHz / (2 + TPrescaler + 2)
        mfrc522.write(Register::Demod, 0x4d | (1 << 4))?;
        mfrc522.write(Register::TMode, (1 << 7) | 0b10)?;
        mfrc522.write(Register::TPrescaler, 165)?;

        // configure timer for a 5 ms timeout
        mfrc522.write(Register::ReloadL, 50)?;

        // forces 100% ASK modulation
        // NOTE my tags don't work without this ...
        mfrc522.write(Register::TxAsk, 1 << 6)?;

        // set preset value for the CRC co-processor to 0x6363
        // in accordance to section 6.2.4 of ISO/IEC FCD 14443-3
        mfrc522.write(Register::Mode, (0x3f & (!0b11)) | 0b01)?;

        // enable the antenna
        mfrc522.write(Register::TxControl, 0x80 | 0b11)?;

        Ok(mfrc522)
    }
/// Sends a REQuest type A to nearby PICCs
pub fn reqa(&mut self) -> Result<AtqA, Error<E, OPE>> {
// NOTE REQA is a short frame (7 bits)
self.transceive(&[picc::REQA], 7)
.map(|bytes| AtqA { bytes })
}
/// Sends a WakeUP type A to nearby PICCs
pub fn wupa(&mut self) -> Result<AtqA, Error<E, OPE>> {
// NOTE WUPA is a short frame (7 bits)
self.transceive(&[picc::WUPA], 7)
.map(|bytes| AtqA { bytes })
}
    /// Sends a HaLT type A to nearby PICCs
    ///
    /// Per ISO/IEC 14443-3 a *timeout* (no PICC response within 1 ms) is the
    /// success case for HLTA; any response means the halt was not acknowledged.
    pub fn hlta(&mut self) -> Result<(), Error<E, OPE>> {
        // HLTA is a standard frame: command bytes followed by a 2-byte CRC
        let mut tx_buffer = [picc::HLTA[0], picc::HLTA[1], 0x00, 0x00];

        // Calculate the crc over the command bytes
        // TODO: the CRC of the fixed HLTA bytes is constant — precompute it
        // and skip the co-processor round trip
        let crc = self.calculate_crc(&tx_buffer[0..2])?;
        tx_buffer[2..4].copy_from_slice(&crc);

        let rx = self.transceive::<U0>(&tx_buffer, 0);
        // The standard says:
        // If the PICC responds with any modulation during a period of 1 ms after the end of the frame containing the
        // HLTA command, this response shall be interpreted as 'not acknowledge'.
        // -> Only timeout is ok
        match rx {
            Ok(_) => Err(Error::Protocol), // In this case a succes is not a success for the HLTA Command
            Err(Error::Timeout) => Ok(()), // This is fine
            Err(err) => Err(err),          // Return all other errors
        }
    }
    /// Selects an idle PICC
    ///
    /// # Panics
    ///
    /// Panics if the PICC reports a cascade tag (double / triple size UID),
    /// which this driver does not support yet.
    ///
    /// NOTE currently this only supports single size UIDs
    // TODO anticollision loop
    // TODO add optional UID to select an specific PICC
    pub fn select(&mut self, _atqa: &AtqA) -> Result<Uid, Error<E, OPE>> {
        let rx = self.transceive::<U5>(&[picc::SEL_CL1, 0x20], 0)?;

        // A leading cascade tag would mean the UID continues in another
        // cascade level, which is unsupported here.
        assert_ne!(
            rx[0],
            picc::CT,
            "double and triple size UIDs are currently not supported"
        );

        // BCC must equal the XOR of the four UID bytes.
        let expected_bcc = rx[4];
        let computed_bcc = rx[0] ^ rx[1] ^ rx[2] ^ rx[3];

        // XXX can this ever fail? (buggy PICC?)
        if computed_bcc != expected_bcc {
            return Err(Error::Bcc);
        }

        // SAFETY: an all-zero bit pattern is a valid `[u8; 9]`, and every byte
        // is overwritten below before use.
        // NOTE(review): a plain `[0u8; 9]` would avoid the `unsafe` entirely
        // (left as-is here since `core::mem` may have no other user and the
        // crate denies warnings).
        let mut tx: [u8; 9] = unsafe { mem::MaybeUninit::zeroed().assume_init() };
        tx[0] = picc::SEL_CL1;
        tx[1] = 0x70;
        tx[2..7].copy_from_slice(&rx);

        let crc = self.calculate_crc(&tx[..7])?;
        tx[7..].copy_from_slice(&crc);

        // enable automatic CRC validation during reception
        let rx2 = self.transceive::<U3>(&tx, 0)?;

        let crc2 = self.calculate_crc(&rx2[..1])?;
        if rx2[1..] != crc2 {
            return Err(Error::Crc);
        }

        let sak = rx2[0];
        let compliant = match (sak & (1 << 2) != 0, sak & (1 << 5) != 0) {
            // indicates that the UID is incomplete -- this is unreachable because we only support
            // single size UIDs
            (_, true) => unreachable!(),
            (true, false) => true,
            (false, false) => false,
        };

        Ok(Uid {
            bytes: [rx[0], rx[1], rx[2], rx[3]],
            compliant,
        })
    }
    /// Authenticated for Mifare Command
    ///
    /// Loads the MFAuthent frame (command byte, block address `add`, 6-byte
    /// `key`, first 4 UID bytes) into the FIFO, runs the on-chip MFAuthent
    /// command and polls the IRQ register until it finishes or the timeout
    /// timer fires.
    pub fn mfauthent(
        &mut self,
        add: u8,
        uid: &Uid,
        key: &mifare::Key,
    ) -> Result<(), Error<E, OPE>> {
        // Write data to the fifo: the first byte selects key A or key B
        // authentication depending on the key variant.
        let mut fifo = [0u8; 12];
        let byte_key;
        match key {
            mifare::Key::KeyA(k) => {
                byte_key = k;
                fifo[0] = mifare::MifareCommand::AuthWithKeyA.value();
            }
            mifare::Key::KeyB(k) => {
                byte_key = k;
                fifo[0] = mifare::MifareCommand::AuthWithKeyB.value();
            }
        }
        fifo[1] = add;
        for (i, &byte) in byte_key.iter().enumerate() {
            // 6 bytes
            fifo[2 + i] = byte;
        }
        for (i, &byte) in uid.bytes().iter().enumerate() {
            // 4 bytes
            fifo[8 + i] = byte;
        }

        // stop any ongoing command
        self.command(Command::Idle)?;
        // Clear interrupt register
        self.write(Register::ComIrq, 0x7f)?;
        // flush FIFO buffer
        self.flush_fifo_buffer()?;
        // write data to transmit to the FIFO buffer
        self.write_many(Register::FifoData, &fifo)?;
        // Execute Authentification
        self.command(Command::MFAuthent)?;
        // wait for authentification complete: poll ComIrq until the chip
        // reports an error or goes idle; a timer interrupt means timeout
        let mut irq;
        loop {
            irq = self.read(Register::ComIrq)?;
            if irq & (ERR_IRQ | IDLE_IRQ) != 0 {
                break;
            } else if irq & TIMER_IRQ != 0 {
                return Err(Error::Timeout);
            }
        }
        // TODO: Check if authentication bit is 1
        Ok(())
    }
/// Stop Crypto Session with writing 0 to bit 3 in Status2Register
pub fn mfstopcrypto(&mut self) -> Result<(), Error<E, OPE>> {
// Send device a HLTA
self.hlta()?;
// Read current register state
let mut status2reg = self.read(Register::Status2Reg)?;
// Reset Bit 3
status2reg &= !0x08;
// Write Register back
self.write(Register::Status2Reg, status2reg)
}
/// Read a datablock in an encrypted session
pub fn mfread(&mut self, add: u8) -> Result<[u8; 16], Error<E, OPE>> {
// Build the tx buffer
let mut buffer = [0; 4];
buffer[0] = mifare::MifareCommand::Read.value();
buffer[1] = add;
// Calculate CRC
let crc = self.calculate_crc(&buffer[0..2])?;
buffer[2..4].copy_from_slice(&crc[..]);
// Send to PICC
let rx: [u8; 18] = self.transceive(&buffer, 0)?.into();
// Check crc
let crc = self.calculate_crc(&rx[..16])?;
if crc != rx[16..] {
Err(Error::Crc)
} else {
let mut data_block = [0; 16];
data_block.clone_from_slice(&rx[..16]);
Ok(data_block)
}
}
// Write a datablock in an encrypted session
pub fn mfwrite(&mut self, add: u8, data: &[u8; 16]) -> Result<(), Error<E, OPE>> {
// Build the tx buffer
let mut buffer = [0; 4];
buffer[0] = mifare::MifareCommand::Write.value();
buffer[1] = add;
// Calculate CRC
let crc = self.calculate_crc(&buffer[0..2])?;
buffer[2..4].copy_from_slice(&crc[..]);
// Send to PICC
let rx: [u8; 1] = self.transceive(&buffer, 0)?.into();
// Check for ACK
if rx[0] != mifare::ACK {
return Err(Error::Protocol);
}
// Build the tx buffer
let mut buffer = [0; 18];
for (i, &byte) in data.iter().enumerate() {
buffer[i] = byte;
}
// Calculate CRC
let crc = self.calculate_crc(&buffer[0..16])?;
buffer[16..18].copy_from_slice(&crc[..]);
// Send to PICC
let rx: [u8; 1] = self.transceive(&buffer, 0)?.into();
// Check for ACK
if rx[0] != mifare::ACK {
return Err(Error::Protocol);
}
Ok(())
}
    /// Returns the version of the MFRC522
    ///
    /// Reads the `Version` register; useful as a sanity check that SPI
    /// communication with the chip works at all.
    pub fn version(&mut self) -> Result<u8, Error<E, OPE>> {
        self.read(Register::Version)
    }
    /// Computes the CRC_A checksum of `data` using the MFRC522's CRC
    /// coprocessor, returning it as `[low byte, high byte]` — the order in
    /// which it is appended to transmitted frames.
    fn calculate_crc(&mut self, data: &[u8]) -> Result<[u8; 2], Error<E, OPE>> {
        // stop any ongoing command
        self.command(Command::Idle)?;
        // clear the CRC_IRQ interrupt flag
        self.write(Register::DivIrq, 1 << 2)?;
        // flush FIFO buffer
        self.flush_fifo_buffer()?;
        // write data to transmit to the FIFO buffer
        self.write_many(Register::FifoData, data)?;
        self.command(Command::CalcCRC)?;
        // TODO timeout when connection to the MFRC522 is lost
        // wait for CRC to complete
        let mut irq;
        loop {
            irq = self.read(Register::DivIrq)?;
            if irq & CRC_IRQ != 0 {
                // stop the CRC coprocessor before reading the result
                self.command(Command::Idle)?;
                let crc = [
                    self.read(Register::CrcResultL)?,
                    self.read(Register::CrcResultH)?,
                ];
                break Ok(crc);
            }
        }
    }
fn check_error_register(&mut self) -> Result<(), Error<E, OPE>> {
const PROTOCOL_ERR: u8 = 1 << 0;
const PARITY_ERR: u8 = 1 << 1;
const CRC_ERR: u8 = 1 << 2;
const COLL_ERR: u8 = 1 << 3;
const BUFFER_OVFL: u8 = 1 << 4;
const TEMP_ERR: u8 = 1 << 6;
const WR_ERR: u8 = 1 << 7;
let err = self.read(Register::Error)?;
if err & PROTOCOL_ERR != 0 {
Err(Error::Protocol)
} else if err & PARITY_ERR != 0 {
Err(Error::Parity)
} else if err & CRC_ERR != 0 {
Err(Error::Crc)
} else if err & COLL_ERR != 0 {
Err(Error::Collision)
} else if err & BUFFER_OVFL != 0 {
Err(Error::BufferOverflow)
} else if err & TEMP_ERR != 0 {
Err(Error::Overheating)
} else if err & WR_ERR != 0 {
Err(Error::Wr)
} else {
Ok(())
}
}
    /// Writes `command` into the command register, starting its execution.
    fn command(&mut self, command: Command) -> Result<(), Error<E, OPE>> {
        self.write(Register::Command, command.value())
    }
    /// Clears the FIFO by setting the FlushBuffer bit (bit 7) of `FifoLevel`.
    fn flush_fifo_buffer(&mut self) -> Result<(), Error<E, OPE>> {
        self.write(Register::FifoLevel, 1 << 7)
    }
fn transceive<RX>(
&mut self,
tx_buffer: &[u8],
tx_last_bits: u8,
) -> Result<GenericArray<u8, RX>, Error<E, OPE>>
where
RX: ArrayLength<u8>,
{
// stop any ongoing command
self.command(Command::Idle)?;
// clear all interrupt flags
self.write(Register::ComIrq, 0x7f)?;
// flush FIFO buffer
self.flush_fifo_buffer()?;
// write data to transmit to the FIFO buffer
self.write_many(Register::FifoData, tx_buffer)?;
// signal command
self.command(Command::Transceive)?;
// configure short frame and start transmission
self.write(Register::BitFraming, (1 << 7) | tx_last_bits)?;
// TODO timeout when connection to the MFRC522 is lost (?)
// wait for transmission + reception to complete
let mut irq;
loop {
irq = self.read(Register::ComIrq)?;
if irq & (RX_IRQ | ERR_IRQ | IDLE_IRQ) != 0 {
break;
} else if irq & TIMER_IRQ != 0 {
return Err(Error::Timeout);
}
}
// XXX do we need a guard here?
// check for any outstanding error
// if irq & ERR_IRQ != 0 {
self.check_error_register()?;
// }
// grab RX data
let mut rx_buffer: GenericArray<u8, RX> =
unsafe { mem::MaybeUninit::zeroed().assume_init() };
{
let rx_buffer: &mut [u8] = &mut rx_buffer;
let received_bytes = self.read(Register::FifoLevel)?;
if received_bytes as usize != rx_buffer.len() {
return Err(Error::IncompleteFrame);
}
self.read_many(Register::FifoData, rx_buffer)?;
}
Ok(rx_buffer)
}
    // lowest level API
    /// Reads a single register over SPI.
    fn read(&mut self, reg: Register) -> Result<u8, Error<E, OPE>> {
        // first byte selects the register; the second clocks out its value
        let mut buffer = [reg.read_address(), 0];
        self.with_nss_low(|mfr| {
            let buffer = mfr.spi.transfer(&mut buffer)?;
            Ok(buffer[1])
        })
    }
    /// Reads `buffer.len()` consecutive values of `reg` into `buffer`.
    ///
    /// NOTE(review): panics if `buffer` is empty (`n - 1` underflows) —
    /// callers in this file always pass non-empty slices; confirm before
    /// exposing this more widely.
    fn read_many<'b>(
        &mut self,
        reg: Register,
        buffer: &'b mut [u8],
    ) -> Result<&'b [u8], Error<E, OPE>> {
        let byte = reg.read_address();
        self.with_nss_low(move |mfr| {
            // address phase: the byte clocked out here is discarded
            mfr.spi.transfer(&mut [byte])?;
            let n = buffer.len();
            // keep re-sending the read address so each transfer clocks out
            // the value requested by the previous one
            for slot in &mut buffer[..n - 1] {
                *slot = mfr.spi.transfer(&mut [byte])?[0];
            }
            // send 0 (no further request) to clock out the final value
            buffer[n - 1] = mfr.spi.transfer(&mut [0])?[0];
            Ok(&*buffer)
        })
    }
fn rmw<F>(&mut self, reg: Register, f: F) -> Result<(), Error<E, OPE>>
where
F: FnOnce(u8) -> u8,
{
let byte = self.read(reg)?;
self.write(reg, f(byte))?;
Ok(())
}
    /// Writes `val` to a single register over SPI.
    fn write(&mut self, reg: Register, val: u8) -> Result<(), Error<E, OPE>> {
        self.with_nss_low(|mfr| mfr.spi.write(&[reg.write_address(), val]))
    }
    /// Writes all of `bytes` to `reg` in a single chip-select window
    /// (address byte first, then the payload).
    fn write_many(&mut self, reg: Register, bytes: &[u8]) -> Result<(), Error<E, OPE>> {
        self.with_nss_low(|mfr| {
            mfr.spi.write(&[reg.write_address()])?;
            mfr.spi.write(bytes)?;
            Ok(())
        })
    }
    /// Runs `f` with the NSS (chip select) pin driven low, restoring it
    /// high afterwards.
    fn with_nss_low<F, T>(&mut self, f: F) -> Result<T, Error<E, OPE>>
    where
        F: FnOnce(&mut Self) -> Result<T, E>,
    {
        self.nss.set_low().map_err(Error::OutputPin)?;
        let f_result = f(self).map_err(Error::Spi);
        // always try to release the pin, even if the SPI transaction failed
        let pin_result = self.nss.set_high().map_err(Error::OutputPin);
        // only surface the pin error when the transaction itself succeeded,
        // so the more informative SPI error is not masked
        if f_result.is_ok() {
            pin_result?;
        }
        f_result
    }
}
/// SPI mode
///
/// Mode 0: clock idles low, data captured on the first (rising) edge.
pub const MODE: Mode = Mode {
    polarity: Polarity::IdleLow,
    phase: Phase::CaptureOnFirstTransition,
};
/// Commands understood by the MFRC522 (written to the `Command` register)
#[derive(Clone, Copy)]
enum Command {
    Idle,
    Mem,
    GenerateRandomID,
    CalcCRC,
    Transmit,
    NoCmdChange,
    Receive,
    Transceive,
    MFAuthent,
    SoftReset,
}
impl Command {
    /// The 4-bit command code as written into the `Command` register.
    fn value(&self) -> u8 {
        match *self {
            Command::Idle => 0b0000,
            Command::Mem => 0b0001,
            Command::GenerateRandomID => 0b0010,
            Command::CalcCRC => 0b0011,
            Command::Transmit => 0b0100,
            Command::NoCmdChange => 0b0111,
            Command::Receive => 0b1000,
            Command::Transceive => 0b1100,
            Command::MFAuthent => 0b1110,
            Command::SoftReset => 0b1111,
        }
    }
}
/// MFRC522 register addresses; the discriminant is the raw register address
/// (shifted into the SPI address byte by `read_address`/`write_address`)
#[derive(Clone, Copy)]
enum Register {
    BitFraming = 0x0d,
    Coll = 0x0e,
    ComIrq = 0x04,
    Command = 0x01,
    CrcResultH = 0x21,
    CrcResultL = 0x22,
    Demod = 0x19,
    DivIrq = 0x05,
    Error = 0x06,
    FifoData = 0x09,
    FifoLevel = 0x0a,
    ModWidth = 0x24,
    Mode = 0x11,
    ReloadH = 0x2c,
    ReloadL = 0x2d,
    RxMode = 0x13,
    TCountValH = 0x2e,
    TCountValL = 0x2f,
    TMode = 0x2a,
    TPrescaler = 0x2b,
    TxAsk = 0x15,
    TxControl = 0x14,
    TxMode = 0x12,
    Version = 0x37,
    Status2Reg = 0x08,
}
// SPI address byte: bit 7 set marks a read access ...
const R: u8 = 1 << 7;
// ... and cleared marks a write access
#[allow(dead_code)]
const W: u8 = 0 << 7;
impl Register {
    /// SPI address byte for reading this register: address in bits 6..=1,
    /// bit 7 set (read).
    fn read_address(&self) -> u8 {
        ((*self as u8) << 1) | R
    }
    /// SPI address byte for writing this register: address in bits 6..=1,
    /// bit 7 cleared (write).
    fn write_address(&self) -> u8 {
        ((*self as u8) << 1) | W
    }
}
/// Answer To reQuest A
pub struct AtqA {
    // raw 2-byte ATQA answer received from the PICC
    bytes: GenericArray<u8, U2>,
}
/// Single size UID
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct Uid {
    // the 4 UID bytes reported by the PICC during selection
    bytes: [u8; 4],
    // ISO/IEC 14443-4 compliance as reported in the SAK byte
    compliant: bool,
}
impl Uid {
    /// The bytes of the UID
    pub fn bytes(&self) -> &[u8; 4] {
        &self.bytes
    }
    /// Is the PICC compliant with ISO/IEC 14443-4?
    pub fn is_compliant(&self) -> bool {
        self.compliant
    }
}
| 27.474151 | 118 | 0.532258 |
64b4fc730b944c0e71e4260987abc07c7c764f86 | 3,807 | // بِسْمِ اللَّهِ الرَّحْمَنِ الرَّحِيم
// This file is part of Setheum.
// Copyright (C) 2019-2021 Setheum Labs.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::precompile::PrecompileOutput;
use frame_support::{log, sp_runtime::FixedPointNumber};
use module_evm::{Context, ExitError, ExitSucceed, Precompile};
use num_enum::{IntoPrimitive, TryFromPrimitive};
use primitives::CurrencyId;
use sp_runtime::RuntimeDebug;
use sp_std::{fmt::Debug, marker::PhantomData, prelude::*, result};
use super::input::{Input, InputT, Output};
use module_support::{
AddressMapping as AddressMappingT, CurrencyIdMapping as CurrencyIdMappingT, PriceProvider as PriceProviderT,
};
/// The `Oracle` impl precompile.
///
/// `input` data starts with `action` (a 4-byte function selector).
///
/// Actions:
/// - Get price. Rest `input` bytes: `currency_id`.
pub struct OraclePrecompile<AccountId, AddressMapping, CurrencyIdMapping, PriceProvider>(
    PhantomData<(AccountId, AddressMapping, CurrencyIdMapping, PriceProvider)>,
);
/// Supported actions; the discriminant is generated from the Solidity
/// function signature by `generate_function_selector`.
#[primitives_proc_macro::generate_function_selector]
#[derive(RuntimeDebug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)]
#[repr(u32)]
pub enum Action {
    GetPrice = "getPrice(address)",
}
impl<AccountId, AddressMapping, CurrencyIdMapping, PriceProvider> Precompile
    for OraclePrecompile<AccountId, AddressMapping, CurrencyIdMapping, PriceProvider>
where
    AccountId: Debug + Clone,
    AddressMapping: AddressMappingT<AccountId>,
    CurrencyIdMapping: CurrencyIdMappingT,
    PriceProvider: PriceProviderT<CurrencyId>,
{
    /// Dispatches on the 4-byte selector at the start of `input`.
    ///
    /// `getPrice(address)`: returns the price of `currency_id` rescaled from
    /// 18 decimals to the token's own decimals; returns 0 when the price or
    /// the decimals are unavailable (Solidity is expected to handle 0).
    fn execute(
        input: &[u8],
        _target_gas: Option<u64>,
        _context: &Context,
    ) -> result::Result<PrecompileOutput, ExitError> {
        let input = Input::<Action, AccountId, AddressMapping, CurrencyIdMapping>::new(input);
        let action = input.action()?;
        match action {
            Action::GetPrice => {
                let currency_id = input.currency_id_at(1)?;
                let price = PriceProvider::get_price(currency_id).unwrap_or_default();
                // Rescale factor 10^(18 - decimals). `checked_sub`/`checked_pow`
                // yield `None` when the decimals are unknown or exceed 18; the
                // previous code computed `18 - decimals` (possible underflow)
                // and fell back to a multiplier of 0, which made the later
                // `wrapping_div` panic with a division by zero.
                let maybe_adjustment_multiplier = CurrencyIdMapping::decimals(currency_id)
                    .and_then(|decimals| 18u32.checked_sub(decimals.into()))
                    .and_then(|exponent| 10u128.checked_pow(exponent));
                let output = match maybe_adjustment_multiplier {
                    Some(adjustment_multiplier) => {
                        price.into_inner().wrapping_div(adjustment_multiplier)
                    }
                    // No usable multiplier: report price 0 instead of panicking.
                    None => 0,
                };
                log::debug!(target: "evm", "oracle: getPrice currency_id: {:?}, price: {:?}, maybe_adjustment_multiplier: {:?}, output: {:?}", currency_id, price, maybe_adjustment_multiplier, output);
                Ok(PrecompileOutput {
                    exit_status: ExitSucceed::Returned,
                    cost: 0,
                    output: Output::default().encode_u128(output),
                    logs: Default::default(),
                })
            }
        }
    }
}
| 34.926606 | 176 | 0.719727 |
e5686e44a3f53b0c84e8a76806cb7f70fd527819 | 1,849 | use sha256::sha256::Sha256;
/// convert bytes to a lowercase hex string
/// code originally adapted from the hex project: https://docs.rs/crate/hex/0.1.0/source/src/lib.rs
///
/// Rewritten without `unsafe`: every pushed character is ASCII from the
/// lookup table, so `String::from_utf8_unchecked` was unnecessary.
fn to_hex_string(data: &[u8]) -> String {
    const CHARS: &[u8] = b"0123456789abcdef";
    let mut out = String::with_capacity(data.len() * 2);
    for &byte in data {
        // high nibble first, then low nibble
        out.push(CHARS[(byte >> 4) as usize] as char);
        out.push(CHARS[(byte & 0xf) as usize] as char);
    }
    out
}
fn main() {
    // progressively longer test inputs; each is hashed and printed alongside
    // its SHA-256 digest in a fixed-width two-column layout
    let inputs = [
        "this",
        "this is",
        "this is a",
        "this is a test",
        "this is a test string",
        "this is a test string to",
        "this is a test string to test",
        "this is a test string to test sha256",
    ];
    for input in inputs.iter() {
        let hash = Sha256::digest(input.as_bytes());
        println!("{: <33} => {: <33}", input, to_hex_string(&hash));
    }
}
| 40.195652 | 82 | 0.611141 |
abc77a39d6075422e7e73fd8c60c4a1bc8faeb47 | 8,025 | use tracing::{debug, warn};
use crate::{
body::BodySize,
bytes::BytesMut,
date::DateTime,
http::{
header::{HeaderMap, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING},
response::Parts,
Extensions, StatusCode, Version,
},
};
use super::{
buf::BufWrite,
codec::TransferCoding,
context::{ConnectionType, Context},
error::{Parse, ProtoError},
};
impl<D, const MAX_HEADERS: usize> Context<'_, D, MAX_HEADERS>
where
    D: DateTime,
{
    /// Write the interim `HTTP/1.1 100 Continue` response head to `buf`.
    pub(super) fn encode_continue<W>(&mut self, buf: &mut W)
    where
        W: BufWrite,
    {
        buf.buf_static(b"HTTP/1.1 100 Continue\r\n\r\n");
    }
    /// Encode the response head (status line and headers) of `parts` into
    /// `buf`. `size` describes the body that will follow; the returned
    /// [`TransferCoding`] tells the caller how that body must be encoded.
    pub(super) fn encode_head<W>(
        &mut self,
        parts: Parts,
        size: BodySize,
        buf: &mut W,
    ) -> Result<TransferCoding, ProtoError>
    where
        W: BufWrite,
    {
        buf.buf_head(|buf| self.encode_head_inner(parts, size, buf))
    }
    // actual head encoding, split out so `encode_head` can wrap it in `buf_head`
    fn encode_head_inner(
        &mut self,
        parts: Parts,
        size: BodySize,
        buf: &mut BytesMut,
    ) -> Result<TransferCoding, ProtoError> {
        let version = parts.version;
        let status = parts.status;
        // decide if content-length or transfer-encoding header would be skipped.
        let skip_len = match (status, version) {
            (StatusCode::SWITCHING_PROTOCOLS, _) => true,
            // Sending content-length or transfer-encoding header on 2xx response
            // to CONNECT is forbidden in RFC 7231.
            (s, _) if self.is_connect_method() && s.is_success() => true,
            (s, _) if s.is_informational() => {
                warn!(target: "h1_encode", "response with 1xx status code not supported");
                return Err(ProtoError::Parse(Parse::StatusCode));
            }
            _ => false,
        };
        // In some error cases, we don't know about the invalid message until already
        // pushing some bytes onto the `buf`. In those cases, we don't want to send
        // the half-pushed message, so rewind to before.
        // let orig_len = buf.len();
        // encode version, status code and reason
        encode_version_status_reason(buf, version, status);
        self.encode_headers(parts.headers, parts.extensions, size, buf, skip_len)
    }
}
#[inline]
fn encode_version_status_reason(buf: &mut BytesMut, version: Version, status: StatusCode) {
    // fast path: the overwhelmingly common "200 OK" over HTTP/1.1 is a
    // single static slice
    if version == Version::HTTP_11 && status == StatusCode::OK {
        buf.extend_from_slice(b"HTTP/1.1 200 OK\r\n");
        return;
    }
    // version prefix; anything other than HTTP/1.0 is emitted as HTTP/1.1
    // (logging a debug message for unexpected versions)
    match version {
        Version::HTTP_11 => buf.extend_from_slice(b"HTTP/1.1 "),
        Version::HTTP_10 => buf.extend_from_slice(b"HTTP/1.0 "),
        _ => {
            debug!(target: "h1_encode", "response with unexpected response version");
            buf.extend_from_slice(b"HTTP/1.1 ");
        }
    }
    // a reason MUST be written, as many parsers will expect it.
    let reason = status.canonical_reason().unwrap_or("<none>").as_bytes();
    let status = status.as_str().as_bytes();
    buf.reserve(status.len() + reason.len() + 3);
    buf.extend_from_slice(status);
    buf.extend_from_slice(b" ");
    buf.extend_from_slice(reason);
    buf.extend_from_slice(b"\r\n");
}
impl<D, const MAX_HEADERS: usize> Context<'_, D, MAX_HEADERS>
where
    D: DateTime,
{
    /// Encode `headers` into `buf`, deciding the body [`TransferCoding`].
    ///
    /// `skip_len` suppresses content-length/transfer-encoding emission (set
    /// by the caller for responses that must not carry them). The drained
    /// header map and cleared extensions are put back into this context's
    /// caches for reuse.
    pub fn encode_headers(
        &mut self,
        mut headers: HeaderMap,
        mut extensions: Extensions,
        size: BodySize,
        buf: &mut BytesMut,
        mut skip_len: bool,
    ) -> Result<TransferCoding, ProtoError> {
        let mut skip_date = false;
        let mut encoding = TransferCoding::eof();
        for (name, value) in headers.drain() {
            let name = name.expect("Handling optional header name is not implemented");
            // TODO: more spec check needed. the current check barely does anything.
            match name {
                CONTENT_LENGTH => {
                    debug_assert!(!skip_len, "CONTENT_LENGTH header can not be set");
                    let value = value
                        .to_str()
                        .ok()
                        .and_then(|v| v.parse().ok())
                        .ok_or(Parse::HeaderValue)?;
                    encoding = TransferCoding::length(value);
                    skip_len = true;
                }
                TRANSFER_ENCODING => {
                    debug_assert!(!skip_len, "TRANSFER_ENCODING header can not be set");
                    encoding = TransferCoding::encode_chunked();
                    skip_len = true;
                }
                CONNECTION => match self.ctype() {
                    // skip write header on close condition.
                    // the header is checked again and written properly afterwards.
                    ConnectionType::Close => continue,
                    _ => {
                        // connection header may carry a comma separated list of
                        // tokens; scan them all for close/keep-alive/upgrade
                        for val in value.to_str().map_err(|_| Parse::HeaderValue)?.split(',') {
                            let val = val.trim();
                            if val.eq_ignore_ascii_case("close") {
                                self.set_ctype(ConnectionType::Close);
                            } else if val.eq_ignore_ascii_case("keep-alive") {
                                self.set_ctype(ConnectionType::KeepAlive);
                            } else if val.eq_ignore_ascii_case("upgrade") {
                                encoding = TransferCoding::upgrade();
                                break;
                            }
                        }
                    }
                },
                DATE => skip_date = true,
                _ => {}
            }
            // write the header line: "name: value\r\n"
            let name = name.as_str().as_bytes();
            let value = value.as_bytes();
            buf.reserve(name.len() + value.len() + 4);
            buf.extend_from_slice(name);
            buf.extend_from_slice(b": ");
            buf.extend_from_slice(value);
            buf.extend_from_slice(b"\r\n");
        }
        // close connections get exactly one "connection: close" header,
        // written here rather than in the drain loop above
        if matches!(self.ctype(), ConnectionType::Close) {
            buf.extend_from_slice(b"connection: close\r\n");
        }
        // encode transfer-encoding or content-length
        if !skip_len {
            match size {
                BodySize::None => {
                    encoding = TransferCoding::eof();
                }
                BodySize::Stream => {
                    buf.extend_from_slice(b"transfer-encoding: chunked\r\n");
                    encoding = TransferCoding::encode_chunked();
                }
                BodySize::Sized(size) => {
                    let mut buffer = itoa::Buffer::new();
                    let buffer = buffer.format(size).as_bytes();
                    buf.reserve(buffer.len() + 18);
                    buf.extend_from_slice(b"content-length: ");
                    buf.extend_from_slice(buffer);
                    buf.extend_from_slice(b"\r\n");
                    encoding = TransferCoding::length(size as u64);
                }
            }
        }
        if self.is_head_method() {
            debug_assert_eq!(
                size,
                BodySize::None,
                "Response to request with HEAD method must not have a body"
            );
            encoding = TransferCoding::eof();
        }
        // set date header if there is not any.
        if !skip_date {
            buf.reserve(D::DATE_VALUE_LENGTH + 10);
            buf.extend_from_slice(b"date: ");
            self.date.with_date(|slice| buf.extend_from_slice(slice));
            // this branch also terminates the head with the final blank line
            buf.extend_from_slice(b"\r\n\r\n");
        } else {
            buf.extend_from_slice(b"\r\n");
        }
        // put header map back to cache.
        self.replace_headers(headers);
        // put extension back to cache;
        extensions.clear();
        self.replace_extensions(extensions);
        Ok(encoding)
    }
}
| 34.148936 | 95 | 0.521994 |
8f31ba0c2e88aa8bef6d6fdec033122cb27aacab | 2,299 | // Sample code for stm32f3discovery (STM32F303VC) board
// Add following to Cargo.toml
// m = "0.1.1"
// f3 = "0.6.1"
// cortex-m = "0.6.3"
// cortex-m-rt = "0.6.3"
// panic-itm = "0.4.0"
// panic-halt = "0.2.0"
// dps310 = "0.1.2"
#![feature(core_intrinsics)]
#![no_main]
#![no_std]
#[allow(unused_imports)]
#[allow(unused_extern_crates)] // NOTE(allow) bug rust-lang/rust#53964
extern crate panic_itm; // panic handler
pub use cortex_m::{asm::bkpt, iprint, iprintln, peripheral::ITM};
pub use cortex_m_rt::entry;
pub use f3::{
hal::{delay::Delay, i2c::I2c, prelude::*, stm32f30x},
};
use dps310::{DPS310, self};
#[entry]
fn main() -> ! {
    let mut cp = cortex_m::Peripherals::take().unwrap();
    let dp = stm32f30x::Peripherals::take().unwrap();
    // setup ITM output
    let stim = &mut cp.ITM.stim[0];
    // clock configuration must happen before constructing Delay/I2C
    let mut flash = dp.FLASH.constrain();
    let mut rcc = dp.RCC.constrain();
    let clocks = rcc.cfgr.freeze(&mut flash.acr);
    let mut delay = Delay::new(cp.SYST, clocks);
    // PB6 = SCL, PB7 = SDA (alternate function 4) for I2C1 at 400 kHz
    let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);
    let scl = gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl);
    let sda = gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl);
    let i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1);
    // DPS310 I2C address
    let address = 0x77;
    let mut dps = DPS310::new(i2c, address, &dps310::Config::new()).unwrap();
    // poll every 200 ms until the sensor reports init complete
    let mut init_done = false;
    iprintln!(stim, "Wait for init done..");
    while !init_done {
        let compl = dps.init_complete();
        init_done = match compl {
            Ok(c) => c,
            // treat read errors as "not ready yet" and keep polling
            Err(_e) => false
        };
        delay.delay_ms(200_u8);
    }
    iprintln!(stim, "pressure sensor init done");
    // request continuous temperature + pressure measurements
    dps.trigger_measurement(true, true, true).unwrap();
    // print calibrated readings forever, whenever new data is ready
    loop {
        delay.delay_ms(200_u8);
        if dps.temp_ready().unwrap() {
            let temp = dps.read_temp_calibrated().unwrap();
            iprintln!(stim, "Temperature: {:.1} [C]", temp);
        }
        if dps.pres_ready().unwrap() {
            let pressure = dps.read_pressure_calibrated().unwrap();
            iprintln!(stim, "pressure: {:.1} [Pa]", pressure);
        }
    }
}
11ec5b6ca72be9a4b39fc92c0f6362ab8fb6507e | 15,919 | //! Stand-alone WebAssembly to Cranelift IR translator.
//!
//! This module defines the `FuncTranslator` type which can translate a single WebAssembly
//! function to Cranelift IR guided by a `FuncEnvironment` which provides information about the
//! WebAssembly module and the runtime environment.
use crate::code_translator::{bitcast_arguments, translate_operator, wasm_param_types};
use crate::environ::{FuncEnvironment, ReturnMode, WasmResult};
use crate::state::{FuncTranslationState, ModuleTranslationState};
use crate::translation_utils::get_vmctx_value_label;
use crate::wasm_unsupported;
use core::convert::TryInto;
use cranelift_codegen::entity::EntityRef;
use cranelift_codegen::ir::{self, Block, InstBuilder, ValueLabel};
use cranelift_codegen::timing;
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext, Variable};
use log::info;
use wasmparser::{self, BinaryReader};
/// WebAssembly to Cranelift IR function translator.
///
/// A `FuncTranslator` is used to translate a binary WebAssembly function into Cranelift IR guided
/// by a `FuncEnvironment` object. A single translator instance can be reused to translate multiple
/// functions which will reduce heap allocation traffic.
pub struct FuncTranslator {
    // reusable builder context; cleared and reused for every function
    func_ctx: FunctionBuilderContext,
    // reusable translation state (operand stack and control stack)
    state: FuncTranslationState,
}
impl FuncTranslator {
    /// Create a new translator.
    pub fn new() -> Self {
        Self {
            func_ctx: FunctionBuilderContext::new(),
            state: FuncTranslationState::new(),
        }
    }
    /// Translate a binary WebAssembly function.
    ///
    /// The `code` slice contains the binary WebAssembly *function code* as it appears in the code
    /// section of a WebAssembly module, not including the initial size of the function code. The
    /// slice is expected to contain two parts:
    ///
    /// - The declaration of *locals*, and
    /// - The function *body* as an expression.
    ///
    /// See [the WebAssembly specification][wasm].
    ///
    /// [wasm]: https://webassembly.github.io/spec/core/binary/modules.html#code-section
    ///
    /// The Cranelift IR function `func` should be completely empty except for the `func.signature`
    /// and `func.name` fields. The signature may contain special-purpose arguments which are not
    /// regarded as WebAssembly local variables. Any signature arguments marked as
    /// `ArgumentPurpose::Normal` are made accessible as WebAssembly local variables.
    ///
    pub fn translate<FE: FuncEnvironment + ?Sized>(
        &mut self,
        module_translation_state: &ModuleTranslationState,
        code: &[u8],
        code_offset: usize,
        func: &mut ir::Function,
        environ: &mut FE,
    ) -> WasmResult<()> {
        // `code_offset` makes source locations in the IR relative to the
        // module file rather than the function body
        self.translate_from_reader(
            module_translation_state,
            BinaryReader::new_with_offset(code, code_offset),
            func,
            environ,
        )
    }
    /// Translate a binary WebAssembly function from a `BinaryReader`.
    pub fn translate_from_reader<FE: FuncEnvironment + ?Sized>(
        &mut self,
        module_translation_state: &ModuleTranslationState,
        mut reader: BinaryReader,
        func: &mut ir::Function,
        environ: &mut FE,
    ) -> WasmResult<()> {
        let _tt = timing::wasm_translate_function();
        info!(
            "translate({} bytes, {}{})",
            reader.bytes_remaining(),
            func.name,
            func.signature
        );
        debug_assert_eq!(func.dfg.num_blocks(), 0, "Function must be empty");
        debug_assert_eq!(func.dfg.num_insts(), 0, "Function must be empty");
        // This clears the `FunctionBuilderContext`.
        let mut builder = FunctionBuilder::new(func, &mut self.func_ctx);
        builder.set_srcloc(cur_srcloc(&reader));
        let entry_block = builder.create_block();
        builder.append_block_params_for_function_params(entry_block);
        builder.switch_to_block(entry_block); // This also creates values for the arguments.
        builder.seal_block(entry_block); // Declare all predecessors known.
        // Make sure the entry block is inserted in the layout before we make any callbacks to
        // `environ`. The callback functions may need to insert things in the entry block.
        builder.ensure_inserted_block();
        let num_params = declare_wasm_parameters(&mut builder, entry_block, environ);
        // Set up the translation state with a single pushed control block representing the whole
        // function and its return values.
        let exit_block = builder.create_block();
        builder.append_block_params_for_function_returns(exit_block);
        self.state.initialize(&builder.func.signature, exit_block);
        // locals precede the body in the binary encoding
        parse_local_decls(&mut reader, &mut builder, num_params, environ)?;
        parse_function_body(
            module_translation_state,
            reader,
            &mut builder,
            &mut self.state,
            environ,
        )?;
        builder.finalize();
        Ok(())
    }
}
/// Declare local variables for the signature parameters that correspond to WebAssembly locals.
///
/// Special-purpose parameters (e.g. the `vmctx` pointer) do not become
/// WebAssembly locals; the `vmctx` value only receives a debug value label.
///
/// Return the number of local variables declared.
fn declare_wasm_parameters<FE: FuncEnvironment + ?Sized>(
    builder: &mut FunctionBuilder,
    entry_block: Block,
    environ: &FE,
) -> usize {
    let sig_len = builder.func.signature.params.len();
    let mut next_local = 0;
    for i in 0..sig_len {
        let param_type = builder.func.signature.params[i];
        // There may be additional special-purpose parameters in addition to the normal WebAssembly
        // signature parameters. For example, a `vmctx` pointer.
        if environ.is_wasm_parameter(&builder.func.signature, i) {
            // This is a normal WebAssembly signature parameter, so create a local for it.
            let local = Variable::new(next_local);
            builder.declare_var(local, param_type.value_type);
            next_local += 1;
            let param_value = builder.block_params(entry_block)[i];
            builder.def_var(local, param_value);
        }
        if param_type.purpose == ir::ArgumentPurpose::VMContext {
            // label the vmctx value so debuggers can identify it
            let param_value = builder.block_params(entry_block)[i];
            builder.set_val_label(param_value, get_vmctx_value_label());
        }
    }
    next_local
}
/// Parse the local variable declarations that precede the function body.
///
/// Declare local variables, starting from `num_params`.
fn parse_local_decls<FE: FuncEnvironment + ?Sized>(
    reader: &mut BinaryReader,
    builder: &mut FunctionBuilder,
    num_params: usize,
    environ: &mut FE,
) -> WasmResult<()> {
    let mut next_local = num_params;
    let local_count = reader.read_local_count()?;
    // running total of declared locals, maintained by `read_local_decl`
    let mut locals_total = 0;
    for _ in 0..local_count {
        builder.set_srcloc(cur_srcloc(reader));
        // each declaration entry is a (count, type) pair
        let (count, ty) = reader.read_local_decl(&mut locals_total)?;
        declare_locals(builder, count, ty, &mut next_local, environ)?;
    }
    Ok(())
}
/// Declare `count` local variables of the same type, starting from `next_local`.
///
/// Fail of too many locals are declared in the function, or if the type is not valid for a local.
fn declare_locals<FE: FuncEnvironment + ?Sized>(
    builder: &mut FunctionBuilder,
    count: u32,
    wasm_type: wasmparser::Type,
    next_local: &mut usize,
    environ: &mut FE,
) -> WasmResult<()> {
    // All locals are initialized to 0.
    use wasmparser::Type::*;
    let zeroval = match wasm_type {
        I32 => builder.ins().iconst(ir::types::I32, 0),
        I64 => builder.ins().iconst(ir::types::I64, 0),
        F32 => builder.ins().f32const(ir::immediates::Ieee32::with_bits(0)),
        F64 => builder.ins().f64const(ir::immediates::Ieee64::with_bits(0)),
        V128 => {
            let constant_handle = builder.func.dfg.constants.insert([0; 16].to_vec().into());
            builder.ins().vconst(ir::types::I8X16, constant_handle)
        }
        ExternRef | FuncRef => {
            // reference types are zero-initialized with a null reference
            environ.translate_ref_null(builder.cursor(), wasm_type.try_into()?)?
        }
        ty => return Err(wasm_unsupported!("unsupported local type {:?}", ty)),
    };
    let ty = builder.func.dfg.value_type(zeroval);
    // all `count` locals share the single zero value created above
    for _ in 0..count {
        let local = Variable::new(*next_local);
        builder.declare_var(local, ty);
        builder.def_var(local, zeroval);
        builder.set_val_label(zeroval, ValueLabel::new(*next_local));
        *next_local += 1;
    }
    Ok(())
}
/// Parse the function body in `reader`.
///
/// This assumes that the local variable declarations have already been parsed and function
/// arguments and locals are declared in the builder.
fn parse_function_body<FE: FuncEnvironment + ?Sized>(
    module_translation_state: &ModuleTranslationState,
    mut reader: BinaryReader,
    builder: &mut FunctionBuilder,
    state: &mut FuncTranslationState,
    environ: &mut FE,
) -> WasmResult<()> {
    // The control stack is initialized with a single block representing the whole function.
    debug_assert_eq!(state.control_stack.len(), 1, "State not initialized");
    // Keep going until the final `End` operator which pops the outermost block.
    while !state.control_stack.is_empty() {
        builder.set_srcloc(cur_srcloc(&reader));
        let op = reader.read_operator()?;
        // environment hooks bracket every operator translation
        environ.before_translate_operator(&op, builder, state)?;
        translate_operator(module_translation_state, &op, builder, state, environ)?;
        environ.after_translate_operator(&op, builder, state)?;
    }
    // The final `End` operator left us in the exit block where we need to manually add a return
    // instruction.
    //
    // If the exit block is unreachable, it may not have the correct arguments, so we would
    // generate a return instruction that doesn't match the signature.
    if state.reachable {
        debug_assert!(builder.is_pristine());
        if !builder.is_unreachable() {
            match environ.return_mode() {
                ReturnMode::NormalReturns => {
                    // returned values may need bitcasts to match the declared
                    // signature types (e.g. vector types)
                    let return_types = wasm_param_types(&builder.func.signature.returns, |i| {
                        environ.is_wasm_return(&builder.func.signature, i)
                    });
                    bitcast_arguments(&mut state.stack, &return_types, builder);
                    builder.ins().return_(&state.stack)
                }
                ReturnMode::FallthroughReturn => builder.ins().fallthrough_return(&state.stack),
            };
        }
    }
    // Discard any remaining values on the stack. Either we just returned them,
    // or the end of the function is unreachable.
    state.stack.clear();
    debug_assert!(reader.eof());
    Ok(())
}
/// Get the current source location from a reader.
fn cur_srcloc(reader: &BinaryReader) -> ir::SourceLoc {
    // We record source locations as byte code offsets relative to the beginning of the file.
    // This will wrap around if byte code is larger than 4 GB.
    ir::SourceLoc::new(reader.original_position() as u32)
}
#[cfg(test)]
mod tests {
    use super::{FuncTranslator, ReturnMode};
    use crate::environ::DummyEnvironment;
    use crate::ModuleTranslationState;
    use cranelift_codegen::ir::types::I32;
    use cranelift_codegen::{ir, isa, settings, Context};
    use log::debug;
    use target_lexicon::PointerWidth;
    // Each test hand-assembles a tiny wasm function body, translates it with
    // `FuncTranslator`, and runs the CLIF verifier on the result.
    #[test]
    fn small1() {
        // Implicit return.
        //
        // (func $small1 (param i32) (result i32)
        //     (i32.add (get_local 0) (i32.const 1))
        // )
        const BODY: [u8; 7] = [
            0x00, // local decl count
            0x20, 0x00, // get_local 0
            0x41, 0x01, // i32.const 1
            0x6a, // i32.add
            0x0b, // end
        ];
        let mut trans = FuncTranslator::new();
        let flags = settings::Flags::new(settings::builder());
        let runtime = DummyEnvironment::new(
            isa::TargetFrontendConfig {
                default_call_conv: isa::CallConv::Fast,
                pointer_width: PointerWidth::U64,
            },
            ReturnMode::NormalReturns,
            false,
        );
        let module_translation_state = ModuleTranslationState::new();
        let mut ctx = Context::new();
        ctx.func.name = ir::ExternalName::testcase("small1");
        ctx.func.signature.params.push(ir::AbiParam::new(I32));
        ctx.func.signature.returns.push(ir::AbiParam::new(I32));
        trans
            .translate(
                &module_translation_state,
                &BODY,
                0,
                &mut ctx.func,
                &mut runtime.func_env(),
            )
            .unwrap();
        debug!("{}", ctx.func.display(None));
        ctx.verify(&flags).unwrap();
    }
    #[test]
    fn small2() {
        // Same as above, but with an explicit return instruction.
        //
        // (func $small2 (param i32) (result i32)
        //     (return (i32.add (get_local 0) (i32.const 1)))
        // )
        const BODY: [u8; 8] = [
            0x00, // local decl count
            0x20, 0x00, // get_local 0
            0x41, 0x01, // i32.const 1
            0x6a, // i32.add
            0x0f, // return
            0x0b, // end
        ];
        let mut trans = FuncTranslator::new();
        let flags = settings::Flags::new(settings::builder());
        let runtime = DummyEnvironment::new(
            isa::TargetFrontendConfig {
                default_call_conv: isa::CallConv::Fast,
                pointer_width: PointerWidth::U64,
            },
            ReturnMode::NormalReturns,
            false,
        );
        let module_translation_state = ModuleTranslationState::new();
        let mut ctx = Context::new();
        ctx.func.name = ir::ExternalName::testcase("small2");
        ctx.func.signature.params.push(ir::AbiParam::new(I32));
        ctx.func.signature.returns.push(ir::AbiParam::new(I32));
        trans
            .translate(
                &module_translation_state,
                &BODY,
                0,
                &mut ctx.func,
                &mut runtime.func_env(),
            )
            .unwrap();
        debug!("{}", ctx.func.display(None));
        ctx.verify(&flags).unwrap();
    }
    #[test]
    fn infloop() {
        // An infinite loop, no return instructions.
        //
        // (func $infloop (result i32)
        //     (local i32)
        //     (loop (result i32)
        //         (i32.add (get_local 0) (i32.const 1))
        //         (set_local 0)
        //         (br 0)
        //     )
        // )
        const BODY: [u8; 16] = [
            0x01, // 1 local decl.
            0x01, 0x7f, // 1 i32 local.
            0x03, 0x7f, // loop i32
            0x20, 0x00, // get_local 0
            0x41, 0x01, // i32.const 1 (0x41 is i32.const; LEB128 immediate is 1, matching the wat above)
            0x6a, // i32.add
            0x21, 0x00, // set_local 0
            0x0c, 0x00, // br 0
            0x0b, // end
            0x0b, // end
        ];
        let mut trans = FuncTranslator::new();
        let flags = settings::Flags::new(settings::builder());
        let runtime = DummyEnvironment::new(
            isa::TargetFrontendConfig {
                default_call_conv: isa::CallConv::Fast,
                pointer_width: PointerWidth::U64,
            },
            ReturnMode::NormalReturns,
            false,
        );
        let module_translation_state = ModuleTranslationState::new();
        let mut ctx = Context::new();
        ctx.func.name = ir::ExternalName::testcase("infloop");
        ctx.func.signature.returns.push(ir::AbiParam::new(I32));
        trans
            .translate(
                &module_translation_state,
                &BODY,
                0,
                &mut ctx.func,
                &mut runtime.func_env(),
            )
            .unwrap();
        debug!("{}", ctx.func.display(None));
        ctx.verify(&flags).unwrap();
    }
}
| 36.595402 | 99 | 0.609335 |
33dbcebc8b4a1e05068953248b1894643011df59 | 3,688 | use crate::arch::intel::chips::port::{Port, UnsafePort};
/// OCW2 value: non-specific End-Of-Interrupt command.
const EOI: u8 = 0x20;
/// ICW4 value: 8086/88 mode (see the ICW table on `ChainedPics::initialize`).
const ICW4: u8 = 0x01;
/// ICW1 value: edge-triggered, cascade mode, ICW4 required.
const ICW1: u8 = 0x11;
/// ICW3 for the master chip: slave attached on IRQ line 2 (bit mask 0b100).
const ICW3_M: u8 = 0x04;
/// ICW3 for the slave chip: cascade identity 2.
const ICW3_S: u8 = 0x02;
/// IMR value with every interrupt line masked off.
const MASKED: u8 = 0xff;
/// A single 8259A Programmable Interrupt Controller.
#[derive(Debug)]
struct Pic {
    // Base interrupt vector this chip's IRQ lines are remapped to.
    offset: u8,
    // Command port of the chip (0x20 for the master, 0xA0 for the slave).
    command: UnsafePort<u8>,
    // Data port of the chip (0x21 for the master, 0xA1 for the slave).
    data: UnsafePort<u8>,
}
impl Pic {
    /// Returns true if `interrupt_id` falls within the range of vectors
    /// this chip handles.
    /// Each 8259A services 8 IRQ lines: IRQ0-IRQ7 on the master and
    /// IRQ8-IRQ15 on the slave, so the accepted range is
    /// `offset..offset + 8`.
    fn handle_interrupt(&self, interrupt_id: u8) -> bool {
        self.offset <= interrupt_id && interrupt_id < self.offset + 8
    }
    /// Writes the EOI command to this chip's command port to finish the
    /// current interrupt.
    unsafe fn end_interrupt(&mut self) {
        self.command.write(EOI);
    }
}
/// The standard two-level cascaded PIC layout: a master ("main") 8259A
/// with a slave 8259A chained to it.
#[derive(Debug)]
pub struct ChainedPics {
    // Master chip, connected directly to the CPU.
    main: Pic,
    // Slave chip, cascaded through the master.
    slave: Pic,
}
impl ChainedPics {
    /// Creates a `ChainedPics` with the given vector offsets
    /// (master remapped to `offset_1`, slave to `offset_2`).
    pub const unsafe fn new(offset_1: u8, offset_2: u8) -> ChainedPics {
        ChainedPics {
            main: Pic {
                offset: offset_1,
                command: UnsafePort::new(0x20),
                data: UnsafePort::new(0x21),
            },
            slave: Pic {
                offset: offset_2,
                command: UnsafePort::new(0xA0),
                data: UnsafePort::new(0xA1),
            },
        }
    }
    /// Performs the initialization sequence for both the master and the
    /// slave 8259A chips.
    /// | M/S   | ICWx | I/OPort| value|
    /// | ----- | ---- | ------ | ---- |
    /// | Main  | ICW1 | 0x20   | 0x11 |
    /// |       | ICW2 | 0x21   | 0x20 |
    /// |       | ICW3 | 0x21   | 0x04 |
    /// |       | ICW4 | 0x21   | 0x01 |
    /// | Slave | ICW1 | 0xA0   | 0x11 |
    /// |       | ICW2 | 0xA1   | 0x28 |
    /// |       | iCW3 | 0xA1   | 0x02 |
    /// |       | ICW4 | 0xA1   | 0x01 |
    pub unsafe fn initialize(&mut self) {
        // We need to add a delay between writes to the PICs, especially on
        // older motherboards. But we don't necessarily have any kind of
        // timer available, since most of them require interrupts.
        // Various older Linux versions and other PC operating systems have
        // worked around this by writing garbage data to port 0x80.
        // Alternatively a few nop instructions would do the job.
        let mut wait_port: Port<u32> = Port::new(0x80);
        // `wait` is a closure; we only need to write some data to port 0x80.
        let mut wait = || { wait_port.write(0) };
        // Save the interrupt masks before reprogramming.
        let saved_mask1 = self.main.data.read();
        let saved_mask2 = self.slave.data.read();
        // Write ICW1 to the master.
        self.main.command.write(ICW1);
        wait();
        // Write ICW1 to the slave.
        self.slave.command.write(ICW1);
        wait();
        // Write ICW2 (the vector offset) to the master.
        self.main.data.write(self.main.offset);
        wait();
        // Write ICW2 (the vector offset) to the slave.
        self.slave.data.write(self.slave.offset);
        wait();
        // Write ICW3 to the master.
        self.main.data.write(ICW3_M);
        wait();
        // Write ICW3 to the slave.
        self.slave.data.write(ICW3_S);
        wait();
        // Write ICW4 to the master.
        self.main.data.write(ICW4);
        wait();
        // Write ICW4 to the slave.
        self.slave.data.write(ICW4);
        wait();
        // Restore the saved masks.
        self.main.data.write(saved_mask1);
        self.slave.data.write(saved_mask2);
    }
    /// Masks every interrupt line on both 8259A controllers.
    pub unsafe fn disable_8259a(&mut self) {
        self.main.data.write(MASKED);
        self.slave.data.write(MASKED);
    }
    /// Returns true if the given interrupt id can be handled by either the
    /// master or the slave chip.
    pub fn handles_interrupt(&self, interrupt_id: u8) -> bool {
        self.main.handle_interrupt(interrupt_id) || self.slave.handle_interrupt(interrupt_id)
    }
    /// The 8259A is used in non-automatic-EOI mode, so the CPU must send an
    /// EOI (End Of Interrupt) command at the end of the interrupt handler to
    /// clear the corresponding ISR bit. If the request came through the
    /// cascaded slave chip, both chips must receive the EOI command.
    pub unsafe fn notify_end_of_interrupt(&mut self, interrupt_id: u8) {
        if self.handles_interrupt(interrupt_id) {
            if self.slave.handle_interrupt(interrupt_id) {
                self.slave.end_interrupt();
            }
            self.main.end_interrupt();
        }
    }
}
504fd56f438c9b353df77dcf1af28b1ff4abeaa9 | 30,633 | use std::convert::TryFrom;
use std::fmt;
use std::future::Future;
use std::io::Write;
use std::time::Duration;
use base64::write::EncoderWriter as Base64Encoder;
use serde::Serialize;
#[cfg(feature = "json")]
use serde_json;
use super::body::Body;
use super::client::{Client, Pending};
#[cfg(feature = "multipart")]
use super::multipart;
use super::response::Response;
#[cfg(feature = "multipart")]
use crate::header::CONTENT_LENGTH;
use crate::header::{HeaderMap, HeaderName, HeaderValue, CONTENT_TYPE};
use crate::{Method, Url};
use http::{request::Parts, Request as HttpRequest, Version};
/// A request which can be executed with `Client::execute()`.
pub struct Request {
    method: Method,
    url: Url,
    headers: HeaderMap,
    // `None` means no body; a streaming body makes the request unclonable.
    body: Option<Body>,
    // Per-request timeout; when set it overrides the client-wide timeout.
    timeout: Option<Duration>,
    version: Version,
}
/// A builder to construct the properties of a `Request`.
///
/// To construct a `RequestBuilder`, refer to the `Client` documentation.
#[must_use = "RequestBuilder does nothing until you 'send' it"]
pub struct RequestBuilder {
    client: Client,
    // The request under construction. Builder errors are stored here and
    // surfaced when `build()` or `send()` is called.
    request: crate::Result<Request>,
}
impl Request {
    /// Constructs a new request.
    #[inline]
    pub fn new(method: Method, url: Url) -> Self {
        Request {
            method,
            url,
            headers: HeaderMap::new(),
            body: None,
            timeout: None,
            version: Version::default(),
        }
    }
    /// Get the method.
    #[inline]
    pub fn method(&self) -> &Method {
        &self.method
    }
    /// Get a mutable reference to the method.
    #[inline]
    pub fn method_mut(&mut self) -> &mut Method {
        &mut self.method
    }
    /// Get the url.
    #[inline]
    pub fn url(&self) -> &Url {
        &self.url
    }
    /// Get a mutable reference to the url.
    #[inline]
    pub fn url_mut(&mut self) -> &mut Url {
        &mut self.url
    }
    /// Get the headers.
    #[inline]
    pub fn headers(&self) -> &HeaderMap {
        &self.headers
    }
    /// Get a mutable reference to the headers.
    #[inline]
    pub fn headers_mut(&mut self) -> &mut HeaderMap {
        &mut self.headers
    }
    /// Get the body.
    #[inline]
    pub fn body(&self) -> Option<&Body> {
        self.body.as_ref()
    }
    /// Get a mutable reference to the body.
    #[inline]
    pub fn body_mut(&mut self) -> &mut Option<Body> {
        &mut self.body
    }
    /// Get the timeout.
    #[inline]
    pub fn timeout(&self) -> Option<&Duration> {
        self.timeout.as_ref()
    }
    /// Get a mutable reference to the timeout.
    #[inline]
    pub fn timeout_mut(&mut self) -> &mut Option<Duration> {
        &mut self.timeout
    }
    /// Get the http version.
    #[inline]
    pub fn version(&self) -> Version {
        self.version
    }
    /// Get a mutable reference to the http version.
    #[inline]
    pub fn version_mut(&mut self) -> &mut Version {
        &mut self.version
    }
    /// Attempt to clone the request.
    ///
    /// `None` is returned if the request can not be cloned, i.e. if the body is a stream.
    pub fn try_clone(&self) -> Option<Request> {
        // A streaming body cannot be duplicated, so `Body::try_clone`
        // returning `None` makes the whole request unclonable.
        let body = match self.body.as_ref() {
            Some(ref body) => Some(body.try_clone()?),
            None => None,
        };
        let mut req = Request::new(self.method().clone(), self.url().clone());
        // `Duration` and `Version` are `Copy`: use `copied()` and a plain
        // copy instead of the redundant `.cloned()`/`.clone()` calls.
        *req.timeout_mut() = self.timeout().copied();
        *req.headers_mut() = self.headers().clone();
        *req.version_mut() = self.version();
        req.body = body;
        Some(req)
    }
    /// Decomposes the request into its parts for the client internals.
    pub(super) fn pieces(
        self,
    ) -> (
        Method,
        Url,
        HeaderMap,
        Option<Body>,
        Option<Duration>,
        Version,
    ) {
        (
            self.method,
            self.url,
            self.headers,
            self.body,
            self.timeout,
            self.version,
        )
    }
}
impl RequestBuilder {
pub(super) fn new(client: Client, request: crate::Result<Request>) -> RequestBuilder {
let mut builder = RequestBuilder { client, request };
let auth = builder
.request
.as_mut()
.ok()
.and_then(|req| extract_authority(&mut req.url));
if let Some((username, password)) = auth {
builder.basic_auth(username, password)
} else {
builder
}
}
/// Add a `Header` to this Request.
pub fn header<K, V>(self, key: K, value: V) -> RequestBuilder
where
HeaderName: TryFrom<K>,
<HeaderName as TryFrom<K>>::Error: Into<http::Error>,
HeaderValue: TryFrom<V>,
<HeaderValue as TryFrom<V>>::Error: Into<http::Error>,
{
self.header_sensitive(key, value, false)
}
/// Add a `Header` to this Request with ability to define if header_value is sensitive.
fn header_sensitive<K, V>(mut self, key: K, value: V, sensitive: bool) -> RequestBuilder
where
HeaderName: TryFrom<K>,
<HeaderName as TryFrom<K>>::Error: Into<http::Error>,
HeaderValue: TryFrom<V>,
<HeaderValue as TryFrom<V>>::Error: Into<http::Error>,
{
let mut error = None;
if let Ok(ref mut req) = self.request {
match <HeaderName as TryFrom<K>>::try_from(key) {
Ok(key) => match <HeaderValue as TryFrom<V>>::try_from(value) {
Ok(mut value) => {
value.set_sensitive(sensitive);
req.headers_mut().append(key, value);
}
Err(e) => error = Some(crate::error::builder(e.into())),
},
Err(e) => error = Some(crate::error::builder(e.into())),
};
}
if let Some(err) = error {
self.request = Err(err);
}
self
}
/// Add a set of Headers to the existing ones on this Request.
///
/// The headers will be merged in to any already set.
pub fn headers(mut self, headers: crate::header::HeaderMap) -> RequestBuilder {
if let Ok(ref mut req) = self.request {
crate::util::replace_headers(req.headers_mut(), headers);
}
self
}
/// Enable HTTP basic authentication.
pub fn basic_auth<U, P>(self, username: U, password: Option<P>) -> RequestBuilder
where
U: fmt::Display,
P: fmt::Display,
{
let mut header_value = b"Basic ".to_vec();
{
let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD);
// The unwraps here are fine because Vec::write* is infallible.
write!(encoder, "{}:", username).unwrap();
if let Some(password) = password {
write!(encoder, "{}", password).unwrap();
}
}
self.header_sensitive(crate::header::AUTHORIZATION, header_value, true)
}
/// Enable HTTP bearer authentication.
pub fn bearer_auth<T>(self, token: T) -> RequestBuilder
where
T: fmt::Display,
{
let header_value = format!("Bearer {}", token);
self.header_sensitive(crate::header::AUTHORIZATION, header_value, true)
}
/// Set the request body.
pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder {
if let Ok(ref mut req) = self.request {
*req.body_mut() = Some(body.into());
}
self
}
/// Enables a request timeout.
///
/// The timeout is applied from when the request starts connecting until the
/// response body has finished. It affects only this request and overrides
/// the timeout configured using `ClientBuilder::timeout()`.
pub fn timeout(mut self, timeout: Duration) -> RequestBuilder {
if let Ok(ref mut req) = self.request {
*req.timeout_mut() = Some(timeout);
}
self
}
/// Sends a multipart/form-data body.
///
/// ```
/// # use reqwest::Error;
///
/// # async fn run() -> Result<(), Error> {
/// let client = reqwest::Client::new();
/// let form = reqwest::multipart::Form::new()
/// .text("key3", "value3")
/// .text("key4", "value4");
///
///
/// let response = client.post("your url")
/// .multipart(form)
/// .send()
/// .await?;
/// # Ok(())
/// # }
/// ```
#[cfg(feature = "multipart")]
#[cfg_attr(docsrs, doc(cfg(feature = "multipart")))]
pub fn multipart(self, mut multipart: multipart::Form) -> RequestBuilder {
let mut builder = self.header(
CONTENT_TYPE,
format!("multipart/form-data; boundary={}", multipart.boundary()).as_str(),
);
builder = match multipart.compute_length() {
Some(length) => builder.header(CONTENT_LENGTH, length),
None => builder,
};
if let Ok(ref mut req) = builder.request {
*req.body_mut() = Some(multipart.stream())
}
builder
}
/// Modify the query string of the URL.
///
/// Modifies the URL of this request, adding the parameters provided.
/// This method appends and does not overwrite. This means that it can
/// be called multiple times and that existing query parameters are not
/// overwritten if the same key is used. The key will simply show up
/// twice in the query string.
/// Calling `.query([("foo", "a"), ("foo", "b")])` gives `"foo=a&foo=b"`.
///
/// # Note
/// This method does not support serializing a single key-value
/// pair. Instead of using `.query(("key", "val"))`, use a sequence, such
/// as `.query(&[("key", "val")])`. It's also possible to serialize structs
/// and maps into a key-value pair.
///
/// # Errors
/// This method will fail if the object you provide cannot be serialized
/// into a query string.
pub fn query<T: Serialize + ?Sized>(mut self, query: &T) -> RequestBuilder {
let mut error = None;
if let Ok(ref mut req) = self.request {
let url = req.url_mut();
let mut pairs = url.query_pairs_mut();
let serializer = serde_urlencoded::Serializer::new(&mut pairs);
if let Err(err) = query.serialize(serializer) {
error = Some(crate::error::builder(err));
}
}
if let Ok(ref mut req) = self.request {
if let Some("") = req.url().query() {
req.url_mut().set_query(None);
}
}
if let Some(err) = error {
self.request = Err(err);
}
self
}
/// Set HTTP version
pub fn version(mut self, version: Version) -> RequestBuilder {
if let Ok(ref mut req) = self.request {
req.version = version;
}
self
}
/// Send a form body.
pub fn form<T: Serialize + ?Sized>(mut self, form: &T) -> RequestBuilder {
let mut error = None;
if let Ok(ref mut req) = self.request {
match serde_urlencoded::to_string(form) {
Ok(body) => {
req.headers_mut().insert(
CONTENT_TYPE,
HeaderValue::from_static("application/x-www-form-urlencoded"),
);
*req.body_mut() = Some(body.into());
}
Err(err) => error = Some(crate::error::builder(err)),
}
}
if let Some(err) = error {
self.request = Err(err);
}
self
}
/// Send a JSON body.
///
/// # Optional
///
/// This requires the optional `json` feature enabled.
///
/// # Errors
///
/// Serialization can fail if `T`'s implementation of `Serialize` decides to
/// fail, or if `T` contains a map with non-string keys.
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub fn json<T: Serialize + ?Sized>(mut self, json: &T) -> RequestBuilder {
let mut error = None;
if let Ok(ref mut req) = self.request {
match serde_json::to_vec(json) {
Ok(body) => {
req.headers_mut()
.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
*req.body_mut() = Some(body.into());
}
Err(err) => error = Some(crate::error::builder(err)),
}
}
if let Some(err) = error {
self.request = Err(err);
}
self
}
/// Disable CORS on fetching the request.
///
/// # WASM
///
/// This option is only effective with WebAssembly target.
///
/// The [request mode][mdn] will be set to 'no-cors'.
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/API/Request/mode
pub fn fetch_mode_no_cors(self) -> RequestBuilder {
self
}
/// Build a `Request`, which can be inspected, modified and executed with
/// `Client::execute()`.
pub fn build(self) -> crate::Result<Request> {
self.request
}
/// Constructs the Request and sends it to the target URL, returning a
/// future Response.
///
/// # Errors
///
/// This method fails if there was an error while sending request,
/// redirect loop was detected or redirect limit was exhausted.
///
/// # Example
///
/// ```no_run
/// # use reqwest::Error;
/// #
/// # async fn run() -> Result<(), Error> {
/// let response = reqwest::Client::new()
/// .get("https://hyper.rs")
/// .send()
/// .await?;
/// # Ok(())
/// # }
/// ```
pub fn send(self) -> impl Future<Output = Result<Response, crate::Error>> {
match self.request {
Ok(req) => self.client.execute_request(req),
Err(err) => Pending::new_err(err),
}
}
/// Attempt to clone the RequestBuilder.
///
/// `None` is returned if the RequestBuilder can not be cloned,
/// i.e. if the request body is a stream.
///
/// # Examples
///
/// ```
/// # use reqwest::Error;
/// #
/// # fn run() -> Result<(), Error> {
/// let client = reqwest::Client::new();
/// let builder = client.post("http://httpbin.org/post")
/// .body("from a &str!");
/// let clone = builder.try_clone();
/// assert!(clone.is_some());
/// # Ok(())
/// # }
/// ```
pub fn try_clone(&self) -> Option<RequestBuilder> {
self.request
.as_ref()
.ok()
.and_then(|req| req.try_clone())
.map(|req| RequestBuilder {
client: self.client.clone(),
request: Ok(req),
})
}
}
impl fmt::Debug for Request {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate to the shared field formatter also used by RequestBuilder.
        let mut debug = f.debug_struct("Request");
        fmt_request_fields(&mut debug, self).finish()
    }
}
impl fmt::Debug for RequestBuilder {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // A builder carries either a request-in-progress or a deferred
        // error; print whichever is present.
        let mut debug = f.debug_struct("RequestBuilder");
        match &self.request {
            Ok(req) => fmt_request_fields(&mut debug, req).finish(),
            Err(err) => debug.field("error", err).finish(),
        }
    }
}
/// Appends a `Request`'s printable fields to a debug-struct builder.
/// Shared by the `Debug` impls of `Request` and `RequestBuilder`.
fn fmt_request_fields<'a, 'b>(
    f: &'a mut fmt::DebugStruct<'a, 'b>,
    req: &Request,
) -> &'a mut fmt::DebugStruct<'a, 'b> {
    let f = f.field("method", &req.method);
    let f = f.field("url", &req.url);
    f.field("headers", &req.headers)
}
/// Check the request URL for a "username:password" type authority, and if
/// found, remove it from the URL and return it.
pub(crate) fn extract_authority(url: &mut Url) -> Option<(String, Option<String>)> {
    use percent_encoding::percent_decode;
    // URLs without an authority component can't carry credentials.
    if !url.has_authority() {
        return None;
    }
    let username: String = percent_decode(url.username().as_bytes())
        .decode_utf8()
        .ok()?
        .into();
    let password = url.password().and_then(|pass| {
        percent_decode(pass.as_bytes())
            .decode_utf8()
            .ok()
            .map(String::from)
    });
    // Nothing to extract when both parts are absent.
    if username.is_empty() && password.is_none() {
        return None;
    }
    // Strip the credentials out of the URL now that we've captured them.
    url.set_username("")
        .expect("has_authority means set_username shouldn't fail");
    url.set_password(None)
        .expect("has_authority means set_password shouldn't fail");
    Some((username, password))
}
impl<T> TryFrom<HttpRequest<T>> for Request
where
    T: Into<Body>,
{
    type Error = crate::Error;
    /// Converts an `http::Request` into a reqwest `Request`, re-parsing
    /// the URI as a `Url` and carrying over method, headers, version, and
    /// body.
    ///
    /// # Errors
    ///
    /// Returns a builder error if the URI cannot be parsed as a valid URL.
    fn try_from(req: HttpRequest<T>) -> crate::Result<Self> {
        let (parts, body) = req.into_parts();
        let Parts {
            method,
            uri,
            headers,
            version,
            ..
        } = parts;
        let url = Url::parse(&uri.to_string()).map_err(crate::error::builder)?;
        Ok(Request {
            method,
            url,
            headers,
            body: Some(body.into()),
            timeout: None,
            // Field-init shorthand; `version: version` was redundant.
            version,
        })
    }
}
#[cfg(test)]
mod tests {
use super::{Client, HttpRequest, Request, Version};
use crate::Method;
use serde::Serialize;
use std::collections::BTreeMap;
use std::convert::TryFrom;
#[test]
fn add_query_append() {
let client = Client::new();
let some_url = "https://google.com/";
let r = client.get(some_url);
let r = r.query(&[("foo", "bar")]);
let r = r.query(&[("qux", 3)]);
let req = r.build().expect("request is valid");
assert_eq!(req.url().query(), Some("foo=bar&qux=3"));
}
#[test]
fn add_query_append_same() {
let client = Client::new();
let some_url = "https://google.com/";
let r = client.get(some_url);
let r = r.query(&[("foo", "a"), ("foo", "b")]);
let req = r.build().expect("request is valid");
assert_eq!(req.url().query(), Some("foo=a&foo=b"));
}
#[test]
fn add_query_struct() {
#[derive(Serialize)]
struct Params {
foo: String,
qux: i32,
}
let client = Client::new();
let some_url = "https://google.com/";
let r = client.get(some_url);
let params = Params {
foo: "bar".into(),
qux: 3,
};
let r = r.query(¶ms);
let req = r.build().expect("request is valid");
assert_eq!(req.url().query(), Some("foo=bar&qux=3"));
}
#[test]
fn add_query_map() {
let mut params = BTreeMap::new();
params.insert("foo", "bar");
params.insert("qux", "three");
let client = Client::new();
let some_url = "https://google.com/";
let r = client.get(some_url);
let r = r.query(¶ms);
let req = r.build().expect("request is valid");
assert_eq!(req.url().query(), Some("foo=bar&qux=three"));
}
#[test]
fn test_replace_headers() {
use http::HeaderMap;
let mut headers = HeaderMap::new();
headers.insert("foo", "bar".parse().unwrap());
headers.append("foo", "baz".parse().unwrap());
let client = Client::new();
let req = client
.get("https://hyper.rs")
.header("im-a", "keeper")
.header("foo", "pop me")
.headers(headers)
.build()
.expect("request build");
assert_eq!(req.headers()["im-a"], "keeper");
let foo = req.headers().get_all("foo").iter().collect::<Vec<_>>();
assert_eq!(foo.len(), 2);
assert_eq!(foo[0], "bar");
assert_eq!(foo[1], "baz");
}
#[test]
fn normalize_empty_query() {
let client = Client::new();
let some_url = "https://google.com/";
let empty_query: &[(&str, &str)] = &[];
let req = client
.get(some_url)
.query(empty_query)
.build()
.expect("request build");
assert_eq!(req.url().query(), None);
assert_eq!(req.url().as_str(), "https://google.com/");
}
#[test]
fn try_clone_reusable() {
let client = Client::new();
let builder = client
.post("http://httpbin.org/post")
.header("foo", "bar")
.body("from a &str!");
let req = builder
.try_clone()
.expect("clone successful")
.build()
.expect("request is valid");
assert_eq!(req.url().as_str(), "http://httpbin.org/post");
assert_eq!(req.method(), Method::POST);
assert_eq!(req.headers()["foo"], "bar");
}
#[test]
fn try_clone_no_body() {
let client = Client::new();
let builder = client.get("http://httpbin.org/get");
let req = builder
.try_clone()
.expect("clone successful")
.build()
.expect("request is valid");
assert_eq!(req.url().as_str(), "http://httpbin.org/get");
assert_eq!(req.method(), Method::GET);
assert!(req.body().is_none());
}
#[test]
#[cfg(feature = "stream")]
fn try_clone_stream() {
let chunks: Vec<Result<_, ::std::io::Error>> = vec![Ok("hello"), Ok(" "), Ok("world")];
let stream = futures_util::stream::iter(chunks);
let client = Client::new();
let builder = client
.get("http://httpbin.org/get")
.body(super::Body::wrap_stream(stream));
let clone = builder.try_clone();
assert!(clone.is_none());
}
#[test]
fn convert_url_authority_into_basic_auth() {
let client = Client::new();
let some_url = "https://Aladdin:open sesame@localhost/";
let req = client.get(some_url).build().expect("request build");
assert_eq!(req.url().as_str(), "https://localhost/");
assert_eq!(
req.headers()["authorization"],
"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="
);
}
#[test]
fn test_basic_auth_sensitive_header() {
let client = Client::new();
let some_url = "https://localhost/";
let req = client
.get(some_url)
.basic_auth("Aladdin", Some("open sesame"))
.build()
.expect("request build");
assert_eq!(req.url().as_str(), "https://localhost/");
assert_eq!(
req.headers()["authorization"],
"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="
);
assert_eq!(req.headers()["authorization"].is_sensitive(), true);
}
#[test]
fn test_bearer_auth_sensitive_header() {
let client = Client::new();
let some_url = "https://localhost/";
let req = client
.get(some_url)
.bearer_auth("Hold my bear")
.build()
.expect("request build");
assert_eq!(req.url().as_str(), "https://localhost/");
assert_eq!(req.headers()["authorization"], "Bearer Hold my bear");
assert_eq!(req.headers()["authorization"].is_sensitive(), true);
}
#[test]
fn convert_from_http_request() {
let http_request = HttpRequest::builder()
.method("GET")
.uri("http://localhost/")
.header("User-Agent", "my-awesome-agent/1.0")
.body("test test test")
.unwrap();
let req: Request = Request::try_from(http_request).unwrap();
assert_eq!(req.body().is_none(), false);
let test_data = b"test test test";
assert_eq!(req.body().unwrap().as_bytes(), Some(&test_data[..]));
let headers = req.headers();
assert_eq!(headers.get("User-Agent").unwrap(), "my-awesome-agent/1.0");
assert_eq!(req.method(), Method::GET);
assert_eq!(req.url().as_str(), "http://localhost/");
}
#[test]
fn set_http_request_version() {
let http_request = HttpRequest::builder()
.method("GET")
.uri("http://localhost/")
.header("User-Agent", "my-awesome-agent/1.0")
.version(Version::HTTP_11)
.body("test test test")
.unwrap();
let req: Request = Request::try_from(http_request).unwrap();
assert_eq!(req.body().is_none(), false);
let test_data = b"test test test";
assert_eq!(req.body().unwrap().as_bytes(), Some(&test_data[..]));
let headers = req.headers();
assert_eq!(headers.get("User-Agent").unwrap(), "my-awesome-agent/1.0");
assert_eq!(req.method(), Method::GET);
assert_eq!(req.url().as_str(), "http://localhost/");
assert_eq!(req.version(), Version::HTTP_11);
}
/*
use {body, Method};
use super::Client;
use header::{Host, Headers, ContentType};
use std::collections::HashMap;
use serde_urlencoded;
use serde_json;
#[test]
fn basic_get_request() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let r = client.get(some_url).unwrap().build();
assert_eq!(r.method, Method::Get);
assert_eq!(r.url.as_str(), some_url);
}
#[test]
fn basic_head_request() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let r = client.head(some_url).unwrap().build();
assert_eq!(r.method, Method::Head);
assert_eq!(r.url.as_str(), some_url);
}
#[test]
fn basic_post_request() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let r = client.post(some_url).unwrap().build();
assert_eq!(r.method, Method::Post);
assert_eq!(r.url.as_str(), some_url);
}
#[test]
fn basic_put_request() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let r = client.put(some_url).unwrap().build();
assert_eq!(r.method, Method::Put);
assert_eq!(r.url.as_str(), some_url);
}
#[test]
fn basic_patch_request() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let r = client.patch(some_url).unwrap().build();
assert_eq!(r.method, Method::Patch);
assert_eq!(r.url.as_str(), some_url);
}
#[test]
fn basic_delete_request() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let r = client.delete(some_url).unwrap().build();
assert_eq!(r.method, Method::Delete);
assert_eq!(r.url.as_str(), some_url);
}
#[test]
fn add_header() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let header = Host {
hostname: "google.com".to_string(),
port: None,
};
// Add a copy of the header to the request builder
let r = r.header(header.clone()).build();
// then check it was actually added
assert_eq!(r.headers.get::<Host>(), Some(&header));
}
#[test]
fn add_headers() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let header = Host {
hostname: "google.com".to_string(),
port: None,
};
let mut headers = Headers::new();
headers.set(header);
// Add a copy of the headers to the request builder
let r = r.headers(headers.clone()).build();
// then make sure they were added correctly
assert_eq!(r.headers, headers);
}
#[test]
fn add_headers_multi() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let header = Host {
hostname: "google.com".to_string(),
port: None,
};
let mut headers = Headers::new();
headers.set(header);
// Add a copy of the headers to the request builder
let r = r.headers(headers.clone()).build();
// then make sure they were added correctly
assert_eq!(r.headers, headers);
}
#[test]
fn add_body() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let body = "Some interesting content";
let r = r.body(body).build();
let buf = body::read_to_string(r.body.unwrap()).unwrap();
assert_eq!(buf, body);
}
#[test]
fn add_form() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let mut form_data = HashMap::new();
form_data.insert("foo", "bar");
let r = r.form(&form_data).unwrap().build();
// Make sure the content type was set
assert_eq!(r.headers.get::<ContentType>(),
Some(&ContentType::form_url_encoded()));
let buf = body::read_to_string(r.body.unwrap()).unwrap();
let body_should_be = serde_urlencoded::to_string(&form_data).unwrap();
assert_eq!(buf, body_should_be);
}
#[test]
fn add_json() {
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let mut json_data = HashMap::new();
json_data.insert("foo", "bar");
let r = r.json(&json_data).unwrap().build();
// Make sure the content type was set
assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::json()));
let buf = body::read_to_string(r.body.unwrap()).unwrap();
let body_should_be = serde_json::to_string(&json_data).unwrap();
assert_eq!(buf, body_should_be);
}
#[test]
fn add_json_fail() {
use serde::{Serialize, Serializer};
use serde::ser::Error;
struct MyStruct;
impl Serialize for MyStruct {
fn serialize<S>(&self, _serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
Err(S::Error::custom("nope"))
}
}
let client = Client::new().unwrap();
let some_url = "https://google.com/";
let mut r = client.post(some_url).unwrap();
let json_data = MyStruct{};
assert!(r.json(&json_data).unwrap_err().is_serialization());
}
*/
}
| 29.973581 | 95 | 0.535436 |
26d8b6960cca12102ebde88aa550ecff9908acc8 | 43,169 | use {
crate::{
config::{Config, ExplicitRelease},
stop_process::stop_process,
update_manifest::{SignedUpdateManifest, UpdateManifest},
},
chrono::{Local, TimeZone},
console::{style, Emoji},
indicatif::{ProgressBar, ProgressStyle},
serde::{Deserialize, Serialize},
solana_client::rpc_client::RpcClient,
solana_config_program::{config_instruction, get_config_data, ConfigState},
solana_sdk::{
hash::{Hash, Hasher},
message::Message,
pubkey::Pubkey,
signature::{read_keypair_file, Keypair, Signable, Signer},
transaction::Transaction,
},
std::{
fs::{self, File},
io::{self, BufReader, Read},
path::{Path, PathBuf},
sync::mpsc,
time::{Duration, Instant, SystemTime},
},
tempfile::TempDir,
url::Url,
};
/// Version metadata describing a release: target triple, git commit, and
/// release channel.
// NOTE(review): presumably deserialized from a release manifest file —
// the producer is outside this view; confirm against callers.
#[derive(Deserialize, Debug)]
pub struct ReleaseVersion {
    pub target: String,
    pub commit: String,
    channel: String,
}
// Emoji prefixes used in progress/status output; the second argument is the
// plain-text fallback for terminals that cannot render emoji.
static TRUCK: Emoji = Emoji("🚚 ", "");
static LOOKING_GLASS: Emoji = Emoji("🔍 ", "");
static BULLET: Emoji = Emoji("• ", "* ");
static SPARKLE: Emoji = Emoji("✨ ", "");
static WRAPPED_PRESENT: Emoji = Emoji("🎁 ", "");
static PACKAGE: Emoji = Emoji("📦 ", "");
static INFORMATION: Emoji = Emoji("ℹ️ ", "");
static RECYCLING: Emoji = Emoji("♻️ ", "");
/// Creates a new process bar for processing that will take an unknown amount of time
fn new_spinner_progress_bar() -> ProgressBar {
    let spinner = ProgressBar::new(42);
    let style = ProgressStyle::default_spinner().template("{spinner:.green} {wide_msg}");
    spinner.set_style(style);
    // Keep the spinner animating even when no progress updates arrive.
    spinner.enable_steady_tick(100);
    spinner
}
/// Pretty print a "name value"
fn println_name_value(name: &str, value: &str) {
    // Bold the name; leave the value unstyled.
    let styled_name = style(name).bold();
    println!("{} {}", styled_name, value);
}
/// Downloads a file at `url` to a temporary location. If `expected_sha256` is
/// Some(_), produce an error if the SHA256 of the file contents doesn't match.
///
/// Returns a tuple consisting of:
/// * TempDir - drop this value to clean up the temporary location
/// * PathBuf - path to the downloaded file (within `TempDir`)
/// * Hash - SHA256 of the release
///
fn download_to_temp(
    url: &str,
    expected_sha256: Option<&Hash>,
) -> Result<(TempDir, PathBuf, Hash), Box<dyn std::error::Error>> {
    // Streams a file through the hasher in 1 KiB chunks and returns its
    // digest.
    fn sha256_file_digest<P: AsRef<Path>>(path: P) -> Result<Hash, Box<dyn std::error::Error>> {
        let input = File::open(path)?;
        let mut reader = BufReader::new(input);
        let mut hasher = Hasher::default();
        let mut buffer = [0; 1024];
        loop {
            let count = reader.read(&mut buffer)?;
            if count == 0 {
                break;
            }
            hasher.hash(&buffer[..count]);
        }
        Ok(hasher.result())
    }
    let url = Url::parse(url).map_err(|err| format!("Unable to parse {}: {}", url, err))?;
    let temp_dir = TempDir::new()?;
    let temp_file = temp_dir.path().join("download");
    let client = reqwest::blocking::Client::new();
    let progress_bar = new_spinner_progress_bar();
    progress_bar.set_message(format!("{}Downloading...", TRUCK));
    let response = client.get(url.as_str()).send()?;
    // Content-Length may be absent or unparsable; fall back to 0 so the bar
    // still renders (it just won't have a meaningful total).
    let download_size = {
        response
            .headers()
            .get(reqwest::header::CONTENT_LENGTH)
            .and_then(|content_length| content_length.to_str().ok())
            .and_then(|content_length| content_length.parse().ok())
            .unwrap_or(0)
    };
    progress_bar.set_length(download_size);
    progress_bar.set_style(
        ProgressStyle::default_bar()
            .template(
                "{spinner:.green}{wide_msg} [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})",
            )
            .progress_chars("=> "),
    );
    progress_bar.set_message(format!("{}Downloading", TRUCK));
    // Adapter that advances the progress bar as bytes are read from the
    // HTTP response.
    struct DownloadProgress<R> {
        progress_bar: ProgressBar,
        response: R,
    }
    impl<R: Read> Read for DownloadProgress<R> {
        fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
            self.response.read(buf).map(|n| {
                self.progress_bar.inc(n as u64);
                n
            })
        }
    }
    let mut source = DownloadProgress {
        progress_bar,
        response,
    };
    let mut file = File::create(&temp_file)?;
    std::io::copy(&mut source, &mut file)?;
    let temp_file_sha256 = sha256_file_digest(&temp_file)
        .map_err(|err| format!("Unable to hash {:?}: {}", temp_file, err))?;
    // Verify the digest only when the caller supplied an expectation.
    if expected_sha256.is_some() && expected_sha256 != Some(&temp_file_sha256) {
        return Err(io::Error::new(io::ErrorKind::Other, "Incorrect hash").into());
    }
    source.progress_bar.finish_and_clear();
    Ok((temp_dir, temp_file, temp_file_sha256))
}
/// Extracts the release archive into the specified directory
fn extract_release_archive(
    archive: &Path,
    extract_dir: &Path,
) -> Result<(), Box<dyn std::error::Error>> {
    use bzip2::bufread::BzDecoder;
    use tar::Archive;
    let progress_bar = new_spinner_progress_bar();
    progress_bar.set_message(format!("{}Extracting...", PACKAGE));
    if extract_dir.exists() {
        let _ = fs::remove_dir_all(&extract_dir);
    }
    // Unpack into a sibling "tmp-extract" directory first, then rename into
    // place, so a partially-extracted archive never appears at `extract_dir`.
    let tmp_extract_dir = extract_dir.with_file_name("tmp-extract");
    if tmp_extract_dir.exists() {
        let _ = fs::remove_dir_all(&tmp_extract_dir);
    }
    fs::create_dir_all(&tmp_extract_dir)?;
    let tar_bz2 = File::open(archive)?;
    let tar = BzDecoder::new(BufReader::new(tar_bz2));
    let mut release = Archive::new(tar);
    release.unpack(&tmp_extract_dir)?;
    fs::rename(&tmp_extract_dir, extract_dir)?;
    progress_bar.finish_and_clear();
    Ok(())
}
/// Parses a `version.yml` file into a [`ReleaseVersion`], mapping both the
/// open failure and the parse failure to a descriptive error string.
fn load_release_version(version_yml: &Path) -> Result<ReleaseVersion, String> {
    match File::open(&version_yml) {
        Ok(file) => serde_yaml::from_reader(file)
            .map_err(|err| format!("Unable to parse {:?}: {:?}", version_yml, err)),
        Err(err) => Err(format!("Unable to open {:?}: {:?}", version_yml, err)),
    }
}
/// Reads the supported TARGET triple for the given release
fn load_release_target(release_dir: &Path) -> Result<String, String> {
    // The version metadata lives at <release_dir>/solana-release/version.yml.
    let mut version_yml = PathBuf::from(release_dir);
    version_yml.push("solana-release");
    version_yml.push("version.yml");
    let version = load_release_version(&version_yml)?;
    Ok(version.target)
}
/// Current wall-clock time expressed as whole seconds since the UNIX epoch.
fn timestamp_secs() -> u64 {
    let now = SystemTime::now();
    let since_epoch = now.duration_since(SystemTime::UNIX_EPOCH).unwrap();
    since_epoch.as_secs()
}
/// Create an empty update manifest for the given `update_manifest_keypair` if it doesn't already
/// exist on the cluster
fn new_update_manifest(
    rpc_client: &RpcClient,
    from_keypair: &Keypair,
    update_manifest_keypair: &Keypair,
) -> Result<(), Box<dyn std::error::Error>> {
    // Any error fetching the account is treated as "absent", which triggers
    // creation — NOTE(review): a transient RPC failure would also take this path.
    if rpc_client
        .get_account_data(&update_manifest_keypair.pubkey())
        .is_err()
    {
        let (recent_blockhash, _fee_calculator) = rpc_client.get_recent_blockhash()?;
        // Fund the new account with enough lamports to be rent exempt at the
        // manifest's maximum serialized size.
        let lamports = rpc_client
            .get_minimum_balance_for_rent_exemption(SignedUpdateManifest::max_space() as usize)?;
        let instructions = config_instruction::create_account::<SignedUpdateManifest>(
            &from_keypair.pubkey(),
            &update_manifest_keypair.pubkey(),
            lamports,
            vec![], // additional keys
        );
        let message = Message::new(&instructions, Some(&from_keypair.pubkey()));
        // Both the funder and the manifest account must sign account creation.
        let signers = [from_keypair, update_manifest_keypair];
        let transaction = Transaction::new(&signers, message, recent_blockhash);
        rpc_client.send_and_confirm_transaction(&transaction)?;
    }
    Ok(())
}
/// Update the update manifest on the cluster with new content
fn store_update_manifest(
    rpc_client: &RpcClient,
    from_keypair: &Keypair,
    update_manifest_keypair: &Keypair,
    update_manifest: &SignedUpdateManifest,
) -> Result<(), Box<dyn std::error::Error>> {
    let (recent_blockhash, _fee_calculator) = rpc_client.get_recent_blockhash()?;
    let signers = [from_keypair, update_manifest_keypair];
    let instruction = config_instruction::store::<SignedUpdateManifest>(
        &update_manifest_keypair.pubkey(),
        true, // update_manifest_keypair is signer
        vec![], // additional keys
        update_manifest,
    );
    // `from_keypair` pays the transaction fee.
    let message = Message::new(&[instruction], Some(&from_keypair.pubkey()));
    let transaction = Transaction::new(&signers, message, recent_blockhash);
    rpc_client.send_and_confirm_transaction(&transaction)?;
    Ok(())
}
/// Read the current contents of the update manifest from the cluster
fn get_update_manifest(
    rpc_client: &RpcClient,
    update_manifest_pubkey: &Pubkey,
) -> Result<UpdateManifest, String> {
    let data = rpc_client
        .get_account_data(update_manifest_pubkey)
        .map_err(|err| format!("Unable to fetch update manifest: {}", err))?;
    // Peel off the config-account framing to reach the manifest payload.
    let config_data = get_config_data(&data)
        .map_err(|err| format!("Unable to get at config_data to update manifest: {}", err))?;
    let signed_update_manifest =
        SignedUpdateManifest::deserialize(update_manifest_pubkey, config_data)
            .map_err(|err| format!("Unable to deserialize update manifest: {}", err))?;
    Ok(signed_update_manifest.manifest)
}
/// Nags the user when the active release bin directory is not on their PATH.
fn check_env_path_for_bin_dir(config: &Config) {
    use std::env;
    // Canonicalize both sides so symlinked/relative PATH entries still match.
    let bin_dir = config
        .active_release_bin_dir()
        .canonicalize()
        .unwrap_or_default();
    let found = env::var_os("PATH").map_or(false, |paths| {
        env::split_paths(&paths)
            .filter_map(|path| path.canonicalize().ok())
            .any(|path| path == bin_dir)
    });
    if !found {
        println!(
            "\nPlease update your PATH environment variable to include the solana programs:\n PATH=\"{}:$PATH\"\n",
            config.active_release_bin_dir().to_str().unwrap()
        );
    }
}
/// Encodes a UTF-8 string as a null-terminated UCS-2 string in bytes
#[cfg(windows)]
pub fn string_to_winreg_bytes(s: &str) -> Vec<u8> {
    use std::ffi::OsString;
    use std::os::windows::ffi::OsStrExt;
    // Append the NUL terminator before the wide-char conversion.
    let v: Vec<_> = OsString::from(format!("{}\x00", s)).encode_wide().collect();
    // SAFETY: reinterprets the `u16` buffer as raw bytes; `v.len() * 2` is its
    // exact byte length, and `to_vec()` copies the data out before `v` drops.
    unsafe { std::slice::from_raw_parts(v.as_ptr() as *const u8, v.len() * 2).to_vec() }
}
// This is used to decode the value of HKCU\Environment\PATH. If that
// key is not Unicode (or not REG_SZ | REG_EXPAND_SZ) then this
// returns null. The winreg library itself does a lossy unicode
// conversion.
#[cfg(windows)]
pub fn string_from_winreg_value(val: &winreg::RegValue) -> Option<String> {
    use std::slice;
    use winreg::enums::RegType;
    match val.vtype {
        RegType::REG_SZ | RegType::REG_EXPAND_SZ => {
            // Copied from winreg
            // Reinterpret the raw registry bytes as UTF-16 code units.
            let words = unsafe {
                slice::from_raw_parts(val.bytes.as_ptr() as *const u16, val.bytes.len() / 2)
            };
            let mut s = if let Ok(s) = String::from_utf16(words) {
                s
            } else {
                return None;
            };
            // Strip trailing NUL terminator(s) left over from the registry encoding.
            while s.ends_with('\u{0}') {
                s.pop();
            }
            Some(s)
        }
        _ => None,
    }
}
// Get the windows PATH variable out of the registry as a String. If
// this returns None then the PATH variable is not Unicode and we
// should not mess with it.
#[cfg(windows)]
fn get_windows_path_var() -> Result<Option<String>, String> {
    use winreg::enums::{HKEY_CURRENT_USER, KEY_READ, KEY_WRITE};
    use winreg::RegKey;
    let root = RegKey::predef(HKEY_CURRENT_USER);
    let environment = root
        .open_subkey_with_flags("Environment", KEY_READ | KEY_WRITE)
        .map_err(|err| format!("Unable to open HKEY_CURRENT_USER\\Environment: {}", err))?;
    let reg_value = environment.get_raw_value("PATH");
    match reg_value {
        Ok(val) => {
            if let Some(s) = string_from_winreg_value(&val) {
                Ok(Some(s))
            } else {
                println!("the registry key HKEY_CURRENT_USER\\Environment\\PATH does not contain valid Unicode. Not modifying the PATH variable");
                return Ok(None);
            }
        }
        // A missing PATH value is treated as an empty PATH, not an error.
        Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(Some(String::new())),
        Err(e) => Err(e.to_string()),
    }
}
#[cfg(windows)]
fn add_to_path(new_path: &str) -> bool {
    use std::ptr;
    use winapi::shared::minwindef::*;
    use winapi::um::winuser::{
        SendMessageTimeoutA, HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE,
    };
    use winreg::enums::{RegType, HKEY_CURRENT_USER, KEY_READ, KEY_WRITE};
    use winreg::{RegKey, RegValue};
    let old_path = if let Some(s) =
        get_windows_path_var().unwrap_or_else(|err| panic!("Unable to get PATH: {}", err))
    {
        s
    } else {
        // PATH is not valid Unicode; leave it alone.
        return false;
    };
    if !old_path.contains(&new_path) {
        // Prepend `new_path` so it takes precedence over existing entries.
        let mut new_path = new_path.to_string();
        if !old_path.is_empty() {
            new_path.push_str(";");
            new_path.push_str(&old_path);
        }
        let root = RegKey::predef(HKEY_CURRENT_USER);
        let environment = root
            .open_subkey_with_flags("Environment", KEY_READ | KEY_WRITE)
            .unwrap_or_else(|err| panic!("Unable to open HKEY_CURRENT_USER\\Environment: {}", err));
        // REG_EXPAND_SZ so %VAR% references inside PATH keep expanding.
        let reg_value = RegValue {
            bytes: string_to_winreg_bytes(&new_path),
            vtype: RegType::REG_EXPAND_SZ,
        };
        environment
            .set_raw_value("PATH", &reg_value)
            .unwrap_or_else(|err| {
                panic!("Unable set HKEY_CURRENT_USER\\Environment\\PATH: {}", err)
            });
        // Tell other processes to update their environment
        unsafe {
            SendMessageTimeoutA(
                HWND_BROADCAST,
                WM_SETTINGCHANGE,
                0 as WPARAM,
                "Environment\0".as_ptr() as LPARAM,
                SMTO_ABORTIFHUNG,
                5000,
                ptr::null_mut(),
            );
        }
    }
    // NOTE(review): this message prints even when PATH already contained the
    // entry and nothing was modified — confirm whether that is intended.
    println!(
        "\n{}\n {}\n\n{}",
        style("The HKEY_CURRENT_USER/Environment/PATH registry key has been modified to include:").bold(),
        new_path,
        style("Future applications will automatically have the correct environment, but you may need to restart your current shell.").bold()
    );
    true
}
#[cfg(unix)]
fn add_to_path(new_path: &str) -> bool {
let shell_export_string = format!(r#"export PATH="{}:$PATH""#, new_path);
let mut modified_rcfiles = false;
// Look for sh, bash, and zsh rc files
let mut rcfiles = vec![dirs_next::home_dir().map(|p| p.join(".profile"))];
if let Ok(shell) = std::env::var("SHELL") {
if shell.contains("zsh") {
let zdotdir = std::env::var("ZDOTDIR")
.ok()
.map(PathBuf::from)
.or_else(dirs_next::home_dir);
let zprofile = zdotdir.map(|p| p.join(".zprofile"));
rcfiles.push(zprofile);
}
}
if let Some(bash_profile) = dirs_next::home_dir().map(|p| p.join(".bash_profile")) {
// Only update .bash_profile if it exists because creating .bash_profile
// will cause .profile to not be read
if bash_profile.exists() {
rcfiles.push(Some(bash_profile));
}
}
let rcfiles = rcfiles.into_iter().filter_map(|f| f.filter(|f| f.exists()));
// For each rc file, append a PATH entry if not already present
for rcfile in rcfiles {
if !rcfile.exists() {
continue;
}
fn read_file(path: &Path) -> io::Result<String> {
let mut file = fs::OpenOptions::new().read(true).open(path)?;
let mut contents = String::new();
io::Read::read_to_string(&mut file, &mut contents)?;
Ok(contents)
}
match read_file(&rcfile) {
Err(err) => {
println!("Unable to read {:?}: {}", rcfile, err);
}
Ok(contents) => {
if !contents.contains(&shell_export_string) {
println!(
"Adding {} to {}",
style(&shell_export_string).italic(),
style(rcfile.to_str().unwrap()).bold()
);
fn append_file(dest: &Path, line: &str) -> io::Result<()> {
use std::io::Write;
let mut dest_file = fs::OpenOptions::new()
.write(true)
.append(true)
.create(true)
.open(dest)?;
writeln!(&mut dest_file, "{}", line)?;
dest_file.sync_data()?;
Ok(())
}
append_file(&rcfile, &shell_export_string).unwrap_or_else(|err| {
format!("Unable to append to {:?}: {}", rcfile, err);
});
modified_rcfiles = true;
}
}
}
}
if modified_rcfiles {
println!(
"\n{}\n {}\n",
style("Close and reopen your terminal to apply the PATH changes or run the following in your existing shell:").bold().blue(),
shell_export_string
);
}
modified_rcfiles
}
/// First-time setup: persist the configuration (only when it changed, so
/// repeated `solana-install init` runs don't force a re-download), perform the
/// initial install, and optionally wire the bin directory into PATH.
pub fn init(
    config_file: &str,
    data_dir: &str,
    json_rpc_url: &str,
    update_manifest_pubkey: &Pubkey,
    no_modify_path: bool,
    explicit_release: Option<ExplicitRelease>,
) -> Result<(), String> {
    let config = Config::new(
        data_dir,
        json_rpc_url,
        update_manifest_pubkey,
        explicit_release,
    );
    // Compare against the existing config with its manifest cleared, matching
    // how a fresh config starts out.
    let mut existing = Config::load(config_file).unwrap_or_default();
    existing.current_update_manifest = None;
    if existing != config {
        config.save(config_file)?;
    }
    init_or_update(config_file, true, false)?;
    if !no_modify_path {
        let path_modified = add_to_path(&config.active_release_bin_dir().to_str().unwrap());
        if !path_modified {
            // PATH untouched; at least warn if the bin dir isn't already there.
            check_env_path_for_bin_dir(&config);
        }
    }
    Ok(())
}
/// URL of the release tarball for `release_semver` on GitHub, specialized for
/// the build target this binary was compiled for.
fn github_release_download_url(release_semver: &str) -> String {
    let target = crate::build_env::TARGET;
    format!(
        "https://github.com/solana-labs/solana/releases/download/v{}/solana-release-{}.tar.bz2",
        release_semver, target
    )
}
/// URL of the release tarball for `release_channel` (e.g. edge/beta/stable)
/// on the Solana release server.
fn release_channel_download_url(release_channel: &str) -> String {
    let target = crate::build_env::TARGET;
    format!(
        "http://release.solana.com/{}/solana-release-{}.tar.bz2",
        release_channel, target
    )
}
/// URL of the version metadata (yml) for `release_channel` on the Solana
/// release server.
fn release_channel_version_url(release_channel: &str) -> String {
    let target = crate::build_env::TARGET;
    format!(
        "http://release.solana.com/{}/solana-release-{}.yml",
        release_channel, target
    )
}
/// Pretty-prints the release date and download URL of an update manifest.
fn print_update_manifest(update_manifest: &UpdateManifest) {
    let release_date = Local.timestamp(update_manifest.timestamp_secs as i64, 0);
    println_name_value(
        &format!("{}release date:", BULLET),
        &release_date.to_string(),
    );
    println_name_value(
        &format!("{}download URL:", BULLET),
        &update_manifest.download_url,
    );
}
/// Displays the current install configuration and, unless `local_info_only`
/// is set, also checks for an available update.
pub fn info(config_file: &str, local_info_only: bool, eval: bool) -> Result<(), String> {
    let config = Config::load(config_file)?;
    // `eval` mode: emit shell-sourceable KEY=VALUE lines and return early.
    if eval {
        println!(
            "SOLANA_INSTALL_ACTIVE_RELEASE={}",
            &config.active_release_dir().to_str().unwrap_or("")
        );
        config
            .explicit_release
            .map(|er| match er {
                ExplicitRelease::Semver(semver) => semver,
                ExplicitRelease::Channel(channel) => channel,
            })
            .and_then(|channel| {
                println!("SOLANA_INSTALL_ACTIVE_CHANNEL={}", channel,);
                Option::<String>::None
            });
        return Ok(());
    }
    println_name_value("Configuration:", &config_file);
    println_name_value(
        "Active release directory:",
        &config.active_release_dir().to_str().unwrap_or("?"),
    );
    // Prints the short commit of the currently active release, if its
    // version.yml is readable.
    fn print_release_version(config: &Config) {
        if let Ok(release_version) =
            load_release_version(&config.active_release_dir().join("version.yml"))
        {
            println_name_value(
                &format!("{}Release commit:", BULLET),
                &release_version.commit[0..7],
            );
        }
    }
    // Explicit release (pinned semver or channel) vs. manifest-driven install.
    if let Some(explicit_release) = &config.explicit_release {
        match explicit_release {
            ExplicitRelease::Semver(release_semver) => {
                println_name_value(&format!("{}Release version:", BULLET), &release_semver);
                println_name_value(
                    &format!("{}Release URL:", BULLET),
                    &github_release_download_url(release_semver),
                );
            }
            ExplicitRelease::Channel(release_channel) => {
                println_name_value(&format!("{}Release channel:", BULLET), &release_channel);
                println_name_value(
                    &format!("{}Release URL:", BULLET),
                    &release_channel_download_url(release_channel),
                );
            }
        }
        print_release_version(&config);
    } else {
        println_name_value("JSON RPC URL:", &config.json_rpc_url);
        println_name_value(
            "Update manifest pubkey:",
            &config.update_manifest_pubkey.to_string(),
        );
        match config.current_update_manifest {
            Some(ref update_manifest) => {
                println_name_value("Installed version:", "");
                print_release_version(&config);
                print_update_manifest(&update_manifest);
            }
            None => {
                println_name_value("Installed version:", "None");
            }
        }
    }
    if local_info_only {
        Ok(())
    } else {
        // Also report whether an update is available (check only, no install).
        update(config_file, true).map(|_| ())
    }
}
/// Deploys a release: downloads the archive at `download_url`, validates it,
/// and publishes a signed update manifest for it on the cluster.
pub fn deploy(
    json_rpc_url: &str,
    from_keypair_file: &str,
    download_url: &str,
    update_manifest_keypair_file: &str,
) -> Result<(), String> {
    let from_keypair = read_keypair_file(from_keypair_file)
        .map_err(|err| format!("Unable to read {}: {}", from_keypair_file, err))?;
    let update_manifest_keypair = read_keypair_file(update_manifest_keypair_file)
        .map_err(|err| format!("Unable to read {}: {}", update_manifest_keypair_file, err))?;
    println_name_value("JSON RPC URL:", json_rpc_url);
    println_name_value(
        "Update manifest pubkey:",
        &update_manifest_keypair.pubkey().to_string(),
    );
    // Confirm the `json_rpc_url` is good and that `from_keypair` is a valid account
    let rpc_client = RpcClient::new(json_rpc_url.to_string());
    let progress_bar = new_spinner_progress_bar();
    progress_bar.set_message(format!("{}Checking cluster...", LOOKING_GLASS));
    let balance = rpc_client
        .get_balance(&from_keypair.pubkey())
        .map_err(|err| {
            format!(
                "Unable to get the account balance of {}: {}",
                from_keypair_file, err
            )
        })?;
    progress_bar.finish_and_clear();
    if balance == 0 {
        return Err(format!("{} account balance is empty", from_keypair_file));
    }
    // Download the release
    let (temp_dir, temp_archive, temp_archive_sha256) = download_to_temp(download_url, None)
        .map_err(|err| format!("Unable to download {}: {}", download_url, err))?;
    // Skip the deploy if the on-chain manifest already references this exact
    // archive (matched by SHA256).
    if let Ok(update_manifest) = get_update_manifest(&rpc_client, &update_manifest_keypair.pubkey())
    {
        if temp_archive_sha256 == update_manifest.download_sha256 {
            println!(
                " {}{}",
                INFORMATION,
                style("Update is already deployed").bold()
            );
            return Ok(());
        }
    }
    // Extract it and load the release version metadata
    let temp_release_dir = temp_dir.path().join("archive");
    extract_release_archive(&temp_archive, &temp_release_dir).map_err(|err| {
        format!(
            "Unable to extract {:?} into {:?}: {}",
            temp_archive, temp_release_dir, err
        )
    })?;
    let release_target = load_release_target(&temp_release_dir).map_err(|err| {
        format!(
            "Unable to load release target from {:?}: {}",
            temp_release_dir, err
        )
    })?;
    println_name_value("Update target:", &release_target);
    let progress_bar = new_spinner_progress_bar();
    progress_bar.set_message(format!("{}Deploying update...", PACKAGE));
    // Construct an update manifest for the release
    let mut update_manifest = SignedUpdateManifest {
        account_pubkey: update_manifest_keypair.pubkey(),
        ..SignedUpdateManifest::default()
    };
    update_manifest.manifest.timestamp_secs = timestamp_secs();
    update_manifest.manifest.download_url = download_url.to_string();
    update_manifest.manifest.download_sha256 = temp_archive_sha256;
    // Sign with the manifest keypair and sanity-check the signature locally
    // before publishing.
    update_manifest.sign(&update_manifest_keypair);
    assert!(update_manifest.verify());
    // Store the new update manifest on the cluster
    new_update_manifest(&rpc_client, &from_keypair, &update_manifest_keypair)
        .map_err(|err| format!("Unable to create update manifest: {}", err))?;
    store_update_manifest(
        &rpc_client,
        &from_keypair,
        &update_manifest_keypair,
        &update_manifest,
    )
    .map_err(|err| format!("Unable to store update manifest: {:?}", err))?;
    progress_bar.finish_and_clear();
    println!(" {}{}", SPARKLE, style("Deployment successful").bold());
    Ok(())
}
/// Creates a directory symlink; Windows needs a directory-specific API.
#[cfg(windows)]
fn symlink_dir<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> std::io::Result<()> {
    std::os::windows::fs::symlink_dir(src, dst)
}
/// Creates a directory symlink; on unix the generic symlink API suffices.
#[cfg(not(windows))]
fn symlink_dir<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> std::io::Result<()> {
    std::os::unix::fs::symlink(src, dst)
}
/// Garbage-collects cached releases, keeping only the `MAX_CACHE_LEN` most
/// recently modified directories under the releases directory.
pub fn gc(config_file: &str) -> Result<(), String> {
    let config = Config::load(config_file)?;
    let entries = fs::read_dir(&config.releases_dir)
        .map_err(|err| format!("Unable to read {}: {}", config.releases_dir.display(), err))?;
    // Collect (path, modified-time) for every directory entry we can stat;
    // unreadable entries and plain files are silently skipped.
    let mut releases = entries
        .filter_map(|entry| entry.ok())
        .filter_map(|entry| {
            entry
                .metadata()
                .ok()
                .map(|metadata| (entry.path(), metadata))
        })
        // This stage only filters, so `filter` is clearer than the previous
        // `filter_map` that re-wrapped the tuple in `Some`.
        .filter(|(_release_path, metadata)| metadata.is_dir())
        .filter_map(|(release_path, metadata)| {
            metadata
                .modified()
                .ok()
                .map(|modified_time| (release_path, modified_time))
        })
        .collect::<Vec<_>>();
    // `SystemTime` is totally ordered, so use `cmp` directly instead of
    // `partial_cmp().unwrap()`. Newest releases first.
    releases.sort_by(|a, b| b.1.cmp(&a.1));
    const MAX_CACHE_LEN: usize = 5;
    if releases.len() > MAX_CACHE_LEN {
        let old_releases = releases.split_off(MAX_CACHE_LEN);
        if !old_releases.is_empty() {
            let progress_bar = new_spinner_progress_bar();
            progress_bar.set_length(old_releases.len() as u64);
            progress_bar.set_style(
                ProgressStyle::default_bar()
                    .template("{spinner:.green}{wide_msg} [{bar:40.cyan/blue}] {pos}/{len} ({eta})")
                    .progress_chars("=> "),
            );
            progress_bar.set_message(format!("{}Removing old releases", RECYCLING));
            for (release, _modified_type) in old_releases {
                progress_bar.inc(1);
                // Best-effort removal; a failure here leaves a stale cache
                // entry but should not fail the whole gc pass.
                let _ = fs::remove_dir_all(&release);
            }
            progress_bar.finish_and_clear();
        }
    }
    Ok(())
}
/// One release entry from the GitHub releases API — only the fields this tool
/// consumes are deserialized.
#[derive(Debug, Deserialize, Serialize)]
pub struct GithubRelease {
    pub tag_name: String,
    pub prerelease: bool,
}
/// Newtype over the JSON array returned by the GitHub releases endpoint.
#[derive(Debug, Deserialize, Serialize)]
pub struct GithubReleases(Vec<GithubRelease>);
/// Parses `string` as a semantic version, tolerating an optional leading `v`
/// (GitHub release tags look like `v1.2.3`).
fn semver_of(string: &str) -> Result<semver::Version, String> {
    // `strip_prefix` replaces the previous `starts_with` + `split_at` pair:
    // one lookup, no manual index arithmetic.
    let stripped = string.strip_prefix('v').unwrap_or(string);
    semver::Version::parse(stripped).map_err(|err| err.to_string())
}
/// Queries the GitHub releases API and returns the newest release version
/// (as a string, `v` prefix removed) that satisfies the optional
/// `version_filter` and the prerelease policy, or `None` when nothing matches.
fn check_for_newer_github_release(
    version_filter: Option<semver::VersionReq>,
    prerelease_allowed: bool,
) -> reqwest::Result<Option<String>> {
    let url =
        reqwest::Url::parse("https://api.github.com/repos/solana-labs/solana/releases").unwrap();
    // GitHub rejects requests without a User-Agent, so build a client with one.
    let client = reqwest::blocking::Client::builder()
        .user_agent("solana-install")
        .build()?;
    let request = client.get(url).build()?;
    let response = client.execute(request)?;
    // Keep only tags that parse as semver, respect the prerelease policy, and
    // match the optional version filter.
    let mut candidates = Vec::new();
    for GithubRelease {
        tag_name,
        prerelease,
    } in response.json::<GithubReleases>()?.0
    {
        let version = match semver_of(&tag_name) {
            Ok(version) => version,
            Err(_) => continue,
        };
        if prerelease && !prerelease_allowed {
            continue;
        }
        if let Some(version_filter) = version_filter.as_ref() {
            if !version_filter.matches(&version) {
                continue;
            }
        }
        candidates.push(version);
    }
    // Highest version wins.
    candidates.sort();
    Ok(candidates.pop().map(|newest| newest.to_string()))
}
/// How far an update may move between semver releases; `init_or_update` maps
/// these to `=` / `~` / `^` version requirements respectively.
pub enum SemverUpdateType {
    Fixed,
    Patch,
    _Minor,
}
/// Checks for (and unless `check_only`, applies) an update; thin wrapper over
/// `init_or_update` with `is_init = false`. Returns true if an update exists.
pub fn update(config_file: &str, check_only: bool) -> Result<bool, String> {
    init_or_update(config_file, false, check_only)
}
/// Core install/update flow shared by `init` and `update`.
///
/// Determines the target release (pinned semver, channel, or on-chain update
/// manifest), downloads and extracts it if not already cached, and atomically
/// repoints the active-release symlink. Returns `Ok(true)` when an update was
/// found (and, unless `check_only`, applied), `Ok(false)` when already
/// up to date.
pub fn init_or_update(config_file: &str, is_init: bool, check_only: bool) -> Result<bool, String> {
    let mut config = Config::load(config_file)?;
    // On init, pin to the exact requested version ("="); on update, allow
    // patch-level movement ("~").
    let semver_update_type = if is_init {
        SemverUpdateType::Fixed
    } else {
        SemverUpdateType::Patch
    };
    let (updated_version, download_url_and_sha256, release_dir) = if let Some(explicit_release) =
        &config.explicit_release
    {
        match explicit_release {
            // Pinned semver release: consult GitHub for the newest compatible tag.
            ExplicitRelease::Semver(current_release_semver) => {
                let progress_bar = new_spinner_progress_bar();
                progress_bar.set_message(format!("{}Checking for updates...", LOOKING_GLASS));
                let github_release = check_for_newer_github_release(
                    semver::VersionReq::parse(&format!(
                        "{}{}",
                        match semver_update_type {
                            SemverUpdateType::Fixed => "=",
                            SemverUpdateType::Patch => "~",
                            SemverUpdateType::_Minor => "^",
                        },
                        current_release_semver
                    ))
                    .ok(),
                    is_init,
                )
                .map_err(|err| err.to_string())?;
                progress_bar.finish_and_clear();
                match github_release {
                    None => {
                        return Err(format!("Unknown release: {}", current_release_semver));
                    }
                    Some(release_semver) => {
                        // If GitHub reports the same version we already track,
                        // and the active release matches it, nothing to do.
                        if release_semver == *current_release_semver {
                            if let Ok(active_release_version) = load_release_version(
                                &config.active_release_dir().join("version.yml"),
                            ) {
                                if format!("v{}", current_release_semver)
                                    == active_release_version.channel
                                {
                                    println!(
                                        "Install is up to date. {} is the latest compatible release",
                                        release_semver
                                    );
                                    return Ok(false);
                                }
                            }
                        }
                        config.explicit_release =
                            Some(ExplicitRelease::Semver(release_semver.clone()));
                        let release_dir = config.release_dir(&release_semver);
                        let download_url_and_sha256 = if release_dir.exists() {
                            // Release already present in the cache
                            None
                        } else {
                            Some((github_release_download_url(&release_semver), None))
                        };
                        (release_semver, download_url_and_sha256, release_dir)
                    }
                }
            }
            // Channel release (e.g. edge/beta): compare commits via the
            // channel's published version.yml.
            ExplicitRelease::Channel(release_channel) => {
                let version_url = release_channel_version_url(release_channel);
                let (_temp_dir, temp_file, _temp_archive_sha256) =
                    download_to_temp(&version_url, None)
                        .map_err(|err| format!("Unable to download {}: {}", version_url, err))?;
                let update_release_version = load_release_version(&temp_file)?;
                let release_id = format!("{}-{}", release_channel, update_release_version.commit);
                let release_dir = config.release_dir(&release_id);
                let current_release_version_yml =
                    release_dir.join("solana-release").join("version.yml");
                let download_url = release_channel_download_url(release_channel);
                if !current_release_version_yml.exists() {
                    // Nothing cached for this commit yet: download required.
                    (
                        format!(
                            "{} commit {}",
                            release_channel,
                            &update_release_version.commit[0..7]
                        ),
                        Some((download_url, None)),
                        release_dir,
                    )
                } else {
                    let current_release_version =
                        load_release_version(&current_release_version_yml)?;
                    if update_release_version.commit == current_release_version.commit {
                        if let Ok(active_release_version) =
                            load_release_version(&config.active_release_dir().join("version.yml"))
                        {
                            if current_release_version.commit == active_release_version.commit {
                                // Same version, no update required
                                println!(
                                    "Install is up to date. {} is the latest commit for {}",
                                    &active_release_version.commit[0..7],
                                    release_channel
                                );
                                return Ok(false);
                            }
                        }
                        // Release already present in the cache
                        (
                            format!(
                                "{} commit {}",
                                release_channel,
                                &update_release_version.commit[0..7]
                            ),
                            None,
                            release_dir,
                        )
                    } else {
                        (
                            format!(
                                "{} (from {})",
                                &update_release_version.commit[0..7],
                                &current_release_version.commit[0..7],
                            ),
                            Some((download_url, None)),
                            release_dir,
                        )
                    }
                }
            }
        }
    } else {
        // No explicit release configured: follow the on-chain update manifest.
        let progress_bar = new_spinner_progress_bar();
        progress_bar.set_message(format!("{}Checking for updates...", LOOKING_GLASS));
        let rpc_client = RpcClient::new(config.json_rpc_url.clone());
        let update_manifest = get_update_manifest(&rpc_client, &config.update_manifest_pubkey)?;
        progress_bar.finish_and_clear();
        if Some(&update_manifest) == config.current_update_manifest.as_ref() {
            println!("Install is up to date");
            return Ok(false);
        }
        println!("\n{}", style("An update is available:").bold());
        print_update_manifest(&update_manifest);
        // Refuse to proceed if the system clock predates this binary's own
        // build time — manifest timestamp checks would be meaningless.
        if timestamp_secs()
            < crate::build_env::BUILD_SECONDS_SINCE_UNIX_EPOCH
                .parse::<u64>()
                .unwrap()
        {
            return Err("Unable to update as system time seems unreliable".to_string());
        }
        // Reject manifest rollbacks (older timestamp than what's installed).
        if let Some(ref current_update_manifest) = config.current_update_manifest {
            if update_manifest.timestamp_secs < current_update_manifest.timestamp_secs {
                return Err("Unable to update to an older version".to_string());
            }
        }
        config.current_update_manifest = Some(update_manifest.clone());
        let release_dir = config.release_dir(&update_manifest.download_sha256.to_string());
        let download_url = update_manifest.download_url;
        let archive_sha256 = Some(update_manifest.download_sha256);
        (
            "latest manifest".to_string(),
            Some((download_url, archive_sha256)),
            release_dir,
        )
    };
    if check_only {
        println!(
            " {}{}",
            WRAPPED_PRESENT,
            style(format!("Update available: {}", updated_version)).bold()
        );
        return Ok(true);
    }
    // Download and extract only when the release isn't already cached.
    if let Some((download_url, archive_sha256)) = download_url_and_sha256 {
        let (_temp_dir, temp_archive, _temp_archive_sha256) =
            download_to_temp(&download_url, archive_sha256.as_ref())
                .map_err(|err| format!("Unable to download {}: {}", download_url, err))?;
        extract_release_archive(&temp_archive, &release_dir).map_err(|err| {
            format!(
                "Unable to extract {:?} to {:?}: {}",
                temp_archive, release_dir, err
            )
        })?;
    }
    let release_target = load_release_target(&release_dir).map_err(|err| {
        format!(
            "Unable to load release target from {:?}: {}",
            release_dir, err
        )
    })?;
    if release_target != crate::build_env::TARGET {
        return Err(format!("Incompatible update target: {}", release_target));
    }
    // Trigger an update to the modification time for `release_dir`
    {
        let path = &release_dir.join(".touch");
        let _ = fs::OpenOptions::new().create(true).write(true).open(path);
        let _ = fs::remove_file(path);
    }
    // Repoint the active-release symlink at the new release.
    let _ = fs::remove_dir_all(config.active_release_dir());
    symlink_dir(
        release_dir.join("solana-release"),
        config.active_release_dir(),
    )
    .map_err(|err| {
        format!(
            "Unable to symlink {:?} to {:?}: {}",
            release_dir,
            config.active_release_dir(),
            err
        )
    })?;
    config.save(config_file)?;
    // Trim the release cache now that a new entry may have been added.
    gc(config_file)?;
    if is_init {
        println!(
            " {}{}",
            SPARKLE,
            style(format!("{} initialized", updated_version)).bold()
        );
    } else {
        println!(
            " {}{}",
            SPARKLE,
            style(format!("Update successful to {}", updated_version)).bold()
        );
    }
    Ok(true)
}
/// Runs `program_name` from the active release under supervision: restarts it
/// if it exits, polls for updates (manifest-driven installs only) and restarts
/// the child when one is applied, and exits cleanly on Ctrl-C.
pub fn run(
    config_file: &str,
    program_name: &str,
    program_arguments: Vec<&str>,
) -> Result<(), String> {
    let config = Config::load(config_file)?;
    let mut full_program_path = config.active_release_bin_dir().join(program_name);
    // Windows executables carry an .exe extension.
    if cfg!(windows) && full_program_path.extension().is_none() {
        full_program_path.set_extension("exe");
    }
    if !full_program_path.exists() {
        return Err(format!(
            "{} does not exist",
            full_program_path.to_str().unwrap()
        ));
    }
    // `child_option` holds the running child process, if any; `now` tracks the
    // time of the last update poll.
    let mut child_option: Option<std::process::Child> = None;
    let mut now = Instant::now();
    // Forward Ctrl-C into the supervision loop via a channel so the child can
    // be stopped before exiting.
    let (signal_sender, signal_receiver) = mpsc::channel();
    ctrlc::set_handler(move || {
        let _ = signal_sender.send(());
    })
    .expect("Error setting Ctrl-C handler");
    loop {
        // Reap a finished child or (re)spawn one if none is running.
        child_option = match child_option {
            Some(mut child) => match child.try_wait() {
                Ok(Some(status)) => {
                    println_name_value(
                        &format!("{} exited with:", program_name),
                        &status.to_string(),
                    );
                    None
                }
                Ok(None) => Some(child),
                Err(err) => {
                    eprintln!("Error attempting to wait for program to exit: {}", err);
                    None
                }
            },
            None => {
                match std::process::Command::new(&full_program_path)
                    .args(&program_arguments)
                    .spawn()
                {
                    Ok(child) => Some(child),
                    Err(err) => {
                        eprintln!("Failed to spawn {}: {:?}", program_name, err);
                        None
                    }
                }
            }
        };
        // Explicit (pinned) releases are never auto-updated; only
        // manifest-driven installs poll for updates.
        if config.explicit_release.is_none() && now.elapsed().as_secs() > config.update_poll_secs {
            match update(config_file, false) {
                Ok(true) => {
                    // Update successful, kill current process so it will be restart
                    if let Some(ref mut child) = child_option {
                        stop_process(child).unwrap_or_else(|err| {
                            eprintln!("Failed to stop child: {:?}", err);
                        });
                    }
                }
                Ok(false) => {} // No update available
                Err(err) => {
                    eprintln!("Failed to apply update: {:?}", err);
                }
            };
            now = Instant::now();
        }
        // Wait up to 1s for a Ctrl-C; this also paces the supervision loop.
        if let Ok(()) = signal_receiver.recv_timeout(Duration::from_secs(1)) {
            // Handle SIGTERM...
            if let Some(ref mut child) = child_option {
                stop_process(child).unwrap_or_else(|err| {
                    eprintln!("Failed to stop child: {:?}", err);
                });
            }
            std::process::exit(0);
        }
    }
}
use super::{
EvaluationResult, Obligation, ObligationCause, ObligationCauseCode, PredicateObligation,
};
use crate::infer::InferCtxt;
use crate::traits::error_reporting::suggest_constraining_type_param;
use rustc_errors::{error_code, struct_span_err, Applicability, DiagnosticBuilder, Style};
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::Visitor;
use rustc_hir::Node;
use rustc_middle::ty::TypeckTables;
use rustc_middle::ty::{
self, AdtKind, DefIdTree, ToPredicate, Ty, TyCtxt, TypeFoldable, WithConstness,
};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::{MultiSpan, Span, DUMMY_SP};
use std::fmt;
use super::InferCtxtPrivExt;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
// This trait is public to expose the diagnostics methods to clippy.
/// Diagnostic-suggestion surface for trait-obligation errors. Each `suggest_*`
/// / `note_*` method takes an in-progress `DiagnosticBuilder` (by `&mut`) and
/// augments it; implementations live elsewhere in this file.
pub trait InferCtxtExt<'tcx> {
    fn suggest_restricting_param_bound(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        trait_ref: ty::PolyTraitRef<'_>,
        body_id: hir::HirId,
    );
    fn suggest_borrow_on_unsized_slice(
        &self,
        code: &ObligationCauseCode<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
    );
    // Returns a name for the closure at `def_id`, if one can be determined;
    // may also amend `err` (signature only — behavior implemented elsewhere).
    fn get_closure_name(
        &self,
        def_id: DefId,
        err: &mut DiagnosticBuilder<'_>,
        msg: &str,
    ) -> Option<String>;
    fn suggest_fn_call(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'_>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
        points_at_arg: bool,
    );
    // Returns whether a suggestion was emitted.
    fn suggest_add_reference_to_arg(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
        points_at_arg: bool,
        has_custom_message: bool,
    ) -> bool;
    fn suggest_remove_reference(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
    );
    fn suggest_change_mut(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
        points_at_arg: bool,
    );
    fn suggest_semicolon_removal(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        span: Span,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
    );
    // Returns whether a suggestion was emitted.
    fn suggest_impl_trait(
        &self,
        err: &mut DiagnosticBuilder<'tcx>,
        span: Span,
        obligation: &PredicateObligation<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
    ) -> bool;
    fn point_at_returns_when_relevant(
        &self,
        err: &mut DiagnosticBuilder<'tcx>,
        obligation: &PredicateObligation<'tcx>,
    );
    // Unlike the `suggest_*` methods, this constructs and returns a fresh
    // diagnostic rather than amending an existing one.
    fn report_closure_arg_mismatch(
        &self,
        span: Span,
        found_span: Option<Span>,
        expected_ref: ty::PolyTraitRef<'tcx>,
        found: ty::PolyTraitRef<'tcx>,
    ) -> DiagnosticBuilder<'tcx>;
    fn suggest_fully_qualified_path(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        def_id: DefId,
        span: Span,
        trait_ref: DefId,
    );
    // Returns whether an async/await-specific note was attached.
    fn maybe_note_obligation_cause_for_async_await(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        obligation: &PredicateObligation<'tcx>,
    ) -> bool;
    fn note_obligation_cause_for_async_await(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        target_span: Span,
        scope_span: &Option<Span>,
        expr: Option<hir::HirId>,
        snippet: String,
        first_generator: DefId,
        last_generator: Option<DefId>,
        trait_ref: ty::TraitRef<'_>,
        target_ty: Ty<'tcx>,
        tables: &ty::TypeckTables<'_>,
        obligation: &PredicateObligation<'tcx>,
        next_code: Option<&ObligationCauseCode<'tcx>>,
    );
    fn note_obligation_cause_code<T>(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        predicate: &T,
        cause_code: &ObligationCauseCode<'tcx>,
        obligated_types: &mut Vec<&ty::TyS<'tcx>>,
    ) where
        T: fmt::Display;
    fn suggest_new_overflow_limit(&self, err: &mut DiagnosticBuilder<'_>);
}
/// Builds the `(span, replacement)` pair that appends `pred` to the function's
/// `where`-clause, creating the clause first when none exists yet.
fn predicate_constraint(generics: &hir::Generics<'_>, pred: String) -> (Span, String) {
    // Insert right after the existing predicates (or at the place where a
    // `where`-clause would go).
    let insertion_point =
        generics.where_clause.span_for_predicates_or_empty_place().shrink_to_hi();
    // Continue an existing clause with `,`; otherwise start one with ` where`.
    let lead = if generics.where_clause.predicates.is_empty() { " where" } else { "," };
    (insertion_point, format!("{} {} ", lead, pred))
}
/// Type parameter needs more bounds. The trivial case is `T` `where T: Bound`, but
/// it can also be an `impl Trait` param that needs to be decomposed to a type
/// param for cleaner code.
fn suggest_restriction(
    generics: &hir::Generics<'_>,
    msg: &str,
    err: &mut DiagnosticBuilder<'_>,
    fn_sig: Option<&hir::FnSig<'_>>,
    projection: Option<&ty::ProjectionTy<'_>>,
    trait_ref: ty::PolyTraitRef<'_>,
) {
    let span = generics.where_clause.span_for_predicates_or_empty_place();
    // A suggestion into macro-expanded or desugared code would not be actionable.
    if span.from_expansion() || span.desugaring_kind().is_some() {
        return;
    }
    // Given `fn foo(t: impl Trait)` where `Trait` requires assoc type `A`...
    if let Some((bound_str, fn_sig)) =
        fn_sig.zip(projection).and_then(|(sig, p)| match p.self_ty().kind {
            // Shenanigans to get the `Trait` from the `impl Trait`.
            ty::Param(param) => {
                // `fn foo(t: impl Trait)`
                //            ^^^^^ get this string
                param.name.as_str().strip_prefix("impl").map(|s| (s.trim_start().to_string(), sig))
            }
            _ => None,
        })
    {
        // We know we have an `impl Trait` that doesn't satisfy a required projection.
        // Find all of the ocurrences of `impl Trait` for `Trait` in the function arguments'
        // types. There should be at least one, but there might be *more* than one. In that
        // case we could just ignore it and try to identify which one needs the restriction,
        // but instead we choose to suggest replacing all instances of `impl Trait` with `T`
        // where `T: Trait`.
        let mut ty_spans = vec![];
        let impl_trait_str = format!("impl {}", bound_str);
        for input in fn_sig.decl.inputs {
            // Only bare single-segment path types can be the `impl Trait` we seek.
            if let hir::TyKind::Path(hir::QPath::Resolved(
                None,
                hir::Path { segments: [segment], .. },
            )) = input.kind
            {
                if segment.ident.as_str() == impl_trait_str.as_str() {
                    // `fn foo(t: impl Trait)`
                    //            ^^^^^^^^^^ get this to suggest `T` instead
                    // There might be more than one `impl Trait`.
                    ty_spans.push(input.span);
                }
            }
        }
        // Pick a fresh parameter name not clashing with existing generics.
        let type_param_name = generics.params.next_type_param_name(Some(&bound_str));
        // The type param `T: Trait` we will suggest to introduce.
        let type_param = format!("{}: {}", type_param_name, bound_str);
        // FIXME: modify the `trait_ref` instead of string shenanigans.
        // Turn `<impl Trait as Foo>::Bar: Qux` into `<T as Foo>::Bar: Qux`.
        let pred = trait_ref.without_const().to_predicate().to_string();
        let pred = pred.replace(&impl_trait_str, &type_param_name);
        let mut sugg = vec![
            match generics
                .params
                .iter()
                .filter(|p| match p.kind {
                    // Skip synthetic params: they are the desugared `impl Trait`s themselves.
                    hir::GenericParamKind::Type {
                        synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
                        ..
                    } => false,
                    _ => true,
                })
                .last()
            {
                // `fn foo(t: impl Trait)`
                //        ^ suggest `<T: Trait>` here
                None => (generics.span, format!("<{}>", type_param)),
                // `fn foo<A>(t: impl Trait)`
                //          ^^^ suggest `<A, T: Trait>` here
                Some(param) => (
                    param.bounds_span().unwrap_or(param.span).shrink_to_hi(),
                    format!(", {}", type_param),
                ),
            },
            // `fn foo(t: impl Trait)`
            //                       ^ suggest `where <T as Trait>::A: Bound`
            predicate_constraint(generics, pred),
        ];
        sugg.extend(ty_spans.into_iter().map(|s| (s, type_param_name.to_string())));
        // Suggest `fn foo<T: Trait>(t: T) where <T as Trait>::A: Bound`.
        // FIXME: once `#![feature(associated_type_bounds)]` is stabilized, we should suggest
        // `fn foo(t: impl Trait<A: Bound>)` instead.
        err.multipart_suggestion(
            "introduce a type parameter with a trait bound instead of using `impl Trait`",
            sugg,
            Applicability::MaybeIncorrect,
        );
    } else {
        // Trivial case: `T` needs an extra bound: `T: Bound`.
        let (sp, sugg) =
            predicate_constraint(generics, trait_ref.without_const().to_predicate().to_string());
        let appl = Applicability::MachineApplicable;
        err.span_suggestion(sp, &format!("consider further restricting {}", msg), sugg, appl);
    }
}
impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
    /// Suggests adding a trait bound (`T: Trait`) to whichever enclosing item
    /// introduced the type parameter or associated type in the failed obligation.
    fn suggest_restricting_param_bound(
        &self,
        mut err: &mut DiagnosticBuilder<'_>,
        trait_ref: ty::PolyTraitRef<'_>,
        body_id: hir::HirId,
    ) {
        let self_ty = trait_ref.self_ty();
        // Only plain type params and projections can be helped with a new bound.
        let (param_ty, projection) = match &self_ty.kind {
            ty::Param(_) => (true, None),
            ty::Projection(projection) => (false, Some(projection)),
            _ => return,
        };
        // FIXME: Add check for trait bound that is already present, particularly `?Sized` so we
        // don't suggest `T: Sized + ?Sized`.
        let mut hir_id = body_id;
        // Walk up the HIR from the obligation's body, looking for a node that can
        // carry the suggested bound.
        while let Some(node) = self.tcx.hir().find(hir_id) {
            match node {
                hir::Node::TraitItem(hir::TraitItem {
                    generics,
                    kind: hir::TraitItemKind::Fn(..),
                    ..
                }) if param_ty && self_ty == self.tcx.types.self_param => {
                    // Restricting `Self` for a single method.
                    suggest_restriction(&generics, "`Self`", err, None, projection, trait_ref);
                    return;
                }
                hir::Node::TraitItem(hir::TraitItem {
                    generics,
                    kind: hir::TraitItemKind::Fn(fn_sig, ..),
                    ..
                })
                | hir::Node::ImplItem(hir::ImplItem {
                    generics,
                    kind: hir::ImplItemKind::Fn(fn_sig, ..),
                    ..
                })
                | hir::Node::Item(hir::Item {
                    kind: hir::ItemKind::Fn(fn_sig, generics, _), ..
                }) if projection.is_some() => {
                    // Missing restriction on associated type of type parameter (unmet projection).
                    suggest_restriction(
                        &generics,
                        "the associated type",
                        err,
                        Some(fn_sig),
                        projection,
                        trait_ref,
                    );
                    return;
                }
                hir::Node::Item(
                    hir::Item { kind: hir::ItemKind::Trait(_, _, generics, _, _), .. }
                    | hir::Item { kind: hir::ItemKind::Impl { generics, .. }, .. },
                ) if projection.is_some() => {
                    // Missing restriction on associated type of type parameter (unmet projection).
                    suggest_restriction(
                        &generics,
                        "the associated type",
                        err,
                        None,
                        projection,
                        trait_ref,
                    );
                    return;
                }
                hir::Node::Item(
                    hir::Item { kind: hir::ItemKind::Struct(_, generics), .. }
                    | hir::Item { kind: hir::ItemKind::Enum(_, generics), .. }
                    | hir::Item { kind: hir::ItemKind::Union(_, generics), .. }
                    | hir::Item { kind: hir::ItemKind::Trait(_, _, generics, ..), .. }
                    | hir::Item { kind: hir::ItemKind::Impl { generics, .. }, .. }
                    | hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. }
                    | hir::Item { kind: hir::ItemKind::TyAlias(_, generics), .. }
                    | hir::Item { kind: hir::ItemKind::TraitAlias(generics, _), .. }
                    | hir::Item {
                        kind: hir::ItemKind::OpaqueTy(hir::OpaqueTy { generics, .. }), ..
                    },
                )
                | hir::Node::TraitItem(hir::TraitItem { generics, .. })
                | hir::Node::ImplItem(hir::ImplItem { generics, .. })
                    if param_ty =>
                {
                    // Missing generic type parameter bound.
                    let param_name = self_ty.to_string();
                    let constraint = trait_ref.print_only_trait_path().to_string();
                    if suggest_constraining_type_param(
                        self.tcx,
                        generics,
                        &mut err,
                        &param_name,
                        &constraint,
                        Some(trait_ref.def_id()),
                    ) {
                        return;
                    }
                }
                hir::Node::Crate(..) => return,
                _ => {}
            }
            hir_id = self.tcx.hir().get_parent_item(hir_id);
        }
    }
/// When encountering an assignment of an unsized trait, like `let x = ""[..];`, provide a
/// suggestion to borrow the initializer in order to use have a slice instead.
fn suggest_borrow_on_unsized_slice(
&self,
code: &ObligationCauseCode<'tcx>,
err: &mut DiagnosticBuilder<'tcx>,
) {
if let &ObligationCauseCode::VariableType(hir_id) = code {
let parent_node = self.tcx.hir().get_parent_node(hir_id);
if let Some(Node::Local(ref local)) = self.tcx.hir().find(parent_node) {
if let Some(ref expr) = local.init {
if let hir::ExprKind::Index(_, _) = expr.kind {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
err.span_suggestion(
expr.span,
"consider borrowing here",
format!("&{}", snippet),
Applicability::MachineApplicable,
);
}
}
}
}
}
}
/// Given a closure's `DefId`, return the given name of the closure.
///
/// This doesn't account for reassignments, but it's only used for suggestions.
fn get_closure_name(
&self,
def_id: DefId,
err: &mut DiagnosticBuilder<'_>,
msg: &str,
) -> Option<String> {
let get_name =
|err: &mut DiagnosticBuilder<'_>, kind: &hir::PatKind<'_>| -> Option<String> {
// Get the local name of this closure. This can be inaccurate because
// of the possibility of reassignment, but this should be good enough.
match &kind {
hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, _, name, None) => {
Some(format!("{}", name))
}
_ => {
err.note(&msg);
None
}
}
};
let hir = self.tcx.hir();
let hir_id = hir.as_local_hir_id(def_id)?;
let parent_node = hir.get_parent_node(hir_id);
match hir.find(parent_node) {
Some(hir::Node::Stmt(hir::Stmt { kind: hir::StmtKind::Local(local), .. })) => {
get_name(err, &local.pat.kind)
}
// Different to previous arm because one is `&hir::Local` and the other
// is `P<hir::Local>`.
Some(hir::Node::Local(local)) => get_name(err, &local.pat.kind),
_ => None,
}
}
    /// We tried to apply the bound to an `fn` or closure. Check whether calling it would
    /// evaluate to a type that *would* satisfy the trait binding. If it would, suggest calling
    /// it: `bar(foo)` → `bar(foo())`. This case is *very* likely to be hit if `foo` is `async`.
    fn suggest_fn_call(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'_>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
        points_at_arg: bool,
    ) {
        let self_ty = trait_ref.self_ty();
        // Only `fn` items and closures can be "called"; bail out for anything else.
        let (def_id, output_ty, callable) = match self_ty.kind {
            ty::Closure(def_id, substs) => (def_id, substs.as_closure().sig().output(), "closure"),
            ty::FnDef(def_id, _) => (def_id, self_ty.fn_sig(self.tcx).output(), "function"),
            _ => return,
        };
        let msg = format!("use parentheses to call the {}", callable);
        // Check that the callable's *return type* would satisfy the failed bound;
        // otherwise calling it would only move the error around.
        let obligation = self.mk_obligation_for_def_id(
            trait_ref.def_id(),
            output_ty.skip_binder(),
            obligation.cause.clone(),
            obligation.param_env,
        );
        match self.evaluate_obligation(&obligation) {
            Ok(EvaluationResult::EvaluatedToOk)
            | Ok(EvaluationResult::EvaluatedToOkModuloRegions)
            | Ok(EvaluationResult::EvaluatedToAmbig) => {}
            _ => return,
        }
        let hir = self.tcx.hir();
        // Get the name of the callable and the arguments to be used in the suggestion.
        let (snippet, sugg) = match hir.get_if_local(def_id) {
            Some(hir::Node::Expr(hir::Expr {
                kind: hir::ExprKind::Closure(_, decl, _, span, ..),
                ..
            })) => {
                err.span_label(*span, "consider calling this closure");
                let name = match self.get_closure_name(def_id, err, &msg) {
                    Some(name) => name,
                    None => return,
                };
                // Closure parameters have no reliably-usable names; use `_` placeholders.
                let args = decl.inputs.iter().map(|_| "_").collect::<Vec<_>>().join(", ");
                let sugg = format!("({})", args);
                (format!("{}{}", name, sugg), sugg)
            }
            Some(hir::Node::Item(hir::Item {
                ident,
                kind: hir::ItemKind::Fn(.., body_id),
                ..
            })) => {
                err.span_label(ident.span, "consider calling this function");
                let body = hir.body(*body_id);
                // Reuse the function's own parameter names where they are plain bindings.
                let args = body
                    .params
                    .iter()
                    .map(|arg| match &arg.pat.kind {
                        hir::PatKind::Binding(_, _, ident, None)
                        // FIXME: provide a better suggestion when encountering `SelfLower`, it
                        // should suggest a method call.
                        if ident.name != kw::SelfLower => ident.to_string(),
                        _ => "_".to_string(),
                    })
                    .collect::<Vec<_>>()
                    .join(", ");
                let sugg = format!("({})", args);
                (format!("{}{}", ident, sugg), sugg)
            }
            _ => return,
        };
        if points_at_arg {
            // When the obligation error has been ensured to have been caused by
            // an argument, the `obligation.cause.span` points at the expression
            // of the argument, so we can provide a suggestion. This is signaled
            // by `points_at_arg`. Otherwise, we give a more general note.
            err.span_suggestion_verbose(
                obligation.cause.span.shrink_to_hi(),
                &msg,
                sugg,
                Applicability::HasPlaceholders,
            );
        } else {
            err.help(&format!("{}: `{}`", msg, snippet));
        }
    }
    /// When an obligation on a function argument fails but would hold for `&arg`,
    /// rewrites the diagnostic to refer to the original (parent) obligation and
    /// suggests borrowing. Returns `true` if the error was rewritten, signaling
    /// the caller to return early.
    fn suggest_add_reference_to_arg(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
        points_at_arg: bool,
        has_custom_message: bool,
    ) -> bool {
        if !points_at_arg {
            return false;
        }
        let span = obligation.cause.span;
        let param_env = obligation.param_env;
        let trait_ref = trait_ref.skip_binder();
        if let ObligationCauseCode::ImplDerivedObligation(obligation) = &obligation.cause.code {
            // Try to apply the original trait binding obligation by borrowing.
            let self_ty = trait_ref.self_ty();
            let found = self_ty.to_string();
            // Build `&'static SelfTy` and re-check the *parent* trait ref with it.
            let new_self_ty = self.tcx.mk_imm_ref(self.tcx.lifetimes.re_static, self_ty);
            let substs = self.tcx.mk_substs_trait(new_self_ty, &[]);
            let new_trait_ref = ty::TraitRef::new(obligation.parent_trait_ref.def_id(), substs);
            let new_obligation = Obligation::new(
                ObligationCause::dummy(),
                param_env,
                new_trait_ref.without_const().to_predicate(),
            );
            if self.predicate_must_hold_modulo_regions(&new_obligation) {
                if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
                    // We have a very specific type of error, where just borrowing this argument
                    // might solve the problem. In cases like this, the important part is the
                    // original type obligation, not the last one that failed, which is arbitrary.
                    // Because of this, we modify the error to refer to the original obligation and
                    // return early in the caller.
                    let msg = format!(
                        "the trait bound `{}: {}` is not satisfied",
                        found,
                        obligation.parent_trait_ref.skip_binder().print_only_trait_path(),
                    );
                    if has_custom_message {
                        err.note(&msg);
                    } else {
                        // Replace the generic headline with the parent-obligation one.
                        err.message = vec![(msg, Style::NoStyle)];
                    }
                    if snippet.starts_with('&') {
                        // This is already a literal borrow and the obligation is failing
                        // somewhere else in the obligation chain. Do not suggest non-sense.
                        return false;
                    }
                    err.span_label(
                        span,
                        &format!(
                            "expected an implementor of trait `{}`",
                            obligation.parent_trait_ref.skip_binder().print_only_trait_path(),
                        ),
                    );
                    err.span_suggestion(
                        span,
                        "consider borrowing here",
                        format!("&{}", snippet),
                        Applicability::MaybeIncorrect,
                    );
                    return true;
                }
            }
        }
        false
    }
    /// Whenever references are used by mistake, like `for (i, e) in &vec.iter().enumerate()`,
    /// suggest removing these references until we reach a type that implements the trait.
    fn suggest_remove_reference(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
    ) {
        let trait_ref = trait_ref.skip_binder();
        let span = obligation.cause.span;
        if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
            // Count the leading `&`s in the source snippet, ignoring whitespace.
            let refs_number =
                snippet.chars().filter(|c| !c.is_whitespace()).take_while(|c| *c == '&').count();
            if let Some('\'') = snippet.chars().filter(|c| !c.is_whitespace()).nth(refs_number) {
                // Do not suggest removal of borrow from type arguments.
                return;
            }
            // Peel `&`s from the type one at a time, checking after each whether
            // the obligation would then hold for the peeled type.
            let mut trait_type = trait_ref.self_ty();
            for refs_remaining in 0..refs_number {
                if let ty::Ref(_, t_type, _) = trait_type.kind {
                    trait_type = t_type;
                    let new_obligation = self.mk_obligation_for_def_id(
                        trait_ref.def_id,
                        trait_type,
                        ObligationCause::dummy(),
                        obligation.param_env,
                    );
                    if self.predicate_may_hold(&new_obligation) {
                        // Span covering exactly the leading whitespace and `&`s to delete.
                        let sp = self
                            .tcx
                            .sess
                            .source_map()
                            .span_take_while(span, |c| c.is_whitespace() || *c == '&');
                        let remove_refs = refs_remaining + 1;
                        let msg = if remove_refs == 1 {
                            "consider removing the leading `&`-reference".to_string()
                        } else {
                            format!("consider removing {} leading `&`-references", remove_refs)
                        };
                        err.span_suggestion_short(
                            sp,
                            &msg,
                            String::new(),
                            Applicability::MachineApplicable,
                        );
                        break;
                    }
                } else {
                    break;
                }
            }
        }
    }
    /// Check if the trait bound is implemented for a different mutability and note it in the
    /// final error.
    fn suggest_change_mut(
        &self,
        obligation: &PredicateObligation<'tcx>,
        err: &mut DiagnosticBuilder<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
        points_at_arg: bool,
    ) {
        let span = obligation.cause.span;
        if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
            // Count the leading `&`s in the source snippet, ignoring whitespace.
            let refs_number =
                snippet.chars().filter(|c| !c.is_whitespace()).take_while(|c| *c == '&').count();
            if let Some('\'') = snippet.chars().filter(|c| !c.is_whitespace()).nth(refs_number) {
                // Do not suggest removal of borrow from type arguments.
                return;
            }
            let trait_ref = self.resolve_vars_if_possible(trait_ref);
            if trait_ref.has_infer_types_or_consts() {
                // Do not ICE while trying to find if a reborrow would succeed on a trait with
                // unresolved bindings.
                return;
            }
            if let ty::Ref(region, t_type, mutability) = trait_ref.skip_binder().self_ty().kind {
                // Flip the mutability of the reference and re-evaluate the obligation.
                let trait_type = match mutability {
                    hir::Mutability::Mut => self.tcx.mk_imm_ref(region, t_type),
                    hir::Mutability::Not => self.tcx.mk_mut_ref(region, t_type),
                };
                let new_obligation = self.mk_obligation_for_def_id(
                    trait_ref.skip_binder().def_id,
                    trait_type,
                    ObligationCause::dummy(),
                    obligation.param_env,
                );
                if self.evaluate_obligation_no_overflow(&new_obligation).must_apply_modulo_regions()
                {
                    let sp = self
                        .tcx
                        .sess
                        .source_map()
                        .span_take_while(span, |c| c.is_whitespace() || *c == '&');
                    if points_at_arg && mutability == hir::Mutability::Not && refs_number > 0 {
                        // A literal `&x` argument where `&mut x` would work: suggest the fix.
                        err.span_suggestion_verbose(
                            sp,
                            "consider changing this borrow's mutability",
                            "&mut ".to_string(),
                            Applicability::MachineApplicable,
                        );
                    } else {
                        // No machine-applicable fix possible; just explain the mismatch.
                        err.note(&format!(
                            "`{}` is implemented for `{:?}`, but not for `{:?}`",
                            trait_ref.print_only_trait_path(),
                            trait_type,
                            trait_ref.skip_binder().self_ty(),
                        ));
                    }
                }
            }
        }
    }
fn suggest_semicolon_removal(
&self,
obligation: &PredicateObligation<'tcx>,
err: &mut DiagnosticBuilder<'tcx>,
span: Span,
trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
) {
let hir = self.tcx.hir();
let parent_node = hir.get_parent_node(obligation.cause.body_id);
let node = hir.find(parent_node);
if let Some(hir::Node::Item(hir::Item {
kind: hir::ItemKind::Fn(sig, _, body_id), ..
})) = node
{
let body = hir.body(*body_id);
if let hir::ExprKind::Block(blk, _) = &body.value.kind {
if sig.decl.output.span().overlaps(span)
&& blk.expr.is_none()
&& "()" == &trait_ref.self_ty().to_string()
{
// FIXME(estebank): When encountering a method with a trait
// bound not satisfied in the return type with a body that has
// no return, suggest removal of semicolon on last statement.
// Once that is added, close #54771.
if let Some(ref stmt) = blk.stmts.last() {
let sp = self.tcx.sess.source_map().end_point(stmt.span);
err.span_label(sp, "consider removing this semicolon");
}
}
}
}
}
    /// If all conditions are met to identify a returned `dyn Trait`, suggest using `impl Trait` if
    /// applicable and signal that the error has been expanded appropriately and needs to be
    /// emitted.
    fn suggest_impl_trait(
        &self,
        err: &mut DiagnosticBuilder<'tcx>,
        span: Span,
        obligation: &PredicateObligation<'tcx>,
        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
    ) -> bool {
        match obligation.cause.code.peel_derives() {
            // Only suggest `impl Trait` if the return type is unsized because it is `dyn Trait`.
            ObligationCauseCode::SizedReturnType => {}
            _ => return false,
        }
        let hir = self.tcx.hir();
        let parent_node = hir.get_parent_node(obligation.cause.body_id);
        let node = hir.find(parent_node);
        // Only free `fn` items are handled here.
        let (sig, body_id) = if let Some(hir::Node::Item(hir::Item {
            kind: hir::ItemKind::Fn(sig, _, body_id),
            ..
        })) = node
        {
            (sig, body_id)
        } else {
            return false;
        };
        let body = hir.body(*body_id);
        let trait_ref = self.resolve_vars_if_possible(trait_ref);
        let ty = trait_ref.skip_binder().self_ty();
        let is_object_safe = match ty.kind {
            ty::Dynamic(predicates, _) => {
                // If the `dyn Trait` is not object safe, do not suggest `Box<dyn Trait>`.
                predicates
                    .principal_def_id()
                    .map_or(true, |def_id| self.tcx.object_safety_violations(def_id).is_empty())
            }
            // We only want to suggest `impl Trait` to `dyn Trait`s.
            // For example, `fn foo() -> str` needs to be filtered out.
            _ => return false,
        };
        let ret_ty = if let hir::FnRetTy::Return(ret_ty) = sig.decl.output {
            ret_ty
        } else {
            return false;
        };
        // Use `TypeVisitor` instead of the output type directly to find the span of `ty` for
        // cases like `fn foo() -> (dyn Trait, i32) {}`.
        // Recursively look for `TraitObject` types and if there's only one, use that span to
        // suggest `impl Trait`.
        // Visit to make sure there's a single `return` type to suggest `impl Trait`,
        // otherwise suggest using `Box<dyn Trait>` or an enum.
        let mut visitor = ReturnsVisitor::default();
        visitor.visit_body(&body);
        let tables = self.in_progress_tables.map(|t| t.borrow()).unwrap();
        // Lazy iterator over the (resolved) type of every return path in the body.
        let mut ret_types = visitor
            .returns
            .iter()
            .filter_map(|expr| tables.node_type_opt(expr.hir_id))
            .map(|ty| self.resolve_vars_if_possible(&ty));
        // Determine whether every return path produces the same, non-error type.
        let (last_ty, all_returns_have_same_type) = ret_types.clone().fold(
            (None, true),
            |(last_ty, mut same): (std::option::Option<Ty<'_>>, bool), ty| {
                let ty = self.resolve_vars_if_possible(&ty);
                same &= last_ty.map_or(true, |last_ty| last_ty == ty) && ty.kind != ty::Error;
                (Some(ty), same)
            },
        );
        // Every returned value must also satisfy the trait object's predicates;
        // otherwise this is a plain type mismatch, not an unsized-return problem.
        let all_returns_conform_to_trait =
            if let Some(ty_ret_ty) = tables.node_type_opt(ret_ty.hir_id) {
                match ty_ret_ty.kind {
                    ty::Dynamic(predicates, _) => {
                        let cause = ObligationCause::misc(ret_ty.span, ret_ty.hir_id);
                        let param_env = ty::ParamEnv::empty();
                        ret_types.all(|returned_ty| {
                            predicates.iter().all(|predicate| {
                                let pred = predicate.with_self_ty(self.tcx, returned_ty);
                                let obl = Obligation::new(cause.clone(), param_env, pred);
                                self.predicate_may_hold(&obl)
                            })
                        })
                    }
                    _ => false,
                }
            } else {
                true
            };
        let sm = self.tcx.sess.source_map();
        let (snippet, last_ty) =
            if let (true, hir::TyKind::TraitObject(..), Ok(snippet), true, Some(last_ty)) = (
                // Verify that we're dealing with a return `dyn Trait`
                ret_ty.span.overlaps(span),
                &ret_ty.kind,
                sm.span_to_snippet(ret_ty.span),
                // If any of the return types does not conform to the trait, then we can't
                // suggest `impl Trait` nor trait objects, it is a type mismatch error.
                all_returns_conform_to_trait,
                last_ty,
            ) {
                (snippet, last_ty)
            } else {
                return false;
            };
        // Rebuild the diagnostic as E0746 with a more targeted message.
        err.code(error_code!(E0746));
        err.set_primary_message("return type cannot have an unboxed trait object");
        err.children.clear();
        let impl_trait_msg = "for information on `impl Trait`, see \
            <https://doc.rust-lang.org/book/ch10-02-traits.html\
            #returning-types-that-implement-traits>";
        let trait_obj_msg = "for information on trait objects, see \
            <https://doc.rust-lang.org/book/ch17-02-trait-objects.html\
            #using-trait-objects-that-allow-for-values-of-different-types>";
        let has_dyn = snippet.split_whitespace().next().map_or(false, |s| s == "dyn");
        let trait_obj = if has_dyn { &snippet[4..] } else { &snippet[..] };
        if all_returns_have_same_type {
            // Suggest `-> impl Trait`.
            err.span_suggestion(
                ret_ty.span,
                &format!(
                    "return `impl {1}` instead, as all return paths are of type `{}`, \
                     which implements `{1}`",
                    last_ty, trait_obj,
                ),
                format!("impl {}", trait_obj),
                Applicability::MachineApplicable,
            );
            err.note(impl_trait_msg);
        } else {
            if is_object_safe {
                // Suggest `-> Box<dyn Trait>` and `Box::new(returned_value)`.
                // Get all the return values and collect their span and suggestion.
                if let Some(mut suggestions) = visitor
                    .returns
                    .iter()
                    .map(|expr| {
                        let snip = sm.span_to_snippet(expr.span).ok()?;
                        Some((expr.span, format!("Box::new({})", snip)))
                    })
                    .collect::<Option<Vec<_>>>()
                {
                    // Add the suggestion for the return type.
                    suggestions.push((ret_ty.span, format!("Box<dyn {}>", trait_obj)));
                    err.multipart_suggestion(
                        "return a boxed trait object instead",
                        suggestions,
                        Applicability::MaybeIncorrect,
                    );
                }
            } else {
                // This is currently not possible to trigger because E0038 takes precedence, but
                // leave it in for completeness in case anything changes in an earlier stage.
                err.note(&format!(
                    "if trait `{}` was object safe, you could return a trait object",
                    trait_obj,
                ));
            }
            err.note(trait_obj_msg);
            err.note(&format!(
                "if all the returned values were of the same type you could use \
                 `impl {}` as the return type",
                trait_obj,
            ));
            err.note(impl_trait_msg);
            err.note("you can create a new `enum` with a variant for each returned type");
        }
        true
    }
fn point_at_returns_when_relevant(
&self,
err: &mut DiagnosticBuilder<'tcx>,
obligation: &PredicateObligation<'tcx>,
) {
match obligation.cause.code.peel_derives() {
ObligationCauseCode::SizedReturnType => {}
_ => return,
}
let hir = self.tcx.hir();
let parent_node = hir.get_parent_node(obligation.cause.body_id);
let node = hir.find(parent_node);
if let Some(hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, _, body_id), .. })) =
node
{
let body = hir.body(*body_id);
// Point at all the `return`s in the function as they have failed trait bounds.
let mut visitor = ReturnsVisitor::default();
visitor.visit_body(&body);
let tables = self.in_progress_tables.map(|t| t.borrow()).unwrap();
for expr in &visitor.returns {
if let Some(returned_ty) = tables.node_type_opt(expr.hir_id) {
let ty = self.resolve_vars_if_possible(&returned_ty);
err.span_label(expr.span, &format!("this returned value is of type `{}`", ty));
}
}
}
}
fn report_closure_arg_mismatch(
&self,
span: Span,
found_span: Option<Span>,
expected_ref: ty::PolyTraitRef<'tcx>,
found: ty::PolyTraitRef<'tcx>,
) -> DiagnosticBuilder<'tcx> {
crate fn build_fn_sig_string<'tcx>(
tcx: TyCtxt<'tcx>,
trait_ref: &ty::TraitRef<'tcx>,
) -> String {
let inputs = trait_ref.substs.type_at(1);
let sig = if let ty::Tuple(inputs) = inputs.kind {
tcx.mk_fn_sig(
inputs.iter().map(|k| k.expect_ty()),
tcx.mk_ty_infer(ty::TyVar(ty::TyVid { index: 0 })),
false,
hir::Unsafety::Normal,
::rustc_target::spec::abi::Abi::Rust,
)
} else {
tcx.mk_fn_sig(
::std::iter::once(inputs),
tcx.mk_ty_infer(ty::TyVar(ty::TyVid { index: 0 })),
false,
hir::Unsafety::Normal,
::rustc_target::spec::abi::Abi::Rust,
)
};
ty::Binder::bind(sig).to_string()
}
let argument_is_closure = expected_ref.skip_binder().substs.type_at(0).is_closure();
let mut err = struct_span_err!(
self.tcx.sess,
span,
E0631,
"type mismatch in {} arguments",
if argument_is_closure { "closure" } else { "function" }
);
let found_str = format!(
"expected signature of `{}`",
build_fn_sig_string(self.tcx, found.skip_binder())
);
err.span_label(span, found_str);
let found_span = found_span.unwrap_or(span);
let expected_str = format!(
"found signature of `{}`",
build_fn_sig_string(self.tcx, expected_ref.skip_binder())
);
err.span_label(found_span, expected_str);
err
}
fn suggest_fully_qualified_path(
&self,
err: &mut DiagnosticBuilder<'_>,
def_id: DefId,
span: Span,
trait_ref: DefId,
) {
if let Some(assoc_item) = self.tcx.opt_associated_item(def_id) {
if let ty::AssocKind::Const | ty::AssocKind::Type = assoc_item.kind {
err.note(&format!(
"{}s cannot be accessed directly on a `trait`, they can only be \
accessed through a specific `impl`",
assoc_item.kind.suggestion_descr(),
));
err.span_suggestion(
span,
"use the fully qualified path to an implementation",
format!("<Type as {}>::{}", self.tcx.def_path_str(trait_ref), assoc_item.ident),
Applicability::HasPlaceholders,
);
}
}
}
/// Adds an async-await specific note to the diagnostic when the future does not implement
/// an auto trait because of a captured type.
///
/// ```ignore (diagnostic)
/// note: future does not implement `Qux` as this value is used across an await
/// --> $DIR/issue-64130-3-other.rs:17:5
/// |
/// LL | let x = Foo;
/// | - has type `Foo`
/// LL | baz().await;
/// | ^^^^^^^^^^^ await occurs here, with `x` maybe used later
/// LL | }
/// | - `x` is later dropped here
/// ```
///
/// When the diagnostic does not implement `Send` or `Sync` specifically, then the diagnostic
/// is "replaced" with a different message and a more specific error.
///
/// ```ignore (diagnostic)
/// error: future cannot be sent between threads safely
/// --> $DIR/issue-64130-2-send.rs:21:5
/// |
/// LL | fn is_send<T: Send>(t: T) { }
/// | ---- required by this bound in `is_send`
/// ...
/// LL | is_send(bar());
/// | ^^^^^^^ future returned by `bar` is not send
/// |
/// = help: within `impl std::future::Future`, the trait `std::marker::Send` is not
/// implemented for `Foo`
/// note: future is not send as this value is used across an await
/// --> $DIR/issue-64130-2-send.rs:15:5
/// |
/// LL | let x = Foo;
/// | - has type `Foo`
/// LL | baz().await;
/// | ^^^^^^^^^^^ await occurs here, with `x` maybe used later
/// LL | }
/// | - `x` is later dropped here
/// ```
///
/// Returns `true` if an async-await specific note was added to the diagnostic.
fn maybe_note_obligation_cause_for_async_await(
&self,
err: &mut DiagnosticBuilder<'_>,
obligation: &PredicateObligation<'tcx>,
) -> bool {
debug!(
"maybe_note_obligation_cause_for_async_await: obligation.predicate={:?} \
obligation.cause.span={:?}",
obligation.predicate, obligation.cause.span
);
let source_map = self.tcx.sess.source_map();
// Attempt to detect an async-await error by looking at the obligation causes, looking
// for a generator to be present.
//
// When a future does not implement a trait because of a captured type in one of the
// generators somewhere in the call stack, then the result is a chain of obligations.
//
// Given a `async fn` A that calls a `async fn` B which captures a non-send type and that
// future is passed as an argument to a function C which requires a `Send` type, then the
// chain looks something like this:
//
// - `BuiltinDerivedObligation` with a generator witness (B)
// - `BuiltinDerivedObligation` with a generator (B)
// - `BuiltinDerivedObligation` with `std::future::GenFuture` (B)
// - `BuiltinDerivedObligation` with `impl std::future::Future` (B)
// - `BuiltinDerivedObligation` with `impl std::future::Future` (B)
// - `BuiltinDerivedObligation` with a generator witness (A)
// - `BuiltinDerivedObligation` with a generator (A)
// - `BuiltinDerivedObligation` with `std::future::GenFuture` (A)
// - `BuiltinDerivedObligation` with `impl std::future::Future` (A)
// - `BuiltinDerivedObligation` with `impl std::future::Future` (A)
// - `BindingObligation` with `impl_send (Send requirement)
//
// The first obligation in the chain is the most useful and has the generator that captured
// the type. The last generator has information about where the bound was introduced. At
// least one generator should be present for this diagnostic to be modified.
let (mut trait_ref, mut target_ty) = match obligation.predicate {
ty::Predicate::Trait(p, _) => {
(Some(p.skip_binder().trait_ref), Some(p.skip_binder().self_ty()))
}
_ => (None, None),
};
let mut generator = None;
let mut last_generator = None;
let mut next_code = Some(&obligation.cause.code);
while let Some(code) = next_code {
debug!("maybe_note_obligation_cause_for_async_await: code={:?}", code);
match code {
ObligationCauseCode::BuiltinDerivedObligation(derived_obligation)
| ObligationCauseCode::ImplDerivedObligation(derived_obligation) => {
let ty = derived_obligation.parent_trait_ref.self_ty();
debug!(
"maybe_note_obligation_cause_for_async_await: \
parent_trait_ref={:?} self_ty.kind={:?}",
derived_obligation.parent_trait_ref, ty.kind
);
match ty.kind {
ty::Generator(did, ..) => {
generator = generator.or(Some(did));
last_generator = Some(did);
}
ty::GeneratorWitness(..) => {}
_ if generator.is_none() => {
trait_ref = Some(*derived_obligation.parent_trait_ref.skip_binder());
target_ty = Some(ty);
}
_ => {}
}
next_code = Some(derived_obligation.parent_code.as_ref());
}
_ => break,
}
}
// Only continue if a generator was found.
debug!(
"maybe_note_obligation_cause_for_async_await: generator={:?} trait_ref={:?} \
target_ty={:?}",
generator, trait_ref, target_ty
);
let (generator_did, trait_ref, target_ty) = match (generator, trait_ref, target_ty) {
(Some(generator_did), Some(trait_ref), Some(target_ty)) => {
(generator_did, trait_ref, target_ty)
}
_ => return false,
};
let span = self.tcx.def_span(generator_did);
// Do not ICE on closure typeck (#66868).
if self.tcx.hir().as_local_hir_id(generator_did).is_none() {
return false;
}
// Get the tables from the infcx if the generator is the function we are
// currently type-checking; otherwise, get them by performing a query.
// This is needed to avoid cycles.
let in_progress_tables = self.in_progress_tables.map(|t| t.borrow());
let generator_did_root = self.tcx.closure_base_def_id(generator_did);
debug!(
"maybe_note_obligation_cause_for_async_await: generator_did={:?} \
generator_did_root={:?} in_progress_tables.hir_owner={:?} span={:?}",
generator_did,
generator_did_root,
in_progress_tables.as_ref().map(|t| t.hir_owner),
span
);
let query_tables;
let tables: &TypeckTables<'tcx> = match &in_progress_tables {
Some(t) if t.hir_owner.map(|owner| owner.to_def_id()) == Some(generator_did_root) => t,
_ => {
query_tables = self.tcx.typeck_tables_of(generator_did);
&query_tables
}
};
// Look for a type inside the generator interior that matches the target type to get
// a span.
let target_ty_erased = self.tcx.erase_regions(&target_ty);
let target_span = tables
.generator_interior_types
.iter()
.find(|ty::GeneratorInteriorTypeCause { ty, .. }| {
// Careful: the regions for types that appear in the
// generator interior are not generally known, so we
// want to erase them when comparing (and anyway,
// `Send` and other bounds are generally unaffected by
// the choice of region). When erasing regions, we
// also have to erase late-bound regions. This is
// because the types that appear in the generator
// interior generally contain "bound regions" to
// represent regions that are part of the suspended
// generator frame. Bound regions are preserved by
// `erase_regions` and so we must also call
// `erase_late_bound_regions`.
let ty_erased = self.tcx.erase_late_bound_regions(&ty::Binder::bind(*ty));
let ty_erased = self.tcx.erase_regions(&ty_erased);
let eq = ty::TyS::same_type(ty_erased, target_ty_erased);
debug!(
"maybe_note_obligation_cause_for_async_await: ty_erased={:?} \
target_ty_erased={:?} eq={:?}",
ty_erased, target_ty_erased, eq
);
eq
})
.map(|ty::GeneratorInteriorTypeCause { span, scope_span, expr, .. }| {
(span, source_map.span_to_snippet(*span), scope_span, expr)
});
debug!(
"maybe_note_obligation_cause_for_async_await: target_ty={:?} \
generator_interior_types={:?} target_span={:?}",
target_ty, tables.generator_interior_types, target_span
);
if let Some((target_span, Ok(snippet), scope_span, expr)) = target_span {
self.note_obligation_cause_for_async_await(
err,
*target_span,
scope_span,
*expr,
snippet,
generator_did,
last_generator,
trait_ref,
target_ty,
tables,
obligation,
next_code,
);
true
} else {
false
}
}
    /// Unconditionally adds the diagnostic note described in
    /// `maybe_note_obligation_cause_for_async_await`'s documentation comment.
    ///
    /// Rewrites `err` so it explains that `target_ty` (the type at
    /// `target_span` held across an `await`/`yield`) is what makes the
    /// generator fail the bound in `trait_ref`, then recurses into the rest of
    /// the obligation chain via `note_obligation_cause_code`.
    fn note_obligation_cause_for_async_await(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        target_span: Span,
        scope_span: &Option<Span>,
        expr: Option<hir::HirId>,
        snippet: String,
        first_generator: DefId,
        last_generator: Option<DefId>,
        trait_ref: ty::TraitRef<'_>,
        target_ty: Ty<'tcx>,
        tables: &ty::TypeckTables<'_>,
        obligation: &PredicateObligation<'tcx>,
        next_code: Option<&ObligationCauseCode<'tcx>>,
    ) {
        let source_map = self.tcx.sess.source_map();
        // Is the generator the body of an `async fn`? Determined from the
        // parent item's asyncness.
        let is_async_fn = self
            .tcx
            .parent(first_generator)
            .map(|parent_did| self.tcx.asyncness(parent_did))
            .map(|parent_asyncness| parent_asyncness == hir::IsAsync::Async)
            .unwrap_or(false);
        // Or is it an async block/closure? Determined from the HIR body's
        // generator kind (only resolvable for local definitions).
        let is_async_move = self
            .tcx
            .hir()
            .as_local_hir_id(first_generator)
            .and_then(|hir_id| self.tcx.hir().maybe_body_owned_by(hir_id))
            .map(|body_id| self.tcx.hir().body(body_id))
            .and_then(|body| body.generator_kind())
            .map(|generator_kind| match generator_kind {
                hir::GeneratorKind::Async(..) => true,
                _ => false,
            })
            .unwrap_or(false);
        // Pick the keyword used in the user-facing notes below.
        let await_or_yield = if is_async_fn || is_async_move { "await" } else { "yield" };
        // Special case the primary error message when send or sync is the trait that was
        // not implemented.
        let is_send = self.tcx.is_diagnostic_item(sym::send_trait, trait_ref.def_id);
        let is_sync = self.tcx.is_diagnostic_item(sym::sync_trait, trait_ref.def_id);
        let hir = self.tcx.hir();
        let trait_explanation = if is_send || is_sync {
            let (trait_name, trait_verb) =
                if is_send { ("`Send`", "sent") } else { ("`Sync`", "shared") };
            // Replace the generic "trait bound not satisfied" headline with a
            // future-specific one. Note the ordering: the code is cleared and
            // the primary message replaced before the span is re-labelled.
            err.clear_code();
            err.set_primary_message(format!(
                "future cannot be {} between threads safely",
                trait_verb
            ));
            let original_span = err.span.primary_span().unwrap();
            let mut span = MultiSpan::from_span(original_span);
            // Name the function whose returned future is at fault, when the
            // generator's parent resolves to a named local item.
            let message = if let Some(name) = last_generator
                .and_then(|generator_did| self.tcx.parent(generator_did))
                .and_then(|parent_did| hir.as_local_hir_id(parent_did))
                .and_then(|parent_hir_id| hir.opt_name(parent_hir_id))
            {
                format!("future returned by `{}` is not {}", name, trait_name)
            } else {
                format!("future is not {}", trait_name)
            };
            span.push_span_label(original_span, message);
            err.set_span(span);
            format!("is not {}", trait_name)
        } else {
            format!("does not implement `{}`", trait_ref.print_only_trait_path())
        };
        // Look at the last interior type to get a span for the `.await`.
        let await_span = tables.generator_interior_types.iter().map(|t| t.span).last().unwrap();
        let mut span = MultiSpan::from_span(await_span);
        span.push_span_label(
            await_span,
            format!("{} occurs here, with `{}` maybe used later", await_or_yield, snippet),
        );
        span.push_span_label(target_span, format!("has type `{}`", target_ty));
        // If available, use the scope span to annotate the drop location.
        if let Some(scope_span) = scope_span {
            span.push_span_label(
                source_map.end_point(*scope_span),
                format!("`{}` is later dropped here", snippet),
            );
        }
        err.span_note(
            span,
            &format!(
                "future {} as this value is used across an {}",
                trait_explanation, await_or_yield,
            ),
        );
        // When we know which expression produced `target_ty`, check whether it
        // is a short-lived temporary that a `let` binding would fix.
        if let Some(expr_id) = expr {
            let expr = hir.expect_expr(expr_id);
            debug!("target_ty evaluated from {:?}", expr);
            let parent = hir.get_parent_node(expr_id);
            if let Some(hir::Node::Expr(e)) = hir.find(parent) {
                let parent_span = hir.span(parent);
                let parent_did = parent.owner.to_def_id();
                // ```rust
                // impl T {
                //     fn foo(&self) -> i32 {}
                // }
                // T.foo();
                // ^^^^^^^ a temporary `&T` created inside this method call due to `&self`
                // ```
                //
                let is_region_borrow =
                    tables.expr_adjustments(expr).iter().any(|adj| adj.is_region_borrow());
                // ```rust
                // struct Foo(*const u8);
                // bar(Foo(std::ptr::null())).await;
                //     ^^^^^^^^^^^^^^^^^^^^^ raw-ptr `*T` created inside this struct ctor.
                // ```
                debug!("parent_def_kind: {:?}", self.tcx.def_kind(parent_did));
                let is_raw_borrow_inside_fn_like_call = match self.tcx.def_kind(parent_did) {
                    Some(DefKind::Fn) | Some(DefKind::Ctor(..)) => target_ty.is_unsafe_ptr(),
                    _ => false,
                };
                // Either situation above creates a temporary held across the
                // await; suggest hoisting it into a `let`.
                if (tables.is_method_call(e) && is_region_borrow)
                    || is_raw_borrow_inside_fn_like_call
                {
                    err.span_help(
                        parent_span,
                        "consider moving this into a `let` \
                        binding to create a shorter lived borrow",
                    );
                }
            }
        }
        // Add a note for the item obligation that remains - normally a note pointing to the
        // bound that introduced the obligation (e.g. `T: Send`).
        debug!("note_obligation_cause_for_async_await: next_code={:?}", next_code);
        self.note_obligation_cause_code(
            err,
            &obligation.predicate,
            next_code.unwrap(),
            &mut Vec::new(),
        );
    }
fn note_obligation_cause_code<T>(
&self,
err: &mut DiagnosticBuilder<'_>,
predicate: &T,
cause_code: &ObligationCauseCode<'tcx>,
obligated_types: &mut Vec<&ty::TyS<'tcx>>,
) where
T: fmt::Display,
{
let tcx = self.tcx;
match *cause_code {
ObligationCauseCode::ExprAssignable
| ObligationCauseCode::MatchExpressionArm { .. }
| ObligationCauseCode::Pattern { .. }
| ObligationCauseCode::IfExpression { .. }
| ObligationCauseCode::IfExpressionWithNoElse
| ObligationCauseCode::MainFunctionType
| ObligationCauseCode::StartFunctionType
| ObligationCauseCode::IntrinsicType
| ObligationCauseCode::MethodReceiver
| ObligationCauseCode::ReturnNoExpression
| ObligationCauseCode::MiscObligation => {}
ObligationCauseCode::SliceOrArrayElem => {
err.note("slice and array elements must have `Sized` type");
}
ObligationCauseCode::TupleElem => {
err.note("only the last element of a tuple may have a dynamically sized type");
}
ObligationCauseCode::ProjectionWf(data) => {
err.note(&format!("required so that the projection `{}` is well-formed", data,));
}
ObligationCauseCode::ReferenceOutlivesReferent(ref_ty) => {
err.note(&format!(
"required so that reference `{}` does not outlive its referent",
ref_ty,
));
}
ObligationCauseCode::ObjectTypeBound(object_ty, region) => {
err.note(&format!(
"required so that the lifetime bound of `{}` for `{}` is satisfied",
region, object_ty,
));
}
ObligationCauseCode::ItemObligation(item_def_id) => {
let item_name = tcx.def_path_str(item_def_id);
let msg = format!("required by `{}`", item_name);
if let Some(sp) = tcx.hir().span_if_local(item_def_id) {
let sp = tcx.sess.source_map().guess_head_span(sp);
err.span_label(sp, &msg);
} else {
err.note(&msg);
}
}
ObligationCauseCode::BindingObligation(item_def_id, span) => {
let item_name = tcx.def_path_str(item_def_id);
let msg = format!("required by this bound in `{}`", item_name);
if let Some(ident) = tcx.opt_item_name(item_def_id) {
let sm = self.tcx.sess.source_map();
let same_line =
match (sm.lookup_line(ident.span.hi()), sm.lookup_line(span.lo())) {
(Ok(l), Ok(r)) => l.line == r.line,
_ => true,
};
if !ident.span.overlaps(span) && !same_line {
err.span_label(ident.span, "");
}
}
if span != DUMMY_SP {
err.span_label(span, &msg);
} else {
err.note(&msg);
}
}
ObligationCauseCode::ObjectCastObligation(object_ty) => {
err.note(&format!(
"required for the cast to the object type `{}`",
self.ty_to_string(object_ty)
));
}
ObligationCauseCode::Coercion { source: _, target } => {
err.note(&format!("required by cast to type `{}`", self.ty_to_string(target)));
}
ObligationCauseCode::RepeatVec(suggest_const_in_array_repeat_expressions) => {
err.note(
"the `Copy` trait is required because the repeated element will be copied",
);
if suggest_const_in_array_repeat_expressions {
err.note(
"this array initializer can be evaluated at compile-time, see issue \
#48147 <https://github.com/rust-lang/rust/issues/49147> \
for more information",
);
if tcx.sess.opts.unstable_features.is_nightly_build() {
err.help(
"add `#![feature(const_in_array_repeat_expressions)]` to the \
crate attributes to enable",
);
}
}
}
ObligationCauseCode::VariableType(_) => {
err.note("all local variables must have a statically known size");
if !self.tcx.features().unsized_locals {
err.help("unsized locals are gated as an unstable feature");
}
}
ObligationCauseCode::SizedArgumentType => {
err.note("all function arguments must have a statically known size");
if !self.tcx.features().unsized_locals {
err.help("unsized locals are gated as an unstable feature");
}
}
ObligationCauseCode::SizedReturnType => {
err.note("the return type of a function must have a statically known size");
}
ObligationCauseCode::SizedYieldType => {
err.note("the yield type of a generator must have a statically known size");
}
ObligationCauseCode::AssignmentLhsSized => {
err.note("the left-hand-side of an assignment must have a statically known size");
}
ObligationCauseCode::TupleInitializerSized => {
err.note("tuples must have a statically known size to be initialized");
}
ObligationCauseCode::StructInitializerSized => {
err.note("structs must have a statically known size to be initialized");
}
ObligationCauseCode::FieldSized { adt_kind: ref item, last } => match *item {
AdtKind::Struct => {
if last {
err.note(
"the last field of a packed struct may only have a \
dynamically sized type if it does not need drop to be run",
);
} else {
err.note(
"only the last field of a struct may have a dynamically sized type",
);
}
}
AdtKind::Union => {
err.note("no field of a union may have a dynamically sized type");
}
AdtKind::Enum => {
err.note("no field of an enum variant may have a dynamically sized type");
}
},
ObligationCauseCode::ConstSized => {
err.note("constant expressions must have a statically known size");
}
ObligationCauseCode::ConstPatternStructural => {
err.note("constants used for pattern-matching must derive `PartialEq` and `Eq`");
}
ObligationCauseCode::SharedStatic => {
err.note("shared static variables must have a type that implements `Sync`");
}
ObligationCauseCode::BuiltinDerivedObligation(ref data) => {
let parent_trait_ref = self.resolve_vars_if_possible(&data.parent_trait_ref);
let ty = parent_trait_ref.skip_binder().self_ty();
err.note(&format!("required because it appears within the type `{}`", ty));
obligated_types.push(ty);
let parent_predicate = parent_trait_ref.without_const().to_predicate();
if !self.is_recursive_obligation(obligated_types, &data.parent_code) {
self.note_obligation_cause_code(
err,
&parent_predicate,
&data.parent_code,
obligated_types,
);
}
}
ObligationCauseCode::ImplDerivedObligation(ref data) => {
let parent_trait_ref = self.resolve_vars_if_possible(&data.parent_trait_ref);
err.note(&format!(
"required because of the requirements on the impl of `{}` for `{}`",
parent_trait_ref.print_only_trait_path(),
parent_trait_ref.skip_binder().self_ty()
));
let parent_predicate = parent_trait_ref.without_const().to_predicate();
self.note_obligation_cause_code(
err,
&parent_predicate,
&data.parent_code,
obligated_types,
);
}
ObligationCauseCode::CompareImplMethodObligation { .. } => {
err.note(&format!(
"the requirement `{}` appears on the impl method \
but not on the corresponding trait method",
predicate
));
}
ObligationCauseCode::CompareImplTypeObligation { .. } => {
err.note(&format!(
"the requirement `{}` appears on the associated impl type \
but not on the corresponding associated trait type",
predicate
));
}
ObligationCauseCode::ReturnType
| ObligationCauseCode::ReturnValue(_)
| ObligationCauseCode::BlockTailExpression(_) => (),
ObligationCauseCode::TrivialBound => {
err.help("see issue #48214");
if tcx.sess.opts.unstable_features.is_nightly_build() {
err.help("add `#![feature(trivial_bounds)]` to the crate attributes to enable");
}
}
ObligationCauseCode::AssocTypeBound(ref data) => {
err.span_label(data.original, "associated type defined here");
if let Some(sp) = data.impl_span {
err.span_label(sp, "in this `impl` item");
}
for sp in &data.bounds {
err.span_label(*sp, "restricted in this bound");
}
}
}
}
fn suggest_new_overflow_limit(&self, err: &mut DiagnosticBuilder<'_>) {
let current_limit = self.tcx.sess.recursion_limit.get();
let suggested_limit = current_limit * 2;
err.help(&format!(
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate (`{}`)",
suggested_limit, self.tcx.crate_name,
));
}
}
/// Collect all the returned expressions within the input expression.
/// Used to point at the return spans when we want to suggest some change to them.
#[derive(Default)]
struct ReturnsVisitor<'v> {
    // Expressions that flow out of the visited body: arguments of
    // `return <expr>` plus tail expressions on the return path (see the
    // `Visitor` impl's `visit_expr`).
    returns: Vec<&'v hir::Expr<'v>>,
    // True while the visitor is inside an expression in tail (return)
    // position of the function body; toggled during traversal.
    in_block_tail: bool,
}
impl<'v> Visitor<'v> for ReturnsVisitor<'v> {
    type Map = hir::intravisit::ErasedMap<'v>;
    // No nested visiting: only the body explicitly handed to `visit_body`
    // is walked.
    fn nested_visit_map(&mut self) -> hir::intravisit::NestedVisitorMap<Self::Map> {
        hir::intravisit::NestedVisitorMap::None
    }
    fn visit_expr(&mut self, ex: &'v hir::Expr<'v>) {
        // Visit every expression to detect `return` paths, either through the function's tail
        // expression or `return` statements. We walk all nodes to find `return` statements, but
        // we only care about tail expressions when `in_block_tail` is `true`, which means that
        // they're in the return path of the function body.
        match ex.kind {
            hir::ExprKind::Ret(Some(ex)) => {
                self.returns.push(ex);
            }
            hir::ExprKind::Block(block, _) if self.in_block_tail => {
                // Statements inside the block are not in tail position, so
                // the flag is cleared while walking them and restored before
                // visiting the block's trailing expression. The order here is
                // load-bearing.
                self.in_block_tail = false;
                for stmt in block.stmts {
                    hir::intravisit::walk_stmt(self, stmt);
                }
                self.in_block_tail = true;
                if let Some(expr) = block.expr {
                    self.visit_expr(expr);
                }
            }
            hir::ExprKind::Match(_, arms, _) if self.in_block_tail => {
                // Every arm body of a tail-position `match` is itself on the
                // return path.
                for arm in arms {
                    self.visit_expr(arm.body);
                }
            }
            // We need to walk to find `return`s in the entire body.
            _ if !self.in_block_tail => hir::intravisit::walk_expr(self, ex),
            // Any other expression in tail position is itself a returned value.
            _ => self.returns.push(ex),
        }
    }
    fn visit_body(&mut self, body: &'v hir::Body<'v>) {
        assert!(!self.in_block_tail);
        // NOTE(review): generator bodies are deliberately excluded from
        // arming the tail flag — presumably their tail expression is not the
        // function's return; confirm against `generator_kind` semantics.
        if body.generator_kind().is_none() {
            if let hir::ExprKind::Block(block, None) = body.value.kind {
                if block.expr.is_some() {
                    self.in_block_tail = true;
                }
            }
        }
        hir::intravisit::walk_body(self, body);
    }
}
/// Picks a fresh, unused name for a new type parameter in a generic
/// parameter list (see the impl for slices of `hir::GenericParam`).
pub trait NextTypeParamName {
    /// Returns a type-parameter name not already used by `self`, seeded by
    /// `name` when one is provided.
    fn next_type_param_name(&self, name: Option<&str>) -> String;
}
impl NextTypeParamName for &[hir::GenericParam<'_>] {
    fn next_type_param_name(&self, name: Option<&str>) -> String {
        // Uppercase the first character of the caller-provided name, if any,
        // and use it as the preferred candidate.
        let first_upper = name
            .and_then(|n| n.chars().next())
            .map(|c| c.to_string().to_uppercase());
        let seed = first_upper.as_deref();
        // Candidate names, tried in order of preference.
        let possible_names = [seed.unwrap_or("T"), "T", "U", "V", "X", "Y", "Z", "A", "B", "C"];
        // Names already taken by existing plainly-named parameters.
        let used_names: Vec<_> = self
            .iter()
            .filter_map(|p| match p.name {
                hir::ParamName::Plain(ident) => Some(ident.name),
                _ => None,
            })
            .collect();
        // First free candidate wins; fall back to a generic placeholder.
        possible_names
            .iter()
            .find(|n| !used_names.contains(&Symbol::intern(n)))
            .unwrap_or(&"ParamName")
            .to_string()
    }
}
| 41.875938 | 100 | 0.511223 |
e9948b80a66d0a1106a3e27e7ab7135227787486 | 423 | use libc;
use utils::*;
use types::*;
// Raw FFI declarations for the C library's QUrl wrapper. The `DOS_*` naming
// in the commented prototypes suggests this binds DOtherSide's C API —
// TODO confirm against the linked library's header.
extern "C" {
    // QUrl
    // Creates a QUrl from a NUL-terminated UTF-8 string and an integer
    // parsing mode (semantics defined by the C library).
    fn dos_qurl_create(url: *const libc::c_char, parsingMode: i32) -> DosQUrl;
    // The following C functions exist in the library but are not bound here:
    // DOS_API void DOS_CALL dos_qurl_delete(DosQUrl *vptr);
    // DOS_API char *DOS_CALL dos_qurl_to_string(const DosQUrl *vptr);
    // DOS_API bool dos_qurl_isValid(const DosQUrl *vptr);
}
/// Builds a `DosQUrl` from a Rust string slice through the C FFI.
pub fn construct_qurl(url: &str) -> DosQUrl {
    // Parsing mode `0` — presumably the C library's default/tolerant mode;
    // TODO confirm against the underlying API's documentation.
    let parsing_mode = 0;
    // NOTE(review): assumes `stoptr` yields a pointer to a valid
    // NUL-terminated C string that stays alive for the duration of the call.
    unsafe { dos_qurl_create(stoptr(url), parsing_mode) }
}
| 23.5 | 78 | 0.695035 |
18253fb0c1df60c3bd42d09297c1510d41ce188a | 13,303 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn serialize_operation_crate_operation_associate_member(
input: &crate::input::AssociateMemberInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_associate_member_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_batch_get_account_status(
input: &crate::input::BatchGetAccountStatusInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_batch_get_account_status_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_batch_get_free_trial_info(
input: &crate::input::BatchGetFreeTrialInfoInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_batch_get_free_trial_info_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_cancel_findings_report(
input: &crate::input::CancelFindingsReportInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_cancel_findings_report_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_create_filter(
input: &crate::input::CreateFilterInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_create_filter_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_create_findings_report(
input: &crate::input::CreateFindingsReportInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_create_findings_report_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_delete_filter(
input: &crate::input::DeleteFilterInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_delete_filter_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_disable(
input: &crate::input::DisableInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_disable_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_disable_delegated_admin_account(
input: &crate::input::DisableDelegatedAdminAccountInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_disable_delegated_admin_account_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_disassociate_member(
input: &crate::input::DisassociateMemberInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_disassociate_member_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_enable(
input: &crate::input::EnableInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_enable_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_enable_delegated_admin_account(
input: &crate::input::EnableDelegatedAdminAccountInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_enable_delegated_admin_account_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_get_findings_report_status(
input: &crate::input::GetFindingsReportStatusInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_get_findings_report_status_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_get_member(
input: &crate::input::GetMemberInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_get_member_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_account_permissions(
input: &crate::input::ListAccountPermissionsInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_account_permissions_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_coverage(
input: &crate::input::ListCoverageInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_coverage_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_coverage_statistics(
input: &crate::input::ListCoverageStatisticsInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_coverage_statistics_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_delegated_admin_accounts(
input: &crate::input::ListDelegatedAdminAccountsInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_delegated_admin_accounts_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_filters(
input: &crate::input::ListFiltersInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_filters_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_finding_aggregations(
input: &crate::input::ListFindingAggregationsInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_finding_aggregations_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_findings(
input: &crate::input::ListFindingsInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_findings_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_members(
input: &crate::input::ListMembersInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_members_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_list_usage_totals(
input: &crate::input::ListUsageTotalsInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_list_usage_totals_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_tag_resource(
input: &crate::input::TagResourceInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_tag_resource_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_update_filter(
input: &crate::input::UpdateFilterInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_update_filter_input(&mut object, input)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_crate_operation_update_organization_configuration(
input: &crate::input::UpdateOrganizationConfigurationInput,
) -> Result<aws_smithy_http::body::SdkBody, aws_smithy_http::operation::SerializationError> {
let mut out = String::new();
let mut object = aws_smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_crate_input_update_organization_configuration_input(
&mut object,
input,
)?;
object.finish();
Ok(aws_smithy_http::body::SdkBody::from(out))
}
| 44.791246 | 100 | 0.749305 |
0e5c309d265437a2c706a86b066c8c030335e9a7 | 1,273 | use std::env;
use twilight_http::Client;
use twilight_model::{
channel::message::allowed_mentions::{AllowedMentions, AllowedMentionsBuilder},
id::Id,
};
/// Example: configure Twilight's allowed-mentions both client-wide and
/// per-message. The client default suppresses every ping; the single message
/// sent below explicitly re-allows pinging one user ID.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Initialize the tracing subscriber.
    tracing_subscriber::fmt::init();
    let token = env::var("DISCORD_TOKEN")?;
    // Setting a default for allowed mentions requires the builder; note that
    // these builder calls can't be chained.
    // An empty `AllowedMentions` default prevents any and all pings.
    let http = Client::builder()
        .token(token)
        .default_allowed_mentions(AllowedMentions::default())
        .build();
    let channel_id = Id::new(381_926_291_785_383_946);
    let user_id = Id::new(77_469_400_222_932_992);
    // We want to warn this user that they attempted to ping @everyone, so the
    // message's own allowed mentions permit pinging exactly that user ID.
    let mentions = AllowedMentionsBuilder::new()
        .user_ids(Vec::from([user_id]))
        .build();
    let content = format!("<@{user_id}> you are not allowed to ping @everyone!");
    http.create_message(channel_id)
        .content(&content)?
        .allowed_mentions(Some(&mentions))
        .exec()
        .await?;
    Ok(())
}
| 31.825 | 127 | 0.648861 |
6952e7964f1b42a98c98ae7eccc00d880678a401 | 59,735 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod container_groups {
use crate::models::*;
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<ContainerGroupListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerInstance/containerGroups",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ContainerGroupListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
    /// Lists the container groups in a single resource group via
    /// `GET .../resourceGroups/{rg}/providers/Microsoft.ContainerInstance/containerGroups`.
    ///
    /// HTTP 200 deserializes to [`ContainerGroupListResult`]; any other status
    /// is returned as [`list_by_resource_group::Error::DefaultResponse`].
    pub async fn list_by_resource_group(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
    ) -> std::result::Result<ContainerGroupListResult, list_by_resource_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups",
            operation_config.base_path(),
            subscription_id,
            resource_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_resource_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_by_resource_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ContainerGroupListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_resource_group::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`list_by_resource_group`] operation.
    pub mod list_by_resource_group {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches a single container group via
    /// `GET .../containerGroups/{container_group_name}`.
    ///
    /// HTTP 200 deserializes to [`ContainerGroup`]; any other status is
    /// returned as [`get::Error::DefaultResponse`] with the service's
    /// `CloudError` payload.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
    ) -> std::result::Result<ContainerGroup, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ContainerGroup =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`get`] operation.
    pub mod get {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a container group via
    /// `PUT .../containerGroups/{container_group_name}` with `container_group`
    /// serialized as the JSON request body.
    ///
    /// HTTP 200 maps to [`create_or_update::Response::Ok200`] (updated) and
    /// HTTP 201 to [`create_or_update::Response::Created201`] (created); any
    /// other status is returned as
    /// [`create_or_update::Error::DefaultResponse`].
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
        container_group: &ContainerGroup,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version())
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(container_group).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ContainerGroup = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: ContainerGroup = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for the [`create_or_update`] operation.
    pub mod create_or_update {
        use crate::{models, models::*};
        /// Distinguishes "updated existing" (200) from "created new" (201).
        #[derive(Debug)]
        pub enum Response {
            Ok200(ContainerGroup),
            Created201(ContainerGroup),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Patches a container group via
    /// `PATCH .../containerGroups/{container_group_name}` with `resource`
    /// serialized as the JSON request body.
    ///
    /// HTTP 200 deserializes to [`ContainerGroup`]; any other status is
    /// returned as [`update::Error::DefaultResponse`].
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
        resource: &Resource,
    ) -> std::result::Result<ContainerGroup, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(resource).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ContainerGroup =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`update`] operation.
    pub mod update {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a container group via
    /// `DELETE .../containerGroups/{container_group_name}`.
    ///
    /// HTTP 200 returns the deleted group ([`delete::Response::Ok200`]);
    /// 202 ([`delete::Response::Accepted202`]) means deletion was accepted
    /// asynchronously; 204 ([`delete::Response::NoContent204`]) means nothing
    /// existed to delete. Any other status becomes
    /// [`delete::Error::DefaultResponse`].
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ContainerGroup =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(delete::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for the [`delete`] operation.
    pub mod delete {
        use crate::{models, models::*};
        #[derive(Debug)]
        pub enum Response {
            Ok200(ContainerGroup),
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Restarts every container in a container group via
    /// `POST .../containerGroups/{container_group_name}/restart`.
    ///
    /// HTTP 204 (No Content) indicates success; any other status is returned
    /// as [`restart::Error::DefaultResponse`].
    pub async fn restart(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
    ) -> std::result::Result<(), restart::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/restart",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(restart::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(restart::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: send an explicit `Content-Length: 0`.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(restart::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(restart::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| restart::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(restart::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`restart`] operation.
    pub mod restart {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Stops all containers in a container group via
    /// `POST .../containerGroups/{container_group_name}/stop`.
    ///
    /// HTTP 204 (No Content) indicates success; any other status is returned
    /// as [`stop::Error::DefaultResponse`].
    pub async fn stop(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
    ) -> std::result::Result<(), stop::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/stop",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(stop::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(stop::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: send an explicit `Content-Length: 0`.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(stop::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(stop::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| stop::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(stop::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`stop`] operation.
    pub mod stop {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Starts all containers in a container group via
    /// `POST .../containerGroups/{container_group_name}/start`.
    ///
    /// Unlike `restart`/`stop`, success here is HTTP 202 (Accepted) — the
    /// start is asynchronous. Any other status is returned as
    /// [`start::Error::DefaultResponse`].
    pub async fn start(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        container_group_name: &str,
    ) -> std::result::Result<(), start::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/start",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            container_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(start::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(start::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: send an explicit `Content-Length: 0`.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(start::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(start::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| start::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(start::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`start`] operation.
    pub mod start {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn get_outbound_network_dependencies_endpoints(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
container_group_name: &str,
) -> std::result::Result<NetworkDependenciesResponse, get_outbound_network_dependencies_endpoints::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/outboundNetworkDependenciesEndpoints" , operation_config . base_path () , subscription_id , resource_group_name , container_group_name) ;
let mut url = url::Url::parse(url_str).map_err(get_outbound_network_dependencies_endpoints::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_outbound_network_dependencies_endpoints::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_outbound_network_dependencies_endpoints::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_outbound_network_dependencies_endpoints::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: NetworkDependenciesResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_outbound_network_dependencies_endpoints::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| get_outbound_network_dependencies_endpoints::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_outbound_network_dependencies_endpoints::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
    /// Error type for the [`get_outbound_network_dependencies_endpoints`]
    /// operation.
    pub mod get_outbound_network_dependencies_endpoints {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Lists the REST operations exposed by the Microsoft.ContainerInstance
/// resource provider.
pub mod operations {
    use crate::models::*;
    /// Lists available operations via
    /// `GET {base}/providers/Microsoft.ContainerInstance/operations`.
    ///
    /// HTTP 200 deserializes to [`OperationListResult`]; any other status is
    /// returned as [`list::Error::DefaultResponse`].
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.ContainerInstance/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: OperationListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`list`] operation.
    pub mod list {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod location {
use crate::models::*;
    /// Lists container-instance usage quotas for a location via
    /// `GET .../providers/Microsoft.ContainerInstance/locations/{location}/usages`.
    ///
    /// HTTP 200 deserializes to [`UsageListResult`]; any other status is
    /// returned as [`list_usage::Error::DefaultResponse`].
    pub async fn list_usage(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<UsageListResult, list_usage::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.ContainerInstance/locations/{}/usages",
            operation_config.base_path(),
            subscription_id,
            location
        );
        let mut url = url::Url::parse(url_str).map_err(list_usage::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_usage::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_usage::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_usage::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: UsageListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_usage::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| list_usage::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_usage::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for the [`list_usage`] operation.
    pub mod list_usage {
        use crate::{models, models::*};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists the cached (pre-pulled) images for a location via
    /// `GET .../providers/Microsoft.ContainerInstance/locations/{location}/cachedImages`.
    ///
    /// HTTP 200 deserializes to [`CachedImagesListResult`]; any other status
    /// is returned as [`list_cached_images::Error::DefaultResponse`].
    pub async fn list_cached_images(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<CachedImagesListResult, list_cached_images::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.ContainerInstance/locations/{}/cachedImages",
            operation_config.base_path(),
            subscription_id,
            location
        );
        let mut url = url::Url::parse(url_str).map_err(list_cached_images::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_cached_images::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_cached_images::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_cached_images::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: CachedImagesListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_cached_images::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: CloudError = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_cached_images::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_cached_images::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod list_cached_images {
        use crate::{models, models::*};
        /// Errors produced by the `list_cached_images` operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-OK status and a `CloudError` payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_capabilities(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<CapabilitiesListResult, list_capabilities::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerInstance/locations/{}/capabilities",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list_capabilities::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_capabilities::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_capabilities::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_capabilities::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: CapabilitiesListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_capabilities::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| list_capabilities::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_capabilities::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
    pub mod list_capabilities {
        use crate::{models, models::*};
        /// Errors produced by the `list_capabilities` operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-OK status and a `CloudError` payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod containers {
use crate::models::*;
pub async fn list_logs(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
container_group_name: &str,
container_name: &str,
tail: Option<i32>,
timestamps: Option<bool>,
) -> std::result::Result<Logs, list_logs::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/containers/{}/logs",
operation_config.base_path(),
subscription_id,
resource_group_name,
container_group_name,
container_name
);
let mut url = url::Url::parse(url_str).map_err(list_logs::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_logs::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(tail) = tail {
url.query_pairs_mut().append_pair("tail", tail.to_string().as_str());
}
if let Some(timestamps) = timestamps {
url.query_pairs_mut().append_pair("timestamps", timestamps.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_logs::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_logs::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Logs =
serde_json::from_slice(rsp_body).map_err(|source| list_logs::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list_logs::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_logs::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
    pub mod list_logs {
        use crate::{models, models::*};
        /// Errors produced by the `list_logs` operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-OK status and a `CloudError` payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn execute_command(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
container_group_name: &str,
container_name: &str,
container_exec_request: &ContainerExecRequest,
) -> std::result::Result<ContainerExecResponse, execute_command::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/containers/{}/exec",
operation_config.base_path(),
subscription_id,
resource_group_name,
container_group_name,
container_name
);
let mut url = url::Url::parse(url_str).map_err(execute_command::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(execute_command::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(container_exec_request).map_err(execute_command::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(execute_command::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(execute_command::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ContainerExecResponse = serde_json::from_slice(rsp_body)
.map_err(|source| execute_command::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| execute_command::Error::DeserializeError(source, rsp_body.clone()))?;
Err(execute_command::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
    pub mod execute_command {
        use crate::{models, models::*};
        /// Errors produced by the `execute_command` operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-OK status and a `CloudError` payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn attach(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
container_group_name: &str,
container_name: &str,
) -> std::result::Result<ContainerAttachResponse, attach::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerInstance/containerGroups/{}/containers/{}/attach",
operation_config.base_path(),
subscription_id,
resource_group_name,
container_group_name,
container_name
);
let mut url = url::Url::parse(url_str).map_err(attach::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(attach::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(attach::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(attach::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ContainerAttachResponse =
serde_json::from_slice(rsp_body).map_err(|source| attach::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: CloudError =
serde_json::from_slice(rsp_body).map_err(|source| attach::Error::DeserializeError(source, rsp_body.clone()))?;
Err(attach::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
    pub mod attach {
        use crate::{models, models::*};
        /// Errors produced by the `attach` operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-OK status and a `CloudError` payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
| 48.251212 | 267 | 0.586457 |
16da9b030a9d97b3c432c52bc5535bfca031eb5f | 4,973 | #![doc(html_root_url = "https://docs.rs/loom/0.1.1")]
#![deny(missing_debug_implementations, missing_docs)]
#![cfg_attr(test, deny(warnings))]
//! Loom is a tool for testing concurrent programs.
//!
//! # Background
//!
//! Testing concurrent programs is challenging. The Rust memory model is relaxed
//! and permits a large number of possible behaviors. Loom provides a way to
//! deterministically explore the various possible execution permutations.
//!
//! Consider a simple example:
//!
//! ```ignore
//! use std::sync::Arc;
//! use std::sync::atomic::AtomicUsize;
//! use std::sync::atomic::Ordering::SeqCst;
//! use std::thread;
//!
//! #[test]
//! fn test_concurrent_logic() {
//! let v1 = Arc::new(AtomicUsize::new(0));
//! let v2 = v1.clone();
//!
//! thread::spawn(move || {
//! v1.store(1, SeqCst);
//! });
//!
//! assert_eq!(0, v2.load(SeqCst));
//! }
//! ```
//!
//! This program is obviously incorrect, yet the test can easily pass.
//!
//! The problem is compounded when Rust's relaxed memory model is considered.
//!
//! Historically, the strategy for testing concurrent code has been to run tests
//! in loops and hope that an execution fails. Doing this is not reliable, and,
//! in the event an iteration should fail, debugging the cause is exceedingly
//! difficult.
//!
//! # Solution
//!
//! Loom fixes the problem by controlling the scheduling of each thread. Loom
//! also simulates the Rust memory model such that it attempts all possible
//! valid behaviors. For example, an atomic load may return an old value instead
//! of the newest.
//!
//! The above example can be rewritten as:
//!
//! ```ignore
//! extern crate loom;
//!
//! use loom::sync::atomic::AtomicUsize;
//! use loom::thread;
//!
//! use std::sync::Arc;
//! use std::sync::atomic::Ordering::SeqCst;
//!
//! #[test]
//! fn test_concurrent_logic() {
//! loom::fuzz(|| {
//! let v1 = Arc::new(AtomicUsize::new(0));
//! let v2 = v1.clone();
//!
//! thread::spawn(move || {
//! v1.store(1, SeqCst);
//! });
//!
//! assert_eq!(0, v2.load(SeqCst));
//! });
//! }
//! ```
//!
//! Loom will run the closure many times, each time with a different thread
//! scheduling. The test is guaranteed to fail.
//!
//! # Writing tests
//!
//! Test cases using loom must be fully deterministic. All sources of
//! non-determinism must be via loom types. This allows loom to validate the test
//! case and control the scheduling.
//!
//! Tests must use the loom synchronization types, such as `Atomic*`, `Mutex`,
//! `Condvar`, `thread::spawn`, etc. When writing a concurrent program, the
//! `std` types should be used when running the program and the `loom` types
//! when running the test.
//!
//! One way to do this is via cfg flags.
//!
//! It is important to not include other sources of non-determinism in tests, such
//! as random number generators or system calls.
//!
//! # Yielding
//!
//! Some concurrent algorithms assume a fair scheduler. For example, a spin lock
//! assumes that, at some point, another thread will make enough progress for
//! the lock to become available.
//!
//! This presents a challenge for loom as the scheduler is not fair. In such
//! cases, loops must include calls to `yield_now`. This tells loom that another
//! thread needs to be scheduled in order for the current one to make progress.
//!
//! # Dealing with combinatorial explosion
//!
//! The number of possible threads scheduling has factorial growth as the
//! program complexity increases. Loom deals with this by reducing the state
//! space. Equivalent executions are eliminated. For example, if two threads
//! **read** from the same atomic variable, loom does not attempt another
//! execution given that the order in which two threads read from the same
//! atomic cannot impact the execution.
#[macro_use]
extern crate cfg_if;
// extern crate libc;
#[macro_use]
extern crate scoped_tls;
#[cfg(feature = "generator")]
extern crate generator;
#[cfg(feature = "fringe")]
extern crate fringe;
// The checkpoint feature enables serialization of the check exploration to
// disk. This is useful for replaying a known failing permutation.
cfg_if! {
if #[cfg(feature = "checkpoint")] {
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
}
}
// Expands its body only when the "futures" cargo feature is enabled;
// otherwise the tokens are dropped entirely.
macro_rules! if_futures {
    ($($t:tt)*) => {
        cfg_if! {
            if #[cfg(feature = "futures")] {
                $($t)*
            }
        }
    }
}
#[doc(hidden)]
#[macro_export]
// Internal logging macro: forwards to `println!` only when debug logging is
// enabled for the current execution (see `__debug_enabled`).
macro_rules! debug {
    ($($t:tt)*) => {
        if $crate::__debug_enabled() {
            println!($($t)*);
        }
    };
}
pub mod fuzz;
mod rt;
pub mod sync;
pub mod thread;
#[doc(inline)]
pub use fuzz::fuzz;
if_futures! {
extern crate futures as _futures;
pub mod futures;
}
pub use rt::yield_now;
#[doc(hidden)]
// Support function for the `debug!` macro: reports whether the current
// execution has its `log` flag set.
pub fn __debug_enabled() -> bool {
    rt::execution(|execution| execution.log)
}
| 28.096045 | 80 | 0.649507 |
d56bc12b680cc6e0195413fca4face58c8637fbc | 13,166 | //! Structural Search Replace
//!
//! Allows searching the AST for code that matches one or more patterns and then replacing that code
//! based on a template.
// Feature: Structural Search and Replace
//
// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
//
// All paths in both the search pattern and the replacement template must resolve in the context
// in which this command is invoked. Paths in the search pattern will then match the code if they
// resolve to the same item, even if they're written differently. For example if we invoke the
// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
// to `foo::Bar` will match.
//
// Paths in the replacement template will be rendered appropriately for the context in which the
// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
// code in the `foo` module, we'll insert just `Bar`.
//
// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
// placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
// the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror
// whatever autoderef and autoref was happening implicitly in the matched code.
//
// The scope of the search / replace will be restricted to the current selection if any, otherwise
// it will apply to the whole workspace.
//
// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
//
// Supported constraints:
//
// |===
// | Constraint | Restricts placeholder
//
// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
// | not(a) | Negates the constraint `a`
// |===
//
// Available via the command `rust-analyzer.ssr`.
//
// ```rust
// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
//
// // BEFORE
// String::from(foo(y + 5, z))
//
// // AFTER
// String::from((y + 5).foo(z))
// ```
//
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Structural Search Replace**
// |===
//
// Also available as an assist, by writing a comment containing the structural
// search and replace rule. You will only see the assist if the comment can
// be parsed as a valid structural search and replace rule.
//
// ```rust
// // Place the cursor on the line below to see the assist 💡.
// // foo($a, $b) ==>> ($a).foo($b)
// ```
mod from_comment;
mod matching;
mod nester;
mod parsing;
mod fragments;
mod replacing;
mod resolving;
mod search;
#[macro_use]
mod errors;
#[cfg(test)]
mod tests;
use crate::errors::bail;
pub use crate::errors::SsrError;
pub use crate::from_comment::ssr_from_comment;
pub use crate::matching::Match;
use crate::matching::MatchFailureReason;
use hir::Semantics;
use ide_db::base_db::{FileId, FilePosition, FileRange};
use resolving::ResolvedRule;
use rustc_hash::FxHashMap;
use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit;
// A structured search replace rule. Created by calling `parse` on a str.
#[derive(Debug)]
pub struct SsrRule {
    /// A structured pattern that we're searching for.
    pattern: parsing::RawPattern,
    /// What we'll replace it with.
    template: parsing::RawPattern,
    /// Parsed forms of this rule (see `parsing::ParsedRule`).
    parsed_rules: Vec<parsing::ParsedRule>,
}
/// A parsed search pattern without a replacement template, used when only
/// matching (not replacing) is required.
#[derive(Debug)]
pub struct SsrPattern {
    parsed_rules: Vec<parsing::ParsedRule>,
}
/// The set of matches produced by a search.
#[derive(Debug, Default)]
pub struct SsrMatches {
    pub matches: Vec<Match>,
}
/// Searches a crate for pattern matches and possibly replaces them with something else.
pub struct MatchFinder<'db> {
    /// Our source of information about the user's code.
    sema: Semantics<'db, ide_db::RootDatabase>,
    /// Rules to apply, in priority order (earlier rules win).
    rules: Vec<ResolvedRule>,
    /// Scope in which paths from patterns/templates are resolved.
    resolution_scope: resolving::ResolutionScope<'db>,
    /// If non-empty, matching is restricted to these ranges
    /// (empty means search the whole workspace).
    restrict_ranges: Vec<FileRange>,
}
impl<'db> MatchFinder<'db> {
    /// Constructs a new instance where names will be looked up as if they appeared at
    /// `lookup_context`.
    pub fn in_context(
        db: &'db ide_db::RootDatabase,
        lookup_context: FilePosition,
        mut restrict_ranges: Vec<FileRange>,
    ) -> MatchFinder<'db> {
        // Empty ranges could never contain a match; drop them so they don't
        // needlessly restrict the search.
        restrict_ranges.retain(|range| !range.range.is_empty());
        let sema = Semantics::new(db);
        let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
        MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }
    }
    /// Constructs an instance using the start of the first file in `db` as the lookup context.
    pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
        use ide_db::base_db::SourceDatabaseExt;
        use ide_db::symbol_index::SymbolsDatabase;
        if let Some(first_file_id) =
            db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
        {
            Ok(MatchFinder::in_context(
                db,
                FilePosition { file_id: first_file_id, offset: 0.into() },
                vec![],
            ))
        } else {
            bail!("No files to search");
        }
    }
    /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
    /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
    /// match to it.
    pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
        for parsed_rule in rule.parsed_rules {
            self.rules.push(ResolvedRule::new(
                parsed_rule,
                &self.resolution_scope,
                self.rules.len(),
            )?);
        }
        Ok(())
    }
    /// Finds matches for all added rules and returns edits for all found matches.
    pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
        use ide_db::base_db::SourceDatabaseExt;
        // Group matches by the file they occur in, then turn each file's
        // matches into a single TextEdit.
        let mut matches_by_file = FxHashMap::default();
        for m in self.matches().matches {
            matches_by_file
                .entry(m.range.file_id)
                .or_insert_with(SsrMatches::default)
                .matches
                .push(m);
        }
        matches_by_file
            .into_iter()
            .map(|(file_id, matches)| {
                (
                    file_id,
                    replacing::matches_to_edit(
                        &matches,
                        &self.sema.db.file_text(file_id),
                        &self.rules,
                    ),
                )
            })
            .collect()
    }
    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
    /// intend to do replacement, use `add_rule` instead.
    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
        for parsed_rule in pattern.parsed_rules {
            self.rules.push(ResolvedRule::new(
                parsed_rule,
                &self.resolution_scope,
                self.rules.len(),
            )?);
        }
        Ok(())
    }
    /// Returns matches for all added rules.
    pub fn matches(&self) -> SsrMatches {
        let mut matches = Vec::new();
        let mut usage_cache = search::UsageCache::default();
        for rule in &self.rules {
            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
        }
        nester::nest_and_remove_collisions(matches, &self.sema)
    }
    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
    /// them, while recording reasons why they don't match. This API is useful for command
    /// line-based debugging where providing a range is difficult.
    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
        use ide_db::base_db::SourceDatabaseExt;
        let file = self.sema.parse(file_id);
        let mut res = Vec::new();
        let file_text = self.sema.db.file_text(file_id);
        let mut remaining_text = file_text.as_str();
        // `base` tracks how much of the file we've already scanned past, so
        // that offsets found in `remaining_text` can be converted back to
        // absolute file offsets.
        let mut base = 0;
        let len = snippet.len() as u32;
        while let Some(offset) = remaining_text.find(snippet) {
            let start = base + offset as u32;
            let end = start + len;
            self.output_debug_for_nodes_at_range(
                file.syntax(),
                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
                &None,
                &mut res,
            );
            remaining_text = &remaining_text[offset + snippet.len()..];
            base = end;
        }
        res
    }
    /// Recursively walks `node` looking for children whose original range is exactly `range` and
    /// records, for each rule, whether it matched and if not, why. Descends into macro expansions
    /// so that code produced by macros is covered as well.
    fn output_debug_for_nodes_at_range(
        &self,
        node: &SyntaxNode,
        range: FileRange,
        restrict_range: &Option<FileRange>,
        out: &mut Vec<MatchDebugInfo>,
    ) {
        for node in node.children() {
            let node_range = self.sema.original_range(&node);
            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
            {
                continue;
            }
            if node_range.range == range.range {
                for rule in &self.rules {
                    // For now we ignore rules that have a different kind than our node, otherwise
                    // we get lots of noise. If at some point we add support for restricting rules
                    // to a particular kind of thing (e.g. only match type references), then we can
                    // relax this. We special-case expressions, since function calls can match
                    // method calls.
                    if rule.pattern.node.kind() != node.kind()
                        && !(ast::Expr::can_cast(rule.pattern.node.kind())
                            && ast::Expr::can_cast(node.kind()))
                    {
                        continue;
                    }
                    out.push(MatchDebugInfo {
                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
                            .map_err(|e| MatchFailureReason {
                                reason: e.reason.unwrap_or_else(|| {
                                    "Match failed, but no reason was given".to_owned()
                                }),
                            }),
                        pattern: rule.pattern.node.clone(),
                        node: node.clone(),
                    });
                }
            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
                // Fix: the reference to `macro_call` was garbled by an HTML-entity
                // mangling (`&macr;` -> "¯") in the original text.
                if let Some(expanded) = self.sema.expand(&macro_call) {
                    if let Some(tt) = macro_call.token_tree() {
                        self.output_debug_for_nodes_at_range(
                            &expanded,
                            range,
                            &Some(self.sema.original_range(tt.syntax())),
                            out,
                        );
                    }
                }
            }
            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
        }
    }
}
/// Diagnostic record describing one attempt to match a pattern against a node.
pub struct MatchDebugInfo {
    /// The syntax node we attempted to match.
    node: SyntaxNode,
    /// Our search pattern parsed as an expression or item, etc
    pattern: SyntaxNode,
    /// Outcome of the attempt, carrying the failure reason when unsuccessful.
    matched: Result<Match, MatchFailureReason>,
}
impl std::fmt::Debug for MatchDebugInfo {
    /// Renders the match attempt: outcome line first, then the node's AST,
    /// then the pattern's AST, with banner separators.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Err(failure) = &self.matched {
            writeln!(f, "Node failed to match because: {}", failure.reason)?;
        } else {
            writeln!(f, "Node matched")?;
        }
        writeln!(f, "============ AST ===========")?;
        writeln!(f, "{:#?}", self.node)?;
        writeln!(f, "========= PATTERN ==========")?;
        writeln!(f, "{:#?}", self.pattern)?;
        writeln!(f, "============================")
    }
}
impl SsrMatches {
    /// Returns `self` with any nested matches removed and made into top-level matches.
    pub fn flattened(self) -> SsrMatches {
        let mut flat = SsrMatches::default();
        self.flatten_into(&mut flat);
        flat
    }
    /// Recursively moves matches nested inside placeholder values into `out`,
    /// then appends the (now childless) match itself.
    fn flatten_into(self, out: &mut SsrMatches) {
        for mut top_match in self.matches {
            for placeholder in top_match.placeholder_values.values_mut() {
                let nested = std::mem::take(&mut placeholder.inner_matches);
                nested.flatten_into(out);
            }
            out.matches.push(top_match);
        }
    }
}
impl Match {
    /// Returns the source text covered by the matched node.
    pub fn matched_text(&self) -> String {
        self.matched_node.text().to_string()
    }
}
// Marker impl so `SsrError` can be used where a standard error type is expected.
impl std::error::Error for SsrError {}
#[cfg(test)]
impl MatchDebugInfo {
    /// Test-only helper: the reason the node failed to match, if it failed.
    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
        self.matched.as_ref().err().map(|r| r.reason.as_str())
    }
}
| 36.983146 | 124 | 0.581954 |
1c79a02d3da0e88d83f9b36511b09deb34bfc69d | 56,151 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// The Rust HIR.
pub use self::BindingMode::*;
pub use self::BinOp_::*;
pub use self::BlockCheckMode::*;
pub use self::CaptureClause::*;
pub use self::Decl_::*;
pub use self::Expr_::*;
pub use self::FunctionRetTy::*;
pub use self::ForeignItem_::*;
pub use self::Item_::*;
pub use self::Mutability::*;
pub use self::PrimTy::*;
pub use self::Stmt_::*;
pub use self::Ty_::*;
pub use self::TyParamBound::*;
pub use self::UnOp::*;
pub use self::UnsafeSource::*;
pub use self::Visibility::{Public, Inherited};
pub use self::PathParameters::*;
use hir::def::Def;
use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX};
use util::nodemap::{NodeMap, FxHashSet};
use syntax_pos::{Span, ExpnId, DUMMY_SP};
use syntax::codemap::{self, Spanned};
use syntax::abi::Abi;
use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax::tokenstream::TokenStream;
use syntax::util::ThinVec;
use rustc_data_structures::indexed_vec;
use std::collections::BTreeMap;
use std::fmt;
/// HIR doesn't commit to a concrete storage type and has its own alias for a vector.
/// It can be `Vec`, `P<[T]>` or potentially `Box<[T]>`, or some other container with similar
/// behavior. Unlike AST, HIR is mostly a static structure, so we can use an owned slice instead
/// of `Vec` to avoid keeping extra capacity.
pub type HirVec<T> = P<[T]>;
// Constructs a `HirVec`, mirroring the `vec!` macro: supports both the
// repeat form `hir_vec![elem; n]` and the list form `hir_vec![a, b, c]`
// (with or without a trailing comma).
macro_rules! hir_vec {
    ($elem:expr; $n:expr) => (
        $crate::hir::HirVec::from(vec![$elem; $n])
    );
    ($($x:expr),*) => (
        $crate::hir::HirVec::from(vec![$($x),*])
    );
    ($($x:expr,)*) => (hir_vec![$($x),*])
}
pub mod check_attr;
pub mod def;
pub mod def_id;
pub mod intravisit;
pub mod itemlikevisit;
pub mod lowering;
pub mod map;
pub mod pat_util;
pub mod print;
pub mod svh;
/// A HirId uniquely identifies a node in the HIR of the current crate. It is
/// composed of the `owner`, which is the DefIndex of the directly enclosing
/// hir::Item, hir::TraitItem, or hir::ImplItem (i.e. the closest "item-like"),
/// and the `local_id` which is unique within the given owner.
///
/// This two-level structure makes for more stable values: One can move an item
/// around within the source code, or add or remove stuff before it, without
/// the local_id part of the HirId changing, which is a very useful property
/// incremental compilation where we have to persist things through changes to
/// the code base.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug,
RustcEncodable, RustcDecodable)]
pub struct HirId {
pub owner: DefIndex,
pub local_id: ItemLocalId,
}
/// An `ItemLocalId` uniquely identifies something within a given "item-like",
/// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
/// guarantee that the numerical value of a given `ItemLocalId` corresponds to
/// the node's position within the owning item in any way, but there is a
/// guarantee that the `ItemLocalId`s within an owner occupy a dense range of
/// integers starting at zero, so a mapping that maps all or most nodes within
/// an "item-like" to something else can be implemented by a `Vec` instead of a
/// tree or hash map.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug,
         RustcEncodable, RustcDecodable)]
pub struct ItemLocalId(pub u32);
impl ItemLocalId {
pub fn as_usize(&self) -> usize {
self.0 as usize
}
}
impl indexed_vec::Idx for ItemLocalId {
fn new(idx: usize) -> Self {
debug_assert!((idx as u32) as usize == idx);
ItemLocalId(idx as u32)
}
fn index(self) -> usize {
self.0 as usize
}
}
/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX
pub const CRATE_HIR_ID: HirId = HirId {
owner: CRATE_DEF_INDEX,
local_id: ItemLocalId(0)
};
pub const DUMMY_HIR_ID: HirId = HirId {
owner: CRATE_DEF_INDEX,
local_id: ItemLocalId(!0)
};
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
pub span: Span,
/// Either "'a", referring to a named lifetime definition,
/// or "" (aka keywords::Invalid), for elision placeholders.
///
/// HIR lowering inserts these placeholders in type paths that
/// refer to type definitions needing lifetime parameters,
/// `&T` and `&mut T`, and trait objects without `... + 'a`.
pub name: Name,
}
impl fmt::Debug for Lifetime {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"lifetime({}: {})",
self.id,
print::to_string(print::NO_ANN, |s| s.print_lifetime(self)))
}
}
impl Lifetime {
    /// Returns `true` if this lifetime is an elision placeholder inserted by
    /// HIR lowering (its name is the `keywords::Invalid` sentinel) rather
    /// than a lifetime the user actually wrote.
    pub fn is_elided(&self) -> bool {
        keywords::Invalid.name() == self.name
    }
}
/// A lifetime definition, eg `'a: 'b+'c+'d`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct LifetimeDef {
pub lifetime: Lifetime,
pub bounds: HirVec<Lifetime>,
pub pure_wrt_drop: bool,
}
/// A "Path" is essentially Rust's notion of a name; for instance:
/// std::cmp::PartialEq . It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Path {
pub span: Span,
/// The definition that the path resolved to.
pub def: Def,
/// The segments in the path: the things separated by `::`.
pub segments: HirVec<PathSegment>,
}
impl Path {
    /// Returns `true` when this path is absolute, i.e. its first segment is
    /// the crate root. Empty paths are never global.
    pub fn is_global(&self) -> bool {
        match self.segments.first() {
            Some(first) => first.name == keywords::CrateRoot.name(),
            None => false,
        }
    }
}
impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "path({})",
print::to_string(print::NO_ANN, |s| s.print_path(self, false)))
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of
/// types.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
pub name: Name,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
/// this is more than just simple syntactic sugar; the use of
/// parens affects the region binding rules, so we preserve the
/// distinction.
pub parameters: PathParameters,
}
impl PathSegment {
    /// Convert an identifier to the corresponding segment: the given name
    /// with no type or lifetime parameters attached.
    pub fn from_name(name: Name) -> PathSegment {
        let parameters = PathParameters::none();
        PathSegment {
            name: name,
            parameters: parameters,
        }
    }
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum PathParameters {
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
AngleBracketedParameters(AngleBracketedParameterData),
/// The `(A,B)` and `C` in `Foo(A,B) -> C`
ParenthesizedParameters(ParenthesizedParameterData),
}
impl PathParameters {
    /// The representation of "no parameters at all": empty angle brackets
    /// with `infer_types` enabled.
    pub fn none() -> PathParameters {
        let empty = AngleBracketedParameterData {
            lifetimes: HirVec::new(),
            types: HirVec::new(),
            infer_types: true,
            bindings: HirVec::new(),
        };
        AngleBracketedParameters(empty)
    }
    /// Returns the types that the user wrote. Note that these do not necessarily map to the type
    /// parameters in the parenthesized case.
    pub fn types(&self) -> HirVec<&P<Ty>> {
        match *self {
            AngleBracketedParameters(ref data) => data.types.iter().collect(),
            ParenthesizedParameters(ref data) => {
                // Inputs first, then the optional output type, in source order.
                data.inputs.iter().chain(data.output.iter()).collect()
            }
        }
    }
    /// Lifetimes written in angle brackets; the parenthesized notation cannot
    /// carry explicit lifetimes, so it contributes none.
    pub fn lifetimes(&self) -> HirVec<&Lifetime> {
        if let AngleBracketedParameters(ref data) = *self {
            data.lifetimes.iter().collect()
        } else {
            HirVec::new()
        }
    }
    /// Associated-type bindings (e.g. `Foo<A=Bar>`); only representable in
    /// the angle-bracketed form.
    pub fn bindings(&self) -> HirVec<&TypeBinding> {
        if let AngleBracketedParameters(ref data) = *self {
            data.bindings.iter().collect()
        } else {
            HirVec::new()
        }
    }
}
/// A path like `Foo<'a, T>`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct AngleBracketedParameterData {
/// The lifetime parameters for this path segment.
pub lifetimes: HirVec<Lifetime>,
/// The type parameters for this path segment, if present.
pub types: HirVec<P<Ty>>,
/// Whether to infer remaining type parameters, if any.
/// This only applies to expression and pattern paths, and
/// out of those only the segments with no type parameters
/// to begin with, e.g. `Vec::new` is `<Vec<..>>::new::<..>`.
pub infer_types: bool,
/// Bindings (equality constraints) on associated types, if present.
/// E.g., `Foo<A=Bar>`.
pub bindings: HirVec<TypeBinding>,
}
/// A path like `Foo(A,B) -> C`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ParenthesizedParameterData {
/// Overall span
pub span: Span,
/// `(A,B)`
pub inputs: HirVec<P<Ty>>,
/// `C`
pub output: Option<P<Ty>>,
}
/// The AST represents all type param bounds as types.
/// typeck::collect::compute_bounds matches these against
/// the "special" built-in traits (see middle::lang_items) and
/// detects Copy, Send and Sync.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TyParamBound {
TraitTyParamBound(PolyTraitRef, TraitBoundModifier),
RegionTyParamBound(Lifetime),
}
/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`. Negative bounds should also be handled here.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitBoundModifier {
None,
Maybe,
}
pub type TyParamBounds = HirVec<TyParamBound>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TyParam {
pub name: Name,
pub id: NodeId,
pub bounds: TyParamBounds,
pub default: Option<P<Ty>>,
pub span: Span,
pub pure_wrt_drop: bool,
}
/// Represents lifetimes and type parameters attached to a declaration
/// of a function, enum, trait, etc.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Generics {
pub lifetimes: HirVec<LifetimeDef>,
pub ty_params: HirVec<TyParam>,
pub where_clause: WhereClause,
pub span: Span,
}
impl Generics {
    /// A `Generics` with no lifetimes, no type parameters, and an empty
    /// `where` clause (dummy ids/spans throughout).
    pub fn empty() -> Generics {
        Generics {
            lifetimes: HirVec::new(),
            ty_params: HirVec::new(),
            where_clause: WhereClause {
                id: DUMMY_NODE_ID,
                predicates: HirVec::new(),
            },
            span: DUMMY_SP,
        }
    }
    /// Does this declaration have any lifetime parameters?
    pub fn is_lt_parameterized(&self) -> bool {
        !self.lifetimes.is_empty()
    }
    /// Does this declaration have any type parameters?
    pub fn is_type_parameterized(&self) -> bool {
        !self.ty_params.is_empty()
    }
    /// Does this declaration have any generic parameters at all?
    pub fn is_parameterized(&self) -> bool {
        !(self.lifetimes.is_empty() && self.ty_params.is_empty())
    }
}
pub enum UnsafeGeneric {
Region(LifetimeDef, &'static str),
Type(TyParam, &'static str),
}
impl UnsafeGeneric {
    /// The name of the attribute that made this generic "unsafe"
    /// (e.g. `may_dangle`), regardless of which kind of parameter it is.
    pub fn attr_name(&self) -> &'static str {
        match *self {
            UnsafeGeneric::Region(_, attr) |
            UnsafeGeneric::Type(_, attr) => attr,
        }
    }
}
impl Generics {
    /// If any generic parameter carries the `#[may_dangle]` attribute
    /// (recorded as `pure_wrt_drop` during lowering), return it together
    /// with the attribute name. Lifetime parameters are checked before type
    /// parameters, matching declaration order.
    pub fn carries_unsafe_attr(&self) -> Option<UnsafeGeneric> {
        self.lifetimes
            .iter()
            .find(|lt| lt.pure_wrt_drop)
            .map(|lt| UnsafeGeneric::Region(lt.clone(), "may_dangle"))
            .or_else(|| {
                self.ty_params
                    .iter()
                    .find(|tp| tp.pure_wrt_drop)
                    .map(|tp| UnsafeGeneric::Type(tp.clone(), "may_dangle"))
            })
    }
}
/// A `where` clause in a definition
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereClause {
pub id: NodeId,
pub predicates: HirVec<WherePredicate>,
}
/// A single predicate in a `where` clause
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum WherePredicate {
/// A type binding, eg `for<'c> Foo: Send+Clone+'c`
BoundPredicate(WhereBoundPredicate),
/// A lifetime predicate, e.g. `'a: 'b+'c`
RegionPredicate(WhereRegionPredicate),
/// An equality predicate (unsupported)
EqPredicate(WhereEqPredicate),
}
/// A type bound, eg `for<'c> Foo: Send+Clone+'c`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereBoundPredicate {
pub span: Span,
/// Any lifetimes from a `for` binding
pub bound_lifetimes: HirVec<LifetimeDef>,
/// The type being bounded
pub bounded_ty: P<Ty>,
/// Trait and lifetime bounds (`Clone+Send+'static`)
pub bounds: TyParamBounds,
}
/// A lifetime predicate, e.g. `'a: 'b+'c`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereRegionPredicate {
pub span: Span,
pub lifetime: Lifetime,
pub bounds: HirVec<Lifetime>,
}
/// An equality predicate (unsupported), e.g. `T=int`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereEqPredicate {
pub id: NodeId,
pub span: Span,
pub lhs_ty: P<Ty>,
pub rhs_ty: P<Ty>,
}
pub type CrateConfig = HirVec<P<MetaItem>>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
pub struct Crate {
pub module: Mod,
pub attrs: HirVec<Attribute>,
pub span: Span,
pub exported_macros: HirVec<MacroDef>,
// NB: We use a BTreeMap here so that `visit_all_items` iterates
// over the ids in increasing order. In principle it should not
// matter what order we visit things in, but in *practice* it
// does, because it can affect the order in which errors are
// detected, which in turn can make compile-fail tests yield
// slightly different results.
pub items: BTreeMap<NodeId, Item>,
pub trait_items: BTreeMap<TraitItemId, TraitItem>,
pub impl_items: BTreeMap<ImplItemId, ImplItem>,
pub bodies: BTreeMap<BodyId, Body>,
pub trait_impls: BTreeMap<DefId, Vec<NodeId>>,
pub trait_default_impl: BTreeMap<DefId, NodeId>,
/// A list of the body ids written out in the order in which they
/// appear in the crate. If you're going to process all the bodies
/// in the crate, you should iterate over this list rather than the keys
/// of bodies.
pub body_ids: Vec<BodyId>,
}
impl Crate {
    /// Look up a top-level item by id. Panics if the id is not present.
    pub fn item(&self, id: NodeId) -> &Item {
        &self.items[&id]
    }
    /// Look up a trait item by id. Panics if the id is not present.
    pub fn trait_item(&self, id: TraitItemId) -> &TraitItem {
        &self.trait_items[&id]
    }
    /// Look up an impl item by id. Panics if the id is not present.
    pub fn impl_item(&self, id: ImplItemId) -> &ImplItem {
        &self.impl_items[&id]
    }
    /// Visits all items in the crate in some deterministic (but
    /// unspecified) order. If you just need to process every item,
    /// but don't care about nesting, this method is the best choice.
    ///
    /// If you do care about nesting -- usually because your algorithm
    /// follows lexical scoping rules -- then you want a different
    /// approach. You should override `visit_nested_item` in your
    /// visitor and then call `intravisit::walk_crate` instead.
    pub fn visit_all_item_likes<'hir, V>(&'hir self, visitor: &mut V)
        where V: itemlikevisit::ItemLikeVisitor<'hir>
    {
        // `BTreeMap::values()` yields entries in ascending key order, which
        // gives the deterministic ordering documented above.
        for item in self.items.values() {
            visitor.visit_item(item);
        }
        for trait_item in self.trait_items.values() {
            visitor.visit_trait_item(trait_item);
        }
        for impl_item in self.impl_items.values() {
            visitor.visit_impl_item(impl_item);
        }
    }
    /// Look up a function/constant body by id. Panics if the id is not present.
    pub fn body(&self, id: BodyId) -> &Body {
        &self.bodies[&id]
    }
}
/// A macro definition, in this crate or imported from another.
///
/// Not parsed directly, but created on macro import or `macro_rules!` expansion.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MacroDef {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub id: NodeId,
pub span: Span,
pub body: TokenStream,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Block {
/// Statements in a block
pub stmts: HirVec<Stmt>,
/// An expression at the end of the block
/// without a semicolon, if any
pub expr: Option<P<Expr>>,
pub id: NodeId,
/// Distinguishes between `unsafe { ... }` and `{ ... }`
pub rules: BlockCheckMode,
pub span: Span,
/// The id of the expression that `break` breaks to if the block can be broken out of.
/// Currently only `Some(_)` for `catch {}` blocks
pub break_to_expr_id: Option<NodeId>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Pat {
pub id: NodeId,
pub node: PatKind,
pub span: Span,
}
impl fmt::Debug for Pat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "pat({}: {})", self.id,
print::to_string(print::NO_ANN, |s| s.print_pat(self)))
}
}
impl Pat {
    /// Pre-order walk over this pattern and all of its sub-patterns.
    ///
    /// Calls `it` on `self` first; if that returns `false` the walk stops
    /// immediately. Otherwise recurses into every sub-pattern,
    /// short-circuiting on the first `false`. Returns whether the walk ran
    /// to completion.
    // FIXME(#19596) this is a workaround, but there should be a better way
    fn walk_<G>(&self, it: &mut G) -> bool
        where G: FnMut(&Pat) -> bool
    {
        if !it(self) {
            return false;
        }
        match self.node {
            PatKind::Binding(.., Some(ref p)) => p.walk_(it),
            PatKind::Struct(_, ref fields, _) => {
                fields.iter().all(|field| field.node.pat.walk_(it))
            }
            PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => {
                s.iter().all(|p| p.walk_(it))
            }
            PatKind::Box(ref s) | PatKind::Ref(ref s, _) => {
                s.walk_(it)
            }
            PatKind::Slice(ref before, ref slice, ref after) => {
                before.iter().all(|p| p.walk_(it)) &&
                slice.iter().all(|p| p.walk_(it)) &&
                after.iter().all(|p| p.walk_(it))
            }
            // Leaf patterns (and bindings without a sub-pattern) have
            // nothing to recurse into.
            PatKind::Wild |
            PatKind::Lit(_) |
            PatKind::Range(..) |
            PatKind::Binding(..) |
            PatKind::Path(_) => {
                true
            }
        }
    }
    /// Public entry point for `walk_`; see there for the traversal order and
    /// short-circuit semantics.
    pub fn walk<F>(&self, mut it: F) -> bool
        where F: FnMut(&Pat) -> bool
    {
        self.walk_(&mut it)
    }
}
/// A single field in a struct pattern
///
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }`
/// are treated the same as `x: x, y: ref y, z: ref mut z`,
/// except `is_shorthand` is true
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FieldPat {
    /// The identifier for the field
    pub name: Name,
    /// The pattern the field is destructured to
    pub pat: P<Pat>,
    /// True when the source used the shorthand form (`x` rather than `x: x`).
    pub is_shorthand: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BindingMode {
BindByRef(Mutability),
BindByValue(Mutability),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum RangeEnd {
Included,
Excluded,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum PatKind {
/// Represents a wildcard pattern (`_`)
Wild,
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
/// The `DefId` is for the definition of the variable being bound.
Binding(BindingMode, DefId, Spanned<Name>, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
Struct(QPath, HirVec<Spanned<FieldPat>>, bool),
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>),
/// A path pattern for an unit struct/variant or a (maybe-associated) constant.
Path(QPath),
/// A tuple pattern `(a, b)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
Tuple(HirVec<P<Pat>>, Option<usize>),
/// A `box` pattern
Box(P<Pat>),
/// A reference pattern, e.g. `&mut (a, b)`
Ref(P<Pat>, Mutability),
/// A literal
Lit(P<Expr>),
/// A range pattern, e.g. `1...2` or `1..2`
Range(P<Expr>, P<Expr>, RangeEnd),
/// `[a, b, ..i, y, z]` is represented as:
/// `PatKind::Slice(box [a, b], Some(i), box [y, z])`
Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum Mutability {
MutMutable,
MutImmutable,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BinOp_ {
/// The `+` operator (addition)
BiAdd,
/// The `-` operator (subtraction)
BiSub,
/// The `*` operator (multiplication)
BiMul,
/// The `/` operator (division)
BiDiv,
/// The `%` operator (modulus)
BiRem,
/// The `&&` operator (logical and)
BiAnd,
/// The `||` operator (logical or)
BiOr,
/// The `^` operator (bitwise xor)
BiBitXor,
/// The `&` operator (bitwise and)
BiBitAnd,
/// The `|` operator (bitwise or)
BiBitOr,
/// The `<<` operator (shift left)
BiShl,
/// The `>>` operator (shift right)
BiShr,
/// The `==` operator (equality)
BiEq,
/// The `<` operator (less than)
BiLt,
/// The `<=` operator (less than or equal to)
BiLe,
/// The `!=` operator (not equal to)
BiNe,
/// The `>=` operator (greater than or equal to)
BiGe,
/// The `>` operator (greater than)
BiGt,
}
impl BinOp_ {
    /// The source-level token for this operator.
    pub fn as_str(self) -> &'static str {
        match self {
            BiAdd => "+",
            BiSub => "-",
            BiMul => "*",
            BiDiv => "/",
            BiRem => "%",
            BiAnd => "&&",
            BiOr => "||",
            BiBitXor => "^",
            BiBitAnd => "&",
            BiBitOr => "|",
            BiShl => "<<",
            BiShr => ">>",
            BiEq => "==",
            BiLt => "<",
            BiLe => "<=",
            BiNe => "!=",
            BiGe => ">=",
            BiGt => ">",
        }
    }
    /// Whether this operator short-circuits, i.e. may skip evaluating its
    /// right-hand operand (`&&` and `||`).
    pub fn is_lazy(self) -> bool {
        match self {
            BiAnd | BiOr => true,
            _ => false,
        }
    }
    /// Whether this is one of the shift operators (`<<`, `>>`).
    pub fn is_shift(self) -> bool {
        match self {
            BiShl | BiShr => true,
            _ => false,
        }
    }
    /// Whether this operator compares its operands (yielding a `bool`).
    pub fn is_comparison(self) -> bool {
        match self {
            BiEq | BiLt | BiLe | BiNe | BiGt | BiGe => true,
            // Spelled out exhaustively (no `_` arm) so adding a new operator
            // forces this classification to be revisited.
            BiAnd |
            BiOr |
            BiAdd |
            BiSub |
            BiMul |
            BiDiv |
            BiRem |
            BiBitXor |
            BiBitAnd |
            BiBitOr |
            BiShl |
            BiShr => false,
        }
    }
    /// Returns `true` if the binary operator takes its arguments by value
    pub fn is_by_value(self) -> bool {
        !self.is_comparison()
    }
}
pub type BinOp = Spanned<BinOp_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum UnOp {
/// The `*` operator for dereferencing
UnDeref,
/// The `!` operator for logical inversion
UnNot,
/// The `-` operator for negation
UnNeg,
}
impl UnOp {
    /// The source-level token for this operator.
    pub fn as_str(self) -> &'static str {
        match self {
            UnDeref => "*",
            UnNot => "!",
            UnNeg => "-",
        }
    }
    /// Returns `true` if the unary operator takes its argument by value
    /// (dereference borrows instead).
    pub fn is_by_value(self) -> bool {
        match self {
            UnNeg | UnNot => true,
            UnDeref => false,
        }
    }
}
/// A statement
pub type Stmt = Spanned<Stmt_>;
impl fmt::Debug for Stmt_ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Sadness.
let spanned = codemap::dummy_spanned(self.clone());
write!(f,
"stmt({}: {})",
spanned.node.id(),
print::to_string(print::NO_ANN, |s| s.print_stmt(&spanned)))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum Stmt_ {
/// Could be an item or a local (let) binding:
StmtDecl(P<Decl>, NodeId),
/// Expr without trailing semi-colon (must have unit type):
StmtExpr(P<Expr>, NodeId),
/// Expr with trailing semi-colon (may have any type):
StmtSemi(P<Expr>, NodeId),
}
impl Stmt_ {
    /// Attributes attached to this statement; declarations delegate to the
    /// declaration's own attributes, expressions carry theirs directly.
    pub fn attrs(&self) -> &[Attribute] {
        match *self {
            StmtDecl(ref decl, _) => decl.node.attrs(),
            StmtExpr(ref expr, _) |
            StmtSemi(ref expr, _) => &expr.attrs,
        }
    }
    /// The `NodeId` of this statement, whatever its kind.
    pub fn id(&self) -> NodeId {
        match *self {
            StmtDecl(_, id) |
            StmtExpr(_, id) |
            StmtSemi(_, id) => id,
        }
    }
}
// FIXME (pending discussion of #1697, #2178...): local should really be
// a refinement on pat.
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Local {
pub pat: P<Pat>,
pub ty: Option<P<Ty>>,
/// Initializer expression to set the value, if any
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
pub attrs: ThinVec<Attribute>,
}
pub type Decl = Spanned<Decl_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Decl_ {
/// A local (let) binding:
DeclLocal(P<Local>),
/// An item binding:
DeclItem(ItemId),
}
impl Decl_ {
    /// Attributes attached to this declaration. Item declarations carry
    /// their attributes on the item itself, so they contribute none here.
    pub fn attrs(&self) -> &[Attribute] {
        match *self {
            DeclLocal(ref local) => &local.attrs,
            DeclItem(_) => &[],
        }
    }
}
/// represents one arm of a 'match'
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Arm {
pub attrs: HirVec<Attribute>,
pub pats: HirVec<P<Pat>>,
pub guard: Option<P<Expr>>,
pub body: P<Expr>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Field {
pub name: Spanned<Name>,
pub expr: P<Expr>,
pub span: Span,
pub is_shorthand: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BlockCheckMode {
DefaultBlock,
UnsafeBlock(UnsafeSource),
PushUnsafeBlock(UnsafeSource),
PopUnsafeBlock(UnsafeSource),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum UnsafeSource {
CompilerGenerated,
UserProvided,
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct BodyId {
pub node_id: NodeId,
}
/// The body of a function or constant value.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Body {
pub arguments: HirVec<Arg>,
pub value: Expr
}
impl Body {
    /// The id of this body, which is by definition the id of its root
    /// expression.
    pub fn id(&self) -> BodyId {
        BodyId { node_id: self.value.id }
    }
}
/// An expression
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Expr {
pub id: NodeId,
pub span: Span,
pub node: Expr_,
pub attrs: ThinVec<Attribute>,
}
impl fmt::Debug for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "expr({}: {})", self.id,
print::to_string(print::NO_ANN, |s| s.print_expr(self)))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Expr_ {
/// A `box x` expression.
ExprBox(P<Expr>),
/// An array (`[a, b, c, d]`)
ExprArray(HirVec<Expr>),
/// A function call
///
/// The first field resolves to the function itself (usually an `ExprPath`),
/// and the second field is the list of arguments
ExprCall(P<Expr>, HirVec<Expr>),
/// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`)
///
/// The `Spanned<Name>` is the identifier for the method name.
/// The vector of `Ty`s are the ascripted type parameters for the method
/// (within the angle brackets).
///
/// The first element of the vector of `Expr`s is the expression that
/// evaluates to the object on which the method is being called on (the
/// receiver), and the remaining elements are the rest of the arguments.
///
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`.
ExprMethodCall(Spanned<Name>, HirVec<P<Ty>>, HirVec<Expr>),
/// A tuple (`(a, b, c ,d)`)
ExprTup(HirVec<Expr>),
/// A binary operation (For example: `a + b`, `a * b`)
ExprBinary(BinOp, P<Expr>, P<Expr>),
/// A unary operation (For example: `!x`, `*x`)
ExprUnary(UnOp, P<Expr>),
/// A literal (For example: `1`, `"foo"`)
ExprLit(P<Lit>),
/// A cast (`foo as f64`)
ExprCast(P<Expr>, P<Ty>),
ExprType(P<Expr>, P<Ty>),
/// An `if` block, with an optional else block
///
/// `if expr { block } else { expr }`
ExprIf(P<Expr>, P<Block>, Option<P<Expr>>),
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
ExprLoop(P<Block>, Option<Spanned<Name>>, LoopSource),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
ExprMatch(P<Expr>, HirVec<Arm>, MatchSource),
/// A closure (for example, `move |a, b, c| {a + b + c}`).
///
/// The final span is the span of the argument block `|...|`
ExprClosure(CaptureClause, P<FnDecl>, BodyId, Span),
/// A block (`{ ... }`)
ExprBlock(P<Block>),
/// An assignment (`a = foo()`)
ExprAssign(P<Expr>, P<Expr>),
/// An assignment with an operator
///
/// For example, `a += 1`.
ExprAssignOp(BinOp, P<Expr>, P<Expr>),
/// Access of a named struct field (`obj.foo`)
ExprField(P<Expr>, Spanned<Name>),
/// Access of an unnamed field of a struct or tuple-struct
///
/// For example, `foo.0`.
ExprTupField(P<Expr>, Spanned<usize>),
/// An indexing operation (`foo[2]`)
ExprIndex(P<Expr>, P<Expr>),
/// Path to a definition, possibly containing lifetime or type parameters.
ExprPath(QPath),
/// A referencing operation (`&a` or `&mut a`)
ExprAddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break
ExprBreak(Destination, Option<P<Expr>>),
/// A `continue`, with an optional label
ExprAgain(Destination),
/// A `return`, with an optional value to be returned
ExprRet(Option<P<Expr>>),
/// Inline assembly (from `asm!`), with its outputs and inputs.
ExprInlineAsm(P<InlineAsm>, HirVec<Expr>, HirVec<Expr>),
/// A struct or struct-like variant literal expression.
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
ExprStruct(QPath, HirVec<Field>, Option<P<Expr>>),
/// An array literal constructed from one repeated element.
///
/// For example, `[1; 5]`. The first expression is the element
/// to be repeated; the second is the number of times to repeat it.
ExprRepeat(P<Expr>, BodyId),
}
/// Optionally `Self`-qualified value/type path or associated extension.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum QPath {
/// Path to a definition, optionally "fully-qualified" with a `Self`
/// type, if the path points to an associated item in a trait.
///
/// E.g. an unqualified path like `Clone::clone` has `None` for `Self`,
/// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`,
/// even though they both have the same two-segment `Clone::clone` `Path`.
Resolved(Option<P<Ty>>, P<Path>),
/// Type-related paths, e.g. `<T>::default` or `<T>::Output`.
/// Will be resolved by type-checking to an associated item.
///
/// UFCS source paths can desugar into this, with `Vec::new` turning into
/// `<Vec>::new`, and `T::X::Y::method` into `<<<T>::X>::Y>::method`,
/// the `X` and `Y` nodes each being a `TyPath(QPath::TypeRelative(..))`.
TypeRelative(P<Ty>, P<PathSegment>)
}
/// Hints at the original code for a `match _ { .. }`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum MatchSource {
/// A `match _ { .. }`
Normal,
/// An `if let _ = _ { .. }` (optionally with `else { .. }`)
IfLetDesugar {
contains_else_clause: bool,
},
/// A `while let _ = _ { .. }` (which was desugared to a
/// `loop { match _ { .. } }`)
WhileLetDesugar,
/// A desugared `for _ in _ { .. }` loop
ForLoopDesugar,
/// A desugared `?` operator
TryDesugar,
}
/// The loop type that yielded an ExprLoop
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopSource {
/// A `loop { .. }` loop
Loop,
/// A `while let _ = _ { .. }` loop
WhileLet,
/// A `for _ in _ { .. }` loop
ForLoop,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopIdError {
OutsideLoopScope,
UnlabeledCfInWhileCondition,
UnresolvedLabel,
}
impl fmt::Display for LoopIdError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Pick the message first, then defer to `str`'s `Display` impl so
        // that any formatting flags on `f` (width, alignment, ...) still
        // apply to the whole message.
        let msg = match *self {
            LoopIdError::OutsideLoopScope => "not inside loop scope",
            LoopIdError::UnlabeledCfInWhileCondition =>
                "unlabeled control flow (break or continue) in while condition",
            LoopIdError::UnresolvedLabel => "label not found",
        };
        fmt::Display::fmt(msg, f)
    }
}
// FIXME(cramertj) this should use `Result` once master compiles w/ a version of Rust where
// `Result` implements `Encodable`/`Decodable`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopIdResult {
    /// The label resolved to the `NodeId` of the target loop.
    Ok(NodeId),
    /// Resolution failed; see `LoopIdError` for the reason.
    Err(LoopIdError),
}
// Implementing `From` (rather than a hand-written `Into`) is the idiomatic
// direction for conversions: the std blanket impl still provides
// `LoopIdResult: Into<Result<NodeId, LoopIdError>>` for free, so every
// existing `.into()` call keeps working.
impl From<LoopIdResult> for Result<NodeId, LoopIdError> {
    /// Unpack the encodable stand-in back into a real `Result`.
    fn from(res: LoopIdResult) -> Self {
        match res {
            LoopIdResult::Ok(ok) => Ok(ok),
            LoopIdResult::Err(err) => Err(err),
        }
    }
}
impl From<Result<NodeId, LoopIdError>> for LoopIdResult {
    /// Wrap a real `Result` in the encodable `LoopIdResult` stand-in,
    /// mapping each variant to its counterpart.
    fn from(res: Result<NodeId, LoopIdError>) -> Self {
        res.map(LoopIdResult::Ok).unwrap_or_else(LoopIdResult::Err)
    }
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum ScopeTarget {
Block(NodeId),
Loop(LoopIdResult),
}
impl ScopeTarget {
pub fn opt_id(self) -> Option<NodeId> {
match self {
ScopeTarget::Block(node_id) |
ScopeTarget::Loop(LoopIdResult::Ok(node_id)) => Some(node_id),
ScopeTarget::Loop(LoopIdResult::Err(_)) => None,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Destination {
    // This is `Some(_)` iff there is an explicit user-specified `label`
    pub ident: Option<Spanned<Ident>>,
    // These errors are caught and then reported during the diagnostics pass in
    // librustc_passes/loops.rs
    pub target_id: ScopeTarget,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum CaptureClause {
CaptureByValue,
CaptureByRef,
}
// NB: If you change this, you'll probably want to change the corresponding
// type structure in middle/ty.rs as well.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MutTy {
pub ty: P<Ty>,
pub mutbl: Mutability,
}
/// Represents a method's signature in a trait declaration or implementation.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MethodSig {
pub unsafety: Unsafety,
pub constness: Constness,
pub abi: Abi,
pub decl: P<FnDecl>,
pub generics: Generics,
}
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the node-id of the item
// so it can fetched later.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItemId {
pub node_id: NodeId,
}
/// Represents an item declaration within a trait declaration,
/// possibly including a default implementation. A trait item is
/// either required (meaning it doesn't have an implementation, just a
/// signature) or provided (meaning it has a default implementation).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItem {
pub id: NodeId,
pub name: Name,
pub attrs: HirVec<Attribute>,
pub node: TraitItemKind,
pub span: Span,
}
/// A trait method's body (or just argument names).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitMethod {
/// No default body in the trait, just a signature.
Required(HirVec<Spanned<Name>>),
/// Both signature and body are provided in the trait.
Provided(BodyId),
}
/// Represents a trait method or associated constant or type
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitItemKind {
/// An associated constant with an optional value (otherwise `impl`s
/// must contain a value)
Const(P<Ty>, Option<BodyId>),
/// A method with an optional body
Method(MethodSig, TraitMethod),
/// An associated type with (possibly empty) bounds and optional concrete
/// type
Type(TyParamBounds, Option<P<Ty>>),
}
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the node-id of the item
// so it can fetched later.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItemId {
pub node_id: NodeId,
}
/// Represents anything within an `impl` block
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItem {
pub id: NodeId,
pub name: Name,
pub vis: Visibility,
pub defaultness: Defaultness,
pub attrs: HirVec<Attribute>,
pub node: ImplItemKind,
pub span: Span,
}
/// Represents different contents within `impl`s
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ImplItemKind {
/// An associated constant of the given type, set to the constant result
/// of the expression
Const(P<Ty>, BodyId),
/// A method implementation with the given signature and body
Method(MethodSig, BodyId),
/// An associated type
Type(P<Ty>),
}
/// Bind a type to an associated type: `A=Foo`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeBinding {
    pub id: NodeId,
    /// The name of the associated type being bound.
    pub name: Name,
    /// The type it is bound to.
    pub ty: P<Ty>,
    pub span: Span,
}
/// A type written in source code, e.g. the `Vec<u8>` in `x: Vec<u8>`.
/// `Debug` is implemented manually below via the pretty-printer.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Ty {
    pub id: NodeId,
    /// The kind of type (see `Ty_`).
    pub node: Ty_,
    pub span: Span,
}
impl fmt::Debug for Ty {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render through the HIR pretty-printer rather than a derived
        // Debug, so the output shows source-like syntax, e.g. `type(u32)`.
        write!(f, "type({})",
               print::to_string(print::NO_ANN, |s| s.print_type(self)))
    }
}
/// Not represented directly in the AST, referred to by name through a ty_path.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PrimTy {
TyInt(IntTy),
TyUint(UintTy),
TyFloat(FloatTy),
TyStr,
TyBool,
TyChar,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct BareFnTy {
pub unsafety: Unsafety,
pub abi: Abi,
pub lifetimes: HirVec<LifetimeDef>,
pub decl: P<FnDecl>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
/// The different kinds of types recognized by the compiler
pub enum Ty_ {
/// A variable length slice (`[T]`)
TySlice(P<Ty>),
/// A fixed length array (`[T; n]`)
TyArray(P<Ty>, BodyId),
/// A raw pointer (`*const T` or `*mut T`)
TyPtr(MutTy),
/// A reference (`&'a T` or `&'a mut T`)
TyRptr(Lifetime, MutTy),
/// A bare function (e.g. `fn(usize) -> bool`)
TyBareFn(P<BareFnTy>),
/// The never type (`!`)
TyNever,
/// A tuple (`(A, B, C, D,...)`)
TyTup(HirVec<P<Ty>>),
/// A path to a type definition (`module::module::...::Type`), or an
/// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
///
/// Type parameters may be stored in each `PathSegment`.
TyPath(QPath),
/// A trait object type `Bound1 + Bound2 + Bound3`
/// where `Bound` is a trait or a lifetime.
TyTraitObject(HirVec<PolyTraitRef>, Lifetime),
/// An `impl Bound1 + Bound2 + Bound3` type
/// where `Bound` is a trait or a lifetime.
TyImplTrait(TyParamBounds),
/// Unused for now
TyTypeof(BodyId),
/// TyInfer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
TyInfer,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct InlineAsmOutput {
pub constraint: Symbol,
pub is_rw: bool,
pub is_indirect: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct InlineAsm {
pub asm: Symbol,
pub asm_str_style: StrStyle,
pub outputs: HirVec<InlineAsmOutput>,
pub inputs: HirVec<Symbol>,
pub clobbers: HirVec<Symbol>,
pub volatile: bool,
pub alignstack: bool,
pub dialect: AsmDialect,
pub expn_id: ExpnId,
}
/// represents an argument in a function header
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Arg {
pub pat: P<Pat>,
pub id: NodeId,
}
/// Represents the header (not the body) of a function declaration
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FnDecl {
pub inputs: HirVec<P<Ty>>,
pub output: FunctionRetTy,
pub variadic: bool,
}
/// Whether a function/trait/impl is declared `unsafe`.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Unsafety {
    Unsafe,
    Normal,
}
/// Whether a function is declared `const`.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Constness {
    Const,
    NotConst,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Defaultness {
Default { has_value: bool },
Final,
}
impl Defaultness {
    /// Returns true if the item has a value (a body, or an assigned
    /// type): a `Final` item always does; a `Default` item only if the
    /// trait supplied one.
    pub fn has_value(&self) -> bool {
        if let Defaultness::Default { has_value, .. } = *self {
            has_value
        } else {
            true
        }
    }

    /// Returns true for `Final` items.
    pub fn is_final(&self) -> bool {
        match *self {
            Defaultness::Final => true,
            Defaultness::Default { .. } => false,
        }
    }

    /// Returns true for `Default` items (the complement of `is_final`).
    pub fn is_default(&self) -> bool {
        !self.is_final()
    }
}
impl fmt::Display for Unsafety {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Pick the label first, then delegate to `str`'s Display impl so
        // formatter flags (width, fill, ...) still apply.
        let label = match *self {
            Unsafety::Normal => "normal",
            Unsafety::Unsafe => "unsafe",
        };
        fmt::Display::fmt(label, f)
    }
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum ImplPolarity {
/// `impl Trait for Type`
Positive,
/// `impl !Trait for Type`
Negative,
}
impl fmt::Debug for ImplPolarity {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Pick the label, then delegate to `str`'s formatting exactly as
        // the unqualified `.fmt(f)` call did before (Debug is the trait
        // in scope inside this impl).
        let label = match *self {
            ImplPolarity::Positive => "positive",
            ImplPolarity::Negative => "negative",
        };
        fmt::Debug::fmt(label, f)
    }
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum FunctionRetTy {
/// Return type is not specified.
///
/// Functions default to `()` and
/// closures default to inference. Span points to where return
/// type would be inserted.
DefaultReturn(Span),
/// Everything else
Return(P<Ty>),
}
impl FunctionRetTy {
pub fn span(&self) -> Span {
match *self {
DefaultReturn(span) => span,
Return(ref ty) => ty.span,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Mod {
/// A span from the first token past `{` to the last token until `}`.
/// For `mod foo;`, the inner span ranges from the first token
/// to the last token in the external file.
pub inner: Span,
pub item_ids: HirVec<ItemId>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ForeignMod {
pub abi: Abi,
pub items: HirVec<ForeignItem>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct EnumDef {
pub variants: HirVec<Variant>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Variant_ {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub data: VariantData,
/// Explicit discriminant, eg `Foo = 1`
pub disr_expr: Option<BodyId>,
}
pub type Variant = Spanned<Variant_>;
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum UseKind {
    /// One import, e.g. `use foo::bar` or `use foo::bar as baz`.
    /// Also produced for each element of a list `use`, e.g.
    /// `use foo::{a, b}` lowers to `use foo::a; use foo::b;`.
    Single,
    /// Glob import, e.g. `use foo::*`.
    Glob,
    /// Degenerate list import, e.g. `use foo::{a, b}` produces
    /// an additional `use foo::{}` for performing checks such as
    /// unstable feature gating. May be removed in the future.
    ListStem,
}
/// TraitRef's appear in impls.
///
/// resolve maps each TraitRef's ref_id to its defining trait; that's all
/// that the ref_id is for. Note that ref_id's value is not the NodeId of the
/// trait being referred to but just a unique NodeId that serves as a key
/// within the DefMap.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitRef {
pub path: Path,
pub ref_id: NodeId,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PolyTraitRef {
/// The `'a` in `<'a> Foo<&'a T>`
pub bound_lifetimes: HirVec<LifetimeDef>,
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
pub trait_ref: TraitRef,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Visibility {
Public,
Crate,
Restricted { path: P<Path>, id: NodeId },
Inherited,
}
impl Visibility {
pub fn is_pub_restricted(&self) -> bool {
use self::Visibility::*;
match self {
&Public |
&Inherited => false,
&Crate |
&Restricted { .. } => true,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct StructField {
pub span: Span,
pub name: Name,
pub vis: Visibility,
pub id: NodeId,
pub ty: P<Ty>,
pub attrs: HirVec<Attribute>,
}
impl StructField {
    /// Returns true if this field belongs to a tuple struct/variant,
    /// i.e. its name is numeric (`0`, `1`, ...).
    /// Still necessary in a couple of places.
    pub fn is_positional(&self) -> bool {
        let leading = self.name.as_str().as_bytes()[0];
        (leading as char).is_digit(10)
    }
}
/// Fields and Ids of enum variants and structs
///
/// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all
/// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants).
/// One shared Id can be successfully used for these two purposes.
/// Id of the whole enum lives in `Item`.
///
/// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually
/// used for `Struct`-structs (but still presents). Structures don't have an analogue of "Id of
/// the variant itself" from enum variants.
/// Id of the whole struct lives in `Item`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum VariantData {
Struct(HirVec<StructField>, NodeId),
Tuple(HirVec<StructField>, NodeId),
Unit(NodeId),
}
impl VariantData {
    /// The fields of this variant; empty for unit variants/structs.
    pub fn fields(&self) -> &[StructField] {
        match *self {
            VariantData::Struct(ref fields, _) |
            VariantData::Tuple(ref fields, _) => fields,
            VariantData::Unit(_) => &[],
        }
    }

    /// The shared id (see the type-level docs for what it identifies
    /// for enums vs. structs).
    pub fn id(&self) -> NodeId {
        match *self {
            VariantData::Struct(_, id) |
            VariantData::Tuple(_, id) |
            VariantData::Unit(id) => id,
        }
    }

    /// True for brace-delimited (record-style) data.
    pub fn is_struct(&self) -> bool {
        match *self {
            VariantData::Struct(..) => true,
            _ => false,
        }
    }

    /// True for parenthesized (tuple-style) data.
    pub fn is_tuple(&self) -> bool {
        match *self {
            VariantData::Tuple(..) => true,
            _ => false,
        }
    }

    /// True for unit (fieldless) data.
    pub fn is_unit(&self) -> bool {
        match *self {
            VariantData::Unit(..) => true,
            _ => false,
        }
    }
}
/// The bodies for items are stored "out of line", in a separate
/// hashmap in the `Crate`. Here we just record the node-id of the item
/// so it can be fetched later.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ItemId {
    pub id: NodeId,
}
/// An item
///
/// The name might be a dummy name in case of anonymous items
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Item {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub id: NodeId,
pub node: Item_,
pub vis: Visibility,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Item_ {
    /// An `extern crate` item, with optional original crate name,
    ///
    /// e.g. `extern crate foo` or `extern crate foo_bar as foo`
    ItemExternCrate(Option<Name>),
    /// `use foo::bar::*;` or `use foo::bar::baz as quux;`
    ///
    /// or just
    ///
    /// `use foo::bar::baz;` (with `as baz` implicitly on the right)
    ItemUse(P<Path>, UseKind),
    /// A `static` item
    ItemStatic(P<Ty>, Mutability, BodyId),
    /// A `const` item
    ItemConst(P<Ty>, BodyId),
    /// A function declaration
    ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, BodyId),
    /// A module
    ItemMod(Mod),
    /// An external module
    ItemForeignMod(ForeignMod),
    /// A type alias, e.g. `type Foo = Bar<u8>`
    ItemTy(P<Ty>, Generics),
    /// An enum definition, e.g. `enum Foo<A, B> {C<A>, D<B>}`
    ItemEnum(EnumDef, Generics),
    /// A struct definition, e.g. `struct Foo<A> {x: A}`
    ItemStruct(VariantData, Generics),
    /// A union definition, e.g. `union Foo<A, B> {x: A, y: B}`
    ItemUnion(VariantData, Generics),
    /// Represents a Trait Declaration
    ItemTrait(Unsafety, Generics, TyParamBounds, HirVec<TraitItemRef>),
    /// Default trait implementations
    ///
    /// `impl Trait for .. {}`
    ItemDefaultImpl(Unsafety, TraitRef),
    /// An implementation, eg `impl<A> Trait for Foo { .. }`
    ItemImpl(Unsafety,
             ImplPolarity,
             Generics,
             Option<TraitRef>, // (optional) trait this impl implements
             P<Ty>, // self
             HirVec<ImplItemRef>),
}
impl Item_ {
    /// A short human-readable name for this kind of item.
    /// Note that `ItemImpl` and `ItemDefaultImpl` both map to the
    /// generic "item".
    pub fn descriptive_variant(&self) -> &str {
        match *self {
            ItemExternCrate(..) => "extern crate",
            ItemUse(..) => "use",
            ItemStatic(..) => "static item",
            ItemConst(..) => "constant item",
            ItemFn(..) => "function",
            ItemMod(..) => "module",
            ItemForeignMod(..) => "foreign module",
            ItemTy(..) => "type alias",
            ItemEnum(..) => "enum",
            ItemStruct(..) => "struct",
            ItemUnion(..) => "union",
            ItemTrait(..) => "trait",
            ItemImpl(..) |
            ItemDefaultImpl(..) => "item",
        }
    }
}
/// A reference from an trait to one of its associated items. This
/// contains the item's id, naturally, but also the item's name and
/// some other high-level details (like whether it is an associated
/// type or method, and whether it is public). This allows other
/// passes to find the impl they want without loading the id (which
/// means fewer edges in the incremental compilation graph).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItemRef {
pub id: TraitItemId,
pub name: Name,
pub kind: AssociatedItemKind,
pub span: Span,
pub defaultness: Defaultness,
}
/// A reference from an impl to one of its associated items. This
/// contains the item's id, naturally, but also the item's name and
/// some other high-level details (like whether it is an associated
/// type or method, and whether it is public). This allows other
/// passes to find the impl they want without loading the id (which
/// means fewer edges in the incremental compilation graph).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItemRef {
pub id: ImplItemId,
pub name: Name,
pub kind: AssociatedItemKind,
pub span: Span,
pub vis: Visibility,
pub defaultness: Defaultness,
}
/// The kind of an associated item referenced from a trait or impl.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum AssociatedItemKind {
    Const,
    /// A method; `has_self` records whether it takes a `self` parameter.
    Method { has_self: bool },
    Type,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ForeignItem {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub node: ForeignItem_,
pub id: NodeId,
pub span: Span,
pub vis: Visibility,
}
/// An item within an `extern` block
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ForeignItem_ {
/// A foreign function
ForeignItemFn(P<FnDecl>, HirVec<Spanned<Name>>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
/// (the boolean is true when mutable)
ForeignItemStatic(P<Ty>, bool),
}
impl ForeignItem_ {
    /// A short human-readable name for this kind of foreign item.
    pub fn descriptive_variant(&self) -> &str {
        if let ForeignItemFn(..) = *self {
            "foreign function"
        } else {
            "foreign static item"
        }
    }
}
/// A free variable referred to in a function.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct Freevar {
    /// The variable being accessed free.
    pub def: Def,
    /// First span where it is accessed (there can be multiple).
    pub span: Span
}
pub type FreevarMap = NodeMap<Vec<Freevar>>;
pub type CaptureModeMap = NodeMap<CaptureClause>;
#[derive(Clone, Debug)]
pub struct TraitCandidate {
pub def_id: DefId,
pub import_id: Option<NodeId>,
}
// Trait method resolution
pub type TraitMap = NodeMap<Vec<TraitCandidate>>;
// Map from the NodeId of a glob import to a list of items which are actually
// imported.
pub type GlobMap = NodeMap<FxHashSet<Name>>;
| 30.93719 | 99 | 0.616338 |
ded52c79589e3a138a315370966e8bd4bbe26a31 | 136 | #[macro_use]
extern crate serde_derive;
// NOTE(review): `#[serde(flatten)]` is only supported on named struct
// fields, and `HashMap` is used here without an import — this looks like
// a deliberate compile-fail test case for serde_derive; confirm.
#[derive(Serialize)]
struct Foo(u32, #[serde(flatten)] HashMap<String, String>);
fn main() {}
| 17 | 59 | 0.705882 |
2184399b239318d630090746b4aef797ff925903 | 2,821 | extern crate cv;
extern crate getopts;
use cv::highgui::*;
use cv::imgcodecs::*;
use cv::objdetect::*;
use cv::*;
#[cfg(feature = "cuda")]
use cv::cuda::GpuHog as Hog;
#[cfg(not(feature = "cuda"))]
use cv::objdetect::HogDescriptor as Hog;
use std::fs;
use std::fs::File;
use std::io::{Read, Result};
use std::path::Path;
/// Entry point: run the detection pipeline, panicking on any error.
fn main() {
    run().unwrap();
}
/// Parses the command line, configures a HOG-based people detector, and
/// runs it over every entry in the directory given with `-d`.
///
/// Flags: `-m` prints the per-image detection time in milliseconds,
/// `-s` shows detections in a window, `-h` prints usage.
fn run() -> Result<()> {
    let args: Vec<String> = ::std::env::args().collect();
    let program = args[0].clone();
    let mut opts = getopts::Options::new();
    opts.optopt("d", "dir", "the directory to look for images", "DIRECTORY");
    opts.optflag("m", "measure", "measure the execution time (report in ms)");
    opts.optflag("s", "show", "display the detection results");
    opts.optflag("h", "help", "print this help menu");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        Err(_) => {
            // Bad flags: show usage and exit with a failure status.
            print_usage(&program, opts);
            ::std::process::exit(-1);
        }
    };
    if matches.opt_present("h") {
        print_usage(&program, opts);
        return Ok(());
    }
    let show = matches.opt_present("s");
    let measure = matches.opt_present("m");
    let dir = matches.opt_str("d").expect("You need to provide the directory");
    if show {
        highgui_named_window("window", WindowFlag::Autosize).unwrap();
    }
    let mut param = HogParams::default();
    // NOTE(review): group_threshold = 0 appears to disable grouping of
    // overlapping detections — confirm against the OpenCV HOG docs.
    param.group_threshold = 0;
    let mut hog = Hog::with_params(param);
    // Use the library-provided pretrained people detector.
    let detector = SvmDetector::default_people_detector();
    hog.set_svm_detector(detector);
    for entry in fs::read_dir(Path::new(&dir))? {
        let dir = entry?;
        println!("Processing {:?}", dir.path());
        run_detect_for_image(&mut hog, dir.path(), show, measure);
    }
    Ok(())
}
/// Runs `detector` on the image file at `path` and prints
/// `"<frame>,<count>,"` where the frame number is parsed from the file
/// stem (e.g. `0042.png` -> 42; panics on non-numeric names).
///
/// If `measure` is set, the detection time in milliseconds is appended,
/// terminating the line. If `show` is set, each detection rectangle
/// (scaled by 0.6) is drawn on the image, which is then displayed until
/// a key is pressed.
fn run_detect_for_image<P: AsRef<Path>, OD: ObjectDetect>(detector: &mut OD, path: P, show: bool, measure: bool) {
    let mut buf = Vec::new();
    let filename = path.as_ref().file_stem().unwrap().to_string_lossy().into_owned();
    let frame_num = filename.parse::<usize>().unwrap();
    File::open(path).unwrap().read_to_end(&mut buf).unwrap();
    let mat = Mat::image_decode(&buf, ImageReadMode::Grayscale);

    // Time only the detection itself, not decoding or drawing.
    let start = ::std::time::Instant::now();
    let results = detector.detect(&mat);
    let elapsed = start.elapsed();

    print!("{},{},", frame_num, results.len());
    if measure {
        println!(
            "{}",
            elapsed.as_secs() as f64 * 1_000.0 + elapsed.subsec_nanos() as f64 / 1_000_000.0
        );
    }
    if show {
        // Draw each detection with a plain loop; the previous
        // `map(..).count()` abused an iterator adaptor purely for its
        // side effect.
        for &(rect, _weight) in results.iter() {
            mat.rectangle(rect.scale(0.6));
        }
        mat.show("window", 0).unwrap();
    }
}
/// Prints the getopts-generated usage text for this binary.
fn print_usage(program: &str, opts: getopts::Options) {
    print!(
        "{}",
        opts.usage(&format!("Usage: {} [options] DIRECTORY", program))
    );
}
| 28.785714 | 114 | 0.591634 |
fc8e09564e7934931838f9d49b2ab1da049b046d | 17,654 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::iter::repeat;
use cryptoutil::{copy_memory, read_u32v_le, write_u32v_le};
use digest::Digest;
use mac::{Mac, MacResult};
use util::secure_memset;
/// BLAKE2s initialization vector (the same constants as the SHA-256 IV,
/// per RFC 7693).
static IV : [u32; 8] = [
    0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A,
    0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19
];
/// Message-word permutation schedule: round `r` mixes message word
/// `SIGMA[r][i]` at position `i` (RFC 7693, section 2.7).
static SIGMA : [[usize; 16]; 10] = [
    [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ],
    [ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ],
    [ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 ],
    [ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 ],
    [ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 ],
    [ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 ],
    [ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 ],
    [ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 ],
    [ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 ],
    [ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13 , 0 ]
];
/// Compression-function block size, in bytes.
const BLAKE2S_BLOCKBYTES : usize = 64;
/// Maximum digest size, in bytes.
const BLAKE2S_OUTBYTES : usize = 32;
/// Maximum key size, in bytes.
const BLAKE2S_KEYBYTES : usize = 32;
/// Salt length in the parameter block, in bytes.
const BLAKE2S_SALTBYTES : usize = 8;
/// Personalization-string length in the parameter block, in bytes.
const BLAKE2S_PERSONALBYTES : usize = 8;
/// Incremental BLAKE2s hasher state, usable both as a plain digest and
/// as a keyed MAC (see the `Digest` and `Mac` impls below).
#[derive(Copy)]
pub struct Blake2s {
    h: [u32; 8],     // chained hash state
    t: [u32; 2],     // 64-bit message byte counter, low word first
    f: [u32; 2],     // finalization flags (last block / last node)
    buf: [u8; 2*BLAKE2S_BLOCKBYTES], // input buffer (up to two blocks); holds the digest after finalization
    buflen: usize,   // number of bytes currently buffered
    key: [u8; BLAKE2S_KEYBYTES], // stored key, re-absorbed on reset()
    key_length: u8,
    last_node: u8,   // tree hashing: non-zero marks the last node
    digest_length: u8, // output size in bytes (1..=32)
    computed: bool, // whether the final digest has been computed
    param: Blake2sParam // parameter block XORed into the IV at init
}
impl Clone for Blake2s { fn clone(&self) -> Blake2s { *self } }
/// The BLAKE2s parameter block; serialized little-endian by
/// `apply_param` and XORed into the IV at initialization.
#[derive(Copy, Clone)]
struct Blake2sParam {
    digest_length: u8,
    key_length: u8,
    fanout: u8,
    depth: u8,
    leaf_length: u32,
    node_offset: [u8; 6],
    node_depth: u8,
    inner_length: u8,
    salt: [u8; BLAKE2S_SALTBYTES],
    personal: [u8; BLAKE2S_PERSONALBYTES],
}
// The BLAKE2 quarter-round mixing function G: mixes two message words
// (selected via the SIGMA schedule for round `$r`, position `$i`) into
// the four state words `$a..$d` with add / xor / rotate operations.
macro_rules! G( ($r:expr, $i:expr, $a:expr, $b:expr, $c:expr, $d:expr, $m:expr) => ({
    $a = $a.wrapping_add($b).wrapping_add($m[SIGMA[$r][2*$i+0]]);
    $d = ($d ^ $a).rotate_right(16);
    $c = $c.wrapping_add($d);
    $b = ($b ^ $c).rotate_right(12);
    $a = $a.wrapping_add($b).wrapping_add($m[SIGMA[$r][2*$i+1]]);
    $d = ($d ^ $a).rotate_right(8);
    $c = $c.wrapping_add($d);
    $b = ($b ^ $c).rotate_right(7);
}));
// One full BLAKE2s round: G applied to the four columns, then the four
// diagonals, of the 4x4 word matrix `$v`, with message words `$m`.
macro_rules! round( ($r:expr, $v:expr, $m:expr) => ( {
    G!($r,0,$v[ 0],$v[ 4],$v[ 8],$v[12], $m);
    G!($r,1,$v[ 1],$v[ 5],$v[ 9],$v[13], $m);
    G!($r,2,$v[ 2],$v[ 6],$v[10],$v[14], $m);
    G!($r,3,$v[ 3],$v[ 7],$v[11],$v[15], $m);
    G!($r,4,$v[ 0],$v[ 5],$v[10],$v[15], $m);
    G!($r,5,$v[ 1],$v[ 6],$v[11],$v[12], $m);
    G!($r,6,$v[ 2],$v[ 7],$v[ 8],$v[13], $m);
    G!($r,7,$v[ 3],$v[ 4],$v[ 9],$v[14], $m);
  }
));
impl Blake2s {
    /// Marks this instance as the last node of a tree-hashing layer by
    /// setting the second finalization flag word.
    fn set_lastnode(&mut self) {
        self.f[1] = 0xFFFFFFFF;
    }
    /// Marks the block about to be compressed as the final one: sets the
    /// first finalization flag, and the last-node flag too if requested.
    fn set_lastblock(&mut self) {
        if self.last_node!=0 {
            self.set_lastnode();
        }
        self.f[0] = 0xFFFFFFFF;
    }
    /// Adds `inc` bytes to the 64-bit message counter `t` (two u32
    /// words, low first), carrying into the high word on wrap-around.
    fn increment_counter(&mut self, inc : u32) {
        self.t[0] += inc;
        self.t[1] += if self.t[0] < inc { 1 } else { 0 };
    }
    /// Builds the base state: `h` starts as the IV (the parameter block
    /// is XORed in later by `apply_param`), counters/buffers are zeroed,
    /// and the key bytes are stashed for later absorption.
    fn init0(param: Blake2sParam, digest_length: u8, key: &[u8]) -> Blake2s {
        assert!(key.len() <= BLAKE2S_KEYBYTES);
        let mut b = Blake2s {
            h: IV,
            t: [0,0],
            f: [0,0],
            buf: [0; 2*BLAKE2S_BLOCKBYTES],
            buflen: 0,
            last_node: 0,
            digest_length: digest_length,
            computed: false,
            key: [0; BLAKE2S_KEYBYTES],
            key_length: key.len() as u8,
            param: param
        };
        copy_memory(key, &mut b.key);
        b
    }
    /// Serializes the 32-byte parameter block in little-endian layout
    /// and XORs it word-wise into the state `h`.
    fn apply_param(&mut self) {
        use std::io::Write;
        use cryptoutil::WriteExt;
        let mut param_bytes : [u8; 32] = [0; 32];
        {
            let mut writer: &mut [u8] = &mut param_bytes;
            writer.write_u8(self.param.digest_length).unwrap();
            writer.write_u8(self.param.key_length).unwrap();
            writer.write_u8(self.param.fanout).unwrap();
            writer.write_u8(self.param.depth).unwrap();
            writer.write_u32_le(self.param.leaf_length).unwrap();
            writer.write_all(&self.param.node_offset).unwrap();
            writer.write_u8(self.param.node_depth).unwrap();
            writer.write_u8(self.param.inner_length).unwrap();
            writer.write_all(&self.param.salt).unwrap();
            writer.write_all(&self.param.personal).unwrap();
        }
        let mut param_words : [u32; 8] = [0; 8];
        read_u32v_le(&mut param_words, &param_bytes);
        for (h, param_word) in self.h.iter_mut().zip(param_words.iter()) {
            *h = *h ^ *param_word;
        }
    }
    // init xors IV with input parameter block
    fn init_param( p: Blake2sParam, key: &[u8] ) -> Blake2s {
        let mut b = Blake2s::init0(p, p.digest_length, key);
        b.apply_param();
        b
    }
    /// The parameter block for plain sequential hashing with output
    /// length `outlen` and no key/salt/personalization.
    fn default_param(outlen: u8) -> Blake2sParam {
        Blake2sParam {
            digest_length: outlen,
            key_length: 0,
            fanout: 1,
            depth: 1,
            leaf_length: 0,
            node_offset: [0; 6],
            node_depth: 0,
            inner_length: 0,
            salt: [0; BLAKE2S_SALTBYTES],
            personal: [0; BLAKE2S_PERSONALBYTES],
        }
    }
    /// Creates an unkeyed BLAKE2s hasher producing `outlen` bytes
    /// (must be 1..=32).
    pub fn new(outlen: usize) -> Blake2s {
        assert!(outlen > 0 && outlen <= BLAKE2S_OUTBYTES);
        Blake2s::init_param(Blake2s::default_param(outlen as u8), &[])
    }
    /// Absorbs the stored key as one full zero-padded block, then wipes
    /// the temporary block from the stack.
    fn apply_key(&mut self) {
        let mut block : [u8; BLAKE2S_BLOCKBYTES] = [0; BLAKE2S_BLOCKBYTES];
        copy_memory(&self.key[..self.key_length as usize], &mut block);
        self.update(&block);
        secure_memset(&mut block[..], 0);
    }
    /// Creates a keyed BLAKE2s hasher (MAC mode) producing `outlen`
    /// bytes; the key must be 1..=32 bytes long.
    pub fn new_keyed(outlen: usize, key: &[u8] ) -> Blake2s {
        assert!(outlen > 0 && outlen <= BLAKE2S_OUTBYTES);
        assert!(key.len() > 0 && key.len() <= BLAKE2S_KEYBYTES);
        let param = Blake2sParam {
            digest_length: outlen as u8,
            key_length: key.len() as u8,
            fanout: 1,
            depth: 1,
            leaf_length: 0,
            node_offset: [0; 6],
            node_depth: 0,
            inner_length: 0,
            salt: [0; BLAKE2S_SALTBYTES],
            personal: [0; BLAKE2S_PERSONALBYTES],
        };
        let mut b = Blake2s::init_param(param, key);
        b.apply_key();
        b
    }
    /// The BLAKE2s compression function F: loads the first buffered
    /// block as 16 little-endian words, builds a 16-word working vector
    /// from the state, IV, counter and finalization flags, mixes it
    /// through the ten rounds, and feeds the result forward into `h`.
    fn compress(&mut self) {
        let mut ms: [u32; 16] = [0; 16];
        let mut vs: [u32; 16] = [0; 16];
        read_u32v_le(&mut ms, &self.buf[0..BLAKE2S_BLOCKBYTES]);
        for (v, h) in vs.iter_mut().zip(self.h.iter()) {
            *v = *h;
        }
        vs[ 8] = IV[0];
        vs[ 9] = IV[1];
        vs[10] = IV[2];
        vs[11] = IV[3];
        vs[12] = self.t[0] ^ IV[4];
        vs[13] = self.t[1] ^ IV[5];
        vs[14] = self.f[0] ^ IV[6];
        vs[15] = self.f[1] ^ IV[7];
        round!( 0, vs, ms );
        round!( 1, vs, ms );
        round!( 2, vs, ms );
        round!( 3, vs, ms );
        round!( 4, vs, ms );
        round!( 5, vs, ms );
        round!( 6, vs, ms );
        round!( 7, vs, ms );
        round!( 8, vs, ms );
        round!( 9, vs, ms );
        for (h_elem, (v_low, v_high)) in self.h.iter_mut().zip( vs[0..8].iter().zip(vs[8..16].iter()) ) {
            *h_elem = *h_elem ^ *v_low ^ *v_high;
        }
    }
    /// Absorbs `input`. The buffer holds up to two blocks so the final
    /// (possibly partial) block is kept back and only compressed at
    /// finalization, as BLAKE2 requires.
    fn update( &mut self, mut input: &[u8] ) {
        while input.len() > 0 {
            let left = self.buflen;
            let fill = 2 * BLAKE2S_BLOCKBYTES - left;
            if input.len() > fill {
                copy_memory(&input[0..fill], &mut self.buf[left..]); // Fill buffer
                self.buflen += fill;
                self.increment_counter( BLAKE2S_BLOCKBYTES as u32);
                self.compress();
                // Shift the second buffered block down into the first slot.
                let mut halves = self.buf.chunks_mut(BLAKE2S_BLOCKBYTES);
                let first_half = halves.next().unwrap();
                let second_half = halves.next().unwrap();
                copy_memory(second_half, first_half);
                self.buflen -= BLAKE2S_BLOCKBYTES;
                input = &input[fill..input.len()];
            } else { // inlen <= fill
                copy_memory(input, &mut self.buf[left..]);
                self.buflen += input.len();
                break;
            }
        }
    }
    /// Finishes the hash: compresses any full buffered block, zero-pads
    /// the tail, runs the final compression with the finalization flags
    /// set, and copies the first `out.len()` state bytes (little-endian)
    /// into `out`. Idempotent: later calls reuse the digest cached in
    /// `buf`. Panics unless `out.len()` equals the configured digest
    /// length.
    fn finalize( &mut self, out: &mut [u8] ) {
        assert!(out.len() == self.digest_length as usize);
        if !self.computed {
            if self.buflen > BLAKE2S_BLOCKBYTES {
                self.increment_counter(BLAKE2S_BLOCKBYTES as u32);
                self.compress();
                self.buflen -= BLAKE2S_BLOCKBYTES;
                let mut halves = self.buf.chunks_mut(BLAKE2S_BLOCKBYTES);
                let first_half = halves.next().unwrap();
                let second_half = halves.next().unwrap();
                copy_memory(second_half, first_half);
            }
            let incby = self.buflen as u32;
            self.increment_counter(incby);
            self.set_lastblock();
            // Zero-pad the remainder of the final block.
            for b in self.buf[self.buflen..].iter_mut() {
                *b = 0;
            }
            self.compress();
            // Serialize the state into buf; it doubles as the digest cache.
            write_u32v_le(&mut self.buf[0..32], &self.h);
            self.computed = true;
        }
        let outlen = out.len();
        copy_memory(&self.buf[0..outlen], out);
    }
    /// Restores the freshly-initialized state (re-applies the parameter
    /// block and re-absorbs any stored key) so the hasher can be reused.
    pub fn reset(&mut self) {
        for (h_elem, iv_elem) in self.h.iter_mut().zip(IV.iter()) {
            *h_elem = *iv_elem;
        }
        for t_elem in self.t.iter_mut() {
            *t_elem = 0;
        }
        for f_elem in self.f.iter_mut() {
            *f_elem = 0;
        }
        for b in self.buf.iter_mut() {
            *b = 0;
        }
        self.buflen = 0;
        self.last_node = 0;
        self.computed = false;
        self.apply_param();
        if self.key_length > 0 {
            self.apply_key();
        }
    }
    /// One-shot convenience: hashes `input` (keyed if `key` is
    /// non-empty) into `out`, whose length selects the digest size.
    pub fn blake2s(out: &mut[u8], input: &[u8], key: &[u8]) {
        let mut hasher : Blake2s = if key.len() > 0 { Blake2s::new_keyed(out.len(), key) } else { Blake2s::new(out.len()) };
        hasher.update(input);
        hasher.finalize(out);
    }
}
impl Digest for Blake2s {
    // Thin adaptor forwarding the `Digest` trait onto the inherent methods.
    fn reset(&mut self) { Blake2s::reset(self); }
    fn input(&mut self, msg: &[u8]) { self.update(msg); }
    fn result(&mut self, out: &mut [u8]) { self.finalize(out); }
    fn output_bits(&self) -> usize { 8 * (self.digest_length as usize) }
    // NOTE(review): reported as 8 * block bytes — appears to be in bits,
    // like output_bits; confirm against the Digest trait contract.
    fn block_size(&self) -> usize { 8 * BLAKE2S_BLOCKBYTES }
}
impl Mac for Blake2s {
    /**
     * Process input data.
     *
     * # Arguments
     * * data - The input data to process.
     *
     */
    fn input(&mut self, data: &[u8]) {
        self.update(data);
    }
    /**
     * Reset the Mac state to begin processing another input stream.
     */
    fn reset(&mut self) {
        Blake2s::reset(self);
    }
    /**
     * Obtain the result of a Mac computation as a MacResult.
     */
    fn result(&mut self) -> MacResult {
        // Allocates a fresh digest-sized buffer for the owned result.
        let mut mac: Vec<u8> = repeat(0).take(self.digest_length as usize).collect();
        self.raw_result(&mut mac);
        MacResult::new_from_owned(mac)
    }
    /**
     * Obtain the result of a Mac computation as [u8]. This method should be used very carefully
     * since incorrect use of the Mac code could result in permitting a timing attack which defeats
     * the security provided by a Mac function.
     */
    fn raw_result(&mut self, output: &mut [u8]) {
        self.finalize(output);
    }
    /**
     * Get the size of the Mac code, in bytes.
     */
    fn output_bytes(&self) -> usize { self.digest_length as usize }
}
#[cfg(test)]
mod digest_tests {
    //use cryptoutil::test::test_digest_1million_random;
    use blake2s::Blake2s;
    use digest::Digest;
    /// A known-answer vector: input, expected digest, and optional key.
    struct Test {
        input: Vec<u8>,
        output: Vec<u8>,
        key: Option<Vec<u8>>,
    }
    /// Runs each vector twice — all-at-once and split into uneven
    /// pieces — to exercise the internal double-block buffering, and
    /// resets the hasher in between to exercise `reset`.
    fn test_hash(tests: &[Test]) {
        for t in tests {
            let mut sh = match t.key {
                Some(ref key) => Blake2s::new_keyed(32, &key),
                None => Blake2s::new(32)
            };
            // Test that it works when accepting the message all at once
            sh.input(&t.input[..]);
            let mut out = [0u8; 32];
            sh.result(&mut out);
            assert!(&out[..] == &t.output[..]);
            sh.reset();
            // Test that it works when accepting the message in pieces
            let len = t.input.len();
            let mut left = len;
            while left > 0 {
                let take = (left + 1) / 2;
                sh.input(&t.input[len - left..take + len - left]);
                left -= take;
            }
            let mut out = [0u8; 32];
            sh.result(&mut out);
            assert!(&out[..] == &t.output[..]);
            sh.reset();
        }
    }
    #[test]
    fn test_blake2s_digest() {
        let tests = vec![
            // from: https://github.com/BLAKE2/BLAKE2/blob/master/testvectors/blake2s-test.txt
            Test {
                input: vec![0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
                            0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                            0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,
                            0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
                            0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b,
                            0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
                            0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53,
                            0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
                            0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b,
                            0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
                            0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83,
                            0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
                            0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b,
                            0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
                            0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3,
                            0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
                            0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb,
                            0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
                            0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3,
                            0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
                            0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb,
                            0xfc, 0xfd, 0xfe],
                output: vec![0x3f, 0xb7, 0x35, 0x06, 0x1a, 0xbc, 0x51, 0x9d, 0xfe, 0x97, 0x9e,
                             0x54, 0xc1, 0xee, 0x5b, 0xfa, 0xd0, 0xa9, 0xd8, 0x58, 0xb3, 0x31,
                             0x5b, 0xad, 0x34, 0xbd, 0xe9, 0x99, 0xef, 0xd7, 0x24, 0xdd],
                key: Some(vec![0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
                               0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15,
                               0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f])
            },
        ];
        test_hash(&tests[..]);
    }
}
#[cfg(test)]
mod mac_tests {
    use blake2s::Blake2s;
    use mac::Mac;
    /// Known-answer test for keyed BLAKE2s through the `Mac` trait,
    /// using the 32-byte key 0x00..0x1f.
    #[test]
    fn test_blake2s_mac() {
        let key: Vec<u8> = (0..32).map(|i| i).collect();
        let mut m = Blake2s::new_keyed(32, &key[..]);
        m.input(&[1,2,4,8]);
        let expected = [
            0x0e, 0x88, 0xf6, 0x8a, 0xaa, 0x5c, 0x4e, 0xd8,
            0xf7, 0xed, 0x28, 0xf8, 0x04, 0x45, 0x01, 0x9c,
            0x7e, 0xf9, 0x76, 0x2b, 0x4f, 0xf1, 0xad, 0x7e,
            0x05, 0x5b, 0xa8, 0xc8, 0x82, 0x9e, 0xe2, 0x49
        ];
        assert_eq!(m.result().code().to_vec(), expected.to_vec());
    }
}
// Throughput benchmarks at three input sizes; only built with the
// "with-bench" feature since `test::Bencher` requires nightly.
#[cfg(all(test, feature = "with-bench"))]
mod bench {
    use test::Bencher;
    use digest::Digest;
    use blake2s::Blake2s;
    #[bench]
    pub fn blake2s_10(bh: & mut Bencher) {
        let mut sh = Blake2s::new(32);
        let bytes = [1u8; 10];
        bh.iter( || {
            sh.input(&bytes);
        });
        bh.bytes = bytes.len() as u64;
    }
    #[bench]
    pub fn blake2s_1k(bh: & mut Bencher) {
        let mut sh = Blake2s::new(32);
        let bytes = [1u8; 1024];
        bh.iter( || {
            sh.input(&bytes);
        });
        bh.bytes = bytes.len() as u64;
    }
    #[bench]
    pub fn blake2s_64k(bh: & mut Bencher) {
        let mut sh = Blake2s::new(32);
        let bytes = [1u8; 65536];
        bh.iter( || {
            sh.input(&bytes);
        });
        bh.bytes = bytes.len() as u64;
    }
}
| 33.626667 | 124 | 0.500397 |
291475f749358b01570492ffc5be15cf7622ee89 | 13,949 | // This file is generated by rust-protobuf 3.0.0-pre. Do not edit
// .proto file is parsed by protoc --rust-out=...
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `google/protobuf/timestamp.proto`
/// Mirrors `google.protobuf.Timestamp`: a point in time as seconds plus
/// nanoseconds since the Unix epoch.
/// NOTE(review): this file is marked "@generated" — hand edits will be
/// lost on regeneration.
#[derive(PartialEq,Clone,Default)]
#[cfg_attr(serde, derive(Serialize, Deserialize))]
pub struct Timestamp {
    // message fields
    /// Represents seconds of UTC time since Unix epoch
    /// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
    /// 9999-12-31T23:59:59Z inclusive.
    pub seconds: i64,
    /// Non-negative fractions of a second at nanosecond resolution. Negative
    /// second values with fractions must still have non-negative nanos values
    /// that count forward in time. Must be from 0 to 999,999,999
    /// inclusive.
    pub nanos: i32,
    // special fields
    #[cfg_attr(serde, serde(skip))]
    pub unknown_fields: crate::UnknownFields,
    #[cfg_attr(serde, serde(skip))]
    pub cached_size: crate::rt::CachedSize,
}
impl<'a> ::std::default::Default for &'a Timestamp {
fn default() -> &'a Timestamp {
<Timestamp as crate::Message>::default_instance()
}
}
impl Timestamp {
    /// Creates a `Timestamp` with every field at its proto3 default
    /// (seconds = 0, nanos = 0, no unknown fields).
    pub fn new() -> Timestamp {
        ::std::default::Default::default()
    }
}
impl crate::Message for Timestamp {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut crate::CodedInputStream<'_>) -> crate::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != crate::wire_format::WireTypeVarint {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.seconds = is.read_int64()?;
},
2 => {
if wire_type != crate::wire_format::WireTypeVarint {
return ::std::result::Result::Err(crate::rt::unexpected_wire_type(wire_type));
}
self.nanos = is.read_int32()?;
},
_ => {
crate::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.seconds != 0 {
my_size += crate::rt::value_size(1, self.seconds, crate::wire_format::WireTypeVarint);
}
if self.nanos != 0 {
my_size += crate::rt::value_size(2, self.nanos, crate::wire_format::WireTypeVarint);
}
my_size += crate::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut crate::CodedOutputStream<'_>) -> crate::ProtobufResult<()> {
if self.seconds != 0 {
os.write_int64(1, self.seconds)?;
}
if self.nanos != 0 {
os.write_int32(2, self.nanos)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &crate::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut crate::UnknownFields {
&mut self.unknown_fields
}
fn descriptor(&self) -> &'static crate::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Timestamp {
Timestamp::new()
}
fn descriptor_static() -> &'static crate::reflect::MessageDescriptor {
static descriptor: crate::rt::Lazy<crate::reflect::MessageDescriptor> = crate::rt::Lazy::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(crate::reflect::rt::make_simple_field_accessor::<_, crate::reflect::types::ProtobufTypeInt64>(
"seconds",
|m: &Timestamp| { &m.seconds },
|m: &mut Timestamp| { &mut m.seconds },
));
fields.push(crate::reflect::rt::make_simple_field_accessor::<_, crate::reflect::types::ProtobufTypeInt32>(
"nanos",
|m: &Timestamp| { &m.nanos },
|m: &mut Timestamp| { &mut m.nanos },
));
crate::reflect::MessageDescriptor::new::<Timestamp>(
"Timestamp",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Timestamp {
static instance: crate::rt::Lazy<Timestamp> = crate::rt::Lazy::INIT;
instance.get(Timestamp::new)
}
}
impl crate::Clear for Timestamp {
    /// Resets every field, including accumulated unknown fields,
    /// back to its default value.
    fn clear(&mut self) {
        self.seconds = 0;
        self.nanos = 0;
        self.unknown_fields.clear();
    }
}
impl ::std::fmt::Debug for Timestamp {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
crate::text_format::fmt(self, f)
}
}
impl crate::reflect::ProtobufValue for Timestamp {
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\";\n\tTimes\
tamp\x12\x18\n\x07seconds\x18\x01\x20\x01(\x03R\x07seconds\x12\x14\n\x05\
nanos\x18\x02\x20\x01(\x05R\x05nanosB~\n\x13com.google.protobufB\x0eTime\
stampProtoP\x01Z+github.com/golang/protobuf/ptypes/timestamp\xf8\x01\x01\
\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesJ\x95!\n\x06\
\x12\x04\x1e\0k\x01\n\xcc\x0c\n\x01\x0c\x12\x03\x1e\0\x122\xc1\x0c\x20Pr\
otocol\x20Buffers\x20-\x20Google's\x20data\x20interchange\x20format\n\
\x20Copyright\x202008\x20Google\x20Inc.\x20\x20All\x20rights\x20reserved\
.\n\x20https://developers.google.com/protocol-buffers/\n\n\x20Redistribu\
tion\x20and\x20use\x20in\x20source\x20and\x20binary\x20forms,\x20with\
\x20or\x20without\n\x20modification,\x20are\x20permitted\x20provided\x20\
that\x20the\x20following\x20conditions\x20are\n\x20met:\n\n\x20\x20\x20\
\x20\x20*\x20Redistributions\x20of\x20source\x20code\x20must\x20retain\
\x20the\x20above\x20copyright\n\x20notice,\x20this\x20list\x20of\x20cond\
itions\x20and\x20the\x20following\x20disclaimer.\n\x20\x20\x20\x20\x20*\
\x20Redistributions\x20in\x20binary\x20form\x20must\x20reproduce\x20the\
\x20above\n\x20copyright\x20notice,\x20this\x20list\x20of\x20conditions\
\x20and\x20the\x20following\x20disclaimer\n\x20in\x20the\x20documentatio\
n\x20and/or\x20other\x20materials\x20provided\x20with\x20the\n\x20distri\
bution.\n\x20\x20\x20\x20\x20*\x20Neither\x20the\x20name\x20of\x20Google\
\x20Inc.\x20nor\x20the\x20names\x20of\x20its\n\x20contributors\x20may\
\x20be\x20used\x20to\x20endorse\x20or\x20promote\x20products\x20derived\
\x20from\n\x20this\x20software\x20without\x20specific\x20prior\x20writte\
n\x20permission.\n\n\x20THIS\x20SOFTWARE\x20IS\x20PROVIDED\x20BY\x20THE\
\x20COPYRIGHT\x20HOLDERS\x20AND\x20CONTRIBUTORS\n\x20\"AS\x20IS\"\x20AND\
\x20ANY\x20EXPRESS\x20OR\x20IMPLIED\x20WARRANTIES,\x20INCLUDING,\x20BUT\
\x20NOT\n\x20LIMITED\x20TO,\x20THE\x20IMPLIED\x20WARRANTIES\x20OF\x20MER\
CHANTABILITY\x20AND\x20FITNESS\x20FOR\n\x20A\x20PARTICULAR\x20PURPOSE\
\x20ARE\x20DISCLAIMED.\x20IN\x20NO\x20EVENT\x20SHALL\x20THE\x20COPYRIGHT\
\n\x20OWNER\x20OR\x20CONTRIBUTORS\x20BE\x20LIABLE\x20FOR\x20ANY\x20DIREC\
T,\x20INDIRECT,\x20INCIDENTAL,\n\x20SPECIAL,\x20EXEMPLARY,\x20OR\x20CONS\
EQUENTIAL\x20DAMAGES\x20(INCLUDING,\x20BUT\x20NOT\n\x20LIMITED\x20TO,\
\x20PROCUREMENT\x20OF\x20SUBSTITUTE\x20GOODS\x20OR\x20SERVICES;\x20LOSS\
\x20OF\x20USE,\n\x20DATA,\x20OR\x20PROFITS;\x20OR\x20BUSINESS\x20INTERRU\
PTION)\x20HOWEVER\x20CAUSED\x20AND\x20ON\x20ANY\n\x20THEORY\x20OF\x20LIA\
BILITY,\x20WHETHER\x20IN\x20CONTRACT,\x20STRICT\x20LIABILITY,\x20OR\x20T\
ORT\n\x20(INCLUDING\x20NEGLIGENCE\x20OR\x20OTHERWISE)\x20ARISING\x20IN\
\x20ANY\x20WAY\x20OUT\x20OF\x20THE\x20USE\n\x20OF\x20THIS\x20SOFTWARE,\
\x20EVEN\x20IF\x20ADVISED\x20OF\x20THE\x20POSSIBILITY\x20OF\x20SUCH\x20D\
AMAGE.\n\n\x08\n\x01\x02\x12\x03\x20\0\x18\n\x08\n\x01\x08\x12\x03\"\0;\
\n\t\n\x02\x08%\x12\x03\"\0;\n\x08\n\x01\x08\x12\x03#\0\x1f\n\t\n\x02\
\x08\x1f\x12\x03#\0\x1f\n\x08\n\x01\x08\x12\x03$\0B\n\t\n\x02\x08\x0b\
\x12\x03$\0B\n\x08\n\x01\x08\x12\x03%\0,\n\t\n\x02\x08\x01\x12\x03%\0,\n\
\x08\n\x01\x08\x12\x03&\0/\n\t\n\x02\x08\x08\x12\x03&\0/\n\x08\n\x01\x08\
\x12\x03'\0\"\n\t\n\x02\x08\n\x12\x03'\0\"\n\x08\n\x01\x08\x12\x03(\0!\n\
\t\n\x02\x08$\x12\x03(\0!\n\xb8\x0f\n\x02\x04\0\x12\x04_\0k\x01\x1a\xab\
\x0f\x20A\x20Timestamp\x20represents\x20a\x20point\x20in\x20time\x20inde\
pendent\x20of\x20any\x20time\x20zone\n\x20or\x20calendar,\x20represented\
\x20as\x20seconds\x20and\x20fractions\x20of\x20seconds\x20at\n\x20nanose\
cond\x20resolution\x20in\x20UTC\x20Epoch\x20time.\x20It\x20is\x20encoded\
\x20using\x20the\n\x20Proleptic\x20Gregorian\x20Calendar\x20which\x20ext\
ends\x20the\x20Gregorian\x20calendar\n\x20backwards\x20to\x20year\x20one\
.\x20It\x20is\x20encoded\x20assuming\x20all\x20minutes\x20are\x2060\n\
\x20seconds\x20long,\x20i.e.\x20leap\x20seconds\x20are\x20\"smeared\"\
\x20so\x20that\x20no\x20leap\x20second\n\x20table\x20is\x20needed\x20for\
\x20interpretation.\x20Range\x20is\x20from\n\x200001-01-01T00:00:00Z\x20\
to\x209999-12-31T23:59:59.999999999Z.\n\x20By\x20restricting\x20to\x20th\
at\x20range,\x20we\x20ensure\x20that\x20we\x20can\x20convert\x20to\n\x20\
and\x20from\x20\x20RFC\x203339\x20date\x20strings.\n\x20See\x20[https://\
www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).\n\n\
\x20Example\x201:\x20Compute\x20Timestamp\x20from\x20POSIX\x20`time()`.\
\n\n\x20\x20\x20\x20\x20Timestamp\x20timestamp;\n\x20\x20\x20\x20\x20tim\
estamp.set_seconds(time(NULL));\n\x20\x20\x20\x20\x20timestamp.set_nanos\
(0);\n\n\x20Example\x202:\x20Compute\x20Timestamp\x20from\x20POSIX\x20`g\
ettimeofday()`.\n\n\x20\x20\x20\x20\x20struct\x20timeval\x20tv;\n\x20\
\x20\x20\x20\x20gettimeofday(&tv,\x20NULL);\n\n\x20\x20\x20\x20\x20Times\
tamp\x20timestamp;\n\x20\x20\x20\x20\x20timestamp.set_seconds(tv.tv_sec)\
;\n\x20\x20\x20\x20\x20timestamp.set_nanos(tv.tv_usec\x20*\x201000);\n\n\
\x20Example\x203:\x20Compute\x20Timestamp\x20from\x20Win32\x20`GetSystem\
TimeAsFileTime()`.\n\n\x20\x20\x20\x20\x20FILETIME\x20ft;\n\x20\x20\x20\
\x20\x20GetSystemTimeAsFileTime(&ft);\n\x20\x20\x20\x20\x20UINT64\x20tic\
ks\x20=\x20(((UINT64)ft.dwHighDateTime)\x20<<\x2032)\x20|\x20ft.dwLowDat\
eTime;\n\n\x20\x20\x20\x20\x20//\x20A\x20Windows\x20tick\x20is\x20100\
\x20nanoseconds.\x20Windows\x20epoch\x201601-01-01T00:00:00Z\n\x20\x20\
\x20\x20\x20//\x20is\x2011644473600\x20seconds\x20before\x20Unix\x20epoc\
h\x201970-01-01T00:00:00Z.\n\x20\x20\x20\x20\x20Timestamp\x20timestamp;\
\n\x20\x20\x20\x20\x20timestamp.set_seconds((INT64)\x20((ticks\x20/\x201\
0000000)\x20-\x2011644473600LL));\n\x20\x20\x20\x20\x20timestamp.set_nan\
os((INT32)\x20((ticks\x20%\x2010000000)\x20*\x20100));\n\n\x20Example\
\x204:\x20Compute\x20Timestamp\x20from\x20Java\x20`System.currentTimeMil\
lis()`.\n\n\x20\x20\x20\x20\x20long\x20millis\x20=\x20System.currentTime\
Millis();\n\n\x20\x20\x20\x20\x20Timestamp\x20timestamp\x20=\x20Timestam\
p.newBuilder().setSeconds(millis\x20/\x201000)\n\x20\x20\x20\x20\x20\x20\
\x20\x20\x20.setNanos((int)\x20((millis\x20%\x201000)\x20*\x201000000)).\
build();\n\n\n\x20Example\x205:\x20Compute\x20Timestamp\x20from\x20curre\
nt\x20time\x20in\x20Python.\n\n\x20\x20\x20\x20\x20timestamp\x20=\x20Tim\
estamp()\n\x20\x20\x20\x20\x20timestamp.GetCurrentTime()\n\n\n\n\n\n\x03\
\x04\0\x01\x12\x03_\x08\x11\n\x9c\x01\n\x04\x04\0\x02\0\x12\x03d\x02\x14\
\x1a\x8e\x01\x20Represents\x20seconds\x20of\x20UTC\x20time\x20since\x20U\
nix\x20epoch\n\x201970-01-01T00:00:00Z.\x20Must\x20be\x20from\x200001-01\
-01T00:00:00Z\x20to\n\x209999-12-31T23:59:59Z\x20inclusive.\n\n\x0c\n\
\x05\x04\0\x02\0\x05\x12\x03d\x02\x07\n\x0c\n\x05\x04\0\x02\0\x01\x12\
\x03d\x08\x0f\n\x0c\n\x05\x04\0\x02\0\x03\x12\x03d\x12\x13\n\xe4\x01\n\
\x04\x04\0\x02\x01\x12\x03j\x02\x12\x1a\xd6\x01\x20Non-negative\x20fract\
ions\x20of\x20a\x20second\x20at\x20nanosecond\x20resolution.\x20Negative\
\n\x20second\x20values\x20with\x20fractions\x20must\x20still\x20have\x20\
non-negative\x20nanos\x20values\n\x20that\x20count\x20forward\x20in\x20t\
ime.\x20Must\x20be\x20from\x200\x20to\x20999,999,999\n\x20inclusive.\n\n\
\x0c\n\x05\x04\0\x02\x01\x05\x12\x03j\x02\x07\n\x0c\n\x05\x04\0\x02\x01\
\x01\x12\x03j\x08\r\n\x0c\n\x05\x04\0\x02\x01\x03\x12\x03j\x10\x11b\x06p\
roto3\
";
static file_descriptor_proto_lazy: crate::rt::Lazy<crate::descriptor::FileDescriptorProto> = crate::rt::Lazy::INIT;
fn parse_descriptor_proto() -> crate::descriptor::FileDescriptorProto {
crate::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
/// `FileDescriptorProto` object which was a source for this generated file
pub fn file_descriptor_proto() -> &'static crate::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
| 47.934708 | 118 | 0.679977 |
11618d30b9d4cc59c51a4a97a02d8569519fa15c | 4,945 | mod code_writer;
use crate::converter::BaseRoot;
use crate::SFCInfo;
use crate::ir::{self as C, ConvertInfo, IRNode, IRRoot};
use code_writer::CodeWriter;
use smallvec::{smallvec, SmallVec};
use std::marker::PhantomData;
use std::{
borrow::Cow,
rc::Rc,
io::{self, Write as ioWrite},
};
pub trait CodeGenerator {
type IR<'a>;
type Info<'a>;
type Output;
/// generate will take optimized ir node and output
/// desired code format, e.g. String or Binary code or StdOut
fn generate<'a>(&self, node: Self::IR<'a>, info: Self::Info<'a>) -> Self::Output;
}
/// Controls the overall shape of the generated script:
/// a plain function (global/browser build) or an ES module.
#[derive(PartialEq, Eq, Clone)]
pub enum ScriptMode {
    /// Emit a plain function that reads helpers from a global object.
    Function {
        /// Transform expressions like {{ foo }} to `_ctx.foo`.
        /// If this option is false, the generated code will be wrapped in a
        /// `with (this) { ... }` block.
        /// - This is force-enabled in module mode, since modules are by default strict
        ///   and cannot use `with`
        /// @default mode === 'module'
        prefix_identifier: bool,
        /// Customize the global variable name of `Vue` to get helpers from
        /// in function mode
        /// @default 'Vue'
        runtime_global_name: String,
    },
    /// Emit an ES module that imports its runtime helpers.
    Module {
        /// Customize where to import runtime helpers from.
        /// @default 'vue'
        runtime_module_name: String,
    },
}
/// Options controlling code generation.
#[derive(Clone)]
pub struct CodeGenerateOption {
    /// Emit dev-mode output when true.
    pub is_dev: bool,
    /// Function vs. module output shape; see [`ScriptMode`].
    pub mode: ScriptMode,
    /// Whether to emit a source map alongside the code.
    pub source_map: bool,
    // Printable names for runtime helpers (presumably indexed by a
    // helper id -- confirm against the helper table's definition).
    pub helper_strs: &'static [&'static str],
}
impl CodeGenerateOption {
    /// Returns true when the generated render code should be wrapped in
    /// a `with (this) { ... }` block: function mode without identifier
    /// prefixing. Module mode never uses `with`.
    fn use_with_scope(&self) -> bool {
        matches!(
            self.mode,
            ScriptMode::Function {
                prefix_identifier: false,
                ..
            }
        )
    }
}
impl Default for CodeGenerateOption {
    /// Dev-friendly defaults: function mode with `with (this)` scoping
    /// (no identifier prefixing), global name "Vue", no source map,
    /// and an empty helper-name table.
    fn default() -> Self {
        Self {
            is_dev: true,
            mode: ScriptMode::Function {
                prefix_identifier: false,
                runtime_global_name: "Vue".into(),
            },
            source_map: false,
            helper_strs: &[],
        }
    }
}
pub trait CoreCodeGenerator<T: ConvertInfo> {
type Written;
fn generate_ir(&mut self, ir: IRNode<T>) -> Self::Written {
use IRNode as IR;
match ir {
IR::TextCall(t) => self.generate_text(t),
IR::If(v_if) => self.generate_if(v_if),
IR::For(v_for) => self.generate_for(v_for),
IR::VNodeCall(vnode) => self.generate_vnode(vnode),
IR::RenderSlotCall(r) => self.generate_slot_outlet(r),
IR::VSlotUse(s) => self.generate_v_slot(s),
IR::AlterableSlot(a) => self.generate_alterable_slot(a),
IR::CacheNode(cache) => self.generate_cache(cache),
IR::CommentCall(c) => self.generate_comment(c),
}
}
fn generate_prologue(&mut self, t: &mut IRRoot<T>) -> Self::Written;
fn generate_epilogue(&mut self) -> Self::Written;
fn generate_text(&mut self, t: C::TextIR<T>) -> Self::Written;
fn generate_if(&mut self, i: C::IfNodeIR<T>) -> Self::Written;
fn generate_for(&mut self, f: C::ForNodeIR<T>) -> Self::Written;
fn generate_vnode(&mut self, v: C::VNodeIR<T>) -> Self::Written;
fn generate_slot_outlet(&mut self, r: C::RenderSlotIR<T>) -> Self::Written;
fn generate_v_slot(&mut self, s: C::VSlotIR<T>) -> Self::Written;
fn generate_alterable_slot(&mut self, s: C::Slot<T>) -> Self::Written;
fn generate_cache(&mut self, c: C::CacheIR<T>) -> Self::Written;
fn generate_js_expr(&mut self, e: T::JsExpression) -> Self::Written;
fn generate_comment(&mut self, c: T::CommentType) -> Self::Written;
}
pub struct CodeGen<T: ioWrite> {
option: Rc<CodeGenerateOption>,
pd: PhantomData<T>,
}
pub struct CodeGenInfo<'a, T: ioWrite> {
pub writer: T,
pub sfc_info: &'a SFCInfo<'a>,
}
impl<T: ioWrite> CodeGen<T> {
pub fn new(option: CodeGenerateOption) -> Self {
Self {
option: Rc::new(option),
pd: PhantomData,
}
}
}
impl<T: ioWrite> CodeGenerator for CodeGen<T> {
type IR<'a> = BaseRoot<'a>;
type Info<'a> = CodeGenInfo<'a, T>;
type Output = io::Result<()>;
fn generate<'a>(&self, root: BaseRoot<'a>, info: Self::Info<'a>) -> Self::Output {
let mut imp = CodeWriter::new(info.writer, self.option.clone(), info.sfc_info);
imp.generate_root(root)
.map_err(|_| imp.writer.get_io_error())
}
}
/// DecodedStr represents text after decoding html entities.
/// SmallVec and Cow are used internally for less allocation.
#[derive(Debug)]
pub struct DecodedStr<'a>(SmallVec<[Cow<'a, str>; 1]>);
impl<'a> From<&'a str> for DecodedStr<'a> {
    /// Wraps already-decoded text without allocating.
    /// Debug-asserts non-empty input: passing an empty decoded string
    /// is treated as a caller bug here.
    fn from(decoded: &'a str) -> Self {
        debug_assert!(!decoded.is_empty());
        Self(smallvec![Cow::Borrowed(decoded)])
    }
}
pub type EntityDecoder = fn(&str, bool) -> DecodedStr<'_>;
| 32.532895 | 87 | 0.598382 |
9c479b22f79307807fb3ea29773a7a4e95342cea | 5,090 | //! Tests for when multiple artifacts have the same output filename.
//! See https://github.com/rust-lang/cargo/issues/6313 for more details.
//! Ideally these should never happen, but I don't think we'll ever be able to
//! prevent all collisions.
use cargo_test_support::basic_manifest;
use cargo_test_support::project;
use std::env;
#[cargo_test]
fn collision_dylib() {
// Path dependencies don't include metadata hash in filename for dylibs.
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a", "b"]
"#,
)
.file(
"a/Cargo.toml",
r#"
[package]
name = "a"
version = "1.0.0"
[lib]
crate-type = ["dylib"]
"#,
)
.file("a/src/lib.rs", "")
.file(
"b/Cargo.toml",
r#"
[package]
name = "b"
version = "1.0.0"
[lib]
crate-type = ["dylib"]
name = "a"
"#,
)
.file("b/src/lib.rs", "")
.build();
// `j=1` is required because on Windows you'll get an error due to
// two processes writing to the file at the same time.
p.cargo("build -j=1")
.with_stderr_contains(&format!("\
[WARNING] output filename collision.
The lib target `a` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the lib target `a` in package `a v1.0.0 ([..]/foo/a)`.
Colliding filename is: [..]/foo/target/debug/deps/{}a{}
The targets should have unique names.
Consider changing their names to be unique or compiling them separately.
This may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>.
", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX))
.run();
}
#[cargo_test]
fn collision_example() {
// Examples in a workspace can easily collide.
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a", "b"]
"#,
)
.file("a/Cargo.toml", &basic_manifest("a", "1.0.0"))
.file("a/examples/ex1.rs", "fn main() {}")
.file("b/Cargo.toml", &basic_manifest("b", "1.0.0"))
.file("b/examples/ex1.rs", "fn main() {}")
.build();
// `j=1` is required because on Windows you'll get an error due to
// two processes writing to the file at the same time.
p.cargo("build --examples -j=1")
.with_stderr_contains("\
[WARNING] output filename collision.
The example target `ex1` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the example target `ex1` in package `a v1.0.0 ([..]/foo/a)`.
Colliding filename is: [..]/foo/target/debug/examples/ex1[EXE]
The targets should have unique names.
Consider changing their names to be unique or compiling them separately.
This may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>.
")
.run();
}
/// A bin and an example with the same name collide when flattened into
/// one directory by `--out-dir`; cargo must warn about the collision.
// NOTE(review): skipped under MSVC -- the reason is not documented here;
// confirm against the original cargo test suite.
#[cargo_test]
#[cfg(not(target_env = "msvc"))]
fn collision_export() {
    // `--out-dir` combines some things which can cause conflicts.
    let p = project()
        .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
        .file("examples/foo.rs", "fn main() {}")
        .file("src/main.rs", "fn main() {}")
        .build();
    // `--out-dir` is unstable, hence `-Z unstable-options` + nightly.
    p.cargo("build --out-dir=out -Z unstable-options --bins --examples")
        .masquerade_as_nightly_cargo()
        .with_stderr_contains("\
[WARNING] `--out-dir` filename collision.
The example target `foo` in package `foo v1.0.0 ([..]/foo)` has the same output filename as the bin target `foo` in package `foo v1.0.0 ([..]/foo)`.
Colliding filename is: [..]/foo/out/foo[EXE]
The exported filenames should be unique.
Consider changing their names to be unique or compiling them separately.
This may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>.
")
        .run();
}
#[cargo_test]
fn collision_doc() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
foo2 = { path = "foo2" }
"#,
)
.file("src/lib.rs", "")
.file(
"foo2/Cargo.toml",
r#"
[package]
name = "foo2"
version = "0.1.0"
[lib]
name = "foo"
"#,
)
.file("foo2/src/lib.rs", "")
.build();
p.cargo("doc")
.with_stderr_contains(
"\
[WARNING] output filename collision.
The lib target `foo` in package `foo2 v0.1.0 ([..]/foo/foo2)` has the same output \
filename as the lib target `foo` in package `foo v0.1.0 ([..]/foo)`.
Colliding filename is: [..]/foo/target/doc/foo/index.html
The targets should have unique names.
This is a known bug where multiple crates with the same name use
the same path; see <https://github.com/rust-lang/cargo/issues/6313>.
",
)
.run();
}
| 32.012579 | 152 | 0.560904 |
283334dc700043240277dcee46b221ad438d2823 | 5,953 | use crate::expression::BPyExpr;
use arrow::record_batch::RecordBatch;
use ballista::context::BallistaDataFrame;
use datafusion::logical_plan::Expr;
use futures::StreamExt;
use pyo3::exceptions::PyException;
use pyo3::{ffi::Py_uintptr_t, prelude::*};
use crate::{expression::any_to_expression, util};
use pyo3::PyErr;
use std::convert::From;
use arrow::array::ArrayRef;
#[pyclass(unsendable, module = "ballista", name = "DataFrame")]
pub struct BPyDataFrame {
pub df: BallistaDataFrame,
}
use crate::dfschema::BPyDFSchema;
use pyo3::types::{PyList, PyTuple};
#[pymethods]
impl BPyDataFrame {
#[args(columns = "*")]
pub fn select_columns(&self, columns: &PyTuple) -> PyResult<BPyDataFrame> {
let col_vec: Vec<&str> = util::tuple_to_uniform_type(columns)?;
let df = self
.df
.select_columns(col_vec.as_slice())
.map_err(util::wrap_err)?;
let ballista_df = BPyDataFrame { df };
Ok(ballista_df)
}
#[args(expr = "*")]
fn select(&self, expr: &PyTuple) -> PyResult<Self> {
let expressions: Vec<Expr> =
util::transform_tuple_to_uniform_type(expr, |e: BPyExpr| e.expr)?;
self.df
.select(expressions.as_slice())
.map(|df| df.into())
.map_err(util::wrap_err)
}
#[args(expr = "*")]
fn filter(&self, expr: &PyAny) -> PyResult<Self> {
let filter_expr = any_to_expression(expr)?;
self.df
.filter(filter_expr)
.map(|df| df.into())
.map_err(util::wrap_err)
}
fn aggregate(&self, group_expr: &PyList, aggr_expr: &PyList) -> PyResult<Self> {
let group_by = group_expr
.iter()
.map(|any| any_to_expression(any))
.collect::<PyResult<Vec<Expr>>>()?;
let aggr_by = aggr_expr
.iter()
.map(|any| any_to_expression(any))
.collect::<PyResult<Vec<Expr>>>()?;
self.df
.aggregate(group_by.as_slice(), aggr_by.as_slice())
.map(|df| df.into())
.map_err(util::wrap_err)
}
fn limit(&self, n: usize) -> PyResult<Self> {
self.df.limit(n).map(|df| df.into()).map_err(util::wrap_err)
}
fn sort(&self, expr: &PyTuple) -> PyResult<Self> {
let expressions = util::transform_tuple_to_uniform_type(expr, |e: BPyExpr| e.expr)?;
self.df
.sort(expressions.as_slice())
.map(|df| df.into())
.map_err(util::wrap_err)
}
//TODO: add join implementation to dataframe, blocked on BallistaDataFrame adding join
#[allow(unused_variables)]
#[args(join_type = "\"inner\"")]
fn join(
slf: &PyCell<BPyDataFrame>,
right: &PyCell<BPyDataFrame>,
left_cols: &PyList,
right_cols: &PyList,
join_type: &str,
) -> PyResult<Self> {
Err(PyException::new_err(
"Not implemented on ballista dataframe yet",
))
}
fn repartition(
&self,
partitioning_scheme: crate::partition::BPyPartitioning,
) -> PyResult<Self> {
self.df
.repartition(partitioning_scheme.scheme)
.map(|df| df.into())
.map_err(util::wrap_err)
}
fn collect(&self, _py: Python) -> PyResult<PyObject> {
let mut rt = tokio::runtime::Runtime::new().map_err(util::wrap_err)?;
let batches: Vec<RecordBatch> =
rt.block_on(self.async_collect()).map_err(util::wrap_err)?;
to_py(&batches)
}
fn schema(&self) -> BPyDFSchema {
self.df.schema().clone().into()
}
#[args(verbose = "false")]
fn explain(&self, verbose: bool) -> PyResult<Self> {
self.df
.explain(verbose)
.map(|df| df.into())
.map_err(util::wrap_err)
}
}
/// Converts a slice of Arrow `RecordBatch`es into a Python `list` of
/// `pyarrow.RecordBatch` objects.
pub fn to_py(batches: &[RecordBatch]) -> PyResult<PyObject> {
    let gil = pyo3::Python::acquire_gil();
    let py = gil.python();
    let pyarrow = PyModule::import(py, "pyarrow")?;
    let builtins = PyModule::import(py, "builtins")?;
    let mut py_batches = vec![];
    for batch in batches {
        py_batches.push(to_py_batch(batch, py, pyarrow)?);
    }
    // Wrap the converted batches in a Python `list` via builtins.
    let result = builtins.call1("list", (py_batches,))?;
    Ok(PyObject::from(result))
}
/// Converts one Arrow array into a `pyarrow.Array` via the Arrow C Data
/// Interface (`Array._import_from_c`), passing raw struct pointers.
pub fn to_py_array(array: &ArrayRef, py: Python) -> PyResult<PyObject> {
    // `to_raw` exports the array as FFI structs; pointers are handed to
    // pyarrow as integers. NOTE(review): presumably pyarrow assumes
    // ownership/releases the structs on import -- confirm, otherwise
    // this leaks or double-frees.
    let (array_pointer, schema_pointer) = array.to_raw().map_err(crate::util::wrap_err)?;
    let pa = py.import("pyarrow")?;
    let array = pa.getattr("Array")?.call_method1(
        "_import_from_c",
        (
            array_pointer as Py_uintptr_t,
            schema_pointer as Py_uintptr_t,
        ),
    )?;
    Ok(array.to_object(py))
}
fn to_py_batch<'a>(
batch: &RecordBatch,
py: Python,
pyarrow: &'a PyModule,
) -> Result<PyObject, PyErr> {
let mut py_arrays = vec![];
let mut py_names = vec![];
let schema = batch.schema();
for (array, field) in batch.columns().iter().zip(schema.fields().iter()) {
let array = to_py_array(array, py)?;
py_arrays.push(array);
py_names.push(field.name());
}
let record = pyarrow
.getattr("RecordBatch")?
.call_method1("from_arrays", (py_arrays, py_names))?;
Ok(PyObject::from(record))
}
impl BPyDataFrame {
async fn async_collect<'py>(&self) -> PyResult<Vec<RecordBatch>> {
let mut stream = self.df.collect().await.map_err(wrap_err)?;
let mut batches: Vec<RecordBatch> = Vec::new();
while let Some(result) = stream.next().await {
let batch = result.map_err(wrap_err)?;
batches.push(batch);
}
Ok(batches)
}
}
impl From<ballista::context::BallistaDataFrame> for BPyDataFrame {
    /// Wraps a Ballista dataframe in its Python-exposed counterpart.
    fn from(df: ballista::context::BallistaDataFrame) -> BPyDataFrame {
        BPyDataFrame { df }
    }
}
pub fn wrap_err<E: std::error::Error>(err: E) -> PyErr {
PyException::new_err(err.to_string())
}
| 29.914573 | 92 | 0.589787 |
dd51080c6e900e4cd8bc937589e16616a9e585cd | 3,739 | mod helpers;
use h::{in_directory as cwd, Playground, Stub::*};
use helpers as h;
/// The `str` plugin accepts at most one transformation flag; combining
/// `--downcase` with `--upcase` must produce a usage error.
#[test]
fn can_only_apply_one() {
    nu_error!(
        output,
        cwd("tests/fixtures/formats"),
        "open caco3_plastics.csv | first 1 | str origin --downcase --upcase"
    );
    assert!(
        output.contains("Usage: str field [--downcase|--upcase|--to-int|--replace|--find-replace]")
    );
}
#[test]
fn acts_without_passing_field() {
Playground::setup_for("plugin_str_acts_without_passing_field_test").with_files(vec![
FileWithContent(
"sample.yml",
r#"
environment:
global:
PROJECT_NAME: nushell
"#,
),
]);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_str_acts_without_passing_field_test"),
"open sample.yml | get environment.global.PROJECT_NAME | str --upcase | echo $it"
);
assert_eq!(output, "NUSHELL");
}
#[test]
fn downcases() {
Playground::setup_for("plugin_str_downcases_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[dependency]
name = "LIGHT"
"#,
)]);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_str_downcases_test"),
"open sample.toml | str dependency.name --downcase | get dependency.name | echo $it"
);
assert_eq!(output, "light");
}
#[test]
fn upcases() {
Playground::setup_for("plugin_str_upcases_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[package]
name = "nushell"
"#,
)]);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_str_upcases_test"),
"open sample.toml | str package.name --upcase | get package.name | echo $it"
);
assert_eq!(output, "NUSHELL");
}
#[test]
fn converts_to_int() {
nu!(
output,
cwd("tests/fixtures/formats"),
"open caco3_plastics.csv | first 1 | str tariff_item --to-int | where tariff_item == 2509000000 | get tariff_item | echo $it"
);
assert_eq!(output, "2509000000");
}
#[test]
fn replaces() {
Playground::setup_for("plugin_str_replaces_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[package]
name = "nushell"
"#,
)]);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_str_replaces_test"),
"open sample.toml | str package.name --replace wykittenshell | get package.name | echo $it"
);
assert_eq!(output, "wykittenshell");
}
#[test]
fn find_and_replaces() {
Playground::setup_for("plugin_str_find_and_replaces_test").with_files(vec![FileWithContent(
"sample.toml",
r#"
[fortune.teller]
phone = "1-800-KATZ"
"#,
)]);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_str_find_and_replaces_test"),
"open sample.toml | str fortune.teller.phone --find-replace KATZ \"5289\" | get fortune.teller.phone | echo $it"
);
assert_eq!(output, "1-800-5289");
}
#[test]
fn find_and_replaces_without_passing_field() {
Playground::setup_for("plugin_str_find_and_replaces_without_passing_field_test").with_files(
vec![FileWithContent(
"sample.toml",
r#"
[fortune.teller]
phone = "1-800-KATZ"
"#,
)],
);
nu!(
output,
cwd("tests/fixtures/nuplayground/plugin_str_find_and_replaces_without_passing_field_test"),
"open sample.toml | get fortune.teller.phone | str --find-replace KATZ \"5289\" | echo $it"
);
assert_eq!(output, "1-800-5289");
}
| 25.263514 | 133 | 0.584648 |
762b232ad62566a9227bf6d343a0041ad57c856f | 1,209 | //!
//! # Add Without Plus:
//!
//! Write a function that adds two numbers.
//! You should not use + or any arithmetic operators.
//!
//! Hints: #467, #544, #601, #628, #642, #664, #692, #7 12, #724
//!
/// Primary Implementation
///
/// Run a ripple-carry adder on `a` and `b`'s bits!
///
/// Primary Implementation
///
/// Software ripple-carry adder: walk all 32 bit positions, combining
/// one bit of each operand with the running carry exactly like a
/// hardware full adder. The carry out of bit 31 is discarded, so the
/// result wraps the same way two's-complement addition does.
pub fn add_without_plus(a: i32, b: i32) -> i32 {
    let (sum, _carry_out) = (0..32).fold((0i32, 0i32), |(acc, carry), bit| {
        // Extract bit `bit` of each operand.
        let x = (a >> bit) & 1;
        let y = (b >> bit) & 1;
        // Full-adder equations: XOR for the sum bit, majority vote
        // ((x & y) | (carry & (x | y))) for the carry into the next bit.
        let s = carry ^ x ^ y;
        let next = (x & y) | (carry & (x | y));
        (acc | (s << bit), next)
    });
    sum
}
/// Table-driven check of `add_without_plus`: identities, negatives,
/// powers of two, and two's-complement wrap-around.
#[test]
fn test_add_without_plus() {
    // (lhs, rhs, expected sum)
    let test_cases = [
        (0, 0, 0),
        (1, 2, 3),
        (-1, 1, 0),
        (4, 5, 9),
        (55, 11, 66),
        (1023, -1024, -1),
        (2_i32.pow(16), 2_i32.pow(16), 2_i32.pow(17)),
        // Test some wrap-around cases
        (i32::MAX, 1, -i32::MAX - 1),
        (i32::MAX, i32::MAX, -2),
    ];
    for case in test_cases {
        assert_eq!(add_without_plus(case.0, case.1), case.2);
    }
}
| 25.723404 | 64 | 0.494624 |
bb7c411df9711d972cedb9840274b679caa5dc15 | 25,392 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Server major/minor version, serialized on the wire as the literal
/// version string (e.g. "5.6").
// NOTE(review): the 5.6/5.7/8.0 values suggest MySQL versions, but the
// target service is not visible in this generated file -- confirm.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ServerVersion {
    #[serde(rename = "5.6")]
    _5_6,
    #[serde(rename = "5.7")]
    _5_7,
    #[serde(rename = "8.0")]
    _8_0,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SslEnforcement {
Enabled,
Disabled,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum MinimalTlsVersion {
#[serde(rename = "TLS1_0")]
Tls10,
#[serde(rename = "TLS1_1")]
Tls11,
#[serde(rename = "TLS1_2")]
Tls12,
#[serde(rename = "TLSEnforcementDisabled")]
TlsEnforcementDisabled,
}
/// A private endpoint connection attached to a server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerPrivateEndpointConnection {
    /// Resource id; never serialized in requests (read-only).
    #[serde(skip_serializing)]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateEndpointConnectionProperties>,
}
/// Properties of a private endpoint connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
    #[serde(rename = "privateEndpoint", skip_serializing_if = "Option::is_none")]
    pub private_endpoint: Option<PrivateEndpointProperty>,
    #[serde(rename = "privateLinkServiceConnectionState", skip_serializing_if = "Option::is_none")]
    pub private_link_service_connection_state: Option<PrivateLinkServiceConnectionStateProperty>,
    /// Provisioning state; never serialized in requests (read-only).
    #[serde(rename = "provisioningState", skip_serializing)]
    pub provisioning_state: Option<private_endpoint_connection_properties::ProvisioningState>,
}
/// Enumerations scoped to [`PrivateEndpointConnectionProperties`].
pub mod private_endpoint_connection_properties {
    use super::*;
    /// Provisioning lifecycle of a private endpoint connection.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Approving,
        Ready,
        Dropping,
        Failed,
        Rejecting,
    }
}
/// Reference (by id) to a private endpoint resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointProperty {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Approval state of a private link service connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionStateProperty {
    pub status: private_link_service_connection_state_property::Status,
    pub description: String,
    /// Never serialized in requests (read-only).
    #[serde(rename = "actionsRequired", skip_serializing)]
    pub actions_required: Option<private_link_service_connection_state_property::ActionsRequired>,
}
/// Enumerations scoped to [`PrivateLinkServiceConnectionStateProperty`].
pub mod private_link_service_connection_state_property {
    use super::*;
    /// Approval status of the connection.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Approved,
        Pending,
        Rejected,
        Disconnected,
    }
    /// Actions required on the connection (only `None` is defined).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ActionsRequired {
        None,
    }
}
/// Properties of an existing database server as returned by the service.
/// Fields with `skip_serializing` are never sent in requests (read-only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerProperties {
    #[serde(rename = "administratorLogin", skip_serializing_if = "Option::is_none")]
    pub administrator_login: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<ServerVersion>,
    #[serde(rename = "sslEnforcement", skip_serializing_if = "Option::is_none")]
    pub ssl_enforcement: Option<SslEnforcement>,
    #[serde(rename = "minimalTlsVersion", skip_serializing_if = "Option::is_none")]
    pub minimal_tls_version: Option<MinimalTlsVersion>,
    /// Read-only; never serialized in requests.
    #[serde(rename = "byokEnforcement", skip_serializing)]
    pub byok_enforcement: Option<String>,
    #[serde(rename = "infrastructureEncryption", skip_serializing_if = "Option::is_none")]
    pub infrastructure_encryption: Option<server_properties::InfrastructureEncryption>,
    #[serde(rename = "userVisibleState", skip_serializing_if = "Option::is_none")]
    pub user_visible_state: Option<server_properties::UserVisibleState>,
    #[serde(rename = "fullyQualifiedDomainName", skip_serializing_if = "Option::is_none")]
    pub fully_qualified_domain_name: Option<String>,
    #[serde(rename = "earliestRestoreDate", skip_serializing_if = "Option::is_none")]
    pub earliest_restore_date: Option<String>,
    #[serde(rename = "storageProfile", skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<StorageProfile>,
    /// Replication role of the server (presumably "Master"/"Replica"-style
    /// strings — not constrained by this model; confirm against the API spec).
    #[serde(rename = "replicationRole", skip_serializing_if = "Option::is_none")]
    pub replication_role: Option<String>,
    #[serde(rename = "masterServerId", skip_serializing_if = "Option::is_none")]
    pub master_server_id: Option<String>,
    #[serde(rename = "replicaCapacity", skip_serializing_if = "Option::is_none")]
    pub replica_capacity: Option<i32>,
    #[serde(rename = "publicNetworkAccess", skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<server_properties::PublicNetworkAccess>,
    /// Read-only list; never serialized in requests.
    #[serde(rename = "privateEndpointConnections", skip_serializing)]
    pub private_endpoint_connections: Vec<ServerPrivateEndpointConnection>,
}
/// Enumerations scoped to [`ServerProperties`].
pub mod server_properties {
    use super::*;
    /// Whether infrastructure (double) encryption is enabled.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum InfrastructureEncryption {
        Enabled,
        Disabled,
    }
    /// User-visible operational state of the server.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum UserVisibleState {
        Ready,
        Dropping,
        Disabled,
        Inaccessible,
    }
    /// Whether the server is reachable over the public network.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicNetworkAccess {
        Enabled,
        Disabled,
    }
}
/// Storage-related settings of a server (backup retention, size, autogrow).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageProfile {
    #[serde(rename = "backupRetentionDays", skip_serializing_if = "Option::is_none")]
    pub backup_retention_days: Option<i32>,
    #[serde(rename = "geoRedundantBackup", skip_serializing_if = "Option::is_none")]
    pub geo_redundant_backup: Option<storage_profile::GeoRedundantBackup>,
    /// Storage size in megabytes (per the `storageMB` wire name).
    #[serde(rename = "storageMB", skip_serializing_if = "Option::is_none")]
    pub storage_mb: Option<i32>,
    #[serde(rename = "storageAutogrow", skip_serializing_if = "Option::is_none")]
    pub storage_autogrow: Option<storage_profile::StorageAutogrow>,
}
/// Enumerations scoped to [`StorageProfile`].
pub mod storage_profile {
    use super::*;
    /// Whether backups are geo-redundant.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum GeoRedundantBackup {
        Enabled,
        Disabled,
    }
    /// Whether storage auto-growth is enabled.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum StorageAutogrow {
        Enabled,
        Disabled,
    }
}
/// Common properties used when creating a server; specialized per create
/// mode by the `ServerPropertiesFor*` structs below (which flatten this).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerPropertiesForCreate {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<ServerVersion>,
    #[serde(rename = "sslEnforcement", skip_serializing_if = "Option::is_none")]
    pub ssl_enforcement: Option<SslEnforcement>,
    #[serde(rename = "minimalTlsVersion", skip_serializing_if = "Option::is_none")]
    pub minimal_tls_version: Option<MinimalTlsVersion>,
    #[serde(rename = "storageProfile", skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<StorageProfile>,
    /// Required discriminator selecting which creation flavor is used.
    #[serde(rename = "createMode")]
    pub create_mode: server_properties_for_create::CreateMode,
}
/// Enumerations scoped to [`ServerPropertiesForCreate`].
pub mod server_properties_for_create {
    use super::*;
    /// How a server is created: fresh, restored, geo-restored, or as replica.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreateMode {
        Default,
        PointInTimeRestore,
        GeoRestore,
        Replica,
    }
}
/// Create payload for a brand-new server (requires admin credentials).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerPropertiesForDefaultCreate {
    #[serde(flatten)]
    pub server_properties_for_create: ServerPropertiesForCreate,
    #[serde(rename = "administratorLogin")]
    pub administrator_login: String,
    #[serde(rename = "administratorLoginPassword")]
    pub administrator_login_password: String,
}
/// Create payload for point-in-time restore from an existing server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerPropertiesForRestore {
    #[serde(flatten)]
    pub server_properties_for_create: ServerPropertiesForCreate,
    #[serde(rename = "sourceServerId")]
    pub source_server_id: String,
    #[serde(rename = "restorePointInTime")]
    pub restore_point_in_time: String,
}
/// Create payload for geo-restore from an existing server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerPropertiesForGeoRestore {
    #[serde(flatten)]
    pub server_properties_for_create: ServerPropertiesForCreate,
    #[serde(rename = "sourceServerId")]
    pub source_server_id: String,
}
/// Create payload for a read replica of an existing server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerPropertiesForReplica {
    #[serde(flatten)]
    pub server_properties_for_create: ServerPropertiesForCreate,
    #[serde(rename = "sourceServerId")]
    pub source_server_id: String,
}
/// Billing SKU of a server (pricing tier, compute capacity, hardware family).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tier: Option<sku::Tier>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub capacity: Option<i32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub size: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub family: Option<String>,
}
/// Enumerations scoped to [`Sku`].
pub mod sku {
    use super::*;
    /// Pricing tier of the SKU.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Tier {
        Basic,
        GeneralPurpose,
        MemoryOptimized,
    }
}
/// Managed identity attached to a resource; ids are read-only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceIdentity {
    /// Never serialized in requests (read-only).
    #[serde(rename = "principalId", skip_serializing)]
    pub principal_id: Option<String>,
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub type_: Option<resource_identity::Type>,
    /// Never serialized in requests (read-only).
    #[serde(rename = "tenantId", skip_serializing)]
    pub tenant_id: Option<String>,
}
/// Enumerations scoped to [`ResourceIdentity`].
pub mod resource_identity {
    use super::*;
    /// Identity type (only `SystemAssigned` is defined).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
    }
}
/// A database server resource (tracked resource + identity, SKU, properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Server {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub identity: Option<ResourceIdentity>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<ServerProperties>,
}
/// Request body for creating a server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerForCreate {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    pub properties: ServerPropertiesForCreate,
    pub location: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Request body for updating a server; every field is optional (patch-style).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerUpdateParameters {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<server_update_parameters::Properties>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Types scoped to [`ServerUpdateParameters`].
pub mod server_update_parameters {
    use super::*;
    /// Updatable subset of server properties.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "storageProfile", skip_serializing_if = "Option::is_none")]
        pub storage_profile: Option<StorageProfile>,
        #[serde(rename = "administratorLoginPassword", skip_serializing_if = "Option::is_none")]
        pub administrator_login_password: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub version: Option<ServerVersion>,
        #[serde(rename = "sslEnforcement", skip_serializing_if = "Option::is_none")]
        pub ssl_enforcement: Option<SslEnforcement>,
        #[serde(rename = "minimalTlsVersion", skip_serializing_if = "Option::is_none")]
        pub minimal_tls_version: Option<MinimalTlsVersion>,
        #[serde(rename = "replicationRole", skip_serializing_if = "Option::is_none")]
        pub replication_role: Option<String>,
    }
}
/// List of servers returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Server>,
}
/// IP range of a firewall rule; both endpoints are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FirewallRuleProperties {
    #[serde(rename = "startIpAddress")]
    pub start_ip_address: String,
    #[serde(rename = "endIpAddress")]
    pub end_ip_address: String,
}
/// A server firewall rule resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FirewallRule {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: FirewallRuleProperties,
}
/// List of firewall rules returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FirewallRuleListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<FirewallRule>,
}
/// Properties of a virtual network rule (subnet-based access rule).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkRuleProperties {
    #[serde(rename = "virtualNetworkSubnetId")]
    pub virtual_network_subnet_id: String,
    #[serde(rename = "ignoreMissingVnetServiceEndpoint", skip_serializing_if = "Option::is_none")]
    pub ignore_missing_vnet_service_endpoint: Option<bool>,
    /// Never serialized in requests (read-only).
    #[serde(skip_serializing)]
    pub state: Option<virtual_network_rule_properties::State>,
}
/// Enumerations scoped to [`VirtualNetworkRuleProperties`].
pub mod virtual_network_rule_properties {
    use super::*;
    /// Lifecycle state of a virtual network rule.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Initializing,
        InProgress,
        Ready,
        Deleting,
        Unknown,
    }
}
/// A virtual network rule resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkRule {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualNetworkRuleProperties>,
}
/// Paged list of virtual network rules; both fields are read-only
/// (never serialized), unlike the other `*ListResult` types here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkRuleListResult {
    #[serde(skip_serializing)]
    pub value: Vec<VirtualNetworkRule>,
    /// Link to the next page of results, if any.
    #[serde(rename = "nextLink", skip_serializing)]
    pub next_link: Option<String>,
}
/// Properties of a database (character set and collation).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseProperties {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub charset: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub collation: Option<String>,
}
/// A database resource on a server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Database {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<DatabaseProperties>,
}
/// List of databases returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Database>,
}
/// A server configuration setting; only `value` and `source` are writable,
/// the metadata fields are read-only (never serialized).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConfigurationProperties {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(skip_serializing)]
    pub description: Option<String>,
    #[serde(rename = "defaultValue", skip_serializing)]
    pub default_value: Option<String>,
    #[serde(rename = "dataType", skip_serializing)]
    pub data_type: Option<String>,
    #[serde(rename = "allowedValues", skip_serializing)]
    pub allowed_values: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub source: Option<String>,
}
/// A configuration resource on a server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Configuration {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<ConfigurationProperties>,
}
/// List of configurations returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConfigurationListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Configuration>,
}
/// Localized display text for an API operation; all fields are read-only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplay {
    #[serde(skip_serializing)]
    pub provider: Option<String>,
    #[serde(skip_serializing)]
    pub resource: Option<String>,
    #[serde(skip_serializing)]
    pub operation: Option<String>,
    #[serde(skip_serializing)]
    pub description: Option<String>,
}
/// An API operation exposed by the resource provider; read-only metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(skip_serializing)]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplay>,
    #[serde(skip_serializing)]
    pub origin: Option<operation::Origin>,
    /// Free-form extra properties; never serialized in requests.
    #[serde(skip_serializing)]
    pub properties: Option<serde_json::Value>,
}
/// Enumerations scoped to [`Operation`].
pub mod operation {
    use super::*;
    /// Who the operation is intended for; lowercase wire names via `rename`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Origin {
        NotSpecified,
        #[serde(rename = "user")]
        User,
        #[serde(rename = "system")]
        System,
    }
}
/// List of available API operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
/// Properties of a server log file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogFileProperties {
    /// Size in kilobytes (per the `sizeInKB` wire name).
    #[serde(rename = "sizeInKB", skip_serializing_if = "Option::is_none")]
    pub size_in_kb: Option<i64>,
    #[serde(rename = "createdTime", skip_serializing)]
    pub created_time: Option<String>,
    #[serde(rename = "lastModifiedTime", skip_serializing)]
    pub last_modified_time: Option<String>,
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    /// Download URL of the log file.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
}
/// A server log file resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogFile {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<LogFileProperties>,
}
/// List of log files returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogFileListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<LogFile>,
}
/// A service level objective (compute/storage limits) within a tier.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PerformanceTierServiceLevelObjectives {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub edition: Option<String>,
    #[serde(rename = "vCore", skip_serializing_if = "Option::is_none")]
    pub v_core: Option<i32>,
    #[serde(rename = "hardwareGeneration", skip_serializing_if = "Option::is_none")]
    pub hardware_generation: Option<String>,
    #[serde(rename = "maxBackupRetentionDays", skip_serializing_if = "Option::is_none")]
    pub max_backup_retention_days: Option<i32>,
    #[serde(rename = "minBackupRetentionDays", skip_serializing_if = "Option::is_none")]
    pub min_backup_retention_days: Option<i32>,
    #[serde(rename = "maxStorageMB", skip_serializing_if = "Option::is_none")]
    pub max_storage_mb: Option<i32>,
    #[serde(rename = "minStorageMB", skip_serializing_if = "Option::is_none")]
    pub min_storage_mb: Option<i32>,
}
/// A performance tier with its limits and service level objectives.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PerformanceTierProperties {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "maxBackupRetentionDays", skip_serializing_if = "Option::is_none")]
    pub max_backup_retention_days: Option<i32>,
    #[serde(rename = "minBackupRetentionDays", skip_serializing_if = "Option::is_none")]
    pub min_backup_retention_days: Option<i32>,
    #[serde(rename = "maxStorageMB", skip_serializing_if = "Option::is_none")]
    pub max_storage_mb: Option<i32>,
    #[serde(rename = "minLargeStorageMB", skip_serializing_if = "Option::is_none")]
    pub min_large_storage_mb: Option<i32>,
    #[serde(rename = "maxLargeStorageMB", skip_serializing_if = "Option::is_none")]
    pub max_large_storage_mb: Option<i32>,
    #[serde(rename = "minStorageMB", skip_serializing_if = "Option::is_none")]
    pub min_storage_mb: Option<i32>,
    #[serde(rename = "serviceLevelObjectives", skip_serializing_if = "Vec::is_empty")]
    pub service_level_objectives: Vec<PerformanceTierServiceLevelObjectives>,
}
/// List of performance tiers returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PerformanceTierListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PerformanceTierProperties>,
}
/// Request to check whether a resource name is available.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailabilityRequest {
    pub name: String,
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Response of a name-availability check.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailability {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(rename = "nameAvailable", skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
}
/// Threat-detection (security alert) policy for a server.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SecurityAlertPolicyProperties {
    pub state: security_alert_policy_properties::State,
    #[serde(rename = "disabledAlerts", skip_serializing_if = "Vec::is_empty")]
    pub disabled_alerts: Vec<String>,
    #[serde(rename = "emailAddresses", skip_serializing_if = "Vec::is_empty")]
    pub email_addresses: Vec<String>,
    #[serde(rename = "emailAccountAdmins", skip_serializing_if = "Option::is_none")]
    pub email_account_admins: Option<bool>,
    #[serde(rename = "storageEndpoint", skip_serializing_if = "Option::is_none")]
    pub storage_endpoint: Option<String>,
    #[serde(rename = "storageAccountAccessKey", skip_serializing_if = "Option::is_none")]
    pub storage_account_access_key: Option<String>,
    #[serde(rename = "retentionDays", skip_serializing_if = "Option::is_none")]
    pub retention_days: Option<i32>,
}
/// Enumerations scoped to [`SecurityAlertPolicyProperties`].
pub mod security_alert_policy_properties {
    use super::*;
    /// Whether the security alert policy is enabled.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Enabled,
        Disabled,
    }
}
/// A server security alert policy resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerSecurityAlertPolicy {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<SecurityAlertPolicyProperties>,
}
/// Top-level error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<CloudErrorBody>,
}
/// Error details; `details` nests further errors recursively.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudErrorBody {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<CloudErrorBody>,
}
/// Directory administrator assignment for a server; all fields required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerAdministratorProperties {
    #[serde(rename = "administratorType")]
    pub administrator_type: server_administrator_properties::AdministratorType,
    pub login: String,
    /// Security identifier of the administrator principal.
    pub sid: String,
    #[serde(rename = "tenantId")]
    pub tenant_id: String,
}
/// Enumerations scoped to [`ServerAdministratorProperties`].
pub mod server_administrator_properties {
    use super::*;
    /// Administrator type (only `ActiveDirectory` is defined).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AdministratorType {
        ActiveDirectory,
    }
}
/// A server administrator resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerAdministratorResource {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<ServerAdministratorProperties>,
}
/// List of server administrators returned by a list operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerAdministratorResourceListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ServerAdministratorResource>,
}
/// Restore metadata of a recoverable server; all fields are read-only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecoverableServerProperties {
    #[serde(rename = "lastAvailableBackupDateTime", skip_serializing)]
    pub last_available_backup_date_time: Option<String>,
    #[serde(rename = "serviceLevelObjective", skip_serializing)]
    pub service_level_objective: Option<String>,
    #[serde(skip_serializing)]
    pub edition: Option<String>,
    #[serde(rename = "vCore", skip_serializing)]
    pub v_core: Option<i32>,
    #[serde(rename = "hardwareGeneration", skip_serializing)]
    pub hardware_generation: Option<String>,
    #[serde(skip_serializing)]
    pub version: Option<String>,
}
/// A recoverable server resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecoverableServerResource {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<RecoverableServerProperties>,
}
/// A resource pinned to a region (`location`) and carrying optional tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    pub location: String,
}
/// Common resource identity fields; all read-only (never serialized).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(skip_serializing)]
    pub id: Option<String>,
    #[serde(skip_serializing)]
    pub name: Option<String>,
    #[serde(rename = "type", skip_serializing)]
    pub type_: Option<String>,
}
/// A resource with identity fields only (no location or tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
| 39.428571 | 99 | 0.71365 |
b9f1dd1663eee4de658b95efab064ad54e36776b | 19,714 | use crate::creader::{CStore, LoadedMacro};
use crate::foreign_modules;
use crate::link_args;
use crate::native_libs;
use crate::rmeta::{self, encoder};
use rustc::hir::exports::Export;
use rustc::middle::cstore::{CrateSource, CrateStore, EncodedMetadata, NativeLibraryKind};
use rustc::middle::exported_symbols::ExportedSymbol;
use rustc::middle::stability::DeprecationEntry;
use rustc::ty::query::Providers;
use rustc::ty::query::QueryConfig;
use rustc::ty::{self, TyCtxt};
use rustc_ast::ast;
use rustc_ast::attr;
use rustc_ast::expand::allocator::AllocatorKind;
use rustc_data_structures::svh::Svh;
use rustc_hir as hir;
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_hir::definitions::DefPathTable;
use rustc_hir::definitions::{DefKey, DefPath, DefPathHash};
use rustc_session::{CrateDisambiguator, Session};
use rustc_span::source_map::{self, Span, Spanned};
use rustc_span::symbol::Symbol;
use rustc_data_structures::sync::Lrc;
use smallvec::SmallVec;
use std::any::Any;
use std::sync::Arc;
/// Generates `provide_extern`: each `$name => $compute` arm becomes a query
/// provider function that asserts the queried `DefId` is non-local, fetches
/// the decoded metadata (`$cdata`) for its crate, and evaluates `$compute`.
macro_rules! provide {
    (<$lt:tt> $tcx:ident, $def_id:ident, $other:ident, $cdata:ident,
      $($name:ident => $compute:block)*) => {
        pub fn provide_extern<$lt>(providers: &mut Providers<$lt>) {
            // HACK(eddyb) `$lt: $lt` forces `$lt` to be early-bound, which
            // allows the associated type in the return type to be normalized.
            $(fn $name<$lt: $lt, T: IntoArgs>(
                $tcx: TyCtxt<$lt>,
                def_id_arg: T,
            ) -> <ty::queries::$name<$lt> as QueryConfig<$lt>>::Value {
                let _prof_timer =
                    $tcx.prof.generic_activity("metadata_decode_entry");
                #[allow(unused_variables)]
                let ($def_id, $other) = def_id_arg.into_args();
                assert!(!$def_id.is_local());
                let $cdata = CStore::from_tcx($tcx).get_crate_data($def_id.krate);
                // Record a read of the crate's dep-node so incremental
                // compilation tracks this access to foreign metadata.
                if $tcx.dep_graph.is_fully_enabled() {
                    let crate_dep_node_index = $cdata.get_crate_dep_node_index($tcx);
                    $tcx.dep_graph.read_index(crate_dep_node_index);
                }
                $compute
            })*
            // Install the generated functions, keeping any providers that
            // were already set and are not named here.
            *providers = Providers {
                $($name,)*
                ..*providers
            };
        }
    }
}
// small trait to work around different signature queries all being defined via
// the macro above.
trait IntoArgs {
    /// Normalizes the query key into the `($def_id, $other)` pair the
    /// `provide!`-generated functions destructure.
    fn into_args(self) -> (DefId, DefId);
}
impl IntoArgs for DefId {
    /// A bare `DefId` fills both halves of the argument pair.
    fn into_args(self) -> (DefId, DefId) {
        let id = self;
        (id, id)
    }
}
impl IntoArgs for CrateNum {
    /// A crate number maps to its root `DefId` in both slots.
    fn into_args(self) -> (DefId, DefId) {
        let crate_root = self.as_def_id();
        (crate_root, crate_root)
    }
}
impl IntoArgs for (CrateNum, DefId) {
    /// The crate number becomes the first `DefId`; the second passes through.
    fn into_args(self) -> (DefId, DefId) {
        let (krate, def_id) = self;
        (krate.as_def_id(), def_id)
    }
}
// Providers for queries about *foreign* crates, answered by decoding their
// metadata. `def_id`/`other` come from `IntoArgs::into_args`; `cdata` is the
// decoded metadata for `def_id.krate` (see the `provide!` macro above).
provide! { <'tcx> tcx, def_id, other, cdata,
    // Per-item queries decoded from the metadata tables.
    type_of => { cdata.get_type(def_id.index, tcx) }
    generics_of => {
        tcx.arena.alloc(cdata.get_generics(def_id.index, tcx.sess))
    }
    explicit_predicates_of => { cdata.get_explicit_predicates(def_id.index, tcx) }
    inferred_outlives_of => { cdata.get_inferred_outlives(def_id.index, tcx) }
    super_predicates_of => { cdata.get_super_predicates(def_id.index, tcx) }
    trait_def => {
        tcx.arena.alloc(cdata.get_trait_def(def_id.index, tcx.sess))
    }
    adt_def => { cdata.get_adt_def(def_id.index, tcx) }
    adt_destructor => {
        let _ = cdata;
        tcx.calculate_dtor(def_id, &mut |_,_| Ok(()))
    }
    variances_of => { tcx.arena.alloc_from_iter(cdata.get_item_variances(def_id.index)) }
    associated_item_def_ids => {
        let mut result = SmallVec::<[_; 8]>::new();
        cdata.each_child_of_item(def_id.index,
          |child| result.push(child.res.def_id()), tcx.sess);
        tcx.arena.alloc_slice(&result)
    }
    associated_item => { cdata.get_associated_item(def_id.index, tcx.sess) }
    impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) }
    impl_polarity => { cdata.get_impl_polarity(def_id.index) }
    coerce_unsized_info => {
        cdata.get_coerce_unsized_info(def_id.index).unwrap_or_else(|| {
            bug!("coerce_unsized_info: `{:?}` is missing its info", def_id);
        })
    }
    optimized_mir => { tcx.arena.alloc(cdata.get_optimized_mir(tcx, def_id.index)) }
    promoted_mir => { tcx.arena.alloc(cdata.get_promoted_mir(tcx, def_id.index)) }
    mir_const_qualif => { cdata.mir_const_qualif(def_id.index) }
    fn_sig => { cdata.fn_sig(def_id.index, tcx) }
    inherent_impls => { cdata.get_inherent_implementations_for_type(tcx, def_id.index) }
    is_const_fn_raw => { cdata.is_const_fn_raw(def_id.index) }
    asyncness => { cdata.asyncness(def_id.index) }
    is_foreign_item => { cdata.is_foreign_item(def_id.index) }
    static_mutability => { cdata.static_mutability(def_id.index) }
    generator_kind => { cdata.generator_kind(def_id.index) }
    def_kind => { cdata.def_kind(def_id.index) }
    def_span => { cdata.get_span(def_id.index, &tcx.sess) }
    lookup_stability => {
        cdata.get_stability(def_id.index).map(|s| tcx.intern_stability(s))
    }
    lookup_const_stability => {
        cdata.get_const_stability(def_id.index).map(|s| tcx.intern_const_stability(s))
    }
    lookup_deprecation_entry => {
        cdata.get_deprecation(def_id.index).map(DeprecationEntry::external)
    }
    item_attrs => { cdata.get_item_attrs(def_id.index, tcx.sess) }
    // FIXME(#38501) We've skipped a `read` on the `hir_owner_nodes` of
    // a `fn` when encoding, so the dep-tracking wouldn't work.
    // This is only used by rustdoc anyway, which shouldn't have
    // incremental recompilation ever enabled.
    fn_arg_names => { cdata.get_fn_param_names(def_id.index) }
    rendered_const => { cdata.get_rendered_const(def_id.index) }
    impl_parent => { cdata.get_parent_impl(def_id.index) }
    trait_of_item => { cdata.get_trait_of_item(def_id.index) }
    is_mir_available => { cdata.is_item_mir_available(def_id.index) }
    dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) }
    // Crate-level flags read straight off the decoded metadata root.
    is_panic_runtime => { cdata.root.panic_runtime }
    is_compiler_builtins => { cdata.root.compiler_builtins }
    has_global_allocator => { cdata.root.has_global_allocator }
    has_panic_handler => { cdata.root.has_panic_handler }
    is_profiler_runtime => { cdata.root.profiler_runtime }
    panic_strategy => { cdata.root.panic_strategy }
    extern_crate => {
        let r = *cdata.extern_crate.lock();
        r.map(|c| &*tcx.arena.alloc(c))
    }
    is_no_builtins => { cdata.root.no_builtins }
    symbol_mangling_version => { cdata.root.symbol_mangling_version }
    impl_defaultness => { cdata.get_impl_defaultness(def_id.index) }
    reachable_non_generics => {
        // Filter the crate's exported symbols down to the non-generic ones.
        let reachable_non_generics = tcx
            .exported_symbols(cdata.cnum)
            .iter()
            .filter_map(|&(exported_symbol, export_level)| {
                if let ExportedSymbol::NonGeneric(def_id) = exported_symbol {
                    Some((def_id, export_level))
                } else {
                    None
                }
            })
            .collect();
        tcx.arena.alloc(reachable_non_generics)
    }
    native_libraries => { Lrc::new(cdata.get_native_libraries(tcx.sess)) }
    foreign_modules => { cdata.get_foreign_modules(tcx) }
    plugin_registrar_fn => {
        cdata.root.plugin_registrar_fn.map(|index| {
            DefId { krate: def_id.krate, index }
        })
    }
    proc_macro_decls_static => {
        cdata.root.proc_macro_decls_static.map(|index| {
            DefId { krate: def_id.krate, index }
        })
    }
    crate_disambiguator => { cdata.root.disambiguator }
    crate_hash => { cdata.root.hash }
    crate_host_hash => { cdata.host_hash }
    original_crate_name => { cdata.root.name }
    extra_filename => { cdata.root.extra_filename.clone() }
    implementations_of_trait => {
        cdata.get_implementations_for_trait(tcx, Some(other))
    }
    all_trait_implementations => {
        cdata.get_implementations_for_trait(tcx, None)
    }
    visibility => { cdata.get_visibility(def_id.index) }
    dep_kind => {
        let r = *cdata.dep_kind.lock();
        r
    }
    crate_name => { cdata.root.name }
    item_children => {
        let mut result = SmallVec::<[_; 8]>::new();
        cdata.each_child_of_item(def_id.index, |child| result.push(child), tcx.sess);
        tcx.arena.alloc_slice(&result)
    }
    defined_lib_features => { cdata.get_lib_features(tcx) }
    defined_lang_items => { cdata.get_lang_items(tcx) }
    diagnostic_items => { cdata.get_diagnostic_items(tcx) }
    missing_lang_items => { cdata.get_missing_lang_items(tcx) }
    missing_extern_crate_item => {
        // True when the crate was pulled in only indirectly (no direct
        // `extern crate`-style dependency edge from the local crate).
        let r = match *cdata.extern_crate.borrow() {
            Some(extern_crate) if !extern_crate.is_direct() => true,
            _ => false,
        };
        r
    }
    used_crate_source => { Lrc::new(cdata.source.clone()) }
    exported_symbols => {
        let syms = cdata.exported_symbols(tcx);
        // FIXME rust-lang/rust#64319, rust-lang/rust#64872: We want
        // to block export of generics from dylibs, but we must fix
        // rust-lang/rust#65890 before we can do that robustly.
        Arc::new(syms)
    }
}
/// Installs providers (implemented here, in the metadata crate) for queries
/// about the *local* crate: native libraries, foreign modules, link args,
/// the visible-parent map, dependency formats, and crate postorder.
pub fn provide(providers: &mut Providers<'_>) {
    // FIXME(#44234) - almost all of these queries have no sub-queries and
    // therefore no actual inputs, they're just reading tables calculated in
    // resolve! Does this work? Unsure! That's what the issue is about
    *providers = Providers {
        is_dllimport_foreign_item: |tcx, id| match tcx.native_library_kind(id) {
            Some(NativeLibraryKind::NativeUnknown) | Some(NativeLibraryKind::NativeRawDylib) => {
                true
            }
            _ => false,
        },
        is_statically_included_foreign_item: |tcx, id| match tcx.native_library_kind(id) {
            Some(NativeLibraryKind::NativeStatic)
            | Some(NativeLibraryKind::NativeStaticNobundle) => true,
            _ => false,
        },
        // Finds the native library whose foreign module contains `id`.
        native_library_kind: |tcx, id| {
            tcx.native_libraries(id.krate)
                .iter()
                .filter(|lib| native_libs::relevant_lib(&tcx.sess, lib))
                .find(|lib| {
                    let fm_id = match lib.foreign_module {
                        Some(id) => id,
                        None => return false,
                    };
                    tcx.foreign_modules(id.krate)
                        .iter()
                        .find(|m| m.def_id == fm_id)
                        .expect("failed to find foreign module")
                        .foreign_items
                        .contains(&id)
                })
                .map(|l| l.kind)
        },
        native_libraries: |tcx, cnum| {
            assert_eq!(cnum, LOCAL_CRATE);
            Lrc::new(native_libs::collect(tcx))
        },
        foreign_modules: |tcx, cnum| {
            assert_eq!(cnum, LOCAL_CRATE);
            &tcx.arena.alloc(foreign_modules::collect(tcx))[..]
        },
        link_args: |tcx, cnum| {
            assert_eq!(cnum, LOCAL_CRATE);
            Lrc::new(link_args::collect(tcx))
        },
        // Returns a map from a sufficiently visible external item (i.e., an
        // external item that is visible from at least one local module) to a
        // sufficiently visible parent (considering modules that re-export the
        // external item to be parents).
        visible_parent_map: |tcx, cnum| {
            use std::collections::hash_map::Entry;
            use std::collections::vec_deque::VecDeque;
            assert_eq!(cnum, LOCAL_CRATE);
            let mut visible_parent_map: DefIdMap<DefId> = Default::default();
            // Issue 46112: We want the map to prefer the shortest
            // paths when reporting the path to an item. Therefore we
            // build up the map via a breadth-first search (BFS),
            // which naturally yields minimal-length paths.
            //
            // Note that it needs to be a BFS over the whole forest of
            // crates, not just each individual crate; otherwise you
            // only get paths that are locally minimal with respect to
            // whatever crate we happened to encounter first in this
            // traversal, but not globally minimal across all crates.
            let bfs_queue = &mut VecDeque::new();
            // Preferring shortest paths alone does not guarantee a
            // deterministic result; so sort by crate num to avoid
            // hashtable iteration non-determinism. This only makes
            // things as deterministic as crate-nums assignment is,
            // which is to say, its not deterministic in general. But
            // we believe that libstd is consistently assigned crate
            // num 1, so it should be enough to resolve #46112.
            let mut crates: Vec<CrateNum> = (*tcx.crates()).to_owned();
            crates.sort();
            // Seed the BFS with each crate's root module.
            for &cnum in crates.iter() {
                // Ignore crates without a corresponding local `extern crate` item.
                if tcx.missing_extern_crate_item(cnum) {
                    continue;
                }
                bfs_queue.push_back(DefId { krate: cnum, index: CRATE_DEF_INDEX });
            }
            // (restrict scope of mutable-borrow of `visible_parent_map`)
            {
                let visible_parent_map = &mut visible_parent_map;
                let mut add_child =
                    |bfs_queue: &mut VecDeque<_>, child: &Export<hir::HirId>, parent: DefId| {
                        if child.vis != ty::Visibility::Public {
                            return;
                        }
                        if let Some(child) = child.res.opt_def_id() {
                            match visible_parent_map.entry(child) {
                                Entry::Occupied(mut entry) => {
                                    // If `child` is defined in crate `cnum`, ensure
                                    // that it is mapped to a parent in `cnum`.
                                    if child.krate == cnum && entry.get().krate != cnum {
                                        entry.insert(parent);
                                    }
                                }
                                Entry::Vacant(entry) => {
                                    entry.insert(parent);
                                    bfs_queue.push_back(child);
                                }
                            }
                        }
                    };
                while let Some(def) = bfs_queue.pop_front() {
                    for child in tcx.item_children(def).iter() {
                        add_child(bfs_queue, child, def);
                    }
                }
            }
            tcx.arena.alloc(visible_parent_map)
        },
        dependency_formats: |tcx, cnum| {
            assert_eq!(cnum, LOCAL_CRATE);
            Lrc::new(crate::dependency_format::calculate(tcx))
        },
        has_global_allocator: |tcx, cnum| {
            assert_eq!(cnum, LOCAL_CRATE);
            CStore::from_tcx(tcx).has_global_allocator()
        },
        postorder_cnums: |tcx, cnum| {
            assert_eq!(cnum, LOCAL_CRATE);
            tcx.arena.alloc_slice(&CStore::from_tcx(tcx).crate_dependencies_in_postorder(cnum))
        },
        // Keep any providers not overridden above.
        ..*providers
    };
}
impl CStore {
    /// Looks up the field names of an external struct definition.
    pub fn struct_field_names_untracked(&self, def: DefId, sess: &Session) -> Vec<Spanned<Symbol>> {
        let crate_data = self.get_crate_data(def.krate);
        crate_data.get_struct_field_names(def.index, sess)
    }
    /// Collects all children of the given external item into a vector.
    pub fn item_children_untracked(
        &self,
        def_id: DefId,
        sess: &Session,
    ) -> Vec<Export<hir::HirId>> {
        let mut children = Vec::new();
        let crate_data = self.get_crate_data(def_id.krate);
        crate_data.each_child_of_item(def_id.index, |child| children.push(child), sess);
        children
    }
    /// Decodes a macro definition (or proc macro) from crate metadata.
    pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
        let _prof_timer = sess.prof.generic_activity("metadata_load_macro");

        let crate_data = self.get_crate_data(id.krate);
        if crate_data.root.is_proc_macro_crate() {
            return LoadedMacro::ProcMacro(crate_data.load_proc_macro(id.index, sess));
        }

        let span = crate_data.get_span(id.index, sess);

        // Mark the attrs as used
        let attrs = crate_data.get_item_attrs(id.index, sess);
        attrs.iter().for_each(|attr| attr::mark_used(attr));

        let name = crate_data
            .def_key(id.index)
            .disambiguated_data
            .data
            .get_opt_name()
            .expect("no name in load_macro");
        let ident = ast::Ident::with_dummy_span(name); // FIXME: cross-crate hygiene

        LoadedMacro::MacroDef(
            ast::Item {
                ident,
                id: ast::DUMMY_NODE_ID,
                span,
                attrs: attrs.iter().cloned().collect(),
                kind: ast::ItemKind::MacroDef(crate_data.get_macro(id.index, sess)),
                vis: source_map::respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
                tokens: None,
            },
            crate_data.root.edition,
        )
    }
    /// Clones the `AssocItem` record for `def` out of external metadata.
    pub fn associated_item_cloned_untracked(&self, def: DefId, sess: &Session) -> ty::AssocItem {
        let crate_data = self.get_crate_data(def.krate);
        crate_data.get_associated_item(def.index, sess)
    }
    /// Reports where the given crate's metadata was loaded from.
    pub fn crate_source_untracked(&self, cnum: CrateNum) -> CrateSource {
        self.get_crate_data(cnum).source.clone()
    }
    /// Fetches the span of an external definition.
    pub fn get_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
        let crate_data = self.get_crate_data(def_id.krate);
        crate_data.get_span(def_id.index, sess)
    }
    /// Counts the lifetime parameters declared on an external item.
    pub fn item_generics_num_lifetimes(&self, def_id: DefId, sess: &Session) -> usize {
        let generics = self.get_crate_data(def_id.krate).get_generics(def_id.index, sess);
        generics.own_counts().lifetimes
    }
}
impl CrateStore for CStore {
    // Escape hatch that lets callers downcast the trait object back to `CStore`.
    fn as_any(&self) -> &dyn Any {
        self
    }
    // The `_untracked` family reads crate metadata without registering a
    // dependency-graph read; callers are responsible for tracking.
    fn crate_name_untracked(&self, cnum: CrateNum) -> Symbol {
        self.get_crate_data(cnum).root.name
    }
    fn crate_is_private_dep_untracked(&self, cnum: CrateNum) -> bool {
        self.get_crate_data(cnum).private_dep
    }
    fn crate_disambiguator_untracked(&self, cnum: CrateNum) -> CrateDisambiguator {
        self.get_crate_data(cnum).root.disambiguator
    }
    fn crate_hash_untracked(&self, cnum: CrateNum) -> Svh {
        self.get_crate_data(cnum).root.hash
    }
    /// Returns the `DefKey` for a given `DefId`. This indicates the
    /// parent `DefId` as well as some idea of what kind of data the
    /// `DefId` refers to.
    fn def_key(&self, def: DefId) -> DefKey {
        self.get_crate_data(def.krate).def_key(def.index)
    }
    fn def_path(&self, def: DefId) -> DefPath {
        self.get_crate_data(def.krate).def_path(def.index)
    }
    fn def_path_hash(&self, def: DefId) -> DefPathHash {
        self.get_crate_data(def.krate).def_path_hash(def.index)
    }
    fn def_path_table(&self, cnum: CrateNum) -> &DefPathTable {
        &self.get_crate_data(cnum).cdata.def_path_table
    }
    // Collects the crate numbers of all loaded crates, in iteration order.
    fn crates_untracked(&self) -> Vec<CrateNum> {
        let mut result = vec![];
        self.iter_crate_data(|cnum, _| result.push(cnum));
        result
    }
    fn encode_metadata(&self, tcx: TyCtxt<'_>) -> EncodedMetadata {
        encoder::encode_metadata(tcx)
    }
    fn metadata_encoding_version(&self) -> &[u8] {
        rmeta::METADATA_HEADER
    }
    fn allocator_kind(&self) -> Option<AllocatorKind> {
        // NOTE(review): this resolves to CStore's *inherent* `allocator_kind`
        // (inherent methods take precedence over trait methods), presumably
        // defined elsewhere in this file — otherwise this would recurse.
        // TODO confirm the inherent method exists.
        self.allocator_kind()
    }
}
| 37.911538 | 100 | 0.591965 |
79833ba6d1a65f9522ca896e963510e809779fc2 | 1,024 | use super::{Identification, FileType};
// ELF64 file header. `#[repr(C)]` keeps the declared field order so the
// struct can be overlaid directly on the on-disk header bytes; do not
// reorder fields. Field names follow the ELF64 `Elf64_Ehdr` layout.
#[repr(C)]
#[derive(Debug)]
pub struct FileHeader64 {
    // Leading identification bytes (magic number, class, endianness, ...).
    ident: Identification,
    // Object file type (relocatable, executable, shared object, ...).
    file_type: FileType,
    // Target machine architecture code.
    machine: u16,
    // Object file format version.
    version: u32,
    // Virtual address of the entry point.
    entry: u64,
    // Byte offsets of the program / section header tables within the file.
    program_header_offset: u64,
    section_header_offset: u64,
    // Processor-specific flags.
    flags: u32,
    // Size of this header itself, in bytes.
    header_size: u16,
    // Per-entry size and entry count for the program header table.
    program_header_entry_size: u16,
    program_header_entries: u16,
    // Per-entry size and entry count for the section header table.
    section_header_entry_size: u16,
    section_header_entries: u16,
    // Index of the section containing section names (the `.shstrtab` index).
    section_name_string_table_index: u16,
}
impl FileHeader64 {
    /// The leading identification bytes (magic number, class, endianness, ...).
    pub fn identification(&self) -> &Identification {
        &self.ident
    }
    /// The object file type (relocatable, executable, shared object, ...).
    pub fn file_type(&self) -> FileType {
        self.file_type
    }
    /// Virtual address of the program entry point.
    pub fn entry_point(&self) -> u64 {
        self.entry
    }
    /// Byte offset of the program header table from the start of the file.
    pub fn program_header_offset(&self) -> usize {
        self.program_header_offset as usize
    }
    /// Size in bytes of a single program header table entry.
    pub fn program_entry_size(&self) -> usize {
        self.program_header_entry_size as usize
    }
    /// Number of entries in the program header table.
    pub fn program_entries(&self) -> usize {
        self.program_header_entries as usize
    }
    /// Byte offset of the section header table from the start of the file.
    ///
    /// Exposed for symmetry with `program_header_offset`; the field was
    /// previously stored but inaccessible.
    pub fn section_header_offset(&self) -> usize {
        self.section_header_offset as usize
    }
    /// Size in bytes of a single section header table entry.
    pub fn section_entry_size(&self) -> usize {
        self.section_header_entry_size as usize
    }
    /// Number of entries in the section header table.
    pub fn section_entries(&self) -> usize {
        self.section_header_entries as usize
    }
    /// Index of the section that holds section names (`.shstrtab`).
    pub fn section_name_string_table_index(&self) -> usize {
        self.section_name_string_table_index as usize
    }
}
| 21.787234 | 53 | 0.655273 |
032d9a7331c00009b6b4ee1efbb05bde46170d21 | 3,406 | use zenith_utils::postgres_backend::AuthType;
use zenith_utils::zid::{ZTenantId, ZTimelineId};
use std::path::PathBuf;
use std::time::Duration;
use lazy_static::lazy_static;
use zenith_metrics::{register_int_gauge_vec, IntGaugeVec};
pub mod basebackup;
pub mod branches;
pub mod http;
pub mod layered_repository;
pub mod logger;
pub mod object_key;
pub mod object_repository;
pub mod object_store;
pub mod page_cache;
pub mod page_service;
pub mod relish;
pub mod repository;
pub mod restore_local_repo;
pub mod rocksdb_storage;
pub mod waldecoder;
pub mod walreceiver;
pub mod walredo;
lazy_static! {
    // Prometheus gauge counting currently-open network connections,
    // labelled by `pageserver_connection_kind`. Registered once at first use;
    // registration can only fail on a duplicate/invalid metric definition,
    // hence the `expect`.
    static ref LIVE_CONNECTIONS_COUNT: IntGaugeVec = register_int_gauge_vec!(
        "pageserver_live_connections_count",
        "Number of live network connections",
        &["pageserver_connection_kind"]
    )
    .expect("failed to define a metric");
}
// Runtime configuration for the page server process.
#[derive(Debug, Clone)]
pub struct PageServerConf {
    // Run as a background daemon process.
    pub daemonize: bool,
    // Address the page server listens on for client connections
    // (presumably the page service — TODO confirm against main).
    pub listen_addr: String,
    // Address of the HTTP management/API endpoint.
    pub http_endpoint_addr: String,
    // Garbage-collection horizon (bytes of WAL to retain — TODO confirm units)
    // and how often the GC task runs.
    pub gc_horizon: u64,
    pub gc_period: Duration,
    // Name of the privileged database role (assumption — TODO confirm usage).
    pub superuser: String,
    // Repository directory, relative to current working directory.
    // Normally, the page server changes the current working directory
    // to the repository, and 'workdir' is always '.'. But we don't do
    // that during unit testing, because the current directory is global
    // to the process but different unit tests work on different
    // repositories.
    pub workdir: PathBuf,
    // Root of the PostgreSQL installation; `bin/` and `lib/` are resolved
    // beneath it (see `pg_bin_dir` / `pg_lib_dir`).
    pub pg_distrib_dir: PathBuf,
    // How client connections authenticate, and — when token auth is enabled —
    // where to find the public key used to validate tokens.
    pub auth_type: AuthType,
    pub auth_validation_public_key_path: Option<PathBuf>,
    // Which storage backend implementation to use (see `RepositoryFormat`).
    pub repository_format: RepositoryFormat,
}
/// Selects the repository storage backend.
#[derive(Debug, Clone, PartialEq)]
pub enum RepositoryFormat {
    // Presumably backed by the `layered_repository` module — TODO confirm.
    Layered,
    // Presumably backed by the `rocksdb_storage` module — TODO confirm.
    RocksDb,
}
impl PageServerConf {
    //
    // Repository layout helpers. All returned paths are relative to `workdir`.
    //
    fn tenants_path(&self) -> PathBuf {
        self.workdir.join("tenants")
    }
    fn tenant_path(&self, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.tenants_path();
        path.push(tenantid.to_string());
        path
    }
    fn tags_path(&self, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.tenant_path(tenantid);
        path.push("refs");
        path.push("tags");
        path
    }
    fn tag_path(&self, tag_name: &str, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.tags_path(tenantid);
        path.push(tag_name);
        path
    }
    fn branches_path(&self, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.tenant_path(tenantid);
        path.push("refs");
        path.push("branches");
        path
    }
    fn branch_path(&self, branch_name: &str, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.branches_path(tenantid);
        path.push(branch_name);
        path
    }
    fn timelines_path(&self, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.tenant_path(tenantid);
        path.push("timelines");
        path
    }
    fn timeline_path(&self, timelineid: &ZTimelineId, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.timelines_path(tenantid);
        path.push(timelineid.to_string());
        path
    }
    fn ancestor_path(&self, timelineid: &ZTimelineId, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.timeline_path(timelineid, tenantid);
        path.push("ancestor");
        path
    }
    fn wal_dir_path(&self, timelineid: &ZTimelineId, tenantid: &ZTenantId) -> PathBuf {
        let mut path = self.timeline_path(timelineid, tenantid);
        path.push("wal");
        path
    }
    //
    // Paths inside the PostgreSQL distribution.
    //
    pub fn pg_bin_dir(&self) -> PathBuf {
        let mut path = self.pg_distrib_dir.clone();
        path.push("bin");
        path
    }
    pub fn pg_lib_dir(&self) -> PathBuf {
        let mut path = self.pg_distrib_dir.clone();
        path.push("lib");
        path
    }
}
| 27.031746 | 88 | 0.687904 |
913f5bed7c1ff5afab6e638d1c1a50c797906a52 | 829 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
#![allow(dead_code)] // Due to criterion we need this to avoid warnings.
#![cfg_attr(feature = "cargo-clippy", allow(clippy::let_and_return))] // Benches often artificially return values. Allow it.
use criterion::Criterion;
use std::time::Duration;
mod suites;
pub const DEFAULT_RAFT_SETS: [(usize, usize); 4] = [(0, 0), (3, 1), (5, 2), (7, 3)];
fn main() {
    // Defaults go first so that command-line arguments can still override them.
    let mut criterion = Criterion::default()
        .warm_up_time(Duration::from_millis(500))
        .measurement_time(Duration::from_secs(1))
        .configure_from_args();

    // Run every benchmark suite against the shared Criterion instance.
    suites::bench_raft(&mut criterion);
    suites::bench_raw_node(&mut criterion);
    suites::bench_progress(&mut criterion);
    suites::bench_progress_set(&mut criterion);

    criterion.final_summary();
}
| 30.703704 | 124 | 0.6731 |
39d8042e8268e2fbc7e1fb22ec9db8ffdc09af0b | 3,940 | use std::io::{self, Read, Seek, SeekFrom};
pub trait ReadUtil: Read {
    /// Reads a single byte from the reader.
    fn read_u8(&mut self) -> io::Result<u8> {
        let mut byte = [0u8; 1];
        self.read_exact(&mut byte)?;
        Ok(byte[0])
    }
    /// Reads a big-endian unsigned 16 bit integer from the reader.
    fn read_u16(&mut self) -> io::Result<u16> {
        let mut bytes = [0u8; 2];
        self.read_exact(&mut bytes)?;
        Ok(u16::from_be_bytes(bytes))
    }
    /// Reads a big-endian unsigned 32 bit integer from the reader.
    fn read_u32(&mut self) -> io::Result<u32> {
        let mut bytes = [0u8; 4];
        self.read_exact(&mut bytes)?;
        Ok(u32::from_be_bytes(bytes))
    }
    /// Reads a big-endian unsigned 64 bit integer from the reader.
    fn read_u64(&mut self) -> io::Result<u64> {
        let mut bytes = [0u8; 8];
        self.read_exact(&mut bytes)?;
        Ok(u64::from_be_bytes(bytes))
    }
    /// Reads exactly `len` bytes into a freshly allocated vector.
    fn read_u8_vec(&mut self, len: u64) -> io::Result<Vec<u8>> {
        let mut data = vec![0u8; len as usize];
        self.read_exact(&mut data)?;
        Ok(data)
    }
    /// Reads `len` bytes and decodes them as a UTF-8 string.
    fn read_utf8(&mut self, len: u64) -> crate::Result<String> {
        let raw = self.read_u8_vec(len)?;
        let decoded = String::from_utf8(raw)?;
        Ok(decoded)
    }
    /// Reads `len` bytes and decodes them as big-endian UTF-16.
    fn read_utf16(&mut self, len: u64) -> crate::Result<String> {
        let raw = self.read_u8_vec(len)?;
        let units: Vec<u16> = raw
            .chunks_exact(2)
            .map(|pair| u16::from_be_bytes([pair[0], pair[1]]))
            .collect();
        Ok(String::from_utf16(&units)?)
    }
}
impl<T: Read> ReadUtil for T {}
pub trait SeekUtil: Seek {
    /// Returns the number of bytes between the current position and the end
    /// of the stream, restoring the original position before returning.
    fn remaining_stream_len(&mut self) -> io::Result<u64> {
        let original_pos = self.seek(SeekFrom::Current(0))?;
        let end_pos = self.seek(SeekFrom::End(0))?;
        self.seek(SeekFrom::Start(original_pos))?;
        Ok(end_pos - original_pos)
    }
}
impl<T: Seek> SeekUtil for T {}
/// Attempts to read a big endian integer at the specified index from a byte slice.
/// Expands to an `Option<$type>`: `None` when the slice is too short.
macro_rules! be_int {
    ($bytes:expr, $index:expr, $type:ty) => {{
        use std::convert::TryFrom;
        // Number of bytes occupied by the target integer type.
        const SIZE: usize = std::mem::size_of::<$type>();
        let bytes_start = ($index);
        let bytes_end = ($index) + SIZE;
        // Bounds check first; out-of-range reads yield `None` instead of panicking.
        if $bytes.len() < bytes_end {
            None
        } else {
            let be_bytes = <[u8; SIZE]>::try_from(&$bytes[bytes_start..bytes_end]);
            match be_bytes {
                Ok(b) => Some(<$type>::from_be_bytes(b)),
                // Unreachable in practice: the slice is exactly SIZE bytes long.
                Err(_) => None,
            }
        }
    }};
}
/// Attempts to write a big endian integer at the specified index to a byte vector.
/// Grows the vector (zero-filled) if it is too short to hold the value.
macro_rules! set_be_int {
    ($bytes:expr, $index:expr, $value:expr, $type:ty) => {{
        const SIZE: usize = std::mem::size_of::<$type>();
        let bytes_start = ($index);
        let bytes_end = ($index) + SIZE;
        let be_bytes = <$type>::to_be_bytes($value);
        // Ensure the destination range exists before writing into it.
        if $bytes.len() < bytes_end {
            $bytes.resize(bytes_end, 0);
        }
        for i in 0..SIZE {
            $bytes[bytes_start + i] = be_bytes[i];
        }
    }};
}
#[cfg(test)]
mod test {
    #[test]
    fn be_int() {
        // 0x2D34D05E == 758435934
        let data = vec![0x00u8, 0x00, 0x00, 0x00, 0x2D, 0x34, 0xD0, 0x5E];
        assert_eq!(be_int!(data, 4, u32), Some(758435934u32));
    }
    #[test]
    fn set_be_int() {
        let mut data = vec![0u8; 8];
        set_be_int!(data, 4, 524, u16);
        // 524 == 0x020C, written big-endian at offset 4.
        assert_eq!(&data[4..6], &[2u8, 12u8]);
    }
}
| 29.402985 | 92 | 0.561675 |
e866e2ff2352e24cb7d1681d032f772edf8d236d | 8,459 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
// Machine-generated (web-sys style) FFI declarations for the DOM
// `SVGAElement` interface; each extern item binds a getter or setter to the
// corresponding JS property via wasm-bindgen. Presumably generated from
// WebIDL — do not edit individual attribute strings by hand.
#[wasm_bindgen]
extern "C" {
    # [wasm_bindgen (extends = SvgGraphicsElement , extends = SvgElement , extends = Element , extends = Node , extends = EventTarget , extends = :: js_sys :: Object , js_name = SVGAElement , typescript_type = "SVGAElement")]
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[doc = "The `SvgaElement` class."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub type SvgaElement;
    #[cfg(feature = "SvgAnimatedString")]
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = target)]
    #[doc = "Getter for the `target` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/target)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgAnimatedString`, `SvgaElement`*"]
    pub fn target(this: &SvgaElement) -> SvgAnimatedString;
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = download)]
    #[doc = "Getter for the `download` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/download)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn download(this: &SvgaElement) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "SVGAElement" , js_name = download)]
    #[doc = "Setter for the `download` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/download)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_download(this: &SvgaElement, value: &str);
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = ping)]
    #[doc = "Getter for the `ping` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/ping)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn ping(this: &SvgaElement) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "SVGAElement" , js_name = ping)]
    #[doc = "Setter for the `ping` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/ping)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_ping(this: &SvgaElement, value: &str);
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = rel)]
    #[doc = "Getter for the `rel` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/rel)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn rel(this: &SvgaElement) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "SVGAElement" , js_name = rel)]
    #[doc = "Setter for the `rel` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/rel)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_rel(this: &SvgaElement, value: &str);
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = referrerPolicy)]
    #[doc = "Getter for the `referrerPolicy` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/referrerPolicy)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn referrer_policy(this: &SvgaElement) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "SVGAElement" , js_name = referrerPolicy)]
    #[doc = "Setter for the `referrerPolicy` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/referrerPolicy)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_referrer_policy(this: &SvgaElement, value: &str);
    #[cfg(feature = "DomTokenList")]
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = relList)]
    #[doc = "Getter for the `relList` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/relList)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `DomTokenList`, `SvgaElement`*"]
    pub fn rel_list(this: &SvgaElement) -> DomTokenList;
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = hreflang)]
    #[doc = "Getter for the `hreflang` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/hreflang)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn hreflang(this: &SvgaElement) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "SVGAElement" , js_name = hreflang)]
    #[doc = "Setter for the `hreflang` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/hreflang)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_hreflang(this: &SvgaElement, value: &str);
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = type)]
    #[doc = "Getter for the `type` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/type)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn type_(this: &SvgaElement) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "SVGAElement" , js_name = type)]
    #[doc = "Setter for the `type` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/type)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_type(this: &SvgaElement, value: &str);
    # [wasm_bindgen (structural , catch , method , getter , js_class = "SVGAElement" , js_name = text)]
    #[doc = "Getter for the `text` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/text)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn text(this: &SvgaElement) -> Result<String, JsValue>;
    # [wasm_bindgen (structural , catch , method , setter , js_class = "SVGAElement" , js_name = text)]
    #[doc = "Setter for the `text` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/text)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgaElement`*"]
    pub fn set_text(this: &SvgaElement, value: &str) -> Result<(), JsValue>;
    #[cfg(feature = "SvgAnimatedString")]
    # [wasm_bindgen (structural , method , getter , js_class = "SVGAElement" , js_name = href)]
    #[doc = "Getter for the `href` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement/href)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `SvgAnimatedString`, `SvgaElement`*"]
    pub fn href(this: &SvgaElement) -> SvgAnimatedString;
}
| 61.744526 | 225 | 0.650077 |
4b6160a959f646ba2066d4a696d4fa22e9b48ff8 | 553 | #[allow(unused_imports)]
#[macro_use]
extern crate log;
#[allow(unused_imports)]
#[macro_use]
extern crate imgui;
//TODO: This is for selection, get rid of this when possible
extern crate nalgebra as na;
extern crate nalgebra_glm as glm;
mod select;
pub use select::EditorSelectRegistryBuilder;
pub use select::EditorSelectRegistry;
pub use select::EditorSelectable;
pub use select::EditorSelectableTransformed;
mod inspect;
pub use inspect::EditorInspectRegistryBuilder;
pub use inspect::EditorInspectRegistry;
pub mod resources;
pub mod systems;
| 22.12 | 60 | 0.801085 |
2823697c41241d23676a52aa25e5b78ba205a77d | 18,455 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
//! This module is responsible to build a single project. It does not handle
//! watch mode or other state.
mod artifact_content;
pub mod artifact_writer;
mod build_ir;
mod build_schema;
mod generate_artifacts;
pub mod generate_extra_artifacts;
mod is_operation_preloadable;
mod log_program_stats;
mod persist_operations;
mod source_control;
mod validate;
use self::log_program_stats::print_stats;
use crate::compiler_state::{ArtifactMapKind, CompilerState, ProjectName, SourceSetName};
use crate::config::{Config, ProjectConfig};
use crate::errors::BuildProjectError;
use crate::file_source::SourceControlUpdateStatus;
use crate::{artifact_map::ArtifactMap, graphql_asts::GraphQLAsts};
pub use artifact_content::QueryID;
use build_ir::BuildIRResult;
pub use build_ir::SourceHashes;
pub use build_schema::build_schema;
use common::{sync::ParallelIterator, PerfLogEvent, PerfLogger};
use fnv::{FnvHashMap, FnvHashSet};
pub use generate_artifacts::{
create_path_for_artifact, generate_artifacts, Artifact, ArtifactContent,
GenerateFragmentTextArtifactFn, GenerateOperationTextArtifactFn,
};
use generate_extra_artifacts::generate_extra_artifacts;
use graphql_ir::Program;
use interner::StringKey;
pub use is_operation_preloadable::is_operation_preloadable;
use log::{debug, info, warn};
use rayon::slice::ParallelSlice;
use relay_codegen::Printer;
use relay_transforms::{apply_transforms, find_resolver_dependencies, DependencyMap, Programs};
use schema::SDLSchema;
pub use source_control::add_to_mercurial;
use std::{collections::hash_map::Entry, path::PathBuf, sync::Arc};
pub use validate::{validate, AdditionalValidations};
// Outcome of a failed project build: either a real error, or a cooperative
// cancellation (the build noticed pending source-code updates and bailed).
pub enum BuildProjectFailure {
    Error(BuildProjectError),
    Cancelled,
}
impl From<BuildProjectError> for BuildProjectFailure {
    // Lets `?` convert `BuildProjectError` results into build failures.
    fn from(err: BuildProjectError) -> BuildProjectFailure {
        BuildProjectFailure::Error(err)
    }
}
/// This program doesn't have IR transforms applied to it, so it's not optimized.
/// It's perfect for the LSP server: we have all the documents with
/// their locations to provide information to go_to_definition, hover, etc.
///
/// Returns the untransformed `Program`, the set of base fragment names
/// (presumably fragments provided by a base project — TODO confirm), and the
/// source hashes of the files that produced the IR.
pub fn build_raw_program(
    project_config: &ProjectConfig,
    implicit_dependencies: &DependencyMap,
    graphql_asts: &FnvHashMap<SourceSetName, GraphQLAsts>,
    schema: Arc<SDLSchema>,
    log_event: &impl PerfLogEvent,
    is_incremental_build: bool,
) -> Result<(Program, FnvHashSet<StringKey>, SourceHashes), BuildProjectError> {
    // Build a type aware IR.
    let BuildIRResult {
        ir,
        base_fragment_names,
        source_hashes,
    } = log_event.time("build_ir_time", || {
        build_ir::build_ir(
            project_config,
            &implicit_dependencies,
            &schema,
            graphql_asts,
            is_incremental_build,
        )
        // IR build errors surface as validation errors to the caller.
        .map_err(|errors| BuildProjectError::ValidationErrors { errors })
    })?;
    // Turn the IR into a base Program.
    let program = log_event.time("build_program_time", || {
        Program::from_definitions(schema, ir)
    });
    Ok((program, base_fragment_names, source_hashes))
}
/// Runs the validation rules that go beyond type checking, timing the pass
/// and recording the number of documents examined.
pub fn validate_program(
    config: &Config,
    program: &Program,
    log_event: &impl PerfLogEvent,
) -> Result<(), BuildProjectError> {
    let validation_timer = log_event.start("validate_time");
    log_event.number("validate_documents_count", program.document_count());
    let outcome = match validate(
        program,
        &config.connection_interface,
        &config.additional_validations,
    ) {
        Ok(ok) => Ok(ok),
        Err(errors) => Err(BuildProjectError::ValidationErrors { errors }),
    };
    log_event.stop(validation_timer);
    outcome
}
/// Apply various chains of transforms to create a set of output programs.
pub fn transform_program(
config: &Config,
project_config: &ProjectConfig,
program: Arc<Program>,
base_fragment_names: Arc<FnvHashSet<StringKey>>,
perf_logger: Arc<impl PerfLogger + 'static>,
log_event: &impl PerfLogEvent,
) -> Result<Programs, BuildProjectFailure> {
let timer = log_event.start("apply_transforms_time");
let result = apply_transforms(
project_config.name,
program,
base_fragment_names,
&config.connection_interface,
Arc::clone(&project_config.feature_flags),
perf_logger,
Some(print_stats),
)
.map_err(|errors| BuildProjectFailure::Error(BuildProjectError::ValidationErrors { errors }));
log_event.stop(timer);
result
}
// Builds the raw program, validates it, and applies the transform pipelines.
// Checks for cancellation between expensive phases so pending source updates
// can abort the build early.
pub fn build_programs(
    config: &Config,
    project_config: &ProjectConfig,
    compiler_state: &CompilerState,
    graphql_asts: &FnvHashMap<SourceSetName, GraphQLAsts>,
    schema: Arc<SDLSchema>,
    log_event: &impl PerfLogEvent,
    perf_logger: Arc<impl PerfLogger + 'static>,
) -> Result<(Programs, Arc<SourceHashes>), BuildProjectFailure> {
    let project_name = project_config.name;
    // Incremental builds are only safe when changes have already been
    // processed and neither this project's schema nor its base's schema
    // changed in a breaking way.
    let is_incremental_build = compiler_state.has_processed_changes()
        && !compiler_state.has_breaking_schema_change(project_name)
        && if let Some(base) = project_config.base {
            !compiler_state.has_breaking_schema_change(base)
        } else {
            true
        };
    let (program, base_fragment_names, source_hashes) = build_raw_program(
        project_config,
        // Lock scope: the read guard is dropped as soon as this call returns.
        &compiler_state.implicit_dependencies.read().unwrap(),
        graphql_asts,
        schema,
        log_event,
        is_incremental_build,
    )?;
    if compiler_state.should_cancel_current_build() {
        debug!("Build is cancelled: updates in source code/or new file changes are pending.");
        return Err(BuildProjectFailure::Cancelled);
    }
    // Validation and resolver-dependency discovery are independent, so run
    // them in parallel; only validation produces a result we must propagate.
    let (validation_results, _) = rayon::join(
        || {
            // Call validation rules that go beyond type checking.
            validate_program(&config, &program, log_event)
        },
        || {
            find_resolver_dependencies(
                &mut compiler_state
                    .pending_implicit_dependencies
                    .write()
                    .unwrap(),
                &program,
            );
        },
    );
    validation_results?;
    let programs = transform_program(
        config,
        project_config,
        Arc::new(program),
        Arc::new(base_fragment_names),
        Arc::clone(&perf_logger),
        log_event,
    )?;
    Ok((programs, Arc::new(source_hashes)))
}
// Compiles a single project end-to-end: builds the schema, produces the
// transformed programs, and generates (but does not write) artifacts.
// Emits a `build_project` perf event covering the whole run.
pub fn build_project(
    config: &Config,
    project_config: &ProjectConfig,
    compiler_state: &CompilerState,
    graphql_asts: &FnvHashMap<SourceSetName, GraphQLAsts>,
    perf_logger: Arc<impl PerfLogger + 'static>,
) -> Result<(ProjectName, Arc<SDLSchema>, Programs, Vec<Artifact>), BuildProjectFailure> {
    let log_event = perf_logger.create_event("build_project");
    let build_time = log_event.start("build_project_time");
    let project_name = project_config.name;
    log_event.string("project", project_name.to_string());
    info!("[{}] compiling...", project_name);
    // Construct a schema instance including project specific extensions.
    let schema = log_event
        .time("build_schema_time", || {
            Ok(build_schema(compiler_state, project_config)?)
        })
        .map_err(|errors| {
            BuildProjectFailure::Error(BuildProjectError::ValidationErrors { errors })
        })?;
    // Bail out early when new source updates arrived during schema building.
    if compiler_state.should_cancel_current_build() {
        debug!("Build is cancelled: updates in source code/or new file changes are pending.");
        return Err(BuildProjectFailure::Cancelled);
    }
    // Apply different transform pipelines to produce the `Programs`.
    let (programs, source_hashes) = build_programs(
        config,
        project_config,
        compiler_state,
        graphql_asts,
        Arc::clone(&schema),
        &log_event,
        Arc::clone(&perf_logger),
    )?;
    if compiler_state.should_cancel_current_build() {
        debug!("Build is cancelled: updates in source code/or new file changes are pending.");
        return Err(BuildProjectFailure::Cancelled);
    }
    // Generate artifacts by collecting information from the `Programs`.
    let artifacts_timer = log_event.start("generate_artifacts_time");
    let artifacts = generate_artifacts(
        config,
        project_config,
        &programs,
        Arc::clone(&source_hashes),
    );
    log_event.stop(artifacts_timer);
    log_event.number(
        "generated_artifacts",
        programs.reader.document_count() + programs.normalization.document_count(),
    );
    log_event.stop(build_time);
    perf_logger.complete_event(log_event);
    Ok((project_config.name, schema, programs, artifacts))
}
#[allow(clippy::too_many_arguments)]
pub async fn commit_project(
config: &Config,
project_config: &ProjectConfig,
perf_logger: Arc<impl PerfLogger + 'static>,
schema: &SDLSchema,
programs: Programs,
mut artifacts: Vec<Artifact>,
artifact_map: Arc<ArtifactMapKind>,
// Definitions that are removed from the previous artifact map
removed_definition_names: Vec<StringKey>,
// Dirty artifacts that should be removed if no longer in the artifacts map
mut artifacts_to_remove: FnvHashSet<PathBuf>,
source_control_update_status: Arc<SourceControlUpdateStatus>,
) -> Result<ArtifactMap, BuildProjectFailure> {
let log_event = perf_logger.create_event("commit_project");
log_event.string("project", project_config.name.to_string());
let commit_time = log_event.start("commit_project_time");
if source_control_update_status.is_started() {
debug!("commit_project cancelled before persisting due to source control updates");
return Err(BuildProjectFailure::Cancelled);
}
if let Some(ref operation_persister) = config.operation_persister {
if let Some(ref persist_config) = project_config.persist {
let persist_operations_timer = log_event.start("persist_operations_time");
persist_operations::persist_operations(
&mut artifacts,
&config.root_dir,
&persist_config,
config,
operation_persister.as_ref(),
&log_event,
)
.await?;
log_event.stop(persist_operations_timer);
}
}
if source_control_update_status.is_started() {
debug!(
"commit_project cancelled before generating extra artifacts due to source control updates"
);
return Err(BuildProjectFailure::Cancelled);
}
// In some cases we need to create additional (platform specific) artifacts
// For that, we will use `generate_extra_artifacts` from the configs
if let Some(generate_extra_artifacts_fn) = &config.generate_extra_artifacts {
log_event.time("generate_extra_artifacts_time", || {
generate_extra_artifacts(
schema,
project_config,
&mut artifacts,
generate_extra_artifacts_fn,
)
});
}
if source_control_update_status.is_started() {
debug!("commit_project cancelled before writing artifacts due to source control updates");
return Err(BuildProjectFailure::Cancelled);
}
let should_stop_updating_artifacts = || {
if source_control_update_status.is_started() {
debug!("artifact_writer updates cancelled due source control updates");
true
} else {
false
}
};
// Write the generated artifacts to disk. This step is separate from
// generating artifacts or persisting to avoid partial writes in case of
// errors as much as possible.
let next_artifact_map = match Arc::as_ref(&artifact_map) {
ArtifactMapKind::Unconnected(existing_artifacts) => {
let mut existing_artifacts = existing_artifacts.clone();
let write_artifacts_time = log_event.start("write_artifacts_time");
write_artifacts(
config,
project_config,
schema,
should_stop_updating_artifacts,
&artifacts,
)?;
for artifact in &artifacts {
if !existing_artifacts.remove(&artifact.path) {
debug!(
"[{}] new artifact {:?} from definitions {:?}",
project_config.name, &artifact.path, &artifact.source_definition_names
);
}
}
log_event.stop(write_artifacts_time);
let delete_artifacts_time = log_event.start("delete_artifacts_time");
for remaining_artifact in &existing_artifacts {
if should_stop_updating_artifacts() {
break;
}
let path = config.root_dir.join(remaining_artifact);
config.artifact_writer.remove(path)?;
}
log_event.stop(delete_artifacts_time);
ArtifactMap::from(artifacts)
}
ArtifactMapKind::Mapping(artifact_map) => {
let mut artifact_map = artifact_map.clone();
let mut current_paths_map = ArtifactMap::default();
let write_artifacts_incremental_time =
log_event.start("write_artifacts_incremental_time");
// Write or update artifacts
write_artifacts(
config,
project_config,
schema,
should_stop_updating_artifacts,
&artifacts,
)?;
for artifact in artifacts {
current_paths_map.insert(artifact);
}
log_event.stop(write_artifacts_incremental_time);
log_event.time("update_artifact_map_time", || {
// All generated paths for removed definitions should be removed
for name in &removed_definition_names {
if let Some(artifacts) = artifact_map.0.remove(&name) {
for artifact in artifacts {
artifacts_to_remove.insert(artifact.path);
}
}
}
// Update the artifact map, and delete any removed artifacts
for (definition_name, artifact_records) in current_paths_map.0 {
match artifact_map.0.entry(definition_name) {
Entry::Occupied(mut entry) => {
let prev_records = entry.get_mut();
for prev_record in prev_records.drain(..) {
if !artifact_records.iter().any(|t| t.path == prev_record.path) {
artifacts_to_remove.insert(prev_record.path);
}
}
prev_records.extend(artifact_records.into_iter());
}
Entry::Vacant(entry) => {
entry.insert(artifact_records);
}
}
}
// Filter out any artifact that is in the artifact map
for artifacts in artifact_map.0.values() {
for artifact in artifacts {
artifacts_to_remove.remove(&artifact.path);
}
}
});
let delete_artifacts_incremental_time =
log_event.start("delete_artifacts_incremental_time");
// The remaining dirty artifacts are no longer required
for path in artifacts_to_remove {
if should_stop_updating_artifacts() {
break;
}
config.artifact_writer.remove(config.root_dir.join(path))?;
}
log_event.stop(delete_artifacts_incremental_time);
artifact_map
}
};
if source_control_update_status.is_started() {
log_event.number("update_artifacts_after_source_control_update", 1);
debug!(
"We just updated artifacts after source control update happened. Most likely we have outdated artifacts now..."
);
warn!(
r#"
Build canceled due to a source control update while we're writing artifacts.
The compiler may produce outdated artifacts, but it will regenerate the correct set after the update is completed."#
);
return Err(BuildProjectFailure::Cancelled);
} else {
// For now, lets log how often this is happening, so we can decide if we want to
// adjust the way we write artifacts. For example, we could write them to the temp
// directory first, then move to a correct destination.
log_event.number("update_artifacts_after_source_control_update", 0);
}
info!(
"[{}] compiled documents: {} reader, {} normalization, {} operation text",
project_config.name,
programs.reader.document_count(),
programs.normalization.document_count(),
programs.operation_text.document_count()
);
log_event.stop(commit_time);
perf_logger.complete_event(log_event);
Ok(next_artifact_map)
}
fn write_artifacts<F: Fn() -> bool + Sync + Send>(
config: &Config,
project_config: &ProjectConfig,
schema: &SDLSchema,
should_stop_updating_artifacts: F,
artifacts: &[Artifact],
) -> Result<(), BuildProjectFailure> {
artifacts.par_chunks(8192).try_for_each_init(
|| Printer::with_dedupe(project_config.js_module_format),
|mut printer, artifacts| {
for artifact in artifacts {
if should_stop_updating_artifacts() {
return Err(BuildProjectFailure::Cancelled);
}
let path = config.root_dir.join(&artifact.path);
let content = artifact.content.as_bytes(
config,
project_config,
&mut printer,
schema,
artifact.source_file,
);
if config.artifact_writer.should_write(&path, &content)? {
config.artifact_writer.write(path, content)?;
}
}
Ok(())
},
)?;
Ok(())
}
| 36.544554 | 123 | 0.631374 |
678866e181b95fe66eb677dfb5c06f3600488d0e | 22,315 | //!
//! Special module that allows users to interact and communicate with a
//! group of actors through the dispatchers that holds information about
//! actors grouped together.
use crate::child_ref::ChildRef;
use crate::envelope::SignedMessage;
use dashmap::DashMap;
use std::fmt::{self, Debug};
use std::hash::{Hash, Hasher};
use std::sync::{
atomic::{AtomicU64, Ordering},
Arc,
};
/// Type alias for the concurrency hashmap. Each key-value pair stores
/// the Bastion identifier as the key and the module name as the value.
pub type DispatcherMap = DashMap<ChildRef, String>;
#[derive(Debug, Clone)]
/// Defines types of the notifications handled by the dispatcher
/// when the group of actors is changing.
pub enum NotificationType {
/// Represents a notification when a new actor wants to
/// join to the existing group of actors.
Register,
/// Represents a notification when the existing actor
/// was stopped, killed, suspended or finished an execution.
Remove,
}
#[derive(Debug, Clone)]
/// Defines types of the notifications handled by the dispatcher
/// when the group of actors is changing.
///
/// If the message can't be delivered to the declared group, then
/// the message will be marked as the "dead letter".
pub enum BroadcastTarget {
/// Send the broadcasted message to everyone in the system.
All,
/// Send the broadcasted message to each actor in group.
Group(String),
}
#[derive(Debug, Clone, Eq, PartialEq)]
/// Defines the type of the dispatcher.
///
/// The default type is `Anonymous`.
pub enum DispatcherType {
/// The default kind of the dispatcher which is using for
/// handling all actors in the cluster. Can be more than
/// one instance of this type.
Anonymous,
/// The dispatcher with a unique name which will be using
/// for updating and notifying actors in the same group
/// base on the desired strategy. The logic handling broadcasted
/// messages and their distribution across the group depends on
/// the dispatcher's handler.
Named(String),
}
/// The default handler, which does round-robin.
pub type DefaultDispatcherHandler = RoundRobinHandler;
/// Dispatcher that will do simple round-robin distribution
#[derive(Default, Debug)]
pub struct RoundRobinHandler {
index: AtomicU64,
}
impl DispatcherHandler for RoundRobinHandler {
// Will left this implementation as empty.
fn notify(
&self,
_from_child: &ChildRef,
_entries: &DispatcherMap,
_notification_type: NotificationType,
) {
}
// Each child in turn will receive a message.
fn broadcast_message(&self, entries: &DispatcherMap, message: &Arc<SignedMessage>) {
let current_index = self.index.load(Ordering::SeqCst) % entries.len() as u64;
let mut skipped = 0;
for pair in entries.iter() {
if skipped != current_index {
skipped += 1;
continue;
}
let entry = pair.key();
entry.tell_anonymously(message.clone()).unwrap();
break;
}
self.index.store(current_index + 1, Ordering::SeqCst);
}
}
/// Generic trait which any custom dispatcher handler must implement for
/// the further usage by the `Dispatcher` instances.
pub trait DispatcherHandler {
/// Sends the notification of the certain type to each actor in group.
fn notify(
&self,
from_child: &ChildRef,
entries: &DispatcherMap,
notification_type: NotificationType,
);
/// Broadcasts the message to actors in according to the implemented behaviour.
fn broadcast_message(&self, entries: &DispatcherMap, message: &Arc<SignedMessage>);
}
/// A generic implementation of the Bastion dispatcher
///
/// The main idea of the dispatcher is to provide an alternative way to
/// communicate between a group of actors. For example, dispatcher can
/// be used when a developer wants to send a specific message or share a
/// local state between the specific group of registered actors with
/// the usage of a custom dispatcher.
pub struct Dispatcher {
/// Defines the type of the dispatcher.
dispatcher_type: DispatcherType,
/// The handler used for a notification or a message.
handler: Box<dyn DispatcherHandler + Send + Sync + 'static>,
/// Special field that stores information about all
/// registered actors in the group.
actors: DispatcherMap,
}
impl Dispatcher {
/// Returns the type of the dispatcher.
pub fn dispatcher_type(&self) -> DispatcherType {
self.dispatcher_type.clone()
}
/// Returns the used handler by the dispatcher.
pub fn handler(&self) -> &Box<dyn DispatcherHandler + Send + Sync + 'static> {
&self.handler
}
/// Sets the dispatcher type.
pub fn with_dispatcher_type(mut self, dispatcher_type: DispatcherType) -> Self {
trace!("Setting dispatcher the {:?} type.", dispatcher_type);
self.dispatcher_type = dispatcher_type;
self
}
/// Creates a dispatcher with a specific dispatcher type.
pub fn with_type(dispatcher_type: DispatcherType) -> Self {
trace!(
"Instanciating a dispatcher with type {:?}.",
dispatcher_type
);
Self {
dispatcher_type,
handler: Box::new(DefaultDispatcherHandler::default()),
actors: Default::default(),
}
}
/// Sets the handler for the dispatcher.
pub fn with_handler(
mut self,
handler: Box<dyn DispatcherHandler + Send + Sync + 'static>,
) -> Self {
trace!(
"Setting handler for the {:?} dispatcher.",
self.dispatcher_type
);
self.handler = handler;
self
}
/// Appends the information about actor to the dispatcher.
pub(crate) fn register(&self, key: &ChildRef, module_name: String) {
self.actors.insert(key.to_owned(), module_name);
self.handler
.notify(key, &self.actors, NotificationType::Register);
}
/// Removes and then returns the record from the registry by the given key.
/// Returns `None` when the record wasn't found by the given key.
pub(crate) fn remove(&self, key: &ChildRef) {
if let Some(_) = self.actors.remove(key) {
self.handler
.notify(key, &self.actors, NotificationType::Remove);
}
}
/// Forwards the message to the handler for processing.
pub fn notify(&self, from_child: &ChildRef, notification_type: NotificationType) {
self.handler
.notify(from_child, &self.actors, notification_type)
}
/// Sends the message to the group of actors.
/// The logic of who and how should receive the message relies onto
/// the handler implementation.
pub fn broadcast_message(&self, message: &Arc<SignedMessage>) {
self.handler.broadcast_message(&self.actors, &message);
}
}
impl Debug for Dispatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Dispatcher(type: {:?}, actors: {:?})",
self.dispatcher_type,
self.actors.len()
)
}
}
impl DispatcherType {
pub(crate) fn name(&self) -> String {
match self {
DispatcherType::Anonymous => String::from("__Anonymous__"),
DispatcherType::Named(value) => value.to_owned(),
}
}
}
impl Default for Dispatcher {
fn default() -> Self {
Dispatcher {
dispatcher_type: DispatcherType::default(),
handler: Box::new(DefaultDispatcherHandler::default()),
actors: DashMap::new(),
}
}
}
impl Default for DispatcherType {
fn default() -> Self {
DispatcherType::Anonymous
}
}
#[allow(clippy::derive_hash_xor_eq)]
impl Hash for DispatcherType {
fn hash<H: Hasher>(&self, state: &mut H) {
self.name().hash(state);
}
}
impl Into<DispatcherType> for String {
fn into(self) -> DispatcherType {
match self == DispatcherType::Anonymous.name() {
true => DispatcherType::Anonymous,
false => DispatcherType::Named(self),
}
}
}
#[derive(Debug)]
/// The global dispatcher of bastion the cluster.
///
/// The main purpose of this dispatcher is be a point through
/// developers can communicate with actors through group names.
pub(crate) struct GlobalDispatcher {
/// Storage for all registered group of actors.
pub dispatchers: DashMap<DispatcherType, Arc<Box<Dispatcher>>>,
}
impl GlobalDispatcher {
/// Creates a new instance of the global registry.
pub(crate) fn new() -> Self {
GlobalDispatcher {
dispatchers: DashMap::new(),
}
}
/// Appends the information about actor to the dispatcher.
pub(crate) fn register(
&self,
dispatchers: &Vec<DispatcherType>,
child_ref: &ChildRef,
module_name: String,
) {
dispatchers
.iter()
.filter(|key| self.dispatchers.contains_key(*key))
.for_each(|key| {
if let Some(dispatcher) = self.dispatchers.get(key) {
dispatcher.register(child_ref, module_name.clone())
}
})
}
/// Removes and then returns the record from the registry by the given key.
/// Returns `None` when the record wasn't found by the given key.
pub(crate) fn remove(&self, dispatchers: &Vec<DispatcherType>, child_ref: &ChildRef) {
dispatchers
.iter()
.filter(|key| self.dispatchers.contains_key(*key))
.for_each(|key| {
if let Some(dispatcher) = self.dispatchers.get(key) {
dispatcher.remove(child_ref)
}
})
}
/// Passes the notification from the actor to everyone that registered in the same
/// groups as the caller.
pub(crate) fn notify(
&self,
from_actor: &ChildRef,
dispatchers: &Vec<DispatcherType>,
notification_type: NotificationType,
) {
self.dispatchers
.iter()
.filter(|pair| dispatchers.contains(&pair.key()))
.for_each(|pair| {
let dispatcher = pair.value();
dispatcher.notify(from_actor, notification_type.clone())
})
}
/// Broadcasts the given message in according with the specified target.
pub(crate) fn broadcast_message(&self, target: BroadcastTarget, message: &Arc<SignedMessage>) {
let mut acked_dispatchers: Vec<DispatcherType> = Vec::new();
match target {
BroadcastTarget::All => self
.dispatchers
.iter()
.map(|pair| pair.key().name().into())
.for_each(|group_name| acked_dispatchers.push(group_name)),
BroadcastTarget::Group(name) => {
let target_dispatcher = name.into();
acked_dispatchers.push(target_dispatcher);
}
}
for dispatcher_type in acked_dispatchers {
match self.dispatchers.get(&dispatcher_type) {
Some(pair) => {
let dispatcher = pair.value();
dispatcher.broadcast_message(&message.clone());
}
// TODO: Put the message into the dead queue
None => {
let name = dispatcher_type.name();
warn!(
"The message can't be delivered to the group with the '{}' name.",
name
);
}
}
}
}
/// Adds dispatcher to the global registry.
pub(crate) fn register_dispatcher(&self, dispatcher: &Arc<Box<Dispatcher>>) {
let dispatcher_type = dispatcher.dispatcher_type();
let is_registered = self.dispatchers.contains_key(&dispatcher_type.clone());
if is_registered && dispatcher_type != DispatcherType::Anonymous {
warn!(
"The dispatcher with the '{:?}' name already registered in the cluster.",
dispatcher_type
);
return;
}
let instance = dispatcher.clone();
self.dispatchers.insert(dispatcher_type, instance);
}
/// Removes dispatcher from the global registry.
pub(crate) fn remove_dispatcher(&self, dispatcher: &Arc<Box<Dispatcher>>) {
self.dispatchers.remove(&dispatcher.dispatcher_type());
}
}
#[cfg(test)]
mod tests {
use crate::child_ref::ChildRef;
use crate::context::BastionId;
use crate::dispatcher::*;
use crate::envelope::{RefAddr, SignedMessage};
use crate::message::Msg;
use crate::path::BastionPath;
use futures::channel::mpsc;
use std::sync::{Arc, Mutex};
#[derive(Clone)]
struct CustomHandler {
called: Arc<Mutex<bool>>,
}
impl CustomHandler {
pub fn new(value: bool) -> Self {
CustomHandler {
called: Arc::new(Mutex::new(value)),
}
}
pub fn was_called(&self) -> bool {
*self.called.clone().lock().unwrap()
}
}
impl DispatcherHandler for CustomHandler {
fn notify(
&self,
_from_child: &ChildRef,
_entries: &DispatcherMap,
_notification_type: NotificationType,
) {
let handler_field_ref = self.called.clone();
let mut data = handler_field_ref.lock().unwrap();
*data = true;
}
fn broadcast_message(&self, _entries: &DispatcherMap, _message: &Arc<SignedMessage>) {
let handler_field_ref = self.called.clone();
let mut data = handler_field_ref.lock().unwrap();
*data = true;
}
}
#[test]
fn test_get_dispatcher_type_as_anonymous() {
let instance = Dispatcher::default();
assert_eq!(instance.dispatcher_type(), DispatcherType::Anonymous);
}
#[test]
fn test_get_dispatcher_type_as_named() {
let name = "test_group".to_string();
let dispatcher_type = DispatcherType::Named(name.clone());
let instance = Dispatcher::with_type(dispatcher_type.clone());
assert_eq!(instance.dispatcher_type(), dispatcher_type);
}
#[test]
fn test_local_dispatcher_append_child_ref() {
let instance = Dispatcher::default();
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
assert_eq!(instance.actors.contains_key(&child_ref), false);
instance.register(&child_ref, "my::test::module".to_string());
assert_eq!(instance.actors.contains_key(&child_ref), true);
}
#[test]
fn test_dispatcher_remove_child_ref() {
let instance = Dispatcher::default();
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
instance.register(&child_ref, "my::test::module".to_string());
assert_eq!(instance.actors.contains_key(&child_ref), true);
instance.remove(&child_ref);
assert_eq!(instance.actors.contains_key(&child_ref), false);
}
#[test]
fn test_local_dispatcher_notify() {
let handler = Box::new(CustomHandler::new(false));
let instance = Dispatcher::default().with_handler(handler.clone());
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
instance.notify(&child_ref, NotificationType::Register);
let handler_was_called = handler.was_called();
assert_eq!(handler_was_called, true);
}
#[test]
fn test_local_dispatcher_broadcast_message() {
let handler = Box::new(CustomHandler::new(false));
let instance = Dispatcher::default().with_handler(handler.clone());
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
const DATA: &'static str = "A message containing data (ask).";
let message = Arc::new(SignedMessage::new(
Msg::broadcast(DATA),
RefAddr::new(path, sender),
));
instance.broadcast_message(&message);
let handler_was_called = handler.was_called();
assert_eq!(handler_was_called, true);
}
#[test]
fn test_global_dispatcher_add_local_dispatcher() {
let dispatcher_type = DispatcherType::Named("test".to_string());
let local_dispatcher = Arc::new(Box::new(Dispatcher::with_type(dispatcher_type.clone())));
let global_dispatcher = GlobalDispatcher::new();
assert_eq!(
global_dispatcher.dispatchers.contains_key(&dispatcher_type),
false
);
global_dispatcher.register_dispatcher(&local_dispatcher);
assert_eq!(
global_dispatcher.dispatchers.contains_key(&dispatcher_type),
true
);
}
#[test]
fn test_global_dispatcher_remove_local_dispatcher() {
let dispatcher_type = DispatcherType::Named("test".to_string());
let local_dispatcher = Arc::new(Box::new(Dispatcher::with_type(dispatcher_type.clone())));
let global_dispatcher = GlobalDispatcher::new();
global_dispatcher.register_dispatcher(&local_dispatcher);
assert_eq!(
global_dispatcher.dispatchers.contains_key(&dispatcher_type),
true
);
global_dispatcher.remove_dispatcher(&local_dispatcher);
assert_eq!(
global_dispatcher.dispatchers.contains_key(&dispatcher_type),
false
);
}
#[test]
fn test_global_dispatcher_register_actor() {
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
let dispatcher_type = DispatcherType::Named("test".to_string());
let local_dispatcher = Arc::new(Box::new(Dispatcher::with_type(dispatcher_type.clone())));
let actor_groups = vec![dispatcher_type];
let module_name = "my::test::module".to_string();
let global_dispatcher = GlobalDispatcher::new();
global_dispatcher.register_dispatcher(&local_dispatcher);
assert_eq!(local_dispatcher.actors.contains_key(&child_ref), false);
global_dispatcher.register(&actor_groups, &child_ref, module_name);
assert_eq!(local_dispatcher.actors.contains_key(&child_ref), true);
}
#[test]
fn test_global_dispatcher_remove_actor() {
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
let dispatcher_type = DispatcherType::Named("test".to_string());
let local_dispatcher = Arc::new(Box::new(Dispatcher::with_type(dispatcher_type.clone())));
let actor_groups = vec![dispatcher_type];
let module_name = "my::test::module".to_string();
let global_dispatcher = GlobalDispatcher::new();
global_dispatcher.register_dispatcher(&local_dispatcher);
global_dispatcher.register(&actor_groups, &child_ref, module_name);
assert_eq!(local_dispatcher.actors.contains_key(&child_ref), true);
global_dispatcher.remove(&actor_groups, &child_ref);
assert_eq!(local_dispatcher.actors.contains_key(&child_ref), false);
}
#[test]
fn test_global_dispatcher_notify() {
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
let dispatcher_type = DispatcherType::Named("test".to_string());
let handler = Box::new(CustomHandler::new(false));
let local_dispatcher = Arc::new(Box::new(
Dispatcher::with_type(dispatcher_type.clone()).with_handler(handler.clone()),
));
let actor_groups = vec![dispatcher_type];
let module_name = "my::test::module".to_string();
let global_dispatcher = GlobalDispatcher::new();
global_dispatcher.register_dispatcher(&local_dispatcher);
global_dispatcher.register(&actor_groups, &child_ref, module_name);
global_dispatcher.notify(&child_ref, &actor_groups, NotificationType::Register);
let handler_was_called = handler.was_called();
assert_eq!(handler_was_called, true);
}
#[test]
fn test_global_dispatcher_broadcast_message() {
let bastion_id = BastionId::new();
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
let child_ref = ChildRef::new(bastion_id, sender, path);
let dispatcher_type = DispatcherType::Named("test".to_string());
let handler = Box::new(CustomHandler::new(false));
let local_dispatcher = Arc::new(Box::new(
Dispatcher::with_type(dispatcher_type.clone()).with_handler(handler.clone()),
));
let actor_groups = vec![dispatcher_type];
let module_name = "my::test::module".to_string();
let global_dispatcher = GlobalDispatcher::new();
global_dispatcher.register_dispatcher(&local_dispatcher);
global_dispatcher.register(&actor_groups, &child_ref, module_name);
let (sender, _) = mpsc::unbounded();
let path = Arc::new(BastionPath::root());
const DATA: &'static str = "A message containing data (ask).";
let message = Arc::new(SignedMessage::new(
Msg::broadcast(DATA),
RefAddr::new(path, sender),
));
global_dispatcher.broadcast_message(BroadcastTarget::Group("".to_string()), &message);
let handler_was_called = handler.was_called();
assert_eq!(handler_was_called, true);
}
}
| 35.031397 | 99 | 0.625274 |
3a2e67e952313cb0f0a450b9ed6a26c5730d0583 | 1,942 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
config::global::{Config, Entry},
errors::*,
tests::parse_each_line_as,
};
#[test]
fn parse_account() {
for s in &[
"//! account: alice",
"//!account: bob",
"//! account: bob, 100",
"//!account:alice,",
// TODO: The following should parse. Fix it.
// "//! account :alice,1, 2",
"//! account: bob, 0, 0",
] {
s.parse::<Entry>().unwrap();
}
for s in &["//! account:", "//! account", "//! account: alice, 1, 2, 3"] {
s.parse::<Entry>().unwrap_err();
}
}
/// Parses each line in the given input as an entry and build global config.
pub fn parse_and_build_config(s: &str) -> Result<Config> {
Config::build(&parse_each_line_as::<Entry>(s)?)
}
#[rustfmt::skip]
#[test]
fn build_global_config_1() {
let config = parse_and_build_config(r"
//! account: Alice,
//! account: bob, 2000, 10
").unwrap();
assert!(config.accounts.len() == 3);
assert!(config.accounts.contains_key("default"));
assert!(config.accounts.contains_key("alice"));
let bob = config.accounts.get("bob").unwrap();
assert!(bob.balance() == 2000);
assert!(bob.sequence_number() == 10);
}
#[test]
fn build_global_config_2() {
let config = parse_and_build_config("").unwrap();
assert!(config.accounts.len() == 1);
assert!(config.accounts.contains_key("default"));
}
#[rustfmt::skip]
#[test]
fn build_global_config_3() {
parse_and_build_config(r"
//! account: bob
//! account: BOB
").unwrap_err();
}
#[rustfmt::skip]
#[test]
fn build_global_config_4() {
let config = parse_and_build_config(r"
//! account: default, 50,
").unwrap();
assert!(config.accounts.len() == 1);
let default = config.accounts.get("default").unwrap();
assert!(default.balance() == 50);
}
| 24.897436 | 78 | 0.587024 |
aba6eb26d6d7e1da5d1148cb4eb14d814581f934 | 5,637 | #[doc = r" Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - Start TWI receive sequence"]
pub tasks_startrx: TASKS_STARTRX,
_reserved0: [u8; 4usize],
#[doc = "0x08 - Start TWI transmit sequence"]
pub tasks_starttx: TASKS_STARTTX,
_reserved1: [u8; 8usize],
#[doc = "0x14 - Stop TWI transaction"]
pub tasks_stop: TASKS_STOP,
_reserved2: [u8; 4usize],
#[doc = "0x1c - Suspend TWI transaction"]
pub tasks_suspend: TASKS_SUSPEND,
#[doc = "0x20 - Resume TWI transaction"]
pub tasks_resume: TASKS_RESUME,
_reserved3: [u8; 224usize],
#[doc = "0x104 - TWI stopped"]
pub events_stopped: EVENTS_STOPPED,
#[doc = "0x108 - TWI RXD byte received"]
pub events_rxdready: EVENTS_RXDREADY,
_reserved4: [u8; 16usize],
#[doc = "0x11c - TWI TXD byte sent"]
pub events_txdsent: EVENTS_TXDSENT,
_reserved5: [u8; 4usize],
#[doc = "0x124 - TWI error"]
pub events_error: EVENTS_ERROR,
_reserved6: [u8; 16usize],
#[doc = "0x138 - TWI byte boundary, generated before each byte that is sent or received"]
pub events_bb: EVENTS_BB,
_reserved7: [u8; 12usize],
#[doc = "0x148 - TWI entered the suspended state"]
pub events_suspended: EVENTS_SUSPENDED,
_reserved8: [u8; 180usize],
#[doc = "0x200 - Shortcut register"]
pub shorts: SHORTS,
_reserved9: [u8; 256usize],
#[doc = "0x304 - Enable interrupt"]
pub intenset: INTENSET,
#[doc = "0x308 - Disable interrupt"]
pub intenclr: INTENCLR,
_reserved10: [u8; 440usize],
#[doc = "0x4c4 - Error source"]
pub errorsrc: ERRORSRC,
_reserved11: [u8; 56usize],
#[doc = "0x500 - Enable TWI"]
pub enable: ENABLE,
_reserved12: [u8; 4usize],
#[doc = "0x508 - Unspecified"]
pub psel: PSEL,
_reserved13: [u8; 8usize],
#[doc = "0x518 - RXD register"]
pub rxd: RXD,
#[doc = "0x51c - TXD register"]
pub txd: TXD,
_reserved14: [u8; 4usize],
#[doc = "0x524 - TWI frequency. Accuracy depends on the HFCLK source selected."]
pub frequency: FREQUENCY,
_reserved15: [u8; 96usize],
#[doc = "0x588 - Address used in the TWI transfer"]
pub address: ADDRESS,
}
#[doc = r" Register block"]
#[repr(C)]
pub struct PSEL {
#[doc = "0x00 - Pin select for SCL"]
pub scl: self::psel::SCL,
#[doc = "0x04 - Pin select for SDA"]
pub sda: self::psel::SDA,
}
#[doc = r" Register block"]
#[doc = "Unspecified"]
pub mod psel;
#[doc = "Start TWI receive sequence"]
pub struct TASKS_STARTRX {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Start TWI receive sequence"]
pub mod tasks_startrx;
#[doc = "Start TWI transmit sequence"]
pub struct TASKS_STARTTX {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Start TWI transmit sequence"]
pub mod tasks_starttx;
#[doc = "Stop TWI transaction"]
pub struct TASKS_STOP {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Stop TWI transaction"]
pub mod tasks_stop;
#[doc = "Suspend TWI transaction"]
pub struct TASKS_SUSPEND {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Suspend TWI transaction"]
pub mod tasks_suspend;
#[doc = "Resume TWI transaction"]
pub struct TASKS_RESUME {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Resume TWI transaction"]
pub mod tasks_resume;
#[doc = "TWI stopped"]
pub struct EVENTS_STOPPED {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI stopped"]
pub mod events_stopped;
#[doc = "TWI RXD byte received"]
pub struct EVENTS_RXDREADY {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI RXD byte received"]
pub mod events_rxdready;
#[doc = "TWI TXD byte sent"]
pub struct EVENTS_TXDSENT {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI TXD byte sent"]
pub mod events_txdsent;
#[doc = "TWI error"]
pub struct EVENTS_ERROR {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI error"]
pub mod events_error;
#[doc = "TWI byte boundary, generated before each byte that is sent or received"]
pub struct EVENTS_BB {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI byte boundary, generated before each byte that is sent or received"]
pub mod events_bb;
#[doc = "TWI entered the suspended state"]
pub struct EVENTS_SUSPENDED {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI entered the suspended state"]
pub mod events_suspended;
#[doc = "Shortcut register"]
pub struct SHORTS {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Shortcut register"]
pub mod shorts;
#[doc = "Enable interrupt"]
pub struct INTENSET {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Enable interrupt"]
pub mod intenset;
#[doc = "Disable interrupt"]
pub struct INTENCLR {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Disable interrupt"]
pub mod intenclr;
#[doc = "Error source"]
pub struct ERRORSRC {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Error source"]
pub mod errorsrc;
#[doc = "Enable TWI"]
pub struct ENABLE {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Enable TWI"]
pub mod enable;
#[doc = "RXD register"]
pub struct RXD {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "RXD register"]
pub mod rxd;
#[doc = "TXD register"]
pub struct TXD {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TXD register"]
pub mod txd;
#[doc = "TWI frequency. Accuracy depends on the HFCLK source selected."]
pub struct FREQUENCY {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI frequency. Accuracy depends on the HFCLK source selected."]
pub mod frequency;
#[doc = "Address used in the TWI transfer"]
pub struct ADDRESS {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Address used in the TWI transfer"]
pub mod address;
| 29.056701 | 93 | 0.665247 |
4a4b030f89d353bd4f5894afec9bb7a49cb98dff | 7,917 | //! Functions that are used to classify an element from its definition or reference.
use hir::{FromSource, Module, ModuleSource, Path, PathResolution, Source, SourceAnalyzer};
use ra_prof::profile;
use ra_syntax::{ast, match_ast, AstNode};
use test_utils::tested_by;
use super::{
name_definition::{from_assoc_item, from_module_def, from_struct_field},
NameDefinition, NameKind,
};
use crate::db::RootDatabase;
/// Determines what kind of definition a `Name` node introduces.
///
/// The parent syntax node of the name decides the classification: a binding
/// pattern yields a local, a record field yields a struct field, `mod` yields
/// a module, item definitions yield module-level defs (or associated items
/// when nested inside an `ItemList`, i.e. an impl/trait body), and
/// `macro_rules!` calls yield macros.
///
/// Returns `None` if the parent is not a recognized definition-introducing
/// node or if the corresponding HIR lookup fails.
pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option<NameDefinition> {
    let _p = profile("classify_name");
    // Everything below dispatches on the syntactic parent of the name.
    let parent = name.value.syntax().parent()?;

    match_ast! {
        match parent {
            ast::BindPat(bind_pat) => {
                // A binding pattern introduces a local variable.
                let local = hir::Local::from_source(db, name.with_value(bind_pat))?;
                Some(NameDefinition {
                    visibility: None,
                    container: local.module(db),
                    kind: NameKind::Local(local),
                })
            },
            ast::RecordFieldDef(field_def) => {
                // Named field of a record struct/enum-variant.
                let src = name.with_value(hir::FieldSource::Named(field_def));
                let field = hir::StructField::from_source(db, src)?;
                Some(from_struct_field(db, field))
            },
            ast::Module(module) => {
                // `mod foo;` (has a semicolon) is an out-of-line declaration;
                // `mod foo { .. }` is an inline definition.
                let def = if module.has_semi() {
                    hir::Module::from_declaration(db, name.with_value(module))
                } else {
                    let module_src = hir::ModuleSource::Module(module);
                    hir::Module::from_definition(db, name.with_value(module_src))
                }?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::StructDef(def) => {
                let def = hir::Struct::from_source(db, name.with_value(def))?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::EnumDef(def) => {
                let def = hir::Enum::from_source(db, name.with_value(def))?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::TraitDef(def) => {
                let def = hir::Trait::from_source(db, name.with_value(def))?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::StaticDef(def) => {
                let def = hir::Static::from_source(db, name.with_value(def))?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::EnumVariant(def) => {
                let def = hir::EnumVariant::from_source(db, name.with_value(def))?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::FnDef(def) => {
                let def = hir::Function::from_source(db, name.with_value(def))?;
                // Inside an `ItemList` (impl or trait body) this is an
                // associated item; otherwise a free module-level item.
                let in_item_list = parent.parent().and_then(ast::ItemList::cast).is_some();
                if in_item_list {
                    Some(from_assoc_item(db, def.into()))
                } else {
                    Some(from_module_def(db, def.into(), None))
                }
            },
            ast::ConstDef(def) => {
                let def = hir::Const::from_source(db, name.with_value(def))?;
                // Same associated-vs-free distinction as for functions.
                let in_item_list = parent.parent().and_then(ast::ItemList::cast).is_some();
                if in_item_list {
                    Some(from_assoc_item(db, def.into()))
                } else {
                    Some(from_module_def(db, def.into(), None))
                }
            },
            ast::TypeAliasDef(def) => {
                let def = hir::TypeAlias::from_source(db, name.with_value(def))?;
                // Same associated-vs-free distinction as for functions.
                let in_item_list = parent.parent().and_then(ast::ItemList::cast).is_some();
                if in_item_list {
                    Some(from_assoc_item(db, def.into()))
                } else {
                    Some(from_module_def(db, def.into(), None))
                }
            },
            ast::MacroCall(mac) => {
                // `macro_rules!` definitions show up as macro calls whose
                // name is the defined macro.
                let mac_src = name.with_value(mac);
                let def = hir::MacroDef::from_source(db, mac_src.clone())?;
                // Macros carry no visibility; resolve the containing module
                // from the surrounding syntax instead.
                let module_src =
                    ModuleSource::from_child_node(db, mac_src.as_ref().map(|call| call.syntax()));
                let module = Module::from_definition(db, mac_src.with_value(module_src))?;
                Some(NameDefinition {
                    visibility: None,
                    container: module,
                    kind: NameKind::Macro(def),
                })
            },
            _ => None,
        }
    }
}
/// Resolves an `ast::NameRef` (a *use* site) to the definition it refers to.
///
/// Tries increasingly general resolutions in order: method calls, field
/// accesses, record-literal fields, macro calls, and finally plain path
/// resolution via the `SourceAnalyzer`.
pub(crate) fn classify_name_ref(
    db: &RootDatabase,
    name_ref: Source<&ast::NameRef>,
) -> Option<NameDefinition> {
    let _p = profile("classify_name_ref");
    let parent = name_ref.value.syntax().parent()?;
    let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None);
    // `foo.bar()` — resolve to the called method.
    if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
        tested_by!(goto_definition_works_for_methods);
        if let Some(func) = analyzer.resolve_method_call(&method_call) {
            return Some(from_assoc_item(db, func.into()));
        }
    }
    // `foo.bar` — resolve to the struct field.
    if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
        tested_by!(goto_definition_works_for_fields);
        if let Some(field) = analyzer.resolve_field(&field_expr) {
            return Some(from_struct_field(db, field));
        }
    }
    // `Foo { bar: .. }` — resolve a field inside a record literal by looking
    // the identifier up on the literal's variant definition.
    if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
        tested_by!(goto_definition_works_for_record_fields);
        if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) {
            let variant_def = analyzer.resolve_record_literal(&record_lit)?;
            let hir_path = Path::from_name_ref(name_ref.value);
            let hir_name = hir_path.as_ident()?;
            let field = variant_def.field(db, hir_name)?;
            return Some(from_struct_field(db, field));
        }
    }
    let ast = ModuleSource::from_child_node(db, name_ref.with_value(&parent));
    // FIXME: find correct container and visibility for each case
    let container = Module::from_definition(db, name_ref.with_value(ast))?;
    let visibility = None;
    // `foo!(..)` — resolve the macro being invoked.
    if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
        tested_by!(goto_definition_works_for_macros);
        if let Some(macro_def) = analyzer.resolve_macro_call(db, &macro_call) {
            let kind = NameKind::Macro(macro_def);
            return Some(NameDefinition { kind, container, visibility });
        }
    }
    // Fallback: treat the name as (part of) a path and resolve that.
    let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
    let resolved = analyzer.resolve_path(db, &path)?;
    match resolved {
        PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
        PathResolution::AssocItem(item) => Some(from_assoc_item(db, item)),
        PathResolution::Local(local) => {
            let container = local.module(db);
            let kind = NameKind::Local(local);
            Some(NameDefinition { kind, container, visibility: None })
        }
        PathResolution::GenericParam(par) => {
            // FIXME: get generic param def
            let kind = NameKind::GenericParam(par);
            Some(NameDefinition { kind, container, visibility })
        }
        PathResolution::Macro(def) => {
            let kind = NameKind::Macro(def);
            Some(NameDefinition { kind, container, visibility })
        }
        PathResolution::SelfType(impl_block) => {
            let ty = impl_block.target_ty(db);
            let kind = NameKind::SelfType(ty);
            let container = impl_block.module(db);
            Some(NameDefinition { kind, container, visibility })
        }
    }
}
| 41.234375 | 103 | 0.536693 |
56382e56d7597b39530c8aa30eccac2c2b50f0da | 10,592 | /*
Copyright The containerd Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use log::{debug, info, warn};
use tokio::sync::mpsc::Sender;
use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard};
use containerd_shim_protos::api::DeleteResponse;
use containerd_shim_protos::events::task::{
TaskCreate, TaskDelete, TaskExecAdded, TaskExecStarted, TaskIO, TaskStart,
};
use containerd_shim_protos::protobuf::{Message, SingularPtrField};
use containerd_shim_protos::shim_async::Task;
use containerd_shim_protos::ttrpc;
use containerd_shim_protos::ttrpc::r#async::TtrpcContext;
use crate::api::{
CreateTaskRequest, CreateTaskResponse, DeleteRequest, Empty, ExecProcessRequest, KillRequest,
ResizePtyRequest, ShutdownRequest, StartRequest, StartResponse, StateRequest, StateResponse,
Status, WaitRequest, WaitResponse,
};
use crate::asynchronous::container::{Container, ContainerFactory};
use crate::asynchronous::ExitSignal;
use crate::event::Event;
use crate::util::{convert_to_timestamp, AsOption};
use crate::TtrpcResult;
// (topic, event payload) pairs consumed by the event publisher.
type EventSender = Sender<(String, Box<dyn Message>)>;
/// TaskService is a Task template struct, it is considered a helper struct,
/// which has already implemented `Task` trait, so that users can make it the type `T`
/// parameter of `Service`, and implements their own `ContainerFactory` and `Container`.
pub struct TaskService<F, C> {
    // Creates and cleans up concrete containers; see `ContainerFactory`.
    pub factory: F,
    // Live containers keyed by container id, guarded by an async mutex.
    pub containers: Arc<Mutex<HashMap<String, C>>>,
    // containerd namespace this service operates in.
    pub namespace: String,
    // Signalled by `shutdown` once no containers remain.
    pub exit: Arc<ExitSignal>,
    // Outgoing channel for containerd task events.
    pub tx: EventSender,
}
impl<F, C> TaskService<F, C>
where
F: Default,
{
pub fn new(ns: &str, exit: Arc<ExitSignal>, tx: EventSender) -> Self {
Self {
factory: Default::default(),
containers: Arc::new(Mutex::new(Default::default())),
namespace: ns.to_string(),
exit,
tx,
}
}
}
impl<F, C> TaskService<F, C> {
    /// Returns a guard over the container with the given id, or a ttrpc
    /// `NOT_FOUND` status when no such container is registered.
    pub async fn get_container(&self, id: &str) -> TtrpcResult<MappedMutexGuard<'_, C>> {
        let containers = self.containers.lock().await;
        // Single fallible lookup: the original code looked the id up twice
        // (once for the error path, once inside `MutexGuard::map` with an
        // `unwrap()`); `try_map` does the same in one pass with no unwrap.
        MutexGuard::try_map(containers, |m| m.get_mut(id)).map_err(|_| {
            ttrpc::Error::RpcStatus(ttrpc::get_status(
                ttrpc::Code::NOT_FOUND,
                format!("can not find container by id {}", id),
            ))
        })
    }
    /// Publishes an event on the event channel; failures are logged and
    /// otherwise ignored (event delivery is best-effort).
    pub async fn send_event(&self, event: impl Event) {
        let topic = event.topic();
        self.tx
            .send((topic.to_string(), Box::new(event)))
            .await
            .unwrap_or_else(|e| warn!("send {} to publisher: {}", topic, e));
    }
}
#[async_trait]
impl<F, C> Task for TaskService<F, C>
where
    F: ContainerFactory<C> + Sync + Send,
    C: Container + Sync + Send + 'static,
{
    /// Reports the status of a container, or of a single exec process when
    /// `exec_id` is non-empty.
    async fn state(&self, _ctx: &TtrpcContext, req: StateRequest) -> TtrpcResult<StateResponse> {
        let container = self.get_container(req.get_id()).await?;
        let exec_id = req.get_exec_id().as_option();
        let resp = container.state(exec_id).await?;
        Ok(resp)
    }
    /// Creates a container via the factory, registers it in the map and
    /// publishes a `TaskCreate` event. Returns the container's pid.
    async fn create(
        &self,
        _ctx: &TtrpcContext,
        req: CreateTaskRequest,
    ) -> TtrpcResult<CreateTaskResponse> {
        info!("Create request for {:?}", &req);
        // Note: Get containers here is for getting the lock,
        // to make sure no other threads manipulate the containers metadata;
        let mut containers = self.containers.lock().await;
        let ns = self.namespace.as_str();
        let id = req.id.as_str();
        let container = self.factory.create(ns, &req).await?;
        let mut resp = CreateTaskResponse::new();
        let pid = container.pid().await as u32;
        resp.pid = pid;
        containers.insert(id.to_string(), container);
        self.send_event(TaskCreate {
            container_id: req.id.to_string(),
            bundle: req.bundle.to_string(),
            rootfs: req.rootfs,
            io: SingularPtrField::some(TaskIO {
                stdin: req.stdin.to_string(),
                stdout: req.stdout.to_string(),
                stderr: req.stderr.to_string(),
                terminal: req.terminal,
                unknown_fields: Default::default(),
                cached_size: Default::default(),
            }),
            checkpoint: req.checkpoint.to_string(),
            pid,
            ..Default::default()
        })
        .await;
        info!("Create request for {} returns pid {}", id, resp.pid);
        Ok(resp)
    }
    /// Starts the container's init process (empty `exec_id`) or a previously
    /// added exec process, emitting `TaskStart`/`TaskExecStarted` accordingly.
    async fn start(&self, _ctx: &TtrpcContext, req: StartRequest) -> TtrpcResult<StartResponse> {
        info!("Start request for {:?}", &req);
        let mut container = self.get_container(req.get_id()).await?;
        let pid = container.start(req.exec_id.as_str().as_option()).await?;
        let mut resp = StartResponse::new();
        resp.pid = pid as u32;
        if req.exec_id.is_empty() {
            self.send_event(TaskStart {
                container_id: req.id.to_string(),
                pid: pid as u32,
                ..Default::default()
            })
            .await;
        } else {
            self.send_event(TaskExecStarted {
                container_id: req.id.to_string(),
                exec_id: req.exec_id.to_string(),
                pid: pid as u32,
                ..Default::default()
            })
            .await;
        };
        info!("Start request for {:?} returns pid {}", req, resp.get_pid());
        Ok(resp)
    }
    /// Deletes a container (empty `exec_id`) or one exec process, publishing
    /// a `TaskDelete` event with the exit status and timestamp.
    async fn delete(&self, _ctx: &TtrpcContext, req: DeleteRequest) -> TtrpcResult<DeleteResponse> {
        info!("Delete request for {:?}", &req);
        // Hold the map lock for the whole operation so the removal below is
        // atomic with respect to concurrent create/get calls.
        let mut containers = self.containers.lock().await;
        let container = containers.get_mut(req.get_id()).ok_or_else(|| {
            ttrpc::Error::RpcStatus(ttrpc::get_status(
                ttrpc::Code::NOT_FOUND,
                format!("can not find container by id {}", req.get_id()),
            ))
        })?;
        let id = container.id().await;
        let exec_id_opt = req.get_exec_id().as_option();
        let (pid, exit_status, exited_at) = container.delete(exec_id_opt).await?;
        // NOTE(review): factory cleanup runs even when only an exec process
        // (not the whole container) is deleted — confirm this is intentional.
        self.factory.cleanup(&*self.namespace, container).await?;
        // Only a container-level delete removes the map entry.
        if req.get_exec_id().is_empty() {
            containers.remove(req.get_id());
        }
        let ts = convert_to_timestamp(exited_at);
        self.send_event(TaskDelete {
            container_id: id,
            pid: pid as u32,
            exit_status: exit_status as u32,
            exited_at: SingularPtrField::some(ts.clone()),
            ..Default::default()
        })
        .await;
        let mut resp = DeleteResponse::new();
        resp.set_exited_at(ts);
        resp.set_pid(pid as u32);
        resp.set_exit_status(exit_status as u32);
        info!(
            "Delete request for {} {} returns {:?}",
            req.get_id(),
            req.get_exec_id(),
            resp
        );
        Ok(resp)
    }
    /// Sends `signal` to the container's init process or one exec process;
    /// `all` forwards the request's process-group semantics to the container.
    async fn kill(&self, _ctx: &TtrpcContext, req: KillRequest) -> TtrpcResult<Empty> {
        info!("Kill request for {:?}", req);
        let mut container = self.get_container(req.get_id()).await?;
        container
            .kill(req.get_exec_id().as_option(), req.signal, req.all)
            .await?;
        info!("Kill request for {:?} returns successfully", req);
        Ok(Empty::new())
    }
    /// Registers an additional exec process on a running container and emits
    /// `TaskExecAdded`. The process is not started until `start` is called.
    async fn exec(&self, _ctx: &TtrpcContext, req: ExecProcessRequest) -> TtrpcResult<Empty> {
        info!("Exec request for {:?}", req);
        let exec_id = req.get_exec_id().to_string();
        let mut container = self.get_container(req.get_id()).await?;
        container.exec(req).await?;
        self.send_event(TaskExecAdded {
            container_id: container.id().await,
            exec_id,
            ..Default::default()
        })
        .await;
        Ok(Empty::new())
    }
    /// Resizes the pty of the targeted process to `height` x `width`.
    async fn resize_pty(&self, _ctx: &TtrpcContext, req: ResizePtyRequest) -> TtrpcResult<Empty> {
        debug!(
            "Resize pty request for container {}, exec_id: {}",
            &req.id, &req.exec_id
        );
        let mut container = self.get_container(req.get_id()).await?;
        container
            .resize_pty(req.get_exec_id().as_option(), req.height, req.width)
            .await?;
        Ok(Empty::new())
    }
    /// Blocks until the requested process exits, then returns its exit code
    /// and timestamp. Returns immediately if the process already finished.
    async fn wait(&self, _ctx: &TtrpcContext, req: WaitRequest) -> TtrpcResult<WaitResponse> {
        info!("Wait request for {:?}", req);
        let exec_id = req.exec_id.as_str().as_option();
        let wait_rx = {
            // Scope the container guard so the lock is released before we
            // await the exit notification below.
            let mut container = self.get_container(req.get_id()).await?;
            let state = container.state(exec_id).await?;
            if state.status != Status::RUNNING && state.status != Status::CREATED {
                let mut resp = WaitResponse::new();
                resp.exit_status = state.exit_status;
                resp.exited_at = state.exited_at;
                info!("Wait request for {:?} returns {:?}", req, &resp);
                return Ok(resp);
            }
            container
                .wait_channel(req.get_exec_id().as_option())
                .await?
        };
        wait_rx.await.unwrap_or_default();
        // get lock again.
        let container = self.get_container(req.get_id()).await?;
        let (_, code, exited_at) = container.get_exit_info(exec_id).await?;
        let mut resp = WaitResponse::new();
        resp.exit_status = code as u32;
        let ts = convert_to_timestamp(exited_at);
        resp.exited_at = SingularPtrField::some(ts);
        info!("Wait request for {:?} returns {:?}", req, &resp);
        Ok(resp)
    }
    /// Signals the shim to exit, but only when no containers remain; with
    /// live containers the request is acknowledged without side effects.
    async fn shutdown(&self, _ctx: &TtrpcContext, _req: ShutdownRequest) -> TtrpcResult<Empty> {
        debug!("Shutdown request");
        let containers = self.containers.lock().await;
        if containers.len() > 0 {
            return Ok(Empty::new());
        }
        self.exit.signal();
        Ok(Empty::default())
    }
}
| 35.783784 | 100 | 0.59054 |
143c8aaaa749ba132966b35ffcd8b4eae30c72a5 | 336 | #[doc = "Reader of register RESPCMD"]
pub type R = crate::R<u32, super::RESPCMD>;
#[doc = "Reader of field `RESPCMD`"]
pub type RESPCMD_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:5 - Response command index"]
    #[inline(always)]
    pub fn respcmd(&self) -> RESPCMD_R {
        // 0x3f keeps the low six bits (0..=5), matching the field width
        // documented above.
        RESPCMD_R::new((self.bits & 0x3f) as u8)
    }
}
| 28 | 48 | 0.604167 |
d5fffc7ef9bc1d013f69b8d035b57b890378fd38 | 368 | // run-pass
#![allow(unused_variables)]
// ignore-emscripten no threads support
use std::thread;
use std::mem;
fn main() {
    let y = 0u8;
    let add_y = move |x: u8| x + y;
    // `move` captures `y` by value, so the closure is exactly one byte —
    // the size of the captured u8.
    assert_eq!(mem::size_of_val(&add_y), 1);
    let worker = thread::spawn(move || {
        // Moving the closure into the thread proves it is `Send` + 'static.
        let _captured = add_y;
    });
    worker.join().ok().unwrap();
}
| 19.368421 | 54 | 0.605978 |
16dc1fe877cfc87b818b841643eabf9eeed2a585 | 4,776 | use std::fmt::Debug;
use std::fmt;
use crate::model::channel::Message;
use crate::client::Context;
use crate::framework::standard::{Args, CommandOptions};
/// This type describes why a check has failed and occurs on
/// [`CheckResult::Failure`].
///
/// **Note**:
/// The bot-developer is supposed to process this `enum`, as the framework does not.
/// It solely serves as a way to inform a user about why a check
/// has failed and for the developer to log given failure (e.g. bugs or statistics)
/// occurring in [`Check`]s.
///
/// [`Check`]: struct.Check.html
/// [`CheckResult::Failure`]: enum.CheckResult.html#variant.Failure
#[derive(Clone, Debug)]
pub enum Reason {
    /// No information on the failure.
    Unknown,
    /// Information dedicated to the user.
    User(String),
    /// Information purely for logging purposes.
    Log(String),
    /// Information for the user but also for logging purposes.
    UserAndLog { user: String, log: String },
    // Hidden variant reserving the right to add variants without a
    // semver-breaking change; never construct or match on it.
    #[doc(hidden)]
    __Nonexhaustive,
}
/// Returned from [`Check`]s.
/// If `Success`, the [`Check`] is considered as passed.
/// If `Failure`, the [`Check`] is considered as failed and can return further
/// information on the cause via [`Reason`].
///
/// [`Check`]: struct.Check.html
/// [`Reason`]: enum.Reason.html
#[derive(Clone, Debug)]
pub enum CheckResult {
    /// The check passed.
    Success,
    /// The check failed; the [`Reason`] explains why.
    Failure(Reason),
}
impl CheckResult {
    /// Builds a failed result carrying a user-facing message
    /// ([`Reason::User`]).
    pub fn new_user<D>(d: D) -> Self
        where D: fmt::Display {
        Self::Failure(Reason::User(d.to_string()))
    }
    /// Builds a failed result carrying a log-only message
    /// ([`Reason::Log`]).
    pub fn new_log<D>(d: D) -> Self
        where D: fmt::Display {
        Self::Failure(Reason::Log(d.to_string()))
    }
    /// Builds a failed result with no further information
    /// ([`Reason::Unknown`]).
    pub fn new_unknown() -> Self {
        Self::Failure(Reason::Unknown)
    }
    /// Builds a failed result with distinct user-facing and log messages
    /// ([`Reason::UserAndLog`]).
    pub fn new_user_and_log<D>(user: D, log: D) -> Self
        where D: fmt::Display {
        Self::Failure(Reason::UserAndLog {
            user: user.to_string(),
            log: log.to_string(),
        })
    }
    /// Returns `true` exactly when this result is [`CheckResult::Success`].
    pub fn is_success(&self) -> bool {
        match self {
            Self::Success => true,
            _ => false,
        }
    }
}
impl From<bool> for CheckResult {
    /// `true` maps to `Success`; `false` to a `Failure` with no reason.
    fn from(succeeded: bool) -> Self {
        match succeeded {
            true => CheckResult::Success,
            false => CheckResult::Failure(Reason::Unknown),
        }
    }
}
impl From<Reason> for CheckResult {
fn from(reason: Reason) -> Self {
CheckResult::Failure(reason)
}
}
/// Signature of a check: inspects the invocation context, the triggering
/// message, the parsed arguments and the command's options, and reports
/// whether the invocation is permitted.
pub type CheckFunction = fn(&mut Context, &Message, &mut Args, &CommandOptions) -> CheckResult;
/// A check can be part of a command or group and will be executed to
/// determine whether a user is permitted to use the related item.
///
/// Additionally, a check may hold additional settings controlling how the
/// help-system evaluates and displays it.
pub struct Check {
    /// Name listed in help-system.
    pub name: &'static str,
    /// Function that will be executed.
    pub function: CheckFunction,
    /// Whether a check should be evaluated in the help-system.
    /// `false` will ignore check and won't fail execution.
    pub check_in_help: bool,
    /// Whether a check shall be listed in the help-system.
    /// `false` won't affect whether the check will be evaluated in help;
    /// solely `check_in_help` sets this.
    pub display_in_help: bool,
}
impl Debug for Check {
    // Manual impl because `CheckFunction` is a fn pointer; a placeholder
    // string is rendered instead of a pointer value.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Check")
            .field("name", &self.name)
            .field("function", &"<fn>")
            .field("check_in_help", &self.check_in_help)
            .field("display_in_help", &self.display_in_help)
            .finish()
    }
}
impl PartialEq for Check {
    // Equality is by name only; the function pointer and the help flags are
    // not compared.
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name
    }
}
| 31.84 | 95 | 0.623116 |
e845ea699135c64e011605875bda51ced7f60b7e | 8,277 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use std::str::FromStr;
use clap::Args;
use serde::Deserialize;
use serde::Serialize;
use crate::configs::Config;
pub const STORAGE_TYPE: &str = "STORAGE_TYPE";
pub const STORAGE_NUM_CPUS: &str = "STORAGE_NUM_CPUS";
// Disk Storage env.
pub const DISK_STORAGE_DATA_PATH: &str = "DISK_STORAGE_DATA_PATH";
pub const DISK_STORAGE_TEMP_DATA_PATH: &str = "DISK_STORAGE_TEMP_DATA_PATH";
// S3 Storage env.
const S3_STORAGE_REGION: &str = "S3_STORAGE_REGION";
const S3_STORAGE_ENDPOINT_URL: &str = "S3_STORAGE_ENDPOINT_URL";
const S3_STORAGE_ACCESS_KEY_ID: &str = "S3_STORAGE_ACCESS_KEY_ID";
const S3_STORAGE_SECRET_ACCESS_KEY: &str = "S3_STORAGE_SECRET_ACCESS_KEY";
const S3_STORAGE_ENABLE_POD_IAM_POLICY: &str = "S3_STORAGE_ENABLE_POD_IAM_POLICY";
const S3_STORAGE_BUCKET: &str = "S3_STORAGE_BUCKET";
const S3_STORAGE_ROOT: &str = "S3_STORAGE_ROOT";
// Azure Storage Blob env.
const AZURE_STORAGE_ACCOUNT: &str = "AZURE_STORAGE_ACCOUNT";
const AZURE_BLOB_MASTER_KEY: &str = "AZURE_BLOB_MASTER_KEY";
const AZURE_BLOB_CONTAINER: &str = "AZURE_BLOB_CONTAINER";
// Selected via the `storage_type` config string ("disk" | "s3" |
// "azure_storage_blob"); see the `FromStr` impl below for parsing.
#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub enum StorageType {
    Disk,
    S3,
    AzureStorageBlob,
}
// Implement the trait
impl FromStr for StorageType {
    type Err = &'static str;
    /// Parses the lowercase storage-type labels used in configuration.
    fn from_str(s: &str) -> std::result::Result<StorageType, &'static str> {
        let storage_type = match s {
            "disk" => StorageType::Disk,
            "s3" => StorageType::S3,
            "azure_storage_blob" => StorageType::AzureStorageBlob,
            _ => return Err("no match for storage type"),
        };
        Ok(storage_type)
    }
}
// Plain-`//` comments only: clap turns `///` field docs into CLI help text,
// so adding doc comments here would change the generated help output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Args)]
#[serde(default)]
pub struct DiskStorageConfig {
    /// Disk storage backend data path
    #[clap(long, env = DISK_STORAGE_DATA_PATH, default_value = "_data")]
    pub data_path: String,
    /// Disk storage temporary data path for external data
    #[clap(long, env = DISK_STORAGE_TEMP_DATA_PATH, default_value = "")]
    pub temp_data_path: String,
}
impl Default for DiskStorageConfig {
fn default() -> Self {
Self {
data_path: "_data".to_string(),
temp_data_path: "".to_string(),
}
}
}
// Note: `Debug` is hand-written below so that `access_key_id` and
// `secret_access_key` never appear in debug output.
#[derive(Clone, PartialEq, Serialize, Deserialize, Args)]
#[serde(default)]
pub struct S3StorageConfig {
    /// Region for S3 storage
    #[clap(long, env = S3_STORAGE_REGION, default_value = "")]
    pub region: String,
    /// Endpoint URL for S3 storage
    #[clap(long, env = S3_STORAGE_ENDPOINT_URL, default_value = "https://s3.amazonaws.com")]
    pub endpoint_url: String,
    // Access key for S3 storage
    #[clap(long, env = S3_STORAGE_ACCESS_KEY_ID, default_value = "")]
    pub access_key_id: String,
    /// Secret key for S3 storage
    #[clap(long, env = S3_STORAGE_SECRET_ACCESS_KEY, default_value = "")]
    pub secret_access_key: String,
    /// Use iam role service account token to access S3 resource
    #[clap(long, env = S3_STORAGE_ENABLE_POD_IAM_POLICY)]
    pub enable_pod_iam_policy: bool,
    /// S3 Bucket to use for storage
    #[clap(long, env = S3_STORAGE_BUCKET, default_value = "")]
    pub bucket: String,
    /// <bucket>/<root>
    #[clap(long, env = S3_STORAGE_ROOT, default_value = "")]
    pub root: String,
}
impl Default for S3StorageConfig {
fn default() -> Self {
Self {
region: "".to_string(),
endpoint_url: "https://s3.amazonaws.com".to_string(),
access_key_id: "".to_string(),
secret_access_key: "".to_string(),
enable_pod_iam_policy: false,
bucket: "".to_string(),
root: "".to_string(),
}
}
}
impl fmt::Debug for S3StorageConfig {
    // Credentials (access/secret keys) are deliberately left out of the
    // rendered output; only region, endpoint and bucket are shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{{s3.storage.region: \"{}\", s3.storage.endpoint_url: \"{}\", s3.storage.bucket: \"{}\", }}",
            self.region, self.endpoint_url, self.bucket
        )
    }
}
// Note: `Debug` is hand-written below so that `account` and `master_key`
// never appear in debug output.
#[derive(Clone, PartialEq, Serialize, Deserialize, Args)]
#[serde(default)]
pub struct AzureStorageBlobConfig {
    /// Account for Azure storage
    #[clap(long, env = AZURE_STORAGE_ACCOUNT, default_value = "")]
    pub account: String,
    /// Master key for Azure storage
    #[clap(long, env = AZURE_BLOB_MASTER_KEY, default_value = "")]
    pub master_key: String,
    /// Container for Azure storage
    #[clap(long, env = AZURE_BLOB_CONTAINER, default_value = "")]
    pub container: String,
}
impl Default for AzureStorageBlobConfig {
fn default() -> Self {
Self {
account: "".to_string(),
master_key: "".to_string(),
container: "".to_string(),
}
}
}
impl fmt::Debug for AzureStorageBlobConfig {
    // Only the container name is rendered; account and master key are
    // deliberately omitted from debug output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{{Azure.storage.container: \"{}\", }}", self.container)
    }
}
/// Storage config group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Args)]
#[serde(default)]
pub struct StorageConfig {
    /// Current storage type: disk|s3
    #[clap(long, env = STORAGE_TYPE, default_value = "disk")]
    pub storage_type: String,
    // CPU budget for storage; 0 presumably means "auto" — confirm with the
    // consumers of this setting. (`//` on purpose: a `///` doc comment here
    // would become clap help text.)
    #[clap(long, env = STORAGE_NUM_CPUS, default_value = "0")]
    pub storage_num_cpus: u64,
    // Disk storage backend config.
    #[clap(flatten)]
    pub disk: DiskStorageConfig,
    // S3 storage backend config.
    #[clap(flatten)]
    pub s3: S3StorageConfig,
    // azure storage blob config.
    #[clap(flatten)]
    pub azure_storage_blob: AzureStorageBlobConfig,
}
impl Default for StorageConfig {
fn default() -> Self {
Self {
storage_type: "disk".to_string(),
disk: DiskStorageConfig::default(),
s3: S3StorageConfig::default(),
azure_storage_blob: AzureStorageBlobConfig::default(),
storage_num_cpus: 0,
}
}
}
impl StorageConfig {
    /// Overlays configuration fields with values from process environment
    /// variables via the project `env_helper!` macro. Each call maps one
    /// field to one env var, matching the clap `env` attributes on the
    /// config structs.
    pub fn load_from_env(mut_config: &mut Config) {
        env_helper!(mut_config, storage, storage_type, String, STORAGE_TYPE);
        env_helper!(mut_config, storage, storage_num_cpus, u64, STORAGE_NUM_CPUS);
        // DISK.
        env_helper!(
            mut_config.storage,
            disk,
            data_path,
            String,
            DISK_STORAGE_DATA_PATH
        );
        env_helper!(
            mut_config.storage,
            disk,
            temp_data_path,
            String,
            DISK_STORAGE_TEMP_DATA_PATH
        );
        // S3.
        env_helper!(mut_config.storage, s3, region, String, S3_STORAGE_REGION);
        env_helper!(
            mut_config.storage,
            s3,
            endpoint_url,
            String,
            S3_STORAGE_ENDPOINT_URL
        );
        env_helper!(
            mut_config.storage,
            s3,
            access_key_id,
            String,
            S3_STORAGE_ACCESS_KEY_ID
        );
        env_helper!(
            mut_config.storage,
            s3,
            secret_access_key,
            String,
            S3_STORAGE_SECRET_ACCESS_KEY
        );
        env_helper!(
            mut_config.storage,
            s3,
            enable_pod_iam_policy,
            bool,
            S3_STORAGE_ENABLE_POD_IAM_POLICY
        );
        env_helper!(mut_config.storage, s3, bucket, String, S3_STORAGE_BUCKET);
        env_helper!(mut_config.storage, s3, root, String, S3_STORAGE_ROOT);
        // Azure Storage Blob.
        // Bug fix: `account` was previously loaded from AZURE_BLOB_MASTER_KEY,
        // and `master_key` / `container` were never loaded from the
        // environment at all. Aligned with the clap `env` attributes
        // declared on `AzureStorageBlobConfig`.
        env_helper!(
            mut_config.storage,
            azure_storage_blob,
            account,
            String,
            AZURE_STORAGE_ACCOUNT
        );
        env_helper!(
            mut_config.storage,
            azure_storage_blob,
            master_key,
            String,
            AZURE_BLOB_MASTER_KEY
        );
        env_helper!(
            mut_config.storage,
            azure_storage_blob,
            container,
            String,
            AZURE_BLOB_CONTAINER
        );
    }
}
| 29.560714 | 92 | 0.62571 |
ccaecfe483396b2742b0aa4bf52386d758c248b4 | 14,218 | //! ECIES-ed25519: An Integrated Encryption Scheme on Twisted Edwards Curve25519.
//!
//! ECIES can be used to encrypt data using a public key such that it can only be decrypted
//! by the holder of the corresponding private key. It is based on [curve25519-dalek](https://docs.rs/curve25519-dalek).
//!
//! There are two different backends for HKDF-SHA256 / AES-GCM operations:
//!
//! - The `pure_rust` backend (default). It uses a collection of pure-rust implementations of SHA2, HKDF, AES, and AEAD.
//!
//! - The `ring` backend uses [ring](https://briansmith.org/rustdoc/ring/). It uses rock solid primitives based on BoringSSL,
//! but cannot run on all platforms. For example it won't work in web assembly. To enable it add the following to your Cargo.toml:
//!
//! `ecies-ed25519 = { version = "0.3", features = ["ring"] }`
//!
//! ## Example Usage
//! ```rust
//! let mut csprng = rand::thread_rng();
//! let (secret, public) = ecies_ed25519::generate_keypair(&mut csprng);
//!
//! let message = "I 💖🔒";
//!
//! // Encrypt the message with the public key such that only the holder of the secret key can decrypt.
//! let encrypted = ecies_ed25519::encrypt(&public, message.as_bytes(), &mut csprng).unwrap();
//!
//! // Decrypt the message with the secret key
//! let decrypted = ecies_ed25519::decrypt(&secret, &encrypted);
//!```
//!
//! ## `serde` support
//!
//! The `serde` feature is provided for serializing / deserializing private and public keys.
//!
use curve25519_dalek::scalar::Scalar;
use failure::Fail;
use rand::{CryptoRng, RngCore};
mod keys;
pub use keys::*;
#[cfg(feature = "ring")]
mod ring_backend;
#[cfg(feature = "ring")]
use ring_backend::*;
#[cfg(feature = "pure_rust")]
mod pure_rust_backend;
#[cfg(feature = "pure_rust")]
use pure_rust_backend::*;
#[cfg(not(any(feature = "ring", feature = "pure_rust")))]
compile_error!(
"ecies-rd25519: Either feature 'ring' or 'pure_rust' must be enabled for this crate."
);
#[cfg(all(feature = "ring", feature = "pure_rust"))]
compile_error!(
"ecies-rd25519: Feature 'ring' and 'pure_rust' cannot both be enabled. Please choose one."
);
// Shared crypto parameters.
// NOTE(review): neither HKDF_INFO nor AES_IV_LENGTH is referenced in this
// file; presumably the `ring` / `pure_rust` backend modules use them —
// confirm there.
const HKDF_INFO: &[u8; 13] = b"ecies-ed25519";
const AES_IV_LENGTH: usize = 12;
// Both the HKDF-derived AES key and the compressed shared point are 32 bytes.
type AesKey = [u8; 32];
type SharedSecret = [u8; 32];
/// Generate a keypair, ready for use in ECIES
pub fn generate_keypair<R: CryptoRng + RngCore>(rng: &mut R) -> (SecretKey, PublicKey) {
    let sk = SecretKey::generate(rng);
    let pk = PublicKey::from_secret(&sk);
    (sk, pk)
}
/// Encrypt a message using ECIES, it can only be decrypted by the receiver's SecretKey.
pub fn encrypt<R: CryptoRng + RngCore>(
    receiver_pub: &PublicKey,
    msg: &[u8],
    rng: &mut R,
) -> Result<Vec<u8>, Error> {
    // A fresh ephemeral keypair is used per message; its public half is
    // prepended to the ciphertext so the receiver can derive the AES key.
    let (ephemeral_sk, ephemeral_pk) = generate_keypair(rng);
    let aes_key = encapsulate(&ephemeral_sk, receiver_pub);
    let sealed = aes_encrypt(&aes_key, msg, rng)?;
    let mut cipher_text = Vec::with_capacity(PUBLIC_KEY_LENGTH + sealed.len());
    cipher_text.extend_from_slice(&ephemeral_pk.to_bytes());
    cipher_text.extend_from_slice(&sealed);
    Ok(cipher_text)
}
/// Decrypt a ECIES encrypted ciphertext using the receiver's SecretKey.
pub fn decrypt(receiver_sec: &SecretKey, ciphertext: &[u8]) -> Result<Vec<u8>, Error> {
if ciphertext.len() <= PUBLIC_KEY_LENGTH {
return Err(Error::DecryptionFailedCiphertextShort);
}
let ephemeral_pk = PublicKey::from_bytes(&ciphertext[..PUBLIC_KEY_LENGTH])?;
let encrypted = &ciphertext[PUBLIC_KEY_LENGTH..];
let aes_key = decapsulate(&receiver_sec, &ephemeral_pk);
let decrypted = aes_decrypt(&aes_key, encrypted).map_err(|_| Error::DecryptionFailed)?;
Ok(decrypted)
}
/// Diffie-Hellman: multiplies the peer's point by our scalar and returns the
/// compressed result as the raw shared secret (the caller feeds it to HKDF).
fn generate_shared(secret: &SecretKey, public: &PublicKey) -> SharedSecret {
    let scalar = Scalar::from_bits(secret.to_bytes());
    let shared_point = public.to_point() * scalar;
    shared_point.compress().as_bytes().to_owned()
}
/// Sender-side key derivation: the AES key is HKDF over the ephemeral public
/// key concatenated with the DH shared point.
fn encapsulate(emphemeral_sk: &SecretKey, peer_pk: &PublicKey) -> AesKey {
    let shared_point = generate_shared(emphemeral_sk, peer_pk);
    let emphemeral_pk = PublicKey::from_secret(emphemeral_sk);
    let mut master = [0u8; 64];
    master[..32].copy_from_slice(emphemeral_pk.0.as_bytes());
    master[32..].copy_from_slice(&shared_point);
    hkdf_sha256(&master)
}
/// Receiver-side mirror of `encapsulate`: reconstructs the identical master
/// bytes (ephemeral public key || shared point) and derives the same AES key.
fn decapsulate(sk: &SecretKey, emphemeral_pk: &PublicKey) -> AesKey {
    let shared_point = generate_shared(sk, emphemeral_pk);
    let mut master = [0u8; 64];
    master[..32].copy_from_slice(emphemeral_pk.0.as_bytes());
    master[32..].copy_from_slice(&shared_point);
    hkdf_sha256(&master)
}
/// Error types
#[derive(Debug, Fail)]
pub enum Error {
/// Encryption failed
#[fail(display = "ecies-rd25519: encryption failed")]
EncryptionFailed,
/// Encryption failed - RNG error
#[fail(display = "ecies-rd25519: encryption failed - RNG error")]
EncryptionFailedRng,
/// Decryption failed
#[fail(display = "ecies-rd25519: decryption failed")]
DecryptionFailed,
/// Decryption failed - ciphertext too short
#[fail(display = "ecies-rd25519: decryption failed - ciphertext too short")]
DecryptionFailedCiphertextShort,
/// Invalid public key bytes
#[fail(display = "ecies-rd25519: invalid public key bytes")]
InvalidPublicKeyBytes,
/// Invalid secret key bytes
#[fail(display = "ecies-rd25519: invalid secret key bytes")]
InvalidSecretKeyBytes,
}
#[cfg(test)]
pub mod tests {
    // Unit tests for the ecies-ed25519 crate: DH-style key agreement, the
    // AES symmetric layer, the full ECIES encrypt/decrypt flow, key
    // (de)serialization, and fixed-vector interoperability checks that
    // guard against accidental changes to the wire/derivation format.
    use super::*;
    use rand::thread_rng;
    use rand::SeedableRng;

    // Shared-secret derivation must be symmetric (sk_a + pk_b == sk_b + pk_a)
    // and must differ when the keys are mispaired.
    #[test]
    fn test_shared() {
        let (emphemeral_sk, emphemeral_pk) = generate_keypair(&mut thread_rng());
        let (peer_sk, peer_pk) = generate_keypair(&mut thread_rng());
        assert_eq!(
            generate_shared(&emphemeral_sk, &peer_pk),
            generate_shared(&peer_sk, &emphemeral_pk)
        );
        // Make sure it fails when wrong keys used
        assert_ne!(
            generate_shared(&emphemeral_sk, &emphemeral_pk),
            generate_shared(&peer_sk, &peer_pk)
        )
    }

    // Sender-side encapsulation and receiver-side decapsulation must
    // derive the same symmetric key.
    #[test]
    fn test_encapsulation() {
        let (emphemeral_sk, emphemeral_pk) = generate_keypair(&mut thread_rng());
        let (peer_sk, peer_pk) = generate_keypair(&mut thread_rng());
        assert_eq!(
            encapsulate(&emphemeral_sk, &peer_pk),
            decapsulate(&peer_sk, &emphemeral_pk)
        )
    }

    // AES round trip with a deterministic seeded RNG, plus expected
    // failures on a too-short ciphertext and on a mismatched key.
    #[test]
    fn test_aes() {
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let mut key = [0u8; 32];
        test_rng.fill_bytes(&mut key);
        let plaintext = b"ABC";
        let encrypted = aes_encrypt(&key, plaintext, &mut test_rng).unwrap();
        let decrypted = aes_decrypt(&key, &encrypted).unwrap();
        assert_eq!(plaintext, decrypted.as_slice());
        // Test bad ciphertext
        assert!(aes_decrypt(&key, &[0u8; 16]).is_err());
        // Test bad secret key
        let bad_secret = SecretKey::generate(&mut thread_rng());
        assert!(aes_decrypt(&bad_secret.as_bytes(), &encrypted).is_err());
    }

    // Full ECIES round trip: encrypt to a public key, decrypt with the
    // matching secret key; wrong key or truncated ciphertext must error.
    #[test]
    fn test_ecies_ed25519() {
        let (peer_sk, peer_pk) = generate_keypair(&mut thread_rng());
        let plaintext = b"ABOLISH ICE";
        let encrypted = encrypt(&peer_pk, plaintext, &mut thread_rng()).unwrap();
        let decrypted = decrypt(&peer_sk, &encrypted).unwrap();
        assert_eq!(plaintext, decrypted.as_slice());
        // Test bad ciphertext
        assert!(decrypt(&peer_sk, &[0u8; 16]).is_err());
        // Test that it fails when using a bad secret key
        let bad_secret = SecretKey::generate(&mut thread_rng());
        assert!(decrypt(&bad_secret, &encrypted).is_err());
    }

    // Pins the HKDF-SHA256 derivation to a previously captured output so
    // format changes are caught. Do not update `known_key` without a
    // deliberate, documented breaking change.
    #[test]
    fn test_hkdf_sha256_interop() {
        let known_key: Vec<u8> = vec![
            204, 68, 78, 7, 8, 70, 53, 136, 56, 115, 129, 183, 226, 82, 147, 253, 62, 59, 170, 188,
            131, 119, 31, 21, 249, 255, 19, 103, 230, 24, 213, 204,
        ];
        let key = hkdf_sha256(b"ABC123");
        assert_eq!(key.to_vec(), known_key);
    }

    // Pins the AES layer to a ciphertext produced by an earlier build
    // using the same zero-seeded RNG (seed fixes the key and the nonce).
    #[test]
    fn test_aes_interop() {
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let mut key = [0u8; 32];
        test_rng.fill_bytes(&mut key);
        let plaintext = b"ABC";
        let known_encrypted: Vec<u8> = vec![
            218, 65, 89, 124, 81, 87, 72, 141, 119, 36, 224, 63, 149, 218, 64, 106, 159, 178, 238,
            212, 36, 223, 93, 107, 19, 211, 62, 75, 195, 46, 177,
        ];
        let decrypted = aes_decrypt(&key, &known_encrypted).unwrap();
        assert_eq!(plaintext, decrypted.as_slice());
    }

    // Pins the complete ECIES ciphertext format (ephemeral public key +
    // AES payload) against a previously captured output.
    #[test]
    fn test_ecies_ed25519_interop() {
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let (peer_sk, _peer_pk) = generate_keypair(&mut test_rng);
        let plaintext = b"ABC";
        let known_encrypted: Vec<u8> = vec![
            235, 249, 207, 231, 91, 38, 106, 202, 22, 34, 114, 191, 107, 122, 99, 157, 43, 210, 46,
            229, 219, 208, 111, 176, 98, 154, 42, 250, 114, 233, 68, 8, 159, 7, 231, 190, 85, 81,
            56, 122, 152, 186, 151, 124, 246, 147, 163, 153, 29, 85, 248, 238, 194, 15, 180, 98,
            163, 36, 49, 191, 133, 242, 186,
        ];
        let decrypted = decrypt(&peer_sk, &known_encrypted).unwrap();
        assert_eq!(plaintext, decrypted.as_slice());
    }

    // Public key derivation and byte-level (de)serialization, including
    // rejection of wrong-length inputs.
    #[test]
    fn test_public_key_extract() {
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let secret = SecretKey::generate(&mut test_rng);
        let public = PublicKey::from_secret(&secret);
        PublicKey::from_bytes(public.as_bytes()).unwrap();
        // Test bad bytes
        assert!(PublicKey::from_bytes(&[0u8; 16]).is_err());
        assert!(SecretKey::from_bytes(&[0u8; 16]).is_err());
    }

    // Hex round trips for both key types, in lower- and upper-case forms.
    #[cfg(feature = "serde")]
    #[test]
    fn test_hex() {
        use hex::{FromHex, ToHex};
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let (secret, public) = generate_keypair(&mut test_rng);
        // lower
        let serialized_secret: String = secret.encode_hex();
        let serialized_public: String = public.encode_hex();
        let deserialized_secret = SecretKey::from_hex(serialized_secret).unwrap();
        let deserialized_public = PublicKey::from_hex(&serialized_public).unwrap();
        assert_eq!(secret.to_bytes(), deserialized_secret.to_bytes());
        assert_eq!(public.as_bytes(), deserialized_public.as_bytes());
        // UPPER
        let serialized_secret: String = secret.encode_hex_upper();
        let serialized_public: String = public.encode_hex_upper();
        let deserialized_secret = SecretKey::from_hex(serialized_secret).unwrap();
        let deserialized_public = PublicKey::from_hex(serialized_public).unwrap();
        assert_eq!(secret.to_bytes(), deserialized_secret.to_bytes());
        assert_eq!(public.as_bytes(), deserialized_public.as_bytes());
    }

    // serde JSON round trips via string, byte-slice-of-string, and byte
    // forms, plus rejection of corrupted or over-long payloads.
    #[cfg(feature = "serde")]
    #[test]
    fn test_serde_json() {
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let (secret, public) = generate_keypair(&mut test_rng);
        // String
        let serialized_secret = serde_json::to_string(&secret).unwrap();
        let serialized_public = serde_json::to_string(&public).unwrap();
        let deserialized_secret: SecretKey = serde_json::from_str(&serialized_secret).unwrap();
        let deserialized_public: PublicKey = serde_json::from_str(&serialized_public).unwrap();
        assert_eq!(secret.to_bytes(), deserialized_secret.to_bytes());
        assert_eq!(public.as_bytes(), deserialized_public.as_bytes());
        // Stringy bytes
        let deserialized_secret: SecretKey =
            serde_json::from_slice(serialized_secret.as_bytes()).unwrap();
        let deserialized_public: PublicKey =
            serde_json::from_slice(serialized_public.as_bytes()).unwrap();
        assert_eq!(secret.as_bytes(), deserialized_secret.as_bytes());
        assert_eq!(public.as_bytes(), deserialized_public.as_bytes());
        // Bytes
        let serialized_secret = serde_json::to_vec(&secret).unwrap();
        let serialized_public = serde_json::to_vec(&public).unwrap();
        let deserialized_secret: SecretKey = serde_json::from_slice(&serialized_secret).unwrap();
        let deserialized_public: PublicKey = serde_json::from_slice(&serialized_public).unwrap();
        assert_eq!(secret.as_bytes(), deserialized_secret.as_bytes());
        assert_eq!(public.as_bytes(), deserialized_public.as_bytes());
        // Test errors - mangle some bits and confirm it doesn't work:
        let mut serialized_public = serde_json::to_vec(&public).unwrap();
        serialized_public[0] = 50;
        assert!(serde_json::from_slice::<PublicKey>(&serialized_public).is_err());
        let mut serialized_public = serde_json::to_vec(&public).unwrap();
        serialized_public.push(48);
        serialized_public.push(49);
        assert!(serde_json::from_slice::<PublicKey>(&serialized_public).is_err());
    }

    // serde CBOR round trip plus rejection of a corrupted payload.
    #[cfg(feature = "serde")]
    #[test]
    fn test_serde_cbor() {
        let mut test_rng = rand::rngs::StdRng::from_seed([0u8; 32]);
        let (secret, public) = generate_keypair(&mut test_rng);
        let serialized_secret = serde_cbor::to_vec(&secret).unwrap();
        let serialized_public = serde_cbor::to_vec(&public).unwrap();
        let deserialized_secret: SecretKey = serde_cbor::from_slice(&serialized_secret).unwrap();
        let deserialized_public: PublicKey = serde_cbor::from_slice(&serialized_public).unwrap();
        assert_eq!(secret.as_bytes(), deserialized_secret.as_bytes());
        assert_eq!(public.as_bytes(), deserialized_public.as_bytes());
        // Test errors - mangle some bits and confirm it doesn't work:
        let mut serialized_public = serde_cbor::to_vec(&public).unwrap();
        serialized_public[6] = 120;
        assert!(serde_cbor::from_slice::<PublicKey>(&serialized_public).is_err());
    }
}
| 34.848039 | 134 | 0.648966 |
9110efed7cb4c52f70ed71de8034ab1d8809d078 | 3,474 | use std::cmp;
use std::collections::HashMap;
use super::Code;
use super::Sink;
use super::Lz77Encode;
/// A `Lz77Encode` implementation used by default.
#[derive(Debug)]
pub struct DefaultLz77Encoder {
    // Maximum backward distance (in bytes) a pointer may reference.
    window_size: u16,
    // Pending input, accumulated by `encode` until `flush` compresses it.
    buf: Vec<u8>,
}
impl DefaultLz77Encoder {
/// Makes a new encoder instance.
///
/// # Examples
/// ```
/// use libflate::deflate;
/// use libflate::lz77::{self, Lz77Encode, DefaultLz77Encoder};
///
/// let lz77 = DefaultLz77Encoder::new();
/// assert_eq!(lz77.window_size(), lz77::MAX_WINDOW_SIZE);
///
/// let options = deflate::EncodeOptions::with_lz77(lz77);
/// let _deflate = deflate::Encoder::with_options(Vec::new(), options);
/// ```
pub fn new() -> Self {
Self::with_window_size(super::MAX_WINDOW_SIZE)
}
/// Makes a new encoder instance with specified window size.
///
/// Larger window size is prefered to raise compression ratio,
/// but it may require more working memory to encode and decode data.
///
/// # Examples
/// ```
/// use libflate::deflate;
/// use libflate::lz77::{self, Lz77Encode, DefaultLz77Encoder};
///
/// let lz77 = DefaultLz77Encoder::with_window_size(1024);
/// assert_eq!(lz77.window_size(), 1024);
///
/// let options = deflate::EncodeOptions::with_lz77(lz77);
/// let _deflate = deflate::Encoder::with_options(Vec::new(), options);
/// ```
pub fn with_window_size(size: u16) -> Self {
DefaultLz77Encoder {
window_size: cmp::min(size, super::MAX_WINDOW_SIZE),
buf: Vec::new(),
}
}
}
impl Default for DefaultLz77Encoder {
fn default() -> Self {
Self::new()
}
}
impl Lz77Encode for DefaultLz77Encoder {
    // Buffers input and flushes in batches; the `window_size * 8`
    // threshold is a batching heuristic, not a format requirement.
    fn encode<S>(&mut self, buf: &[u8], sink: S)
    where
        S: Sink,
    {
        self.buf.extend_from_slice(buf);
        if self.buf.len() >= self.window_size as usize * 8 {
            self.flush(sink);
        }
    }
    // Greedy LZ77 match finding: a hash table maps each 3-byte prefix to
    // the most recent position where it occurred.
    fn flush<S>(&mut self, mut sink: S)
    where
        S: Sink,
    {
        let mut prefix_table = HashMap::new();
        let mut i = 0;
        // The `max(3, len) - 3` bound guarantees at least 3 bytes remain
        // at `i`, which `prefix` relies on (it reads buf[i..i+3]).
        while i < cmp::max(3, self.buf.len()) - 3 {
            let key = prefix(&self.buf[i..]);
            // Insert returns the previous position with the same 3-byte
            // prefix, i.e. a match candidate.
            let matched = prefix_table.insert(key, i);
            if let Some(j) = matched {
                let distance = i - j;
                if distance <= self.window_size as usize {
                    // 3 bytes are already known equal; extend the match.
                    let length = 3 + longest_common_prefix(&self.buf, i + 3, j + 3);
                    sink.consume(Code::Pointer {
                        length: length,
                        backward_distance: distance as u16,
                    });
                    i += length as usize;
                    continue;
                }
                // Candidate too far back for the window: fall through and
                // emit a literal instead.
            }
            sink.consume(Code::Literal(self.buf[i]));
            i += 1;
        }
        // Trailing bytes too short to form a match are emitted literally.
        for b in &self.buf[i..] {
            sink.consume(Code::Literal(*b));
        }
        self.buf.clear();
    }
    fn window_size(&self) -> u16 {
        self.window_size
    }
}
/// Returns the first three bytes of `buf`, used as the hash-table key for
/// LZ77 match finding in `flush`.
///
/// # Panics
/// Panics if `buf` has fewer than three bytes. The only caller (`flush`)
/// guarantees at least three bytes remain, so the previous
/// `unsafe { get_unchecked(..) }` bought nothing over safe indexing here;
/// behavior is identical for every reachable input, and an out-of-contract
/// call now panics instead of reading out of bounds (UB).
fn prefix(buf: &[u8]) -> [u8; 3] {
    [buf[0], buf[1], buf[2]]
}
/// Counts how many bytes `buf[i..]` and `buf[j..]` have in common, capped
/// so that the total match length (the caller adds 3) never exceeds the
/// format's maximum pointer length.
fn longest_common_prefix(buf: &[u8], i: usize, j: usize) -> u16 {
    let limit = super::MAX_LENGTH as usize - 3;
    buf[i..]
        .iter()
        .zip(buf[j..].iter())
        .take(limit)
        .take_while(|&(a, b)| a == b)
        .count() as u16
}
| 28.47541 | 84 | 0.525907 |
b919172fb27131175b5f8c591f191896ae51cce9 | 287 | /*!
```rudra-test
test_type = "normal"
expected_analyzers = ["UnsafeDataflow"]
```
!*/
use std::fmt::Debug;
// NOTE(review): this is an intentional analyzer fixture, not production
// code — the `rudra-test` header at the top of this file declares that the
// `UnsafeDataflow` analyzer is expected to report it. Do not "fix" the
// unsafe block: the pattern (an unsafe side effect executed before the
// caller-supplied iterator is touched) is precisely what the test exists
// to exercise.
fn test_order_unsafe<I: Iterator<Item = impl Debug>>(mut iter: I) {
    unsafe {
        // Reads out of a temporary `Box` by raw pointer; the Box is still
        // dropped normally afterwards.
        std::ptr::read(&Box::new(1234) as *const _);
    }
    println!("{:?}", iter.next());
}
| 17.9375 | 67 | 0.585366 |
562a540f5a41be245609abcffd51ee8bff29e7d4 | 2,596 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
// wasm-bindgen import declaration: binds the Rust type
// `MidiMessageEventInit` to the Web IDL `MIDIMessageEventInit` dictionary.
// The oddly spaced attribute text is emitted by the web-sys generator;
// keep it byte-identical.
#[wasm_bindgen]
extern "C" {
    # [wasm_bindgen (extends = :: js_sys :: Object , js_name = MIDIMessageEventInit)]
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[doc = "The `MidiMessageEventInit` dictionary."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `MidiMessageEventInit`*"]
    pub type MidiMessageEventInit;
}
impl MidiMessageEventInit {
    #[doc = "Construct a new `MidiMessageEventInit`."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `MidiMessageEventInit`*"]
    pub fn new() -> Self {
        #[allow(unused_mut)]
        let mut ret: Self = ::wasm_bindgen::JsCast::unchecked_into(::js_sys::Object::new());
        ret
    }
    // Shared implementation for the boolean setters below: writes `val`
    // under `field` on the underlying JS object via `Reflect::set`.
    // Failure is impossible for a plain dictionary object, hence the
    // debug_assert rather than error propagation.
    fn set_bool_field(&mut self, field: &str, val: bool) {
        use wasm_bindgen::JsValue;
        let r = ::js_sys::Reflect::set(
            self.as_ref(),
            &JsValue::from(field),
            &JsValue::from(val),
        );
        debug_assert!(
            r.is_ok(),
            "setting properties should never fail on our dictionary objects"
        );
        let _ = r;
    }
    #[doc = "Change the `bubbles` field of this object."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `MidiMessageEventInit`*"]
    pub fn bubbles(&mut self, val: bool) -> &mut Self {
        self.set_bool_field("bubbles", val);
        self
    }
    #[doc = "Change the `cancelable` field of this object."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `MidiMessageEventInit`*"]
    pub fn cancelable(&mut self, val: bool) -> &mut Self {
        self.set_bool_field("cancelable", val);
        self
    }
    #[doc = "Change the `composed` field of this object."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `MidiMessageEventInit`*"]
    pub fn composed(&mut self, val: bool) -> &mut Self {
        self.set_bool_field("composed", val);
        self
    }
}
| 35.081081 | 103 | 0.568182 |
08f493990452bb577dfc622debbaab441c56e927 | 33,403 | use std::os::raw::{c_int, c_uint, c_double};
// Capstone AArch64 bindings. Only `//` comments are used around these
// `fake_enum!` invocations: `///` doc comments are attribute tokens that
// the macro would have to match, and its accepted grammar is not visible
// here. Values mirror capstone's arm64.h and must not be renumbered.

// Shift kinds that can decorate an operand (LSL/MSL/LSR/ASR/ROR).
fake_enum! {
    pub enum arm64_shifter {
        ARM64_SFT_INVALID = 0,
        ARM64_SFT_LSL = 1,
        ARM64_SFT_MSL = 2,
        ARM64_SFT_LSR = 3,
        ARM64_SFT_ASR = 4,
        ARM64_SFT_ROR = 5,
    }
}
// Register extenders for extended-register operands (unsigned/signed
// byte/half/word/doubleword).
fake_enum! {
    pub enum arm64_extender {
        ARM64_EXT_INVALID = 0,
        ARM64_EXT_UXTB = 1,
        ARM64_EXT_UXTH = 2,
        ARM64_EXT_UXTW = 3,
        ARM64_EXT_UXTX = 4,
        ARM64_EXT_SXTB = 5,
        ARM64_EXT_SXTH = 6,
        ARM64_EXT_SXTW = 7,
        ARM64_EXT_SXTX = 8,
    }
}
// AArch64 condition codes (EQ..NV) as used by conditional instructions.
fake_enum! {
    pub enum arm64_cc {
        ARM64_CC_INVALID = 0,
        ARM64_CC_EQ = 1,
        ARM64_CC_NE = 2,
        ARM64_CC_HS = 3,
        ARM64_CC_LO = 4,
        ARM64_CC_MI = 5,
        ARM64_CC_PL = 6,
        ARM64_CC_VS = 7,
        ARM64_CC_VC = 8,
        ARM64_CC_HI = 9,
        ARM64_CC_LS = 10,
        ARM64_CC_GE = 11,
        ARM64_CC_LT = 12,
        ARM64_CC_GT = 13,
        ARM64_CC_LE = 14,
        ARM64_CC_AL = 15,
        ARM64_CC_NV = 16,
    }
}
fake_enum! {
pub enum arm64_mrs_reg {
ARM64_SYSREG_INVALID = 0,
ARM64_SYSREG_MDCCSR_EL0 = 38920,
ARM64_SYSREG_DBGDTRRX_EL0 = 38952,
ARM64_SYSREG_MDRAR_EL1 = 32896,
ARM64_SYSREG_OSLSR_EL1 = 32908,
ARM64_SYSREG_DBGAUTHSTATUS_EL1 = 33782,
ARM64_SYSREG_PMCEID0_EL0 = 56550,
ARM64_SYSREG_PMCEID1_EL0 = 56551,
ARM64_SYSREG_MIDR_EL1 = 49152,
ARM64_SYSREG_CCSIDR_EL1 = 51200,
ARM64_SYSREG_CLIDR_EL1 = 51201,
ARM64_SYSREG_CTR_EL0 = 55297,
ARM64_SYSREG_MPIDR_EL1 = 49157,
ARM64_SYSREG_REVIDR_EL1 = 49158,
ARM64_SYSREG_AIDR_EL1 = 51207,
ARM64_SYSREG_DCZID_EL0 = 55303,
ARM64_SYSREG_ID_PFR0_EL1 = 49160,
ARM64_SYSREG_ID_PFR1_EL1 = 49161,
ARM64_SYSREG_ID_DFR0_EL1 = 49162,
ARM64_SYSREG_ID_AFR0_EL1 = 49163,
ARM64_SYSREG_ID_MMFR0_EL1 = 49164,
ARM64_SYSREG_ID_MMFR1_EL1 = 49165,
ARM64_SYSREG_ID_MMFR2_EL1 = 49166,
ARM64_SYSREG_ID_MMFR3_EL1 = 49167,
ARM64_SYSREG_ID_ISAR0_EL1 = 49168,
ARM64_SYSREG_ID_ISAR1_EL1 = 49169,
ARM64_SYSREG_ID_ISAR2_EL1 = 49170,
ARM64_SYSREG_ID_ISAR3_EL1 = 49171,
ARM64_SYSREG_ID_ISAR4_EL1 = 49172,
ARM64_SYSREG_ID_ISAR5_EL1 = 49173,
ARM64_SYSREG_ID_A64PFR0_EL1 = 49184,
ARM64_SYSREG_ID_A64PFR1_EL1 = 49185,
ARM64_SYSREG_ID_A64DFR0_EL1 = 49192,
ARM64_SYSREG_ID_A64DFR1_EL1 = 49193,
ARM64_SYSREG_ID_A64AFR0_EL1 = 49196,
ARM64_SYSREG_ID_A64AFR1_EL1 = 49197,
ARM64_SYSREG_ID_A64ISAR0_EL1 = 49200,
ARM64_SYSREG_ID_A64ISAR1_EL1 = 49201,
ARM64_SYSREG_ID_A64MMFR0_EL1 = 49208,
ARM64_SYSREG_ID_A64MMFR1_EL1 = 49209,
ARM64_SYSREG_MVFR0_EL1 = 49176,
ARM64_SYSREG_MVFR1_EL1 = 49177,
ARM64_SYSREG_MVFR2_EL1 = 49178,
ARM64_SYSREG_RVBAR_EL1 = 50689,
ARM64_SYSREG_RVBAR_EL2 = 58881,
ARM64_SYSREG_RVBAR_EL3 = 62977,
ARM64_SYSREG_ISR_EL1 = 50696,
ARM64_SYSREG_CNTPCT_EL0 = 57089,
ARM64_SYSREG_CNTVCT_EL0 = 57090,
ARM64_SYSREG_TRCSTATR = 34840,
ARM64_SYSREG_TRCIDR8 = 34822,
ARM64_SYSREG_TRCIDR9 = 34830,
ARM64_SYSREG_TRCIDR10 = 34838,
ARM64_SYSREG_TRCIDR11 = 34846,
ARM64_SYSREG_TRCIDR12 = 34854,
ARM64_SYSREG_TRCIDR13 = 34862,
ARM64_SYSREG_TRCIDR0 = 34887,
ARM64_SYSREG_TRCIDR1 = 34895,
ARM64_SYSREG_TRCIDR2 = 34903,
ARM64_SYSREG_TRCIDR3 = 34911,
ARM64_SYSREG_TRCIDR4 = 34919,
ARM64_SYSREG_TRCIDR5 = 34927,
ARM64_SYSREG_TRCIDR6 = 34935,
ARM64_SYSREG_TRCIDR7 = 34943,
ARM64_SYSREG_TRCOSLSR = 34956,
ARM64_SYSREG_TRCPDSR = 34988,
ARM64_SYSREG_TRCDEVAFF0 = 35798,
ARM64_SYSREG_TRCDEVAFF1 = 35806,
ARM64_SYSREG_TRCLSR = 35822,
ARM64_SYSREG_TRCAUTHSTATUS = 35830,
ARM64_SYSREG_TRCDEVARCH = 35838,
ARM64_SYSREG_TRCDEVID = 35735,
ARM64_SYSREG_TRCDEVTYPE = 35743,
ARM64_SYSREG_TRCPIDR4 = 35751,
ARM64_SYSREG_TRCPIDR5 = 35759,
ARM64_SYSREG_TRCPIDR6 = 35767,
ARM64_SYSREG_TRCPIDR7 = 35775,
ARM64_SYSREG_TRCPIDR0 = 35783,
ARM64_SYSREG_TRCPIDR1 = 35791,
ARM64_SYSREG_TRCPIDR2 = 35799,
ARM64_SYSREG_TRCPIDR3 = 35807,
ARM64_SYSREG_TRCCIDR0 = 35815,
ARM64_SYSREG_TRCCIDR1 = 35823,
ARM64_SYSREG_TRCCIDR2 = 35831,
ARM64_SYSREG_TRCCIDR3 = 35839,
ARM64_SYSREG_ICC_IAR1_EL1 = 50784,
ARM64_SYSREG_ICC_IAR0_EL1 = 50752,
ARM64_SYSREG_ICC_HPPIR1_EL1 = 50786,
ARM64_SYSREG_ICC_HPPIR0_EL1 = 50754,
ARM64_SYSREG_ICC_RPR_EL1 = 50779,
ARM64_SYSREG_ICH_VTR_EL2 = 58969,
ARM64_SYSREG_ICH_EISR_EL2 = 58971,
ARM64_SYSREG_ICH_ELSR_EL2 = 58973,
}
}
// `arm64_sysreg` is simply an alias of the MRS system-register enum above.
pub type arm64_sysreg = arm64_mrs_reg;
// System registers writable via MSR. Values are the capstone encodings
// from arm64.h; do not renumber.
fake_enum! {
    pub enum arm64_msr_reg {
        ARM64_SYSREG_DBGDTRTX_EL0 = 38952,
        ARM64_SYSREG_OSLAR_EL1 = 32900,
        ARM64_SYSREG_PMSWINC_EL0 = 56548,
        ARM64_SYSREG_TRCOSLAR = 34948,
        ARM64_SYSREG_TRCLAR = 35814,
        ARM64_SYSREG_ICC_EOIR1_EL1 = 50785,
        ARM64_SYSREG_ICC_EOIR0_EL1 = 50753,
        ARM64_SYSREG_ICC_DIR_EL1 = 50777,
        ARM64_SYSREG_ICC_SGI1R_EL1 = 50781,
        ARM64_SYSREG_ICC_ASGI1R_EL1 = 50782,
        ARM64_SYSREG_ICC_SGI0R_EL1 = 50783,
    }
}
// PSTATE fields addressable by MSR (immediate form).
fake_enum! {
    pub enum arm64_pstate {
        ARM64_PSTATE_INVALID = 0,
        ARM64_PSTATE_SPSEL = 5,
        ARM64_PSTATE_DAIFSET = 30,
        ARM64_PSTATE_DAIFCLR = 31,
    }
}
// Vector arrangement specifiers (e.g. 8B, 4S, 2D) for SIMD operands.
fake_enum! {
    pub enum arm64_vas {
        ARM64_VAS_INVALID = 0,
        ARM64_VAS_8B = 1,
        ARM64_VAS_16B = 2,
        ARM64_VAS_4H = 3,
        ARM64_VAS_8H = 4,
        ARM64_VAS_2S = 5,
        ARM64_VAS_4S = 6,
        ARM64_VAS_1D = 7,
        ARM64_VAS_2D = 8,
        ARM64_VAS_1Q = 9,
    }
}
// Vector element size specifiers (byte/half/single/double).
fake_enum! {
    pub enum arm64_vess {
        ARM64_VESS_INVALID = 0,
        ARM64_VESS_B = 1,
        ARM64_VESS_H = 2,
        ARM64_VESS_S = 3,
        ARM64_VESS_D = 4,
    }
}
// Barrier operands for DMB/DSB/ISB instructions.
fake_enum! {
    pub enum arm64_barrier_op {
        ARM64_BARRIER_INVALID = 0,
        ARM64_BARRIER_OSHLD = 1,
        ARM64_BARRIER_OSHST = 2,
        ARM64_BARRIER_OSH = 3,
        ARM64_BARRIER_NSHLD = 5,
        ARM64_BARRIER_NSHST = 6,
        ARM64_BARRIER_NSH = 7,
        ARM64_BARRIER_ISHLD = 9,
        ARM64_BARRIER_ISHST = 10,
        ARM64_BARRIER_ISH = 11,
        ARM64_BARRIER_LD = 13,
        ARM64_BARRIER_ST = 14,
        ARM64_BARRIER_SY = 15,
    }
}
// Discriminant for `cs_arm64_op::data`; tells which accessor of
// `cs_arm64_op` is valid to call.
fake_enum! {
    pub enum arm64_op_type {
        ARM64_OP_INVALID = 0,
        ARM64_OP_REG = 1,
        ARM64_OP_IMM = 2,
        ARM64_OP_MEM = 3,
        ARM64_OP_FP = 4,
        ARM64_OP_CIMM = 64,
        ARM64_OP_REG_MRS = 65,
        ARM64_OP_REG_MSR = 66,
        ARM64_OP_PSTATE = 67,
        ARM64_OP_SYS = 68,
        ARM64_OP_PREFETCH = 69,
        ARM64_OP_BARRIER = 70,
    }
}
// TLB invalidate (TLBI) operations usable as SYS operands.
fake_enum! {
    pub enum arm64_tlbi_op {
        ARM64_TLBI_INVALID = 0,
        ARM64_TLBI_VMALLE1IS = 1,
        ARM64_TLBI_VAE1IS = 2,
        ARM64_TLBI_ASIDE1IS = 3,
        ARM64_TLBI_VAAE1IS = 4,
        ARM64_TLBI_VALE1IS = 5,
        ARM64_TLBI_VAALE1IS = 6,
        ARM64_TLBI_ALLE2IS = 7,
        ARM64_TLBI_VAE2IS = 8,
        ARM64_TLBI_ALLE1IS = 9,
        ARM64_TLBI_VALE2IS = 10,
        ARM64_TLBI_VMALLS12E1IS = 11,
        ARM64_TLBI_ALLE3IS = 12,
        ARM64_TLBI_VAE3IS = 13,
        ARM64_TLBI_VALE3IS = 14,
        ARM64_TLBI_IPAS2E1IS = 15,
        ARM64_TLBI_IPAS2LE1IS = 16,
        ARM64_TLBI_IPAS2E1 = 17,
        ARM64_TLBI_IPAS2LE1 = 18,
        ARM64_TLBI_VMALLE1 = 19,
        ARM64_TLBI_VAE1 = 20,
        ARM64_TLBI_ASIDE1 = 21,
        ARM64_TLBI_VAAE1 = 22,
        ARM64_TLBI_VALE1 = 23,
        ARM64_TLBI_VAALE1 = 24,
        ARM64_TLBI_ALLE2 = 25,
        ARM64_TLBI_VAE2 = 26,
        ARM64_TLBI_ALLE1 = 27,
        ARM64_TLBI_VALE2 = 28,
        ARM64_TLBI_VMALLS12E1 = 29,
        ARM64_TLBI_ALLE3 = 30,
        ARM64_TLBI_VAE3 = 31,
        ARM64_TLBI_VALE3 = 32,
    }
}
// Address-translation (AT) operations; note there is no INVALID variant
// and numbering starts at 0.
fake_enum! {
    pub enum arm64_at_op {
        ARM64_AT_S1E1R = 0,
        ARM64_AT_S1E1W = 1,
        ARM64_AT_S1E0R = 2,
        ARM64_AT_S1E0W = 3,
        ARM64_AT_S1E2R = 4,
        ARM64_AT_S1E2W = 5,
        ARM64_AT_S12E1R = 6,
        ARM64_AT_S12E1W = 7,
        ARM64_AT_S12E0R = 8,
        ARM64_AT_S12E0W = 9,
        ARM64_AT_S1E3R = 10,
        ARM64_AT_S1E3W = 11,
    }
}
// Data-cache maintenance (DC) operations.
fake_enum! {
    pub enum arm64_dc_op {
        ARM64_DC_INVALID = 0,
        ARM64_DC_ZVA = 1,
        ARM64_DC_IVAC = 2,
        ARM64_DC_ISW = 3,
        ARM64_DC_CVAC = 4,
        ARM64_DC_CSW = 5,
        ARM64_DC_CVAU = 6,
        ARM64_DC_CIVAC = 7,
        ARM64_DC_CISW = 8,
    }
}
// Instruction-cache maintenance (IC) operations.
fake_enum! {
    pub enum arm64_ic_op {
        ARM64_IC_INVALID = 0,
        ARM64_IC_IALLUIS = 1,
        ARM64_IC_IALLU = 2,
        ARM64_IC_IVAU = 3,
    }
}
// Prefetch hints (PRFM) — load/instruction/store targets across cache
// levels; numbering has deliberate gaps matching the encoding.
fake_enum! {
    pub enum arm64_prefetch_op {
        ARM64_PRFM_INVALID = 0,
        ARM64_PRFM_PLDL1KEEP = 1,
        ARM64_PRFM_PLDL1STRM = 2,
        ARM64_PRFM_PLDL2KEEP = 3,
        ARM64_PRFM_PLDL2STRM = 4,
        ARM64_PRFM_PLDL3KEEP = 5,
        ARM64_PRFM_PLDL3STRM = 6,
        ARM64_PRFM_PLIL1KEEP = 9,
        ARM64_PRFM_PLIL1STRM = 10,
        ARM64_PRFM_PLIL2KEEP = 11,
        ARM64_PRFM_PLIL2STRM = 12,
        ARM64_PRFM_PLIL3KEEP = 13,
        ARM64_PRFM_PLIL3STRM = 14,
        ARM64_PRFM_PSTL1KEEP = 17,
        ARM64_PRFM_PSTL1STRM = 18,
        ARM64_PRFM_PSTL2KEEP = 19,
        ARM64_PRFM_PSTL2STRM = 20,
        ARM64_PRFM_PSTL3KEEP = 21,
        ARM64_PRFM_PSTL3STRM = 22,
    }
}
// Memory operand: base register + index register + displacement,
// laid out to match capstone's C `arm64_op_mem`.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct arm64_op_mem {
    /// Base register id (an `arm64_reg` value; 0 = invalid/none).
    pub base: c_uint,
    /// Index register id (an `arm64_reg` value; 0 = invalid/none).
    pub index: c_uint,
    /// Signed displacement added to base (+ index).
    pub disp: i32,
}
impl ::std::default::Default for arm64_op_mem {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
// One decoded instruction operand. Mirrors capstone's C `cs_arm64_op`,
// where `data` is a union discriminated by `typ` — see the accessor
// methods in the `impl` below.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct cs_arm64_op {
    /// Vector element index, or -1 when not applicable.
    pub vector_index: c_int,
    /// Vector arrangement specifier.
    pub vas: arm64_vas,
    /// Vector element size specifier.
    pub vess: arm64_vess,
    /// Shift kind + amount applied to the operand.
    pub shift: arm64_shift_pair,
    /// Register extender applied to the operand.
    pub ext: arm64_extender,
    /// Discriminant telling which `data` interpretation is valid.
    pub typ: arm64_op_type,
    // Untyped payload of the C union; access only via the typed unsafe
    // accessors below after checking `typ`.
    data: arm64_op_mem,
}
impl cs_arm64_op {
    // SAFETY NOTE (applies to every accessor below): `data` stands in for
    // a C union, and each method transmutes a reference to it into the
    // member type. Calling an accessor is only sound when `typ` matches
    // the corresponding `arm64_op_type` variant (e.g. call `imm()` only
    // when `typ == ARM64_OP_IMM`); otherwise the bytes are reinterpreted
    // as the wrong type.
    #[inline]
    pub unsafe fn reg(&self) -> &c_uint {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn imm(&self) -> &i64 {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn fp(&self) -> &c_double {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn mem(&self) -> &arm64_op_mem {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn pstate(&self) -> &arm64_pstate {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn sys(&self) -> &c_uint {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn prefetch(&self) -> &arm64_prefetch_op {
        ::std::mem::transmute(&self.data)
    }
    #[inline]
    pub unsafe fn barrier(&self) -> &arm64_barrier_op {
        ::std::mem::transmute(&self.data)
    }
}
impl ::std::default::Default for cs_arm64_op {
    fn default() -> Self {
        // All-zero bytes: presumably valid because the fields are FFI
        // integers / `fake_enum!` wrappers whose zero value is the
        // *_INVALID variant — TODO confirm against the `fake_enum!`
        // definition before replacing with safe field initialization.
        unsafe { ::std::mem::zeroed() }
    }
}
// Shift kind plus shift amount attached to an operand.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct arm64_shift_pair {
    /// Shift kind (LSL/MSL/LSR/ASR/ROR or invalid).
    pub typ: arm64_shifter,
    /// Shift amount.
    pub value: c_uint,
}
// Architecture-specific detail for one decoded AArch64 instruction,
// mirroring capstone's C `cs_arm64`.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct cs_arm64 {
    /// Condition code attached to the instruction.
    pub cc: arm64_cc,
    /// Nonzero if the instruction updates condition flags (C bool).
    pub update_flags: u8,
    /// Nonzero if the instruction writes back its address register (C bool).
    pub writeback: u8,
    /// Number of valid entries in `operands`.
    pub op_count: u8,
    /// Operand storage; only the first `op_count` entries are meaningful.
    pub operands: [cs_arm64_op; 8usize],
}
impl ::std::default::Default for cs_arm64 {
    fn default() -> Self {
        // Same zeroed-bytes caveat as `cs_arm64_op::default` above:
        // presumably sound for these plain FFI fields — TODO confirm.
        unsafe { ::std::mem::zeroed() }
    }
}
fake_enum! {
pub enum arm64_reg {
ARM64_REG_INVALID = 0,
ARM64_REG_X29 = 1,
ARM64_REG_X30 = 2,
ARM64_REG_NZCV = 3,
ARM64_REG_SP = 4,
ARM64_REG_WSP = 5,
ARM64_REG_WZR = 6,
ARM64_REG_XZR = 7,
ARM64_REG_B0 = 8,
ARM64_REG_B1 = 9,
ARM64_REG_B2 = 10,
ARM64_REG_B3 = 11,
ARM64_REG_B4 = 12,
ARM64_REG_B5 = 13,
ARM64_REG_B6 = 14,
ARM64_REG_B7 = 15,
ARM64_REG_B8 = 16,
ARM64_REG_B9 = 17,
ARM64_REG_B10 = 18,
ARM64_REG_B11 = 19,
ARM64_REG_B12 = 20,
ARM64_REG_B13 = 21,
ARM64_REG_B14 = 22,
ARM64_REG_B15 = 23,
ARM64_REG_B16 = 24,
ARM64_REG_B17 = 25,
ARM64_REG_B18 = 26,
ARM64_REG_B19 = 27,
ARM64_REG_B20 = 28,
ARM64_REG_B21 = 29,
ARM64_REG_B22 = 30,
ARM64_REG_B23 = 31,
ARM64_REG_B24 = 32,
ARM64_REG_B25 = 33,
ARM64_REG_B26 = 34,
ARM64_REG_B27 = 35,
ARM64_REG_B28 = 36,
ARM64_REG_B29 = 37,
ARM64_REG_B30 = 38,
ARM64_REG_B31 = 39,
ARM64_REG_D0 = 40,
ARM64_REG_D1 = 41,
ARM64_REG_D2 = 42,
ARM64_REG_D3 = 43,
ARM64_REG_D4 = 44,
ARM64_REG_D5 = 45,
ARM64_REG_D6 = 46,
ARM64_REG_D7 = 47,
ARM64_REG_D8 = 48,
ARM64_REG_D9 = 49,
ARM64_REG_D10 = 50,
ARM64_REG_D11 = 51,
ARM64_REG_D12 = 52,
ARM64_REG_D13 = 53,
ARM64_REG_D14 = 54,
ARM64_REG_D15 = 55,
ARM64_REG_D16 = 56,
ARM64_REG_D17 = 57,
ARM64_REG_D18 = 58,
ARM64_REG_D19 = 59,
ARM64_REG_D20 = 60,
ARM64_REG_D21 = 61,
ARM64_REG_D22 = 62,
ARM64_REG_D23 = 63,
ARM64_REG_D24 = 64,
ARM64_REG_D25 = 65,
ARM64_REG_D26 = 66,
ARM64_REG_D27 = 67,
ARM64_REG_D28 = 68,
ARM64_REG_D29 = 69,
ARM64_REG_D30 = 70,
ARM64_REG_D31 = 71,
ARM64_REG_H0 = 72,
ARM64_REG_H1 = 73,
ARM64_REG_H2 = 74,
ARM64_REG_H3 = 75,
ARM64_REG_H4 = 76,
ARM64_REG_H5 = 77,
ARM64_REG_H6 = 78,
ARM64_REG_H7 = 79,
ARM64_REG_H8 = 80,
ARM64_REG_H9 = 81,
ARM64_REG_H10 = 82,
ARM64_REG_H11 = 83,
ARM64_REG_H12 = 84,
ARM64_REG_H13 = 85,
ARM64_REG_H14 = 86,
ARM64_REG_H15 = 87,
ARM64_REG_H16 = 88,
ARM64_REG_H17 = 89,
ARM64_REG_H18 = 90,
ARM64_REG_H19 = 91,
ARM64_REG_H20 = 92,
ARM64_REG_H21 = 93,
ARM64_REG_H22 = 94,
ARM64_REG_H23 = 95,
ARM64_REG_H24 = 96,
ARM64_REG_H25 = 97,
ARM64_REG_H26 = 98,
ARM64_REG_H27 = 99,
ARM64_REG_H28 = 100,
ARM64_REG_H29 = 101,
ARM64_REG_H30 = 102,
ARM64_REG_H31 = 103,
ARM64_REG_Q0 = 104,
ARM64_REG_Q1 = 105,
ARM64_REG_Q2 = 106,
ARM64_REG_Q3 = 107,
ARM64_REG_Q4 = 108,
ARM64_REG_Q5 = 109,
ARM64_REG_Q6 = 110,
ARM64_REG_Q7 = 111,
ARM64_REG_Q8 = 112,
ARM64_REG_Q9 = 113,
ARM64_REG_Q10 = 114,
ARM64_REG_Q11 = 115,
ARM64_REG_Q12 = 116,
ARM64_REG_Q13 = 117,
ARM64_REG_Q14 = 118,
ARM64_REG_Q15 = 119,
ARM64_REG_Q16 = 120,
ARM64_REG_Q17 = 121,
ARM64_REG_Q18 = 122,
ARM64_REG_Q19 = 123,
ARM64_REG_Q20 = 124,
ARM64_REG_Q21 = 125,
ARM64_REG_Q22 = 126,
ARM64_REG_Q23 = 127,
ARM64_REG_Q24 = 128,
ARM64_REG_Q25 = 129,
ARM64_REG_Q26 = 130,
ARM64_REG_Q27 = 131,
ARM64_REG_Q28 = 132,
ARM64_REG_Q29 = 133,
ARM64_REG_Q30 = 134,
ARM64_REG_Q31 = 135,
ARM64_REG_S0 = 136,
ARM64_REG_S1 = 137,
ARM64_REG_S2 = 138,
ARM64_REG_S3 = 139,
ARM64_REG_S4 = 140,
ARM64_REG_S5 = 141,
ARM64_REG_S6 = 142,
ARM64_REG_S7 = 143,
ARM64_REG_S8 = 144,
ARM64_REG_S9 = 145,
ARM64_REG_S10 = 146,
ARM64_REG_S11 = 147,
ARM64_REG_S12 = 148,
ARM64_REG_S13 = 149,
ARM64_REG_S14 = 150,
ARM64_REG_S15 = 151,
ARM64_REG_S16 = 152,
ARM64_REG_S17 = 153,
ARM64_REG_S18 = 154,
ARM64_REG_S19 = 155,
ARM64_REG_S20 = 156,
ARM64_REG_S21 = 157,
ARM64_REG_S22 = 158,
ARM64_REG_S23 = 159,
ARM64_REG_S24 = 160,
ARM64_REG_S25 = 161,
ARM64_REG_S26 = 162,
ARM64_REG_S27 = 163,
ARM64_REG_S28 = 164,
ARM64_REG_S29 = 165,
ARM64_REG_S30 = 166,
ARM64_REG_S31 = 167,
ARM64_REG_W0 = 168,
ARM64_REG_W1 = 169,
ARM64_REG_W2 = 170,
ARM64_REG_W3 = 171,
ARM64_REG_W4 = 172,
ARM64_REG_W5 = 173,
ARM64_REG_W6 = 174,
ARM64_REG_W7 = 175,
ARM64_REG_W8 = 176,
ARM64_REG_W9 = 177,
ARM64_REG_W10 = 178,
ARM64_REG_W11 = 179,
ARM64_REG_W12 = 180,
ARM64_REG_W13 = 181,
ARM64_REG_W14 = 182,
ARM64_REG_W15 = 183,
ARM64_REG_W16 = 184,
ARM64_REG_W17 = 185,
ARM64_REG_W18 = 186,
ARM64_REG_W19 = 187,
ARM64_REG_W20 = 188,
ARM64_REG_W21 = 189,
ARM64_REG_W22 = 190,
ARM64_REG_W23 = 191,
ARM64_REG_W24 = 192,
ARM64_REG_W25 = 193,
ARM64_REG_W26 = 194,
ARM64_REG_W27 = 195,
ARM64_REG_W28 = 196,
ARM64_REG_W29 = 197,
ARM64_REG_W30 = 198,
ARM64_REG_X0 = 199,
ARM64_REG_X1 = 200,
ARM64_REG_X2 = 201,
ARM64_REG_X3 = 202,
ARM64_REG_X4 = 203,
ARM64_REG_X5 = 204,
ARM64_REG_X6 = 205,
ARM64_REG_X7 = 206,
ARM64_REG_X8 = 207,
ARM64_REG_X9 = 208,
ARM64_REG_X10 = 209,
ARM64_REG_X11 = 210,
ARM64_REG_X12 = 211,
ARM64_REG_X13 = 212,
ARM64_REG_X14 = 213,
ARM64_REG_X15 = 214,
ARM64_REG_X16 = 215,
ARM64_REG_X17 = 216,
ARM64_REG_X18 = 217,
ARM64_REG_X19 = 218,
ARM64_REG_X20 = 219,
ARM64_REG_X21 = 220,
ARM64_REG_X22 = 221,
ARM64_REG_X23 = 222,
ARM64_REG_X24 = 223,
ARM64_REG_X25 = 224,
ARM64_REG_X26 = 225,
ARM64_REG_X27 = 226,
ARM64_REG_X28 = 227,
ARM64_REG_V0 = 228,
ARM64_REG_V1 = 229,
ARM64_REG_V2 = 230,
ARM64_REG_V3 = 231,
ARM64_REG_V4 = 232,
ARM64_REG_V5 = 233,
ARM64_REG_V6 = 234,
ARM64_REG_V7 = 235,
ARM64_REG_V8 = 236,
ARM64_REG_V9 = 237,
ARM64_REG_V10 = 238,
ARM64_REG_V11 = 239,
ARM64_REG_V12 = 240,
ARM64_REG_V13 = 241,
ARM64_REG_V14 = 242,
ARM64_REG_V15 = 243,
ARM64_REG_V16 = 244,
ARM64_REG_V17 = 245,
ARM64_REG_V18 = 246,
ARM64_REG_V19 = 247,
ARM64_REG_V20 = 248,
ARM64_REG_V21 = 249,
ARM64_REG_V22 = 250,
ARM64_REG_V23 = 251,
ARM64_REG_V24 = 252,
ARM64_REG_V25 = 253,
ARM64_REG_V26 = 254,
ARM64_REG_V27 = 255,
ARM64_REG_V28 = 256,
ARM64_REG_V29 = 257,
ARM64_REG_V30 = 258,
ARM64_REG_V31 = 259,
ARM64_REG_ENDING = 260,
ARM64_REG_IP1 = ARM64_REG_X16,
ARM64_REG_IP0 = ARM64_REG_X17,
ARM64_REG_FP = ARM64_REG_X29,
ARM64_REG_LR = ARM64_REG_X30,
}
}
fake_enum_nonrec! {
pub enum arm64_insn {
ARM64_INS_INVALID = 0,
ARM64_INS_ABS = 1,
ARM64_INS_ADC = 2,
ARM64_INS_ADDHN = 3,
ARM64_INS_ADDHN2 = 4,
ARM64_INS_ADDP = 5,
ARM64_INS_ADD = 6,
ARM64_INS_ADDV = 7,
ARM64_INS_ADR = 8,
ARM64_INS_ADRP = 9,
ARM64_INS_AESD = 10,
ARM64_INS_AESE = 11,
ARM64_INS_AESIMC = 12,
ARM64_INS_AESMC = 13,
ARM64_INS_AND = 14,
ARM64_INS_ASR = 15,
ARM64_INS_B = 16,
ARM64_INS_BFM = 17,
ARM64_INS_BIC = 18,
ARM64_INS_BIF = 19,
ARM64_INS_BIT = 20,
ARM64_INS_BL = 21,
ARM64_INS_BLR = 22,
ARM64_INS_BR = 23,
ARM64_INS_BRK = 24,
ARM64_INS_BSL = 25,
ARM64_INS_CBNZ = 26,
ARM64_INS_CBZ = 27,
ARM64_INS_CCMN = 28,
ARM64_INS_CCMP = 29,
ARM64_INS_CLREX = 30,
ARM64_INS_CLS = 31,
ARM64_INS_CLZ = 32,
ARM64_INS_CMEQ = 33,
ARM64_INS_CMGE = 34,
ARM64_INS_CMGT = 35,
ARM64_INS_CMHI = 36,
ARM64_INS_CMHS = 37,
ARM64_INS_CMLE = 38,
ARM64_INS_CMLT = 39,
ARM64_INS_CMTST = 40,
ARM64_INS_CNT = 41,
ARM64_INS_MOV = 42,
ARM64_INS_CRC32B = 43,
ARM64_INS_CRC32CB = 44,
ARM64_INS_CRC32CH = 45,
ARM64_INS_CRC32CW = 46,
ARM64_INS_CRC32CX = 47,
ARM64_INS_CRC32H = 48,
ARM64_INS_CRC32W = 49,
ARM64_INS_CRC32X = 50,
ARM64_INS_CSEL = 51,
ARM64_INS_CSINC = 52,
ARM64_INS_CSINV = 53,
ARM64_INS_CSNEG = 54,
ARM64_INS_DCPS1 = 55,
ARM64_INS_DCPS2 = 56,
ARM64_INS_DCPS3 = 57,
ARM64_INS_DMB = 58,
ARM64_INS_DRPS = 59,
ARM64_INS_DSB = 60,
ARM64_INS_DUP = 61,
ARM64_INS_EON = 62,
ARM64_INS_EOR = 63,
ARM64_INS_ERET = 64,
ARM64_INS_EXTR = 65,
ARM64_INS_EXT = 66,
ARM64_INS_FABD = 67,
ARM64_INS_FABS = 68,
ARM64_INS_FACGE = 69,
ARM64_INS_FACGT = 70,
ARM64_INS_FADD = 71,
ARM64_INS_FADDP = 72,
ARM64_INS_FCCMP = 73,
ARM64_INS_FCCMPE = 74,
ARM64_INS_FCMEQ = 75,
ARM64_INS_FCMGE = 76,
ARM64_INS_FCMGT = 77,
ARM64_INS_FCMLE = 78,
ARM64_INS_FCMLT = 79,
ARM64_INS_FCMP = 80,
ARM64_INS_FCMPE = 81,
ARM64_INS_FCSEL = 82,
ARM64_INS_FCVTAS = 83,
ARM64_INS_FCVTAU = 84,
ARM64_INS_FCVT = 85,
ARM64_INS_FCVTL = 86,
ARM64_INS_FCVTL2 = 87,
ARM64_INS_FCVTMS = 88,
ARM64_INS_FCVTMU = 89,
ARM64_INS_FCVTNS = 90,
ARM64_INS_FCVTNU = 91,
ARM64_INS_FCVTN = 92,
ARM64_INS_FCVTN2 = 93,
ARM64_INS_FCVTPS = 94,
ARM64_INS_FCVTPU = 95,
ARM64_INS_FCVTXN = 96,
ARM64_INS_FCVTXN2 = 97,
ARM64_INS_FCVTZS = 98,
ARM64_INS_FCVTZU = 99,
ARM64_INS_FDIV = 100,
ARM64_INS_FMADD = 101,
ARM64_INS_FMAX = 102,
ARM64_INS_FMAXNM = 103,
ARM64_INS_FMAXNMP = 104,
ARM64_INS_FMAXNMV = 105,
ARM64_INS_FMAXP = 106,
ARM64_INS_FMAXV = 107,
ARM64_INS_FMIN = 108,
ARM64_INS_FMINNM = 109,
ARM64_INS_FMINNMP = 110,
ARM64_INS_FMINNMV = 111,
ARM64_INS_FMINP = 112,
ARM64_INS_FMINV = 113,
ARM64_INS_FMLA = 114,
ARM64_INS_FMLS = 115,
ARM64_INS_FMOV = 116,
ARM64_INS_FMSUB = 117,
ARM64_INS_FMUL = 118,
ARM64_INS_FMULX = 119,
ARM64_INS_FNEG = 120,
ARM64_INS_FNMADD = 121,
ARM64_INS_FNMSUB = 122,
ARM64_INS_FNMUL = 123,
ARM64_INS_FRECPE = 124,
ARM64_INS_FRECPS = 125,
ARM64_INS_FRECPX = 126,
ARM64_INS_FRINTA = 127,
ARM64_INS_FRINTI = 128,
ARM64_INS_FRINTM = 129,
ARM64_INS_FRINTN = 130,
ARM64_INS_FRINTP = 131,
ARM64_INS_FRINTX = 132,
ARM64_INS_FRINTZ = 133,
ARM64_INS_FRSQRTE = 134,
ARM64_INS_FRSQRTS = 135,
ARM64_INS_FSQRT = 136,
ARM64_INS_FSUB = 137,
ARM64_INS_HINT = 138,
ARM64_INS_HLT = 139,
ARM64_INS_HVC = 140,
ARM64_INS_INS = 141,
ARM64_INS_ISB = 142,
ARM64_INS_LD1 = 143,
ARM64_INS_LD1R = 144,
ARM64_INS_LD2R = 145,
ARM64_INS_LD2 = 146,
ARM64_INS_LD3R = 147,
ARM64_INS_LD3 = 148,
ARM64_INS_LD4 = 149,
ARM64_INS_LD4R = 150,
ARM64_INS_LDARB = 151,
ARM64_INS_LDARH = 152,
ARM64_INS_LDAR = 153,
ARM64_INS_LDAXP = 154,
ARM64_INS_LDAXRB = 155,
ARM64_INS_LDAXRH = 156,
ARM64_INS_LDAXR = 157,
ARM64_INS_LDNP = 158,
ARM64_INS_LDP = 159,
ARM64_INS_LDPSW = 160,
ARM64_INS_LDRB = 161,
ARM64_INS_LDR = 162,
ARM64_INS_LDRH = 163,
ARM64_INS_LDRSB = 164,
ARM64_INS_LDRSH = 165,
ARM64_INS_LDRSW = 166,
ARM64_INS_LDTRB = 167,
ARM64_INS_LDTRH = 168,
ARM64_INS_LDTRSB = 169,
ARM64_INS_LDTRSH = 170,
ARM64_INS_LDTRSW = 171,
ARM64_INS_LDTR = 172,
ARM64_INS_LDURB = 173,
ARM64_INS_LDUR = 174,
ARM64_INS_LDURH = 175,
ARM64_INS_LDURSB = 176,
ARM64_INS_LDURSH = 177,
ARM64_INS_LDURSW = 178,
ARM64_INS_LDXP = 179,
ARM64_INS_LDXRB = 180,
ARM64_INS_LDXRH = 181,
ARM64_INS_LDXR = 182,
ARM64_INS_LSL = 183,
ARM64_INS_LSR = 184,
ARM64_INS_MADD = 185,
ARM64_INS_MLA = 186,
ARM64_INS_MLS = 187,
ARM64_INS_MOVI = 188,
ARM64_INS_MOVK = 189,
ARM64_INS_MOVN = 190,
ARM64_INS_MOVZ = 191,
ARM64_INS_MRS = 192,
ARM64_INS_MSR = 193,
ARM64_INS_MSUB = 194,
ARM64_INS_MUL = 195,
ARM64_INS_MVNI = 196,
ARM64_INS_NEG = 197,
ARM64_INS_NOT = 198,
ARM64_INS_ORN = 199,
ARM64_INS_ORR = 200,
ARM64_INS_PMULL2 = 201,
ARM64_INS_PMULL = 202,
ARM64_INS_PMUL = 203,
ARM64_INS_PRFM = 204,
ARM64_INS_PRFUM = 205,
ARM64_INS_RADDHN = 206,
ARM64_INS_RADDHN2 = 207,
ARM64_INS_RBIT = 208,
ARM64_INS_RET = 209,
ARM64_INS_REV16 = 210,
ARM64_INS_REV32 = 211,
ARM64_INS_REV64 = 212,
ARM64_INS_REV = 213,
ARM64_INS_ROR = 214,
ARM64_INS_RSHRN2 = 215,
ARM64_INS_RSHRN = 216,
ARM64_INS_RSUBHN = 217,
ARM64_INS_RSUBHN2 = 218,
ARM64_INS_SABAL2 = 219,
ARM64_INS_SABAL = 220,
ARM64_INS_SABA = 221,
ARM64_INS_SABDL2 = 222,
ARM64_INS_SABDL = 223,
ARM64_INS_SABD = 224,
ARM64_INS_SADALP = 225,
ARM64_INS_SADDLP = 226,
ARM64_INS_SADDLV = 227,
ARM64_INS_SADDL2 = 228,
ARM64_INS_SADDL = 229,
ARM64_INS_SADDW2 = 230,
ARM64_INS_SADDW = 231,
ARM64_INS_SBC = 232,
ARM64_INS_SBFM = 233,
ARM64_INS_SCVTF = 234,
ARM64_INS_SDIV = 235,
ARM64_INS_SHA1C = 236,
ARM64_INS_SHA1H = 237,
ARM64_INS_SHA1M = 238,
ARM64_INS_SHA1P = 239,
ARM64_INS_SHA1SU0 = 240,
ARM64_INS_SHA1SU1 = 241,
ARM64_INS_SHA256H2 = 242,
ARM64_INS_SHA256H = 243,
ARM64_INS_SHA256SU0 = 244,
ARM64_INS_SHA256SU1 = 245,
ARM64_INS_SHADD = 246,
ARM64_INS_SHLL2 = 247,
ARM64_INS_SHLL = 248,
ARM64_INS_SHL = 249,
ARM64_INS_SHRN2 = 250,
ARM64_INS_SHRN = 251,
ARM64_INS_SHSUB = 252,
ARM64_INS_SLI = 253,
ARM64_INS_SMADDL = 254,
ARM64_INS_SMAXP = 255,
ARM64_INS_SMAXV = 256,
ARM64_INS_SMAX = 257,
ARM64_INS_SMC = 258,
ARM64_INS_SMINP = 259,
ARM64_INS_SMINV = 260,
ARM64_INS_SMIN = 261,
ARM64_INS_SMLAL2 = 262,
ARM64_INS_SMLAL = 263,
ARM64_INS_SMLSL2 = 264,
ARM64_INS_SMLSL = 265,
ARM64_INS_SMOV = 266,
ARM64_INS_SMSUBL = 267,
ARM64_INS_SMULH = 268,
ARM64_INS_SMULL2 = 269,
ARM64_INS_SMULL = 270,
ARM64_INS_SQABS = 271,
ARM64_INS_SQADD = 272,
ARM64_INS_SQDMLAL = 273,
ARM64_INS_SQDMLAL2 = 274,
ARM64_INS_SQDMLSL = 275,
ARM64_INS_SQDMLSL2 = 276,
ARM64_INS_SQDMULH = 277,
ARM64_INS_SQDMULL = 278,
ARM64_INS_SQDMULL2 = 279,
ARM64_INS_SQNEG = 280,
ARM64_INS_SQRDMULH = 281,
ARM64_INS_SQRSHL = 282,
ARM64_INS_SQRSHRN = 283,
ARM64_INS_SQRSHRN2 = 284,
ARM64_INS_SQRSHRUN = 285,
ARM64_INS_SQRSHRUN2 = 286,
ARM64_INS_SQSHLU = 287,
ARM64_INS_SQSHL = 288,
ARM64_INS_SQSHRN = 289,
ARM64_INS_SQSHRN2 = 290,
ARM64_INS_SQSHRUN = 291,
ARM64_INS_SQSHRUN2 = 292,
ARM64_INS_SQSUB = 293,
ARM64_INS_SQXTN2 = 294,
ARM64_INS_SQXTN = 295,
ARM64_INS_SQXTUN2 = 296,
ARM64_INS_SQXTUN = 297,
ARM64_INS_SRHADD = 298,
ARM64_INS_SRI = 299,
ARM64_INS_SRSHL = 300,
ARM64_INS_SRSHR = 301,
ARM64_INS_SRSRA = 302,
ARM64_INS_SSHLL2 = 303,
ARM64_INS_SSHLL = 304,
ARM64_INS_SSHL = 305,
ARM64_INS_SSHR = 306,
ARM64_INS_SSRA = 307,
ARM64_INS_SSUBL2 = 308,
ARM64_INS_SSUBL = 309,
ARM64_INS_SSUBW2 = 310,
ARM64_INS_SSUBW = 311,
ARM64_INS_ST1 = 312,
ARM64_INS_ST2 = 313,
ARM64_INS_ST3 = 314,
ARM64_INS_ST4 = 315,
ARM64_INS_STLRB = 316,
ARM64_INS_STLRH = 317,
ARM64_INS_STLR = 318,
ARM64_INS_STLXP = 319,
ARM64_INS_STLXRB = 320,
ARM64_INS_STLXRH = 321,
ARM64_INS_STLXR = 322,
ARM64_INS_STNP = 323,
ARM64_INS_STP = 324,
ARM64_INS_STRB = 325,
ARM64_INS_STR = 326,
ARM64_INS_STRH = 327,
ARM64_INS_STTRB = 328,
ARM64_INS_STTRH = 329,
ARM64_INS_STTR = 330,
ARM64_INS_STURB = 331,
ARM64_INS_STUR = 332,
ARM64_INS_STURH = 333,
ARM64_INS_STXP = 334,
ARM64_INS_STXRB = 335,
ARM64_INS_STXRH = 336,
ARM64_INS_STXR = 337,
ARM64_INS_SUBHN = 338,
ARM64_INS_SUBHN2 = 339,
ARM64_INS_SUB = 340,
ARM64_INS_SUQADD = 341,
ARM64_INS_SVC = 342,
ARM64_INS_SYSL = 343,
ARM64_INS_SYS = 344,
ARM64_INS_TBL = 345,
ARM64_INS_TBNZ = 346,
ARM64_INS_TBX = 347,
ARM64_INS_TBZ = 348,
ARM64_INS_TRN1 = 349,
ARM64_INS_TRN2 = 350,
ARM64_INS_UABAL2 = 351,
ARM64_INS_UABAL = 352,
ARM64_INS_UABA = 353,
ARM64_INS_UABDL2 = 354,
ARM64_INS_UABDL = 355,
ARM64_INS_UABD = 356,
ARM64_INS_UADALP = 357,
ARM64_INS_UADDLP = 358,
ARM64_INS_UADDLV = 359,
ARM64_INS_UADDL2 = 360,
ARM64_INS_UADDL = 361,
ARM64_INS_UADDW2 = 362,
ARM64_INS_UADDW = 363,
ARM64_INS_UBFM = 364,
ARM64_INS_UCVTF = 365,
ARM64_INS_UDIV = 366,
ARM64_INS_UHADD = 367,
ARM64_INS_UHSUB = 368,
ARM64_INS_UMADDL = 369,
ARM64_INS_UMAXP = 370,
ARM64_INS_UMAXV = 371,
ARM64_INS_UMAX = 372,
ARM64_INS_UMINP = 373,
ARM64_INS_UMINV = 374,
ARM64_INS_UMIN = 375,
ARM64_INS_UMLAL2 = 376,
ARM64_INS_UMLAL = 377,
ARM64_INS_UMLSL2 = 378,
ARM64_INS_UMLSL = 379,
ARM64_INS_UMOV = 380,
ARM64_INS_UMSUBL = 381,
ARM64_INS_UMULH = 382,
ARM64_INS_UMULL2 = 383,
ARM64_INS_UMULL = 384,
ARM64_INS_UQADD = 385,
ARM64_INS_UQRSHL = 386,
ARM64_INS_UQRSHRN = 387,
ARM64_INS_UQRSHRN2 = 388,
ARM64_INS_UQSHL = 389,
ARM64_INS_UQSHRN = 390,
ARM64_INS_UQSHRN2 = 391,
ARM64_INS_UQSUB = 392,
ARM64_INS_UQXTN2 = 393,
ARM64_INS_UQXTN = 394,
ARM64_INS_URECPE = 395,
ARM64_INS_URHADD = 396,
ARM64_INS_URSHL = 397,
ARM64_INS_URSHR = 398,
ARM64_INS_URSQRTE = 399,
ARM64_INS_URSRA = 400,
ARM64_INS_USHLL2 = 401,
ARM64_INS_USHLL = 402,
ARM64_INS_USHL = 403,
ARM64_INS_USHR = 404,
ARM64_INS_USQADD = 405,
ARM64_INS_USRA = 406,
ARM64_INS_USUBL2 = 407,
ARM64_INS_USUBL = 408,
ARM64_INS_USUBW2 = 409,
ARM64_INS_USUBW = 410,
ARM64_INS_UZP1 = 411,
ARM64_INS_UZP2 = 412,
ARM64_INS_XTN2 = 413,
ARM64_INS_XTN = 414,
ARM64_INS_ZIP1 = 415,
ARM64_INS_ZIP2 = 416,
ARM64_INS_MNEG = 417,
ARM64_INS_UMNEGL = 418,
ARM64_INS_SMNEGL = 419,
ARM64_INS_NOP = 420,
ARM64_INS_YIELD = 421,
ARM64_INS_WFE = 422,
ARM64_INS_WFI = 423,
ARM64_INS_SEV = 424,
ARM64_INS_SEVL = 425,
ARM64_INS_NGC = 426,
ARM64_INS_SBFIZ = 427,
ARM64_INS_UBFIZ = 428,
ARM64_INS_SBFX = 429,
ARM64_INS_UBFX = 430,
ARM64_INS_BFI = 431,
ARM64_INS_BFXIL = 432,
ARM64_INS_CMN = 433,
ARM64_INS_MVN = 434,
ARM64_INS_TST = 435,
ARM64_INS_CSET = 436,
ARM64_INS_CINC = 437,
ARM64_INS_CSETM = 438,
ARM64_INS_CINV = 439,
ARM64_INS_CNEG = 440,
ARM64_INS_SXTB = 441,
ARM64_INS_SXTH = 442,
ARM64_INS_SXTW = 443,
ARM64_INS_CMP = 444,
ARM64_INS_UXTB = 445,
ARM64_INS_UXTH = 446,
ARM64_INS_UXTW = 447,
ARM64_INS_IC = 448,
ARM64_INS_DC = 449,
ARM64_INS_AT = 450,
ARM64_INS_TLBI = 451,
ARM64_INS_ENDING = 452,
}
}
// Instruction-group identifiers for AArch64, mirroring capstone's
// `arm64_insn_group` C enum. The jump between the generic groups (0-1)
// and the feature groups (128+) reproduces the upstream numbering.
fake_enum! {
    pub enum arm64_insn_group {
        ARM64_GRP_INVALID = 0,
        ARM64_GRP_JUMP = 1,
        ARM64_GRP_CRYPTO = 128,
        ARM64_GRP_FPARMV8 = 129,
        ARM64_GRP_NEON = 130,
        ARM64_GRP_CRC = 131,
        ARM64_GRP_ENDING = 132,
    }
}
| 28.770887 | 57 | 0.580577 |
082799864be86525750c17f50b4376b57c9d0d50 | 3,643 | use crate::consts::*;
use crate::time::ControlledTime;
use crate::types::{
ArrowTimeToml,
Directions::{self, *},
Speed,
};
use bevy::{
app::AppExit,
input::{keyboard::KeyCode, Input},
prelude::*,
};
use serde_derive::Serialize;
use std::fs::File;
use std::io::prelude::*;
// Accumulates every arrow-key press recorded during a map-maker session
// so the whole list can be serialized to TOML when the app exits.
#[derive(Serialize, Debug, Default)]
struct Presses {
    // One entry per key press, in the order the presses occurred.
    arrows: Vec<ArrowTimeToml>,
}
/// Appends an `ArrowTimeToml` entry for every arrow key pressed this frame.
///
/// The timestamp comes from the controlled clock; each entry is recorded
/// with `Speed::Slow` and the direction of the key that was pressed.
fn save_key_presses(
    time: Res<ControlledTime>,
    keyboard_input: Res<Input<KeyCode>>,
    mut presses: ResMut<Presses>,
) {
    for &dir in [Up, Down, Left, Right].iter() {
        if !dir.key_just_pressed(&keyboard_input) {
            continue;
        }
        presses.arrows.push(ArrowTimeToml {
            click_time: time.seconds_since_startup(),
            speed: Speed::Slow,
            direction: dir,
        });
    }
}
/// On `AppExit`, serializes all recorded presses to `map.toml` in the
/// working directory.
///
/// Panics via `expect` if TOML serialization or any file operation fails;
/// a hard failure is acceptable for this development-time tool.
fn save_to_file_on_exit(mut event_reader: EventReader<AppExit>, presses: Res<Presses>) {
    for _event in event_reader.iter() {
        let text = toml::to_string(&*presses).expect("Couldn't convert to toml text");
        let mut file = File::create("map.toml").expect("Couldn't open map.toml");
        file.write_all(text.as_bytes())
            .expect("Couldn't write to map.toml");
    }
}
struct MapMakerArrow(Directions);
/// Spawns one border-arrow sprite per direction, stacked vertically and
/// rotated to point the way its direction indicates.
fn setup_map_maker_arrows(
    mut commands: Commands,
    mut materials: ResMut<Assets<ColorMaterial>>,
    asset_server: ResMut<AssetServer>,
) {
    // Single shared material for all four arrows.
    let border_handle = materials.add(asset_server.load("images/arrow_border.png").into());
    let directions = [Up, Down, Left, Right];
    for direction in directions.iter() {
        // Fixed vertical slot for each direction, 100 px apart.
        let y = match direction {
            Up => 150.,
            Down => 50.,
            Left => -50.,
            Right => -150.,
        };
        // z = 1 keeps the arrows in front of whatever sits at z = 0.
        let mut transform = Transform::from_translation(Vec3::new(0., y, 1.));
        transform.rotate(Quat::from_rotation_z(direction.rotation()));
        commands
            .spawn_bundle(SpriteBundle {
                material: border_handle.clone(),
                sprite: Sprite::new(Vec2::new(140., 140.)),
                transform,
                ..Default::default()
            })
            .insert(MapMakerArrow(*direction));
    }
}
fn toggle_map_maker_arrows(
mut query: Query<(&mut Visible, &MapMakerArrow)>,
keyboard_input: Res<Input<KeyCode>>,
) {
for (mut visible, arrow) in query.iter_mut() {
visible.is_visible = arrow.0.key_pressed(&keyboard_input);
}
}
// Handle to the map-maker backing track, loaded once when the resource
// is initialized.
struct MapMakerAudio(Handle<AudioSource>);
impl FromWorld for MapMakerAudio {
    // Loads "map_maker_song.mp3" via the asset server; `world.cell()`
    // gives interior access so the asset server can be fetched mutably.
    fn from_world(world: &mut World) -> Self {
        let world = world.cell();
        let asset_server = world.get_resource_mut::<AssetServer>().unwrap();
        let audio = asset_server.load("map_maker_song.mp3");
        Self(audio)
    }
}
/// Starts playback of the preloaded map-maker song.
fn start_song(audio: Res<Audio>, map_maker_audio: Res<MapMakerAudio>) {
    audio.play(map_maker_audio.0.clone());
}
/// Wires together the map-maker systems: resources, one-time setup when
/// entering the map-maker state, and the per-frame recording systems.
pub struct MapMakerPlugin;
impl Plugin for MapMakerPlugin {
    fn build(&self, app: &mut AppBuilder) {
        app.init_resource::<Presses>()
            .init_resource::<MapMakerAudio>()
            .add_system_set(
                SystemSet::on_enter(AppState::MakeMap)
                    .with_system(setup_map_maker_arrows.system())
                    .with_system(start_song.system()),
            )
            .add_system_set(
                // NOTE(review): setup runs on entering `AppState::MakeMap`,
                // but these update systems are registered for
                // `AppState::Game`. Confirm whether `MakeMap` was intended
                // here, since otherwise the map-maker systems never run in
                // the state where their arrows were spawned.
                SystemSet::on_update(AppState::Game)
                    .with_system(toggle_map_maker_arrows.system())
                    .with_system(save_to_file_on_exit.system())
                    .with_system(save_key_presses.system()),
            );
    }
}
| 30.358333 | 91 | 0.603349 |
900c2b7185729e2d2087db03a654ecb54428b1a0 | 3,597 | #![feature(test)]
extern crate test;
extern crate parinfer_rust;
#[macro_use]
extern crate serde_json;
use std::ffi::CString;
use test::Bencher;
const LONG_MAP_WITH_STRINGS : &str = include_str!("perf/long_map_with_strings");
const REALLY_LONG_FILE : &str = include_str!("perf/really_long_file");
const REALLY_LONG_FILE_WITH_UNCLOSED_PAREN : &str = include_str!("perf/really_long_file_with_unclosed_paren");
const REALLY_LONG_FILE_WITH_UNCLOSED_QUOTE : &str = include_str!("perf/really_long_file_with_unclosed_quote");
/// Builds the NUL-terminated JSON request string that `run_parinfer`
/// expects, for the given mode ("paren", "indent" or "smart") and text.
fn build_case(mode: &str, text: &str) -> CString {
    let request = json!({
        "mode": mode,
        "text": text,
        "options": {
            "forceBalance": false,
            "partialResult": false,
            "returnParens": false
        }
    });
    CString::new(request.to_string()).unwrap()
}
// Each benchmark builds the request JSON once, outside the timed closure,
// so only the `run_parinfer` call itself is measured. The `unsafe` block
// is needed because `run_parinfer` takes a raw C-string pointer.
//
// --- Well-formed input: a long map literal full of strings. ---
#[bench]
fn bench_paren_long_map_with_strings(b: &mut Bencher) {
    unsafe {
        let options = build_case("paren", LONG_MAP_WITH_STRINGS);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_indent_long_map_with_strings(b: &mut Bencher) {
    unsafe {
        let options = build_case("indent", LONG_MAP_WITH_STRINGS);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_smart_long_map_with_strings(b: &mut Bencher) {
    unsafe {
        let options = build_case("smart", LONG_MAP_WITH_STRINGS);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
// --- Well-formed input: a very long file. ---
#[bench]
fn bench_paren_really_long_file(b: &mut Bencher) {
    unsafe {
        let options = build_case("paren", REALLY_LONG_FILE);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_indent_really_long_file(b: &mut Bencher) {
    unsafe {
        let options = build_case("indent", REALLY_LONG_FILE);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_smart_really_long_file(b: &mut Bencher) {
    unsafe {
        let options = build_case("smart", REALLY_LONG_FILE);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
// --- Malformed input: one unclosed paren; exercises recovery paths. ---
#[bench]
fn bench_paren_really_long_file_with_unclosed_paren(b: &mut Bencher) {
    unsafe {
        let options = build_case("paren", REALLY_LONG_FILE_WITH_UNCLOSED_PAREN);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_indent_really_long_file_with_unclosed_paren(b: &mut Bencher) {
    unsafe {
        let options = build_case("indent", REALLY_LONG_FILE_WITH_UNCLOSED_PAREN);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_smart_really_long_file_with_unclosed_paren(b: &mut Bencher) {
    unsafe {
        let options = build_case("smart", REALLY_LONG_FILE_WITH_UNCLOSED_PAREN);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
// --- Malformed input: one unclosed string quote. ---
// NOTE(review): despite the `bench_quote_` prefix, this first benchmark
// runs "paren" mode; the name was presumably meant to be
// `bench_paren_really_long_file_with_unclosed_quote` -- confirm.
#[bench]
fn bench_quote_really_long_file_with_unclosed_quote(b: &mut Bencher) {
    unsafe {
        let options = build_case("paren", REALLY_LONG_FILE_WITH_UNCLOSED_QUOTE);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_indent_really_long_file_with_unclosed_quote(b: &mut Bencher) {
    unsafe {
        let options = build_case("indent", REALLY_LONG_FILE_WITH_UNCLOSED_QUOTE);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
#[bench]
fn bench_smart_really_long_file_with_unclosed_quote(b: &mut Bencher) {
    unsafe {
        let options = build_case("smart", REALLY_LONG_FILE_WITH_UNCLOSED_QUOTE);
        b.iter(|| parinfer_rust::run_parinfer(options.as_ptr()));
    }
}
| 29.008065 | 110 | 0.685849 |
89835f04005d64ca0011529bc08ec8aa0703e3f0 | 1,493 | #[doc = r" Value read from the register"]
// svd2rust-style register accessors. `R` is a read snapshot of the raw
// bits, `W` is a staging value that is written back in one store.
pub struct R {
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::TIMER_CURRENT_VAL {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the closure sees the current bits in `r`
        // and edits a copy in `w`, which is then stored back whole.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, `write` starts from the reset value, not the
        // register's current contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
| 22.969231 | 59 | 0.499665 |
1ed06938e95c878e8fcc1044aa6465676bb81105 | 70,901 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Compilation of match statements
//!
//! I will endeavor to explain the code as best I can. I have only a loose
//! understanding of some parts of it.
//!
//! ## Matching
//!
//! The basic state of the code is maintained in an array `m` of `Match`
//! objects. Each `Match` describes some list of patterns, all of which must
//! match against the current list of values. If those patterns match, then
//! the arm listed in the match is the correct arm. A given arm may have
//! multiple corresponding match entries, one for each alternative that
//! remains. As we proceed these sets of matches are adjusted by the various
//! `enter_XXX()` functions, each of which adjusts the set of options given
//! some information about the value which has been matched.
//!
//! So, initially, there is one value and N matches, each of which have one
//! constituent pattern. N here is usually the number of arms but may be
//! greater, if some arms have multiple alternatives. For example, here:
//!
//! enum Foo { A, B(int), C(uint, uint) }
//! match foo {
//! A => ...,
//! B(x) => ...,
//! C(1u, 2) => ...,
//! C(_) => ...
//! }
//!
//! The value would be `foo`. There would be four matches, each of which
//! contains one pattern (and, in one case, a guard). We could collect the
//! various options and then compile the code for the case where `foo` is an
//! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1)
//! drop the two matches that do not match a `C` and (2) expand the other two
//! into two patterns each. In the first case, the two patterns would be `1u`
//! and `2`, and the in the second case the _ pattern would be expanded into
//! `_` and `_`. The two values are of course the arguments to `C`.
//!
//! Here is a quick guide to the various functions:
//!
//! - `compile_submatch()`: The main workhorse. It takes a list of values and
//! a list of matches and finds the various possibilities that could occur.
//!
//! - `enter_XXX()`: modifies the list of matches based on some information
//! about the value that has been matched. For example,
//! `enter_rec_or_struct()` adjusts the values given that a record or struct
//! has been matched. This is an infallible pattern, so *all* of the matches
//! must be either wildcards or record/struct patterns. `enter_opt()`
//! handles the fallible cases, and it is correspondingly more complex.
//!
//! ## Bindings
//!
//! We store information about the bound variables for each arm as part of the
//! per-arm `ArmData` struct. There is a mapping from identifiers to
//! `BindingInfo` structs. These structs contain the mode/id/type of the
//! binding, but they also contain an LLVM value which points at an alloca
//! called `llmatch`. For by value bindings that are Copy, we also create
//! an extra alloca that we copy the matched value to so that any changes
//! we do to our copy is not reflected in the original and vice-versa.
//! We don't do this if it's a move since the original value can't be used
//! and thus allowing us to cheat in not creating an extra alloca.
//!
//! The `llmatch` binding always stores a pointer into the value being matched
//! which points at the data for the binding. If the value being matched has
//! type `T`, then, `llmatch` will point at an alloca of type `T*` (and hence
//! `llmatch` has type `T**`). So, if you have a pattern like:
//!
//! let a: A = ...;
//! let b: B = ...;
//! match (a, b) { (ref c, d) => { ... } }
//!
//! For `c` and `d`, we would generate allocas of type `C*` and `D*`
//! respectively. These are called the `llmatch`. As we match, when we come
//! up against an identifier, we store the current pointer into the
//! corresponding alloca.
//!
//! Once a pattern is completely matched, and assuming that there is no guard
//! pattern, we will branch to a block that leads to the body itself. For any
//! by-value bindings, this block will first load the ptr from `llmatch` (the
//! one of type `D*`) and then load a second time to get the actual value (the
//! one of type `D`). For by ref bindings, the value of the local variable is
//! simply the first alloca.
//!
//! So, for the example above, we would generate a setup kind of like this:
//!
//! +-------+
//! | Entry |
//! +-------+
//! |
//! +--------------------------------------------+
//! | llmatch_c = (addr of first half of tuple) |
//! | llmatch_d = (addr of second half of tuple) |
//! +--------------------------------------------+
//! |
//! +--------------------------------------+
//! | *llbinding_d = **llmatch_d |
//! +--------------------------------------+
//!
//! If there is a guard, the situation is slightly different, because we must
//! execute the guard code. Moreover, we need to do so once for each of the
//! alternatives that lead to the arm, because if the guard fails, they may
//! have different points from which to continue the search. Therefore, in that
//! case, we generate code that looks more like:
//!
//! +-------+
//! | Entry |
//! +-------+
//! |
//!        +--------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple)  |
//!        | llmatch_d = (addr of second half of tuple) |
//!        +--------------------------------------------+
//! |
//! +-------------------------------------------------+
//! | *llbinding_d = **llmatch_d |
//! | check condition |
//! | if false { goto next case } |
//! | if true { goto body } |
//! +-------------------------------------------------+
//!
//! The handling for the cleanups is a bit... sensitive. Basically, the body
//! is the one that invokes `add_clean()` for each binding. During the guard
//! evaluation, we add temporary cleanups and revoke them after the guard is
//! evaluated (it could fail, after all). Note that guards and moves are
//! just plain incompatible.
//!
//! Some relevant helper functions that manage bindings:
//! - `create_bindings_map()`
//! - `insert_lllocals()`
//!
//!
//! ## Notes on vector pattern matching.
//!
//! Vector pattern matching is surprisingly tricky. The problem is that
//! the structure of the vector isn't fully known, and slice matches
//! can be done on subparts of it.
//!
//! The way that vector pattern matches are dealt with, then, is as
//! follows. First, we make the actual condition associated with a
//! vector pattern simply a vector length comparison. So the pattern
//! [1, .. x] gets the condition "vec len >= 1", and the pattern
//! [.. x] gets the condition "vec len >= 0". The problem here is that
//! having the condition "vec len >= 1" hold clearly does not mean that
//! only a pattern that has exactly that condition will match. This
//! means that it may well be the case that a condition holds, but none
//! of the patterns matching that condition match; to deal with this,
//! when doing vector length matches, we have match failures proceed to
//! the next condition to check.
//!
//! There are a couple more subtleties to deal with. While the "actual"
//! condition associated with vector length tests is simply a test on
//! the vector length, the actual vec_len Opt entry contains more
//! information used to restrict which matches are associated with it.
//! So that all matches in a submatch are matching against the same
//! values from inside the vector, they are split up by how many
//! elements they match at the front and at the back of the vector. In
//! order to make sure that arms are properly checked in order, even
//! with the overmatching conditions, each vec_len Opt entry is
//! associated with a range of matches.
//! Consider the following:
//!
//! match &[1, 2, 3] {
//! [1, 1, .. _] => 0,
//! [1, 2, 2, .. _] => 1,
//! [1, 2, 3, .. _] => 2,
//! [1, 2, .. _] => 3,
//! _ => 4
//! }
//! The proper arm to match is arm 2, but arms 0 and 3 both have the
//! condition "len >= 2". If arm 3 was lumped in with arm 0, then the
//! wrong branch would be taken. Instead, vec_len Opts are associated
//! with a contiguous range of matches that have the same "shape".
//! This is sort of ugly and requires a bunch of special handling of
//! vec_len options.
pub use self::BranchKind::*;
pub use self::OptResult::*;
pub use self::TransBindingMode::*;
use self::Opt::*;
use self::FailureHandler::*;
use back::abi;
use llvm::{ValueRef, BasicBlockRef};
use middle::check_match::StaticInliner;
use middle::check_match;
use middle::const_eval;
use middle::def;
use middle::expr_use_visitor as euv;
use middle::lang_items::StrEqFnLangItem;
use middle::mem_categorization as mc;
use middle::pat_util::*;
use middle::resolve::DefMap;
use trans::adt;
use trans::base::*;
use trans::build::{AddCase, And, BitCast, Br, CondBr, GEPi, InBoundsGEP, Load};
use trans::build::{Mul, Not, Store, Sub, add_comment};
use trans::build;
use trans::callee;
use trans::cleanup::{mod, CleanupMethods};
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::expr::{mod, Dest};
use trans::tvec;
use trans::type_of;
use trans::debuginfo;
use middle::ty::{mod, Ty};
use session::config::FullDebugInfo;
use util::common::indenter;
use util::nodemap::FnvHashMap;
use util::ppaux::{Repr, vec_map_to_string};
use std;
use std::iter::AdditiveIterator;
use std::rc::Rc;
use syntax::ast;
use syntax::ast::{DUMMY_NODE_ID, Ident};
use syntax::codemap::Span;
use syntax::fold::Folder;
use syntax::ptr::P;
/// A literal expression used as a match discriminant (e.g. the `1u` in the
/// pattern `C(1u, 2)`), wrapped so two literals can be compared by
/// constant evaluation.
#[deriving(Show)]
struct ConstantExpr<'a>(&'a ast::Expr);
impl<'a> Copy for ConstantExpr<'a> {}
impl<'a> ConstantExpr<'a> {
    /// Returns true if `self` and `other` constant-evaluate to equal
    /// values. Panics if the literals have incomparable types, which
    /// type checking should already have ruled out.
    fn eq(self, other: ConstantExpr<'a>, tcx: &ty::ctxt) -> bool {
        let ConstantExpr(expr) = self;
        let ConstantExpr(other_expr) = other;
        // `compare_lit_exprs` yields a comparison result; 0 means equal.
        match const_eval::compare_lit_exprs(tcx, expr, other_expr) {
            Some(val1) => val1 == 0,
            // Fixed: the panic message previously said "compare_list_exprs",
            // misnaming the helper that actually failed.
            None => panic!("compare_lit_exprs: type mismatch"),
        }
    }
}
// An option identifying a branch (either a literal, an enum variant or a range)
#[deriving(Show)]
enum Opt<'a, 'tcx> {
    // A single literal value, e.g. `42`.
    ConstantValue(ConstantExpr<'a>),
    // A range of literals, e.g. `'a'...'z'`.
    ConstantRange(ConstantExpr<'a>, ConstantExpr<'a>),
    // An enum variant: discriminant, ADT representation, variant def-id.
    Variant(ty::Disr, Rc<adt::Repr<'tcx>>, ast::DefId),
    // A fixed-length vector pattern, e.g. `[a, b, c]`.
    SliceLengthEqual(uint),
    // A vector pattern with a subslice, e.g. `[a, .. rest, z]`.
    SliceLengthGreaterOrEqual(/* prefix length */ uint, /* suffix length */ uint),
}
impl<'a, 'tcx> Opt<'a, 'tcx> {
    /// Structural equality between two branch options; constant values
    /// are compared by constant evaluation, everything else structurally.
    fn eq(&self, other: &Opt<'a, 'tcx>, tcx: &ty::ctxt<'tcx>) -> bool {
        match (self, other) {
            (&ConstantValue(a), &ConstantValue(b)) => a.eq(b, tcx),
            (&ConstantRange(a1, a2), &ConstantRange(b1, b2)) => {
                a1.eq(b1, tcx) && a2.eq(b2, tcx)
            }
            (&Variant(a_disr, ref a_repr, a_def), &Variant(b_disr, ref b_repr, b_def)) => {
                a_disr == b_disr && *a_repr == *b_repr && a_def == b_def
            }
            (&SliceLengthEqual(a), &SliceLengthEqual(b)) => a == b,
            (&SliceLengthGreaterOrEqual(a1, a2), &SliceLengthGreaterOrEqual(b1, b2)) => {
                a1 == b1 && a2 == b2
            }
            _ => false
        }
    }
    /// Translates this option into the LLVM value(s) the generated switch
    /// or comparison will test against.
    fn trans<'blk>(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> {
        let _icx = push_ctxt("match::trans_opt");
        let ccx = bcx.ccx();
        match *self {
            ConstantValue(ConstantExpr(lit_expr)) => {
                let lit_ty = ty::node_id_to_type(bcx.tcx(), lit_expr.id);
                let (llval, _) = consts::const_expr(ccx, &*lit_expr);
                let lit_datum = immediate_rvalue(llval, lit_ty);
                let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
                SingleResult(Result::new(bcx, lit_datum.val))
            }
            ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2)) => {
                // Both endpoints are needed for the range comparison.
                let (l1, _) = consts::const_expr(ccx, &**l1);
                let (l2, _) = consts::const_expr(ccx, &**l2);
                RangeResult(Result::new(bcx, l1), Result::new(bcx, l2))
            }
            Variant(disr_val, ref repr, _) => {
                adt::trans_case(bcx, &**repr, disr_val)
            }
            SliceLengthEqual(length) => {
                SingleResult(Result::new(bcx, C_uint(ccx, length)))
            }
            SliceLengthGreaterOrEqual(prefix, suffix) => {
                // Subslice patterns match any length >= prefix + suffix.
                LowerBound(Result::new(bcx, C_uint(ccx, prefix + suffix)))
            }
        }
    }
}
// How a set of branch options will be tested at runtime.
#[deriving(PartialEq)]
pub enum BranchKind {
    NoBranch,
    Single,
    Switch,
    Compare,
    CompareSliceLength
}
impl Copy for BranchKind {}
// The LLVM value(s) produced by `Opt::trans` for a branch test.
pub enum OptResult<'blk, 'tcx: 'blk> {
    SingleResult(Result<'blk, 'tcx>),
    RangeResult(Result<'blk, 'tcx>, Result<'blk, 'tcx>),
    LowerBound(Result<'blk, 'tcx>)
}
// How a pattern binding transfers the matched value (see the module docs
// on bindings above).
#[deriving(Clone)]
pub enum TransBindingMode {
    TrByCopy(/* llbinding */ ValueRef),
    TrByMove,
    TrByRef,
}
impl Copy for TransBindingMode {}
/// Information about a pattern binding:
/// - `llmatch` is a pointer to a stack slot. The stack slot contains a
///   pointer into the value being matched. Hence, llmatch has type `T**`
///   where `T` is the value being matched.
/// - `trmode` is the trans binding mode
/// - `id` is the node id of the binding
/// - `ty` is the Rust type of the binding
#[deriving(Clone)]
pub struct BindingInfo<'tcx> {
    pub llmatch: ValueRef,
    pub trmode: TransBindingMode,
    pub id: ast::NodeId,
    pub span: Span,
    pub ty: Ty<'tcx>,
}
impl<'tcx> Copy for BindingInfo<'tcx> {}
// Maps each bound identifier in an arm to its binding information.
type BindingsMap<'tcx> = FnvHashMap<Ident, BindingInfo<'tcx>>;
// Per-arm state: the block that will hold the arm body, the AST arm, and
// the bindings its patterns introduce.
struct ArmData<'p, 'blk, 'tcx: 'blk> {
    bodycx: Block<'blk, 'tcx>,
    arm: &'p ast::Arm,
    bindings_map: BindingsMap<'tcx>
}
/// Info about Match.
/// If all `pats` are matched then arm `data` will be executed.
/// As we proceed `bound_ptrs` are filled with pointers to values to be bound,
/// these pointers are stored in llmatch variables just before executing `data` arm.
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
    // One pattern per value still being matched (columns of the matrix).
    pats: Vec<&'p ast::Pat>,
    // The arm this row belongs to; shared between rows of the same arm.
    data: &'a ArmData<'p, 'blk, 'tcx>,
    // (identifier, pointer) pairs collected as sub-patterns are matched.
    bound_ptrs: Vec<(Ident, ValueRef)>,
}
impl<'a, 'p, 'blk, 'tcx> Repr<'tcx> for Match<'a, 'p, 'blk, 'tcx> {
    /// Debug rendering of this match row. The full pattern dump is only
    /// produced in verbose mode, since serializing every pattern can be
    /// very slow for large programs.
    fn repr(&self, tcx: &ty::ctxt) -> String {
        if !tcx.sess.verbose() {
            format!("{} pats", self.pats.len())
        } else {
            self.pats.repr(tcx)
        }
    }
}
/// Returns true if any row's pattern in column `col` is an identifier
/// pattern with a sub-pattern, e.g. `binding @ sub_pat`.
fn has_nested_bindings(m: &[Match], col: uint) -> bool {
    for br in m.iter() {
        if let ast::PatIdent(_, _, Some(_)) = br.pats[col].node {
            return true;
        }
    }
    false
}
/// Strips `ident @ sub_pat` layers from column `col` of every row: each
/// stripped identifier is recorded in `bound_ptrs` (bound to `val`) and
/// the column is replaced with the innermost sub-pattern.
fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                              m: &[Match<'a, 'p, 'blk, 'tcx>],
                                              col: uint,
                                              val: ValueRef)
                                              -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})",
           bcx.to_str(),
           m.repr(bcx.tcx()),
           col,
           bcx.val_to_string(val));
    let _indenter = indenter();
    m.iter().map(|br| {
        let mut bound_ptrs = br.bound_ptrs.clone();
        let mut pat = br.pats[col];
        // Peel off nested `ident @ ...` layers until a non-binding
        // pattern (or a plain binding) is reached.
        loop {
            pat = match pat.node {
                ast::PatIdent(_, ref path, Some(ref inner)) => {
                    bound_ptrs.push((path.node, val));
                    &**inner
                },
                _ => break
            }
        }
        let mut pats = br.pats.clone();
        pats[col] = pat;
        Match {
            pats: pats,
            data: &*br.data,
            bound_ptrs: bound_ptrs
        }
    }).collect()
}
// Callback deciding whether a row survives: given the row's patterns it
// returns the adjusted pattern list, or None to drop the row.
type EnterPatterns<'a, 'p> = |&[&'p ast::Pat]|: 'a -> Option<Vec<&'p ast::Pat>>;
/// Generic row-filtering step (see the "enter family" notes above): keeps
/// the rows `e` accepts and records any bindings in column `col` against
/// `val` along the way.
fn enter_match<'a, 'b, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                       dm: &DefMap,
                                       m: &[Match<'a, 'p, 'blk, 'tcx>],
                                       col: uint,
                                       val: ValueRef,
                                       e: EnterPatterns<'b, 'p>)
                                       -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_match(bcx={}, m={}, col={}, val={})",
           bcx.to_str(),
           m.repr(bcx.tcx()),
           col,
           bcx.val_to_string(val));
    let _indenter = indenter();
    m.iter().filter_map(|br| {
        e(br.pats.as_slice()).map(|pats| {
            let this = br.pats[col];
            let mut bound_ptrs = br.bound_ptrs.clone();
            match this.node {
                ast::PatIdent(_, ref path, None) => {
                    // A plain variable binding captures `val` directly.
                    if pat_is_binding(dm, &*this) {
                        bound_ptrs.push((path.node, val));
                    }
                }
                ast::PatVec(ref before, Some(ref slice), ref after) => {
                    // A named subslice (`[a, .. rest, z]`) binds a fresh
                    // fat pointer covering the middle of the vector.
                    if let ast::PatIdent(_, ref path, None) = slice.node {
                        let subslice_val = bind_subslice_pat(
                            bcx, this.id, val,
                            before.len(), after.len());
                        bound_ptrs.push((path.node, subslice_val));
                    }
                }
                _ => {}
            }
            Match {
                pats: pats,
                data: br.data,
                bound_ptrs: bound_ptrs
            }
        })
    }).collect()
}
/// Keeps only the rows whose column-`col` pattern matches anything (a
/// binding or wildcard), removing that column from the surviving rows.
fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     dm: &DefMap,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     col: uint,
                                     val: ValueRef)
                                     -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_default(bcx={}, m={}, col={}, val={})",
           bcx.to_str(),
           m.repr(bcx.tcx()),
           col,
           bcx.val_to_string(val));
    let _indenter = indenter();
    // Collect all of the matches that can match against anything.
    enter_match(bcx, dm, m, col, val, |pats| {
        if pat_is_binding_or_wild(dm, &*pats[col]) {
            // Drop column `col`: it has been consumed by the wildcard.
            let mut r = pats[..col].to_vec();
            r.push_all(pats[col + 1..]);
            Some(r)
        } else {
            None
        }
    })
}
// <pcwalton> nmatsakis: what does enter_opt do?
// <pcwalton> in trans/match
// <pcwalton> trans/match.rs is like stumbling around in a dark cave
// <nmatsakis> pcwalton: the enter family of functions adjust the set of
// patterns as needed
// <nmatsakis> yeah, at some point I kind of achieved some level of
// understanding
// <nmatsakis> anyhow, they adjust the patterns given that something of that
// kind has been found
// <nmatsakis> pcwalton: ok, right, so enter_XXX() adjusts the patterns, as I
// said
// <nmatsakis> enter_match() kind of embodies the generic code
// <nmatsakis> it is provided with a function that tests each pattern to see
// if it might possibly apply and so forth
// <nmatsakis> so, if you have a pattern like {a: _, b: _, _} and one like _
// <nmatsakis> then _ would be expanded to (_, _)
// <nmatsakis> one spot for each of the sub-patterns
// <nmatsakis> enter_opt() is one of the more complex; it covers the fallible
// cases
// <nmatsakis> enter_rec_or_struct() or enter_tuple() are simpler, since they
// are infallible patterns
// <nmatsakis> so all patterns must either be records (resp. tuples) or
// wildcards
/// The above is now outdated in that enter_match() now takes a function that
/// takes the complete row of patterns rather than just the first one.
/// Also, most of the enter_() family functions have been unified with
/// the check_match specialization step.
fn enter_opt<'a, 'p, 'blk, 'tcx>(
             bcx: Block<'blk, 'tcx>,
             _: ast::NodeId,
             dm: &DefMap,
             m: &[Match<'a, 'p, 'blk, 'tcx>],
             opt: &Opt,
             col: uint,
             variant_size: uint,
             val: ValueRef)
             -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_opt(bcx={}, m={}, opt={}, col={}, val={})",
           bcx.to_str(),
           m.repr(bcx.tcx()),
           *opt,
           col,
           bcx.val_to_string(val));
    let _indenter = indenter();
    // Translate our Opt into the check_match constructor it corresponds
    // to, so the shared specialization logic can be reused.
    let ctor = match opt {
        &ConstantValue(ConstantExpr(expr)) => check_match::ConstantValue(
            const_eval::eval_const_expr(bcx.tcx(), &*expr)
        ),
        &ConstantRange(ConstantExpr(lo), ConstantExpr(hi)) => check_match::ConstantRange(
            const_eval::eval_const_expr(bcx.tcx(), &*lo),
            const_eval::eval_const_expr(bcx.tcx(), &*hi)
        ),
        &SliceLengthEqual(n) =>
            check_match::Slice(n),
        &SliceLengthGreaterOrEqual(before, after) =>
            check_match::SliceWithSubslice(before, after),
        &Variant(_, _, def_id) =>
            check_match::Constructor::Variant(def_id)
    };
    let param_env = ty::empty_parameter_environment();
    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: param_env,
    };
    // Specialize each row against `ctor`; rows that cannot match it are
    // dropped, the rest have the column expanded to `variant_size` columns.
    enter_match(bcx, dm, m, col, val, |pats|
        check_match::specialize(&mcx, pats.as_slice(), &ctor, col, variant_size)
    )
}
// Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal.
fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    m: &[Match<'a, 'p, 'blk, 'tcx>], col: uint)
                                    -> Vec<Opt<'p, 'tcx>> {
    let tcx = bcx.tcx();
    let mut found: Vec<Opt> = vec![];
    for br in m.iter() {
        let cur = br.pats[col];
        let opt = match cur.node {
            ast::PatLit(ref l) => ConstantValue(ConstantExpr(&**l)),
            ast::PatIdent(..) | ast::PatEnum(..) | ast::PatStruct(..) => {
                // This is either an enum variant or a variable binding.
                let opt_def = tcx.def_map.borrow().get(&cur.id).cloned();
                match opt_def {
                    Some(def::DefVariant(enum_id, var_id, _)) => {
                        let variant = ty::enum_variant_with_id(tcx, enum_id, var_id);
                        Variant(variant.disr_val, adt::represent_node(bcx, cur.id), var_id)
                    }
                    // Plain bindings need no runtime test.
                    _ => continue
                }
            }
            ast::PatRange(ref l1, ref l2) => {
                ConstantRange(ConstantExpr(&**l1), ConstantExpr(&**l2))
            }
            ast::PatVec(ref before, None, ref after) => {
                SliceLengthEqual(before.len() + after.len())
            }
            ast::PatVec(ref before, Some(_), ref after) => {
                SliceLengthGreaterOrEqual(before.len(), after.len())
            }
            _ => continue
        };
        // Deduplicate: only keep the first occurrence of each option.
        if !found.iter().any(|x| x.eq(&opt, tcx)) {
            found.push(opt);
        }
    }
    found
}
/// Values extracted from a matched value (enum variant arguments or vector
/// elements), together with the block in which code generation continues.
struct ExtractedBlock<'blk, 'tcx: 'blk> {
    // One pointer per extracted field/element.
    vals: Vec<ValueRef>,
    bcx: Block<'blk, 'tcx>,
}
/// Extracts pointers to the arguments of the enum variant with discriminant
/// `disr_val` from the matched value `val`.
fn extract_variant_args<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    repr: &adt::Repr<'tcx>,
                                    disr_val: ty::Disr,
                                    val: ValueRef)
                                    -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_variant_args");
    // One field pointer per argument of this particular variant.
    let args = Vec::from_fn(adt::num_args(repr, disr_val), |i| {
        adt::trans_field_ptr(bcx, repr, val, disr_val, i)
    });
    ExtractedBlock { vals: args, bcx: bcx }
}
/// Helper for converting from the ValueRef that we pass around in the match code, which is always
/// an lvalue, into a Datum. Eventually we should just pass around a Datum and be done with it.
///
/// No cleanup is scheduled here; the caller owns the value's lifetime.
fn match_datum<'tcx>(val: ValueRef, left_ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
    Datum::new(val, left_ty, Lvalue)
}
/// Builds the slice value bound by a subslice pattern (the `rest` in
/// `[a, rest.., b]`): it skips `offset_left` elements at the front and
/// `offset_right` at the back of the matched vector. Returns a pointer to a
/// scratch fat pointer (data pointer + length) holding the slice.
fn bind_subslice_pat(bcx: Block,
                     pat_id: ast::NodeId,
                     val: ValueRef,
                     offset_left: uint,
                     offset_right: uint) -> ValueRef {
    let _icx = push_ctxt("match::bind_subslice_pat");
    let vec_ty = node_id_type(bcx, pat_id);
    let vt = tvec::vec_types(bcx, ty::sequence_element_type(bcx.tcx(), ty::type_content(vec_ty)));
    let vec_datum = match_datum(val, vec_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);
    // Data pointer: base + offset_left * element_size.
    let slice_byte_offset = Mul(bcx, vt.llunit_size, C_uint(bcx.ccx(), offset_left));
    let slice_begin = tvec::pointer_add_byte(bcx, base, slice_byte_offset);
    // Length: total length minus the fixed elements on both sides.
    let slice_len_offset = C_uint(bcx.ccx(), offset_left + offset_right);
    let slice_len = Sub(bcx, len, slice_len_offset);
    let slice_ty = ty::mk_slice(bcx.tcx(),
                                ty::ReStatic,
                                ty::mt {ty: vt.unit_ty, mutbl: ast::MutImmutable});
    let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
    // Write the two words of the fat pointer into the scratch slot.
    Store(bcx, slice_begin,
          GEPi(bcx, scratch.val, &[0u, abi::FAT_PTR_ADDR]));
    Store(bcx, slice_len, GEPi(bcx, scratch.val, &[0u, abi::FAT_PTR_EXTRA]));
    scratch.val
}
/// Extracts pointers to the first `before` and last `after` elements of the
/// vector `val`, in pattern order, for matching a vector pattern.
fn extract_vec_elems<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 left_ty: Ty,
                                 before: uint,
                                 after: uint,
                                 val: ValueRef)
                                 -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_vec_elems");
    let vec_datum = match_datum(val, left_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);
    let mut elems = vec![];
    // Leading elements are indexed forward from the base pointer.
    elems.extend(range(0, before).map(|i| GEPi(bcx, base, &[i])));
    // Trailing elements are addressed relative to the runtime length; `rev`
    // keeps them in source (left-to-right) order.
    elems.extend(range(0, after).rev().map(|i| {
        InBoundsGEP(bcx, base, &[
            Sub(bcx, len, C_uint(bcx.ccx(), i + 1))
        ])
    }));
    ExtractedBlock { vals: elems, bcx: bcx }
}
// Macro for deciding whether any of the remaining matches fit a given kind of
// pattern. Note that, because the macro is well-typed, either ALL of the
// matches should fit that sort of pattern or NONE (however, some of the
// matches may be wildcards like _ or identifiers).
// Expands to a boolean expression testing column `$col` of every row in `$m`.
macro_rules! any_pat (
    ($m:expr, $col:expr, $pattern:pat) => (
        ($m).iter().any(|br| {
            match br.pats[$col].node {
                $pattern => true,
                _ => false
            }
        })
    )
)
/// True if any pattern in column `col` is a box pattern (`box <pat>`).
fn any_uniq_pat(m: &[Match], col: uint) -> bool {
    any_pat!(m, col, ast::PatBox(_))
}
/// True if any pattern in column `col` is a reference pattern (`&<pat>`).
fn any_region_pat(m: &[Match], col: uint) -> bool {
    any_pat!(m, col, ast::PatRegion(_))
}
/// True if any pattern in column `col` is an irrefutable ADT pattern: a
/// tuple, a struct pattern that is not an enum variant in struct syntax, or
/// a path/enum pattern that resolves to a struct.
fn any_irrefutable_adt_pat(tcx: &ty::ctxt, m: &[Match], col: uint) -> bool {
    m.iter().any(|br| {
        let pat = br.pats[col];
        match pat.node {
            ast::PatTup(_) => true,
            ast::PatStruct(..) => {
                // Struct syntax naming an enum variant is refutable.
                match tcx.def_map.borrow().get(&pat.id) {
                    Some(&def::DefVariant(..)) => false,
                    _ => true,
                }
            }
            ast::PatEnum(..) | ast::PatIdent(_, _, None) => {
                // Only paths resolving to a struct (e.g. tuple structs)
                // are irrefutable; enum variants are not.
                match tcx.def_map.borrow().get(&pat.id) {
                    Some(&def::DefStruct(..)) => true,
                    _ => false
                }
            }
            _ => false
        }
    })
}
/// What to do when the pattern match fails.
enum FailureHandler {
    // The match cannot fail; reaching the failure path is a compiler bug.
    Infallible,
    // On failure, branch to the given basic block.
    JumpToBasicBlock(BasicBlockRef),
    // The failure path is dynamically unreachable (matching a value of an
    // uninhabited type).
    Unreachable
}
impl FailureHandler {
fn is_fallible(&self) -> bool {
match *self {
Infallible => false,
_ => true
}
}
fn is_infallible(&self) -> bool {
!self.is_fallible()
}
fn handle_fail(&self, bcx: Block) {
match *self {
Infallible =>
panic!("attempted to panic in a non-panicking panic handler!"),
JumpToBasicBlock(basic_block) =>
Br(bcx, basic_block),
Unreachable =>
build::Unreachable(bcx)
}
}
}
/// Picks which column of patterns `compile_submatch` should specialize on
/// next, or `None` when every remaining pattern is a wildcard.
fn pick_column_to_specialize(def_map: &DefMap, m: &[Match]) -> Option<uint> {
    // Scores a single pattern: 1 if it is refutable (needs a runtime test),
    // 0 otherwise; `ident @ pat` is scored by its inner pattern.
    fn pat_score(def_map: &DefMap, pat: &ast::Pat) -> uint {
        match pat.node {
            ast::PatIdent(_, _, Some(ref inner)) => pat_score(def_map, &**inner),
            _ if pat_is_refutable(def_map, pat) => 1u,
            _ => 0u
        }
    }
    let column_score: |&[Match], uint| -> uint = |m, col| {
        let total_score = m.iter()
            .map(|row| row.pats[col])
            .map(|pat| pat_score(def_map, pat))
            .sum();
        // Irrefutable columns always go first, they'd only be duplicated in the branches.
        if total_score == 0 {
            std::uint::MAX
        } else {
            total_score
        }
    };
    let column_contains_any_nonwild_patterns: |&uint| -> bool = |&col| {
        m.iter().any(|row| match row.pats[col].node {
            ast::PatWild(_) => false,
            _ => true
        })
    };
    // Consider only columns with at least one non-wildcard pattern and take
    // the highest-scoring one (old `max_by` == today's `max_by_key`).
    range(0, m[0].pats.len())
        .filter(column_contains_any_nonwild_patterns)
        .map(|col| (col, column_score(m, col)))
        .max_by(|&(_, score)| score)
        .map(|(col, _)| col)
}
// Compiles a comparison between two things.
//
// Scalars compare with a direct equality; `&str` and `&[u8]` go through the
// `str_eq` lang item (which performs a memcmp). Anything else is a bug.
fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                              lhs: ValueRef,
                              rhs: ValueRef,
                              rhs_t: Ty<'tcx>)
                              -> Result<'blk, 'tcx> {
    // Emits a call to the `str_eq` lang item for string-like comparands.
    fn compare_str<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                               lhs: ValueRef,
                               rhs: ValueRef,
                               rhs_t: Ty<'tcx>)
                               -> Result<'blk, 'tcx> {
        let did = langcall(cx,
                           None,
                           format!("comparison of `{}`",
                                   cx.ty_to_string(rhs_t)).as_slice(),
                           StrEqFnLangItem);
        callee::trans_lang_call(cx, did, &[lhs, rhs], None)
    }
    let _icx = push_ctxt("compare_values");
    if ty::type_is_scalar(rhs_t) {
        let rs = compare_scalar_types(cx, lhs, rhs, rhs_t, ast::BiEq);
        return Result::new(rs.bcx, rs.val);
    }
    match rhs_t.sty {
        ty::ty_rptr(_, mt) => match mt.ty.sty {
            ty::ty_str => compare_str(cx, lhs, rhs, rhs_t),
            ty::ty_vec(ty, _) => match ty.sty {
                ty::ty_uint(ast::TyU8) => {
                    // NOTE: cast &[u8] to &str and abuse the str_eq lang item,
                    // which calls memcmp().
                    let t = ty::mk_str_slice(cx.tcx(), ty::ReStatic, ast::MutImmutable);
                    let lhs = BitCast(cx, lhs, type_of::type_of(cx.ccx(), t).ptr_to());
                    let rhs = BitCast(cx, rhs, type_of::type_of(cx.ccx(), t).ptr_to());
                    compare_str(cx, lhs, rhs, rhs_t)
                },
                _ => cx.sess().bug("only byte strings supported in compare_values"),
            },
            _ => cx.sess().bug("only string and byte strings supported in compare_values"),
        },
        _ => cx.sess().bug("only scalars, byte strings, and strings supported in compare_values"),
    }
}
/// For each binding in `data.bindings_map`, adds an appropriate entry into the `fcx.lllocals` map
///
/// When `cs` is `Some`, drop/lifetime-end cleanups are scheduled in that
/// scope (used for arm bodies); `None` skips cleanups (used while a guard is
/// being evaluated).
fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                               bindings_map: &BindingsMap<'tcx>,
                               cs: Option<cleanup::ScopeId>)
                               -> Block<'blk, 'tcx> {
    for (&ident, &binding_info) in bindings_map.iter() {
        let llval = match binding_info.trmode {
            // By value mut binding for a copy type: load from the ptr
            // into the matched value and copy to our alloca
            TrByCopy(llbinding) => {
                let llval = Load(bcx, binding_info.llmatch);
                let datum = Datum::new(llval, binding_info.ty, Lvalue);
                call_lifetime_start(bcx, llbinding);
                bcx = datum.store_to(bcx, llbinding);
                if let Some(cs) = cs {
                    bcx.fcx.schedule_lifetime_end(cs, llbinding);
                }
                llbinding
            },
            // By value move bindings: load from the ptr into the matched value
            TrByMove => Load(bcx, binding_info.llmatch),
            // By ref binding: use the ptr into the matched value
            TrByRef => binding_info.llmatch
        };
        let datum = Datum::new(llval, binding_info.ty, Lvalue);
        if let Some(cs) = cs {
            bcx.fcx.schedule_drop_and_zero_mem(cs, llval, binding_info.ty);
            bcx.fcx.schedule_lifetime_end(cs, binding_info.llmatch);
        }
        debug!("binding {} to {}",
               binding_info.id,
               bcx.val_to_string(llval));
        // Make the binding visible to subsequent expression translation.
        bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
        if bcx.sess().opts.debuginfo == FullDebugInfo {
            debuginfo::create_match_binding_metadata(bcx,
                                                     ident,
                                                     binding_info);
        }
    }
    bcx
}
/// Compiles an arm's `if` guard: evaluates `guard_expr` with the arm's
/// bindings temporarily in scope (no cleanups), and on guard failure removes
/// the bindings again and continues matching with the remaining rows `m`.
fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     guard_expr: &ast::Expr,
                                     data: &ArmData<'p, 'blk, 'tcx>,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     vals: &[ValueRef],
                                     chk: &FailureHandler,
                                     has_genuine_default: bool)
                                     -> Block<'blk, 'tcx> {
    debug!("compile_guard(bcx={}, guard_expr={}, m={}, vals={})",
           bcx.to_str(),
           bcx.expr_to_string(guard_expr),
           m.repr(bcx.tcx()),
           vec_map_to_string(vals, |v| bcx.val_to_string(*v)));
    let _indenter = indenter();
    // `None`: bindings get no cleanup scope while the guard runs.
    let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);
    let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
    let val = val.to_llbool(bcx);
    // Copy-bound temporaries are only alive for the guard expression itself.
    for (_, &binding_info) in data.bindings_map.iter() {
        if let TrByCopy(llbinding) = binding_info.trmode {
            call_lifetime_end(bcx, llbinding);
        }
    }
    with_cond(bcx, Not(bcx, val), |bcx| {
        // Guard does not match: remove all bindings from the lllocals table
        for (_, &binding_info) in data.bindings_map.iter() {
            call_lifetime_end(bcx, binding_info.llmatch);
            bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
        }
        match chk {
            // If the default arm is the only one left, move on to the next
            // condition explicitly rather than (possibly) falling back to
            // the default arm.
            &JumpToBasicBlock(_) if m.len() == 1 && has_genuine_default => {
                chk.handle_fail(bcx);
            }
            _ => {
                compile_submatch(bcx, m, vals, chk, has_genuine_default);
            }
        };
        bcx
    })
}
/// Main driver of the pattern-match decision tree: given the remaining rows
/// `m` and the values `vals` still to be matched (one per pattern column),
/// either emits the failure path, specializes on one column, or — when only
/// wildcard-like rows remain — binds the first row and jumps to its body.
fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        m: &[Match<'a, 'p, 'blk, 'tcx>],
                                        vals: &[ValueRef],
                                        chk: &FailureHandler,
                                        has_genuine_default: bool) {
    debug!("compile_submatch(bcx={}, m={}, vals={})",
           bcx.to_str(),
           m.repr(bcx.tcx()),
           vec_map_to_string(vals, |v| bcx.val_to_string(*v)));
    let _indenter = indenter();
    let _icx = push_ctxt("match::compile_submatch");
    let mut bcx = bcx;
    // No rows left: this path is a match failure.
    if m.len() == 0u {
        if chk.is_fallible() {
            chk.handle_fail(bcx);
        }
        return;
    }
    let tcx = bcx.tcx();
    let def_map = &tcx.def_map;
    match pick_column_to_specialize(def_map, m) {
        Some(col) => {
            let val = vals[col];
            // `x @ pat` patterns are flattened before specializing.
            if has_nested_bindings(m, col) {
                let expanded = expand_nested_bindings(bcx, m, col, val);
                compile_submatch_continue(bcx,
                                          expanded.as_slice(),
                                          vals,
                                          chk,
                                          col,
                                          val,
                                          has_genuine_default)
            } else {
                compile_submatch_continue(bcx, m, vals, chk, col, val, has_genuine_default)
            }
        }
        None => {
            // Only wildcards/bindings remain: the first row wins. Store the
            // matched sub-values into its binding slots.
            let data = &m[0].data;
            for &(ref ident, ref value_ptr) in m[0].bound_ptrs.iter() {
                let llmatch = data.bindings_map[*ident].llmatch;
                call_lifetime_start(bcx, llmatch);
                Store(bcx, *value_ptr, llmatch);
            }
            match data.arm.guard {
                Some(ref guard_expr) => {
                    // On guard failure, matching resumes with the rows after
                    // this one (`m[1..]`).
                    bcx = compile_guard(bcx,
                                        &**guard_expr,
                                        m[0].data,
                                        m[1..m.len()],
                                        vals,
                                        chk,
                                        has_genuine_default);
                }
                _ => ()
            }
            Br(bcx, data.bodycx.llbb);
        }
    }
}
/// Specializes the match on column `col` of `m`: decides what kind of
/// runtime branch is needed (nothing, single destructuring, LLVM `switch`,
/// or chained comparisons), emits it, and recurses into `compile_submatch`
/// for every branch and for the fall-through/default rows.
fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                                 m: &[Match<'a, 'p, 'blk, 'tcx>],
                                                 vals: &[ValueRef],
                                                 chk: &FailureHandler,
                                                 col: uint,
                                                 val: ValueRef,
                                                 has_genuine_default: bool) {
    let fcx = bcx.fcx;
    let tcx = bcx.tcx();
    let dm = &tcx.def_map;
    // The values for every column except the one being specialized away.
    let mut vals_left = vals[0u..col].to_vec();
    vals_left.push_all(vals[col + 1u..]);
    let ccx = bcx.fcx.ccx;
    // Find a real id (we're adding placeholder wildcard patterns, but
    // each column is guaranteed to have at least one real pattern)
    let pat_id = m.iter().map(|br| br.pats[col].id)
                  .find(|&id| id != DUMMY_NODE_ID)
                  .unwrap_or(DUMMY_NODE_ID);
    let left_ty = if pat_id == DUMMY_NODE_ID {
        ty::mk_nil(tcx)
    } else {
        node_id_type(bcx, pat_id)
    };
    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: ty::empty_parameter_environment(),
    };
    // If the column is irrefutable (single-variant ADT, box, reference, or
    // fixed-length vector), destructure unconditionally: no branch needed.
    let adt_vals = if any_irrefutable_adt_pat(bcx.tcx(), m, col) {
        let repr = adt::represent_type(bcx.ccx(), left_ty);
        let arg_count = adt::num_args(&*repr, 0);
        let field_vals: Vec<ValueRef> = std::iter::range(0, arg_count).map(|ix|
            adt::trans_field_ptr(bcx, &*repr, val, 0, ix)
        ).collect();
        Some(field_vals)
    } else if any_uniq_pat(m, col) || any_region_pat(m, col) {
        // Boxes and references: dereference once.
        Some(vec!(Load(bcx, val)))
    } else {
        match left_ty.sty {
            ty::ty_vec(_, Some(n)) => {
                let args = extract_vec_elems(bcx, left_ty, n, 0, val);
                Some(args.vals)
            }
            _ => None
        }
    };
    match adt_vals {
        Some(field_vals) => {
            let pats = enter_match(bcx, dm, m, col, val, |pats|
                check_match::specialize(&mcx, pats,
                                        &check_match::Single, col,
                                        field_vals.len())
            );
            // The extracted fields replace the specialized column, in front
            // of the remaining columns.
            let mut vals = field_vals;
            vals.push_all(vals_left.as_slice());
            compile_submatch(bcx, pats.as_slice(), vals.as_slice(), chk, has_genuine_default);
            return;
        }
        _ => ()
    }
    // Decide what kind of branch we need
    let opts = get_branches(bcx, m, col);
    debug!("options={}", opts);
    let mut kind = NoBranch;
    let mut test_val = val;
    debug!("test_val={}", bcx.val_to_string(test_val));
    if opts.len() > 0u {
        match opts[0] {
            ConstantValue(_) | ConstantRange(_, _) => {
                test_val = load_if_immediate(bcx, val, left_ty);
                kind = if ty::type_is_integral(left_ty) {
                    Switch
                } else {
                    Compare
                };
            }
            Variant(_, ref repr, _) => {
                // Test the discriminant; `adt::trans_switch` decides whether
                // a switch or a single block suffices for this layout.
                let (the_kind, val_opt) = adt::trans_switch(bcx, &**repr, val);
                kind = the_kind;
                for &tval in val_opt.iter() { test_val = tval; }
            }
            SliceLengthEqual(_) | SliceLengthGreaterOrEqual(_, _) => {
                // Vector patterns switch on the runtime length.
                let (_, len) = tvec::get_base_and_len(bcx, val, left_ty);
                test_val = len;
                kind = Switch;
            }
        }
    }
    // Ranges and ">= length" tests cannot use an LLVM switch; downgrade to
    // chained comparisons if any option needs them.
    for o in opts.iter() {
        match *o {
            ConstantRange(_, _) => { kind = Compare; break },
            SliceLengthGreaterOrEqual(_, _) => { kind = CompareSliceLength; break },
            _ => ()
        }
    }
    let else_cx = match kind {
        NoBranch | Single => bcx,
        _ => bcx.fcx.new_temp_block("match_else")
    };
    let sw = if kind == Switch {
        build::Switch(bcx, test_val, else_cx.llbb, opts.len())
    } else {
        C_int(ccx, 0i) // Placeholder for when not using a switch
    };
    let defaults = enter_default(else_cx, dm, m, col, val);
    let exhaustive = chk.is_infallible() && defaults.len() == 0u;
    let len = opts.len();
    // Compile subtrees for each option
    for (i, opt) in opts.iter().enumerate() {
        // In some cases of range and vector pattern matching, we need to
        // override the failure case so that instead of failing, it proceeds
        // to try more matching. branch_chk, then, is the proper failure case
        // for the current conditional branch.
        let mut branch_chk = None;
        let mut opt_cx = else_cx;
        if !exhaustive || i + 1 < len {
            opt_cx = bcx.fcx.new_temp_block("match_case");
            match kind {
                Single => Br(bcx, opt_cx.llbb),
                Switch => {
                    match opt.trans(bcx) {
                        SingleResult(r) => {
                            AddCase(sw, r.val, opt_cx.llbb);
                            bcx = r.bcx;
                        }
                        _ => {
                            bcx.sess().bug(
                                "in compile_submatch, expected \
                                 opt.trans() to return a SingleResult")
                        }
                    }
                }
                Compare | CompareSliceLength => {
                    let t = if kind == Compare {
                        left_ty
                    } else {
                        ty::mk_uint() // vector length
                    };
                    let Result { bcx: after_cx, val: matches } = {
                        match opt.trans(bcx) {
                            SingleResult(Result { bcx, val }) => {
                                compare_values(bcx, test_val, val, t)
                            }
                            RangeResult(Result { val: vbegin, .. },
                                        Result { bcx, val: vend }) => {
                                // Range `lo...hi`: test lo <= v && v <= hi.
                                let Result { bcx, val: llge } =
                                    compare_scalar_types(
                                    bcx, test_val,
                                    vbegin, t, ast::BiGe);
                                let Result { bcx, val: llle } =
                                    compare_scalar_types(
                                    bcx, test_val, vend,
                                    t, ast::BiLe);
                                Result::new(bcx, And(bcx, llge, llle))
                            }
                            LowerBound(Result { bcx, val }) => {
                                compare_scalar_types(bcx, test_val, val, t, ast::BiGe)
                            }
                        }
                    };
                    bcx = fcx.new_temp_block("compare_next");
                    // If none of the sub-cases match, and the current condition
                    // is guarded or has multiple patterns, move on to the next
                    // condition, if there is any, rather than falling back to
                    // the default.
                    let guarded = m[i].data.arm.guard.is_some();
                    let multi_pats = m[i].pats.len() > 1;
                    if i + 1 < len && (guarded || multi_pats || kind == CompareSliceLength) {
                        branch_chk = Some(JumpToBasicBlock(bcx.llbb));
                    }
                    CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb);
                }
                _ => ()
            }
        } else if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb);
        }
        // Destructure the matched value for this option and count how many
        // new columns (`size`) it contributes.
        let mut size = 0u;
        let mut unpacked = Vec::new();
        match *opt {
            Variant(disr_val, ref repr, _) => {
                let ExtractedBlock {vals: argvals, bcx: new_bcx} =
                    extract_variant_args(opt_cx, &**repr, disr_val, val);
                size = argvals.len();
                unpacked = argvals;
                opt_cx = new_bcx;
            }
            SliceLengthEqual(len) => {
                let args = extract_vec_elems(opt_cx, left_ty, len, 0, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            SliceLengthGreaterOrEqual(before, after) => {
                let args = extract_vec_elems(opt_cx, left_ty, before, after, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            ConstantValue(_) | ConstantRange(_, _) => ()
        }
        let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
        let mut opt_vals = unpacked;
        opt_vals.push_all(vals_left.as_slice());
        compile_submatch(opt_cx,
                         opt_ms.as_slice(),
                         opt_vals.as_slice(),
                         branch_chk.as_ref().unwrap_or(chk),
                         has_genuine_default);
    }
    // Compile the fall-through case, if any
    if !exhaustive && kind != Single {
        if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb);
        }
        match chk {
            // If there is only one default arm left, move on to the next
            // condition explicitly rather than (eventually) falling back to
            // the last default arm.
            &JumpToBasicBlock(_) if defaults.len() == 1 && has_genuine_default => {
                chk.handle_fail(else_cx);
            }
            _ => {
                compile_submatch(else_cx,
                                 defaults.as_slice(),
                                 vals_left.as_slice(),
                                 chk,
                                 has_genuine_default);
            }
        }
    }
}
/// Translates a `match` expression, writing its result into `dest`.
pub fn trans_match<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               match_expr: &ast::Expr,
                               discr_expr: &ast::Expr,
                               arms: &[ast::Arm],
                               dest: Dest)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match");
    trans_match_inner(bcx, match_expr.id, discr_expr, arms, dest)
}
/// Checks whether the binding in `discr` is assigned to anywhere in the expression `body`
///
/// Only simple discriminants are considered: a plain local/upvar path, or a
/// single (named or positional) field access on one. Anything else is
/// conservatively reported as not reassigned.
fn is_discr_reassigned(bcx: Block, discr: &ast::Expr, body: &ast::Expr) -> bool {
    // Identify the variable (and optional field) the discriminant refers to.
    let (vid, field) = match discr.node {
        ast::ExprPath(..) => match bcx.def(discr.id) {
            def::DefLocal(vid) | def::DefUpvar(vid, _, _) => (vid, None),
            _ => return false
        },
        ast::ExprField(ref base, field) => {
            let vid = match bcx.tcx().def_map.borrow().get(&base.id) {
                Some(&def::DefLocal(vid)) | Some(&def::DefUpvar(vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::NamedField(field.node.name)))
        },
        ast::ExprTupField(ref base, field) => {
            let vid = match bcx.tcx().def_map.borrow().get(&base.id) {
                Some(&def::DefLocal(vid)) | Some(&def::DefUpvar(vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::PositionalField(field.node)))
        },
        _ => return false
    };
    // Walk `body` with an expression-use visitor; `rc` records any mutation
    // of the identified variable/field.
    let mut rc = ReassignmentChecker {
        node: vid,
        field: field,
        reassigned: false
    };
    {
        let param_env = ty::empty_parameter_environment();
        let mut visitor = euv::ExprUseVisitor::new(&mut rc, bcx, param_env);
        visitor.walk_expr(body);
    }
    rc.reassigned
}
/// Visitor state for `is_discr_reassigned`: records whether the tracked
/// variable (and optional field) is mutated.
struct ReassignmentChecker {
    // Variable being watched.
    node: ast::NodeId,
    // `Some` when only a specific field of the variable is watched.
    field: Option<mc::FieldName>,
    // Set to true once a matching mutation is seen.
    reassigned: bool
}
// Determine if the expression we're matching on is reassigned to within
// the body of the match's arm.
// We only care for the `mutate` callback since this check only matters
// for cases where the matched value is moved.
impl<'tcx> euv::Delegate<'tcx> for ReassignmentChecker {
    fn consume(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: euv::ConsumeMode) {}
    fn matched_pat(&mut self, _: &ast::Pat, _: mc::cmt, _: euv::MatchMode) {}
    fn consume_pat(&mut self, _: &ast::Pat, _: mc::cmt, _: euv::ConsumeMode) {}
    fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
              _: ty::BorrowKind, _: euv::LoanCause) {}
    fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
    fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
        match cmt.cat {
            // Direct assignment to the watched local/upvar.
            mc::cat_upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
            mc::cat_local(vid) => self.reassigned = self.node == vid,
            // Assignment to a field: it only counts when both the base
            // variable and the field match what we are watching.
            mc::cat_interior(ref base_cmt, mc::InteriorField(field)) => {
                match base_cmt.cat {
                    mc::cat_upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
                    mc::cat_local(vid) => {
                        self.reassigned = self.node == vid && Some(field) == self.field
                    },
                    _ => {}
                }
            },
            _ => {}
        }
    }
}
/// Builds the `BindingsMap` for one arm: for every name bound by `pat`,
/// allocates the stack slot(s) it needs and records how the binding will be
/// translated (`TrByCopy`, `TrByMove`, or `TrByRef`).
fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
                                   discr: &ast::Expr, body: &ast::Expr)
                                   -> BindingsMap<'tcx> {
    // Create the bindings map, which is a mapping from each binding name
    // to an alloca() that will be the value for that local variable.
    // Note that we use the names because each binding will have many ids
    // from the various alternatives.
    let ccx = bcx.ccx();
    let tcx = bcx.tcx();
    let reassigned = is_discr_reassigned(bcx, discr, body);
    let mut bindings_map = FnvHashMap::new();
    pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
        let ident = path1.node;
        let variable_ty = node_id_type(bcx, p_id);
        let llvariable_ty = type_of::type_of(ccx, variable_ty);
        let tcx = bcx.tcx();
        let param_env = ty::empty_parameter_environment();
        let llmatch;
        let trmode;
        match bm {
            // Copyable value (or a discriminant that is reassigned in the
            // body, which forces a copy): two slots — `llmatch` holds the
            // pointer into the matched value, the second holds the copy.
            ast::BindByValue(_)
                if !ty::type_moves_by_default(tcx,
                                              variable_ty,
                                              &param_env) || reassigned => {
                llmatch = alloca_no_lifetime(bcx,
                                 llvariable_ty.ptr_to(),
                                 "__llmatch");
                trmode = TrByCopy(alloca_no_lifetime(bcx,
                                         llvariable_ty,
                                         bcx.ident(ident).as_slice()));
            }
            ast::BindByValue(_) => {
                // in this case, the final type of the variable will be T,
                // but during matching we need to store a *T as explained
                // above
                llmatch = alloca_no_lifetime(bcx,
                                 llvariable_ty.ptr_to(),
                                 bcx.ident(ident).as_slice());
                trmode = TrByMove;
            }
            ast::BindByRef(_) => {
                // By-ref binding: the slot directly holds the pointer.
                llmatch = alloca_no_lifetime(bcx,
                                 llvariable_ty,
                                 bcx.ident(ident).as_slice());
                trmode = TrByRef;
            }
        };
        bindings_map.insert(ident, BindingInfo {
            llmatch: llmatch,
            trmode: trmode,
            id: p_id,
            span: span,
            ty: variable_ty
        });
    });
    return bindings_map;
}
/// Translates the body of a `match`: evaluates the discriminant, builds one
/// `Match` row per arm pattern, compiles the decision tree, then translates
/// each arm body and joins the resulting blocks.
fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
                                 match_id: ast::NodeId,
                                 discr_expr: &ast::Expr,
                                 arms: &[ast::Arm],
                                 dest: Dest) -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match_inner");
    let fcx = scope_cx.fcx;
    let mut bcx = scope_cx;
    let tcx = bcx.tcx();
    let discr_datum = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, discr_expr,
                                                               "match"));
    if bcx.unreachable.get() {
        return bcx;
    }
    let t = node_id_type(bcx, discr_expr.id);
    // Matching an uninhabited type can never fail at runtime; otherwise the
    // exhaustiveness checker guarantees the match is infallible.
    let chk = if ty::type_is_empty(tcx, t) {
        Unreachable
    } else {
        Infallible
    };
    let arm_datas: Vec<ArmData> = arms.iter().map(|arm| ArmData {
        bodycx: fcx.new_id_block("case_body", arm.body.id),
        arm: arm,
        bindings_map: create_bindings_map(bcx, &*arm.pats[0], discr_expr, &*arm.body)
    }).collect();
    // Inline `static` constants referenced by the patterns.
    let mut static_inliner = StaticInliner::new(scope_cx.tcx());
    let arm_pats: Vec<Vec<P<ast::Pat>>> = arm_datas.iter().map(|arm_data| {
        arm_data.arm.pats.iter().map(|p| static_inliner.fold_pat((*p).clone())).collect()
    }).collect();
    // One `Match` row per pattern (an arm with `a | b` yields two rows).
    let mut matches = Vec::new();
    for (arm_data, pats) in arm_datas.iter().zip(arm_pats.iter()) {
        matches.extend(pats.iter().map(|p| Match {
            pats: vec![&**p],
            data: arm_data,
            bound_ptrs: Vec::new(),
        }));
    }
    // `compile_submatch` works one column of arm patterns a time and
    // then peels that column off. So as we progress, it may become
    // impossible to tell whether we have a genuine default arm, i.e.
    // `_ => foo` or not. Sometimes it is important to know that in order
    // to decide whether moving on to the next condition or falling back
    // to the default arm.
    let has_default = arms.last().map_or(false, |arm| {
        arm.pats.len() == 1
        && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle)
    });
    compile_submatch(bcx, matches.as_slice(), &[discr_datum.val], &chk, has_default);
    let mut arm_cxs = Vec::new();
    for arm_data in arm_datas.iter() {
        let mut bcx = arm_data.bodycx;
        // insert bindings into the lllocals map and add cleanups
        let cs = fcx.push_custom_cleanup_scope();
        bcx = insert_lllocals(bcx, &arm_data.bindings_map, Some(cleanup::CustomScope(cs)));
        bcx = expr::trans_into(bcx, &*arm_data.arm.body, dest);
        bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, cs);
        arm_cxs.push(bcx);
    }
    bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.as_slice());
    return bcx;
}
/// Generates code for a local variable declaration like `let <pat>;` or `let <pat> =
/// <opt_init_expr>`.
pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               local: &ast::Local)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::store_local");
    let mut bcx = bcx;
    let tcx = bcx.tcx();
    let pat = &*local.pat;
    // Allocates zero-initialized slots for all bindings in `pat`, used when
    // the `let` has no initializer.
    fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       pat: &ast::Pat)
                                       -> Block<'blk, 'tcx> {
        // create dummy memory for the variables if we have no
        // value to store into them immediately
        let tcx = bcx.tcx();
        pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| {
            let scope = cleanup::var_scope(tcx, p_id);
            bcx = mk_binding_alloca(
                bcx, p_id, &path1.node, scope, (),
                |(), bcx, llval, ty| { zero_mem(bcx, llval, ty); bcx });
        });
        bcx
    }
    match local.init {
        Some(ref init_expr) => {
            // Optimize the "let x = expr" case. This just writes
            // the result of evaluating `expr` directly into the alloca
            // for `x`. Often the general path results in similar or the
            // same code post-optimization, but not always. In particular,
            // in unsafe code, you can have expressions like
            //
            //    let x = intrinsics::uninit();
            //
            // In such cases, the more general path is unsafe, because
            // it assumes it is matching against a valid value.
            match simple_identifier(&*pat) {
                Some(ident) => {
                    let var_scope = cleanup::var_scope(tcx, local.id);
                    return mk_binding_alloca(
                        bcx, pat.id, ident, var_scope, (),
                        |(), bcx, v, _| expr::trans_into(bcx, &**init_expr,
                                                         expr::SaveIn(v)));
                }
                None => {}
            }
            // General path.
            let init_datum =
                unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &**init_expr, "let"));
            if bcx.sess().asm_comments() {
                add_comment(bcx, "creating zeroable ref llval");
            }
            let var_scope = cleanup::var_scope(tcx, local.id);
            bind_irrefutable_pat(bcx, pat, init_datum.val, var_scope)
        }
        None => {
            create_dummy_locals(bcx, pat)
        }
    }
}
/// Generates code for argument patterns like `fn foo(<pat>: T)`.
/// Creates entries in the `lllocals` map for each of the bindings
/// in `pat`.
///
/// # Arguments
///
/// - `pat` is the argument pattern
/// - `llval` is a pointer to the argument value (in other words,
///   if the argument type is `T`, then `llval` is a `T*`). In some
///   cases, this code may zero out the memory `llval` points at.
pub fn store_arg<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                             pat: &ast::Pat,
                             arg: Datum<'tcx, Rvalue>,
                             arg_scope: cleanup::ScopeId)
                             -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::store_arg");
    match simple_identifier(&*pat) {
        Some(ident) => {
            // Generate nicer LLVM for the common case of fn a pattern
            // like `x: T`
            let arg_ty = node_id_type(bcx, pat.id);
            if type_of::arg_is_indirect(bcx.ccx(), arg_ty)
                && bcx.sess().opts.debuginfo != FullDebugInfo {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up, unless
                // we emit extra-debug-info, which requires local allocas :(.
                let arg_val = arg.add_clean(bcx.fcx, arg_scope);
                bcx.fcx.lllocals.borrow_mut()
                   .insert(pat.id, Datum::new(arg_val, arg_ty, Lvalue));
                bcx
            } else {
                // Direct argument: copy it into a named alloca.
                mk_binding_alloca(
                    bcx, pat.id, ident, arg_scope, arg,
                    |arg, bcx, llval, _| arg.store_to(bcx, llval))
            }
        }
        None => {
            // General path. Copy out the values that are used in the
            // pattern.
            let arg = unpack_datum!(
                bcx, arg.to_lvalue_datum_in_scope(bcx, "__arg", arg_scope));
            bind_irrefutable_pat(bcx, pat, arg.val, arg_scope)
        }
    }
}
/// Generates code for the pattern binding in a `for` loop like
/// `for <pat> in <expr> { ... }`.
pub fn store_for_loop_binding<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          pat: &ast::Pat,
                                          llvalue: ValueRef,
                                          body_scope: cleanup::ScopeId)
                                          -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::store_for_loop_binding");
    if simple_identifier(&*pat).is_some() {
        // Generate nicer LLVM for the common case of a `for` loop pattern
        // like `for x in blahblah { ... }`.
        // The loop value is used in place; no copy into a fresh alloca.
        let binding_type = node_id_type(bcx, pat.id);
        bcx.fcx.lllocals.borrow_mut().insert(pat.id,
                                             Datum::new(llvalue,
                                                        binding_type,
                                                        Lvalue));
        return bcx
    }
    // General path. Copy out the values that are used in the pattern.
    bind_irrefutable_pat(bcx, pat, llvalue, body_scope)
}
/// Allocates a stack slot for the binding `ident` (node `p_id`), lets
/// `populate` initialize it from `arg`, schedules its cleanups in
/// `cleanup_scope`, and registers it in `fcx.lllocals`.
fn mk_binding_alloca<'blk, 'tcx, A>(bcx: Block<'blk, 'tcx>,
                                    p_id: ast::NodeId,
                                    ident: &ast::Ident,
                                    cleanup_scope: cleanup::ScopeId,
                                    arg: A,
                                    populate: |A, Block<'blk, 'tcx>, ValueRef, Ty<'tcx>|
                                              -> Block<'blk, 'tcx>)
                                    -> Block<'blk, 'tcx> {
    let var_ty = node_id_type(bcx, p_id);
    // Allocate memory on stack for the binding.
    let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).as_slice());
    // Subtle: be sure that we *populate* the memory *before*
    // we schedule the cleanup.
    let bcx = populate(arg, bcx, llval, var_ty);
    bcx.fcx.schedule_lifetime_end(cleanup_scope, llval);
    bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty);
    // Now that memory is initialized and has cleanup scheduled,
    // create the datum and insert into the local variable map.
    let datum = Datum::new(llval, var_ty, Lvalue);
    bcx.fcx.lllocals.borrow_mut().insert(p_id, datum);
    bcx
}
/// A simple version of the pattern matching code that only handles
/// irrefutable patterns. This is used in let/argument patterns,
/// not in match statements. Unifying this code with the code above
/// sounds nice, but in practice it produces very inefficient code,
/// since the match code is so much more general. In most cases,
/// LLVM is able to optimize the code, but it causes longer compile
/// times and makes the generated code nigh impossible to read.
///
/// # Arguments
/// - bcx: starting basic block context
/// - pat: the irrefutable pattern being matched.
/// - val: the value being matched -- must be an lvalue (by ref, with cleanup)
fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    pat: &ast::Pat,
                                    val: ValueRef,
                                    cleanup_scope: cleanup::ScopeId)
                                    -> Block<'blk, 'tcx> {
    debug!("bind_irrefutable_pat(bcx={}, pat={})",
           bcx.to_str(),
           pat.repr(bcx.tcx()));
    if bcx.sess().asm_comments() {
        add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
                                 pat.repr(bcx.tcx())).as_slice());
    }
    let _indenter = indenter();
    let _icx = push_ctxt("match::bind_irrefutable_pat");
    let mut bcx = bcx;
    let tcx = bcx.tcx();
    let ccx = bcx.ccx();
    match pat.node {
        ast::PatIdent(pat_binding_mode, ref path1, ref inner) => {
            if pat_is_binding(&tcx.def_map, &*pat) {
                // Allocate the stack slot where the value of this
                // binding will live and place it into the appropriate
                // map.
                bcx = mk_binding_alloca(
                    bcx, pat.id, &path1.node, cleanup_scope, (),
                    |(), bcx, llval, ty| {
                        match pat_binding_mode {
                            ast::BindByValue(_) => {
                                // By value binding: move the value that `val`
                                // points at into the binding's stack slot.
                                let d = Datum::new(val, ty, Lvalue);
                                d.store_to(bcx, llval)
                            }
                            ast::BindByRef(_) => {
                                // By ref binding: the value of the variable
                                // is the pointer `val` itself.
                                Store(bcx, val, llval);
                                bcx
                            }
                        }
                    });
            }
            // `ident @ inner`: also bind the inner pattern to the same value.
            for inner_pat in inner.iter() {
                bcx = bind_irrefutable_pat(bcx, &**inner_pat, val, cleanup_scope);
            }
        }
        ast::PatEnum(_, ref sub_pats) => {
            let opt_def = bcx.tcx().def_map.borrow().get(&pat.id).cloned();
            match opt_def {
                Some(def::DefVariant(enum_id, var_id, _)) => {
                    // Single-variant enum (irrefutable): bind each variant
                    // argument to its sub-pattern.
                    let repr = adt::represent_node(bcx, pat.id);
                    let vinfo = ty::enum_variant_with_id(ccx.tcx(),
                                                         enum_id,
                                                         var_id);
                    let args = extract_variant_args(bcx,
                                                    &*repr,
                                                    vinfo.disr_val,
                                                    val);
                    for sub_pat in sub_pats.iter() {
                        for (i, &argval) in args.vals.iter().enumerate() {
                            bcx = bind_irrefutable_pat(bcx, &*sub_pat[i],
                                                       argval, cleanup_scope);
                        }
                    }
                }
                Some(def::DefStruct(..)) => {
                    match *sub_pats {
                        None => {
                            // This is a unit-like struct. Nothing to do here.
                        }
                        Some(ref elems) => {
                            // This is the tuple struct case.
                            let repr = adt::represent_node(bcx, pat.id);
                            for (i, elem) in elems.iter().enumerate() {
                                let fldptr = adt::trans_field_ptr(bcx, &*repr,
                                                                  val, 0, i);
                                bcx = bind_irrefutable_pat(bcx, &**elem,
                                                           fldptr, cleanup_scope);
                            }
                        }
                    }
                }
                _ => {
                    // Nothing to do here.
                }
            }
        }
        ast::PatStruct(_, ref fields, _) => {
            let tcx = bcx.tcx();
            let pat_ty = node_id_type(bcx, pat.id);
            let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
            // Bind each mentioned field by name, resolved to its index.
            expr::with_field_tys(tcx, pat_ty, Some(pat.id), |discr, field_tys| {
                for f in fields.iter() {
                    let ix = ty::field_idx_strict(tcx, f.node.ident.name, field_tys);
                    let fldptr = adt::trans_field_ptr(bcx, &*pat_repr, val,
                                                      discr, ix);
                    bcx = bind_irrefutable_pat(bcx, &*f.node.pat, fldptr, cleanup_scope);
                }
            })
        }
        ast::PatTup(ref elems) => {
            let repr = adt::represent_node(bcx, pat.id);
            for (i, elem) in elems.iter().enumerate() {
                let fldptr = adt::trans_field_ptr(bcx, &*repr, val, 0, i);
                bcx = bind_irrefutable_pat(bcx, &**elem, fldptr, cleanup_scope);
            }
        }
        ast::PatBox(ref inner) => {
            // `box pat`: dereference the box once and recurse.
            let llbox = Load(bcx, val);
            bcx = bind_irrefutable_pat(bcx, &**inner, llbox, cleanup_scope);
        }
        ast::PatRegion(ref inner) => {
            // `&pat`: dereference the reference once and recurse.
            let loaded_val = Load(bcx, val);
            bcx = bind_irrefutable_pat(bcx, &**inner, loaded_val, cleanup_scope);
        }
        ast::PatVec(ref before, ref slice, ref after) => {
            let pat_ty = node_id_type(bcx, pat.id);
            let mut extracted = extract_vec_elems(bcx, pat_ty, before.len(), after.len(), val);
            match slice {
                &Some(_) => {
                    // Splice the subslice value between the fixed front and
                    // back elements so it lines up with the pattern order.
                    extracted.vals.insert(
                        before.len(),
                        bind_subslice_pat(bcx, pat.id, val, before.len(), after.len())
                    );
                }
                &None => ()
            }
            bcx = before
                .iter()
                .chain(slice.iter())
                .chain(after.iter())
                .zip(extracted.vals.into_iter())
                .fold(bcx, |bcx, (inner, elem)|
                    bind_irrefutable_pat(bcx, &**inner, elem, cleanup_scope)
                );
        }
        ast::PatMac(..) => {
            bcx.sess().span_bug(pat.span, "unexpanded macro");
        }
        // Wildcards and (irrefutable-context) literals bind nothing.
        ast::PatWild(_) | ast::PatLit(_) | ast::PatRange(_, _) => ()
    }
    return bcx;
}
| 40.125071 | 98 | 0.50555 |
1de8430ef8a260568effbfa21e280cd6665d1331 | 6,309 | use std::sync::Arc;
use crate::{
array::{Array, MutableArray, Offset, TryExtend, TryPush},
bitmap::MutableBitmap,
buffer::MutableBuffer,
datatypes::{DataType, Field},
error::{ArrowError, Result},
};
use super::ListArray;
/// The mutable version of [`ListArray`].
#[derive(Debug)]
pub struct MutableListArray<O: Offset, M: MutableArray> {
    // Logical datatype; a `List`/`LargeList` wrapping `values`' datatype.
    data_type: DataType,
    // List boundaries; holds `len + 1` entries, seeded with `O::default()`.
    offsets: MutableBuffer<O>,
    // Flat child array holding the concatenated elements of every list.
    values: M,
    // Per-slot validity; `None` means every entry pushed so far is non-null.
    validity: Option<MutableBitmap>,
}
impl<O: Offset, M: MutableArray + Default> MutableListArray<O, M> {
    /// Creates a new empty [`MutableListArray`].
    pub fn new() -> Self {
        Self::with_capacity(0)
    }

    /// Creates a new [`MutableListArray`] with a capacity (in number of lists).
    pub fn with_capacity(capacity: usize) -> Self {
        let values = M::default();
        let data_type = ListArray::<O>::default_datatype(values.data_type().clone());
        // Delegate to `new_from` so the offsets-initialization invariant
        // (one leading `O::default()` sentinel entry) lives in a single place.
        // `new_from` additionally validates `data_type` via `get_child_field`;
        // that always succeeds here because `default_datatype` builds a list.
        Self::new_from(values, data_type, capacity)
    }
}
impl<O: Offset, M: MutableArray + Default> Default for MutableListArray<O, M> {
    fn default() -> Self {
        // An empty mutable list array; identical to `Self::new()`.
        Self::new()
    }
}
impl<O: Offset, M: MutableArray> From<MutableListArray<O, M>> for ListArray<O> {
    // Freezes the mutable buffers into an immutable `ListArray`.
    fn from(mut other: MutableListArray<O, M>) -> Self {
        ListArray::from_data(
            other.data_type,
            other.offsets.into(),
            // `as_arc` takes `&mut self`, hence the `mut` binding above.
            other.values.as_arc(),
            other.validity.map(|x| x.into()),
        )
    }
}
impl<O, M, I, T> TryExtend<Option<I>> for MutableListArray<O, M>
where
    O: Offset,
    M: MutableArray + TryExtend<Option<T>>,
    I: IntoIterator<Item = Option<T>>,
{
    /// Pushes each entry of `iter` as a new list (or a null slot), stopping
    /// at the first error (e.g. offset overflow).
    fn try_extend<II: IntoIterator<Item = Option<I>>>(&mut self, iter: II) -> Result<()> {
        for items in iter {
            self.try_push(items)?;
        }
        Ok(())
    }
}
impl<O, M, I, T> TryPush<Option<I>> for MutableListArray<O, M>
where
    O: Offset,
    M: MutableArray + TryExtend<Option<T>>,
    I: IntoIterator<Item = Option<T>>,
{
    /// Pushes one list: `Some(items)` appends the elements and seals the list;
    /// `None` records a null slot.
    #[inline]
    fn try_push(&mut self, item: Option<I>) -> Result<()> {
        if let Some(items) = item {
            // Append the elements to the child array, then seal the list by
            // recording the new end offset (and a `true` validity bit).
            let values = self.mut_values();
            values.try_extend(items)?;
            self.try_push_valid()?;
        } else {
            // Null list: repeat the last offset and record a `false` bit.
            self.push_null();
        }
        Ok(())
    }
}
impl<O: Offset, M: MutableArray> MutableListArray<O, M> {
    /// Creates a new [`MutableListArray`] from a [`MutableArray`] and capacity.
    ///
    /// `values` must be empty. `data_type` is validated via `get_child_field`
    /// (presumably panics for non-list datatypes — confirm in `ListArray`).
    pub fn new_from(values: M, data_type: DataType, capacity: usize) -> Self {
        let mut offsets = MutableBuffer::<O>::with_capacity(capacity + 1);
        // The offsets buffer always starts with a single sentinel offset.
        offsets.push(O::default());
        assert_eq!(values.len(), 0);
        ListArray::<O>::get_child_field(&data_type);
        Self {
            data_type,
            offsets,
            values,
            validity: None,
        }
    }
    /// Creates a new [`MutableListArray`] from a [`MutableArray`], wrapping it
    /// in a list field with the given `name` and nullability.
    pub fn new_with_field(values: M, name: &str, nullable: bool) -> Self {
        let field = Box::new(Field::new(name, values.data_type().clone(), nullable));
        // `O` decides between `List` (small offsets) and `LargeList`.
        let data_type = if O::is_large() {
            DataType::LargeList(field)
        } else {
            DataType::List(field)
        };
        Self::new_from(values, data_type, 0)
    }
    /// Creates a new [`MutableListArray`] from a [`MutableArray`] and capacity.
    pub fn new_with_capacity(values: M, capacity: usize) -> Self {
        let data_type = ListArray::<O>::default_datatype(values.data_type().clone());
        Self::new_from(values, data_type, capacity)
    }
    // Seals the current list: records `values.len()` as the new end offset and
    // appends a `true` validity bit (when validity is tracked).
    #[inline]
    fn try_push_valid(&mut self) -> Result<()> {
        let size = self.values.len();
        let size = O::from_usize(size).ok_or(ArrowError::KeyOverflowError)?; // todo: make this error
        // Offsets must be monotonically non-decreasing.
        assert!(size >= *self.offsets.last().unwrap());
        self.offsets.push(size);
        if let Some(validity) = &mut self.validity {
            validity.push(true)
        }
        Ok(())
    }
    // Pushes a null list: the end offset repeats the previous one (an empty
    // span) and the validity bit is `false`.
    #[inline]
    fn push_null(&mut self) {
        self.offsets.push(self.last_offset());
        match &mut self.validity {
            Some(validity) => validity.push(false),
            None => self.init_validity(),
        }
    }
    /// Mutable reference to the child values array.
    pub fn mut_values(&mut self) -> &mut M {
        &mut self.values
    }
    /// Shared reference to the child values array.
    pub fn values(&self) -> &M {
        &self.values
    }
    // Last recorded offset; the `unwrap` is safe because construction always
    // pushes a sentinel offset first.
    #[inline]
    fn last_offset(&self) -> O {
        *self.offsets.last().unwrap()
    }
    // Lazily creates the validity bitmap: every previously pushed entry was
    // valid, and the entry being pushed right now (the last one) is null.
    fn init_validity(&mut self) {
        let len = self.offsets.len() - 1;
        let mut validity = MutableBitmap::new();
        validity.extend_constant(len, true);
        validity.set(len - 1, false);
        self.validity = Some(validity)
    }
    /// Converts itself into an [`Array`].
    pub fn into_arc(self) -> Arc<dyn Array> {
        let a: ListArray<O> = self.into();
        Arc::new(a)
    }
}
impl<O: Offset, M: MutableArray + 'static> MutableArray for MutableListArray<O, M> {
    fn len(&self) -> usize {
        // Number of lists: one fewer than the number of offsets (sentinel).
        self.offsets.len() - 1
    }
    fn validity(&self) -> Option<&MutableBitmap> {
        self.validity.as_ref()
    }
    fn as_box(&mut self) -> Box<dyn Array> {
        // Freezes the buffers in place; `mem::take` leaves `self` with
        // default (empty) offsets and validity.
        Box::new(ListArray::from_data(
            self.data_type.clone(),
            std::mem::take(&mut self.offsets).into(),
            self.values.as_arc(),
            std::mem::take(&mut self.validity).map(|x| x.into()),
        ))
    }
    fn as_arc(&mut self) -> Arc<dyn Array> {
        // Same conversion as `as_box`, but wrapped in an `Arc`.
        Arc::new(ListArray::from_data(
            self.data_type.clone(),
            std::mem::take(&mut self.offsets).into(),
            self.values.as_arc(),
            std::mem::take(&mut self.validity).map(|x| x.into()),
        ))
    }
    fn data_type(&self) -> &DataType {
        &self.data_type
    }
    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
    fn as_mut_any(&mut self) -> &mut dyn std::any::Any {
        self
    }
    #[inline]
    fn push_null(&mut self) {
        // Resolves to the inherent `push_null` (inherent methods take
        // precedence over trait methods), so this does not recurse.
        self.push_null()
    }
}
| 27.915929 | 101 | 0.55825 |
ffd78b1001e960df0b3e46c8aed95ee5efcba560 | 25,491 | use super::*;
use crate::{models::*, zeroless_view, StageId};
use anyhow::{bail, format_err};
use arrayref::array_ref;
use arrayvec::ArrayVec;
use bytes::Bytes;
use croaring::{treemap::NativeSerializer, Treemap as RoaringTreemap};
use derive_more::*;
use maplit::hashmap;
use modular_bitfield::prelude::*;
use once_cell::sync::Lazy;
use serde::{Deserialize, *};
use std::{collections::HashMap, fmt::Display, sync::Arc};
/// Adapter that erases a typed [`Table`]'s key/value types, exposing the
/// same underlying database purely in terms of raw byte vectors.
#[derive(Debug)]
pub struct ErasedTable<T>(pub T)
where
    T: Table;
impl<T> Table for ErasedTable<T>
where
    T: Table,
{
    // All three associated types become raw bytes.
    type Key = Vec<u8>;
    type Value = Vec<u8>;
    type SeekKey = Vec<u8>;
    fn db_name(&self) -> string::String<StaticBytes> {
        // Same underlying database name as the wrapped table.
        self.0.db_name()
    }
}
impl<T> ErasedTable<T>
where
    T: Table,
{
    /// Encodes a typed key into its raw byte representation.
    pub fn encode_key(object: T::Key) -> <<T as Table>::Key as TableEncode>::Encoded {
        object.encode()
    }
    /// Decodes a typed key from raw bytes.
    pub fn decode_key(input: &[u8]) -> anyhow::Result<T::Key>
    where
        <T as Table>::Key: TableDecode,
    {
        T::Key::decode(input)
    }
    /// Encodes a typed value into its raw byte representation.
    pub fn encode_value(object: T::Value) -> <<T as Table>::Value as TableEncode>::Encoded {
        object.encode()
    }
    /// Decodes a typed value from raw bytes.
    pub fn decode_value(input: &[u8]) -> anyhow::Result<T::Value> {
        T::Value::decode(input)
    }
    /// Encodes a typed seek key into its raw byte representation.
    pub fn encode_seek_key(object: T::SeekKey) -> <<T as Table>::SeekKey as TableEncode>::Encoded {
        object.encode()
    }
}
/// Declares a table type: a unit struct named `$name` implementing `Table`
/// with the given key/value types. The optional fourth argument overrides
/// `SeekKey` (it defaults to the key type).
#[macro_export]
macro_rules! decl_table {
    // Full form: explicit seek-key type.
    ($name:ident => $key:ty => $value:ty => $seek_key:ty) => {
        #[derive(Clone, Copy, Debug, Default)]
        pub struct $name;
        impl $crate::kv::traits::Table for $name {
            type Key = $key;
            type SeekKey = $seek_key;
            type Value = $value;
            fn db_name(&self) -> string::String<bytes::Bytes> {
                // SAFETY: `const_db_name` returns `stringify!($name)`,
                // a &'static str, so the bytes are always valid UTF-8.
                unsafe {
                    string::String::from_utf8_unchecked(bytes::Bytes::from_static(
                        Self::const_db_name().as_bytes(),
                    ))
                }
            }
        }
        impl $name {
            // The on-disk database name is the type's identifier.
            pub const fn const_db_name() -> &'static str {
                stringify!($name)
            }
            pub const fn erased(self) -> $crate::kv::tables::ErasedTable<Self> {
                $crate::kv::tables::ErasedTable(self)
            }
        }
        impl std::fmt::Display for $name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "{}", Self::const_db_name())
            }
        }
    };
    // Short form: seek key defaults to the key type.
    ($name:ident => $key:ty => $value:ty) => {
        decl_table!($name => $key => $value => $key);
    };
}
/// Per-table open options.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct TableInfo {
    // True if the table should be opened as a dup-sort table
    // (multiple values stored per key).
    pub dup_sort: bool,
}
// Identity codecs: raw byte containers pass through unchanged.
impl traits::TableEncode for Vec<u8> {
    type Encoded = Self;
    fn encode(self) -> Self::Encoded {
        self
    }
}
impl traits::TableDecode for Vec<u8> {
    fn decode(b: &[u8]) -> anyhow::Result<Self> {
        Ok(b.to_vec())
    }
}
impl traits::TableEncode for Bytes {
    type Encoded = Self;
    fn encode(self) -> Self::Encoded {
        self
    }
}
impl traits::TableDecode for Bytes {
    fn decode(b: &[u8]) -> anyhow::Result<Self> {
        // Copies the input slice into an owned buffer.
        Ok(b.to_vec().into())
    }
}
/// A stack-allocated, variable-length byte vector with a fixed maximum
/// capacity of `LEN` bytes; used for encodings whose size is bounded but
/// not constant.
#[derive(Clone, Debug, Default, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord)]
pub struct VariableVec<const LEN: usize> {
    pub inner: ArrayVec<u8, LEN>,
}
impl<const LEN: usize> FromIterator<u8> for VariableVec<LEN> {
    fn from_iter<T: IntoIterator<Item = u8>>(iter: T) -> Self {
        Self {
            inner: ArrayVec::from_iter(iter),
        }
    }
}
impl<const LEN: usize> AsRef<[u8]> for VariableVec<LEN> {
    fn as_ref(&self) -> &[u8] {
        self.inner.as_ref()
    }
}
// Identity codec: the bytes are already in their stored form.
impl<const LEN: usize> traits::TableEncode for VariableVec<LEN> {
    type Encoded = Self;
    fn encode(self) -> Self::Encoded {
        self
    }
}
impl<const LEN: usize> traits::TableDecode for VariableVec<LEN> {
    // Fails when `b` is longer than `LEN` (via `try_extend_from_slice`).
    fn decode(b: &[u8]) -> anyhow::Result<Self> {
        let mut out = Self::default();
        out.try_extend_from_slice(b)?;
        Ok(out)
    }
}
impl<const LEN: usize> From<VariableVec<LEN>> for Vec<u8> {
    fn from(v: VariableVec<LEN>) -> Self {
        v.to_vec()
    }
}
/// A fixed-size value did not have exactly `EXPECTED` bytes.
#[derive(Clone, Debug)]
pub struct InvalidLength<const EXPECTED: usize> {
    pub got: usize,
}
impl<const EXPECTED: usize> Display for InvalidLength<EXPECTED> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { got } = self;
        write!(f, "Invalid length: {} != {}", EXPECTED, got)
    }
}
impl<const EXPECTED: usize> std::error::Error for InvalidLength<EXPECTED> {}
/// A variable-size value had fewer than `MINIMUM` bytes.
#[derive(Clone, Debug)]
pub struct TooShort<const MINIMUM: usize> {
    pub got: usize,
}
impl<const MINIMUM: usize> Display for TooShort<MINIMUM> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { got } = self;
        write!(f, "Too short: {} < {}", got, MINIMUM)
    }
}
impl<const MINIMUM: usize> std::error::Error for TooShort<MINIMUM> {}
/// A variable-size value had more than `MAXIMUM` bytes.
#[derive(Clone, Debug)]
pub struct TooLong<const MAXIMUM: usize> {
    pub got: usize,
}
impl<const MAXIMUM: usize> Display for TooLong<MAXIMUM> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { got } = self;
        write!(f, "Too long: {} > {}", got, MAXIMUM)
    }
}
impl<const MAXIMUM: usize> std::error::Error for TooLong<MAXIMUM> {}
// Implements TableEncode/TableDecode for a u64-like newtype as a fixed
// 8-byte big-endian value (requires `From<u64>` on the target type).
macro_rules! u64_table_object {
    ($ty:ident) => {
        impl TableEncode for $ty {
            type Encoded = [u8; 8];
            fn encode(self) -> Self::Encoded {
                // Big-endian so that numeric order matches byte order.
                self.to_be_bytes()
            }
        }
        impl TableDecode for $ty {
            fn decode(b: &[u8]) -> anyhow::Result<Self> {
                match b.len() {
                    8 => Ok(u64::from_be_bytes(*array_ref!(&*b, 0, 8)).into()),
                    other => Err(InvalidLength::<8> { got: other }.into()),
                }
            }
        }
    };
}
u64_table_object!(u64);
u64_table_object!(BlockNumber);
u64_table_object!(TxIndex);
#[derive(
Clone,
Copy,
Debug,
Deref,
DerefMut,
Default,
Display,
PartialEq,
Eq,
From,
PartialOrd,
Ord,
Hash,
Serialize,
Deserialize,
)]
#[serde(transparent)]
pub struct TruncateStart<T>(pub T);
impl<T, const LEN: usize> TableEncode for TruncateStart<T>
where
T: TableEncode<Encoded = [u8; LEN]>,
{
type Encoded = VariableVec<LEN>;
fn encode(self) -> Self::Encoded {
let arr = self.0.encode();
let mut out = Self::Encoded::default();
out.try_extend_from_slice(zeroless_view(&arr)).unwrap();
out
}
}
impl<T, const LEN: usize> TableDecode for TruncateStart<T>
where
T: TableEncode<Encoded = [u8; LEN]> + TableDecode,
{
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() > LEN {
return Err(TooLong::<LEN> { got: b.len() }.into());
}
let mut arr = [0; LEN];
arr[LEN - b.len()..].copy_from_slice(b);
T::decode(&arr).map(Self)
}
}
macro_rules! scale_table_object {
($ty:ty) => {
impl TableEncode for $ty {
type Encoded = Vec<u8>;
fn encode(self) -> Self::Encoded {
::parity_scale_codec::Encode::encode(&self)
}
}
impl TableDecode for $ty {
fn decode(mut b: &[u8]) -> anyhow::Result<Self> {
Ok(<Self as ::parity_scale_codec::Decode>::decode(&mut b)?)
}
}
};
}
scale_table_object!(BodyForStorage);
scale_table_object!(BlockHeader);
scale_table_object!(MessageWithSignature);
scale_table_object!(Vec<crate::models::Log>);
macro_rules! ron_table_object {
($ty:ident) => {
impl TableEncode for $ty {
type Encoded = String;
fn encode(self) -> Self::Encoded {
ron::to_string(&self).unwrap()
}
}
impl TableDecode for $ty {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
Ok(ron::from_str(std::str::from_utf8(b)?)?)
}
}
};
}
ron_table_object!(ChainSpec);
impl TableEncode for Address {
    type Encoded = [u8; ADDRESS_LENGTH];

    /// An address is stored as its raw 20 bytes.
    fn encode(self) -> Self::Encoded {
        self.0
    }
}
impl TableDecode for Address {
    /// Decodes exactly `ADDRESS_LENGTH` bytes into an [`Address`].
    fn decode(b: &[u8]) -> anyhow::Result<Self> {
        match b.len() {
            ADDRESS_LENGTH => Ok(Address::from_slice(&*b)),
            other => Err(InvalidLength::<ADDRESS_LENGTH> { got: other }.into()),
        }
    }
}
impl TableEncode for Vec<Address> {
    type Encoded = Vec<u8>;

    /// Addresses are concatenated back to back, 20 bytes each.
    fn encode(self) -> Self::Encoded {
        let mut v = Vec::with_capacity(self.len() * ADDRESS_LENGTH);
        for addr in self {
            v.extend_from_slice(&addr.encode());
        }
        v
    }
}
impl TableDecode for Vec<Address> {
    /// Decodes a concatenation of 20-byte addresses; fails if the input
    /// length is not a multiple of `ADDRESS_LENGTH`.
    fn decode(b: &[u8]) -> anyhow::Result<Self> {
        if b.len() % ADDRESS_LENGTH != 0 {
            bail!("Slice len should be divisible by {}", ADDRESS_LENGTH);
        }
        // `chunks_exact` avoids manual index arithmetic; the divisibility
        // check above guarantees there is no remainder.
        b.chunks_exact(ADDRESS_LENGTH)
            .map(Address::decode)
            .collect()
    }
}
impl TableEncode for H256 {
type Encoded = [u8; KECCAK_LENGTH];
fn encode(self) -> Self::Encoded {
self.0
}
}
impl TableDecode for H256
where
InvalidLength<KECCAK_LENGTH>: 'static,
{
fn decode(b: &[u8]) -> anyhow::Result<Self> {
match b.len() {
KECCAK_LENGTH => Ok(H256::from_slice(&*b)),
other => Err(InvalidLength::<KECCAK_LENGTH> { got: other }.into()),
}
}
}
impl TableEncode for U256 {
type Encoded = VariableVec<KECCAK_LENGTH>;
fn encode(self) -> Self::Encoded {
self.to_be_bytes()
.into_iter()
.skip_while(|&v| v == 0)
.collect()
}
}
impl TableDecode for U256 {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() > KECCAK_LENGTH {
return Err(TooLong::<KECCAK_LENGTH> { got: b.len() }.into());
}
let mut v = [0; 32];
v[KECCAK_LENGTH - b.len()..].copy_from_slice(b);
Ok(Self::from_be_bytes(v))
}
}
impl TableEncode for (H256, U256) {
type Encoded = VariableVec<{ KECCAK_LENGTH + KECCAK_LENGTH }>;
fn encode(self) -> Self::Encoded {
let mut out = Self::Encoded::default();
out.try_extend_from_slice(&self.0.encode()).unwrap();
out.try_extend_from_slice(&self.1.encode()).unwrap();
out
}
}
impl TableDecode for (H256, U256) {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() > KECCAK_LENGTH + KECCAK_LENGTH {
return Err(TooLong::<{ KECCAK_LENGTH + KECCAK_LENGTH }> { got: b.len() }.into());
}
if b.len() < KECCAK_LENGTH {
return Err(TooShort::<{ KECCAK_LENGTH }> { got: b.len() }.into());
}
let (location, value) = b.split_at(KECCAK_LENGTH);
Ok((H256::decode(location)?, U256::decode(value)?))
}
}
impl TableEncode for RoaringTreemap {
type Encoded = Vec<u8>;
fn encode(mut self) -> Self::Encoded {
self.run_optimize();
self.serialize().unwrap()
}
}
impl TableDecode for RoaringTreemap {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
Ok(RoaringTreemap::deserialize(b)?)
}
}
#[derive(Debug)]
pub struct BitmapKey<K> {
pub inner: K,
pub block_number: BlockNumber,
}
impl TableEncode for BitmapKey<Address> {
type Encoded = [u8; ADDRESS_LENGTH + BLOCK_NUMBER_LENGTH];
fn encode(self) -> Self::Encoded {
let mut out = [0; ADDRESS_LENGTH + BLOCK_NUMBER_LENGTH];
out[..ADDRESS_LENGTH].copy_from_slice(&self.inner.encode());
out[ADDRESS_LENGTH..].copy_from_slice(&self.block_number.encode());
out
}
}
impl TableDecode for BitmapKey<Address> {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() != ADDRESS_LENGTH + BLOCK_NUMBER_LENGTH {
return Err(
InvalidLength::<{ ADDRESS_LENGTH + BLOCK_NUMBER_LENGTH }> { got: b.len() }.into(),
);
}
Ok(Self {
inner: Address::decode(&b[..ADDRESS_LENGTH])?,
block_number: BlockNumber::decode(&b[ADDRESS_LENGTH..])?,
})
}
}
impl TableEncode for BitmapKey<(Address, H256)> {
type Encoded = [u8; ADDRESS_LENGTH + KECCAK_LENGTH + BLOCK_NUMBER_LENGTH];
fn encode(self) -> Self::Encoded {
let mut out = [0; ADDRESS_LENGTH + KECCAK_LENGTH + BLOCK_NUMBER_LENGTH];
out[..ADDRESS_LENGTH].copy_from_slice(&self.inner.0.encode());
out[ADDRESS_LENGTH..ADDRESS_LENGTH + KECCAK_LENGTH].copy_from_slice(&self.inner.1.encode());
out[ADDRESS_LENGTH + KECCAK_LENGTH..].copy_from_slice(&self.block_number.encode());
out
}
}
impl TableDecode for BitmapKey<(Address, H256)> {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() != ADDRESS_LENGTH + KECCAK_LENGTH + BLOCK_NUMBER_LENGTH {
return Err(
InvalidLength::<{ ADDRESS_LENGTH + KECCAK_LENGTH + BLOCK_NUMBER_LENGTH }> {
got: b.len(),
}
.into(),
);
}
Ok(Self {
inner: (
Address::decode(&b[..ADDRESS_LENGTH])?,
H256::decode(&b[ADDRESS_LENGTH..ADDRESS_LENGTH + KECCAK_LENGTH])?,
),
block_number: BlockNumber::decode(&b[ADDRESS_LENGTH + KECCAK_LENGTH..])?,
})
}
}
impl TableEncode for StageId {
    type Encoded = &'static str;
    // Stage ids wrap a static string; the encoding is the name itself.
    fn encode(self) -> Self::Encoded {
        self.0
    }
}
impl<A, B, const A_LEN: usize, const B_LEN: usize> TableEncode for (A, B)
where
A: TableObject<Encoded = [u8; A_LEN]>,
B: TableObject<Encoded = [u8; B_LEN]>,
{
type Encoded = VariableVec<256>;
fn encode(self) -> Self::Encoded {
let mut v = Self::Encoded::default();
v.try_extend_from_slice(&self.0.encode()).unwrap();
v.try_extend_from_slice(&self.1.encode()).unwrap();
v
}
}
impl<A, B, const A_LEN: usize, const B_LEN: usize> TableDecode for (A, B)
where
A: TableObject<Encoded = [u8; A_LEN]>,
B: TableObject<Encoded = [u8; B_LEN]>,
{
fn decode(v: &[u8]) -> anyhow::Result<Self> {
if v.len() != A_LEN + B_LEN {
bail!("Invalid len: {} != {} + {}", v.len(), A_LEN, B_LEN);
}
Ok((
A::decode(&v[..A_LEN]).unwrap(),
B::decode(&v[A_LEN..]).unwrap(),
))
}
}
// Dup-sort tables (cf. the `dup_sort: true` entries in `CHAINDATA_TABLES`
// below); `SeekBothKey` is the subkey type used to seek within one key's
// set of values.
impl DupSort for Storage {
    type SeekBothKey = H256;
}
impl DupSort for AccountChangeSet {
    type SeekBothKey = Address;
}
impl DupSort for StorageChangeSet {
    type SeekBothKey = H256;
}
impl DupSort for HashedStorage {
    type SeekBothKey = H256;
}
impl DupSort for CallTraceSet {
    type SeekBothKey = Vec<u8>;
}
// Account changes are keyed purely by block number.
pub type AccountChangeKey = BlockNumber;
impl TableEncode for crate::models::Account {
type Encoded = EncodedAccount;
fn encode(self) -> Self::Encoded {
self.encode_for_storage()
}
}
impl TableDecode for crate::models::Account {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
crate::models::Account::decode_for_storage(b)?.ok_or_else(|| format_err!("cannot be empty"))
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct AccountChange {
pub address: Address,
pub account: Option<crate::models::Account>,
}
impl TableEncode for AccountChange {
type Encoded = VariableVec<{ ADDRESS_LENGTH + MAX_ACCOUNT_LEN }>;
fn encode(self) -> Self::Encoded {
let mut out = Self::Encoded::default();
out.try_extend_from_slice(&self.address.encode()).unwrap();
if let Some(account) = self.account {
out.try_extend_from_slice(&account.encode()).unwrap();
}
out
}
}
impl TableDecode for AccountChange {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() < ADDRESS_LENGTH {
return Err(TooShort::<{ ADDRESS_LENGTH }> { got: b.len() }.into());
}
Ok(Self {
address: TableDecode::decode(&b[..ADDRESS_LENGTH])?,
account: if b.len() > ADDRESS_LENGTH {
Some(TableDecode::decode(&b[ADDRESS_LENGTH..])?)
} else {
None
},
})
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct StorageChangeKey {
pub block_number: BlockNumber,
pub address: Address,
}
impl TableEncode for StorageChangeKey {
type Encoded = [u8; BLOCK_NUMBER_LENGTH + ADDRESS_LENGTH];
fn encode(self) -> Self::Encoded {
let mut out = [0; BLOCK_NUMBER_LENGTH + ADDRESS_LENGTH];
out[..BLOCK_NUMBER_LENGTH].copy_from_slice(&self.block_number.encode());
out[BLOCK_NUMBER_LENGTH..].copy_from_slice(&self.address.encode());
out
}
}
impl TableDecode for StorageChangeKey {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() != BLOCK_NUMBER_LENGTH + ADDRESS_LENGTH {
return Err(
InvalidLength::<{ BLOCK_NUMBER_LENGTH + ADDRESS_LENGTH }> { got: b.len() }.into(),
);
}
Ok(Self {
block_number: BlockNumber::decode(&b[..BLOCK_NUMBER_LENGTH])?,
address: Address::decode(
&b[BLOCK_NUMBER_LENGTH..BLOCK_NUMBER_LENGTH + ADDRESS_LENGTH],
)?,
})
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct StorageChange {
pub location: H256,
pub value: U256,
}
impl TableEncode for StorageChange {
type Encoded = VariableVec<{ KECCAK_LENGTH + KECCAK_LENGTH }>;
fn encode(self) -> Self::Encoded {
let mut out = Self::Encoded::default();
out.try_extend_from_slice(&self.location.encode()).unwrap();
out.try_extend_from_slice(&self.value.encode()).unwrap();
out
}
}
impl TableDecode for StorageChange {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() < KECCAK_LENGTH {
return Err(TooShort::<KECCAK_LENGTH> { got: b.len() }.into());
}
Ok(Self {
location: H256::decode(&b[..KECCAK_LENGTH])?,
value: U256::decode(&b[KECCAK_LENGTH..])?,
})
}
}
pub type HeaderKey = (BlockNumber, H256);
#[bitfield]
#[derive(Clone, Copy, Debug, Default)]
struct CallTraceSetFlags {
flag_from: bool,
flag_to: bool,
#[skip]
unused: B6,
}
#[derive(Clone, Copy, Debug)]
pub struct CallTraceSetEntry {
pub address: Address,
pub from: bool,
pub to: bool,
}
impl TableEncode for CallTraceSetEntry {
type Encoded = [u8; ADDRESS_LENGTH + 1];
fn encode(self) -> Self::Encoded {
let mut v = [0; ADDRESS_LENGTH + 1];
v[..ADDRESS_LENGTH].copy_from_slice(&self.address.encode());
let mut field_set = CallTraceSetFlags::default();
field_set.set_flag_from(self.from);
field_set.set_flag_to(self.to);
v[ADDRESS_LENGTH] = field_set.into_bytes()[0];
v
}
}
impl TableDecode for CallTraceSetEntry {
fn decode(b: &[u8]) -> anyhow::Result<Self> {
if b.len() != ADDRESS_LENGTH + 1 {
return Err(InvalidLength::<{ ADDRESS_LENGTH + 1 }> { got: b.len() }.into());
}
let field_set = CallTraceSetFlags::from_bytes([b[ADDRESS_LENGTH]]);
Ok(Self {
address: Address::decode(&b[..ADDRESS_LENGTH])?,
from: field_set.flag_from(),
to: field_set.flag_to(),
})
}
}
// Concrete table declarations: `name => key => value [=> seek key]`
// (see `decl_table!` above; the optional fourth argument overrides SeekKey).
decl_table!(Account => Address => crate::models::Account);
decl_table!(Storage => Address => (H256, U256));
decl_table!(AccountChangeSet => AccountChangeKey => AccountChange);
decl_table!(StorageChangeSet => StorageChangeKey => StorageChange => BlockNumber);
decl_table!(HashedAccount => H256 => crate::models::Account);
decl_table!(HashedStorage => H256 => (H256, U256));
decl_table!(AccountHistory => BitmapKey<Address> => RoaringTreemap);
decl_table!(StorageHistory => BitmapKey<(Address, H256)> => RoaringTreemap);
decl_table!(Code => H256 => Bytes);
decl_table!(TrieAccount => Vec<u8> => Vec<u8>);
decl_table!(TrieStorage => Vec<u8> => Vec<u8>);
decl_table!(DbInfo => Vec<u8> => Vec<u8>);
decl_table!(SnapshotInfo => Vec<u8> => Vec<u8>);
decl_table!(BittorrentInfo => Vec<u8> => Vec<u8>);
decl_table!(HeaderNumber => H256 => BlockNumber);
decl_table!(CanonicalHeader => BlockNumber => H256);
decl_table!(Header => HeaderKey => BlockHeader => BlockNumber);
decl_table!(HeadersTotalDifficulty => HeaderKey => U256);
decl_table!(BlockBody => HeaderKey => BodyForStorage => BlockNumber);
decl_table!(BlockTransaction => TxIndex => MessageWithSignature);
decl_table!(TotalGas => BlockNumber => u64);
decl_table!(TotalTx => BlockNumber => u64);
decl_table!(Log => (BlockNumber, TxIndex) => Vec<crate::models::Log>);
decl_table!(LogTopicIndex => Vec<u8> => RoaringTreemap);
decl_table!(LogAddressIndex => Vec<u8> => RoaringTreemap);
decl_table!(CallTraceSet => BlockNumber => CallTraceSetEntry);
decl_table!(CallFromIndex => BitmapKey<Address> => RoaringTreemap);
decl_table!(CallToIndex => BitmapKey<Address> => RoaringTreemap);
decl_table!(BlockTransactionLookup => H256 => TruncateStart<BlockNumber>);
decl_table!(Config => H256 => ChainSpec);
decl_table!(SyncStage => StageId => BlockNumber);
decl_table!(TxSender => HeaderKey => Vec<Address>);
decl_table!(LastBlock => Vec<u8> => Vec<u8>);
decl_table!(Migration => Vec<u8> => Vec<u8>);
decl_table!(Sequence => Vec<u8> => Vec<u8>);
decl_table!(LastHeader => VariableVec<0> => H256);
decl_table!(Issuance => Vec<u8> => Vec<u8>);
pub type DatabaseChart = Arc<HashMap<&'static str, TableInfo>>;
pub static CHAINDATA_TABLES: Lazy<Arc<HashMap<&'static str, TableInfo>>> = Lazy::new(|| {
Arc::new(hashmap! {
Account::const_db_name() => TableInfo::default(),
Storage::const_db_name() => TableInfo {
dup_sort: true,
},
AccountChangeSet::const_db_name() => TableInfo {
dup_sort: true,
},
StorageChangeSet::const_db_name() => TableInfo {
dup_sort: true,
},
HashedAccount::const_db_name() => TableInfo::default(),
HashedStorage::const_db_name() => TableInfo {
dup_sort: true,
},
AccountHistory::const_db_name() => TableInfo::default(),
StorageHistory::const_db_name() => TableInfo::default(),
Code::const_db_name() => TableInfo::default(),
TrieAccount::const_db_name() => TableInfo::default(),
TrieStorage::const_db_name() => TableInfo::default(),
DbInfo::const_db_name() => TableInfo::default(),
SnapshotInfo::const_db_name() => TableInfo::default(),
BittorrentInfo::const_db_name() => TableInfo::default(),
HeaderNumber::const_db_name() => TableInfo::default(),
CanonicalHeader::const_db_name() => TableInfo::default(),
Header::const_db_name() => TableInfo::default(),
HeadersTotalDifficulty::const_db_name() => TableInfo::default(),
BlockBody::const_db_name() => TableInfo::default(),
BlockTransaction::const_db_name() => TableInfo::default(),
TotalGas::const_db_name() => TableInfo::default(),
TotalTx::const_db_name() => TableInfo::default(),
Log::const_db_name() => TableInfo::default(),
LogTopicIndex::const_db_name() => TableInfo::default(),
LogAddressIndex::const_db_name() => TableInfo::default(),
CallTraceSet::const_db_name() => TableInfo {
dup_sort: true,
},
CallFromIndex::const_db_name() => TableInfo::default(),
CallToIndex::const_db_name() => TableInfo::default(),
BlockTransactionLookup::const_db_name() => TableInfo::default(),
Config::const_db_name() => TableInfo::default(),
SyncStage::const_db_name() => TableInfo::default(),
TxSender::const_db_name() => TableInfo::default(),
LastBlock::const_db_name() => TableInfo::default(),
Migration::const_db_name() => TableInfo::default(),
Sequence::const_db_name() => TableInfo::default(),
LastHeader::const_db_name() => TableInfo::default(),
Issuance::const_db_name() => TableInfo::default(),
})
});
#[cfg(test)]
mod tests {
use super::*;
use hex_literal::hex;
#[test]
fn u256() {
for (fixture, expected) in [
(U256::ZERO, vec![]),
(
U256::from(0xDEADBEEFBAADCAFE_u128),
hex!("DEADBEEFBAADCAFE").to_vec(),
),
(U256::MAX, hex!("FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF").to_vec()),
] {
assert_eq!(fixture.encode().to_vec(), expected);
assert_eq!(U256::decode(&expected).unwrap(), fixture);
}
}
#[test]
fn log() {
let input = vec![
crate::models::Log {
address: Address::from([0; 20]),
topics: vec![H256([1; 32]), H256([2; 32])],
data: hex!("BAADCAFE").to_vec().into(),
},
crate::models::Log {
address: Address::from([1; 20]),
topics: vec![H256([3; 32]), H256([4; 32])],
data: hex!("DEADBEEF").to_vec().into(),
},
];
let encoded = input.clone().encode();
println!("{}", hex::encode(&encoded));
assert_eq!(Vec::<crate::models::Log>::decode(&encoded).unwrap(), input);
}
}
| 28.934166 | 122 | 0.584442 |
297cd987ae9f2c13486037ffc2a262f9d070e650 | 246 | #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// Error describing an invalid action.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct InvalidActionError {
    // Human-readable description of the problem.
    pub message: String,
}
def64d1088f7cc105f7662bb23eeef0e6fd4562c | 476 | use gremlin_client::{GremlinClient, Vertex};
use std::thread;
/// Connects to a local Gremlin server, runs a parameterized vertex query on a
/// background thread, and prints the collected vertices (or the error).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `dyn` added: bare trait objects (`Box<std::error::Error>`) are
    // deprecated syntax.
    let client = GremlinClient::connect("localhost")?;
    // The clone is moved into the worker thread.
    let c = client.clone();
    let result = thread::spawn(move || {
        c.execute("g.V(param)", &[("param", &1)])?
            // Drop per-row transport errors; `take` errors surface in the
            // collected `Result` below.
            .filter_map(Result::ok)
            .map(|f| f.take::<Vertex>())
            .collect::<Result<Vec<Vertex>, _>>()
    });
    // `join` is `Err` only if the worker thread panicked.
    println!("{:?}", result.join());
    Ok(())
}
| 23.8 | 54 | 0.523109 |
6728a1e7bc74e0a886a74eaaa211a8d085854ceb | 9,818 | use crate::models::{Game, GameTraits, Item, ItemTraits, ItemTraitsMut};
use crate::utils::{load_database, load_genres_from_games, load_tags_from_games};
/// This collection can store items or games.
/// When used with items, ItemTraits are also needed.
/// When used with games, both ItemTraits and GameTraits are needed.
#[derive(Serialize, Default, Debug)]
pub struct ItemCollection<T> {
    // Cached number of items; kept in sync with `items.len()` by `new` and
    // `add_item`, and serialized alongside the items.
    pub count: usize,
    // The stored items (games, tags or genres), ordered so that the item
    // with id `n` lives at index `n - 1`.
    pub items: Vec<T>,
}
impl<T> ItemCollection<T> {
    /// Builds a collection from a vector of items, initializing `count`
    /// from the vector's length.
    pub fn new(items: Vec<T>) -> Self {
        Self {
            count: items.len(),
            items,
        }
    }
}
impl<T: ItemTraits> ItemCollection<T> {
    /// Returns a reference to the item with the given 1-based `id`, or `None`
    /// if it does not exist (including `id == 0`).
    pub fn get_item_by_id(&self, id: usize) -> Option<&T> {
        // Ids are 1-based: the item with id `n` lives at index `n - 1`.
        // `checked_sub` prevents the underflow panic the plain `id - 1`
        // suffered for `id == 0` in debug builds.
        self.items.get(id.checked_sub(1)?)
    }

    /// Returns a reference to the item with the given name, or `None`.
    pub fn get_item_by_name(&self, name: &str) -> Option<&T> {
        // Assumes names are unique; returns the first match.
        self.items.iter().find(|&item| item.get_name() == name)
    }
}
impl<T: ItemTraits + ItemTraitsMut> ItemCollection<T> {
    /// Adds an item, assigning it the next sequential (1-based) id, which is
    /// also returned.
    pub fn add_item(&mut self, mut item: T) -> usize {
        self.count += 1;
        item.set_id(self.count);
        self.items.push(item);
        self.count
    }

    /// Returns a mutable reference to the item with the given 1-based `id`,
    /// or `None` if it does not exist (including `id == 0`).
    pub fn get_item_by_id_mut(&mut self, id: usize) -> Option<&mut T> {
        // `checked_sub` prevents the underflow panic the plain `id - 1`
        // suffered for `id == 0` in debug builds.
        self.items.get_mut(id.checked_sub(1)?)
    }

    /// Returns a mutable reference to the item with the given name, or `None`.
    pub fn get_item_by_name_mut(&mut self, name: &str) -> Option<&mut T> {
        // Assumes names are unique; returns the first match.
        self.items.iter_mut().find(|item| item.get_name() == name)
    }
}
impl<T: GameTraits> ItemCollection<T> {
    /// Returns a collection of references to every item whose `field_name`
    /// field contains `field_value`.
    pub fn get_item_with_field(
        &self,
        field_name: &str,
        field_value: &str,
    ) -> ItemCollection<&T> {
        // Collect via an iterator chain instead of the manual push loop.
        let games: Vec<&T> = self
            .items
            .iter()
            .filter(|item| item.field_contains(field_name, field_value))
            .collect();
        ItemCollection::new(games)
    }

    /// Returns a collection of references to the items carrying the tag.
    pub fn get_item_with_tag(&self, tag_name: &str) -> ItemCollection<&T> {
        self.get_item_with_field("Tags", tag_name)
    }

    /// Returns a collection of references to the items of the genre.
    pub fn get_item_with_genre(&self, genre_name: &str) -> ItemCollection<&T> {
        self.get_item_with_field("Genre", genre_name)
    }
}
/// # DataBase
/// Store the game database in three different collections:
/// - a games collection
/// - a tags collection
/// - a genres collection
///
/// Each collection stores items (being games, tags or genres) using the
/// following struct:
/// ```
/// pub struct ItemCollection<T> {
///     pub count: usize,
///     pub items: Vec<T>,
/// }
/// ```
///
/// The games collection also stores a vector of games, each game
/// being described using the following struct:
/// ```
/// pub struct Game {
///     pub id: usize,
///     pub name: String,
///     pub cover: String,
///     pub engine: String,
///     pub setup: String,
///     pub runtime: String,
///     pub store: String,
///     pub hints: String,
///     pub genres: Vec<String>,
///     pub tags: Vec<String>,
///     pub year: String,
///     pub dev: String,
///     pub publi: String,
///     pub version: String,
///     pub status: String,
/// }
/// ```
///
/// The tags/genres collection also stores a vector of tags/genres, each
/// tag/genre being described by the following struct:
/// ```
/// pub struct Item {
///     pub id: usize,
///     pub name: String,
///     pub games: Vec<usize>,
/// }
/// ```
///
pub struct DataBase {
    /// Store the games collection (see above for details).
    pub games: ItemCollection<Game>,
    /// Store the tags collection (see above for details).
    pub tags: ItemCollection<Item>,
    /// Store the genres collection (see above for details).
    pub genres: ItemCollection<Item>,
}
/// Public API
impl DataBase {
    /// Builds the database from `filename`: the games are loaded from the
    /// file, then the tag and genre collections are derived from them.
    pub fn new(filename: &str) -> Self {
        let mut games: ItemCollection<Game> = ItemCollection::default();
        let mut tags: ItemCollection<Item> = ItemCollection::default();
        let mut genres: ItemCollection<Item> = ItemCollection::default();
        load_database(filename, &mut games);
        load_tags_from_games(&mut tags, &games);
        load_genres_from_games(&mut genres, &games);
        Self { games, tags, genres }
    }
    /// Number of games stored in the database.
    pub fn get_games_count(&self) -> usize {
        self.games.count
    }
    /// Looks up a game by name; returns `None` when no game matches.
    pub fn get_game_by_name(&self, name: &str) -> Option<&Game> {
        self.games.get_item_by_name(name)
    }
    /// Looks up a game by id; returns `None` when no game matches.
    pub fn get_game_by_id(&self, id: usize) -> Option<&Game> {
        self.games.get_item_by_id(id)
    }
    /// Collects references to every game carrying the given tag.
    pub fn get_games_by_tag(&self, name: &str) -> ItemCollection<&Game> {
        self.games.get_item_with_tag(name)
    }
    /// Collects references to every game of the given genre.
    pub fn get_games_by_genre(&self, name: &str) -> ItemCollection<&Game> {
        self.games.get_item_with_genre(name)
    }
    /// Number of distinct tags stored in the database.
    pub fn get_tags_count(&self) -> usize {
        self.tags.count
    }
    /// Collects the names of every tag in the database.
    pub fn get_tag_names(&self) -> ItemCollection<&str> {
        let names: Vec<&str> = self.tags.items.iter().map(|tag| tag.name.as_str()).collect();
        ItemCollection::new(names)
    }
    /// Number of distinct genres stored in the database.
    pub fn get_genres_count(&self) -> usize {
        self.genres.count
    }
}
/*-------------------------- TESTS --------------------------------*/
#[cfg(test)]
mod test_collection_items_methods {
    use super::*;
    use models::Item;
    // Constructing a collection must record how many items it holds.
    #[test]
    fn new() {
        let items: Vec<Item> = Vec::new();
        let collection = ItemCollection::new(items);
        assert_eq!(collection.count, 0);
        let item = Item::new();
        let items = vec![item];
        let collection = ItemCollection::new(items);
        assert_eq!(collection.count, 1);
    }
    // Adding an item must bump the count and return the stored item's id.
    #[test]
    fn add_item() {
        let items: Vec<Item> = Vec::new();
        let mut collection = ItemCollection::new(items);
        let item = Item::new();
        let id = collection.add_item(item);
        assert_eq!(collection.count, 1);
        assert_eq!(id, collection.items[0].id);
    }
    // Lookup by name must return the item whose `name` field matches.
    #[test]
    fn get_by_name() {
        let mut item1 = Item::new();
        item1.name = "item 1".to_string();
        let mut item2 = Item::new();
        item2.name = "item 2".to_string();
        // Equal twin used for the equality assertion below.
        let mut item2_bis = Item::new();
        item2_bis.name = "item 2".to_string();
        let items = vec![item1, item2];
        let collection = ItemCollection::new(items);
        match collection.get_item_by_name("item 2") {
            Some(item_check) => assert!(item2_bis == *item_check),
            None => panic!("Should have found item"),
        }
    }
    // Lookup by id must return the item whose `id` field matches.
    #[test]
    fn get_by_id() {
        let mut item1 = Item::new();
        item1.id = 1;
        let mut item2 = Item::new();
        item2.id = 2;
        // Equal twin used for the equality assertion below.
        let mut item2_bis = Item::new();
        item2_bis.id = 2;
        let items = vec![item1, item2];
        let collection = ItemCollection::new(items);
        match collection.get_item_by_id(2) {
            Some(item_check) => assert!(item2_bis == *item_check),
            None => panic!("Should have found item"),
        }
    }
}
#[cfg(test)]
mod test_collection_games_methods {
    use super::*;
    use models::Game;
    // Filtering by tag must return only the game carrying that tag.
    #[test]
    fn get_by_tag() {
        let mut games: Vec<Game> = Vec::new();
        let mut g1 = Game::new();
        g1.name = "to be found".to_string();
        g1.tags = vec!["tag1".to_string()];
        games.push(g1);
        let mut g2 = Game::new();
        g2.name = "not to be found".to_string();
        g2.tags = vec!["tag2".to_string()];
        games.push(g2);
        let collection = ItemCollection::new(games);
        let g1_test = collection.get_item_with_tag("tag1");
        assert_eq!(g1_test.items[0].name, "to be found".to_string());
        assert_eq!(g1_test.count, 1);
    }
    // Filtering by genre must return only the game of that genre.
    #[test]
    fn get_by_genre() {
        let mut games: Vec<Game> = Vec::new();
        let mut g1 = Game::new();
        g1.name = "to be found".to_string();
        g1.genres = vec!["ge1".to_string()];
        games.push(g1);
        let mut g2 = Game::new();
        g2.name = "not to be found".to_string();
        g2.genres = vec!["ge2".to_string()];
        games.push(g2);
        let collection = ItemCollection::new(games);
        let g1_test = collection.get_item_with_genre("ge1");
        assert_eq!(g1_test.items[0].name, "to be found".to_string());
        assert_eq!(g1_test.count, 1);
    }
}
| 33.394558 | 100 | 0.584131 |
08c71db06b66e5b4db8c835b0cc01a640c603b68 | 1,330 | pub(crate) mod epoch;
pub(crate) mod genesis;
pub(crate) mod primitive;
pub(crate) mod receipt;
pub(crate) mod transaction;
use std::error::Error;
use derive_more::{Display, From};
use crate::{ProtocolError, ProtocolErrorKind};
pub use epoch::{Epoch, EpochHeader, EpochId, Pill, Proof, Validator};
pub use ethbloom::{Bloom, BloomRef, Input as BloomInput};
pub use genesis::{Genesis, GenesisStateAlloc, GenesisStateAsset};
pub use primitive::{
Account, Address, ApprovedInfo, Asset, AssetID, AssetInfo, Balance, ContractAccount,
ContractAddress, ContractType, Fee, Hash, MerkleRoot, UserAccount, UserAddress,
GENESIS_EPOCH_ID,
};
pub use receipt::{Receipt, ReceiptResult};
pub use transaction::{CarryingAsset, RawTransaction, SignedTransaction, TransactionAction};
/// Errors raised while decoding or validating protocol primitive types.
#[derive(Debug, Display, From)]
pub enum TypesError {
    /// A fixed-size value was given the wrong number of bytes.
    #[display(fmt = "Expect {:?}, get {:?}.", expect, real)]
    LengthMismatch { expect: usize, real: usize },
    /// A hex string failed to decode.
    #[display(fmt = "{:?}", error)]
    FromHex { error: hex::FromHexError },
    /// An address string did not have a valid format.
    #[display(fmt = "{:?} is an invalid address", address)]
    InvalidAddress { address: String },
}
impl Error for TypesError {}
// Wrap a `TypesError` into the crate-wide `ProtocolError`, tagging it with
// the `Types` error kind so callers can tell which subsystem failed.
impl From<TypesError> for ProtocolError {
    fn from(error: TypesError) -> ProtocolError {
        ProtocolError::new(ProtocolErrorKind::Types, Box::new(error))
    }
}
| 30.930233 | 91 | 0.710526 |
483880d7930e4a982e760953fdf60ceff9c52739 | 2,156 | // Copyright 2017 rust-ipfs-api Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//
use response::serde;
use serde::de::{Deserialize, Deserializer, Error};
/// See
/// [libp2p](https://github.com/libp2p/go-libp2p-routing/blob/master/notifications/query.go#L16).
///
#[derive(Debug)]
pub enum DhtType {
    // Variant order matters: the wire format is a bare integer 0..=7
    // (see the `Deserialize` impl below), matching go-libp2p-routing's
    // QueryEventType constants linked above.
    SendingQuery,
    PeerResponse,
    FinalPeer,
    QueryError,
    Provider,
    Value,
    AddingPeer,
    DialingPeer,
}
impl<'de> Deserialize<'de> for DhtType {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The IPFS API encodes the event type as a bare integer; map each
        // known discriminant to its variant and reject anything else.
        match deserializer.deserialize_i64(serde::IntegerVisitor)? {
            0 => Ok(DhtType::SendingQuery),
            1 => Ok(DhtType::PeerResponse),
            2 => Ok(DhtType::FinalPeer),
            3 => Ok(DhtType::QueryError),
            4 => Ok(DhtType::Provider),
            5 => Ok(DhtType::Value),
            6 => Ok(DhtType::AddingPeer),
            7 => Ok(DhtType::DialingPeer),
            i => Err(D::Error::custom(format!("unknown dht type '{}'", i))),
        }
    }
}
/// A single peer referenced in a DHT response, with its multiaddresses.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct DhtPeerResponse {
    #[serde(rename = "ID")]
    pub id: String,
    // `deserialize_vec` tolerates a missing/null field (see `serde` module).
    #[serde(deserialize_with = "serde::deserialize_vec")]
    pub addrs: Vec<String>,
}
/// One progress/result event emitted by a DHT query endpoint.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct DhtMessage {
    #[serde(rename = "ID")]
    pub id: String,
    #[serde(rename = "Type")]
    pub typ: DhtType,
    #[serde(deserialize_with = "serde::deserialize_vec")]
    pub responses: Vec<DhtPeerResponse>,
    pub extra: String,
}
// Every DHT endpoint streams the same event shape; the aliases only exist
// to give each endpoint's response a descriptive name.
pub type DhtFindPeerResponse = DhtMessage;
pub type DhtFindProvsResponse = DhtMessage;
pub type DhtGetResponse = DhtMessage;
pub type DhtProvideResponse = DhtMessage;
pub type DhtPutResponse = DhtMessage;
pub type DhtQueryResponse = DhtMessage;
| 25.975904 | 97 | 0.644249 |
e636053f6669b0ef54fe034f89dcd36a9145025f | 1,759 | use crate::binary::read::{ReadBinary, ReadCtxt};
use crate::error::ParseError;
use crate::woff2::{PackedU16, TableDirectoryEntry, Woff2Font};
/// Parsed TrueType-collection directory of a WOFF2 file: the collection
/// version followed by one entry per font.
#[derive(Debug)]
pub struct Directory {
    version: u32,
    entries: Vec<FontEntry>,
}
/// One font in the collection: its sfnt flavor tag plus the indices of its
/// tables inside the file's shared table directory.
#[derive(Debug)]
pub struct FontEntry {
    flavor: u32,
    table_indices: Vec<usize>,
}
impl<'a> ReadBinary<'a> for FontEntry {
    type HostType = Self;
    // Wire layout: packed table count, u32 flavor, then one packed table
    // index per table.
    fn read(ctxt: &mut ReadCtxt<'a>) -> Result<Self, ParseError> {
        let num_tables = ctxt.read::<PackedU16>()?;
        let flavor = ctxt.read_u32be()?;
        let table_indices = (0..num_tables)
            .map(|_i| ctxt.read::<PackedU16>().map(usize::from))
            .collect::<Result<Vec<_>, _>>()?;
        Ok(FontEntry {
            flavor,
            table_indices,
        })
    }
}
impl<'a> ReadBinary<'a> for Directory {
    type HostType = Self;
    // Wire layout: u32 TTC version, packed font count, then the font
    // entries back to back.
    fn read(ctxt: &mut ReadCtxt<'_>) -> Result<Self, ParseError> {
        let ttc_version = ctxt.read_u32be()?;
        let num_fonts = ctxt.read::<PackedU16>()?;
        let entries = (0..num_fonts)
            .map(|_i| ctxt.read::<FontEntry>())
            .collect::<Result<Vec<_>, _>>()?;
        Ok(Directory {
            version: ttc_version,
            entries,
        })
    }
}
impl Directory {
    /// Iterates over every font entry in the collection.
    pub fn fonts(&self) -> impl Iterator<Item = &FontEntry> + '_ {
        self.entries.iter()
    }
    /// Returns the `index`-th font entry, or `None` if out of range.
    pub fn get(&self, index: usize) -> Option<&FontEntry> {
        self.entries.get(index)
    }
}
impl FontEntry {
    /// Resolves this font's table indices against `file`'s table directory.
    /// Out-of-range indices are silently skipped (`get` returns `None`,
    /// which `flat_map` drops).
    pub fn table_entries<'a>(
        &'a self,
        file: &'a Woff2Font<'_>,
    ) -> impl Iterator<Item = &TableDirectoryEntry> + '_ {
        self.table_indices
            .iter()
            .flat_map(move |&index| file.table_directory.get(index))
    }
}
| 24.774648 | 68 | 0.56282 |
089b9fe9e6e5ba96015ddc697ca4e75ca5cef8d9 | 5,256 | use crate::components::{Children, Parent};
use bevy_ecs::{Commands, Entity, Query, World, WorldWriter};
/// Walks `entity` and its descendants depth-first, invoking `run` on each.
///
/// `run` receives the result computed for the entity's parent and for the
/// previously visited sibling subtree; the value it returns for an entity is
/// passed down to that entity's children. The function returns the result of
/// the last subtree visited (or the entity's own result if it has no
/// children).
pub fn run_on_hierarchy<T, S>(
    children_query: &Query<&Children>,
    state: &mut S,
    entity: Entity,
    parent_result: Option<T>,
    mut previous_result: Option<T>,
    run: &mut dyn FnMut(&mut S, Entity, Option<T>, Option<T>) -> Option<T>,
) -> Option<T>
where
    T: Clone,
{
    // TODO: not a huge fan of this pattern. are there ways to do recursive updates in legion without allocations?
    // TODO: the problem above might be resolvable with world splitting
    // Copy the child list out so no query borrow is held while recursing.
    let children = children_query
        .get::<Children>(entity)
        .ok()
        .map(|children| children.0.iter().cloned().collect::<Vec<Entity>>());
    let parent_result = run(state, entity, parent_result, previous_result);
    previous_result = None;
    if let Some(children) = children {
        for child in children {
            previous_result = run_on_hierarchy(
                children_query,
                state,
                child,
                parent_result.clone(),
                previous_result,
                run,
            );
        }
    } else {
        previous_result = parent_result;
    }
    previous_result
}
/// Deferred world command: despawn an entity together with its whole
/// descendant tree (see the [`WorldWriter`] impl below).
#[derive(Debug)]
pub struct DespawnRecursive {
    entity: Entity,
}
fn despawn_with_children_recursive(world: &mut World, entity: Entity) {
    // first, make the entity's own parent forget about it
    if let Ok(parent) = world.get::<Parent>(entity).map(|parent| parent.0) {
        if let Ok(mut children) = world.get_mut::<Children>(parent) {
            children.retain(|c| *c != entity);
        }
    }
    // then despawn the entity and all of its children
    despawn_with_children_recursive_inner(world, entity);
}
// Should only be called by `despawn_with_children_recursive`!
fn despawn_with_children_recursive_inner(world: &mut World, entity: Entity) {
    // Copy the child list into a Vec first so the `Children` borrow is
    // released before the world is mutated by the recursive despawns.
    if let Some(children) = world
        .get::<Children>(entity)
        .ok()
        .map(|children| children.0.iter().cloned().collect::<Vec<Entity>>())
    {
        for e in children {
            despawn_with_children_recursive(world, e);
        }
    }
    // A failed despawn (e.g. the entity is already gone) is only logged,
    // not propagated.
    if let Err(e) = world.despawn(entity) {
        log::debug!("Failed to despawn entity {:?}: {}", entity, e);
    }
}
// Executes the deferred despawn when the command buffer is flushed into
// the world.
impl WorldWriter for DespawnRecursive {
    fn write(self: Box<Self>, world: &mut World) {
        despawn_with_children_recursive(world, self.entity);
    }
}
pub trait DespawnRecursiveExt {
    /// Despawns the provided entity and its children.
    fn despawn_recursive(&mut self, entity: Entity) -> &mut Self;
}
impl DespawnRecursiveExt for Commands {
    /// Despawns the provided entity and its children.
    fn despawn_recursive(&mut self, entity: Entity) -> &mut Self {
        self.write_world(DespawnRecursive { entity })
    }
}
#[cfg(test)]
mod tests {
    use super::DespawnRecursiveExt;
    use crate::{components::Children, hierarchy::BuildChildren};
    use bevy_ecs::{Commands, Resources, World};
    // Builds a small hierarchy, recursively despawns one subtree, and
    // verifies both the survivors and the parent's child list.
    #[test]
    fn despawn_recursive() {
        let mut world = World::default();
        let mut resources = Resources::default();
        let mut command_buffer = Commands::default();
        command_buffer.set_entity_reserver(world.get_entity_reserver());
        command_buffer.spawn((0u32, 0u64)).with_children(|parent| {
            parent.spawn((0u32, 0u64));
        });
        // Create a grandparent entity which will _not_ be deleted
        command_buffer.spawn((1u32, 1u64));
        let grandparent_entity = command_buffer.current_entity().unwrap();
        command_buffer.with_children(|parent| {
            // Add a child to the grandparent (the "parent"), which will get deleted
            parent.spawn((2u32, 2u64));
            // All descendents of the "parent" should also be deleted.
            parent.with_children(|parent| {
                parent.spawn((3u32, 3u64)).with_children(|parent| {
                    // child
                    parent.spawn((4u32, 4u64));
                });
                parent.spawn((5u32, 5u64));
            });
        });
        command_buffer.spawn((0u32, 0u64));
        command_buffer.apply(&mut world, &mut resources);
        let parent_entity = world.get::<Children>(grandparent_entity).unwrap()[0];
        command_buffer.despawn_recursive(parent_entity);
        command_buffer.despawn_recursive(parent_entity); // despawning the same entity twice should not panic
        command_buffer.apply(&mut world, &mut resources);
        let results = world
            .query::<(&u32, &u64)>()
            .iter()
            .map(|(a, b)| (*a, *b))
            .collect::<Vec<_>>();
        {
            let children = world.get::<Children>(grandparent_entity).unwrap();
            assert_eq!(
                children.iter().any(|&i| i == parent_entity),
                false,
                "grandparent should no longer know about its child which has been removed"
            );
        }
        // parent_entity and its children should be deleted,
        // the grandparent tuple (1, 1) and (0, 0) tuples remaining.
        assert_eq!(
            results,
            vec![(0u32, 0u64), (0u32, 0u64), (0u32, 0u64), (1u32, 1u64)]
        );
    }
}
| 33.056604 | 114 | 0.600837 |
769e72829f6f646dade5bec126c47ca5eeb81fde | 548 | extern crate bindgen;
extern crate cc;
use std::env;
use std::path::PathBuf;
fn main() {
    // Compile the C shim that provides the Redis module API symbols the
    // Rust side links against.
    cc::Build::new()
        .file("include/redismodule.c")
        .include("include/")
        .compile("libredismodule.a");
    // Generate Rust FFI bindings for redismodule.h at build time.
    let bindings = bindgen::Builder::default()
        .header("include/redismodule.h")
        .generate()
        .expect("error generating bindings");
    // OUT_DIR is set by cargo; the crate can `include!` the result from there.
    let out = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out.join("bindings.rs"))
        .expect("failed to write bindings to file");
}
| 24.909091 | 58 | 0.607664 |
038f8599f21d6e900a565db328aa58e7f73fc8d1 | 5,122 | #![cfg(all(target_os = "windows", target_arch = "x86_64"))]
use minidump::{CrashReason, Minidump, MinidumpMemoryList, MinidumpSystemInfo, MinidumpThreadList};
use minidump_writer::minidump_writer::MinidumpWriter;
use std::mem;
use windows_sys::Win32::{
Foundation::{EXCEPTION_ILLEGAL_INSTRUCTION, STATUS_INVALID_PARAMETER},
System::{
Diagnostics::Debug::{RtlCaptureContext, EXCEPTION_POINTERS, EXCEPTION_RECORD},
Threading::GetCurrentThreadId,
},
};
mod common;
use common::start_child_and_return;
// Extracts the exception stream from a parsed minidump and interprets it
// as a Windows x86-64 crash reason. Panics if the stream is missing.
fn get_crash_reason<'a, T: std::ops::Deref<Target = [u8]> + 'a>(
    md: &Minidump<'a, T>,
) -> CrashReason {
    let exc: minidump::MinidumpException<'_> =
        md.get_stream().expect("unable to find exception stream");
    exc.get_crash_reason(
        minidump::system_info::Os::Windows,
        minidump::system_info::Cpu::X86_64,
    )
}
/// Ensures that we can write minidumps for the current process, even if this is
/// not necessarily the primary intended use case of out-of-process dumping
#[test]
fn dump_current_process() {
    let mut tmpfile = tempfile::Builder::new()
        .prefix("windows_current_process")
        .tempfile()
        .unwrap();
    // SAFETY: the raw exception pointers handed to `CrashContext` reference
    // locals that outlive the `dump` call inside this same block.
    unsafe {
        // Fabricate an exception record/context for this very process.
        let mut exception_record: EXCEPTION_RECORD = mem::zeroed();
        let mut exception_context = mem::MaybeUninit::uninit();
        RtlCaptureContext(exception_context.as_mut_ptr());
        let mut exception_context = exception_context.assume_init();
        let exception_ptrs = EXCEPTION_POINTERS {
            ExceptionRecord: &mut exception_record,
            ContextRecord: &mut exception_context,
        };
        exception_record.ExceptionCode = STATUS_INVALID_PARAMETER;
        let crash_context = crash_context::CrashContext {
            exception_pointers: (&exception_ptrs as *const EXCEPTION_POINTERS).cast(),
            process_id: std::process::id(),
            thread_id: GetCurrentThreadId(),
            exception_code: STATUS_INVALID_PARAMETER,
        };
        let dumper = MinidumpWriter::new(crash_context).expect("failed to create MinidumpWriter");
        dumper
            .dump(tmpfile.as_file_mut())
            .expect("failed to write minidump");
    }
    // Re-read the dump and check the expected streams and crash reason.
    let md = Minidump::read_path(tmpfile.path()).expect("failed to read minidump");
    let _: MinidumpThreadList = md.get_stream().expect("Couldn't find MinidumpThreadList");
    let _: MinidumpMemoryList = md.get_stream().expect("Couldn't find MinidumpMemoryList");
    let _: MinidumpSystemInfo = md.get_stream().expect("Couldn't find MinidumpSystemInfo");
    let crash_reason = get_crash_reason(&md);
    assert_eq!(
        crash_reason,
        CrashReason::from_windows_error(STATUS_INVALID_PARAMETER as u32)
    );
}
/// Ensures that we can write minidumps for an external process. Unfortunately
/// this requires us to know the actual pointer in the client process for the
/// exception, as the `MiniDumpWriteDump` syscall directly reads points from
/// the process memory, so we communicate that back from the client process
/// via stdout
#[test]
fn dump_external_process() {
    use std::io::BufRead;
    let mut child = start_child_and_return(&[&format!("{:x}", EXCEPTION_ILLEGAL_INSTRUCTION)]);
    // The child prints "<pid> <exception_ptr> <tid> <code-hex>" on one line.
    let (process_id, exception_pointers, thread_id, exception_code) = {
        let mut f = std::io::BufReader::new(child.stdout.as_mut().expect("Can't open stdout"));
        let mut buf = String::new();
        f.read_line(&mut buf).expect("failed to read stdout");
        assert!(!buf.is_empty());
        let mut biter = buf.trim().split(' ');
        let process_id: u32 = biter.next().unwrap().parse().unwrap();
        let exception_pointers: usize = biter.next().unwrap().parse().unwrap();
        let thread_id: u32 = biter.next().unwrap().parse().unwrap();
        let exception_code = u32::from_str_radix(biter.next().unwrap(), 16).unwrap() as i32;
        (process_id, exception_pointers, thread_id, exception_code)
    };
    assert_eq!(exception_code, EXCEPTION_ILLEGAL_INSTRUCTION);
    let crash_context = crash_context::CrashContext {
        exception_pointers: exception_pointers as _,
        process_id,
        thread_id,
        exception_code,
    };
    let mut tmpfile = tempfile::Builder::new()
        .prefix("windows_external_process")
        .tempfile()
        .unwrap();
    let dumper = MinidumpWriter::new(crash_context).expect("failed to create MinidumpWriter");
    dumper
        .dump(tmpfile.as_file_mut())
        .expect("failed to write minidump");
    child.kill().expect("failed to kill child");
    // Re-read the dump and check the expected streams and crash reason.
    let md = Minidump::read_path(tmpfile.path()).expect("failed to read minidump");
    let _: MinidumpThreadList = md.get_stream().expect("Couldn't find MinidumpThreadList");
    let _: MinidumpMemoryList = md.get_stream().expect("Couldn't find MinidumpMemoryList");
    let _: MinidumpSystemInfo = md.get_stream().expect("Couldn't find MinidumpSystemInfo");
    let crash_reason = get_crash_reason(&md);
    assert_eq!(
        crash_reason,
        CrashReason::from_windows_code(EXCEPTION_ILLEGAL_INSTRUCTION as u32)
    );
}
| 36.070423 | 98 | 0.680398 |
ff48c57d364af8968771eec830bdcafacdf0d238 | 2,989 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::{format_err, Context, Result};
use diem_types::chain_id::ChainId;
use reqwest::{blocking, StatusCode, Url};
use std::{
path::Path,
process::{Child, Command, Stdio},
};
/// Handle to a spawned faucet child process; `Drop` kills the child (or
/// checks its exit status) when the handle goes away.
pub struct Process {
    // Port the faucet HTTP server listens on.
    port: u16,
    // The spawned faucet child process.
    process: Child,
}
impl Drop for Process {
    fn drop(&mut self) {
        // Kill process process if still running.
        // NOTE(review): this panics both when the child already exited with
        // a failure status and when `kill` fails — panicking inside `Drop`
        // aborts if it happens during an unwind; confirm this is intended
        // (likely acceptable for a test-only harness).
        match self.process.try_wait().unwrap() {
            Some(status) => {
                if !status.success() {
                    panic!(
                        "Process terminated with status: {}",
                        status.code().unwrap_or(-1)
                    );
                }
            }
            None => {
                self.process.kill().unwrap();
            }
        }
    }
}
impl Process {
    /// Spawns the faucet binary, pointing it at the node listening on
    /// `server_port` and serving mint requests on `port`. The child inherits
    /// this process's stdio.
    ///
    /// # Panics
    /// Panics if the diem root key path cannot be canonicalized or the
    /// faucet binary cannot be spawned.
    pub fn start(
        faucet_bin_path: &Path,
        port: u16,
        server_port: u16,
        diem_root_key_path: &Path,
    ) -> Self {
        Self {
            port,
            process: Command::new(faucet_bin_path)
                .arg("-s")
                .arg(format!("http://localhost:{}", server_port))
                .arg("-p")
                .arg(format!("{}", port))
                .arg("-m")
                .arg(
                    diem_root_key_path
                        .canonicalize()
                        .expect("Unable to get canonical path of diem root key file")
                        .to_str()
                        .unwrap(),
                )
                .arg("-c")
                .arg(ChainId::test().id().to_string())
                .stdin(Stdio::inherit())
                .stdout(Stdio::inherit())
                .stderr(Stdio::inherit())
                .spawn()
                .with_context(|| {
                    format!(
                        "Error launching faucet process with binary: {:?}",
                        faucet_bin_path
                    )
                })
                .expect("Failed to spawn faucet process"),
        }
    }
    /// URL of the faucet's mint endpoint.
    pub fn mint_url(&self) -> String {
        // Tail expression instead of an explicit `return` (clippy: needless_return).
        format!("http://localhost:{}/mint", self.port)
    }
    /// URL of the faucet's health-check endpoint.
    pub fn health_check_url(&self) -> Url {
        Url::parse(format!("http://localhost:{}/-/healthy", self.port).as_str()).unwrap()
    }
    /// Polls the health-check endpoint (up to 60 attempts, 500ms apart)
    /// until the faucet answers `200 OK`, echoing the response body.
    ///
    /// # Errors
    /// Returns an error if the faucet never becomes healthy in time.
    pub fn wait_for_connectivity(&self) -> Result<()> {
        let client = blocking::Client::new();
        let num_attempts = 60;
        for i in 0..num_attempts {
            println!("Wait for faucet connectivity attempt: {}", i);
            let resp = client.get(self.health_check_url()).send();
            if let Ok(ret) = resp {
                // Plain equality reads clearer than pattern-matching a
                // `StatusCode` associated constant.
                if ret.status() == StatusCode::OK {
                    println!("{}", ret.text()?);
                    return Ok(());
                }
            }
            ::std::thread::sleep(::std::time::Duration::from_millis(500));
        }
        Err(format_err!("Faucet launch failed"))
    }
}
| 29.594059 | 89 | 0.449314 |
014db6a14118b846c6f43d8cbe9131503f5a5527 | 557 | #![doc(hidden)]
use crate::compiler_plugin;
use crate::customize::CustomizeCallbackDefault;
use crate::gen::all::gen_all;
use crate::Customize;
#[doc(hidden)]
pub fn protoc_gen_rust_main() {
    // Entry point used when this crate runs as a `protoc` plugin: read the
    // code-generation request, parse customization options from the plugin
    // parameter string, and generate .rs files for the requested inputs.
    compiler_plugin::plugin_main(|r| {
        let customize = Customize::parse_from_parameter(r.parameter).expect("parse options");
        gen_all(
            r.file_descriptors,
            "protoc --rust-out=...",
            r.files_to_generate,
            &customize,
            &CustomizeCallbackDefault,
        )
    })
    .expect("plugin failed");
}
0948d17aa9873062e15cc47677bed42950c33803 | 3,584 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use glib::object::IsA;
use glib::translate::*;
use gst_sys;
use Element;
use Object;
use TagList;
use TagMergeMode;
glib_wrapper! {
    // Generated wrapper type for the GstTagSetter GObject interface;
    // implementors are required to also be `Element`s.
    pub struct TagSetter(Interface<gst_sys::GstTagSetter>) @requires Element, Object;
    match fn {
        get_type => || gst_sys::gst_tag_setter_get_type(),
    }
}
// NOTE(review): Send/Sync are asserted by the binding generator on the
// assumption the underlying GStreamer object is MT-safe — not verifiable
// from this file alone.
unsafe impl Send for TagSetter {}
unsafe impl Sync for TagSetter {}
// Convenience constant for passing "no tag setter" to generated APIs.
pub const NONE_TAG_SETTER: Option<&TagSetter> = None;
/// Generated trait of safe wrappers around the `GstTagSetter` C API,
/// implemented for every type that is a [`TagSetter`].
pub trait TagSetterExt: 'static {
    // The methods below were skipped by the generator: their C signatures
    // involve varargs or raw GValue conversions it cannot express.
    //fn add_tag_valist(&self, mode: TagMergeMode, tag: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);
    //fn add_tag_valist_values(&self, mode: TagMergeMode, tag: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);
    //fn add_tag_value(&self, mode: TagMergeMode, tag: &str, value: /*Ignored*/&glib::Value);
    //fn add_tag_values(&self, mode: TagMergeMode, tag: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);
    //fn add_tags(&self, mode: TagMergeMode, tag: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);
    /// Wraps `gst_tag_setter_get_tag_list()`.
    fn get_tag_list(&self) -> Option<TagList>;
    /// Wraps `gst_tag_setter_get_tag_merge_mode()`.
    fn get_tag_merge_mode(&self) -> TagMergeMode;
    /// Wraps `gst_tag_setter_merge_tags()`.
    fn merge_tags(&self, list: &TagList, mode: TagMergeMode);
    /// Wraps `gst_tag_setter_reset_tags()`.
    fn reset_tags(&self);
    /// Wraps `gst_tag_setter_set_tag_merge_mode()`.
    fn set_tag_merge_mode(&self, mode: TagMergeMode);
}
// Blanket implementation: each method forwards to the corresponding
// `gst_sys` C function, converting values at the FFI boundary with the
// usual glib `to_glib*`/`from_glib*` helpers.
impl<O: IsA<TagSetter>> TagSetterExt for O {
    //fn add_tag_valist(&self, mode: TagMergeMode, tag: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported) {
    //    unsafe { TODO: call gst_sys:gst_tag_setter_add_tag_valist() }
    //}
    //fn add_tag_valist_values(&self, mode: TagMergeMode, tag: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported) {
    //    unsafe { TODO: call gst_sys:gst_tag_setter_add_tag_valist_values() }
    //}
    //fn add_tag_value(&self, mode: TagMergeMode, tag: &str, value: /*Ignored*/&glib::Value) {
    //    unsafe { TODO: call gst_sys:gst_tag_setter_add_tag_value() }
    //}
    //fn add_tag_values(&self, mode: TagMergeMode, tag: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {
    //    unsafe { TODO: call gst_sys:gst_tag_setter_add_tag_values() }
    //}
    //fn add_tags(&self, mode: TagMergeMode, tag: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {
    //    unsafe { TODO: call gst_sys:gst_tag_setter_add_tags() }
    //}
    fn get_tag_list(&self) -> Option<TagList> {
        unsafe {
            from_glib_none(gst_sys::gst_tag_setter_get_tag_list(
                self.as_ref().to_glib_none().0,
            ))
        }
    }
    fn get_tag_merge_mode(&self) -> TagMergeMode {
        unsafe {
            from_glib(gst_sys::gst_tag_setter_get_tag_merge_mode(
                self.as_ref().to_glib_none().0,
            ))
        }
    }
    fn merge_tags(&self, list: &TagList, mode: TagMergeMode) {
        unsafe {
            gst_sys::gst_tag_setter_merge_tags(
                self.as_ref().to_glib_none().0,
                list.to_glib_none().0,
                mode.to_glib(),
            );
        }
    }
    fn reset_tags(&self) {
        unsafe {
            gst_sys::gst_tag_setter_reset_tags(self.as_ref().to_glib_none().0);
        }
    }
    fn set_tag_merge_mode(&self, mode: TagMergeMode) {
        unsafe {
            gst_sys::gst_tag_setter_set_tag_merge_mode(
                self.as_ref().to_glib_none().0,
                mode.to_glib(),
            );
        }
    }
}
| 32.581818 | 132 | 0.634766 |
c10fc86e7d00f04e0fa183aaf07a01da75675bcb | 4,964 | use super::support;
use imgui::*;
use memflow::*;
use memflow_win32::error::Result;
use serde::{Deserialize, Serialize};
// see https://github.com/serde-rs/serde/issues/368
// Serde's `default = "…"` attribute requires a function path, so this
// helper exists solely so a missing `parse_sections` key deserializes
// as `true` (see `Config` below).
#[allow(unused)]
fn default_as_true() -> bool {
    true
}
/// Plugin configuration, persisted as `{PWD}/Plugins/memflow.toml`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Name of the memflow connector to load.
    pub connector: String,
    /// Connector argument string (empty when absent from the file).
    #[serde(default)]
    pub args: String,
    // TODO: expose caching options (lifetimes, etc)
    /// Whether sections should be parsed; a key missing from the file
    /// deserializes as `true` via `default_as_true`.
    #[serde(default = "default_as_true")]
    pub parse_sections: bool,
}
impl Default for Config {
    fn default() -> Config {
        Config {
            connector: String::new(),
            args: String::new(),
            // NOTE(review): disagrees with the serde field default
            // (`default_as_true`): a key missing from memflow.toml yields
            // `true`, while a missing file yields `false` — confirm which
            // behavior is intended.
            parse_sections: false,
        }
    }
}
/// Holds the loaded [`Config`] and drives the configuration UI.
pub struct Settings {
    config: Config,
}
impl Settings {
    /// Loads the current config from the {PWD}/Plugins/memflow.toml file.
    pub fn new() -> Self {
        // load config file
        let pwd = std::env::current_dir().expect("unable to get pwd");
        let config = if let Ok(configstr) =
            std::fs::read_to_string(pwd.join("Plugins").join("memflow.toml"))
        {
            // NOTE(review): a malformed config file is silently replaced by
            // the defaults here — consider surfacing the toml parse error.
            toml::from_str::<Config>(&configstr).unwrap_or_default()
        } else {
            Config::default()
        };
        Self { config }
    }
    /// Saves the current configuration to the {PWD}/Plugins/memflow.toml file.
    pub fn persist(&self) -> Result<()> {
        let pwd = std::env::current_dir().map_err(|_| "unable to get pwd")?;
        let configstr =
            toml::to_string_pretty(&self.config).map_err(|_| "unable to serialize config")?;
        std::fs::write(pwd.join("Plugins").join("memflow.toml"), &configstr)
            .map_err(|_| "unable to write config file")?;
        Ok(())
    }
    /// Retrieves the current config (returned by value; cloned).
    pub fn config(&self) -> Config {
        self.config.clone()
    }
    /// Displays the configuration UI to the user and updates the config
    /// This function blocks until the user clicks the "Ok" button.
    pub fn configure(&mut self) {
        // Enumerate the connectors currently available on this machine.
        let inventory = unsafe { ConnectorInventory::scan() };
        let connectors: Vec<ImString> = inventory
            .available_connectors()
            .iter()
            .map(|c| ImString::from(c.to_owned()))
            .collect::<Vec<_>>();
        // Pre-select the connector named in the stored config, if present.
        let mut connector_idx = connectors
            .iter()
            .enumerate()
            .find(|(_, c)| c.to_str() == self.config.connector)
            .and_then(|(i, _)| Some(i as i32))
            .unwrap_or_default();
        let mut connector_args = ImString::from(self.config.args.clone());
        let mut parse_sections = self.config.parse_sections;
        {
            support::show_window("memflow", 400.0, 265.0, |run, ui| {
                let connectors_ref: Vec<&ImStr> =
                    connectors.iter().map(|c| c.as_ref()).collect::<Vec<_>>();
                Window::new(im_str!("memflow"))
                    .position([10.0, 10.0], Condition::Always)
                    .size([375.0, 1000.0], Condition::Always)
                    .title_bar(false)
                    .resizable(false)
                    .movable(false)
                    .scroll_bar(false)
                    .save_settings(false)
                    .focus_on_appearing(false)
                    .movable(false)
                    .build(ui, || {
                        ui.text(im_str!("Inventory"));
                        ui.separator();
                        ui.list_box(
                            im_str!("Connector"),
                            &mut connector_idx,
                            &connectors_ref[..],
                            4,
                        );
                        ui.input_text(im_str!("Args"), &mut connector_args).build();
                        ui.dummy([0.0, 16.0]);
                        ui.text(im_str!("Options"));
                        ui.separator();
                        ui.checkbox(im_str!("Parse Sections"), &mut parse_sections);
                        // TODO: configure caching
                        ui.dummy([0.0, 16.0]);
                        if ui.button(im_str!("Load"), [64.0, 26.0]) {
                            // update config
                            self.config.connector = connectors
                                .get(connector_idx as usize)
                                .and_then(|c| Some(c.to_string()))
                                .unwrap_or_default();
                            self.config.args = connector_args.to_str().to_owned();
                            self.config.parse_sections = parse_sections;
                            // close window
                            *run = false;
                        }
                        ui.same_line(64.0 + 16.0);
                        if ui.button(im_str!("Cancel"), [64.0, 26.0]) {
                            // Discard the edits; `self.config` stays as loaded.
                            *run = false;
                        }
                    });
            });
        }
    }
}
| 32.657895 | 92 | 0.476632 |
03d16feb483a9c5110bc1e257e5115d2429f58ab | 162,455 | // ignore-tidy-filelength
//! This module contains the "cleaned" pieces of the AST, and the functions
//! that clean them.
pub mod inline;
pub mod cfg;
mod simplify;
mod auto_trait;
mod blanket_impl;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc_target::spec::abi::Abi;
use rustc_typeck::hir_ty_to_ty;
use rustc::infer::region_constraints::{RegionConstraintData, Constraint};
use rustc::middle::resolve_lifetime as rl;
use rustc::middle::lang_items;
use rustc::middle::stability;
use rustc::mir::interpret::{GlobalId, ConstValue};
use rustc::hir::{self, HirVec};
use rustc::hir::def::{self, Res, DefKind, CtorKind};
use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::ty::subst::{InternalSubsts, SubstsRef, UnpackedKind};
use rustc::ty::{self, DefIdTree, TyCtxt, Region, RegionVid, Ty, AdtKind};
use rustc::ty::fold::TypeFolder;
use rustc::ty::layout::VariantIdx;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use syntax::ast::{self, AttrStyle, Ident};
use syntax::attr;
use syntax::ext::base::MacroKind;
use syntax::source_map::{dummy_spanned, Spanned};
use syntax::ptr::P;
use syntax::symbol::{Symbol, kw, sym};
use syntax::symbol::InternedString;
use syntax_pos::{self, Pos, FileName};
use std::collections::hash_map::Entry;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::default::Default;
use std::{mem, slice, vec};
use std::iter::{FromIterator, once};
use std::rc::Rc;
use std::cell::RefCell;
use std::sync::Arc;
use std::u32;
use parking_lot::ReentrantMutex;
use crate::core::{self, DocContext};
use crate::doctree;
use crate::visit_ast;
use crate::html::render::{cache, ExternalLocation};
use crate::html::item_type::ItemType;
use self::cfg::Cfg;
use self::auto_trait::AutoTraitFinder;
use self::blanket_impl::BlanketImplFinder;
pub use self::Type::*;
pub use self::Mutability::*;
pub use self::ItemEnum::*;
pub use self::SelfTy::*;
pub use self::FunctionRetTy::*;
pub use self::Visibility::{Public, Inherited};
thread_local!(pub static MAX_DEF_ID: RefCell<FxHashMap<CrateNum, DefId>> = Default::default());
const FN_OUTPUT_NAME: &'static str = "Output";
// extract the stability index for a node from tcx, if possible
fn get_stability(cx: &DocContext<'_>, def_id: DefId) -> Option<Stability> {
    cx.tcx.lookup_stability(def_id).clean(cx)
}
// Likewise for the item's deprecation attribute, if any.
fn get_deprecation(cx: &DocContext<'_>, def_id: DefId) -> Option<Deprecation> {
    cx.tcx.lookup_deprecation(def_id).clean(cx)
}
/// Converts a rustc AST/HIR/ty node into its "cleaned" rustdoc counterpart.
pub trait Clean<T> {
    fn clean(&self, cx: &DocContext<'_>) -> T;
}
// Blanket impls: cleaning a container cleans each element; cleaning a
// pointer/binder cleans what it wraps.
impl<T: Clean<U>, U> Clean<Vec<U>> for [T] {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<U> {
        self.iter().map(|x| x.clean(cx)).collect()
    }
}
impl<T: Clean<U>, U, V: Idx> Clean<IndexVec<V, U>> for IndexVec<V, T> {
    fn clean(&self, cx: &DocContext<'_>) -> IndexVec<V, U> {
        self.iter().map(|x| x.clean(cx)).collect()
    }
}
impl<T: Clean<U>, U> Clean<U> for P<T> {
    fn clean(&self, cx: &DocContext<'_>) -> U {
        (**self).clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<U> for Rc<T> {
    fn clean(&self, cx: &DocContext<'_>) -> U {
        (**self).clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<Option<U>> for Option<T> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<U> {
        self.as_ref().map(|v| v.clean(cx))
    }
}
// Note: cleaning a binder skips the binder itself.
impl<T, U> Clean<U> for ty::Binder<T> where T: Clean<U> {
    fn clean(&self, cx: &DocContext<'_>) -> U {
        self.skip_binder().clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<Vec<U>> for P<[T]> {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<U> {
        self.iter().map(|x| x.clean(cx)).collect()
    }
}
/// The cleaned representation of a whole crate, as produced by the
/// `Clean<Crate>` impl below and consumed by the rendering passes.
#[derive(Clone, Debug)]
pub struct Crate {
    pub name: String,
    pub version: Option<String>,
    pub src: FileName,
    pub module: Option<Item>,
    pub externs: Vec<(CrateNum, ExternalCrate)>,
    pub primitives: Vec<(DefId, PrimitiveType, Attributes)>,
    // These are later on moved into `CACHEKEY`, leaving the map empty.
    // Only here so that they can be filtered through the rustdoc passes.
    pub external_traits: Arc<ReentrantMutex<RefCell<FxHashMap<DefId, Trait>>>>,
    pub masked_crates: FxHashSet<CrateNum>,
}
/// Produces the top-level `Crate` from the AST visitor's output: cleans the
/// root module, records masked crates, and splices primitive/keyword docs
/// into the root module.
impl<'a, 'tcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Crate {
        use crate::visit_lib::LibEmbargoVisitor;
        // Seed render info with the lang items the renderer needs for
        // Deref/DerefMut/Box special-casing.
        {
            let mut r = cx.renderinfo.borrow_mut();
            r.deref_trait_did = cx.tcx.lang_items().deref_trait();
            r.deref_mut_trait_did = cx.tcx.lang_items().deref_mut_trait();
            r.owned_box_did = cx.tcx.lang_items().owned_box();
        }
        let mut externs = Vec::new();
        for &cnum in cx.tcx.crates().iter() {
            externs.push((cnum, cnum.clean(cx)));
            // Analyze doc-reachability for extern items
            LibEmbargoVisitor::new(cx).visit_lib(cnum);
        }
        externs.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
        // Clean the crate, translating the entire libsyntax AST to one that is
        // understood by rustdoc.
        let mut module = self.module.clean(cx);
        let mut masked_crates = FxHashSet::default();
        // The visitor always produces a `ModuleItem` at the root; anything
        // else is a bug.
        match module.inner {
            ModuleItem(ref module) => {
                for it in &module.items {
                    // `compiler_builtins` should be masked too, but we can't apply
                    // `#[doc(masked)]` to the injected `extern crate` because it's unstable.
                    if it.is_extern_crate()
                        && (it.attrs.has_doc_flag(sym::masked)
                            || self.cx.tcx.is_compiler_builtins(it.def_id.krate))
                    {
                        masked_crates.insert(it.def_id.krate);
                    }
                }
            }
            _ => unreachable!(),
        }
        let ExternalCrate { name, src, primitives, keywords, .. } = LOCAL_CRATE.clean(cx);
        // Splice the local crate's primitive and keyword docs into the root
        // module so they render like ordinary items.
        {
            let m = match module.inner {
                ModuleItem(ref mut m) => m,
                _ => unreachable!(),
            };
            m.items.extend(primitives.iter().map(|&(def_id, prim, ref attrs)| {
                Item {
                    source: Span::empty(),
                    name: Some(prim.to_url_str().to_string()),
                    attrs: attrs.clone(),
                    visibility: Some(Public),
                    stability: get_stability(cx, def_id),
                    deprecation: get_deprecation(cx, def_id),
                    def_id,
                    inner: PrimitiveItem(prim),
                }
            }));
            m.items.extend(keywords.into_iter().map(|(def_id, kw, attrs)| {
                Item {
                    source: Span::empty(),
                    name: Some(kw.clone()),
                    attrs: attrs,
                    visibility: Some(Public),
                    stability: get_stability(cx, def_id),
                    deprecation: get_deprecation(cx, def_id),
                    def_id,
                    inner: KeywordItem(kw),
                }
            }));
        }
        Crate {
            name,
            version: None,
            src,
            module: Some(module),
            externs,
            primitives,
            external_traits: cx.external_traits.clone(),
            masked_crates,
        }
    }
}
/// Metadata rustdoc collects about a crate (local or external): its name,
/// source file, attributes, and any `#[doc(primitive)]` / `#[doc(keyword)]`
/// modules found at its top level.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ExternalCrate {
    pub name: String,
    pub src: FileName,
    pub attrs: Attributes,
    pub primitives: Vec<(DefId, PrimitiveType, Attributes)>,
    pub keywords: Vec<(DefId, String, Attributes)>,
}
/// Collects crate-level metadata for a crate number, scanning its top-level
/// items for `#[doc(primitive)]` and `#[doc(keyword)]` tagged modules.
impl Clean<ExternalCrate> for CrateNum {
    fn clean(&self, cx: &DocContext<'_>) -> ExternalCrate {
        let root = DefId { krate: *self, index: CRATE_DEF_INDEX };
        let krate_span = cx.tcx.def_span(root);
        let krate_src = cx.sess().source_map().span_to_filename(krate_span);
        // Collect all inner modules which are tagged as implementations of
        // primitives.
        //
        // Note that this loop only searches the top-level items of the crate,
        // and this is intentional. If we were to search the entire crate for an
        // item tagged with `#[doc(primitive)]` then we would also have to
        // search the entirety of external modules for items tagged
        // `#[doc(primitive)]`, which is a pretty inefficient process (decoding
        // all that metadata unconditionally).
        //
        // In order to keep the metadata load under control, the
        // `#[doc(primitive)]` feature is explicitly designed to only allow the
        // primitive tags to show up as the top level items in a crate.
        //
        // Also note that this does not attempt to deal with modules tagged
        // duplicately for the same primitive. This is handled later on when
        // rendering by delegating everything to a hash map.
        let as_primitive = |res: Res| {
            if let Res::Def(DefKind::Mod, def_id) = res {
                let attrs = cx.tcx.get_attrs(def_id).clean(cx);
                let mut prim = None;
                // Look for `#[doc(primitive = "...")]` among the module's
                // doc attributes.
                for attr in attrs.lists(sym::doc) {
                    if let Some(v) = attr.value_str() {
                        if attr.check_name(sym::primitive) {
                            prim = PrimitiveType::from_str(&v.as_str());
                            if prim.is_some() {
                                break;
                            }
                            // FIXME: should warn on unknown primitives?
                        }
                    }
                }
                return prim.map(|p| (def_id, p, attrs));
            }
            None
        };
        let primitives = if root.is_local() {
            // Local crate: walk the HIR root module's items directly.
            cx.tcx.hir().krate().module.item_ids.iter().filter_map(|&id| {
                let item = cx.tcx.hir().expect_item_by_hir_id(id.id);
                match item.node {
                    hir::ItemKind::Mod(_) => {
                        as_primitive(Res::Def(
                            DefKind::Mod,
                            cx.tcx.hir().local_def_id_from_hir_id(id.id),
                        ))
                    }
                    hir::ItemKind::Use(ref path, hir::UseKind::Single)
                    if item.vis.node.is_pub() => {
                        as_primitive(path.res).map(|(_, prim, attrs)| {
                            // Pretend the primitive is local.
                            (cx.tcx.hir().local_def_id_from_hir_id(id.id), prim, attrs)
                        })
                    }
                    _ => None
                }
            }).collect()
        } else {
            // External crate: query its top-level children from metadata.
            cx.tcx.item_children(root).iter().map(|item| item.res)
                  .filter_map(as_primitive).collect()
        };
        // `#[doc(keyword = "...")]` modules are collected the same way as
        // primitives, but the keyword must be a valid doc keyword.
        let as_keyword = |res: Res| {
            if let Res::Def(DefKind::Mod, def_id) = res {
                let attrs = cx.tcx.get_attrs(def_id).clean(cx);
                let mut keyword = None;
                for attr in attrs.lists(sym::doc) {
                    if let Some(v) = attr.value_str() {
                        if attr.check_name(sym::keyword) {
                            if v.is_doc_keyword() {
                                keyword = Some(v.to_string());
                                break;
                            }
                            // FIXME: should warn on unknown keywords?
                        }
                    }
                }
                return keyword.map(|p| (def_id, p, attrs));
            }
            None
        };
        let keywords = if root.is_local() {
            cx.tcx.hir().krate().module.item_ids.iter().filter_map(|&id| {
                let item = cx.tcx.hir().expect_item_by_hir_id(id.id);
                match item.node {
                    hir::ItemKind::Mod(_) => {
                        as_keyword(Res::Def(
                            DefKind::Mod,
                            cx.tcx.hir().local_def_id_from_hir_id(id.id),
                        ))
                    }
                    hir::ItemKind::Use(ref path, hir::UseKind::Single)
                    if item.vis.node.is_pub() => {
                        as_keyword(path.res).map(|(_, prim, attrs)| {
                            (cx.tcx.hir().local_def_id_from_hir_id(id.id), prim, attrs)
                        })
                    }
                    _ => None
                }
            }).collect()
        } else {
            cx.tcx.item_children(root).iter().map(|item| item.res)
                  .filter_map(as_keyword).collect()
        };
        ExternalCrate {
            name: cx.tcx.crate_name(*self).to_string(),
            src: krate_src,
            attrs: cx.tcx.get_attrs(root).clean(cx),
            primitives,
            keywords,
        }
    }
}
/// Anything with a source location and set of attributes and, optionally, a
/// name. That is, anything that can be documented. This doesn't correspond
/// directly to the AST's concept of an item; it's a strict superset.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Item {
    /// Stringified span
    pub source: Span,
    /// Not everything has a name. E.g., impls
    pub name: Option<String>,
    pub attrs: Attributes,
    // The kind-specific payload (module, struct, method, ...).
    pub inner: ItemEnum,
    pub visibility: Option<Visibility>,
    pub def_id: DefId,
    pub stability: Option<Stability>,
    pub deprecation: Option<Deprecation>,
}
impl fmt::Debug for Item {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `DefId`s at or above the per-crate `MAX_DEF_ID` entry were
        // synthesized by rustdoc; print a marker instead of a bogus id.
        let fake = MAX_DEF_ID.with(|m| m.borrow().get(&self.def_id.krate)
                                   .map(|id| self.def_id >= *id).unwrap_or(false));
        let def_id: &dyn fmt::Debug = if fake { &"**FAKE**" } else { &self.def_id };
        fmt.debug_struct("Item")
            .field("source", &self.source)
            .field("name", &self.name)
            .field("attrs", &self.attrs)
            .field("inner", &self.inner)
            .field("visibility", &self.visibility)
            .field("def_id", def_id)
            .field("stability", &self.stability)
            .field("deprecation", &self.deprecation)
            .finish()
    }
}
impl Item {
    /// Finds the `doc` attribute as a NameValue and returns the corresponding
    /// value found.
    pub fn doc_value<'a>(&'a self) -> Option<&'a str> {
        self.attrs.doc_value()
    }
    /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
    /// with newlines.
    pub fn collapsed_doc_value(&self) -> Option<String> {
        self.attrs.collapsed_doc_value()
    }
    /// Returns the resolved intra-doc links of this item as `(text, url)`
    /// pairs. The rendering cache must already be populated.
    pub fn links(&self) -> Vec<(String, String)> {
        self.attrs.links(&self.def_id.krate)
    }
    /// Returns `true` if this item is the crate root module (possibly
    /// stripped by a pass).
    pub fn is_crate(&self) -> bool {
        match self.inner {
            StrippedItem(box ModuleItem(Module { is_crate: true, ..})) |
            ModuleItem(Module { is_crate: true, ..}) => true,
            _ => false,
        }
    }
    // Simple item-kind predicates; each just compares `type_()` against the
    // corresponding `ItemType` variant.
    pub fn is_mod(&self) -> bool {
        self.type_() == ItemType::Module
    }
    pub fn is_trait(&self) -> bool {
        self.type_() == ItemType::Trait
    }
    pub fn is_struct(&self) -> bool {
        self.type_() == ItemType::Struct
    }
    pub fn is_enum(&self) -> bool {
        self.type_() == ItemType::Enum
    }
    pub fn is_variant(&self) -> bool {
        self.type_() == ItemType::Variant
    }
    pub fn is_associated_type(&self) -> bool {
        self.type_() == ItemType::AssocType
    }
    pub fn is_associated_const(&self) -> bool {
        self.type_() == ItemType::AssocConst
    }
    pub fn is_method(&self) -> bool {
        self.type_() == ItemType::Method
    }
    pub fn is_ty_method(&self) -> bool {
        self.type_() == ItemType::TyMethod
    }
    pub fn is_typedef(&self) -> bool {
        self.type_() == ItemType::Typedef
    }
    pub fn is_primitive(&self) -> bool {
        self.type_() == ItemType::Primitive
    }
    pub fn is_union(&self) -> bool {
        self.type_() == ItemType::Union
    }
    pub fn is_import(&self) -> bool {
        self.type_() == ItemType::Import
    }
    pub fn is_extern_crate(&self) -> bool {
        self.type_() == ItemType::ExternCrate
    }
    pub fn is_keyword(&self) -> bool {
        self.type_() == ItemType::Keyword
    }
    /// Returns `true` if a rustdoc pass removed this item from the output.
    pub fn is_stripped(&self) -> bool {
        match self.inner { StrippedItem(..) => true, _ => false }
    }
    /// For field-bearing kinds (structs, unions, struct variants), reports
    /// whether any of their fields were stripped; `None` for other kinds.
    pub fn has_stripped_fields(&self) -> Option<bool> {
        match self.inner {
            StructItem(ref _struct) => Some(_struct.fields_stripped),
            UnionItem(ref union) => Some(union.fields_stripped),
            VariantItem(Variant { kind: VariantKind::Struct(ref vstruct)} ) => {
                Some(vstruct.fields_stripped)
            },
            _ => None,
        }
    }
    /// Builds the space-separated CSS class list (`unstable`, `deprecated`)
    /// describing this item's stability, if any class applies.
    pub fn stability_class(&self) -> Option<String> {
        // NOTE: `|s|` (not `|ref s|`) — `as_ref` already yields a reference.
        self.stability.as_ref().and_then(|s| {
            let mut classes = Vec::with_capacity(2);
            if s.level == stability::Unstable {
                classes.push("unstable");
            }
            if s.deprecation.is_some() {
                classes.push("deprecated");
            }
            if !classes.is_empty() {
                Some(classes.join(" "))
            } else {
                None
            }
        })
    }
    /// Returns the version in which this item was stabilized, if recorded.
    pub fn stable_since(&self) -> Option<&str> {
        self.stability.as_ref().map(|s| &s.since[..])
    }
    /// Returns `true` if the item carries `#[non_exhaustive]`.
    pub fn is_non_exhaustive(&self) -> bool {
        self.attrs.other_attrs.iter()
            .any(|a| a.check_name(sym::non_exhaustive))
    }
    /// Returns a documentation-level item type from the item.
    pub fn type_(&self) -> ItemType {
        ItemType::from(self)
    }
    /// Returns the info in the item's `#[deprecated]` or `#[rustc_deprecated]` attributes.
    ///
    /// If the item is not deprecated, returns `None`.
    pub fn deprecation(&self) -> Option<&Deprecation> {
        self.deprecation
            .as_ref()
            .or_else(|| self.stability.as_ref().and_then(|s| s.deprecation.as_ref()))
    }
    /// Returns `true` for provided trait methods that are overridable
    /// (have a body and are not `final`).
    pub fn is_default(&self) -> bool {
        match self.inner {
            ItemEnum::MethodItem(ref meth) => {
                if let Some(defaultness) = meth.defaultness {
                    defaultness.has_value() && !defaultness.is_final()
                } else {
                    false
                }
            }
            _ => false,
        }
    }
}
/// The kind-specific payload of an [`Item`], one variant per documentable
/// item kind.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ItemEnum {
    ExternCrateItem(String, Option<String>),
    ImportItem(Import),
    StructItem(Struct),
    UnionItem(Union),
    EnumItem(Enum),
    FunctionItem(Function),
    ModuleItem(Module),
    TypedefItem(Typedef, bool /* is associated type */),
    ExistentialItem(Existential, bool /* is associated type */),
    StaticItem(Static),
    ConstantItem(Constant),
    TraitItem(Trait),
    TraitAliasItem(TraitAlias),
    ImplItem(Impl),
    /// A method signature only. Used for required methods in traits (ie,
    /// non-default-methods).
    TyMethodItem(TyMethod),
    /// A method with a body.
    MethodItem(Method),
    StructFieldItem(Type),
    VariantItem(Variant),
    /// `fn`s from an extern block
    ForeignFunctionItem(Function),
    /// `static`s from an extern block
    ForeignStaticItem(Static),
    /// `type`s from an extern block
    ForeignTypeItem,
    MacroItem(Macro),
    ProcMacroItem(ProcMacro),
    PrimitiveItem(PrimitiveType),
    AssocConstItem(Type, Option<String>),
    AssocTypeItem(Vec<GenericBound>, Option<Type>),
    /// An item that has been stripped by a rustdoc pass
    StrippedItem(Box<ItemEnum>),
    KeywordItem(String),
}
impl ItemEnum {
    /// Returns the item's generics, for the kinds that carry them.
    pub fn generics(&self) -> Option<&Generics> {
        match *self {
            ItemEnum::StructItem(ref s) => Some(&s.generics),
            ItemEnum::EnumItem(ref e) => Some(&e.generics),
            ItemEnum::FunctionItem(ref f) => Some(&f.generics),
            ItemEnum::TypedefItem(ref t, _) => Some(&t.generics),
            ItemEnum::ExistentialItem(ref t, _) => Some(&t.generics),
            ItemEnum::TraitItem(ref t) => Some(&t.generics),
            ItemEnum::ImplItem(ref i) => Some(&i.generics),
            ItemEnum::TyMethodItem(ref i) => Some(&i.generics),
            ItemEnum::MethodItem(ref i) => Some(&i.generics),
            ItemEnum::ForeignFunctionItem(ref f) => Some(&f.generics),
            ItemEnum::TraitAliasItem(ref ta) => Some(&ta.generics),
            _ => None,
        }
    }
    /// Returns `true` for the kinds that can appear as associated items.
    pub fn is_associated(&self) -> bool {
        match *self {
            ItemEnum::TypedefItem(..) | ItemEnum::AssocTypeItem(..) => true,
            _ => false,
        }
    }
}
/// The cleaned contents of a module; `is_crate` marks the crate root.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Module {
    pub items: Vec<Item>,
    pub is_crate: bool,
}
/// Cleans a `doctree::Module` into a `ModuleItem`, gathering every kind of
/// child item and choosing the span that best represents the module source.
impl Clean<Item> for doctree::Module {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        // Modules without a name (presumably the crate root — TODO confirm)
        // render with an empty name.
        let name = match self.name {
            Some(name) => name.clean(cx),
            None => String::new(),
        };
        // maintain a stack of mod ids, for doc comment path resolution
        // but we also need to resolve the module's own docs based on whether its docs were written
        // inside or outside the module, so check for that
        let attrs = self.attrs.clean(cx);
        // Collect every child item kind the doctree recorded, in a fixed
        // order.
        let mut items: Vec<Item> = vec![];
        items.extend(self.extern_crates.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.imports.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.structs.iter().map(|x| x.clean(cx)));
        items.extend(self.unions.iter().map(|x| x.clean(cx)));
        items.extend(self.enums.iter().map(|x| x.clean(cx)));
        items.extend(self.fns.iter().map(|x| x.clean(cx)));
        items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.mods.iter().map(|x| x.clean(cx)));
        items.extend(self.typedefs.iter().map(|x| x.clean(cx)));
        items.extend(self.existentials.iter().map(|x| x.clean(cx)));
        items.extend(self.statics.iter().map(|x| x.clean(cx)));
        items.extend(self.constants.iter().map(|x| x.clean(cx)));
        items.extend(self.traits.iter().map(|x| x.clean(cx)));
        items.extend(self.impls.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.macros.iter().map(|x| x.clean(cx)));
        items.extend(self.proc_macros.iter().map(|x| x.clean(cx)));
        items.extend(self.trait_aliases.iter().map(|x| x.clean(cx)));
        // determine if we should display the inner contents or
        // the outer `mod` item for the source code.
        let whence = {
            let cm = cx.sess().source_map();
            let outer = cm.lookup_char_pos(self.where_outer.lo());
            let inner = cm.lookup_char_pos(self.where_inner.lo());
            if outer.file.start_pos == inner.file.start_pos {
                // mod foo { ... }
                self.where_outer
            } else {
                // mod foo; (and a separate SourceFile for the contents)
                self.where_inner
            }
        };
        Item {
            name: Some(name),
            attrs,
            source: whence.clean(cx),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            inner: ModuleItem(Module {
                is_crate: self.is_crate,
                items,
            })
        }
    }
}
/// Iterator over the nested meta items of every `#[name(...)]` list
/// attribute in a slice of attributes (see `AttributesExt::lists`).
pub struct ListAttributesIter<'a> {
    // Remaining attributes still to be examined.
    attrs: slice::Iter<'a, ast::Attribute>,
    // Nested items of the list attribute currently being drained.
    current_list: vec::IntoIter<ast::NestedMetaItem>,
    // The attribute name being matched (e.g. `doc`).
    name: Symbol,
}
impl<'a> Iterator for ListAttributesIter<'a> {
    type Item = ast::NestedMetaItem;
    fn next(&mut self) -> Option<Self::Item> {
        // Drain the current list first, then advance to the next matching
        // list attribute.
        if let Some(nested) = self.current_list.next() {
            return Some(nested);
        }
        for attr in &mut self.attrs {
            if let Some(list) = attr.meta_item_list() {
                if attr.check_name(self.name) {
                    self.current_list = list.into_iter();
                    if let Some(nested) = self.current_list.next() {
                        return Some(nested);
                    }
                }
            }
        }
        None
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // At least the rest of the current list; no useful upper bound since
        // remaining attributes haven't been inspected yet.
        let lower = self.current_list.len();
        (lower, None)
    }
}
pub trait AttributesExt {
    /// Finds an attribute as List and returns the list of attributes nested inside.
    fn lists<'a>(&'a self, name: Symbol) -> ListAttributesIter<'a>;
}
impl AttributesExt for [ast::Attribute] {
    fn lists<'a>(&'a self, name: Symbol) -> ListAttributesIter<'a> {
        ListAttributesIter {
            attrs: self.iter(),
            // Start with an empty drained list; `next()` finds the first match.
            current_list: Vec::new().into_iter(),
            name,
        }
    }
}
pub trait NestedAttributesExt {
    /// Returns `true` if the attribute list contains a specific `Word`
    fn has_word(self, word: Symbol) -> bool;
}
impl<I: IntoIterator<Item=ast::NestedMetaItem>> NestedAttributesExt for I {
    fn has_word(self, word: Symbol) -> bool {
        // A "word" is a bare meta item (no value, no list) with this name.
        for attr in self {
            if attr.is_word() && attr.check_name(word) {
                return true;
            }
        }
        false
    }
}
/// A portion of documentation, extracted from a `#[doc]` attribute.
///
/// Each variant contains the line number within the complete doc-comment where the fragment
/// starts, as well as the Span where the corresponding doc comment or attribute is located.
///
/// Included files are kept separate from inline doc comments so that proper line-number
/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are
/// kept separate because of issue #42760.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum DocFragment {
    /// A doc fragment created from a `///` or `//!` doc comment.
    SugaredDoc(usize, syntax_pos::Span, String),
    /// A doc fragment created from a "raw" `#[doc=""]` attribute.
    RawDoc(usize, syntax_pos::Span, String),
    /// A doc fragment created from a `#[doc(include="filename")]` attribute. Contains both the
    /// given filename and the file contents.
    Include(usize, syntax_pos::Span, String, String),
}
impl DocFragment {
    /// Returns the fragment's documentation text, regardless of its kind.
    pub fn as_str(&self) -> &str {
        match *self {
            DocFragment::SugaredDoc(_, _, ref text)
            | DocFragment::RawDoc(_, _, ref text)
            | DocFragment::Include(_, _, _, ref text) => text.as_str(),
        }
    }
    /// Returns the span of the originating doc comment or attribute.
    pub fn span(&self) -> syntax_pos::Span {
        match *self {
            DocFragment::SugaredDoc(_, sp, _)
            | DocFragment::RawDoc(_, sp, _)
            | DocFragment::Include(_, sp, _, _) => sp,
        }
    }
}
// Joins the text of a sequence of doc fragments with newlines between them.
impl<'a> FromIterator<&'a DocFragment> for String {
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = &'a DocFragment>
    {
        let mut acc = String::new();
        for frag in iter {
            if !acc.is_empty() {
                acc.push('\n');
            }
            let docs = match *frag {
                DocFragment::SugaredDoc(_, _, ref docs)
                | DocFragment::RawDoc(_, _, ref docs)
                | DocFragment::Include(_, _, _, ref docs) => docs,
            };
            acc.push_str(docs);
        }
        acc
    }
}
/// The cleaned form of an item's attributes: doc text split into fragments,
/// `#[doc(cfg(...))]` conditions, and everything else kept verbatim.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)]
pub struct Attributes {
    pub doc_strings: Vec<DocFragment>,
    // Attributes that are not doc text (includes non-NameValue `doc` attrs).
    pub other_attrs: Vec<ast::Attribute>,
    // Combined `#[doc(cfg(...))]` / `#[target_feature]` condition, if any.
    pub cfg: Option<Arc<Cfg>>,
    // Span of the first doc attribute, if any.
    pub span: Option<syntax_pos::Span>,
    /// map from Rust paths to resolved defs and potential URL fragments
    pub links: Vec<(String, Option<DefId>, Option<String>)>,
    // Whether the first doc attribute is an inner (`//!`) attribute.
    pub inner_docs: bool,
}
impl Attributes {
    /// Extracts the content from an attribute `#[doc(cfg(content))]`.
    fn extract_cfg(mi: &ast::MetaItem) -> Option<&ast::MetaItem> {
        use syntax::ast::NestedMetaItem::MetaItem;
        // The accepted shape is exactly `doc(cfg(<content>))`: a one-element
        // list whose item is `cfg(...)`, itself a one-element list.
        if let ast::MetaItemKind::List(ref nmis) = mi.node {
            if nmis.len() == 1 {
                if let MetaItem(ref cfg_mi) = nmis[0] {
                    if cfg_mi.check_name(sym::cfg) {
                        if let ast::MetaItemKind::List(ref cfg_nmis) = cfg_mi.node {
                            if cfg_nmis.len() == 1 {
                                if let MetaItem(ref content_mi) = cfg_nmis[0] {
                                    return Some(content_mi);
                                }
                            }
                        }
                    }
                }
            }
        }
        None
    }
    /// Reads a `MetaItem` from within an attribute, looks for whether it is a
    /// `#[doc(include="file")]`, and returns the filename and contents of the file as loaded from
    /// its expansion.
    fn extract_include(mi: &ast::MetaItem)
        -> Option<(String, String)>
    {
        mi.meta_item_list().and_then(|list| {
            for meta in list {
                if meta.check_name(sym::include) {
                    // the actual compiled `#[doc(include="filename")]` gets expanded to
                    // `#[doc(include(file="filename", contents="file contents")]` so we need to
                    // look for that instead
                    return meta.meta_item_list().and_then(|list| {
                        let mut filename: Option<String> = None;
                        let mut contents: Option<String> = None;
                        for it in list {
                            if it.check_name(sym::file) {
                                if let Some(name) = it.value_str() {
                                    filename = Some(name.to_string());
                                }
                            } else if it.check_name(sym::contents) {
                                if let Some(docs) = it.value_str() {
                                    contents = Some(docs.to_string());
                                }
                            }
                        }
                        if let (Some(filename), Some(contents)) = (filename, contents) {
                            Some((filename, contents))
                        } else {
                            None
                        }
                    });
                }
            }
            None
        })
    }
    /// Returns `true` if any `#[doc(...)]` attribute contains the word `flag`
    /// (e.g. `#[doc(masked)]`, `#[doc(hidden)]`).
    pub fn has_doc_flag(&self, flag: Symbol) -> bool {
        for attr in &self.other_attrs {
            if !attr.check_name(sym::doc) { continue; }
            if let Some(items) = attr.meta_item_list() {
                if items.iter().filter_map(|i| i.meta_item()).any(|it| it.check_name(flag)) {
                    return true;
                }
            }
        }
        false
    }
    /// Parses raw attributes into `Attributes`: splits off doc text into
    /// fragments (tracking line numbers for doctests), accumulates
    /// `#[doc(cfg(...))]` and `#[target_feature(enable=...)]` into a single
    /// `Cfg`, and keeps every non-doc-text attribute in `other_attrs`.
    pub fn from_ast(diagnostic: &::errors::Handler,
                    attrs: &[ast::Attribute]) -> Attributes {
        let mut doc_strings = vec![];
        let mut sp = None;
        let mut cfg = Cfg::True;
        let mut doc_line = 0;
        let other_attrs = attrs.iter().filter_map(|attr| {
            attr.with_desugared_doc(|attr| {
                if attr.check_name(sym::doc) {
                    if let Some(mi) = attr.meta() {
                        if let Some(value) = mi.value_str() {
                            // Extracted #[doc = "..."]
                            let value = value.to_string();
                            let line = doc_line;
                            doc_line += value.lines().count();
                            if attr.is_sugared_doc {
                                doc_strings.push(DocFragment::SugaredDoc(line, attr.span, value));
                            } else {
                                doc_strings.push(DocFragment::RawDoc(line, attr.span, value));
                            }
                            if sp.is_none() {
                                sp = Some(attr.span);
                            }
                            return None;
                        } else if let Some(cfg_mi) = Attributes::extract_cfg(&mi) {
                            // Extracted #[doc(cfg(...))]
                            match Cfg::parse(cfg_mi) {
                                Ok(new_cfg) => cfg &= new_cfg,
                                Err(e) => diagnostic.span_err(e.span, e.msg),
                            }
                            return None;
                        } else if let Some((filename, contents)) = Attributes::extract_include(&mi)
                        {
                            let line = doc_line;
                            doc_line += contents.lines().count();
                            doc_strings.push(DocFragment::Include(line,
                                                                  attr.span,
                                                                  filename,
                                                                  contents));
                        }
                    }
                }
                Some(attr.clone())
            })
        }).collect();
        // treat #[target_feature(enable = "feat")] attributes as if they were
        // #[doc(cfg(target_feature = "feat"))] attributes as well
        for attr in attrs.lists(sym::target_feature) {
            if attr.check_name(sym::enable) {
                if let Some(feat) = attr.value_str() {
                    let meta = attr::mk_name_value_item_str(
                        Ident::with_empty_ctxt(sym::target_feature),
                        dummy_spanned(feat));
                    if let Ok(feat_cfg) = Cfg::parse(&meta) {
                        cfg &= feat_cfg;
                    }
                }
            }
        }
        // If the first `doc` attribute is an inner (`//!`) attribute, the
        // docs live "inside" the item. `find` replaces `filter(..).next()`.
        let inner_docs = attrs.iter()
                              .find(|a| a.check_name(sym::doc))
                              .map_or(true, |a| a.style == AttrStyle::Inner);
        Attributes {
            doc_strings,
            other_attrs,
            cfg: if cfg == Cfg::True { None } else { Some(Arc::new(cfg)) },
            span: sp,
            links: vec![],
            inner_docs,
        }
    }
    /// Finds the `doc` attribute as a NameValue and returns the corresponding
    /// value found.
    pub fn doc_value<'a>(&'a self) -> Option<&'a str> {
        self.doc_strings.first().map(|s| s.as_str())
    }
    /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined
    /// with newlines.
    pub fn collapsed_doc_value(&self) -> Option<String> {
        if !self.doc_strings.is_empty() {
            Some(self.doc_strings.iter().collect())
        } else {
            None
        }
    }
    /// Gets links as a vector
    ///
    /// Cache must be populated before call
    pub fn links(&self, krate: &CrateNum) -> Vec<(String, String)> {
        use crate::html::format::href;
        self.links.iter().filter_map(|&(ref s, did, ref fragment)| {
            match did {
                Some(did) => {
                    if let Some((mut href, ..)) = href(did) {
                        if let Some(ref fragment) = *fragment {
                            // `push('#')` instead of `push_str("#")` for a
                            // single character.
                            href.push('#');
                            href.push_str(fragment);
                        }
                        Some((s.clone(), href))
                    } else {
                        None
                    }
                }
                None => {
                    // No def id: only primitives reach here; build the URL to
                    // the primitive page by hand from the extern location.
                    if let Some(ref fragment) = *fragment {
                        let cache = cache();
                        let url = match cache.extern_locations.get(krate) {
                            Some(&(_, ref src, ExternalLocation::Local)) =>
                                src.to_str().expect("invalid file path"),
                            Some(&(_, _, ExternalLocation::Remote(ref s))) => s,
                            Some(&(_, _, ExternalLocation::Unknown)) | None =>
                                "https://doc.rust-lang.org/nightly",
                        };
                        // This is a primitive so the url is done "by hand".
                        let tail = fragment.find('#').unwrap_or_else(|| fragment.len());
                        Some((s.clone(),
                              format!("{}{}std/primitive.{}.html{}",
                                      url,
                                      if !url.ends_with('/') { "/" } else { "" },
                                      &fragment[..tail],
                                      &fragment[tail..])))
                    } else {
                        panic!("This isn't a primitive?!");
                    }
                }
            }
        }).collect()
    }
}
impl PartialEq for Attributes {
    fn eq(&self, rhs: &Self) -> bool {
        // `ast::Attribute` has no `PartialEq`; compare attributes by their
        // unique ids instead.
        let self_ids = self.other_attrs.iter().map(|attr| attr.id);
        let rhs_ids = rhs.other_attrs.iter().map(|attr| attr.id);
        self.doc_strings == rhs.doc_strings
            && self.cfg == rhs.cfg
            && self.span == rhs.span
            && self.links == rhs.links
            && self_ids.eq(rhs_ids)
    }
}
impl Eq for Attributes {}
impl Hash for Attributes {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.doc_strings.hash(hasher);
        self.cfg.hash(hasher);
        self.span.hash(hasher);
        self.links.hash(hasher);
        // Mirror `PartialEq`: attributes are represented by their ids.
        self.other_attrs.iter().for_each(|attr| attr.id.hash(hasher));
    }
}
// Delegates list-attribute iteration to the retained raw attributes.
impl AttributesExt for Attributes {
    fn lists<'a>(&'a self, name: Symbol) -> ListAttributesIter<'a> {
        self.other_attrs.lists(name)
    }
}
// Cleaning raw attributes parses them via `Attributes::from_ast`, reporting
// malformed `#[doc(cfg(...))]` through the session diagnostic handler.
impl Clean<Attributes> for [ast::Attribute] {
    fn clean(&self, cx: &DocContext<'_>) -> Attributes {
        Attributes::from_ast(cx.sess().diagnostic(), self)
    }
}
/// A bound in a where clause or on a generic parameter: either a trait bound
/// (with modifier, e.g. `?Sized`) or an outlives (lifetime) bound.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericBound {
    TraitBound(PolyTrait, hir::TraitBoundModifier),
    Outlives(Lifetime),
}
impl GenericBound {
    /// Constructs the `?Sized` bound.
    fn maybe_sized(cx: &DocContext<'_>) -> GenericBound {
        let sized_did = cx.tcx.require_lang_item(lang_items::SizedTraitLangItem);
        let no_substs = cx.tcx.intern_substs(&[]);
        let path = external_path(cx, &cx.tcx.item_name(sized_did).as_str(),
                                 Some(sized_did), false, vec![], no_substs);
        inline::record_extern_fqn(cx, sized_did, TypeKind::Trait);
        GenericBound::TraitBound(PolyTrait {
            trait_: ResolvedPath {
                path,
                param_names: None,
                did: sized_did,
                is_generic: false,
            },
            generic_params: Vec::new(),
        }, hir::TraitBoundModifier::Maybe)
    }
    /// Returns `true` if this is an unmodified bound on the `Sized` trait.
    fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
        use rustc::hir::TraitBoundModifier as TBM;
        match *self {
            GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) =>
                trait_.def_id() == cx.tcx.lang_items().sized_trait(),
            _ => false,
        }
    }
    /// Returns a clone of the bound's `PolyTrait`, if it is a trait bound.
    fn get_poly_trait(&self) -> Option<PolyTrait> {
        match *self {
            GenericBound::TraitBound(ref poly, _) => Some(poly.clone()),
            GenericBound::Outlives(_) => None,
        }
    }
    /// Returns a clone of the bound's trait type, if it is a trait bound.
    fn get_trait_type(&self) -> Option<Type> {
        match *self {
            GenericBound::TraitBound(PolyTrait { ref trait_, .. }, _) =>
                Some(trait_.clone()),
            GenericBound::Outlives(_) => None,
        }
    }
}
impl Clean<GenericBound> for hir::GenericBound {
    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
        match *self {
            hir::GenericBound::Trait(ref t, modifier) =>
                GenericBound::TraitBound(t.clean(cx), modifier),
            hir::GenericBound::Outlives(lt) => GenericBound::Outlives(lt.clean(cx)),
        }
    }
}
/// Converts a substitution list into cleaned generic arguments, sugaring
/// `Fn`-family trait paths (`Fn<(A, B,), C>` -> `Fn(A, B) -> C`) when
/// `trait_did` names one of the `Fn` lang traits.
fn external_generic_args(
    cx: &DocContext<'_>,
    trait_did: Option<DefId>,
    has_self: bool,
    bindings: Vec<TypeBinding>,
    substs: SubstsRef<'_>,
) -> GenericArgs {
    let mut skip_self = has_self;
    // Remembers the last type arg seen; for `Fn` traits that is the tuple of
    // inputs used by the parenthesized sugar below.
    let mut ty_sty = None;
    let args: Vec<_> = substs.iter().filter_map(|kind| match kind.unpack() {
        UnpackedKind::Lifetime(lt) => {
            lt.clean(cx).and_then(|lt| Some(GenericArg::Lifetime(lt)))
        }
        // Drop the first type argument (`Self`) when requested.
        UnpackedKind::Type(_) if skip_self => {
            skip_self = false;
            None
        }
        UnpackedKind::Type(ty) => {
            ty_sty = Some(&ty.sty);
            Some(GenericArg::Type(ty.clean(cx)))
        }
        UnpackedKind::Const(ct) => Some(GenericArg::Const(ct.clean(cx))),
    }).collect();
    match trait_did {
        // Attempt to sugar an external path like Fn<(A, B,), C> to Fn(A, B) -> C
        Some(did) if cx.tcx.lang_items().fn_trait_kind(did).is_some() => {
            assert!(ty_sty.is_some());
            let inputs = match ty_sty {
                Some(ty::Tuple(ref tys)) => tys.iter().map(|t| t.expect_ty().clean(cx)).collect(),
                _ => return GenericArgs::AngleBracketed { args, bindings },
            };
            let output = None;
            // FIXME(#20299) return type comes from a projection now
            // match types[1].sty {
            //     ty::Tuple(ref v) if v.is_empty() => None, // -> ()
            //     _ => Some(types[1].clean(cx))
            // };
            GenericArgs::Parenthesized { inputs, output }
        },
        _ => {
            GenericArgs::AngleBracketed { args, bindings }
        }
    }
}
// trait_did should be set to a trait's DefId if called on a TraitRef, in order to sugar
// from Fn<(A, B,), C> to Fn(A, B) -> C
fn external_path(cx: &DocContext<'_>, name: &str, trait_did: Option<DefId>, has_self: bool,
                 bindings: Vec<TypeBinding>, substs: SubstsRef<'_>) -> Path {
    // External paths are rendered as a single segment carrying all the
    // generic arguments; resolution info is not needed, hence `Res::Err`.
    let segment = PathSegment {
        name: name.to_string(),
        args: external_generic_args(cx, trait_did, has_self, bindings, substs),
    };
    Path {
        global: false,
        res: Res::Err,
        segments: vec![segment],
    }
}
/// Cleans a `ty::TraitRef` plus associated-type bindings into a trait bound,
/// collecting any late-bound regions appearing in the trait's inputs as
/// `for<'a>` generic params on the resulting `PolyTrait`.
impl<'a, 'tcx> Clean<GenericBound> for (&'a ty::TraitRef<'tcx>, Vec<TypeBinding>) {
    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
        let (trait_ref, ref bounds) = *self;
        inline::record_extern_fqn(cx, trait_ref.def_id, TypeKind::Trait);
        let path = external_path(cx, &cx.tcx.item_name(trait_ref.def_id).as_str(),
                                 Some(trait_ref.def_id), true, bounds.clone(), trait_ref.substs);
        debug!("ty::TraitRef\n  subst: {:?}\n", trait_ref.substs);
        // collect any late bound regions
        let mut late_bounds = vec![];
        // `skip(1)` skips the `Self` type; only tuples of references are
        // inspected (the `Fn`-trait input shape).
        for ty_s in trait_ref.input_types().skip(1) {
            if let ty::Tuple(ts) = ty_s.sty {
                for &ty_s in ts {
                    if let ty::Ref(ref reg, _, _) = ty_s.expect_ty().sty {
                        if let &ty::RegionKind::ReLateBound(..) = *reg {
                            debug!("  hit an ReLateBound {:?}", reg);
                            if let Some(Lifetime(name)) = reg.clean(cx) {
                                late_bounds.push(GenericParamDef {
                                    name,
                                    kind: GenericParamDefKind::Lifetime,
                                });
                            }
                        }
                    }
                }
            }
        }
        GenericBound::TraitBound(
            PolyTrait {
                trait_: ResolvedPath {
                    path,
                    param_names: None,
                    did: trait_ref.def_id,
                    is_generic: false,
                },
                generic_params: late_bounds,
            },
            hir::TraitBoundModifier::None
        )
    }
}
impl<'tcx> Clean<GenericBound> for ty::TraitRef<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
        // Delegate to the `(TraitRef, bindings)` impl with no bindings.
        (self, Vec::new()).clean(cx)
    }
}
// Turns a substitution list into outlives bounds (from its regions) followed
// by unmodified trait bounds (from its types); `None` when empty.
impl<'tcx> Clean<Option<Vec<GenericBound>>> for InternalSubsts<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<Vec<GenericBound>> {
        let mut bounds = Vec::new();
        for region in self.regions() {
            if let Some(lt) = region.clean(cx) {
                bounds.push(GenericBound::Outlives(lt));
            }
        }
        for ty in self.types() {
            bounds.push(GenericBound::TraitBound(PolyTrait {
                trait_: ty.clean(cx),
                generic_params: Vec::new(),
            }, hir::TraitBoundModifier::None));
        }
        if bounds.is_empty() { None } else { Some(bounds) }
    }
}
/// A lifetime, stored as its textual representation (e.g. `'a`, `'static`).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Lifetime(String);
impl Lifetime {
    /// Borrows the lifetime's textual representation.
    pub fn get_ref<'a>(&'a self) -> &'a str {
        &self.0
    }
    /// The `'static` lifetime.
    pub fn statik() -> Lifetime {
        Lifetime("'static".to_owned())
    }
}
impl Clean<Lifetime> for hir::Lifetime {
    fn clean(&self, cx: &DocContext<'_>) -> Lifetime {
        if self.hir_id != hir::DUMMY_HIR_ID {
            let def = cx.tcx.named_region(self.hir_id);
            match def {
                Some(rl::Region::EarlyBound(_, node_id, _)) |
                Some(rl::Region::LateBound(_, node_id, _)) |
                Some(rl::Region::Free(_, node_id)) => {
                    // Honor any lifetime substitution registered for this
                    // region (used when inlining with concrete lifetimes).
                    if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() {
                        return lt;
                    }
                }
                _ => {}
            }
        }
        // Fall back to the lifetime's own name.
        Lifetime(self.name.ident().to_string())
    }
}
// Renders a lifetime generic param, folding any outlives bounds into the
// textual form (`'a: 'b + 'c`). Panics if called on a non-lifetime param or
// if a lifetime param carries a non-outlives bound.
impl Clean<Lifetime> for hir::GenericParam {
    fn clean(&self, _: &DocContext<'_>) -> Lifetime {
        match self.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                if self.bounds.len() > 0 {
                    let mut bounds = self.bounds.iter().map(|bound| match bound {
                        hir::GenericBound::Outlives(lt) => lt,
                        _ => panic!(),
                    });
                    // The first bound follows the `:`; the rest are joined
                    // with ` + `.
                    let name = bounds.next().expect("no more bounds").name.ident();
                    let mut s = format!("{}: {}", self.name.ident(), name);
                    for bound in bounds {
                        s.push_str(&format!(" + {}", bound.name.ident()));
                    }
                    Lifetime(s)
                } else {
                    Lifetime(self.name.ident().to_string())
                }
            }
            _ => panic!(),
        }
    }
}
// Cleans a const generic argument: its type comes from the const body's
// owner, its expression from pretty-printing that body.
impl Clean<Constant> for hir::ConstArg {
    fn clean(&self, cx: &DocContext<'_>) -> Constant {
        Constant {
            type_: cx.tcx.type_of(cx.tcx.hir().body_owner_def_id(self.value.body)).clean(cx),
            expr: print_const_expr(cx, self.value.body),
        }
    }
}
// A `ty`-level generic param def cleans to a lifetime named after it.
impl<'tcx> Clean<Lifetime> for ty::GenericParamDef {
    fn clean(&self, _cx: &DocContext<'_>) -> Lifetime {
        Lifetime(self.name.to_string())
    }
}
// Only named regions (`'static`, named late-bound, early-bound) have a
// printable lifetime; every other region kind cleans to `None`.
impl Clean<Option<Lifetime>> for ty::RegionKind {
    fn clean(&self, cx: &DocContext<'_>) -> Option<Lifetime> {
        match *self {
            ty::ReStatic => Some(Lifetime::statik()),
            ty::ReLateBound(_, ty::BrNamed(_, name)) => Some(Lifetime(name.to_string())),
            ty::ReEarlyBound(ref data) => Some(Lifetime(data.name.clean(cx))),
            ty::ReLateBound(..) |
            ty::ReFree(..) |
            ty::ReScope(..) |
            ty::ReVar(..) |
            ty::RePlaceholder(..) |
            ty::ReEmpty |
            ty::ReClosureBound(_) |
            ty::ReErased => {
                debug!("Cannot clean region {:?}", self);
                None
            }
        }
    }
}
/// A single predicate of a `where` clause: a bound on a type, a bound on a
/// lifetime, or an associated-type equality.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum WherePredicate {
    BoundPredicate { ty: Type, bounds: Vec<GenericBound> },
    RegionPredicate { lifetime: Lifetime, bounds: Vec<GenericBound> },
    EqPredicate { lhs: Type, rhs: Type },
}
impl WherePredicate {
    /// Returns the predicate's bounds; `None` for equality predicates,
    /// which have none.
    pub fn get_bounds(&self) -> Option<&[GenericBound]> {
        match *self {
            WherePredicate::BoundPredicate { ref bounds, .. } |
            WherePredicate::RegionPredicate { ref bounds, .. } => Some(bounds),
            WherePredicate::EqPredicate { .. } => None,
        }
    }
}
impl Clean<WherePredicate> for hir::WherePredicate {
    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
        match *self {
            hir::WherePredicate::BoundPredicate(ref pred) =>
                WherePredicate::BoundPredicate {
                    ty: pred.bounded_ty.clean(cx),
                    bounds: pred.bounds.clean(cx),
                },
            hir::WherePredicate::RegionPredicate(ref pred) =>
                WherePredicate::RegionPredicate {
                    lifetime: pred.lifetime.clean(cx),
                    bounds: pred.bounds.clean(cx),
                },
            hir::WherePredicate::EqPredicate(ref pred) =>
                WherePredicate::EqPredicate {
                    lhs: pred.lhs_ty.clean(cx),
                    rhs: pred.rhs_ty.clean(cx),
                },
        }
    }
}
// The compiler-internal predicate kinds (well-formedness, object safety,
// closure kind, const-evaluatable) cannot appear in user-written code, so
// hitting them here is a bug.
impl<'a> Clean<Option<WherePredicate>> for ty::Predicate<'a> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
        use rustc::ty::Predicate;
        match *self {
            Predicate::Trait(ref pred) => Some(pred.clean(cx)),
            Predicate::Subtype(ref pred) => Some(pred.clean(cx)),
            Predicate::RegionOutlives(ref pred) => pred.clean(cx),
            Predicate::TypeOutlives(ref pred) => pred.clean(cx),
            Predicate::Projection(ref pred) => Some(pred.clean(cx)),
            Predicate::WellFormed(..) |
            Predicate::ObjectSafe(..) |
            Predicate::ClosureKind(..) |
            Predicate::ConstEvaluatable(..) => panic!("not user writable"),
        }
    }
}
/// A `T: Trait` predicate becomes a bound predicate on the self type.
impl<'a> Clean<WherePredicate> for ty::TraitPredicate<'a> {
    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
        WherePredicate::BoundPredicate {
            ty: self.trait_ref.self_ty().clean(cx),
            bounds: vec![self.trait_ref.clean(cx)]
        }
    }
}
impl<'tcx> Clean<WherePredicate> for ty::SubtypePredicate<'tcx> {
    /// Subtype predicates have no source syntax; reaching this impl is a bug
    /// in the caller.
    fn clean(&self, _cx: &DocContext<'_>) -> WherePredicate {
        panic!("subtype predicates are an internal rustc artifact \
                and should not be seen by rustdoc")
    }
}
impl<'tcx> Clean<Option<WherePredicate>> for
    ty::OutlivesPredicate<ty::Region<'tcx>,ty::Region<'tcx>> {
    /// Cleans a region-outlives predicate (`'a: 'b`) into a region predicate.
    /// A predicate between two empty regions is a compiler artifact with
    /// nothing to document and is dropped.
    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
        let ty::OutlivesPredicate(ref a, ref b) = *self;
        // Idiomatic replacement for the old `match (a, b) { ... _ => {} }`.
        if let (ty::ReEmpty, ty::ReEmpty) = (a, b) {
            return None;
        }
        Some(WherePredicate::RegionPredicate {
            lifetime: a.clean(cx).expect("failed to clean lifetime"),
            bounds: vec![GenericBound::Outlives(b.clean(cx).expect("failed to clean bounds"))]
        })
    }
}
impl<'tcx> Clean<Option<WherePredicate>> for ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>> {
    /// Cleans a type-outlives predicate (`T: 'a`). Predicates over the empty
    /// region are internal artifacts and are not rendered.
    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
        let ty::OutlivesPredicate(ref ty, ref lt) = *self;
        // Idiomatic replacement for `match lt { ReEmpty => return None, _ => {} }`.
        if let ty::ReEmpty = lt {
            return None;
        }
        Some(WherePredicate::BoundPredicate {
            ty: ty.clean(cx),
            bounds: vec![GenericBound::Outlives(lt.clean(cx).expect("failed to clean lifetimes"))]
        })
    }
}
/// A projection predicate (`<T as Trait>::Out == U`) becomes an equality
/// predicate between the projection and its resolved type.
impl<'tcx> Clean<WherePredicate> for ty::ProjectionPredicate<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
        WherePredicate::EqPredicate {
            lhs: self.projection_ty.clean(cx),
            rhs: self.ty.clean(cx)
        }
    }
}
/// Cleans an associated-type projection into a `Type::QPath`
/// (`<Self as Trait>::Name`).
impl<'tcx> Clean<Type> for ty::ProjectionTy<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        // A projection's trait ref must clean to a trait bound, never a lifetime.
        let trait_ = match self.trait_ref(cx.tcx).clean(cx) {
            GenericBound::TraitBound(t, _) => t.trait_,
            GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
        };
        Type::QPath {
            name: cx.tcx.associated_item(self.item_def_id).ident.name.clean(cx),
            self_type: box self.self_ty().clean(cx),
            trait_: box trait_
        }
    }
}
/// The kind-specific data of a generic parameter.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericParamDefKind {
    Lifetime,
    Type {
        did: DefId,
        // May be filled in later from cleaned where-clauses.
        bounds: Vec<GenericBound>,
        default: Option<Type>,
        // `Some` for compiler-generated params (e.g. from `impl Trait`).
        synthetic: Option<hir::SyntheticTyParamKind>,
    },
    Const {
        did: DefId,
        ty: Type,
    },
}
impl GenericParamDefKind {
    /// Returns `true` only for type parameters.
    pub fn is_type(&self) -> bool {
        match *self {
            GenericParamDefKind::Type { .. } => true,
            _ => false,
        }
    }
    /// Returns the cleaned type associated with this parameter, if any:
    /// the declared type for type params, the const's type for const params.
    pub fn get_type(&self, cx: &DocContext<'_>) -> Option<Type> {
        match *self {
            GenericParamDefKind::Type { did, .. } => {
                rustc_typeck::checked_type_of(cx.tcx, did, false).map(|t| t.clean(cx))
            }
            GenericParamDefKind::Const { ref ty, .. } => Some(ty.clone()),
            GenericParamDefKind::Lifetime => None,
        }
    }
}
/// A generic parameter as displayed by rustdoc: a name plus kind-specific data.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct GenericParamDef {
    pub name: String,
    pub kind: GenericParamDefKind,
}
impl GenericParamDef {
    /// `true` for type parameters the compiler invented (e.g. `impl Trait`).
    pub fn is_synthetic_type_param(&self) -> bool {
        match self.kind {
            GenericParamDefKind::Lifetime |
            GenericParamDefKind::Const { .. } => false,
            GenericParamDefKind::Type { ref synthetic, .. } => synthetic.is_some(),
        }
    }
    // Delegates to the kind.
    pub fn is_type(&self) -> bool {
        self.kind.is_type()
    }
    // Delegates to the kind.
    pub fn get_type(&self, cx: &DocContext<'_>) -> Option<Type> {
        self.kind.get_type(cx)
    }
    /// Returns the declared bounds of a type parameter; `None` for other kinds.
    pub fn get_bounds(&self) -> Option<&[GenericBound]> {
        match self.kind {
            GenericParamDefKind::Type { ref bounds, .. } => Some(bounds),
            _ => None,
        }
    }
}
impl Clean<GenericParamDef> for ty::GenericParamDef {
    /// Cleans a middle-IR generic parameter definition. Bounds on type
    /// parameters are left empty here and filled in later from where-clauses.
    fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
        let (name, kind) = match self.kind {
            ty::GenericParamDefKind::Lifetime => {
                (self.name.to_string(), GenericParamDefKind::Lifetime)
            }
            ty::GenericParamDefKind::Type { has_default, .. } => {
                // Record the name so cross-crate rendering can refer to this param.
                cx.renderinfo.borrow_mut().external_param_names
                    .insert(self.def_id, self.name.clean(cx));
                let default = if has_default {
                    Some(cx.tcx.type_of(self.def_id).clean(cx))
                } else {
                    None
                };
                (self.name.clean(cx), GenericParamDefKind::Type {
                    did: self.def_id,
                    bounds: vec![], // These are filled in from the where-clauses.
                    default,
                    synthetic: None,
                })
            }
            ty::GenericParamDefKind::Const { .. } => {
                (self.name.clean(cx), GenericParamDefKind::Const {
                    did: self.def_id,
                    ty: cx.tcx.type_of(self.def_id).clean(cx),
                })
            }
        };
        GenericParamDef {
            name,
            kind,
        }
    }
}
impl Clean<GenericParamDef> for hir::GenericParam {
    /// Cleans an HIR generic parameter (lifetime, type, or const).
    ///
    /// Lifetime parameters with bounds (e.g. `'a: 'b + 'c`) fold the bounds
    /// into the displayed name, since `Lifetime` carries no bound list.
    fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
        let (name, kind) = match self.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                let name = if !self.bounds.is_empty() {
                    // Lifetime params may only be bounded by other lifetimes.
                    let mut bounds = self.bounds.iter().map(|bound| match bound {
                        hir::GenericBound::Outlives(lt) => lt,
                        _ => panic!(),
                    });
                    let name = bounds.next().expect("no more bounds").name.ident();
                    let mut s = format!("{}: {}", self.name.ident(), name);
                    for bound in bounds {
                        s.push_str(&format!(" + {}", bound.name.ident()));
                    }
                    s
                } else {
                    self.name.ident().to_string()
                };
                (name, GenericParamDefKind::Lifetime)
            }
            hir::GenericParamKind::Type { ref default, synthetic } => {
                (self.name.ident().name.clean(cx), GenericParamDefKind::Type {
                    did: cx.tcx.hir().local_def_id_from_hir_id(self.hir_id),
                    bounds: self.bounds.clean(cx),
                    default: default.clean(cx),
                    synthetic,
                })
            }
            hir::GenericParamKind::Const { ref ty } => {
                (self.name.ident().name.clean(cx), GenericParamDefKind::Const {
                    did: cx.tcx.hir().local_def_id_from_hir_id(self.hir_id),
                    ty: ty.clean(cx),
                })
            }
        };
        GenericParamDef {
            name,
            kind,
        }
    }
}
// maybe use a Generic enum and use Vec<Generic>?
/// The generic parameters and `where` clauses of an item, as rendered.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)]
pub struct Generics {
    pub params: Vec<GenericParamDef>,
    pub where_predicates: Vec<WherePredicate>,
}
/// Cleans HIR generics. Handles two quirks: synthetic (`impl Trait`)
/// parameters must be registered before normal ones can refer to them, and
/// `?Sized` bounds duplicated between params and where-clauses are merged.
impl Clean<Generics> for hir::Generics {
    fn clean(&self, cx: &DocContext<'_>) -> Generics {
        // Synthetic type-parameters are inserted after normal ones.
        // In order for normal parameters to be able to refer to synthetic ones,
        // scans them first.
        fn is_impl_trait(param: &hir::GenericParam) -> bool {
            match param.kind {
                hir::GenericParamKind::Type { synthetic, .. } => {
                    synthetic == Some(hir::SyntheticTyParamKind::ImplTrait)
                }
                _ => false,
            }
        }
        let impl_trait_params = self.params
            .iter()
            .filter(|param| is_impl_trait(param))
            .map(|param| {
                let param: GenericParamDef = param.clean(cx);
                // Side effect: record the synthetic param's bounds so later
                // `impl Trait` type lookups can find them.
                match param.kind {
                    GenericParamDefKind::Lifetime => unreachable!(),
                    GenericParamDefKind::Type { did, ref bounds, .. } => {
                        cx.impl_trait_bounds.borrow_mut().insert(did, bounds.clone());
                    }
                    GenericParamDefKind::Const { .. } => unreachable!(),
                }
                param
            })
            .collect::<Vec<_>>();
        let mut params = Vec::with_capacity(self.params.len());
        for p in self.params.iter().filter(|p| !is_impl_trait(p)) {
            let p = p.clean(cx);
            params.push(p);
        }
        params.extend(impl_trait_params);
        let mut generics = Generics {
            params,
            where_predicates: self.where_clause.predicates.clean(cx),
        };
        // Some duplicates are generated for ?Sized bounds between type params and where
        // predicates. The point in here is to move the bounds definitions from type params
        // to where predicates when such cases occur.
        for where_pred in &mut generics.where_predicates {
            match *where_pred {
                WherePredicate::BoundPredicate { ty: Generic(ref name), ref mut bounds } => {
                    if bounds.is_empty() {
                        for param in &mut generics.params {
                            match param.kind {
                                GenericParamDefKind::Lifetime => {}
                                GenericParamDefKind::Type { bounds: ref mut ty_bounds, .. } => {
                                    if &param.name == name {
                                        mem::swap(bounds, ty_bounds);
                                        break
                                    }
                                }
                                GenericParamDefKind::Const { .. } => {}
                            }
                        }
                    }
                }
                _ => continue,
            }
        }
        generics
    }
}
/// Cleans middle-IR generics plus their predicates (used for cross-crate
/// items, where the HIR is unavailable). Deduplicates bounds that appear both
/// on the parameter and in the predicate list, and reconstructs implicit
/// `Sized`/`?Sized` bounds.
impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics,
                                    &'a &'tcx ty::GenericPredicates<'tcx>) {
    fn clean(&self, cx: &DocContext<'_>) -> Generics {
        use self::WherePredicate as WP;
        let (gens, preds) = *self;
        // Bounds in the type_params and lifetimes fields are repeated in the
        // predicates field (see rustc_typeck::collect::ty_generics), so remove
        // them.
        let stripped_typarams = gens.params.iter().filter_map(|param| match param.kind {
            ty::GenericParamDefKind::Lifetime => None,
            ty::GenericParamDefKind::Type { .. } => {
                // The implicit `Self` parameter is never shown.
                if param.name.as_symbol() == kw::SelfUpper {
                    assert_eq!(param.index, 0);
                    return None;
                }
                Some(param.clean(cx))
            }
            ty::GenericParamDefKind::Const { .. } => None,
        }).collect::<Vec<GenericParamDef>>();
        let mut where_predicates = preds.predicates.iter()
            .flat_map(|(p, _)| p.clean(cx))
            .collect::<Vec<_>>();
        // Type parameters and have a Sized bound by default unless removed with
        // ?Sized. Scan through the predicates and mark any type parameter with
        // a Sized bound, removing the bounds as we find them.
        //
        // Note that associated types also have a sized bound by default, but we
        // don't actually know the set of associated types right here so that's
        // handled in cleaning associated types
        let mut sized_params = FxHashSet::default();
        where_predicates.retain(|pred| {
            match *pred {
                WP::BoundPredicate { ty: Generic(ref g), ref bounds } => {
                    if bounds.iter().any(|b| b.is_sized_bound(cx)) {
                        sized_params.insert(g.clone());
                        false
                    } else {
                        true
                    }
                }
                _ => true,
            }
        });
        // Run through the type parameters again and insert a ?Sized
        // unbound for any we didn't find to be Sized.
        for tp in &stripped_typarams {
            if !sized_params.contains(&tp.name) {
                where_predicates.push(WP::BoundPredicate {
                    ty: Type::Generic(tp.name.clone()),
                    bounds: vec![GenericBound::maybe_sized(cx)],
                })
            }
        }
        // It would be nice to collect all of the bounds on a type and recombine
        // them if possible, to avoid e.g., `where T: Foo, T: Bar, T: Sized, T: 'a`
        // and instead see `where T: Foo + Bar + Sized + 'a`
        Generics {
            params: gens.params
                        .iter()
                        .flat_map(|param| match param.kind {
                            ty::GenericParamDefKind::Lifetime => Some(param.clean(cx)),
                            ty::GenericParamDefKind::Type { .. } => None,
                            ty::GenericParamDefKind::Const { .. } => Some(param.clean(cx)),
                        }).chain(simplify::ty_params(stripped_typarams).into_iter())
                        .collect(),
            where_predicates: simplify::where_clauses(cx, where_predicates),
        }
    }
}
/// The point of this function is to replace bounds with types.
///
/// i.e. `[T, U]` when you have the following bounds: `T: Display, U: Option<T>` will return
/// `[Display, Option]` (we just return the list of the types, we don't care about the
/// wrapped types in here).
fn get_real_types(
    generics: &Generics,
    arg: &Type,
    cx: &DocContext<'_>,
    recurse: i32,
) -> FxHashSet<Type> {
    let arg_s = arg.to_string();
    let mut res = FxHashSet::default();
    if recurse >= 10 { // FIXME: remove this whole recurse thing when the recursion bug is fixed
        return res;
    }
    if arg.is_full_generic() {
        // `arg` is a bare type parameter: substitute the traits bounding it,
        // looking first at the where-clauses, then at the parameter list.
        if let Some(where_pred) = generics.where_predicates.iter().find(|g| {
            match g {
                &WherePredicate::BoundPredicate { ref ty, .. } => ty.def_id() == arg.def_id(),
                _ => false,
            }
        }) {
            let bounds = where_pred.get_bounds().unwrap_or(&[]);
            for bound in bounds.iter() {
                match *bound {
                    GenericBound::TraitBound(ref poly_trait, _) => {
                        for x in poly_trait.generic_params.iter() {
                            if !x.is_type() {
                                continue
                            }
                            if let Some(ty) = x.get_type(cx) {
                                let adds = get_real_types(generics, &ty, cx, recurse + 1);
                                if !adds.is_empty() {
                                    res.extend(adds);
                                } else if !ty.is_full_generic() {
                                    // An unresolvable generic is useless for the
                                    // search index; keep concrete types only.
                                    res.insert(ty);
                                }
                            }
                        }
                    }
                    _ => {}
                }
            }
        }
        if let Some(param) = generics.params.iter().find(|g| {
            g.is_type() && g.name == arg_s
        }) {
            for bound in param.get_bounds().unwrap_or(&[]) {
                if let Some(ty) = bound.get_trait_type() {
                    let adds = get_real_types(generics, &ty, cx, recurse + 1);
                    if !adds.is_empty() {
                        res.extend(adds);
                    } else if !ty.is_full_generic() {
                        res.insert(ty.clone());
                    }
                }
            }
        }
    } else {
        // Concrete type: keep it, and recurse into its generic arguments.
        res.insert(arg.clone());
        if let Some(gens) = arg.generics() {
            for gen in gens.iter() {
                if gen.is_full_generic() {
                    let adds = get_real_types(generics, gen, cx, recurse + 1);
                    if !adds.is_empty() {
                        res.extend(adds);
                    }
                } else {
                    res.insert(gen.clone());
                }
            }
        }
    }
    res
}
/// Return the full list of types when bounds have been resolved.
///
/// i.e. `fn foo<A: Display, B: Option<A>>(x: u32, y: B)` will return
/// `[u32, Display, Option]`.
pub fn get_all_types(
    generics: &Generics,
    decl: &FnDecl,
    cx: &DocContext<'_>,
) -> (Vec<Type>, Vec<Type>) {
    // Collect the resolved types of every non-`self` argument.
    let mut all_types = FxHashSet::default();
    for arg in &decl.inputs.values {
        if arg.type_.is_self_type() {
            continue;
        }
        let mut resolved = get_real_types(generics, &arg.type_, cx, 0);
        if resolved.is_empty() {
            // Nothing resolved; fall back to the argument type itself.
            resolved.insert(arg.type_.clone());
        }
        all_types.extend(resolved);
    }
    // Same treatment for the return type, when one is written.
    let ret_types = if let FunctionRetTy::Return(ref return_type) = decl.output {
        let mut resolved = get_real_types(generics, return_type, cx, 0);
        if resolved.is_empty() {
            resolved.insert(return_type.clone());
        }
        resolved.into_iter().collect()
    } else {
        Vec::new()
    };
    (all_types.into_iter().collect(), ret_types)
}
/// A method with a body (provided trait method or impl method).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Method {
    pub generics: Generics,
    pub decl: FnDecl,
    pub header: hir::FnHeader,
    pub defaultness: Option<hir::Defaultness>,
    // Precomputed type lists used by the search index.
    pub all_types: Vec<Type>,
    pub ret_types: Vec<Type>,
}
/// Cleans a method from its HIR parts: (signature, generics, body, defaultness).
impl<'a> Clean<Method> for (&'a hir::MethodSig, &'a hir::Generics, hir::BodyId,
                            Option<hir::Defaultness>) {
    fn clean(&self, cx: &DocContext<'_>) -> Method {
        // Generics and decl must be cleaned inside `enter_impl_trait` so that
        // `impl Trait` params registered by the generics are visible to the decl.
        let (generics, decl) = enter_impl_trait(cx, || {
            (self.1.clean(cx), (&*self.0.decl, self.2).clean(cx))
        });
        let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
        Method {
            decl,
            generics,
            header: self.0.header,
            defaultness: self.3,
            all_types,
            ret_types,
        }
    }
}
/// A required (bodyless) trait method.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TyMethod {
    pub header: hir::FnHeader,
    pub decl: FnDecl,
    pub generics: Generics,
    // Precomputed type lists used by the search index.
    pub all_types: Vec<Type>,
    pub ret_types: Vec<Type>,
}
/// A free function.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Function {
    pub decl: FnDecl,
    pub generics: Generics,
    pub header: hir::FnHeader,
    // Precomputed type lists used by the search index.
    pub all_types: Vec<Type>,
    pub ret_types: Vec<Type>,
}
impl Clean<Item> for doctree::Function {
    /// Cleans a free function into an `Item`, recomputing constness so that
    /// only `min_const_fn`-eligible functions are documented as `const`.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let (generics, decl) = enter_impl_trait(cx, || {
            (self.generics.clean(cx), (&self.decl, self.body).clean(cx))
        });
        let did = cx.tcx.hir().local_def_id_from_hir_id(self.id);
        let constness = if cx.tcx.is_min_const_fn(did) {
            hir::Constness::Const
        } else {
            hir::Constness::NotConst
        };
        let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: did,
            inner: FunctionItem(Function {
                decl,
                generics,
                // Keep the declared header but override constness (see above).
                header: hir::FnHeader { constness, ..self.header },
                all_types,
                ret_types,
            }),
        }
    }
}
/// A function signature: arguments, return type, and attributes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct FnDecl {
    pub inputs: Arguments,
    pub output: FunctionRetTy,
    pub attrs: Attributes,
}
impl FnDecl {
    /// Returns the receiver kind if the first argument is some form of `self`.
    pub fn self_type(&self) -> Option<SelfTy> {
        self.inputs.values.get(0).and_then(|v| v.to_self())
    }
    /// Returns the sugared return type for an async function.
    ///
    /// For example, if the return type is `impl std::future::Future<Output = i32>`, this function
    /// will return `i32`.
    ///
    /// # Panics
    ///
    /// This function will panic if the return type does not match the expected sugaring for async
    /// functions.
    pub fn sugared_async_return_type(&self) -> FunctionRetTy {
        match &self.output {
            FunctionRetTy::Return(Type::ImplTrait(bounds)) => {
                match &bounds[0] {
                    GenericBound::TraitBound(PolyTrait { trait_, .. }, ..) => {
                        // The `Output = T` binding is the sugared return type.
                        let bindings = trait_.bindings().unwrap();
                        FunctionRetTy::Return(bindings[0].ty().clone())
                    }
                    _ => panic!("unexpected desugaring of async function"),
                }
            }
            _ => panic!("unexpected desugaring of async function"),
        }
    }
}
/// The argument list of a function signature.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Arguments {
    pub values: Vec<Argument>,
}
/// Cleans argument types paired with their declared names (used when no body
/// is available, e.g. required trait methods).
impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
    fn clean(&self, cx: &DocContext<'_>) -> Arguments {
        Arguments {
            values: self.0.iter().enumerate().map(|(i, ty)| {
                // Fall back to `_` when the name is missing or empty
                // (previously: eager `unwrap_or(String::new())` plus mutation).
                let name = self.1.get(i)
                    .map(|ident| ident.to_string())
                    .filter(|name| !name.is_empty())
                    .unwrap_or_else(|| "_".to_string());
                Argument {
                    name,
                    type_: ty.clean(cx),
                }
            }).collect()
        }
    }
}
/// Cleans argument types paired with a function body: argument names are
/// recovered from the body's parameter patterns.
impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
    fn clean(&self, cx: &DocContext<'_>) -> Arguments {
        let body = cx.tcx.hir().body(self.1);
        Arguments {
            values: self.0.iter().enumerate().map(|(i, ty)| {
                Argument {
                    name: name_from_pat(&body.arguments[i].pat),
                    type_: ty.clean(cx),
                }
            }).collect()
        }
    }
}
/// Cleans an HIR function declaration; `A` selects the name source
/// (ident slice or body id) via the `Arguments` impls above.
impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
    where (&'a [hir::Ty], A): Clean<Arguments>
{
    fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
        FnDecl {
            inputs: (&self.0.inputs[..], self.1).clean(cx),
            output: self.0.output.clean(cx),
            attrs: Attributes::default(),
        }
    }
}
/// Cleans a function signature from the type system (used for cross-crate
/// functions, where no HIR declaration exists).
impl<'a, 'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
    fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
        let (did, sig) = *self;
        // Local items go through the HIR path instead, so no names are
        // recorded for them here; cross-crate names come from metadata.
        let mut names = if cx.tcx.hir().as_local_hir_id(did).is_some() {
            vec![].into_iter()
        } else {
            cx.tcx.fn_arg_names(did).into_iter()
        };
        FnDecl {
            output: Return(sig.skip_binder().output().clean(cx)),
            attrs: Attributes::default(),
            inputs: Arguments {
                values: sig.skip_binder().inputs().iter().map(|t| {
                    Argument {
                        type_: t.clean(cx),
                        name: names.next().map_or(String::new(), |name| name.to_string()),
                    }
                }).collect(),
            },
        }
    }
}
/// A single function argument: its type and display name.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Argument {
    pub type_: Type,
    pub name: String,
}
/// The different shapes a method receiver can take.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
pub enum SelfTy {
    /// `self`
    SelfValue,
    /// `&self` / `&mut self` / `&'a self`
    SelfBorrowed(Option<Lifetime>, Mutability),
    /// `self: SomeType`
    SelfExplicit(Type),
}
impl Argument {
    /// Interprets this argument as a method receiver, if its name is `self`.
    pub fn to_self(&self) -> Option<SelfTy> {
        if self.name != "self" {
            return None;
        }
        let self_ty = match self.type_ {
            _ if self.type_.is_self_type() => SelfValue,
            BorrowedRef { ref lifetime, mutability, ref type_ } if type_.is_self_type() => {
                SelfBorrowed(lifetime.clone(), mutability)
            }
            _ => SelfExplicit(self.type_.clone()),
        };
        Some(self_ty)
    }
}
/// A function's return type: either a written type or the implicit unit return.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum FunctionRetTy {
    Return(Type),
    DefaultReturn,
}
impl Clean<FunctionRetTy> for hir::FunctionRetTy {
    fn clean(&self, cx: &DocContext<'_>) -> FunctionRetTy {
        match *self {
            hir::Return(ref typ) => Return(typ.clean(cx)),
            hir::DefaultReturn(..) => DefaultReturn,
        }
    }
}
impl GetDefId for FunctionRetTy {
    /// Delegates to the returned type; the implicit unit return has no `DefId`.
    fn def_id(&self) -> Option<DefId> {
        match *self {
            Return(ref ty) => ty.def_id(),
            DefaultReturn => None,
        }
    }
}
/// A trait definition with its items and bounds.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Trait {
    // NOTE(review): `auto` and `is_auto` appear to carry the same flag (both are
    // set from `doctree::Trait::is_auto` below) — candidates for merging; confirm
    // no consumer distinguishes them before removing either.
    pub auto: bool,
    pub unsafety: hir::Unsafety,
    pub items: Vec<Item>,
    pub generics: Generics,
    pub bounds: Vec<GenericBound>,
    // Set from the `#[doc(spotlight)]` attribute.
    pub is_spotlight: bool,
    pub is_auto: bool,
}
impl Clean<Item> for doctree::Trait {
    /// Cleans a trait definition into an `Item`.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let attrs = self.attrs.clean(cx);
        let is_spotlight = attrs.has_doc_flag(sym::spotlight);
        // `auto` and `is_auto` mirror the same flag; compute it once
        // (previously `self.is_auto.clean(cx)` was evaluated twice).
        let is_auto = self.is_auto.clean(cx);
        Item {
            name: Some(self.name.clean(cx)),
            attrs,
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: TraitItem(Trait {
                auto: is_auto,
                unsafety: self.unsafety,
                items: self.items.clean(cx),
                generics: self.generics.clean(cx),
                bounds: self.bounds.clean(cx),
                is_spotlight,
                is_auto,
            }),
        }
    }
}
/// A trait alias (`trait Foo = Bar + Baz;`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TraitAlias {
    pub generics: Generics,
    pub bounds: Vec<GenericBound>,
}
impl Clean<Item> for doctree::TraitAlias {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let attrs = self.attrs.clean(cx);
        Item {
            name: Some(self.name.clean(cx)),
            attrs,
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: TraitAliasItem(TraitAlias {
                generics: self.generics.clean(cx),
                bounds: self.bounds.clean(cx),
            }),
        }
    }
}
/// Flattens the HIR auto-trait marker into a plain `bool`.
impl Clean<bool> for hir::IsAuto {
    fn clean(&self, _: &DocContext<'_>) -> bool {
        match *self {
            hir::IsAuto::Yes => true,
            hir::IsAuto::No => false,
        }
    }
}
/// Resolves an HIR trait reference's path into a cleaned `Type`.
impl Clean<Type> for hir::TraitRef {
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        resolve_type(cx, self.path.clean(cx), self.hir_ref_id)
    }
}
/// Cleans a possibly higher-ranked trait reference (`for<'a> Trait<'a>`).
impl Clean<PolyTrait> for hir::PolyTraitRef {
    fn clean(&self, cx: &DocContext<'_>) -> PolyTrait {
        PolyTrait {
            trait_: self.trait_ref.clean(cx),
            generic_params: self.bound_generic_params.clean(cx)
        }
    }
}
impl Clean<Item> for hir::TraitItem {
    /// Cleans a trait item: associated const, provided/required method, or
    /// associated type.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.node {
            hir::TraitItemKind::Const(ref ty, default) => {
                AssocConstItem(ty.clean(cx),
                               default.map(|e| print_const_expr(cx, e)))
            }
            // Provided method: has a body, cleaned like any other method.
            hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => {
                MethodItem((sig, &self.generics, body, None).clean(cx))
            }
            // Required method: no body, argument names come from the idents.
            hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref names)) => {
                let (generics, decl) = enter_impl_trait(cx, || {
                    (self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
                });
                let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
                TyMethodItem(TyMethod {
                    header: sig.header,
                    decl,
                    generics,
                    all_types,
                    ret_types,
                })
            }
            hir::TraitItemKind::Type(ref bounds, ref default) => {
                AssocTypeItem(bounds.clean(cx), default.clean(cx))
            }
        };
        let local_did = cx.tcx.hir().local_def_id_from_hir_id(self.hir_id);
        Item {
            name: Some(self.ident.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            def_id: local_did,
            // Trait items inherit the trait's visibility.
            visibility: None,
            stability: get_stability(cx, local_did),
            deprecation: get_deprecation(cx, local_did),
            inner,
        }
    }
}
impl Clean<Item> for hir::ImplItem {
    /// Cleans an impl item: associated const, method, associated type, or
    /// existential (opaque) type.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.node {
            hir::ImplItemKind::Const(ref ty, expr) => {
                AssocConstItem(ty.clean(cx),
                               Some(print_const_expr(cx, expr)))
            }
            hir::ImplItemKind::Method(ref sig, body) => {
                MethodItem((sig, &self.generics, body, Some(self.defaultness)).clean(cx))
            }
            hir::ImplItemKind::Type(ref ty) => TypedefItem(Typedef {
                type_: ty.clean(cx),
                generics: Generics::default(),
            }, true),
            hir::ImplItemKind::Existential(ref bounds) => ExistentialItem(Existential {
                bounds: bounds.clean(cx),
                generics: Generics::default(),
            }, true),
        };
        let local_did = cx.tcx.hir().local_def_id_from_hir_id(self.hir_id);
        Item {
            name: Some(self.ident.name.clean(cx)),
            source: self.span.clean(cx),
            attrs: self.attrs.clean(cx),
            def_id: local_did,
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, local_did),
            deprecation: get_deprecation(cx, local_did),
            inner,
        }
    }
}
/// Cleans an associated item from the type system (used mainly for
/// cross-crate items, where no HIR is available).
impl<'tcx> Clean<Item> for ty::AssocItem {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.kind {
            ty::AssocKind::Const => {
                let ty = cx.tcx.type_of(self.def_id);
                let default = if self.defaultness.has_value() {
                    Some(inline::print_inlined_const(cx, self.def_id))
                } else {
                    None
                };
                AssocConstItem(ty.clean(cx), default)
            }
            ty::AssocKind::Method => {
                let generics = (cx.tcx.generics_of(self.def_id),
                                &cx.tcx.explicit_predicates_of(self.def_id)).clean(cx);
                let sig = cx.tcx.fn_sig(self.def_id);
                let mut decl = (self.def_id, sig).clean(cx);
                if self.method_has_self_argument {
                    // Rewrite the first argument's type back to the literal
                    // `Self` (possibly behind a reference) for display.
                    let self_ty = match self.container {
                        ty::ImplContainer(def_id) => {
                            cx.tcx.type_of(def_id)
                        }
                        ty::TraitContainer(_) => cx.tcx.mk_self_type()
                    };
                    let self_arg_ty = *sig.input(0).skip_binder();
                    if self_arg_ty == self_ty {
                        decl.inputs.values[0].type_ = Generic(String::from("Self"));
                    } else if let ty::Ref(_, ty, _) = self_arg_ty.sty {
                        if ty == self_ty {
                            match decl.inputs.values[0].type_ {
                                BorrowedRef{ref mut type_, ..} => {
                                    **type_ = Generic(String::from("Self"))
                                }
                                _ => unreachable!(),
                            }
                        }
                    }
                }
                // Impl methods always have a body; trait methods only when a
                // default is provided.
                let provided = match self.container {
                    ty::ImplContainer(_) => true,
                    ty::TraitContainer(_) => self.defaultness.has_value()
                };
                let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
                if provided {
                    let constness = if cx.tcx.is_min_const_fn(self.def_id) {
                        hir::Constness::Const
                    } else {
                        hir::Constness::NotConst
                    };
                    let defaultness = match self.container {
                        ty::ImplContainer(_) => Some(self.defaultness),
                        ty::TraitContainer(_) => None,
                    };
                    MethodItem(Method {
                        generics,
                        decl,
                        header: hir::FnHeader {
                            unsafety: sig.unsafety(),
                            abi: sig.abi(),
                            constness,
                            asyncness: hir::IsAsync::NotAsync,
                        },
                        defaultness,
                        all_types,
                        ret_types,
                    })
                } else {
                    TyMethodItem(TyMethod {
                        generics,
                        decl,
                        header: hir::FnHeader {
                            unsafety: sig.unsafety(),
                            abi: sig.abi(),
                            constness: hir::Constness::NotConst,
                            asyncness: hir::IsAsync::NotAsync,
                        },
                        all_types,
                        ret_types,
                    })
                }
            }
            ty::AssocKind::Type => {
                let my_name = self.ident.name.clean(cx);
                if let ty::TraitContainer(did) = self.container {
                    // When loading a cross-crate associated type, the bounds for this type
                    // are actually located on the trait/impl itself, so we need to load
                    // all of the generics from there and then look for bounds that are
                    // applied to this associated type in question.
                    let predicates = cx.tcx.explicit_predicates_of(did);
                    let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
                    let mut bounds = generics.where_predicates.iter().filter_map(|pred| {
                        let (name, self_type, trait_, bounds) = match *pred {
                            WherePredicate::BoundPredicate {
                                ty: QPath { ref name, ref self_type, ref trait_ },
                                ref bounds
                            } => (name, self_type, trait_, bounds),
                            _ => return None,
                        };
                        // Only keep bounds of the form `<Self as ThisTrait>::my_name: ...`.
                        if *name != my_name { return None }
                        match **trait_ {
                            ResolvedPath { did, .. } if did == self.container.id() => {}
                            _ => return None,
                        }
                        match **self_type {
                            Generic(ref s) if *s == "Self" => {}
                            _ => return None,
                        }
                        Some(bounds)
                    }).flat_map(|i| i.iter().cloned()).collect::<Vec<_>>();
                    // Our Sized/?Sized bound didn't get handled when creating the generics
                    // because we didn't actually get our whole set of bounds until just now
                    // (some of them may have come from the trait). If we do have a sized
                    // bound, we remove it, and if we don't then we add the `?Sized` bound
                    // at the end.
                    match bounds.iter().position(|b| b.is_sized_bound(cx)) {
                        Some(i) => { bounds.remove(i); }
                        None => bounds.push(GenericBound::maybe_sized(cx)),
                    }
                    let ty = if self.defaultness.has_value() {
                        Some(cx.tcx.type_of(self.def_id))
                    } else {
                        None
                    };
                    AssocTypeItem(bounds, ty.clean(cx))
                } else {
                    // In an impl, an associated type is documented as a typedef.
                    TypedefItem(Typedef {
                        type_: cx.tcx.type_of(self.def_id).clean(cx),
                        generics: Generics {
                            params: Vec::new(),
                            where_predicates: Vec::new(),
                        },
                    }, true)
                }
            }
            ty::AssocKind::Existential => unimplemented!(),
        };
        let visibility = match self.container {
            ty::ImplContainer(_) => self.vis.clean(cx),
            // Trait items inherit the trait's visibility.
            ty::TraitContainer(_) => None,
        };
        Item {
            name: Some(self.ident.name.clean(cx)),
            visibility,
            stability: get_stability(cx, self.def_id),
            deprecation: get_deprecation(cx, self.def_id),
            def_id: self.def_id,
            attrs: inline::load_attrs(cx, self.def_id),
            source: cx.tcx.def_span(self.def_id).clean(cx),
            inner,
        }
    }
}
/// A trait reference, which may have higher ranked lifetimes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct PolyTrait {
    pub trait_: Type,
    /// The `for<'a, ...>` binders, if any.
    pub generic_params: Vec<GenericParamDef>,
}
/// A representation of a type suitable for hyperlinking purposes. Ideally, one can get the original
/// type out of the AST/`TyCtxt` given one of these, if more information is needed. Most
/// importantly, it does not preserve mutability or boxes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum Type {
    /// Structs/enums/traits (most that would be an `hir::TyKind::Path`).
    ResolvedPath {
        path: Path,
        param_names: Option<Vec<GenericBound>>,
        did: DefId,
        /// `true` if is a `T::Name` path for associated types.
        is_generic: bool,
    },
    /// For parameterized types, so the consumer of the JSON don't go
    /// looking for types which don't exist anywhere.
    Generic(String),
    /// Primitives are the fixed-size numeric types (plus int/usize/float), char,
    /// arrays, slices, and tuples.
    Primitive(PrimitiveType),
    /// `extern "ABI" fn`
    BareFunction(Box<BareFunctionDecl>),
    Tuple(Vec<Type>),
    Slice(Box<Type>),
    /// `[T; N]`, with the length stringified for display.
    Array(Box<Type>, String),
    /// The `!` type.
    Never,
    /// The C variadic marker `...`.
    CVarArgs,
    Unique(Box<Type>),
    RawPointer(Mutability, Box<Type>),
    BorrowedRef {
        lifetime: Option<Lifetime>,
        mutability: Mutability,
        type_: Box<Type>,
    },
    // `<Type as Trait>::Name`
    QPath {
        name: String,
        self_type: Box<Type>,
        trait_: Box<Type>
    },
    // `_`
    Infer,
    // `impl TraitA + TraitB + ...`
    ImplTrait(Vec<GenericBound>),
}
/// The built-in "primitive" types as far as documentation is concerned:
/// numeric types plus char/bool/str and structural forms (arrays, slices,
/// tuples, pointers, references, fn pointers, `!`, `...`).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Copy, Debug)]
pub enum PrimitiveType {
    Isize, I8, I16, I32, I64, I128,
    Usize, U8, U16, U32, U64, U128,
    F32, F64,
    Char,
    Bool,
    Str,
    Slice,
    Array,
    Tuple,
    Unit,
    RawPointer,
    Reference,
    Fn,
    Never,
    CVarArgs,
}
/// The coarse kind of an item, used for categorizing links and search results.
#[derive(Clone, RustcEncodable, RustcDecodable, Copy, Debug)]
pub enum TypeKind {
    Enum,
    Function,
    Module,
    Const,
    Static,
    Struct,
    Union,
    Trait,
    Variant,
    Typedef,
    Foreign,
    Macro,
    Attr,
    Derive,
    TraitAlias,
}
/// Abstracts "things that may resolve to a definition" for link generation.
pub trait GetDefId {
    fn def_id(&self) -> Option<DefId>;
}
impl<T: GetDefId> GetDefId for Option<T> {
    fn def_id(&self) -> Option<DefId> {
        self.as_ref().and_then(|d| d.def_id())
    }
}
impl Type {
    /// Returns the primitive this type renders as, if any. References,
    /// slices, and arrays map to their structural primitive kind.
    pub fn primitive_type(&self) -> Option<PrimitiveType> {
        match *self {
            Primitive(p) | BorrowedRef { type_: box Primitive(p), ..} => Some(p),
            Slice(..) | BorrowedRef { type_: box Slice(..), .. } => Some(PrimitiveType::Slice),
            Array(..) | BorrowedRef { type_: box Array(..), .. } => Some(PrimitiveType::Array),
            // `()` is documented as the `unit` primitive, not as a tuple.
            Tuple(ref tys) => if tys.is_empty() {
                Some(PrimitiveType::Unit)
            } else {
                Some(PrimitiveType::Tuple)
            },
            RawPointer(..) => Some(PrimitiveType::RawPointer),
            BorrowedRef { type_: box Generic(..), .. } => Some(PrimitiveType::Reference),
            BareFunction(..) => Some(PrimitiveType::Fn),
            Never => Some(PrimitiveType::Never),
            _ => None,
        }
    }
    /// `true` for resolved `T::Name` associated-type paths.
    pub fn is_generic(&self) -> bool {
        match *self {
            ResolvedPath { is_generic, .. } => is_generic,
            _ => false,
        }
    }
    /// `true` if this is the literal `Self` type.
    pub fn is_self_type(&self) -> bool {
        match *self {
            Generic(ref name) => name == "Self",
            _ => false
        }
    }
    /// Returns the generic type arguments of the last path segment, if any.
    pub fn generics(&self) -> Option<Vec<Type>> {
        match *self {
            ResolvedPath { ref path, .. } => {
                path.segments.last().and_then(|seg| {
                    if let GenericArgs::AngleBracketed { ref args, .. } = seg.args {
                        Some(args.iter().filter_map(|arg| match arg {
                            GenericArg::Type(ty) => Some(ty.clone()),
                            _ => None,
                        }).collect())
                    } else {
                        None
                    }
                })
            }
            _ => None,
        }
    }
    /// Returns the associated-type bindings (`Item = T`) of the last path
    /// segment, if any.
    pub fn bindings(&self) -> Option<&[TypeBinding]> {
        match *self {
            ResolvedPath { ref path, .. } => {
                path.segments.last().and_then(|seg| {
                    if let GenericArgs::AngleBracketed { ref bindings, .. } = seg.args {
                        Some(&**bindings)
                    } else {
                        None
                    }
                })
            }
            _ => None
        }
    }
    /// `true` for a bare type parameter (`Type::Generic`).
    pub fn is_full_generic(&self) -> bool {
        match *self {
            Type::Generic(_) => true,
            _ => false,
        }
    }
}
impl GetDefId for Type {
    /// Resolves this type to the `DefId` of the page it should link to.
    /// Primitives link through the render cache's primitive locations.
    fn def_id(&self) -> Option<DefId> {
        match *self {
            ResolvedPath { did, .. } => Some(did),
            Primitive(p) => crate::html::render::cache().primitive_locations.get(&p).cloned(),
            // A reference to a generic links to the `reference` primitive page.
            BorrowedRef { type_: box Generic(..), .. } =>
                Primitive(PrimitiveType::Reference).def_id(),
            BorrowedRef { ref type_, .. } => type_.def_id(),
            Tuple(ref tys) => if tys.is_empty() {
                Primitive(PrimitiveType::Unit).def_id()
            } else {
                Primitive(PrimitiveType::Tuple).def_id()
            },
            BareFunction(..) => Primitive(PrimitiveType::Fn).def_id(),
            Never => Primitive(PrimitiveType::Never).def_id(),
            Slice(..) => Primitive(PrimitiveType::Slice).def_id(),
            Array(..) => Primitive(PrimitiveType::Array).def_id(),
            RawPointer(..) => Primitive(PrimitiveType::RawPointer).def_id(),
            QPath { ref self_type, .. } => self_type.def_id(),
            _ => None,
        }
    }
}
impl PrimitiveType {
    /// Parses a primitive name as written in `#[doc(primitive = "...")]`.
    // NOTE(review): there is no arm for `CVarArgs` here even though `as_str`
    // produces "..." for it — presumably intentional (it cannot be named in a
    // doc attribute), but worth confirming.
    fn from_str(s: &str) -> Option<PrimitiveType> {
        match s {
            "isize" => Some(PrimitiveType::Isize),
            "i8" => Some(PrimitiveType::I8),
            "i16" => Some(PrimitiveType::I16),
            "i32" => Some(PrimitiveType::I32),
            "i64" => Some(PrimitiveType::I64),
            "i128" => Some(PrimitiveType::I128),
            "usize" => Some(PrimitiveType::Usize),
            "u8" => Some(PrimitiveType::U8),
            "u16" => Some(PrimitiveType::U16),
            "u32" => Some(PrimitiveType::U32),
            "u64" => Some(PrimitiveType::U64),
            "u128" => Some(PrimitiveType::U128),
            "bool" => Some(PrimitiveType::Bool),
            "char" => Some(PrimitiveType::Char),
            "str" => Some(PrimitiveType::Str),
            "f32" => Some(PrimitiveType::F32),
            "f64" => Some(PrimitiveType::F64),
            "array" => Some(PrimitiveType::Array),
            "slice" => Some(PrimitiveType::Slice),
            "tuple" => Some(PrimitiveType::Tuple),
            "unit" => Some(PrimitiveType::Unit),
            "pointer" => Some(PrimitiveType::RawPointer),
            "reference" => Some(PrimitiveType::Reference),
            "fn" => Some(PrimitiveType::Fn),
            "never" => Some(PrimitiveType::Never),
            _ => None,
        }
    }
    /// The canonical display name of this primitive.
    pub fn as_str(&self) -> &'static str {
        use self::PrimitiveType::*;
        match *self {
            Isize => "isize",
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
            Usize => "usize",
            U8 => "u8",
            U16 => "u16",
            U32 => "u32",
            U64 => "u64",
            U128 => "u128",
            F32 => "f32",
            F64 => "f64",
            Str => "str",
            Bool => "bool",
            Char => "char",
            Array => "array",
            Slice => "slice",
            Tuple => "tuple",
            Unit => "unit",
            RawPointer => "pointer",
            Reference => "reference",
            Fn => "fn",
            Never => "never",
            CVarArgs => "...",
        }
    }
    /// The URL segment used for this primitive's documentation page.
    pub fn to_url_str(&self) -> &'static str {
        self.as_str()
    }
}
impl From<ast::IntTy> for PrimitiveType {
fn from(int_ty: ast::IntTy) -> PrimitiveType {
match int_ty {
ast::IntTy::Isize => PrimitiveType::Isize,
ast::IntTy::I8 => PrimitiveType::I8,
ast::IntTy::I16 => PrimitiveType::I16,
ast::IntTy::I32 => PrimitiveType::I32,
ast::IntTy::I64 => PrimitiveType::I64,
ast::IntTy::I128 => PrimitiveType::I128,
}
}
}
impl From<ast::UintTy> for PrimitiveType {
fn from(uint_ty: ast::UintTy) -> PrimitiveType {
match uint_ty {
ast::UintTy::Usize => PrimitiveType::Usize,
ast::UintTy::U8 => PrimitiveType::U8,
ast::UintTy::U16 => PrimitiveType::U16,
ast::UintTy::U32 => PrimitiveType::U32,
ast::UintTy::U64 => PrimitiveType::U64,
ast::UintTy::U128 => PrimitiveType::U128,
}
}
}
impl From<ast::FloatTy> for PrimitiveType {
fn from(float_ty: ast::FloatTy) -> PrimitiveType {
match float_ty {
ast::FloatTy::F32 => PrimitiveType::F32,
ast::FloatTy::F64 => PrimitiveType::F64,
}
}
}
impl Clean<Type> for hir::Ty {
    // Lowers an HIR type to rustdoc's cleaned `Type`, resolving paths,
    // substituting generic parameters recorded in the context, and evaluating
    // array lengths to printable strings.
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        use rustc::hir::*;
        match self.node {
            TyKind::Never => Never,
            TyKind::Ptr(ref m) => RawPointer(m.mutbl.clean(cx), box m.ty.clean(cx)),
            TyKind::Rptr(ref l, ref m) => {
                // Elided lifetimes are dropped entirely rather than shown.
                let lifetime = if l.is_elided() {
                    None
                } else {
                    Some(l.clean(cx))
                };
                BorrowedRef {lifetime: lifetime, mutability: m.mutbl.clean(cx),
                             type_: box m.ty.clean(cx)}
            }
            TyKind::Slice(ref ty) => Slice(box ty.clean(cx)),
            TyKind::Array(ref ty, ref length) => {
                // Const-eval the array length so it can be rendered; fall back
                // to "_" when evaluation fails.
                let def_id = cx.tcx.hir().local_def_id_from_hir_id(length.hir_id);
                let param_env = cx.tcx.param_env(def_id);
                let substs = InternalSubsts::identity_for_item(cx.tcx, def_id);
                let cid = GlobalId {
                    instance: ty::Instance::new(def_id, substs),
                    promoted: None
                };
                let length = match cx.tcx.const_eval(param_env.and(cid)) {
                    Ok(length) => print_const(cx, length),
                    Err(_) => "_".to_string(),
                };
                Array(box ty.clean(cx), length)
            },
            TyKind::Tup(ref tys) => Tuple(tys.clean(cx)),
            // `TyKind::Def` points at an existential (`impl Trait`) item;
            // clean its bounds into an `ImplTrait`.
            TyKind::Def(item_id, _) => {
                let item = cx.tcx.hir().expect_item_by_hir_id(item_id.id);
                if let hir::ItemKind::Existential(ref ty) = item.node {
                    ImplTrait(ty.bounds.clean(cx))
                } else {
                    unreachable!()
                }
            }
            TyKind::Path(hir::QPath::Resolved(None, ref path)) => {
                // Type parameters may have been substituted (e.g. while
                // inlining); consult the context's substitution tables first.
                if let Res::Def(DefKind::TyParam, did) = path.res {
                    if let Some(new_ty) = cx.ty_substs.borrow().get(&did).cloned() {
                        return new_ty;
                    }
                    if let Some(bounds) = cx.impl_trait_bounds.borrow_mut().remove(&did) {
                        return ImplTrait(bounds);
                    }
                }
                let mut alias = None;
                if let Res::Def(DefKind::TyAlias, def_id) = path.res {
                    // Substitute private type aliases
                    if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(def_id) {
                        if !cx.renderinfo.borrow().access_levels.is_exported(def_id) {
                            alias = Some(&cx.tcx.hir().expect_item_by_hir_id(hir_id).node);
                        }
                    }
                };
                if let Some(&hir::ItemKind::Ty(ref ty, ref generics)) = alias {
                    // Build substitution maps from the alias's generic params to
                    // the arguments actually provided at this use site, then
                    // clean the aliased type under those substitutions.
                    let provided_params = &path.segments.last().expect("segments were empty");
                    let mut ty_substs = FxHashMap::default();
                    let mut lt_substs = FxHashMap::default();
                    let mut ct_substs = FxHashMap::default();
                    provided_params.with_generic_args(|generic_args| {
                        let mut indices: GenericParamCount = Default::default();
                        for param in generics.params.iter() {
                            match param.kind {
                                hir::GenericParamKind::Lifetime { .. } => {
                                    // Match the param against the lifetime
                                    // argument at the same position.
                                    let mut j = 0;
                                    let lifetime = generic_args.args.iter().find_map(|arg| {
                                        match arg {
                                            hir::GenericArg::Lifetime(lt) => {
                                                if indices.lifetimes == j {
                                                    return Some(lt);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        }
                                    });
                                    if let Some(lt) = lifetime.cloned() {
                                        if !lt.is_elided() {
                                            let lt_def_id =
                                                cx.tcx.hir().local_def_id_from_hir_id(param.hir_id);
                                            lt_substs.insert(lt_def_id, lt.clean(cx));
                                        }
                                    }
                                    indices.lifetimes += 1;
                                }
                                hir::GenericParamKind::Type { ref default, .. } => {
                                    let ty_param_def_id =
                                        cx.tcx.hir().local_def_id_from_hir_id(param.hir_id);
                                    let mut j = 0;
                                    let type_ = generic_args.args.iter().find_map(|arg| {
                                        match arg {
                                            hir::GenericArg::Type(ty) => {
                                                if indices.types == j {
                                                    return Some(ty);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        }
                                    });
                                    if let Some(ty) = type_.cloned() {
                                        ty_substs.insert(ty_param_def_id, ty.clean(cx));
                                    } else if let Some(default) = default.clone() {
                                        // No explicit argument: fall back to the
                                        // parameter's declared default.
                                        ty_substs.insert(ty_param_def_id,
                                                         default.into_inner().clean(cx));
                                    }
                                    indices.types += 1;
                                }
                                hir::GenericParamKind::Const { .. } => {
                                    let const_param_def_id =
                                        cx.tcx.hir().local_def_id_from_hir_id(param.hir_id);
                                    let mut j = 0;
                                    let const_ = generic_args.args.iter().find_map(|arg| {
                                        match arg {
                                            hir::GenericArg::Const(ct) => {
                                                if indices.consts == j {
                                                    return Some(ct);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        }
                                    });
                                    if let Some(ct) = const_.cloned() {
                                        ct_substs.insert(const_param_def_id, ct.clean(cx));
                                    }
                                    // FIXME(const_generics:defaults)
                                    indices.consts += 1;
                                }
                            }
                        }
                    });
                    return cx.enter_alias(ty_substs, lt_substs, ct_substs, || ty.clean(cx));
                }
                resolve_type(cx, path.clean(cx), self.hir_id)
            }
            TyKind::Path(hir::QPath::Resolved(Some(ref qself), ref p)) => {
                // `<Self as Trait>::Assoc` written with a resolved trait path:
                // drop the final (associated item) segment to recover the
                // trait path itself.
                let mut segments: Vec<_> = p.segments.clone().into();
                segments.pop();
                let trait_path = hir::Path {
                    span: p.span,
                    res: Res::Def(
                        DefKind::Trait,
                        cx.tcx.associated_item(p.res.def_id()).container.id(),
                    ),
                    segments: segments.into(),
                };
                Type::QPath {
                    name: p.segments.last().expect("segments were empty").ident.name.clean(cx),
                    self_type: box qself.clean(cx),
                    trait_: box resolve_type(cx, trait_path.clean(cx), self.hir_id)
                }
            }
            TyKind::Path(hir::QPath::TypeRelative(ref qself, ref segment)) => {
                // `T::Assoc`: recover the trait from the type-checked
                // projection, when there is one.
                let mut res = Res::Err;
                let ty = hir_ty_to_ty(cx.tcx, self);
                if let ty::Projection(proj) = ty.sty {
                    res = Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id);
                }
                let trait_path = hir::Path {
                    span: self.span,
                    res,
                    segments: vec![].into(),
                };
                Type::QPath {
                    name: segment.ident.name.clean(cx),
                    self_type: box qself.clean(cx),
                    trait_: box resolve_type(cx, trait_path.clean(cx), self.hir_id)
                }
            }
            TyKind::TraitObject(ref bounds, ref lifetime) => {
                // The first bound is the principal trait; the remaining bounds
                // (and any non-elided lifetime) are attached as `param_names`.
                match bounds[0].clean(cx).trait_ {
                    ResolvedPath { path, param_names: None, did, is_generic } => {
                        let mut bounds: Vec<self::GenericBound> = bounds[1..].iter().map(|bound| {
                            self::GenericBound::TraitBound(bound.clean(cx),
                                                           hir::TraitBoundModifier::None)
                        }).collect();
                        if !lifetime.is_elided() {
                            bounds.push(self::GenericBound::Outlives(lifetime.clean(cx)));
                        }
                        ResolvedPath { path, param_names: Some(bounds), did, is_generic, }
                    }
                    _ => Infer, // shouldn't happen
                }
            }
            TyKind::BareFn(ref barefn) => BareFunction(box barefn.clean(cx)),
            TyKind::Infer | TyKind::Err => Infer,
            TyKind::Typeof(..) => panic!("unimplemented type {:?}", self.node),
            TyKind::CVarArgs(_) => CVarArgs,
        }
    }
}
impl<'tcx> Clean<Type> for Ty<'tcx> {
    // Lowers a type-checked `ty::Ty` to rustdoc's cleaned `Type`. Unlike the
    // HIR variant above, this works on fully-resolved middle types, so paths
    // are reconstructed from `DefId`s via `external_path`.
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        debug!("cleaning type: {:?}", self);
        match self.sty {
            ty::Never => Never,
            ty::Bool => Primitive(PrimitiveType::Bool),
            ty::Char => Primitive(PrimitiveType::Char),
            ty::Int(int_ty) => Primitive(int_ty.into()),
            ty::Uint(uint_ty) => Primitive(uint_ty.into()),
            ty::Float(float_ty) => Primitive(float_ty.into()),
            ty::Str => Primitive(PrimitiveType::Str),
            ty::Slice(ty) => Slice(box ty.clean(cx)),
            ty::Array(ty, n) => {
                // Try to evaluate an unevaluated length constant so the
                // rendered length is concrete where possible.
                let mut n = cx.tcx.lift(&n).expect("array lift failed");
                if let ConstValue::Unevaluated(def_id, substs) = n.val {
                    let param_env = cx.tcx.param_env(def_id);
                    let cid = GlobalId {
                        instance: ty::Instance::new(def_id, substs),
                        promoted: None
                    };
                    if let Ok(new_n) = cx.tcx.const_eval(param_env.and(cid)) {
                        n = new_n;
                    }
                };
                let n = print_const(cx, n);
                Array(box ty.clean(cx), n)
            }
            ty::RawPtr(mt) => RawPointer(mt.mutbl.clean(cx), box mt.ty.clean(cx)),
            ty::Ref(r, ty, mutbl) => BorrowedRef {
                lifetime: r.clean(cx),
                mutability: mutbl.clean(cx),
                type_: box ty.clean(cx),
            },
            ty::FnDef(..) |
            ty::FnPtr(_) => {
                // Both function items and function pointers are rendered as
                // bare function types via their signatures.
                let ty = cx.tcx.lift(self).expect("FnPtr lift failed");
                let sig = ty.fn_sig(cx.tcx);
                BareFunction(box BareFunctionDecl {
                    unsafety: sig.unsafety(),
                    generic_params: Vec::new(),
                    decl: (cx.tcx.hir().local_def_id(ast::CRATE_NODE_ID), sig).clean(cx),
                    abi: sig.abi(),
                })
            }
            ty::Adt(def, substs) => {
                let did = def.did;
                let kind = match def.adt_kind() {
                    AdtKind::Struct => TypeKind::Struct,
                    AdtKind::Union => TypeKind::Union,
                    AdtKind::Enum => TypeKind::Enum,
                };
                inline::record_extern_fqn(cx, did, kind);
                let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
                                         None, false, vec![], substs);
                ResolvedPath {
                    path,
                    param_names: None,
                    did,
                    is_generic: false,
                }
            }
            ty::Foreign(did) => {
                inline::record_extern_fqn(cx, did, TypeKind::Foreign);
                let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
                                         None, false, vec![], InternalSubsts::empty());
                ResolvedPath {
                    path: path,
                    param_names: None,
                    did: did,
                    is_generic: false,
                }
            }
            ty::Dynamic(ref obj, ref reg) => {
                // HACK: pick the first `did` as the `did` of the trait object. Someone
                // might want to implement "native" support for marker-trait-only
                // trait objects.
                let mut dids = obj.principal_def_id().into_iter().chain(obj.auto_traits());
                let did = dids.next().unwrap_or_else(|| {
                    panic!("found trait object `{:?}` with no traits?", self)
                });
                let substs = match obj.principal() {
                    Some(principal) => principal.skip_binder().substs,
                    // marker traits have no substs.
                    _ => cx.tcx.intern_substs(&[])
                };
                inline::record_extern_fqn(cx, did, TypeKind::Trait);
                let mut param_names = vec![];
                // Push the region bound (if any) first, then one trait bound
                // per remaining (auto) trait.
                reg.clean(cx).map(|b| param_names.push(GenericBound::Outlives(b)));
                for did in dids {
                    let empty = cx.tcx.intern_substs(&[]);
                    let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
                                             Some(did), false, vec![], empty);
                    inline::record_extern_fqn(cx, did, TypeKind::Trait);
                    let bound = GenericBound::TraitBound(PolyTrait {
                        trait_: ResolvedPath {
                            path,
                            param_names: None,
                            did,
                            is_generic: false,
                        },
                        generic_params: Vec::new(),
                    }, hir::TraitBoundModifier::None);
                    param_names.push(bound);
                }
                // Associated-type bindings (`dyn Trait<Assoc = T>`) become
                // equality type bindings on the principal's path.
                let mut bindings = vec![];
                for pb in obj.projection_bounds() {
                    bindings.push(TypeBinding {
                        name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
                        kind: TypeBindingKind::Equality {
                            ty: pb.skip_binder().ty.clean(cx)
                        },
                    });
                }
                let path = external_path(cx, &cx.tcx.item_name(did).as_str(), Some(did),
                    false, bindings, substs);
                ResolvedPath {
                    path,
                    param_names: Some(param_names),
                    did,
                    is_generic: false,
                }
            }
            ty::Tuple(ref t) => {
                Tuple(t.iter().map(|t| t.expect_ty()).collect::<Vec<_>>().clean(cx))
            }
            ty::Projection(ref data) => data.clean(cx),
            ty::Param(ref p) => Generic(p.name.to_string()),
            ty::Opaque(def_id, substs) => {
                // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
                // by looking up the projections associated with the def_id.
                let predicates_of = cx.tcx.explicit_predicates_of(def_id);
                let substs = cx.tcx.lift(&substs).expect("Opaque lift failed");
                let bounds = predicates_of.instantiate(cx.tcx, substs);
                let mut regions = vec![];
                let mut has_sized = false;
                let mut bounds = bounds.predicates.iter().filter_map(|predicate| {
                    let trait_ref = if let Some(tr) = predicate.to_opt_poly_trait_ref() {
                        tr
                    } else if let ty::Predicate::TypeOutlives(pred) = *predicate {
                        // these should turn up at the end
                        pred.skip_binder().1.clean(cx).map(|r| {
                            regions.push(GenericBound::Outlives(r))
                        });
                        return None;
                    } else {
                        return None;
                    };
                    // An implicit `Sized` bound is elided from the output.
                    if let Some(sized) = cx.tcx.lang_items().sized_trait() {
                        if trait_ref.def_id() == sized {
                            has_sized = true;
                            return None;
                        }
                    }
                    // Collect the associated-type equality constraints that
                    // belong to this particular trait ref.
                    let bounds = bounds.predicates.iter().filter_map(|pred|
                        if let ty::Predicate::Projection(proj) = *pred {
                            let proj = proj.skip_binder();
                            if proj.projection_ty.trait_ref(cx.tcx) == *trait_ref.skip_binder() {
                                Some(TypeBinding {
                                    name: cx.tcx.associated_item(proj.projection_ty.item_def_id)
                                                .ident.name.clean(cx),
                                    kind: TypeBindingKind::Equality {
                                        ty: proj.ty.clean(cx),
                                    },
                                })
                            } else {
                                None
                            }
                        } else {
                            None
                        }
                    ).collect();
                    Some((trait_ref.skip_binder(), bounds).clean(cx))
                }).collect::<Vec<_>>();
                bounds.extend(regions);
                // No `Sized` bound means `?Sized` must be shown explicitly.
                if !has_sized && !bounds.is_empty() {
                    bounds.insert(0, GenericBound::maybe_sized(cx));
                }
                ImplTrait(bounds)
            }
            ty::Closure(..) | ty::Generator(..) => Tuple(vec![]), // FIXME(pcwalton)
            ty::Bound(..) => panic!("Bound"),
            ty::Placeholder(..) => panic!("Placeholder"),
            ty::UnnormalizedProjection(..) => panic!("UnnormalizedProjection"),
            ty::GeneratorWitness(..) => panic!("GeneratorWitness"),
            ty::Infer(..) => panic!("Infer"),
            ty::Error => panic!("Error"),
        }
    }
}
impl<'tcx> Clean<Constant> for ty::Const<'tcx> {
    /// Converts an evaluated constant into its cleaned form, rendering the
    /// value through its `Display` impl.
    fn clean(&self, cx: &DocContext<'_>) -> Constant {
        Constant {
            type_: self.ty.clean(cx),
            // `to_string()` is the idiomatic equivalent of `format!("{}", self)`.
            expr: self.to_string(),
        }
    }
}
impl Clean<Item> for hir::StructField {
    // Cleans a local (HIR) struct field into an `Item`, pulling stability and
    // deprecation info from the field's own `DefId`.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let local_did = cx.tcx.hir().local_def_id_from_hir_id(self.hir_id);
        Item {
            name: Some(self.ident.name).clean(cx),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, local_did),
            deprecation: get_deprecation(cx, local_did),
            def_id: local_did,
            inner: StructFieldItem(self.ty.clean(cx)),
        }
    }
}
impl<'tcx> Clean<Item> for ty::FieldDef {
    // Cleans an extern-crate (middle ty) struct field; attrs, span, and type
    // all come from tcx queries rather than the HIR.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.ident.name).clean(cx),
            attrs: cx.tcx.get_attrs(self.did).clean(cx),
            source: cx.tcx.def_span(self.did).clean(cx),
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, self.did),
            deprecation: get_deprecation(cx, self.did),
            def_id: self.did,
            inner: StructFieldItem(cx.tcx.type_of(self.did).clean(cx)),
        }
    }
}
/// The cleaned form of an item's visibility.
#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug)]
pub enum Visibility {
    Public,
    /// No explicit visibility; inherited from the enclosing scope.
    Inherited,
    Crate,
    /// `pub(in path)` — restricted to the module identified by the `DefId`.
    Restricted(DefId, Path),
}
impl Clean<Option<Visibility>> for hir::Visibility {
    // Maps each HIR visibility kind onto the cleaned enum; always `Some`
    // (the `Option` matches other `Clean` impls of visibility).
    fn clean(&self, cx: &DocContext<'_>) -> Option<Visibility> {
        Some(match self.node {
            hir::VisibilityKind::Public => Visibility::Public,
            hir::VisibilityKind::Inherited => Visibility::Inherited,
            hir::VisibilityKind::Crate(_) => Visibility::Crate,
            hir::VisibilityKind::Restricted { ref path, .. } => {
                let path = path.clean(cx);
                // Registering the res ensures the restriction target is known
                // to the rest of the pipeline.
                let did = register_res(cx, path.res);
                Visibility::Restricted(did, path)
            }
        })
    }
}
impl Clean<Option<Visibility>> for ty::Visibility {
    /// Middle-ty visibilities collapse to either `Public` or `Inherited`;
    /// crate- and path-restricted visibilities are treated as inherited here.
    fn clean(&self, _: &DocContext<'_>) -> Option<Visibility> {
        let vis = match *self {
            ty::Visibility::Public => Public,
            _ => Inherited,
        };
        Some(vis)
    }
}
/// Cleaned form of a struct definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Struct {
    pub struct_type: doctree::StructType,
    pub generics: Generics,
    pub fields: Vec<Item>,
    // Set by later passes when private/hidden fields were removed.
    pub fields_stripped: bool,
}
/// Cleaned form of a union definition; structurally identical to `Struct`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Union {
    pub struct_type: doctree::StructType,
    pub generics: Generics,
    pub fields: Vec<Item>,
    // Set by later passes when private/hidden fields were removed.
    pub fields_stripped: bool,
}
impl Clean<Item> for doctree::Struct {
    // Wraps a doctree struct into a cleaned `Item` with a `StructItem` inner.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: StructItem(Struct {
                struct_type: self.struct_type,
                generics: self.generics.clean(cx),
                fields: self.fields.clean(cx),
                fields_stripped: false,
            }),
        }
    }
}
impl Clean<Item> for doctree::Union {
    // Wraps a doctree union into a cleaned `Item`; mirrors the struct impl.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: UnionItem(Union {
                struct_type: self.struct_type,
                generics: self.generics.clean(cx),
                fields: self.fields.clean(cx),
                fields_stripped: false,
            }),
        }
    }
}
/// This is a more limited form of the standard Struct, different in that
/// it lacks the things most items have (name, id, parameterization). Found
/// only as a variant in an enum.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct VariantStruct {
    pub struct_type: doctree::StructType,
    pub fields: Vec<Item>,
    // Set by later passes when private/hidden fields were removed.
    pub fields_stripped: bool,
}
impl Clean<VariantStruct> for ::rustc::hir::VariantData {
    /// Builds the limited struct form used for struct-like enum variants,
    /// cleaning every field of the variant data.
    fn clean(&self, cx: &DocContext<'_>) -> VariantStruct {
        let cleaned_fields = self.fields().iter().map(|field| field.clean(cx)).collect();
        VariantStruct {
            struct_type: doctree::struct_type_from_def(self),
            fields: cleaned_fields,
            fields_stripped: false,
        }
    }
}
/// Cleaned form of an enum definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Enum {
    pub variants: IndexVec<VariantIdx, Item>,
    pub generics: Generics,
    // Set by later passes when hidden variants were removed.
    pub variants_stripped: bool,
}
impl Clean<Item> for doctree::Enum {
    // Wraps a doctree enum into a cleaned `Item` with an `EnumItem` inner.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: EnumItem(Enum {
                variants: self.variants.iter().map(|v| v.clean(cx)).collect(),
                generics: self.generics.clean(cx),
                variants_stripped: false,
            }),
        }
    }
}
/// Cleaned form of an enum variant; the shape is captured by `VariantKind`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Variant {
    pub kind: VariantKind,
}
impl Clean<Item> for doctree::Variant {
    // Wraps a local enum variant into an `Item`; variants carry no explicit
    // visibility of their own (hence `visibility: None`).
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            visibility: None,
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            inner: VariantItem(Variant {
                kind: self.def.clean(cx),
            }),
        }
    }
}
impl<'tcx> Clean<Item> for ty::VariantDef {
    // Cleans an extern-crate enum variant: the constructor kind determines
    // whether it renders as unit-like, tuple, or struct-like.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let kind = match self.ctor_kind {
            CtorKind::Const => VariantKind::CLike,
            CtorKind::Fn => {
                VariantKind::Tuple(
                    self.fields.iter().map(|f| cx.tcx.type_of(f.did).clean(cx)).collect()
                )
            }
            // `Fictive` means there is no constructor fn: a struct variant.
            CtorKind::Fictive => {
                VariantKind::Struct(VariantStruct {
                    struct_type: doctree::Plain,
                    fields_stripped: false,
                    fields: self.fields.iter().map(|field| {
                        Item {
                            source: cx.tcx.def_span(field.did).clean(cx),
                            name: Some(field.ident.name.clean(cx)),
                            attrs: cx.tcx.get_attrs(field.did).clean(cx),
                            visibility: field.vis.clean(cx),
                            def_id: field.did,
                            stability: get_stability(cx, field.did),
                            deprecation: get_deprecation(cx, field.did),
                            inner: StructFieldItem(cx.tcx.type_of(field.did).clean(cx))
                        }
                    }).collect()
                })
            }
        };
        Item {
            name: Some(self.ident.clean(cx)),
            attrs: inline::load_attrs(cx, self.def_id),
            source: cx.tcx.def_span(self.def_id).clean(cx),
            visibility: Some(Inherited),
            def_id: self.def_id,
            inner: VariantItem(Variant { kind }),
            stability: get_stability(cx, self.def_id),
            deprecation: get_deprecation(cx, self.def_id),
        }
    }
}
/// The three syntactic shapes an enum variant can take.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum VariantKind {
    /// Unit variant, e.g. `Foo`.
    CLike,
    /// Tuple variant, e.g. `Foo(u32, String)`.
    Tuple(Vec<Type>),
    /// Struct variant, e.g. `Foo { x: u32 }`.
    Struct(VariantStruct),
}
impl Clean<VariantKind> for hir::VariantData {
    /// Classifies HIR variant data as unit, tuple, or struct-like and cleans
    /// the contained field types accordingly.
    fn clean(&self, cx: &DocContext<'_>) -> VariantKind {
        match self {
            hir::VariantData::Unit(..) => VariantKind::CLike,
            hir::VariantData::Tuple(..) => {
                let field_types = self.fields().iter().map(|field| field.ty.clean(cx)).collect();
                VariantKind::Tuple(field_types)
            }
            hir::VariantData::Struct(..) => VariantKind::Struct(self.clean(cx)),
        }
    }
}
/// A source span in cleaned form: file name plus 1-based line / 0-based
/// column coordinates, alongside the original compiler span.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Span {
    pub filename: FileName,
    pub loline: usize,
    pub locol: usize,
    pub hiline: usize,
    pub hicol: usize,
    pub original: syntax_pos::Span,
}
impl Span {
    /// A placeholder span pointing at an anonymous file with zeroed
    /// coordinates and a dummy compiler span.
    pub fn empty() -> Span {
        Span {
            filename: FileName::Anon(0),
            loline: 0,
            locol: 0,
            hiline: 0,
            hicol: 0,
            original: syntax_pos::DUMMY_SP,
        }
    }

    /// The underlying compiler span this cleaned span was built from.
    pub fn span(&self) -> syntax_pos::Span {
        self.original
    }
}
impl Clean<Span> for syntax_pos::Span {
    // Resolves a compiler span to file/line/column coordinates via the
    // session's source map; dummy spans become the empty placeholder.
    fn clean(&self, cx: &DocContext<'_>) -> Span {
        if self.is_dummy() {
            return Span::empty();
        }

        let cm = cx.sess().source_map();
        let filename = cm.span_to_filename(*self);
        let lo = cm.lookup_char_pos(self.lo());
        let hi = cm.lookup_char_pos(self.hi());
        Span {
            filename,
            loline: lo.line,
            locol: lo.col.to_usize(),
            hiline: hi.line,
            hicol: hi.col.to_usize(),
            original: *self,
        }
    }
}
/// A cleaned path: its resolution plus the (already-cleaned) segments.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct Path {
    // Whether the path started with `::` (crate-root relative).
    pub global: bool,
    pub res: Res,
    pub segments: Vec<PathSegment>,
}
impl Path {
    /// Name of the final path segment.
    ///
    /// Panics if the path has no segments.
    pub fn last_name(&self) -> &str {
        let last = self.segments.last().expect("segments were empty");
        last.name.as_str()
    }
}
impl Clean<Path> for hir::Path {
    /// Cleans an HIR path, dropping the leading crate-root segment of a
    /// global (`::`-prefixed) path before cleaning the remaining segments.
    fn clean(&self, cx: &DocContext<'_>) -> Path {
        let global = self.is_global();
        let segments = if global { &self.segments[1..] } else { &self.segments[..] };
        Path {
            global,
            res: self.res,
            segments: segments.clean(cx),
        }
    }
}
/// A single generic argument in a path: lifetime, type, or const.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericArg {
    Lifetime(Lifetime),
    Type(Type),
    Const(Constant),
}
impl fmt::Display for GenericArg {
    // Delegates formatting to the wrapped value's own `fmt` impl.
    // NOTE(review): which trait `lt.fmt(f)` resolves to depends on the traits
    // imported at the top of this file (out of view here) — presumably
    // `Display`; confirm before restructuring.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            GenericArg::Lifetime(lt) => lt.fmt(f),
            GenericArg::Type(ty) => ty.fmt(f),
            GenericArg::Const(ct) => ct.fmt(f),
        }
    }
}
/// Generic arguments attached to a path segment, in either of Rust's two
/// syntactic forms.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum GenericArgs {
    /// `<'a, T, N, Assoc = U>` style arguments.
    AngleBracketed {
        args: Vec<GenericArg>,
        bindings: Vec<TypeBinding>,
    },
    /// `Fn(A, B) -> C` style arguments; `output` is `None` for `-> ()`.
    Parenthesized {
        inputs: Vec<Type>,
        output: Option<Type>,
    }
}
impl Clean<GenericArgs> for hir::GenericArgs {
    // Cleans HIR generic args, preserving which of the two syntactic forms
    // was used. When every lifetime argument is elided, lifetimes are dropped
    // from the output entirely.
    fn clean(&self, cx: &DocContext<'_>) -> GenericArgs {
        if self.parenthesized {
            // For `Fn(A) -> B` sugar the output type is stored as the first
            // binding; a unit return is rendered as no output.
            let output = self.bindings[0].ty().clean(cx);
            GenericArgs::Parenthesized {
                inputs: self.inputs().clean(cx),
                output: if output != Type::Tuple(Vec::new()) { Some(output) } else { None }
            }
        } else {
            let elide_lifetimes = self.args.iter().all(|arg| match arg {
                hir::GenericArg::Lifetime(lt) => lt.is_elided(),
                _ => true,
            });
            GenericArgs::AngleBracketed {
                args: self.args.iter().filter_map(|arg| match arg {
                    hir::GenericArg::Lifetime(lt) if !elide_lifetimes => {
                        Some(GenericArg::Lifetime(lt.clean(cx)))
                    }
                    hir::GenericArg::Lifetime(_) => None,
                    hir::GenericArg::Type(ty) => Some(GenericArg::Type(ty.clean(cx))),
                    hir::GenericArg::Const(ct) => Some(GenericArg::Const(ct.clean(cx))),
                }).collect(),
                bindings: self.bindings.clean(cx),
            }
        }
    }
}
/// One segment of a cleaned path: its name and any generic arguments.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct PathSegment {
    pub name: String,
    pub args: GenericArgs,
}
impl Clean<PathSegment> for hir::PathSegment {
    /// Cleans one HIR path segment: its identifier plus its generic args.
    fn clean(&self, cx: &DocContext<'_>) -> PathSegment {
        let args = self.with_generic_args(|generic_args| generic_args.clean(cx));
        PathSegment {
            name: self.ident.name.clean(cx),
            args,
        }
    }
}
// Recursively strips generic arguments from every path inside `ty` (via
// `strip_path`), leaving other type structure intact. Used to produce a
// "bare" form of a type for comparison/lookup purposes.
fn strip_type(ty: Type) -> Type {
    match ty {
        Type::ResolvedPath { path, param_names, did, is_generic } => {
            Type::ResolvedPath { path: strip_path(&path), param_names, did, is_generic }
        }
        Type::Tuple(inner_tys) => {
            Type::Tuple(inner_tys.iter().map(|t| strip_type(t.clone())).collect())
        }
        Type::Slice(inner_ty) => Type::Slice(Box::new(strip_type(*inner_ty))),
        Type::Array(inner_ty, s) => Type::Array(Box::new(strip_type(*inner_ty)), s),
        Type::Unique(inner_ty) => Type::Unique(Box::new(strip_type(*inner_ty))),
        Type::RawPointer(m, inner_ty) => Type::RawPointer(m, Box::new(strip_type(*inner_ty))),
        Type::BorrowedRef { lifetime, mutability, type_ } => {
            Type::BorrowedRef { lifetime, mutability, type_: Box::new(strip_type(*type_)) }
        }
        Type::QPath { name, self_type, trait_ } => {
            Type::QPath {
                name,
                self_type: Box::new(strip_type(*self_type)), trait_: Box::new(strip_type(*trait_))
            }
        }
        // Leaf types (primitives, generics, etc.) have nothing to strip.
        _ => ty
    }
}
/// Returns a copy of `path` with every segment's generic arguments replaced
/// by an empty angle-bracketed list, keeping names and resolution intact.
fn strip_path(path: &Path) -> Path {
    let mut segments = Vec::with_capacity(path.segments.len());
    for seg in &path.segments {
        segments.push(PathSegment {
            name: seg.name.clone(),
            args: GenericArgs::AngleBracketed {
                args: Vec::new(),
                bindings: Vec::new(),
            },
        });
    }

    Path {
        global: path.global,
        res: path.res.clone(),
        segments,
    }
}
/// Renders a qualified path as `a::b::c`. Type-relative paths collapse to
/// just the final segment; the crate-root segment is skipped (its separator
/// is still emitted, matching the original rendering).
fn qpath_to_string(p: &hir::QPath) -> String {
    let segments = match *p {
        hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(),
        hir::QPath::Resolved(_, ref path) => &path.segments,
    };

    let mut out = String::new();
    for (i, seg) in segments.iter().enumerate() {
        if i > 0 {
            out.push_str("::");
        }
        if seg.ident.name != kw::PathRoot {
            out.push_str(&*seg.ident.as_str());
        }
    }
    out
}
impl Clean<String> for Ident {
    // An identifier cleans to its interned name's string form.
    #[inline]
    fn clean(&self, cx: &DocContext<'_>) -> String {
        self.name.clean(cx)
    }
}
impl Clean<String> for ast::Name {
    // Interned names clean to their owned string representation.
    #[inline]
    fn clean(&self, _: &DocContext<'_>) -> String {
        self.to_string()
    }
}
impl Clean<String> for InternedString {
    // Interned strings clean to their owned string representation.
    #[inline]
    fn clean(&self, _: &DocContext<'_>) -> String {
        self.to_string()
    }
}
/// Cleaned form of a `type` alias: the aliased type and its generics.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Typedef {
    pub type_: Type,
    pub generics: Generics,
}
impl Clean<Item> for doctree::Typedef {
    // Wraps a doctree type alias into a cleaned `Item`; the `false` flag on
    // `TypedefItem` marks it as not being an associated type.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: TypedefItem(Typedef {
                type_: self.ty.clean(cx),
                generics: self.gen.clean(cx),
            }, false),
        }
    }
}
/// Cleaned form of an existential (`impl Trait`) type item: its bounds and
/// generics.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Existential {
    pub bounds: Vec<GenericBound>,
    pub generics: Generics,
}
impl Clean<Item> for doctree::Existential {
    // Wraps a doctree existential type into a cleaned `Item`; the `false`
    // flag mirrors `TypedefItem`'s "not an associated item" marker.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: ExistentialItem(Existential {
                bounds: self.exist_ty.bounds.clean(cx),
                generics: self.exist_ty.generics.clean(cx),
            }, false),
        }
    }
}
/// Cleaned form of a bare function type (`unsafe extern "C" fn(...) -> ...`).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct BareFunctionDecl {
    pub unsafety: hir::Unsafety,
    // Higher-ranked lifetime parameters (`for<'a> ...`).
    pub generic_params: Vec<GenericParamDef>,
    pub decl: FnDecl,
    pub abi: Abi,
}
impl Clean<BareFunctionDecl> for hir::BareFnTy {
    // Cleans an HIR bare fn type; params and decl are cleaned inside
    // `enter_impl_trait` so any `impl Trait` in the signature is handled.
    fn clean(&self, cx: &DocContext<'_>) -> BareFunctionDecl {
        let (generic_params, decl) = enter_impl_trait(cx, || {
            (self.generic_params.clean(cx), (&*self.decl, &self.arg_names[..]).clean(cx))
        });
        BareFunctionDecl {
            unsafety: self.unsafety,
            abi: self.abi,
            decl,
            generic_params,
        }
    }
}
/// Cleaned form of a `static` item.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Static {
    pub type_: Type,
    pub mutability: Mutability,
    /// It's useful to have the value of a static documented, but I have no
    /// desire to represent expressions (that'd basically be all of the AST,
    /// which is huge!). So, have a string.
    pub expr: String,
}
impl Clean<Item> for doctree::Static {
    // Wraps a doctree static into a cleaned `Item`; the initializer is
    // pretty-printed into a string (see `Static::expr`).
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        debug!("cleaning static {}: {:?}", self.name.clean(cx), self);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: StaticItem(Static {
                type_: self.type_.clean(cx),
                mutability: self.mutability.clean(cx),
                expr: print_const_expr(cx, self.expr),
            }),
        }
    }
}
/// Cleaned form of a `const` item: its type and a pretty-printed value.
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub struct Constant {
    pub type_: Type,
    pub expr: String,
}
impl Clean<Item> for doctree::Constant {
    // Wraps a doctree constant into a cleaned `Item`, pretty-printing the
    // initializer expression.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: ConstantItem(Constant {
                type_: self.type_.clean(cx),
                expr: print_const_expr(cx, self.expr),
            }),
        }
    }
}
/// Cleaned mutability marker for pointers, references, and statics.
#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Hash)]
pub enum Mutability {
    Mutable,
    Immutable,
}
impl Clean<Mutability> for hir::Mutability {
    /// Direct one-to-one mapping from the HIR mutability marker.
    fn clean(&self, _: &DocContext<'_>) -> Mutability {
        match *self {
            hir::MutMutable => Mutable,
            hir::MutImmutable => Immutable,
        }
    }
}
/// Whether an impl is positive (`impl Trait for T`) or negative
/// (`impl !Trait for T`).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Debug, Hash)]
pub enum ImplPolarity {
    Positive,
    Negative,
}
impl Clean<ImplPolarity> for hir::ImplPolarity {
    /// Direct one-to-one mapping from the HIR impl polarity.
    fn clean(&self, _: &DocContext<'_>) -> ImplPolarity {
        match *self {
            hir::ImplPolarity::Positive => ImplPolarity::Positive,
            hir::ImplPolarity::Negative => ImplPolarity::Negative,
        }
    }
}
/// Cleaned form of an `impl` block.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Impl {
    pub unsafety: hir::Unsafety,
    pub generics: Generics,
    // Names of trait methods with default bodies not overridden here.
    pub provided_trait_methods: FxHashSet<String>,
    // `None` for inherent impls.
    pub trait_: Option<Type>,
    pub for_: Type,
    pub items: Vec<Item>,
    pub polarity: Option<ImplPolarity>,
    // `true` for auto-trait impls synthesized by rustdoc itself.
    pub synthetic: bool,
    // For synthesized blanket impls, the generic type the blanket covers.
    pub blanket_impl: Option<Type>,
}
// Synthesizes the auto-trait impls (Send/Sync/...) and blanket impls that
// apply to `ty`, chaining both finders' results into one iterator of items.
pub fn get_auto_trait_and_blanket_impls(
    cx: &DocContext<'tcx>,
    ty: Ty<'tcx>,
    param_env_def_id: DefId,
) -> impl Iterator<Item = Item> {
    AutoTraitFinder::new(cx).get_auto_trait_impls(ty, param_env_def_id).into_iter()
        .chain(BlanketImplFinder::new(cx).get_blanket_impls(ty, param_env_def_id))
}
impl Clean<Vec<Item>> for doctree::Impl {
    // Cleans an impl block into one or more items: the impl itself, plus —
    // for `Deref` impls — the inlined inherent impls of the deref target.
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        let mut ret = Vec::new();
        let trait_ = self.trait_.clean(cx);
        let items = self.items.clean(cx);

        // If this impl block is an implementation of the Deref trait, then we
        // need to try inlining the target's inherent impl blocks as well.
        if trait_.def_id() == cx.tcx.lang_items().deref_trait() {
            build_deref_target_impls(cx, &items, &mut ret);
        }

        // Trait methods with default bodies that this impl did not override.
        let provided = trait_.def_id().map(|did| {
            cx.tcx.provided_trait_methods(did)
                  .into_iter()
                  .map(|meth| meth.ident.to_string())
                  .collect()
        }).unwrap_or_default();

        ret.push(Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            visibility: self.vis.clean(cx),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            inner: ImplItem(Impl {
                unsafety: self.unsafety,
                generics: self.generics.clean(cx),
                provided_trait_methods: provided,
                trait_,
                for_: self.for_.clean(cx),
                items,
                polarity: Some(self.polarity.clean(cx)),
                synthetic: false,
                blanket_impl: None,
            })
        });
        ret
    }
}
// For each associated `type Target = ...` in a `Deref` impl's items, pulls in
// the impls of the target type so deref'd methods are documented: non-local
// resolved paths get their impls built directly, and primitives are resolved
// through the corresponding lang-item impl.
fn build_deref_target_impls(cx: &DocContext<'_>,
                            items: &[Item],
                            ret: &mut Vec<Item>) {
    use self::PrimitiveType::*;
    let tcx = cx.tcx;

    for item in items {
        // Only the associated `Target` typedef (flagged `true`) is relevant.
        let target = match item.inner {
            TypedefItem(ref t, true) => &t.type_,
            _ => continue,
        };
        let primitive = match *target {
            // Local targets are already documented; nothing to inline.
            ResolvedPath { did, .. } if did.is_local() => continue,
            ResolvedPath { did, .. } => {
                ret.extend(inline::build_impls(cx, did));
                continue
            }
            _ => match target.primitive_type() {
                Some(prim) => prim,
                None => continue,
            }
        };
        // Map the primitive to the crate that owns its inherent impl.
        let did = match primitive {
            Isize => tcx.lang_items().isize_impl(),
            I8 => tcx.lang_items().i8_impl(),
            I16 => tcx.lang_items().i16_impl(),
            I32 => tcx.lang_items().i32_impl(),
            I64 => tcx.lang_items().i64_impl(),
            I128 => tcx.lang_items().i128_impl(),
            Usize => tcx.lang_items().usize_impl(),
            U8 => tcx.lang_items().u8_impl(),
            U16 => tcx.lang_items().u16_impl(),
            U32 => tcx.lang_items().u32_impl(),
            U64 => tcx.lang_items().u64_impl(),
            U128 => tcx.lang_items().u128_impl(),
            F32 => tcx.lang_items().f32_impl(),
            F64 => tcx.lang_items().f64_impl(),
            Char => tcx.lang_items().char_impl(),
            Bool => None,
            Str => tcx.lang_items().str_impl(),
            Slice => tcx.lang_items().slice_impl(),
            // NOTE(review): arrays intentionally route through the slice impl
            // here — confirm against the lang-items available in this version.
            Array => tcx.lang_items().slice_impl(),
            Tuple => None,
            Unit => None,
            RawPointer => tcx.lang_items().const_ptr_impl(),
            Reference => None,
            Fn => None,
            Never => None,
            CVarArgs => tcx.lang_items().va_list(),
        };
        if let Some(did) = did {
            if !did.is_local() {
                inline::build_impl(cx, did, ret);
            }
        }
    }
}
impl Clean<Vec<Item>> for doctree::ExternCrate {
    // Cleans an `extern crate` declaration. With `#[doc(inline)]` on a public
    // declaration the crate root's items are inlined in place; otherwise a
    // single `ExternCrateItem` is produced.
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        let please_inline = self.vis.node.is_pub() && self.attrs.iter().any(|a| {
            a.check_name(sym::doc) && match a.meta_item_list() {
                Some(l) => attr::list_contains_name(&l, sym::inline),
                None => false,
            }
        });

        if please_inline {
            let mut visited = FxHashSet::default();

            // The crate root module of the external crate.
            let res = Res::Def(
                DefKind::Mod,
                DefId {
                    krate: self.cnum,
                    index: CRATE_DEF_INDEX,
                },
            );

            if let Some(items) = inline::try_inline(cx, res, self.name, &mut visited) {
                return items;
            }
        }

        vec![Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: DefId { krate: self.cnum, index: CRATE_DEF_INDEX },
            visibility: self.vis.clean(cx),
            stability: None,
            deprecation: None,
            inner: ExternCrateItem(self.name.clean(cx), self.path.clone())
        }]
    }
}
impl Clean<Vec<Item>> for doctree::Import {
    // Cleans a `use` declaration, deciding between inlining the target's
    // documentation and emitting a plain `ImportItem`.
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        // We consider inlining the documentation of `pub use` statements, but we
        // forcefully don't inline if this is not public or if the
        // #[doc(no_inline)] attribute is present.
        // Don't inline doc(hidden) imports so they can be stripped at a later stage.
        let mut denied = !self.vis.node.is_pub() || self.attrs.iter().any(|a| {
            a.check_name(sym::doc) && match a.meta_item_list() {
                Some(l) => attr::list_contains_name(&l, sym::no_inline) ||
                           attr::list_contains_name(&l, sym::hidden),
                None => false,
            }
        });
        // Also check whether imports were asked to be inlined, in case we're trying to re-export a
        // crate in Rust 2018+
        let please_inline = self.attrs.lists(sym::doc).has_word(sym::inline);
        let path = self.path.clean(cx);
        let inner = if self.glob {
            // `use foo::*`: inline the glob target's items when permitted.
            if !denied {
                let mut visited = FxHashSet::default();
                if let Some(items) = inline::try_inline_glob(cx, path.res, &mut visited) {
                    return items;
                }
            }

            Import::Glob(resolve_use_source(cx, path))
        } else {
            let name = self.name;
            if !please_inline {
                match path.res {
                    Res::Def(DefKind::Mod, did) => {
                        if !did.is_local() && did.index == CRATE_DEF_INDEX {
                            // if we're `pub use`ing an extern crate root, don't inline it unless we
                            // were specifically asked for it
                            denied = true;
                        }
                    }
                    _ => {}
                }
            }
            if !denied {
                let mut visited = FxHashSet::default();
                if let Some(items) = inline::try_inline(cx, path.res, name, &mut visited) {
                    return items;
                }
            }
            Import::Simple(name.clean(cx), resolve_use_source(cx, path))
        };

        vec![Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(ast::CRATE_NODE_ID),
            visibility: self.vis.clean(cx),
            stability: None,
            deprecation: None,
            inner: ImportItem(inner)
        }]
    }
}
/// A cleaned `use` statement, either a single-name or a glob import.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum Import {
    // use source as str;
    Simple(String, ImportSource),
    // use source::*;
    Glob(ImportSource)
}
/// The target of a `use` statement: its path plus the resolved `DefId`
/// (if the path resolves to an actual definition).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ImportSource {
    pub path: Path,
    pub did: Option<DefId>,
}
impl Clean<Vec<Item>> for hir::ForeignMod {
    /// Cleans every item inside an `extern { ... }` block, then stamps the
    /// block's declared ABI onto each foreign function so it is rendered
    /// with the correct ABI (overriding the `Abi::Rust` placeholder).
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        let mut cleaned = self.items.clean(cx);
        cleaned.iter_mut().for_each(|item| {
            if let ForeignFunctionItem(ref mut func) = item.inner {
                func.header.abi = self.abi;
            }
        });
        cleaned
    }
}
impl Clean<Item> for hir::ForeignItem {
    /// Cleans a single item declared inside an `extern` block
    /// (a foreign function, static, or foreign type).
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.node {
            hir::ForeignItemKind::Fn(ref decl, ref names, ref generics) => {
                let (generics, decl) = enter_impl_trait(cx, || {
                    (generics.clean(cx), (&**decl, &names[..]).clean(cx))
                });
                let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
                ForeignFunctionItem(Function {
                    decl,
                    generics,
                    header: hir::FnHeader {
                        // Calling a foreign function is always unsafe.
                        unsafety: hir::Unsafety::Unsafe,
                        // Placeholder; the real ABI is filled in by the
                        // `Clean` impl for `hir::ForeignMod`.
                        abi: Abi::Rust,
                        constness: hir::Constness::NotConst,
                        asyncness: hir::IsAsync::NotAsync,
                    },
                    all_types,
                    ret_types,
                })
            }
            hir::ForeignItemKind::Static(ref ty, mutbl) => {
                ForeignStaticItem(Static {
                    type_: ty.clean(cx),
                    mutability: mutbl.clean(cx),
                    // Foreign statics have no initializer expression.
                    expr: String::new(),
                })
            }
            hir::ForeignItemKind::Type => {
                ForeignTypeItem
            }
        };
        let local_did = cx.tcx.hir().local_def_id_from_hir_id(self.hir_id);
        Item {
            name: Some(self.ident.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            def_id: local_did,
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, local_did),
            deprecation: get_deprecation(cx, local_did),
            inner,
        }
    }
}
// Utilities
/// Renders a syntactic element back to the source text it covers.
pub trait ToSource {
    fn to_src(&self, cx: &DocContext<'_>) -> String;
}
impl ToSource for syntax_pos::Span {
    /// Returns the source text covered by this span, or an empty string when
    /// the snippet cannot be extracted from the source map.
    fn to_src(&self, cx: &DocContext<'_>) -> String {
        debug!("converting span {:?} to snippet", self.clean(cx));
        let snippet = cx.sess()
            .source_map()
            .span_to_snippet(*self)
            .unwrap_or_else(|_| String::new());
        debug!("got snippet {}", snippet);
        snippet
    }
}
/// Derives a display name from an HIR pattern, used for e.g. rendering
/// function argument names. Patterns with no usable name (wildcards,
/// literals) get a placeholder string instead.
fn name_from_pat(p: &hir::Pat) -> String {
    use rustc::hir::*;
    debug!("Trying to get a name from pattern: {:?}", p);
    match p.node {
        PatKind::Wild => "_".to_string(),
        PatKind::Binding(_, _, ident, _) => ident.to_string(),
        PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
        // Struct patterns render as `Name { field: sub, .. }`, recursing into
        // each field's sub-pattern.
        PatKind::Struct(ref name, ref fields, etc) => {
            format!("{} {{ {}{} }}", qpath_to_string(name),
                fields.iter().map(|&Spanned { node: ref fp, .. }|
                    format!("{}: {}", fp.ident, name_from_pat(&*fp.pat)))
                    .collect::<Vec<String>>().join(", "),
                if etc { ", .." } else { "" }
            )
        }
        PatKind::Tuple(ref elts, _) => format!("({})", elts.iter().map(|p| name_from_pat(&**p))
                                            .collect::<Vec<String>>().join(", ")),
        PatKind::Box(ref p) => name_from_pat(&**p),
        PatKind::Ref(ref p, _) => name_from_pat(&**p),
        PatKind::Lit(..) => {
            // A literal carries no name; warn and fall back to a placeholder.
            warn!("tried to get argument name from PatKind::Lit, \
                  which is silly in function arguments");
            "()".to_string()
        },
        PatKind::Range(..) => panic!("tried to get argument name from PatKind::Range, \
                              which is not allowed in function arguments"),
        PatKind::Slice(ref begin, ref mid, ref end) => {
            // `[a, b, ..rest, y, z]` — render prefix, optional rest binding,
            // then suffix, comma-separated.
            let begin = begin.iter().map(|p| name_from_pat(&**p));
            let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
            let end = end.iter().map(|p| name_from_pat(&**p));
            format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().join(", "))
        },
    }
}
/// Renders a type-level constant (e.g. an array length) as a string.
///
/// Unevaluated constants are pretty-printed from their defining expression;
/// evaluated ones are stringified with the `usize` suffix stripped.
fn print_const(cx: &DocContext<'_>, n: &ty::Const<'_>) -> String {
    match n.val {
        ConstValue::Unevaluated(def_id, _) => {
            if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(def_id) {
                // Local constant: pretty-print its HIR body directly.
                print_const_expr(cx, cx.tcx.hir().body_owned_by(hir_id))
            } else {
                // Cross-crate constant: fall back to the inlined rendering.
                inline::print_inlined_const(cx, def_id)
            }
        },
        _ => {
            let mut s = n.to_string();
            // array lengths are obviously usize
            // Strip a trailing "usize" suffix, and the ": " separator that
            // may precede it (e.g. "3: usize" -> "3").
            if s.ends_with("usize") {
                let n = s.len() - "usize".len();
                s.truncate(n);
                if s.ends_with(": ") {
                    let n = s.len() - ": ".len();
                    s.truncate(n);
                }
            }
            s
        },
    }
}
/// Pretty-prints the HIR body of a constant expression.
fn print_const_expr(cx: &DocContext<'_>, body: hir::BodyId) -> String {
    cx.tcx.hir().hir_to_pretty_string(body.hir_id)
}
/// Given a type Path, resolve it to a Type using the TyCtxt.
///
/// Primitive paths become `Primitive`, `Self`/type-parameter paths become
/// `Generic`, and everything else becomes a `ResolvedPath` whose target is
/// registered via [`register_res`].
fn resolve_type(cx: &DocContext<'_>,
                path: Path,
                id: hir::HirId) -> Type {
    if id == hir::DUMMY_HIR_ID {
        debug!("resolve_type({:?})", path);
    } else {
        debug!("resolve_type({:?},{:?})", path, id);
    }
    let is_generic = match path.res {
        // Primitive types resolve directly; no path registration needed.
        Res::PrimTy(p) => match p {
            hir::Str => return Primitive(PrimitiveType::Str),
            hir::Bool => return Primitive(PrimitiveType::Bool),
            hir::Char => return Primitive(PrimitiveType::Char),
            hir::Int(int_ty) => return Primitive(int_ty.into()),
            hir::Uint(uint_ty) => return Primitive(uint_ty.into()),
            hir::Float(float_ty) => return Primitive(float_ty.into()),
        },
        // A bare `Self` (single segment) is rendered as a generic.
        Res::SelfTy(..) if path.segments.len() == 1 => {
            return Generic(kw::SelfUpper.to_string());
        }
        // A bare type parameter is likewise rendered as a generic.
        Res::Def(DefKind::TyParam, _) if path.segments.len() == 1 => {
            return Generic(format!("{:#}", path));
        }
        // Qualified `Self`/type-parameter/associated-type paths stay as
        // resolved paths but are flagged generic.
        Res::SelfTy(..)
        | Res::Def(DefKind::TyParam, _)
        | Res::Def(DefKind::AssocTy, _) => true,
        _ => false,
    };
    let did = register_res(&*cx, path.res);
    ResolvedPath { path: path, param_names: None, did: did, is_generic: is_generic }
}
/// Records a resolution's target in rustdoc's cross-crate tables and returns
/// its `DefId`. Extern definitions get their fully-qualified name (and, for
/// traits, their trait data) registered for later rendering.
pub fn register_res(cx: &DocContext<'_>, res: Res) -> DefId {
    debug!("register_res({:?})", res);
    let (did, kind) = match res {
        Res::Def(DefKind::Fn, i) => (i, TypeKind::Function),
        Res::Def(DefKind::TyAlias, i) => (i, TypeKind::Typedef),
        Res::Def(DefKind::Enum, i) => (i, TypeKind::Enum),
        Res::Def(DefKind::Trait, i) => (i, TypeKind::Trait),
        Res::Def(DefKind::Struct, i) => (i, TypeKind::Struct),
        Res::Def(DefKind::Union, i) => (i, TypeKind::Union),
        Res::Def(DefKind::Mod, i) => (i, TypeKind::Module),
        Res::Def(DefKind::ForeignTy, i) => (i, TypeKind::Foreign),
        Res::Def(DefKind::Const, i) => (i, TypeKind::Const),
        Res::Def(DefKind::Static, i) => (i, TypeKind::Static),
        // A variant is registered under its parent enum.
        Res::Def(DefKind::Variant, i) => (cx.tcx.parent(i).expect("cannot get parent def id"),
                            TypeKind::Enum),
        Res::Def(DefKind::Macro(mac_kind), i) => match mac_kind {
            MacroKind::Bang => (i, TypeKind::Macro),
            MacroKind::Attr => (i, TypeKind::Attr),
            MacroKind::Derive => (i, TypeKind::Derive),
            MacroKind::ProcMacroStub => unreachable!(),
        },
        Res::Def(DefKind::TraitAlias, i) => (i, TypeKind::TraitAlias),
        Res::SelfTy(Some(def_id), _) => (def_id, TypeKind::Trait),
        Res::SelfTy(_, Some(impl_def_id)) => return impl_def_id,
        _ => return res.def_id()
    };
    // Local items need no cross-crate registration.
    if did.is_local() { return did }
    inline::record_extern_fqn(cx, did, kind);
    if let TypeKind::Trait = kind {
        inline::record_extern_trait(cx, did);
    }
    did
}
/// Builds an `ImportSource` for a `use` item, registering the target's
/// `DefId` (when the resolution actually points at a definition — e.g.
/// primitive types have none).
fn resolve_use_source(cx: &DocContext<'_>, path: Path) -> ImportSource {
    ImportSource {
        // Idiomatic rewrite of the original `if is_none() { None } else
        // { Some(..) }` — register only when a `DefId` exists.
        did: path.res.opt_def_id().map(|_| register_res(cx, path.res)),
        path,
    }
}
/// A cleaned `macro_rules!` macro: its rendered source plus, for re-exported
/// macros, the crate it originally came from.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Macro {
    pub source: String,
    pub imported_from: Option<String>,
}
impl Clean<Item> for doctree::Macro {
    /// Cleans a `macro_rules!` definition, rendering a stub source where each
    /// matcher arm's body is elided as `{ ... }`.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let name = self.name.clean(cx);
        Item {
            name: Some(name.clone()),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            // Exported macros are always rendered as public.
            visibility: Some(Public),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: self.def_id,
            inner: MacroItem(Macro {
                source: format!("macro_rules! {} {{\n{}}}",
                                name,
                                self.matchers.iter().map(|span| {
                                    format!("    {} => {{ ... }};\n", span.to_src(cx))
                                }).collect::<String>()),
                imported_from: self.imported_from.clean(cx),
            }),
        }
    }
}
/// A cleaned procedural macro: its kind (bang/attribute/derive) and, for
/// derives, its helper attribute names.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ProcMacro {
    pub kind: MacroKind,
    pub helpers: Vec<String>,
}
impl Clean<Item> for doctree::ProcMacro {
    /// Cleans a procedural macro definition into a `ProcMacroItem`.
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            // Proc macros are always rendered as public.
            visibility: Some(Public),
            stability: self.stab.clean(cx),
            deprecation: self.depr.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_hir_id(self.id),
            inner: ProcMacroItem(ProcMacro {
                kind: self.kind,
                helpers: self.helpers.clean(cx),
            }),
        }
    }
}
/// Cleaned stability information for an item (`#[stable]`/`#[unstable]`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Stability {
    pub level: stability::StabilityLevel,
    pub feature: Option<String>,
    pub since: String,
    pub deprecation: Option<Deprecation>,
    pub unstable_reason: Option<String>,
    pub issue: Option<u32>,
}
/// Cleaned deprecation information (`#[deprecated]` / `#[rustc_deprecated]`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Deprecation {
    pub since: Option<String>,
    pub note: Option<String>,
}
impl Clean<Stability> for attr::Stability {
    /// Converts a parsed stability attribute into the cleaned form, mapping
    /// empty strings to `None` and extracting level-specific fields.
    fn clean(&self, _: &DocContext<'_>) -> Stability {
        Stability {
            level: stability::StabilityLevel::from_attr_level(&self.level),
            // Empty feature names are normalized to `None`.
            feature: Some(self.feature.to_string()).filter(|f| !f.is_empty()),
            // `since` only exists for stable items; unstable ones get "".
            since: match self.level {
                attr::Stable {ref since} => since.to_string(),
                _ => String::new(),
            },
            deprecation: self.rustc_depr.as_ref().map(|d| {
                Deprecation {
                    note: Some(d.reason.to_string()).filter(|r| !r.is_empty()),
                    since: Some(d.since.to_string()).filter(|d| !d.is_empty()),
                }
            }),
            // Reason and tracking issue only exist on unstable items.
            unstable_reason: match self.level {
                attr::Unstable { reason: Some(ref reason), .. } => Some(reason.to_string()),
                _ => None,
            },
            issue: match self.level {
                attr::Unstable {issue, ..} => Some(issue),
                _ => None,
            }
        }
    }
}
/// Forwarding impl so `&attr::Stability` can also be cleaned.
impl<'a> Clean<Stability> for &'a attr::Stability {
    fn clean(&self, dc: &DocContext<'_>) -> Stability {
        (**self).clean(dc)
    }
}
impl Clean<Deprecation> for attr::Deprecation {
    /// Converts a parsed deprecation attribute, normalizing empty `since`
    /// and `note` strings to `None`.
    fn clean(&self, _: &DocContext<'_>) -> Deprecation {
        Deprecation {
            since: self.since.map(|s| s.to_string()).filter(|s| !s.is_empty()),
            note: self.note.map(|n| n.to_string()).filter(|n| !n.is_empty()),
        }
    }
}
/// A type binding on an associated type (e.g., `A = Bar` in `Foo<A = Bar>` or
/// `A: Send + Sync` in `Foo<A: Send + Sync>`).
#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)]
pub struct TypeBinding {
    pub name: String,
    pub kind: TypeBindingKind,
}
/// Whether an associated-type binding equates the type (`A = Bar`) or
/// constrains it with bounds (`A: Send + Sync`).
#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)]
pub enum TypeBindingKind {
    Equality {
        ty: Type,
    },
    Constraint {
        bounds: Vec<GenericBound>,
    },
}
impl TypeBinding {
    /// Returns the bound type of an equality binding (`A = T`).
    ///
    /// Panics on a constraint binding; callers use this only where
    /// parenthesized generic args guarantee an equality binding.
    pub fn ty(&self) -> &Type {
        match self.kind {
            TypeBindingKind::Equality { ref ty } => ty,
            _ => panic!("expected equality type binding for parenthesized generic args"),
        }
    }
}
impl Clean<TypeBinding> for hir::TypeBinding {
    /// Cleans an HIR associated-type binding: its name plus its kind.
    fn clean(&self, cx: &DocContext<'_>) -> TypeBinding {
        TypeBinding {
            name: self.ident.name.clean(cx),
            kind: self.kind.clean(cx),
        }
    }
}
impl Clean<TypeBindingKind> for hir::TypeBindingKind {
    /// Converts an HIR binding kind (`A = T` or `A: Bound + ...`) into its
    /// cleaned counterpart, cleaning the nested type or bounds.
    fn clean(&self, cx: &DocContext<'_>) -> TypeBindingKind {
        match *self {
            hir::TypeBindingKind::Equality { ref ty } => TypeBindingKind::Equality {
                ty: ty.clean(cx),
            },
            hir::TypeBindingKind::Constraint { ref bounds } => TypeBindingKind::Constraint {
                bounds: bounds.iter().map(|bound| bound.clean(cx)).collect(),
            },
        }
    }
}
/// Builds the full path of a definition as a list of segments: the crate
/// name (or the supplied override) followed by the item's relative path,
/// with empty segments (extern blocks) dropped.
pub fn def_id_to_path(
    cx: &DocContext<'_>,
    did: DefId,
    name: Option<String>
) -> Vec<String> {
    // Prefer the caller-supplied crate name; fall back to the real one.
    let crate_name = name.unwrap_or_else(|| cx.tcx.crate_name(did.krate).to_string());
    let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| {
        // Extern blocks have an empty name; filter those segments out.
        Some(elem.data.to_string()).filter(|segment| !segment.is_empty())
    });
    once(crate_name).chain(relative).collect()
}
/// Runs `f` with an empty `impl Trait` bounds table, restoring the previous
/// table afterwards. Asserts that `f` consumed every bound it registered.
pub fn enter_impl_trait<F, R>(cx: &DocContext<'_>, f: F) -> R
where
    F: FnOnce() -> R,
{
    // Stash the current bounds and start `f` with a clean slate.
    let saved = mem::replace(&mut *cx.impl_trait_bounds.borrow_mut(), Default::default());
    let result = f();
    // `f` must have drained everything it added before we restore.
    assert!(cx.impl_trait_bounds.borrow().is_empty());
    *cx.impl_trait_bounds.borrow_mut() = saved;
    result
}
// Start of code copied from rust-clippy
/// Resolves a path of symbols to a `DefId` inside the local crate by walking
/// module items from the crate root. Returns `None` when a segment cannot
/// be found.
pub fn path_to_def_local(tcx: TyCtxt<'_, '_, '_>, path: &[Symbol]) -> Option<DefId> {
    let krate = tcx.hir().krate();
    let mut items = krate.module.item_ids.clone();
    let mut path_it = path.iter().peekable();
    loop {
        // Exhausting the path without a final match yields `None` via `?`.
        let segment = path_it.next()?;
        for item_id in mem::replace(&mut items, HirVec::new()).iter() {
            let item = tcx.hir().expect_item_by_hir_id(item_id.id);
            if item.ident.name == *segment {
                if path_it.peek().is_none() {
                    return Some(tcx.hir().local_def_id_from_hir_id(item_id.id))
                }
                items = match &item.node {
                    &hir::ItemKind::Mod(ref m) => m.item_ids.clone(),
                    // BUG FIX: the original message was the single literal
                    // `"Unexpected item {:?} in path {:?} path"` with no
                    // format arguments, so the `{:?}` placeholders were
                    // printed verbatim (plus a stray trailing "path").
                    // Supply the actual item and path.
                    _ => panic!("Unexpected item {:?} in path {:?}", item.ident, path)
                };
                break;
            }
        }
    }
}
/// Resolves a path of symbols to a `DefId` across crates: the first segment
/// names the crate, the remaining segments are walked through its item
/// children. Note that only trait definitions are returned for the final
/// segment; anything else yields `None`.
pub fn path_to_def(tcx: TyCtxt<'_, '_, '_>, path: &[Symbol]) -> Option<DefId> {
    let crates = tcx.crates();
    // Find the crate whose name matches the first path segment.
    let krate = crates
        .iter()
        .find(|&&krate| tcx.crate_name(krate) == path[0]);
    if let Some(krate) = krate {
        let krate = DefId {
            krate: *krate,
            index: CRATE_DEF_INDEX,
        };
        let mut items = tcx.item_children(krate);
        let mut path_it = path.iter().skip(1).peekable();
        loop {
            // Exhausting the path without a final match yields `None` via `?`.
            let segment = path_it.next()?;
            for item in mem::replace(&mut items, &[]).iter() {
                if item.ident.name == *segment {
                    if path_it.peek().is_none() {
                        return match item.res {
                            def::Res::Def(DefKind::Trait, did) => Some(did),
                            _ => None,
                        }
                    }
                    // Descend into the matched item's children.
                    items = tcx.item_children(item.res.def_id());
                    break;
                }
            }
        }
    } else {
        None
    }
}
// End of code copied from rust-clippy
/// A node in the region-outlives graph: either a named region or an
/// inference variable.
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
enum RegionTarget<'tcx> {
    Region(Region<'tcx>),
    RegionVid(RegionVid)
}
/// Outlives edges attached to one region: regions known to be larger and
/// regions known to be smaller than it.
#[derive(Default, Debug, Clone)]
struct RegionDeps<'tcx> {
    larger: FxHashSet<RegionTarget<'tcx>>,
    smaller: FxHashSet<RegionTarget<'tcx>>
}
/// A flattened generic bound used for comparing bounds: either a trait bound
/// (path segments, nested bounds, generic params, modifier) or a lifetime.
#[derive(Eq, PartialEq, Hash, Debug)]
enum SimpleBound {
    TraitBound(Vec<PathSegment>, Vec<SimpleBound>, Vec<GenericParamDef>, hir::TraitBoundModifier),
    Outlives(Lifetime),
}
impl From<GenericBound> for SimpleBound {
    /// Flattens a cleaned `GenericBound` into the simplified comparison form.
    ///
    /// Panics when a trait bound's type is not a resolved path — no other
    /// shape is expected here.
    fn from(bound: GenericBound) -> Self {
        // The clone keeps `bound` intact for the panic message below, since
        // the match consumes the cloned contents.
        match bound.clone() {
            GenericBound::Outlives(lifetime) => SimpleBound::Outlives(lifetime),
            GenericBound::TraitBound(poly_trait, modifier) => match poly_trait.trait_ {
                Type::ResolvedPath { path, param_names, .. } => {
                    let nested = param_names.map_or_else(Vec::new, |params| {
                        params.iter()
                              .map(|p| SimpleBound::from(p.clone()))
                              .collect()
                    });
                    SimpleBound::TraitBound(
                        path.segments,
                        nested,
                        poly_trait.generic_params,
                        modifier,
                    )
                }
                _ => panic!("Unexpected bound {:?}", bound),
            }
        }
    }
}
| 35.838297 | 100 | 0.496396 |
ef880e3e35c530e8a52ad75f4cb7dde14763f4a8 | 3,162 | use crate::prelude::*;
/// ASCII template for the fortress prefab, with its width and height.
/// In the template, `-` is floor, `#` is wall, and `M` is a monster spawn
/// (see the character match in `apply_prefab`).
const FORTRESS: (&str, i32, i32) = (
    "
------------
---######---
---#----#---
---#-M--#---
-###----###-
--M------M--
-###----###-
---#----#---
---#----#---
---######---
------------
",
    12,
    11,
);
/// Tries to stamp the `FORTRESS` prefab onto an existing map, at a random
/// spot that is reachable from the player start but not too close to it.
/// Gives up silently after 10 failed placement attempts.
pub fn apply_prefab(mb: &mut MapBuilder, rng: &mut RandomNumberGenerator) {
    let mut placement = None;
    // Dijkstra map of walking distance from the player's start tile; used to
    // test reachability of candidate fortress locations.
    let dijkstra_map = DijkstraMap::new(
        SCREEN_WIDTH,
        SCREEN_HEIGHT,
        &[mb.map.point2d_to_index(mb.player_start)],
        &mb.map,
        1024.0,
    );
    let mut attempts = 0;
    // Keep trying random placements until one succeeds or 10 attempts fail.
    while placement.is_none() && attempts < 10 {
        // Pick a random rectangle of the fortress's size that fits on screen.
        let dimensions = Rect::with_size(
            rng.range(0, SCREEN_WIDTH - FORTRESS.1),
            rng.range(0, SCREEN_HEIGHT - FORTRESS.2),
            FORTRESS.1,
            FORTRESS.2,
        );
        // Accept the rectangle if SOME tile inside it is reachable (distance
        // under 2000), far enough from the player (over 20), and not the
        // amulet tile.
        // NOTE(review): `can_place` is set when ANY single tile qualifies; if
        // the intent is that EVERY tile must qualify, this check should
        // accumulate with `&&` instead — confirm against the design.
        let mut can_place = false;
        dimensions.for_each(|pt| {
            let idx = mb.map.point2d_to_index(pt);
            let distance = dijkstra_map.map[idx];
            if distance < 2000.0 && distance > 20.0 && mb.amulet_start != pt {
                can_place = true;
            }
        });
        // Record the placement and evict any monsters already inside the
        // chosen rectangle.
        if can_place {
            placement = Some(Point::new(dimensions.x1, dimensions.y1));
            let points = dimensions.point_set();
            mb.monster_spawns.retain(|pt| !points.contains(pt));
        }
        attempts += 1;
    }
    if let Some(placement) = placement {
        let string_vec: Vec<char> = FORTRESS
            .0
            .chars() // create a character iterator
            .filter(|a| *a != '\r' && *a != '\n') // remove new line characters
            .collect(); // collect back into string
        // Walk the template row-major with a running index instead of
        // recomputing it from (x, y).
        let mut i = 0;
        for ty in placement.y..placement.y + FORTRESS.2 {
            for tx in placement.x..placement.x + FORTRESS.1 {
                let idx = map_idx(tx, ty);
                let c = string_vec[i];
                // Translate each template character into map contents.
                match c {
                    'M' => {
                        mb.map.tiles[idx] = TileType::Floor;
                        mb.monster_spawns.push(Point::new(tx, ty));
                    }
                    '-' => mb.map.tiles[idx] = TileType::Floor,
                    '#' => mb.map.tiles[idx] = TileType::Wall,
                    _ => println!("No idea what to do with [{}]", c),
                }
                i += 1;
            }
        }
    }
}
| 32.9375 | 90 | 0.515497 |
fb04935c5cb80193ff315203423a97a50c6c9c46 | 7,598 | //! Common color structures used in vga programming.
use num_enum::TryFromPrimitive;
/// Represents the size of the vga palette in bytes
/// (256 colors × 3 channels, as laid out in `DEFAULT_PALETTE`).
pub const PALETTE_SIZE: usize = 768;
/// Represents a 16 bit color used for vga display.
/// Each variant is a 4-bit value (0x0–0xF), so two of them pack into one
/// attribute byte (see `TextModeColor`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, TryFromPrimitive)]
#[repr(u8)]
pub enum Color16 {
    /// Represents the color `Black (0x0)`.
    Black = 0x0,
    /// Represents the color `Blue (0x1)`.
    Blue = 0x1,
    /// Represents the color `Green (0x2)`.
    Green = 0x2,
    /// Represents the color `Cyan (0x3)`.
    Cyan = 0x3,
    /// Represents the color `Red (0x4)`.
    Red = 0x4,
    /// Represents the color `Magenta (0x5)`.
    Magenta = 0x5,
    /// Represents the color `Brown (0x6)`.
    Brown = 0x6,
    /// Represents the color `LightGrey (0x7)`.
    LightGrey = 0x7,
    /// Represents the color `DarkGrey (0x8)`.
    DarkGrey = 0x8,
    /// Represents the color `LightBlue (0x9)`.
    LightBlue = 0x9,
    /// Represents the color `LightGreen (0xA)`.
    LightGreen = 0xA,
    /// Represents the color `LightCyan (0xB)`.
    LightCyan = 0xB,
    /// Represents the color `LightRed (0xC)`.
    LightRed = 0xC,
    /// Represents the color `Pink (0xD)`.
    Pink = 0xD,
    /// Represents the color `Yellow (0xE)`.
    Yellow = 0xE,
    /// Represents the color `White (0xF)`.
    White = 0xF,
}
/// Extracts the raw 4-bit color value.
impl From<Color16> for u8 {
    fn from(color: Color16) -> u8 {
        color as u8
    }
}
/// Represents a color for vga text modes.
/// The wrapped attribute byte stores the background in the high nibble and
/// the foreground in the low nibble (see `TextModeColor::new`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct TextModeColor(u8);
impl TextModeColor {
    /// Returns a new `TextModeColor` given the specified `foreground`
    /// and `background` color. Background occupies the high nibble,
    /// foreground the low nibble.
    pub const fn new(foreground: Color16, background: Color16) -> TextModeColor {
        TextModeColor((background as u8) << 4 | (foreground as u8))
    }
    /// Sets the background color given the specified `background`,
    /// preserving the foreground nibble.
    pub fn set_background(&mut self, background: Color16) {
        self.0 = (background as u8) << 4 | (self.0 & 0x0F);
    }
    /// Sets the foreground color given the specified `foreground`,
    /// preserving the background nibble.
    ///
    /// BUG FIX: this previously assigned `foreground as u8` directly,
    /// which zeroed the background bits in the high nibble.
    pub fn set_foreground(&mut self, foreground: Color16) {
        self.0 = (self.0 & 0xF0) | (foreground as u8);
    }
}
/// Represents the default vga 256 color palette.
/// Laid out as 256 consecutive (red, green, blue) triples; every channel
/// value here is at most 0x3F (6 bits per channel).
pub const DEFAULT_PALETTE: [u8; PALETTE_SIZE] = [
    0x0, 0x0, 0x0, 0x0, 0x0, 0x2A, 0x0, 0x2A, 0x0, 0x0, 0x2A, 0x2A, 0x2A, 0x0, 0x0, 0x2A, 0x0,
    0x2A, 0x2A, 0x2A, 0x0, 0x2A, 0x2A, 0x2A, 0x0, 0x0, 0x15, 0x0, 0x0, 0x3F, 0x0, 0x2A, 0x15, 0x0,
    0x2A, 0x3F, 0x2A, 0x0, 0x15, 0x2A, 0x0, 0x3F, 0x2A, 0x2A, 0x15, 0x2A, 0x2A, 0x3F, 0x0, 0x15,
    0x0, 0x0, 0x15, 0x2A, 0x0, 0x3F, 0x0, 0x0, 0x3F, 0x2A, 0x2A, 0x15, 0x0, 0x2A, 0x15, 0x2A, 0x2A,
    0x3F, 0x0, 0x2A, 0x3F, 0x2A, 0x0, 0x15, 0x15, 0x0, 0x15, 0x3F, 0x0, 0x3F, 0x15, 0x0, 0x3F,
    0x3F, 0x2A, 0x15, 0x15, 0x2A, 0x15, 0x3F, 0x2A, 0x3F, 0x15, 0x2A, 0x3F, 0x3F, 0x15, 0x0, 0x0,
    0x15, 0x0, 0x2A, 0x15, 0x2A, 0x0, 0x15, 0x2A, 0x2A, 0x3F, 0x0, 0x0, 0x3F, 0x0, 0x2A, 0x3F,
    0x2A, 0x0, 0x3F, 0x2A, 0x2A, 0x15, 0x0, 0x15, 0x15, 0x0, 0x3F, 0x15, 0x2A, 0x15, 0x15, 0x2A,
    0x3F, 0x3F, 0x0, 0x15, 0x3F, 0x0, 0x3F, 0x3F, 0x2A, 0x15, 0x3F, 0x2A, 0x3F, 0x15, 0x15, 0x0,
    0x15, 0x15, 0x2A, 0x15, 0x3F, 0x0, 0x15, 0x3F, 0x2A, 0x3F, 0x15, 0x0, 0x3F, 0x15, 0x2A, 0x3F,
    0x3F, 0x0, 0x3F, 0x3F, 0x2A, 0x15, 0x15, 0x15, 0x15, 0x15, 0x3F, 0x15, 0x3F, 0x15, 0x15, 0x3F,
    0x3F, 0x3F, 0x15, 0x15, 0x3F, 0x15, 0x3F, 0x3F, 0x3F, 0x15, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
    0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
];
#[cfg(test)]
mod test {
    use super::*;
    // NOTE(review): each test only inspects the nibble being set; neither
    // verifies that the OTHER nibble is preserved across the setter call.
    #[test]
    fn test_set_foreground() {
        let mut color = TextModeColor::new(Color16::Yellow, Color16::Black);
        color.set_foreground(Color16::Red);
        assert_eq!(color.0 & 0x0F, Color16::Red as u8);
    }
    #[test]
    fn test_set_background() {
        let mut color = TextModeColor::new(Color16::Yellow, Color16::Black);
        color.set_background(Color16::DarkGrey);
        assert_eq!(color.0 >> 4, Color16::DarkGrey as u8);
    }
}
| 53.132867 | 99 | 0.633061 |
03221b19cd4ba09be7435c20cbf6d19f31b63f32 | 13,768 | //! Represent the Spotify API.
use crate::node_fetch::{fetch, FetchMethod};
use base64::encode;
use js_sys::{Date, Promise};
use std::collections::HashMap;
use std::rc::Rc;
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::future_to_promise;
use wasm_bindgen_futures::JsFuture;
use web_sys::console;
/// The Spotify API access token needs to be refreshed after 50 minutes
/// (3000 seconds).
const ACCESS_TOKEN_LIFETIME: f64 = 3000.0;
/// Path to the Homebridge config file.
const HOMEBRIDGE_CONFIG: &str = "~/.homebridge/config.json"; // todo: tilde not supported
// Raw bindings to Node.js: `require("fs")` plus the synchronous read/write
// helpers on the returned module object.
#[wasm_bindgen]
extern "C" {
    pub type Fs;
    pub fn require(name: &str) -> Fs;
    #[wasm_bindgen(method, js_name = readFileSync)]
    pub fn read_file(this: &Fs, file: &str) -> String;
    #[wasm_bindgen(method, js_name = writeFileSync)]
    pub fn write_file(this: &Fs, file: &str, data: String) -> String;
}
#[derive(Serialize, Deserialize)]
/// Represents the response when making an authorization request
/// (the token endpoint's JSON payload).
struct SpotifyAuthorization {
    pub access_token: String,
    pub token_type: String,
    pub expires_in: u64,
    // Only present when the grant issues a new refresh token.
    pub refresh_token: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
/// Represents information of a device returned by the Spotify Web API.
pub struct SpotifyDevice {
    pub id: String,
    pub is_active: bool,
    pub volume_percent: u32,
    pub name: String,
    // ... more attributes ...
}
#[derive(Serialize, Deserialize)]
/// Represents the response when requesting the playback state.
struct SpotifyPlayback {
    pub device: SpotifyDevice,
    pub is_playing: bool,
    // ... more attributes ...
}
#[derive(Serialize, Deserialize, Debug)]
/// Represents a list of available Spotify devices
/// (the `/me/player/devices` response shape).
pub struct SpotifyDevices {
    pub devices: Vec<SpotifyDevice>,
}
#[wasm_bindgen]
#[derive(Clone, Debug)]
/// Represents the Spotify API and state.
///
/// Token state is held behind `Rc` so clones of the handle observe the same
/// values. NOTE(review): the access token/timestamp fields appear intended
/// for caching against `ACCESS_TOKEN_LIFETIME` — confirm in `authorize`.
pub struct SpotifyApi {
    client_id: String,
    client_secret: String,
    access_token: Rc<String>,
    refresh_token: Rc<String>,
    access_token_timestamp: Rc<f64>,
}
#[wasm_bindgen]
impl SpotifyApi {
    /// Creates a new API handle from app credentials and a long-lived
    /// refresh token; the access token starts empty and stale (timestamp 0).
    #[wasm_bindgen(constructor)]
    pub fn new(client_id: String, client_secret: String, refresh_token: String) -> SpotifyApi {
        SpotifyApi {
            client_id,
            client_secret,
            access_token: Rc::new("".to_owned()),
            refresh_token: Rc::new(refresh_token),
            access_token_timestamp: Rc::new(0.0),
        }
    }
/// Make a request to start playing music.
pub fn play(&self, device_id: String) -> Promise {
let authorize_request = self.authorize();
future_to_promise(async move {
match JsFuture::from(authorize_request).await {
Ok(authorize_request) => {
let access_token: String = authorize_request.as_string().unwrap();
let mut url = "https://api.spotify.com/v1/me/player/play".to_owned();
url.push_str(&format!("?device_id={}", device_id));
let authorization_header = format!("Bearer {}", access_token);
let mut headers = HashMap::new();
headers.insert("Authorization".to_owned(), authorization_header);
match fetch(&url, FetchMethod::Put, "", headers, true).await {
Err(e) => {
console::log_1(&format!("Error starting playback: {:?}", e).into())
}
Ok(_) => {} // player successfully started
}
}
Err(e) => console::log_1(
&format!("Error while authenticating to Spotify API: {:?}", e).into(),
),
}
Err(JsValue::from("Error starting playback"))
})
}
/// Make a request to pause Spotify.
pub fn pause(&self, device_id: String) -> Promise {
let authorize_request = self.authorize();
future_to_promise(async move {
match JsFuture::from(authorize_request).await {
Ok(authorize_request) => {
let access_token: String = authorize_request.as_string().unwrap();
let mut url = "https://api.spotify.com/v1/me/player/pause".to_string();
url.push_str(&format!("?device_id={}", device_id));
let authorization_header = format!("Bearer {}", access_token);
let mut headers = HashMap::new();
headers.insert("Authorization".to_owned(), authorization_header);
match fetch(&url, FetchMethod::Put, "", headers, true).await {
Err(e) => {
console::log_1(&format!("Error stopping playback: {:?}", e).into())
}
Ok(_) => {} // player successfully stopped playing
}
}
Err(e) => console::log_1(
&format!("Error while authenticating to Spotify API: {:?}", e).into(),
),
}
Err(JsValue::from("Error pausing playback"))
})
}
    /// Check if Spotify device is currently playing.
    ///
    /// Resolves with a boolean that is `true` only when playback is active
    /// AND the active device matches `device_id`; rejects on any failure.
    pub fn is_playing(&self, device_id: String) -> Promise {
        let authorize_request = self.authorize();
        future_to_promise(async move {
            match JsFuture::from(authorize_request).await {
                Ok(authorize_request) => {
                    let access_token: String = authorize_request.as_string().unwrap();
                    let mut url = "https://api.spotify.com/v1/me/player".to_string();
                    url.push_str(&format!("?device_id={}", device_id));
                    let authorization_header = format!("Bearer {}", access_token);
                    let mut headers = HashMap::new();
                    headers.insert("Authorization".to_owned(), authorization_header);
                    match fetch(&url, FetchMethod::Get, "", headers, false).await {
                        Err(e) => {
                            console::log_1(&format!("Error getting playback state: {:?}", e).into())
                        }
                        Ok(result) => {
                            // Deserialize the playback state and compare the
                            // active device against the requested one.
                            let json: SpotifyPlayback = result.into_serde().unwrap();
                            return Ok(JsValue::from(
                                json.is_playing && json.device.id == device_id,
                            ));
                        }
                    }
                }
                Err(e) => console::log_1(
                    &format!("Error while authenticating to Spotify API: {:?}", e).into(),
                ),
            }
            // Any logged failure above falls through to a rejection.
            Err(JsValue::from("Error fetching play state"))
        })
    }
    /// Get volume for a specific device.
    ///
    /// Resolves with the device's volume percentage (0–100); rejects on any
    /// failure.
    pub fn get_volume(&self, device_id: String) -> Promise {
        let authorize_request = self.authorize();
        future_to_promise(async move {
            match JsFuture::from(authorize_request).await {
                Ok(authorize_request) => {
                    let access_token: String = authorize_request.as_string().unwrap();
                    let mut url = "https://api.spotify.com/v1/me/player".to_string();
                    url.push_str(&format!("?device_id={}", device_id));
                    let authorization_header = format!("Bearer {}", access_token);
                    let mut headers = HashMap::new();
                    headers.insert("Authorization".to_owned(), authorization_header);
                    match fetch(&url, FetchMethod::Get, "", headers, false).await {
                        Err(e) => {
                            console::log_1(&format!("Error getting volume state: {:?}", e).into())
                        }
                        Ok(result) => {
                            // Pull the volume out of the playback state payload.
                            let json: SpotifyPlayback = result.into_serde().unwrap();
                            return Ok(JsValue::from(json.device.volume_percent));
                        }
                    }
                }
                Err(e) => console::log_1(
                    &format!("Error while authenticating to Spotify API: {:?}", e).into(),
                ),
            }
            // Any logged failure above falls through to a rejection.
            Err(JsValue::from("Error fetching volume"))
        })
    }
    /// Set the volume for a specific device.
    ///
    /// Issues a PUT to the Spotify volume endpoint with `volume` (percent)
    /// and the target `device_id`.
    ///
    /// NOTE(review): even when the PUT succeeds (`Ok(_) => {}`), control falls
    /// through to the `Err(...)` below, so the returned promise always
    /// rejects. Confirm callers ignore the result, or add an early `return Ok`.
    pub fn set_volume(&self, device_id: String, volume: u32) -> Promise {
        let authorize_request = self.authorize();
        future_to_promise(async move {
            match JsFuture::from(authorize_request).await {
                Ok(authorize_request) => {
                    // The authorize promise resolves with the access token string.
                    let access_token: String = authorize_request.as_string().unwrap();
                    let mut url = format!(
                        "https://api.spotify.com/v1/me/player/volume?volume_percent={}",
                        volume
                    );
                    url.push_str(&format!("&device_id={}", device_id));
                    let authorization_header = format!("Bearer {}", access_token);
                    let mut headers = HashMap::new();
                    headers.insert("Authorization".to_owned(), authorization_header);
                    match fetch(&url, FetchMethod::Put, "", headers, true).await {
                        Err(e) => console::log_1(&format!("Error changing volume: {:?}", e).into()),
                        Ok(_) => {} // volume successfully updated
                    }
                }
                Err(e) => console::log_1(
                    &format!("Error while authenticating to Spotify API: {:?}", e).into(),
                ),
            }
            Err(JsValue::from("Error updating volume"))
        })
    }
/// Get available Spotify devices.
pub fn get_devices(&self) -> Promise {
let authorize_request = self.authorize();
future_to_promise(async move {
match JsFuture::from(authorize_request).await {
Ok(authorize_request) => {
let access_token: String = authorize_request.as_string().unwrap();
let url = "https://api.spotify.com/v1/me/player/devices";
let authorization_header = format!("Bearer {}", access_token);
let mut headers = HashMap::new();
headers.insert("Authorization".to_owned(), authorization_header);
match fetch(url, FetchMethod::Get, "", headers, false).await {
Err(e) => console::log_1(&format!("Error getting devices: {:?}", e).into()),
Ok(result) => {
return Ok(result);
}
}
}
Err(e) => console::log_1(
&format!("Error while authenticating to Spotify API: {:?}", e).into(),
),
}
Err(JsValue::from("Error fetching devices"))
})
}
    /// Make an authorization request.
    ///
    /// Returns a promise resolving with an access token obtained via the
    /// OAuth refresh-token flow; a cached token is returned instead while
    /// `Date::now() - timestamp` is within `ACCESS_TOKEN_LIFETIME`.
    pub fn authorize(&self) -> Promise {
        // Clones of the shared Rc state; see NOTE below about rebinding.
        let mut refresh_token = Rc::clone(&self.refresh_token);
        let mut access_token = Rc::clone(&self.access_token);
        let mut access_token_timestamp = Rc::clone(&self.access_token_timestamp);
        let url = "https://accounts.spotify.com/api/token";
        // HTTP Basic credentials: "client_id:client_secret", base64-encoded.
        let token = format!("{}:{}", self.client_id, self.client_secret);
        let base64_token = encode(token);
        let authorization_header = format!("Basic {}", base64_token);
        future_to_promise(async move {
            let mut headers = HashMap::new();
            headers.insert(
                "Content-Type".to_owned(),
                "application/x-www-form-urlencoded;charset=UTF-8".to_owned(),
            );
            headers.insert("Authorization".to_owned(), authorization_header);
            let body = format!("grant_type=refresh_token&refresh_token={}", *refresh_token);
            // Fast path: reuse the cached token while it is still fresh.
            if Date::now() - *access_token_timestamp <= ACCESS_TOKEN_LIFETIME {
                return Ok(JsValue::from((*access_token).clone()));
            }
            if let Ok(result) = fetch(url, FetchMethod::Post, &body, headers, false).await {
                let json: Result<SpotifyAuthorization, _> = result.into_serde();
                return match json {
                    Ok(json) => {
                        // NOTE(review): these assignments rebind the closure-local
                        // Rc handles only; they do not write through the Rc, so the
                        // values shared with `self` are never updated and every
                        // call re-enters the slow path. Confirm whether caching
                        // is expected to work here (would need e.g. RefCell).
                        access_token_timestamp = Rc::new(Date::now());
                        access_token = Rc::new(json.access_token.clone());
                        // todo: never called, and if then it'll fail
                        if let Some(new_refresh_token) = json.refresh_token {
                            // cache refresh token
                            let fs = require("fs");
                            let config_string = fs.read_file(HOMEBRIDGE_CONFIG);
                            let new_config_string = config_string
                                .replace(&(*refresh_token), &new_refresh_token.clone());
                            fs.write_file(HOMEBRIDGE_CONFIG, new_config_string);
                            refresh_token = Rc::new(new_refresh_token);
                        }
                        Ok(JsValue::from(json.access_token))
                    }
                    Err(_) => {
                        console::log_1(
                            &format!("Error while retrieving access token from Spotify API. Response was: {:?}", result).into(),
                        );
                        Err(JsValue::from(format!("Error while retrieving access token from Spotify API. Response was: {:?}", result)))
                    }
                };
            } else {
                Err(JsValue::from("Error executing fetch request"))
            }
        })
    }
}
| 39.225071 | 135 | 0.523605 |
efb3f2d327c801c5a01b4a59477ed582950d3a66 | 1,223 | //! Blink an led without using the BSP split() method.
#![no_std]
#![no_main]
#[cfg(not(feature = "panic_led"))]
use panic_halt as _;
use pygamer as hal;
use hal::clock::GenericClockController;
use hal::delay::Delay;
use hal::entry;
use hal::pac::{CorePeripherals, Peripherals};
use hal::prelude::*;
use hal::watchdog::{Watchdog, WatchdogTimeout};
#[entry]
fn main() -> ! {
    // Singleton ownership of device and core peripherals (unwrap: can only
    // be taken once, and this is the sole entry point).
    let mut peripherals = Peripherals::take().unwrap();
    let core = CorePeripherals::take().unwrap();
    // Clock tree driven from the internal 32kHz oscillator.
    let mut clocks = GenericClockController::with_internal_32kosc(
        peripherals.GCLK,
        &mut peripherals.MCLK,
        &mut peripherals.OSC32KCTRL,
        &mut peripherals.OSCCTRL,
        &mut peripherals.NVMCTRL,
    );
    let mut delay = Delay::new(core.SYST, &mut clocks);
    // Short startup pause before configuring pins and the watchdog.
    delay.delay_ms(400u16);
    let mut pins = hal::Pins::new(peripherals.PORT);
    // On-board red LED (D13), driven as an open-drain output.
    let mut red_led = pins.d13.into_open_drain_output(&mut pins.port);
    // Watchdog: must be fed within the timeout below or the chip resets.
    let mut wdt = Watchdog::new(peripherals.WDT);
    wdt.start(WatchdogTimeout::Cycles256 as u8);
    // Blink forever: ~200ms per phase, feeding the watchdog each phase.
    loop {
        delay.delay_ms(200u8);
        wdt.feed();
        red_led.set_high().unwrap();
        delay.delay_ms(200u8);
        wdt.feed();
        red_led.set_low().unwrap();
    }
}
| 26.586957 | 70 | 0.647588 |
71b162218b0152b21d9fdc97d61ee123705d4542 | 16,683 | // Copyright (c) 2020, BlockProject 3D
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of BlockProject 3D nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use byteorder::LittleEndian;
use byteorder::ByteOrder;
use std::io;
use std::io::Read;
use std::io::Seek;
use std::io::Write;
use std::fs::File;
use std::boxed::Box;
use xz::stream::Stream;
use std::num::Wrapping;
// Size in bytes of a serialized section header.
pub const SIZE_SECTION_HEADER: usize = 24;
/// On-disk BPX section header (fixed 24-byte little-endian layout).
/// The `//+N` comments give each field's byte offset; bytes 22..24 of the
/// serialized form are left zero (reserved).
#[derive(Copy, Clone)]
pub struct BPXSectionHeader
{
    pub pointer: u64, //+0 - absolute file offset of the section payload
    pub csize: u32, //+8 - stored (possibly compressed) payload size
    pub size: u32, //+12 - uncompressed payload size
    pub chksum: u32, //+16 - weak (byte-sum) checksum of the uncompressed data
    pub btype: u8, //+20 - section type tag
    pub flags: u8 //+21 - FLAG_COMPRESS_XZ / FLAG_CHECK_WEAK bits
}
impl BPXSectionHeader
{
    /// Reads and deserializes a section header from `reader`.
    ///
    /// Returns the byte-wise checksum of the raw 24-byte header together
    /// with the decoded header.
    pub fn read<TReader: io::Read>(reader: &mut TReader) -> io::Result<(u32, BPXSectionHeader)>
    {
        let mut buf: [u8;SIZE_SECTION_HEADER] = [0;SIZE_SECTION_HEADER];
        let mut checksum: u32 = 0;
        //Fixed: a plain read() may legally return fewer than 24 bytes, which
        //silently left the tail zeroed and decoded a corrupt header.
        reader.read_exact(&mut buf)?;
        for i in 0..SIZE_SECTION_HEADER
        {
            checksum += buf[i] as u32;
        }
        return Ok((checksum, BPXSectionHeader {
            pointer: LittleEndian::read_u64(&buf[0..8]),
            csize: LittleEndian::read_u32(&buf[8..12]),
            size: LittleEndian::read_u32(&buf[12..16]),
            chksum: LittleEndian::read_u32(&buf[16..20]),
            btype: buf[20],
            flags: buf[21]
        }));
    }
    /// Creates a fresh header for a section of `size` bytes and type `btype`;
    /// the on-disk fields (pointer, csize, chksum) start at zero and the weak
    /// checksum flag is set by default.
    pub fn new(size: u32, btype: u8) -> BPXSectionHeader
    {
        return BPXSectionHeader
        {
            pointer: 0, //+0
            csize: 0, //+8
            size: size, //+12
            chksum: 0, //+16
            btype: btype, //+20
            flags: FLAG_CHECK_WEAK // +21
        };
    }
    /// True when the uncompressed payload is large enough that it should be
    /// kept on disk rather than fully in memory.
    pub fn is_huge_section(&self) -> bool
    {
        return self.size > 1000000; //Return true if uncompressed size is greater than 1Mb
    }
    /// Serializes the header into its fixed 24-byte little-endian layout;
    /// bytes 22..24 remain zero.
    fn to_bytes(&self) -> [u8; SIZE_SECTION_HEADER]
    {
        let mut block: [u8; SIZE_SECTION_HEADER] = [0; SIZE_SECTION_HEADER];
        LittleEndian::write_u64(&mut block[0..8], self.pointer);
        LittleEndian::write_u32(&mut block[8..12], self.csize);
        LittleEndian::write_u32(&mut block[12..16], self.size);
        LittleEndian::write_u32(&mut block[16..20], self.chksum);
        block[20] = self.btype;
        block[21] = self.flags;
        return block;
    }
    /// Byte-wise checksum of the serialized header.
    pub fn get_checksum(&self) -> u32
    {
        let mut checksum: u32 = 0;
        let buf = self.to_bytes();
        for i in 0..SIZE_SECTION_HEADER
        {
            checksum += buf[i] as u32;
        }
        return checksum;
    }
    /// Serializes and writes the header to `writer`, flushing afterwards.
    pub fn write<TWriter: io::Write>(&self, writer: &mut TWriter) -> io::Result<()>
    {
        let buf = self.to_bytes();
        //Fixed: write() may perform a partial write and drop header bytes;
        //write_all guarantees the full 24 bytes are written or an error.
        writer.write_all(&buf)?;
        writer.flush()?;
        return Ok(());
    }
}
/// Abstraction over a section body: seekable byte storage that can be
/// snapshotted into memory and reports how many bytes were written to it.
pub trait Section : io::Read + io::Write + io::Seek
{
    fn load_in_memory(&mut self) -> io::Result<Vec<u8>>;
    fn size(&self) -> usize; //The computed size of the section
}
/// Section storage backed by a fixed-capacity in-memory buffer.
struct InMemorySection {
    data: Vec<u8>,
    cursor: usize,
    cur_size: usize,
}
impl InMemorySection {
    /// Wraps an existing buffer; cursor and written-size both start at zero.
    pub fn new(data: Vec<u8>) -> InMemorySection {
        InMemorySection { data, cursor: 0, cur_size: 0 }
    }
}
impl io::Read for InMemorySection {
    /// Copies as many bytes as are available from the cursor onwards,
    /// returning how many were actually delivered (0 at end of buffer).
    fn read(&mut self, out: &mut [u8]) -> io::Result<usize> {
        let available = self.data.len().saturating_sub(self.cursor);
        let count = available.min(out.len());
        if count > 0 {
            out[..count].copy_from_slice(&self.data[self.cursor..self.cursor + count]);
            self.cursor += count;
        }
        Ok(count)
    }
}
impl io::Write for InMemorySection {
    /// Overwrites bytes at the cursor; writes past the fixed capacity are
    /// truncated and the number of bytes actually stored is returned.
    fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
        for (written, &byte) in bytes.iter().enumerate() {
            if self.cursor >= self.data.len() {
                return Ok(written);
            }
            self.data[self.cursor] = byte;
            self.cursor += 1;
            // Track the high-water mark of bytes written through this wrapper.
            if self.cursor >= self.cur_size {
                self.cur_size += 1;
            }
        }
        Ok(bytes.len())
    }
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Offsets an unsigned position by a signed delta.
fn slow_but_correct_add(value: usize, offset: isize) -> usize {
    if offset < 0 {
        value - (-offset) as usize
    } else {
        value + offset as usize
    }
}
impl io::Seek for InMemorySection {
    fn seek(&mut self, target: io::SeekFrom) -> io::Result<u64> {
        self.cursor = match target {
            io::SeekFrom::Start(pos) => pos as usize,
            io::SeekFrom::End(delta) => slow_but_correct_add(self.data.len(), delta as isize),
            io::SeekFrom::Current(delta) => slow_but_correct_add(self.cursor, delta as isize),
        };
        Ok(self.cursor as u64)
    }
}
impl Section for InMemorySection {
    fn load_in_memory(&mut self) -> io::Result<Vec<u8>> {
        Ok(self.data.clone())
    }
    fn size(&self) -> usize {
        self.cur_size
    }
}
// Size of the internal read-ahead buffer used by FileBasedSection.
const SMALL_READ_BLOCK_SIZE: usize = 8192;
/// Section storage backed by a file (a temp file in practice), with an
/// internal read buffer so reads do not hit the file one byte at a time.
struct FileBasedSection
{
    data: File,
    buffer: [u8; SMALL_READ_BLOCK_SIZE], // read-ahead buffer
    written: usize, // number of valid bytes currently held in `buffer`
    cursor: usize, // read position inside `buffer`; usize::MAX marks it empty
    cur_size: usize, // bytes written through this wrapper (reported by size())
    seek_ptr: u64 // last known position in the underlying file
}
impl FileBasedSection
{
    pub fn new(data: File) -> FileBasedSection
    {
        return FileBasedSection
        {
            data: data,
            buffer: [0; SMALL_READ_BLOCK_SIZE],
            written: 0,
            cursor: usize::MAX, // sentinel: buffer not yet filled
            cur_size: 0,
            seek_ptr: 0
        };
    }
}
impl io::Read for FileBasedSection
{
    // Buffered read: refills `buffer` from the file whenever the cursor
    // reaches the end of the valid bytes; returns the number delivered.
    fn read(&mut self, data: &mut [u8]) -> io::Result<usize>
    {
        let mut cnt: usize = 0;
        for i in 0..data.len()
        {
            if self.cursor >= self.written
            {
                self.cursor = 0;
                self.written = self.data.read(&mut self.buffer)?;
            }
            if self.cursor < self.written
            {
                data[i] = self.buffer[self.cursor];
                self.cursor += 1;
                cnt += 1;
            }
        }
        return Ok(cnt);
    }
}
impl io::Write for FileBasedSection
{
    // Writes bypass the read buffer and go straight to the file; `cur_size`
    // grows only when writing at or past the current logical end.
    fn write(&mut self, data: &[u8]) -> io::Result<usize>
    {
        let len = self.data.write(data)?;
        if self.seek_ptr >= self.cur_size as u64
        {
            self.cur_size += len;
            self.seek_ptr += len as u64;
        }
        return Ok(len);
    }
    fn flush(&mut self) -> io::Result<()>
    {
        // NOTE(review): when `cursor` still holds the usize::MAX sentinel this
        // cast wraps to -1 and seeks one byte backwards; confirm the intended
        // behaviour of flushing before any buffered read has happened.
        self.data.seek(io::SeekFrom::Current(self.cursor as i64))?;
        self.cursor = usize::MAX;
        return self.data.flush();
    }
}
impl io::Seek for FileBasedSection
{
    // Seeks the underlying file directly; does not invalidate the read buffer.
    fn seek(&mut self, state: io::SeekFrom) -> io::Result<u64>
    {
        self.seek_ptr = self.data.seek(state)?;
        return Ok(self.seek_ptr);
    }
}
impl Section for FileBasedSection
{
    // Reads from the current file position to EOF into a fresh Vec.
    fn load_in_memory(&mut self) -> io::Result<Vec<u8>>
    {
        let mut data: Vec<u8> = Vec::new();
        self.data.read_to_end(&mut data)?;
        return Ok(data);
    }
    fn size(&self) -> usize
    {
        return self.cur_size;
    }
}
/// Sums every byte of `data` into a wrapping 32-bit checksum.
fn read_chksum(data: &[u8]) -> Wrapping<u32>
{
    data.iter()
        .fold(Wrapping(0u32), |acc, &byte| acc + Wrapping(byte as u32))
}
const FLAG_COMPRESS_XZ: u8 = 0x2;
const FLAG_CHECK_WEAK: u8 = 0x8;
const READ_BLOCK_SIZE: usize = 65536;
/// XZ-compresses `inflated_size` bytes from `input` into `output`.
///
/// Returns `(compressed_size, weak_checksum)` where the checksum is the
/// byte-sum of the *uncompressed* data.
fn block_based_deflate(input: &mut dyn Read, output: &mut dyn Write, inflated_size: usize) -> io::Result<(usize, u32)>
{
    let mut count: usize = 0;
    let mut encoder = match Stream::new_easy_encoder(0, xz::stream::Check::None)
    {
        Err(e) => return Err(io::Error::new(io::ErrorKind::InvalidInput, format!("[BPX] deflate initialization error: {}", e))),
        Ok(v) => v
    };
    let mut chksum: Wrapping<u32> = Wrapping(0);
    let mut csize: usize = 0;
    while count < inflated_size {
        let mut idata: [u8; READ_BLOCK_SIZE] = [0; READ_BLOCK_SIZE];
        let mut status = xz::stream::Status::Ok;
        let mut expected = xz::stream::Status::MemNeeded;
        let mut action = xz::stream::Action::Run;
        let mut res = input.read(&mut idata)?;
        count += res;
        // Checksums the whole buffer, but the unread tail is zero-filled and
        // contributes nothing to the byte sum.
        chksum += read_chksum(&idata);
        // NOTE(review): if `input` reaches EOF early (res == 0), `count`
        // stops growing and this loop never terminates - confirm inputs.
        if count >= inflated_size
        {
            action = xz::stream::Action::Finish;
            expected = xz::stream::Status::StreamEnd;
        }
        // Drive the encoder until it reports the status expected for this
        // chunk; after the first pass the input slice is empty (res = 0) and
        // only pending output is drained.
        // NOTE(review): intermediate chunks wait for Status::MemNeeded -
        // verify this matches the xz crate's process_vec semantics.
        while status != expected
        {
            let mut odata: Vec<u8> = Vec::with_capacity(READ_BLOCK_SIZE * 2);
            match encoder.process_vec(&idata[0..res], &mut odata, action)
            {
                Ok(s) => status = s,
                Err(e) => return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] deflate error: {}", e)))
            }
            res = 0;
            // NOTE(review): plain write() may be partial; write_all would be safer.
            output.write(&odata)?;
            csize += odata.len();
        }
    }
    return Ok((csize, chksum.0));
}
/// Inflates `deflated_size` XZ-compressed bytes from `input` into `output`.
///
/// Returns the weak (byte-sum) checksum of the *uncompressed* data.
fn block_based_inflate(input: &mut dyn Read, output: &mut dyn Write, deflated_size: usize) -> io::Result<u32>
{
    let mut decoder = match Stream::new_stream_decoder(u32::MAX as u64, xz::stream::CONCATENATED)
    {
        Err(e) => return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] inflate error: {}", e))),
        Ok(v) => v
    };
    let mut action = xz::stream::Action::Run;
    let mut expected = xz::stream::Status::MemNeeded;
    let mut chksum: Wrapping<u32> = Wrapping(0);
    let mut remaining = deflated_size;
    while remaining > 0 {
        let mut idata: [u8; READ_BLOCK_SIZE] = [0; READ_BLOCK_SIZE];
        let mut status = xz::stream::Status::Ok;
        // Never read past the end of the compressed payload.
        let mut res = input.read(&mut idata[0..std::cmp::min(READ_BLOCK_SIZE, remaining)])?;
        remaining -= res;
        // NOTE(review): if `input` reaches EOF early (res == 0), `remaining`
        // stops shrinking and this loop never terminates - confirm inputs.
        if remaining == 0
        {
            action = xz::stream::Action::Finish;
            expected = xz::stream::Status::StreamEnd;
        }
        // Drive the decoder until it reports the status expected for this
        // chunk; after the first pass the input slice is empty (res = 0) and
        // only pending output is drained.
        while status != expected
        {
            let mut odata: Vec<u8> = Vec::with_capacity(READ_BLOCK_SIZE * 16);
            match decoder.process_vec(&idata[0..res], &mut odata, action)
            {
                Ok(s) => status = s,
                Err(e) => return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] inflate error: {}", e)))
            }
            res = 0;
            chksum += read_chksum(&odata);
            // NOTE(review): plain write() may be partial; write_all would be safer.
            output.write(&odata)?;
        }
    }
    output.flush()?;
    return Ok(chksum.0);
}
/// Loads the section described by `header` into an in-memory buffer,
/// inflating it first when the XZ flag is set, and validates the weak
/// checksum when the corresponding flag is present.
fn load_section_in_memory(bpx: &mut File, header: &BPXSectionHeader) -> io::Result<InMemorySection>
{
    bpx.seek(io::SeekFrom::Start(header.pointer))?;
    if header.flags & FLAG_COMPRESS_XZ == FLAG_COMPRESS_XZ
    {
        let mut section = InMemorySection::new(vec![0; header.size as usize]);
        section.seek(io::SeekFrom::Start(0))?;
        let chksum = block_based_inflate(bpx, &mut section, header.csize as usize)?;
        println!("Unpacked section size: {}", section.size()); // NOTE(review): leftover debug output
        if header.flags & FLAG_CHECK_WEAK == FLAG_CHECK_WEAK && chksum != header.chksum
        {
            return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] checksum validation failed {} != {}", chksum, header.chksum)));
        }
        // Rewind so the caller starts reading at the beginning of the payload.
        section.seek(io::SeekFrom::Start(0))?;
        return Ok(section);
    }
    else
    {
        let mut data = vec![0; header.size as usize];
        //Fixed: read() may legally return fewer bytes than requested, which
        //previously left the tail of `data` zeroed and corrupted the section.
        bpx.read_exact(&mut data)?;
        let chksum = read_chksum(&data);
        if header.flags & FLAG_CHECK_WEAK == FLAG_CHECK_WEAK && chksum.0 != header.chksum
        {
            return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] checksum validation failed {} != {}", chksum, header.chksum)));
        }
        let mut section = InMemorySection::new(data);
        section.seek(io::SeekFrom::Start(0))?;
        return Ok(section);
    }
}
/// Loads the section described by `header` into a temporary file, inflating
/// it first when the XZ flag is set, and validates the weak checksum when
/// the corresponding flag is present.
fn load_section_as_file(bpx: &mut File, header: &BPXSectionHeader) -> io::Result<FileBasedSection>
{
    let mut section = FileBasedSection::new(tempfile::tempfile()?);
    bpx.seek(io::SeekFrom::Start(header.pointer))?;
    if header.flags & FLAG_COMPRESS_XZ == FLAG_COMPRESS_XZ
    {
        let chksum = block_based_inflate(bpx, &mut section, header.csize as usize)?;
        println!("Unpacked section size: {}", section.size()); // NOTE(review): leftover debug output
        if header.flags & FLAG_CHECK_WEAK == FLAG_CHECK_WEAK && chksum != header.chksum
        {
            return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] checksum validation failed {} != {}", chksum, header.chksum)));
        }
    }
    else
    {
        let mut idata: [u8; READ_BLOCK_SIZE] = [0; READ_BLOCK_SIZE];
        let mut count: usize = 0;
        let mut chksum: Wrapping<u32> = Wrapping(0);
        let mut remaining: usize = header.size as usize;
        while count < header.size as usize
        {
            let res = bpx.read(&mut idata[0..std::cmp::min(READ_BLOCK_SIZE, remaining)])?;
            if res == 0
            {
                //Fixed: without this guard a truncated input file made the
                //loop spin forever, since `count` stopped growing.
                return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "[BPX] unexpected end of file while reading section"));
            }
            //Fixed: write_all instead of write - a partial write silently
            //dropped section bytes while count/chksum advanced by `res`.
            section.write_all(&idata[0..res])?;
            chksum += read_chksum(&idata[0..res]);
            count += res;
            remaining -= res;
        }
        if header.flags & FLAG_CHECK_WEAK == FLAG_CHECK_WEAK && chksum.0 != header.chksum
        {
            return Err(io::Error::new(io::ErrorKind::InvalidData, format!("[BPX] checksum validation failed {} != {}", chksum, header.chksum)));
        }
        section.flush()?;
    }
    // Rewind so the caller starts reading at the beginning of the payload.
    section.seek(io::SeekFrom::Start(0))?;
    return Ok(section);
}
/// Loads the section described by `header` from `bpx`, choosing file-backed
/// storage for huge sections and an in-memory buffer otherwise.
pub fn open_section(bpx: &mut File, header: &BPXSectionHeader) -> io::Result<Box<dyn Section>>
{
    let section: Box<dyn Section> = if header.is_huge_section()
    {
        Box::from(load_section_as_file(bpx, header)?)
    }
    else
    {
        Box::from(load_section_in_memory(bpx, header)?)
    };
    return Ok(section);
}
/// Creates empty storage for a new section: file-backed when the section is
/// huge or its size is unknown (zero), in-memory otherwise. The returned
/// section is positioned at offset 0.
pub fn create_section(header: &BPXSectionHeader) -> io::Result<Box<dyn Section>>
{
    let mut section: Box<dyn Section> = if header.is_huge_section() || header.size == 0
    {
        Box::from(FileBasedSection::new(tempfile::tempfile()?))
    }
    else
    {
        Box::from(InMemorySection::new(vec![0; header.size as usize]))
    };
    section.seek(io::SeekFrom::Start(0))?;
    return Ok(section);
}
/// Serializes a section's contents to `out`.
///
/// Sections smaller than `READ_BLOCK_SIZE` are copied verbatim; larger ones
/// are XZ-compressed. Returns `(stored_size, weak_checksum, flags)`.
pub fn write_section(section: &mut Box<dyn Section>, out: &mut dyn Write) -> io::Result<(usize, u32, u8)>
{
    if section.size() < READ_BLOCK_SIZE
    {
        let mut idata: [u8; READ_BLOCK_SIZE] = [0; READ_BLOCK_SIZE];
        let mut count: usize = 0;
        let mut chksum: Wrapping<u32> = Wrapping(0);
        while count < section.size()
        {
            let res = section.read(&mut idata)?;
            if res == 0
            {
                //Fixed: guard against an infinite loop if the section reports
                //more bytes than it can actually produce.
                return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "[BPX] unexpected end of section data"));
            }
            //Fixed: write_all instead of write - a partial write silently
            //dropped section bytes.
            out.write_all(&idata[0..res])?;
            chksum += read_chksum(&idata[0..res]);
            count += res;
        }
        section.flush()?;
        return Ok((section.size(), chksum.0, FLAG_CHECK_WEAK));
    }
    else
    {
        let size = section.size();
        let (csize, chksum) = block_based_deflate(section, out, size)?;
        return Ok((csize, chksum, FLAG_CHECK_WEAK | FLAG_COMPRESS_XZ));
    }
}
| 30.222826 | 144 | 0.574357 |
e4c4c989f66af5ec6bd6a2ba836ccc236cecec32 | 15,268 | //! Functions for computing [integral images](https://en.wikipedia.org/wiki/Summed_area_table)
//! and running sums of rows and columns.
use image::{Luma, GrayImage, GenericImageView, Pixel, Primitive};
use crate::definitions::Image;
use crate::map::{ChannelMap, WithChannel};
use std::ops::AddAssign;
/// Computes the 2d running sum of an image. Channels are summed independently.
///
/// An integral image I has width and height one greater than its source image F,
/// and is defined by I(x, y) = sum of F(x', y') for x' < x, y' < y, i.e. each pixel
/// in the integral image contains the sum of the pixel intensities of all input pixels
/// that are strictly above it and strictly to its left. In particular, the left column
/// and top row of an integral image are all 0, and the value of the bottom right pixel of
/// an integral image is equal to the sum of all pixels in the source image.
///
/// Integral images have the helpful property of allowing us to
/// compute the sum of pixel intensities in a rectangular region of an image
/// in constant time. Specifically, given a rectangle [l, r] * [t, b] in F,
/// the sum of the pixels in this rectangle is
/// I(r + 1, b + 1) - I(r + 1, t) - I(l, b + 1) + I(l, t).
///
/// # Examples
/// ```
/// # extern crate image;
/// # #[macro_use]
/// # extern crate imageproc;
/// # fn main() {
/// use imageproc::integral_image::{integral_image, sum_image_pixels};
///
/// let image = gray_image!(
/// 1, 2, 3;
/// 4, 5, 6);
///
/// let integral = gray_image!(type: u32,
/// 0, 0, 0, 0;
/// 0, 1, 3, 6;
/// 0, 5, 12, 21);
///
/// assert_pixels_eq!(integral_image::<_, u32>(&image), integral);
///
/// // Compute the sum of all pixels in the right two columns
/// assert_eq!(sum_image_pixels(&integral, 1, 0, 2, 1), 2 + 3 + 5 + 6);
///
/// // Compute the sum of all pixels in the top row
/// assert_eq!(sum_image_pixels(&integral, 0, 0, 2, 0), 1 + 2 + 3);
/// # }
/// ```
pub fn integral_image<P, T>(image: &Image<P>) -> Image<ChannelMap<P, T>>
where
    P: Pixel<Subpixel = u8> + WithChannel<T> + 'static,
    T: From<u8> + Primitive + AddAssign + 'static
{
    // Delegate to the shared implementation; `false` => accumulate raw
    // intensities (not their squares).
    integral_image_impl(image, false)
}
/// Computes the 2d running sum of the squares of the intensities in an image. Channels are summed
/// independently.
///
/// See the [`integral_image`](fn.integral_image.html) documentation for more information on integral images.
///
/// # Examples
/// ```
/// # extern crate image;
/// # #[macro_use]
/// # extern crate imageproc;
/// # fn main() {
/// use imageproc::integral_image::{integral_squared_image, sum_image_pixels};
///
/// let image = gray_image!(
/// 1, 2, 3;
/// 4, 5, 6);
///
/// let integral = gray_image!(type: u32,
/// 0, 0, 0, 0;
/// 0, 1, 5, 14;
/// 0, 17, 46, 91);
///
/// assert_pixels_eq!(integral_squared_image::<_, u32>(&image), integral);
///
/// // Compute the sum of the squares of all pixels in the right two columns
/// assert_eq!(sum_image_pixels(&integral, 1, 0, 2, 1), 4 + 9 + 25 + 36);
///
/// // Compute the sum of the squares of all pixels in the top row
/// assert_eq!(sum_image_pixels(&integral, 0, 0, 2, 0), 1 + 4 + 9);
/// # }
/// ```
pub fn integral_squared_image<P, T>(image: &Image<P>) -> Image<ChannelMap<P, T>>
where
    P: Pixel<Subpixel = u8> + WithChannel<T> + 'static,
    T: From<u8> + Primitive + AddAssign + 'static
{
    // Delegate to the shared implementation; `true` => accumulate squared
    // intensities.
    integral_image_impl(image, true)
}
/// Implementation of `integral_image` and `integral_squared_image`.
///
/// The output is one pixel larger in each dimension than the input, with a
/// zero top row and left column. When `square` is true the squared
/// intensities are accumulated instead of the raw ones.
fn integral_image_impl<P, T>(image: &Image<P>, square: bool) -> Image<ChannelMap<P, T>>
where
    P: Pixel<Subpixel = u8> + WithChannel<T> + 'static,
    T: From<u8> + Primitive + AddAssign + 'static
{
    // TODO: Make faster, add a new IntegralImage type
    // TODO: to make it harder to make off-by-one errors when computing sums of regions.
    let (in_width, in_height) = image.dimensions();
    let out_width = in_width + 1;
    let out_height = in_height + 1;
    let mut out = Image::<ChannelMap<P, T>>::new(out_width, out_height);
    if in_width == 0 || in_height == 0 {
        return out;
    }
    for y in 1..out_height {
        // Per-channel running sum of the current input row.
        let mut sum = vec![T::zero(); P::channel_count() as usize];
        for x in 1..out_width {
            // SAFETY: x and y range over 1..out_*, so (x - 1, y - 1) always
            // lies inside the input and (x, y - 1) inside the output.
            unsafe {
                for c in 0..P::channel_count() {
                    let pix: T = (image.unsafe_get_pixel(x - 1, y - 1).channels()[c as usize]).into();
                    if square {
                        sum[c as usize] += pix * pix;
                    } else {
                        sum[c as usize] += pix;
                    }
                }
                // Integral value = value directly above + running row sum.
                let above = out.unsafe_get_pixel(x, y - 1);
                // For some reason there's no unsafe_get_pixel_mut, so to update the existing
                // pixel here we need to use the method with bounds checking
                let current = out.get_pixel_mut(x, y);
                for c in 0..P::channel_count() {
                    current.channels_mut()[c as usize] = above.channels()[c as usize] + sum[c as usize];
                }
            }
        }
    }
    out
}
/// Sums the pixels in positions [left, right] * [top, bottom] in F, where `integral_image` is the
/// integral image of F.
///
/// See the [`integral_image`](fn.integral_image.html) documentation for examples.
pub fn sum_image_pixels<T>(
    integral_image: &Image<Luma<T>>,
    left: u32,
    top: u32,
    right: u32,
    bottom: u32,
) -> T
where T: Primitive + 'static
{
    // TODO: better type-safety. It's too easy to pass the original image in here by mistake.
    // TODO: it's also hard to see what the four u32s mean at the call site - use a Rect instead.
    // Standard inclusion-exclusion on the integral image:
    // I(r + 1, b + 1) + I(l, t) - I(r + 1, t) - I(l, b + 1).
    // NOTE(review): the two additions are evaluated before the subtractions,
    // so the intermediate value can overflow for narrow integer T - confirm
    // callers use a sufficiently wide T.
    integral_image.get_pixel(right + 1, bottom + 1)[0]
        + integral_image.get_pixel(left, top)[0]
        - integral_image.get_pixel(right + 1, top)[0]
        - integral_image.get_pixel(left, bottom + 1)[0]
}
/// Computes the variance of [left, right] * [top, bottom] in F, where `integral_image` is the
/// integral image of F and `integral_squared_image` is the integral image of the squares of the
/// pixels in F.
///
/// See the [`integral_image`](fn.integral_image.html) documentation for more information on integral images.
///
///# Examples
/// ```
/// # extern crate image;
/// # #[macro_use]
/// # extern crate imageproc;
/// # fn main() {
/// use std::f64;
/// use imageproc::integral_image::{integral_image, integral_squared_image, variance};
///
/// let image = gray_image!(
/// 1, 2, 3;
/// 4, 5, 6);
///
/// let integral = integral_image(&image);
/// let integral_squared = integral_squared_image(&image);
///
/// // Compute the variance of the pixels in the right two columns
/// let mean: f64 = (2.0 + 3.0 + 5.0 + 6.0) / 4.0;
/// let var = ((2.0 - mean).powi(2)
/// + (3.0 - mean).powi(2)
/// + (5.0 - mean).powi(2)
/// + (6.0 - mean).powi(2)) / 4.0;
///
/// assert_eq!(variance(&integral, &integral_squared, 1, 0, 2, 1), var);
/// # }
/// ```
pub fn variance(
    integral_image: &Image<Luma<u32>>,
    integral_squared_image: &Image<Luma<u32>>,
    left: u32,
    top: u32,
    right: u32,
    bottom: u32,
) -> f64 {
    // TODO: same improvements as for sum_image_pixels, plus check that the given rect is valid.
    // Number of pixels in the rectangle (bounds are inclusive).
    let n = (right - left + 1) as f64 * (bottom - top + 1) as f64;
    let sum_sq = sum_image_pixels(integral_squared_image, left, top, right, bottom);
    let sum = sum_image_pixels(integral_image, left, top, right, bottom);
    // Var(X) = E[X^2] - E[X]^2, computed as (sum_sq - sum^2 / n) / n.
    (sum_sq as f64 - (sum as f64).powi(2) / n) / n
}
/// Computes the running sum of one row of image, padded
/// at the beginning and end. The padding is by continuity.
/// Takes a reference to buffer so that this can be reused
/// for all rows in an image.
///
/// # Examples
/// ```
/// # extern crate image;
/// # #[macro_use]
/// # extern crate imageproc;
/// # fn main() {
/// use imageproc::integral_image::row_running_sum;
///
/// let image = gray_image!(
/// 1, 2, 3;
/// 4, 5, 6);
///
/// // Buffer has length two greater than image width, hence padding of 1
/// let mut buffer = [0; 5];
/// row_running_sum(&image, 0, &mut buffer, 1);
///
/// // The image is padded by continuity on either side
/// assert_eq!(buffer, [1, 2, 4, 7, 10]);
/// # }
/// ```
pub fn row_running_sum(image: &GrayImage, row: u32, buffer: &mut [u32], padding: u32) {
    // TODO: faster, more formats
    let (width, height) = image.dimensions();
    assert!(
        buffer.len() >= (width + 2 * padding) as usize,
        format!(
            "Buffer length {} is less than {} + 2 * {}",
            buffer.len(),
            width,
            padding
        )
    );
    assert!(
        row < height,
        format!("row out of bounds: {} >= {}", row, height)
    );
    // SAFETY: the asserts above guarantee `buffer` holds width + 2 * padding
    // entries and that `row` is a valid row; all x indices below are < width,
    // so every unchecked access stays in bounds.
    // NOTE(review): a zero-width image would make `width - 1` underflow in
    // the right-padding loop - confirm callers never pass empty images.
    unsafe {
        let mut sum = 0;
        // Left padding: repeat the first pixel `padding` times.
        for x in 0..padding {
            sum += image.unsafe_get_pixel(0, row)[0] as u32;
            *buffer.get_unchecked_mut(x as usize) = sum;
        }
        // Main body: cumulative sum across the row.
        for x in 0..width {
            sum += image.unsafe_get_pixel(x, row)[0] as u32;
            *buffer.get_unchecked_mut((x + padding) as usize) = sum;
        }
        // Right padding: repeat the last pixel `padding` times.
        for x in 0..padding {
            sum += image.unsafe_get_pixel(width - 1, row)[0] as u32;
            *buffer.get_unchecked_mut((x + width + padding) as usize) = sum;
        }
    }
}
/// Computes the running sum of one column of image, padded
/// at the top and bottom. The padding is by continuity.
/// Takes a reference to buffer so that this can be reused
/// for all columns in an image.
///
/// # Examples
/// ```
/// # extern crate image;
/// # #[macro_use]
/// # extern crate imageproc;
/// # fn main() {
/// use imageproc::integral_image::column_running_sum;
///
/// let image = gray_image!(
/// 1, 4;
/// 2, 5;
/// 3, 6);
///
/// // Buffer has length two greater than image height, hence padding of 1
/// let mut buffer = [0; 5];
/// column_running_sum(&image, 0, &mut buffer, 1);
///
/// // The image is padded by continuity on top and bottom
/// assert_eq!(buffer, [1, 2, 4, 7, 10]);
/// # }
/// ```
pub fn column_running_sum(image: &GrayImage, column: u32, buffer: &mut [u32], padding: u32) {
    // TODO: faster, more formats
    let (width, height) = image.dimensions();
    assert!(
        buffer.len() >= (height + 2 * padding) as usize,
        format!(
            "Buffer length {} is less than {} + 2 * {}",
            buffer.len(),
            height,
            padding
        )
    );
    assert!(
        column < width,
        format!("column out of bounds: {} >= {}", column, width)
    );
    // SAFETY: the asserts above guarantee `buffer` holds height + 2 * padding
    // entries and that `column` is a valid column; all y indices below are
    // < height, so every unchecked access stays in bounds.
    // NOTE(review): a zero-height image would make `height - 1` underflow in
    // the bottom-padding loop - confirm callers never pass empty images.
    unsafe {
        let mut sum = 0;
        // Top padding: repeat the first pixel `padding` times.
        for y in 0..padding {
            sum += image.unsafe_get_pixel(column, 0)[0] as u32;
            *buffer.get_unchecked_mut(y as usize) = sum;
        }
        // Main body: cumulative sum down the column.
        for y in 0..height {
            sum += image.unsafe_get_pixel(column, y)[0] as u32;
            *buffer.get_unchecked_mut((y + padding) as usize) = sum;
        }
        // Bottom padding: repeat the last pixel `padding` times.
        for y in 0..padding {
            sum += image.unsafe_get_pixel(column, height - 1)[0] as u32;
            *buffer.get_unchecked_mut((y + height + padding) as usize) = sum;
        }
    }
}
// Unit tests, property tests (against a naive reference implementation) and
// benchmarks for the integral-image functions above.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::property_testing::GrayTestImage;
    use crate::utils::{gray_bench_image, pixel_diff_summary, rgb_bench_image};
    use image::{GenericImage, ImageBuffer, Luma};
    use quickcheck::{quickcheck, TestResult};
    use crate::definitions::Image;
    use ::test;
    // Exhaustively checks all rectangles of a 2x2 image against hand-computed sums.
    #[test]
    fn test_sum_image_pixels() {
        let image = gray_image!(
            1, 2;
            3, 4);
        let integral = integral_image::<_, u32>(&image);
        assert_eq!(sum_image_pixels(&integral, 0, 0, 0, 0), 1);
        assert_eq!(sum_image_pixels(&integral, 0, 0, 1, 0), 3);
        assert_eq!(sum_image_pixels(&integral, 0, 0, 0, 1), 4);
        assert_eq!(sum_image_pixels(&integral, 0, 0, 1, 1), 10);
        assert_eq!(sum_image_pixels(&integral, 1, 0, 1, 0), 2);
        assert_eq!(sum_image_pixels(&integral, 1, 0, 1, 1), 6);
        assert_eq!(sum_image_pixels(&integral, 0, 1, 0, 1), 3);
        assert_eq!(sum_image_pixels(&integral, 0, 1, 1, 1), 7);
        assert_eq!(sum_image_pixels(&integral, 1, 1, 1, 1), 4);
    }
    #[test]
    fn test_integral_image_gray() {
        let image = gray_image!(
            1, 2, 3;
            4, 5, 6);
        let expected = gray_image!(type: u32,
            0, 0, 0, 0;
            0, 1, 3, 6;
            0, 5, 12, 21);
        assert_pixels_eq!(integral_image::<_, u32>(&image), expected);
    }
    // Channels must be summed independently for multi-channel images.
    #[test]
    fn test_integral_image_rgb() {
        let image = rgb_image!(
            [1, 11, 21], [2, 12, 22], [3, 13, 23];
            [4, 14, 24], [5, 15, 25], [6, 16, 26]);
        let expected = rgb_image!(type: u32,
            [0, 0, 0], [0, 0, 0], [ 0, 0, 0], [ 0, 0, 0];
            [0, 0, 0], [1, 11, 21], [ 3, 23, 43], [ 6, 36, 66];
            [0, 0, 0], [5, 25, 45], [12, 52, 92], [21, 81, 141]);
        assert_pixels_eq!(integral_image::<_, u32>(&image), expected);
    }
    #[bench]
    fn bench_integral_image_gray(b: &mut test::Bencher) {
        let image = gray_bench_image(500, 500);
        b.iter(|| {
            let integral = integral_image::<_, u32>(&image);
            test::black_box(integral);
        });
    }
    #[bench]
    fn bench_integral_image_rgb(b: &mut test::Bencher) {
        let image = rgb_bench_image(500, 500);
        b.iter(|| {
            let integral = integral_image::<_, u32>(&image);
            test::black_box(integral);
        });
    }
    /// Simple implementation of integral_image to validate faster versions against.
    fn integral_image_ref<I>(image: &I) -> Image<Luma<u32>>
    where
        I: GenericImage<Pixel = Luma<u8>>,
    {
        let (in_width, in_height) = image.dimensions();
        let (out_width, out_height) = (in_width + 1, in_height + 1);
        let mut out = ImageBuffer::from_pixel(out_width, out_height, Luma([0u32]));
        for y in 1..out_height {
            for x in 0..out_width {
                // O(n^2) per pixel: directly sums the strictly-above-left region.
                let mut sum = 0u32;
                for iy in 0..y {
                    for ix in 0..x {
                        sum += image.get_pixel(ix, iy)[0] as u32;
                    }
                }
                out.put_pixel(x, y, Luma([sum]));
            }
        }
        out
    }
    // Property test: the fast implementation agrees with the naive reference
    // on arbitrary quickcheck-generated grayscale images.
    #[test]
    fn test_integral_image_matches_reference_implementation() {
        fn prop(image: GrayTestImage) -> TestResult {
            let expected = integral_image_ref(&image.0);
            let actual = integral_image(&image.0);
            match pixel_diff_summary(&actual, &expected) {
                None => TestResult::passed(),
                Some(err) => TestResult::error(err),
            }
        }
        quickcheck(prop as fn(GrayTestImage) -> TestResult);
    }
    #[bench]
    fn bench_row_running_sum(b: &mut test::Bencher) {
        let image = gray_bench_image(1000, 1);
        let mut buffer = [0; 1010];
        b.iter(|| { row_running_sum(&image, 0, &mut buffer, 5); });
    }
    #[bench]
    fn bench_column_running_sum(b: &mut test::Bencher) {
        let image = gray_bench_image(100, 1000);
        let mut buffer = [0; 1010];
        b.iter(|| { column_running_sum(&image, 0, &mut buffer, 5); });
    }
}
| 33.119306 | 109 | 0.57054 |
09ef69e9690ab8a486cb7ac55927b81f09158a46 | 82,516 | use crate::middle::cstore::{ExternCrate, ExternCrateSource};
use crate::mir::interpret::{AllocId, ConstValue, GlobalAlloc, Pointer, Scalar};
use crate::ty::subst::{GenericArg, GenericArgKind, Subst};
use crate::ty::{self, ConstInt, DefIdTree, ParamConst, ScalarInt, Ty, TyCtxt, TypeFoldable};
use rustc_apfloat::ieee::{Double, Single};
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::def::{self, CtorKind, DefKind, Namespace};
use rustc_hir::def_id::{CrateNum, DefId, DefIdSet, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_hir::definitions::{DefPathData, DefPathDataName, DisambiguatedDefPathData};
use rustc_hir::ItemKind;
use rustc_session::config::TrimmedDefPaths;
use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
use std::cell::Cell;
use std::char;
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fmt::{self, Write as _};
use std::ops::{ControlFlow, Deref, DerefMut};
// `pretty` is a separate module only for organization.
use super::*;
// Shorthand printing macro used throughout this module. Every arm forwards to
// the printer bound by `define_scoped_cx!` (reachable as `scoped_cx!()`), and
// uses `?` so errors propagate from the enclosing function.
macro_rules! p {
    // `p!(@"literal")`: write a bare string literal.
    (@$lit:literal) => {
        write!(scoped_cx!(), $lit)?
    };
    // `p!(@write(fmt, args...))`: formatted write with arbitrary arguments.
    (@write($($data:expr),+)) => {
        write!(scoped_cx!(), $($data),+)?
    };
    // `p!(@print(x))`: print `x` via its `Print` impl; the printer is moved
    // through the call and rebound, because printing consumes `self`.
    (@print($x:expr)) => {
        scoped_cx!() = $x.print(scoped_cx!())?
    };
    // `p!(@method(args...))`: invoke an arbitrary printer method, rebinding
    // the printer in the same way.
    (@$method:ident($($arg:expr),*)) => {
        scoped_cx!() = scoped_cx!().$method($($arg),*)?
    };
    // Top-level entry point: a comma-separated sequence of the forms above.
    ($($elem:tt $(($($args:tt)*))?),+) => {{
        $(p!(@ $elem $(($($args)*))?);)+
    }};
}
// Binds the local macro `scoped_cx!()` (consumed by `p!`) to the given printer
// variable, so `p!` arms can read and reassign it by name.
macro_rules! define_scoped_cx {
    ($cx:ident) => {
        #[allow(unused_macros)]
        macro_rules! scoped_cx {
            () => {
                $cx
            };
        }
    };
}
thread_local! {
    // When `true`, impl paths are printed using only filename/line
    // (toggled by `with_forced_impl_filename_line`).
    static FORCE_IMPL_FILENAME_LINE: Cell<bool> = Cell::new(false);
    // When `true`, paths get a leading `crate::` where appropriate
    // (toggled by `with_crate_prefix`).
    static SHOULD_PREFIX_WITH_CRATE: Cell<bool> = Cell::new(false);
    // When `true`, trimmed-path printing is disabled
    // (toggled by `with_no_trimmed_paths`).
    static NO_TRIMMED_PATH: Cell<bool> = Cell::new(false);
    // When `true`, printing must not run queries
    // (toggled by `with_no_queries`).
    static NO_QUERIES: Cell<bool> = Cell::new(false);
}
/// Runs `f` with the thread-local `NO_QUERIES` flag set, so that no queries
/// are executed by any printing that happens inside the closure. This may
/// alter how some types render (e.g. opaque types fall back to verbose
/// printing), and exists so that debug printing performed *during* a query
/// (e.g. `explicit_item_bounds` for opaque types) cannot recursively invoke
/// that same query. The previous flag value is restored on exit.
pub fn with_no_queries<F, R>(f: F) -> R
where
    F: FnOnce() -> R,
{
    NO_QUERIES.with(|cell| {
        let saved = cell.replace(true);
        let ret = f();
        cell.set(saved);
        ret
    })
}
/// Runs `f` with the thread-local `FORCE_IMPL_FILENAME_LINE` flag set, forcing
/// impls to be named by filename/line number instead of by type. We normally
/// prefer types, but in some situations — notably while printing cycle
/// errors — that produces extra or suboptimal output. The previous flag value
/// is restored on exit.
pub fn with_forced_impl_filename_line<F, R>(f: F) -> R
where
    F: FnOnce() -> R,
{
    FORCE_IMPL_FILENAME_LINE.with(|cell| {
        let saved = cell.replace(true);
        let ret = f();
        cell.set(saved);
        ret
    })
}
/// Runs `f` with the thread-local `SHOULD_PREFIX_WITH_CRATE` flag set, so that
/// paths printed inside the closure gain a `crate::` prefix where appropriate.
/// The previous flag value is restored on exit.
pub fn with_crate_prefix<F, R>(f: F) -> R
where
    F: FnOnce() -> R,
{
    SHOULD_PREFIX_WITH_CRATE.with(|cell| {
        let saved = cell.replace(true);
        let ret = f();
        cell.set(saved);
        ret
    })
}
/// Runs `f` with the thread-local `NO_TRIMMED_PATH` flag set, suppressing path
/// trimming inside the closure. Path trimming affects the `Display` impls of
/// various rustc types — e.g. `std::vec::Vec` would otherwise be shortened to
/// `Vec` when no other `Vec` is in scope. The previous flag value is restored
/// on exit.
pub fn with_no_trimmed_paths<F, R>(f: F) -> R
where
    F: FnOnce() -> R,
{
    NO_TRIMMED_PATH.with(|cell| {
        let saved = cell.replace(true);
        let ret = f();
        cell.set(saved);
        ret
    })
}
/// The "region highlights" are used to control region printing during
/// specific error messages. When a "region highlight" is enabled, it
/// gives an alternate way to print specific regions. For now, we
/// always print those regions using a number, so something like "`'0`".
///
/// Regions not selected by the region highlight mode are presently
/// unaffected.
#[derive(Copy, Clone, Default)]
pub struct RegionHighlightMode {
    /// If enabled, when we see the selected region, use "`'N`"
    /// instead of the ordinary behavior.
    ///
    /// At most 3 regions can be highlighted at a time (fixed-size array;
    /// see `highlighting_region`, which panics when all slots are full).
    highlight_regions: [Option<(ty::RegionKind, usize)>; 3],
    /// If enabled, when printing a "free region" that originated from
    /// the given `ty::BoundRegion`, print it as "`'1`". Free regions that would ordinarily
    /// have names print as normal.
    ///
    /// This is used when you have a signature like `fn foo(x: &u32,
    /// y: &'a u32)` and we want to give a name to the region of the
    /// reference `x`.
    highlight_bound_region: Option<(ty::BoundRegion, usize)>,
}
impl RegionHighlightMode {
    /// If `region` and `number` are both `Some`, invokes
    /// `highlighting_region`; otherwise does nothing.
    pub fn maybe_highlighting_region(
        &mut self,
        region: Option<ty::Region<'_>>,
        number: Option<usize>,
    ) {
        if let Some(k) = region {
            if let Some(n) = number {
                self.highlighting_region(k, n);
            }
        }
    }
    /// Highlights the region inference variable `vid` as `'N`.
    ///
    /// # Panics
    /// Panics (via `bug!`) if all highlight slots are already occupied.
    pub fn highlighting_region(&mut self, region: ty::Region<'_>, number: usize) {
        let num_slots = self.highlight_regions.len();
        // Store into the first free slot; running out is a compiler bug.
        let first_avail_slot =
            self.highlight_regions.iter_mut().find(|s| s.is_none()).unwrap_or_else(|| {
                bug!("can only highlight {} placeholders at a time", num_slots,)
            });
        *first_avail_slot = Some((*region, number));
    }
    /// Convenience wrapper for `highlighting_region`.
    pub fn highlighting_region_vid(&mut self, vid: ty::RegionVid, number: usize) {
        self.highlighting_region(&ty::ReVar(vid), number)
    }
    /// Returns `Some(n)` with the number to use for the given region, if any.
    fn region_highlighted(&self, region: ty::Region<'_>) -> Option<usize> {
        self.highlight_regions.iter().find_map(|h| match h {
            Some((r, n)) if r == region => Some(*n),
            _ => None,
        })
    }
    /// Highlight the given bound region.
    /// We can only highlight one bound region at a time. See
    /// the field `highlight_bound_region` for more detailed notes.
    ///
    /// # Panics
    /// Asserts that no bound region is currently highlighted.
    pub fn highlighting_bound_region(&mut self, br: ty::BoundRegion, number: usize) {
        assert!(self.highlight_bound_region.is_none());
        self.highlight_bound_region = Some((br, number));
    }
}
/// Trait for printers that pretty-print using `fmt::Write` to the printer.
pub trait PrettyPrinter<'tcx>:
Printer<
'tcx,
Error = fmt::Error,
Path = Self,
Region = Self,
Type = Self,
DynExistential = Self,
Const = Self,
> + fmt::Write
{
    /// Like `print_def_path` but for value paths.
    fn print_value_path(
        self,
        def_id: DefId,
        substs: &'tcx [GenericArg<'tcx>],
    ) -> Result<Self::Path, Self::Error> {
        // Default behavior: value paths print identically to type paths.
        self.print_def_path(def_id, substs)
    }
    /// Prints the contents of a binder. The default simply skips the binder
    /// and prints the bound value; printers may override this to render the
    /// bound variables (e.g. `for<'a> ...`).
    fn in_binder<T>(self, value: &ty::Binder<T>) -> Result<Self, Self::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
    {
        value.as_ref().skip_binder().print(self)
    }
    /// Like `in_binder`, but lets the caller supply the printing closure `f`
    /// that receives the skipped-binder value. The default ignores the binder
    /// itself; printers may override to render bound variables.
    fn wrap_binder<T, F: Fn(&T, Self) -> Result<Self, fmt::Error>>(
        self,
        value: &ty::Binder<T>,
        f: F,
    ) -> Result<Self, Self::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
    {
        f(value.as_ref().skip_binder(), self)
    }
/// Prints comma-separated elements.
fn comma_sep<T>(mut self, mut elems: impl Iterator<Item = T>) -> Result<Self, Self::Error>
where
T: Print<'tcx, Self, Output = Self, Error = Self::Error>,
{
if let Some(first) = elems.next() {
self = first.print(self)?;
for elem in elems {
self.write_str(", ")?;
self = elem.print(self)?;
}
}
Ok(self)
}
    /// Prints `{f: t}` or `{f as t}` depending on the `cast` argument
    /// (`conversion` is the separator text, e.g. `": "` or `" as "`).
    fn typed_value(
        mut self,
        f: impl FnOnce(Self) -> Result<Self, Self::Error>,
        t: impl FnOnce(Self) -> Result<Self, Self::Error>,
        conversion: &str,
    ) -> Result<Self::Const, Self::Error> {
        // Emit: `{` <value> <conversion> <type> `}`.
        self.write_str("{")?;
        self = f(self)?;
        self.write_str(conversion)?;
        self = t(self)?;
        self.write_str("}")?;
        Ok(self)
    }
    /// Prints `<...>` around what `f` prints.
    /// (Required method — each printer decides how to render the delimiters.)
    fn generic_delimiters(
        self,
        f: impl FnOnce(Self) -> Result<Self, Self::Error>,
    ) -> Result<Self, Self::Error>;
    /// Returns `true` if the region should be printed in
    /// optional positions, e.g., `&'a T` or `dyn Tr + 'b`.
    /// This is typically the case for all non-`'_` regions.
    /// (Required method.)
    fn region_should_not_be_omitted(&self, region: ty::Region<'_>) -> bool;
    // Defaults (should not be overridden):
    /// If possible, this returns a global path resolving to `def_id` that is visible
    /// from at least one local module, and returns `true`. If the crate defining `def_id` is
    /// declared with an `extern crate`, the path is guaranteed to use the `extern crate`.
    fn try_print_visible_def_path(self, def_id: DefId) -> Result<(Self, bool), Self::Error> {
        // `callers` tracks the chain of visible parents for cycle detection
        // inside the recursive helper.
        let mut callers = Vec::new();
        self.try_print_visible_def_path_recur(def_id, &mut callers)
    }
    /// Try to see if this path can be trimmed to a unique symbol name.
    ///
    /// Returns `(self, true)` when the trimmed name was written, and
    /// `(self, false)` when trimming is disabled or no unique name exists,
    /// in which case nothing has been written.
    fn try_print_trimmed_def_path(
        mut self,
        def_id: DefId,
    ) -> Result<(Self::Path, bool), Self::Error> {
        // Trimming is skipped when disabled by options, or when one of the
        // thread-local printing modes (no-trim / crate-prefix) is active.
        if !self.tcx().sess.opts.debugging_opts.trim_diagnostic_paths
            || matches!(self.tcx().sess.opts.trimmed_def_paths, TrimmedDefPaths::Never)
            || NO_TRIMMED_PATH.with(|flag| flag.get())
            || SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get())
        {
            return Ok((self, false));
        }
        match self.tcx().trimmed_def_paths(LOCAL_CRATE).get(&def_id) {
            None => Ok((self, false)),
            Some(symbol) => {
                self.write_str(&symbol.as_str())?;
                Ok((self, true))
            }
        }
    }
    /// Does the work of `try_print_visible_def_path`, building the
    /// full definition path recursively before attempting to
    /// post-process it into the valid and visible version that
    /// accounts for re-exports.
    ///
    /// This method should only be called by itself or
    /// `try_print_visible_def_path`.
    ///
    /// `callers` is a chain of visible_parent's leading to `def_id`,
    /// to support cycle detection during recursion.
    fn try_print_visible_def_path_recur(
        mut self,
        def_id: DefId,
        callers: &mut Vec<DefId>,
    ) -> Result<(Self, bool), Self::Error> {
        define_scoped_cx!(self);
        debug!("try_print_visible_def_path: def_id={:?}", def_id);
        // If `def_id` is a direct or injected extern crate, return the
        // path to the crate followed by the path to the item within the crate.
        if def_id.index == CRATE_DEF_INDEX {
            let cnum = def_id.krate;
            if cnum == LOCAL_CRATE {
                return Ok((self.path_crate(cnum)?, true));
            }
            // In local mode, when we encounter a crate other than
            // LOCAL_CRATE, execution proceeds in one of two ways:
            //
            // 1. For a direct dependency, where user added an
            //    `extern crate` manually, we put the `extern
            //    crate` as the parent. So you wind up with
            //    something relative to the current crate.
            // 2. For an extern inferred from a path or an indirect crate,
            //    where there is no explicit `extern crate`, we just prepend
            //    the crate name.
            match self.tcx().extern_crate(def_id) {
                Some(&ExternCrate { src, dependency_of, span, .. }) => match (src, dependency_of) {
                    (ExternCrateSource::Extern(def_id), LOCAL_CRATE) => {
                        debug!("try_print_visible_def_path: def_id={:?}", def_id);
                        return Ok((
                            // A dummy span means the `extern crate` was
                            // injected; print the crate name directly then.
                            if !span.is_dummy() {
                                self.print_def_path(def_id, &[])?
                            } else {
                                self.path_crate(cnum)?
                            },
                            true,
                        ));
                    }
                    (ExternCrateSource::Path, LOCAL_CRATE) => {
                        debug!("try_print_visible_def_path: def_id={:?}", def_id);
                        return Ok((self.path_crate(cnum)?, true));
                    }
                    _ => {}
                },
                None => {
                    return Ok((self.path_crate(cnum)?, true));
                }
            }
        }
        if def_id.is_local() {
            return Ok((self, false));
        }
        let visible_parent_map = self.tcx().visible_parent_map(LOCAL_CRATE);
        let mut cur_def_key = self.tcx().def_key(def_id);
        debug!("try_print_visible_def_path: cur_def_key={:?}", cur_def_key);
        // For a constructor, we want the name of its parent rather than <unnamed>.
        if let DefPathData::Ctor = cur_def_key.disambiguated_data.data {
            let parent = DefId {
                krate: def_id.krate,
                index: cur_def_key
                    .parent
                    .expect("`DefPathData::Ctor` / `VariantData` missing a parent"),
            };
            cur_def_key = self.tcx().def_key(parent);
        }
        let visible_parent = match visible_parent_map.get(&def_id).cloned() {
            Some(parent) => parent,
            None => return Ok((self, false)),
        };
        // Cycle detection: bail if this parent is already on the chain.
        if callers.contains(&visible_parent) {
            return Ok((self, false));
        }
        callers.push(visible_parent);
        // HACK(eddyb) this bypasses `path_append`'s prefix printing to avoid
        // knowing ahead of time whether the entire path will succeed or not.
        // To support printers that do not implement `PrettyPrinter`, a `Vec` or
        // linked list on the stack would need to be built, before any printing.
        match self.try_print_visible_def_path_recur(visible_parent, callers)? {
            (cx, false) => return Ok((cx, false)),
            (cx, true) => self = cx,
        }
        callers.pop();
        let actual_parent = self.tcx().parent(def_id);
        debug!(
            "try_print_visible_def_path: visible_parent={:?} actual_parent={:?}",
            visible_parent, actual_parent,
        );
        let mut data = cur_def_key.disambiguated_data.data;
        debug!(
            "try_print_visible_def_path: data={:?} visible_parent={:?} actual_parent={:?}",
            data, visible_parent, actual_parent,
        );
        match data {
            // In order to output a path that could actually be imported (valid and visible),
            // we need to handle re-exports correctly.
            //
            // For example, take `std::os::unix::process::CommandExt`, this trait is actually
            // defined at `std::sys::unix::ext::process::CommandExt` (at time of writing).
            //
            // `std::os::unix` rexports the contents of `std::sys::unix::ext`. `std::sys` is
            // private so the "true" path to `CommandExt` isn't accessible.
            //
            // In this case, the `visible_parent_map` will look something like this:
            //
            // (child) -> (parent)
            // `std::sys::unix::ext::process::CommandExt` -> `std::sys::unix::ext::process`
            // `std::sys::unix::ext::process` -> `std::sys::unix::ext`
            // `std::sys::unix::ext` -> `std::os`
            //
            // This is correct, as the visible parent of `std::sys::unix::ext` is in fact
            // `std::os`.
            //
            // When printing the path to `CommandExt` and looking at the `cur_def_key` that
            // corresponds to `std::sys::unix::ext`, we would normally print `ext` and then go
            // to the parent - resulting in a mangled path like
            // `std::os::ext::process::CommandExt`.
            //
            // Instead, we must detect that there was a re-export and instead print `unix`
            // (which is the name `std::sys::unix::ext` was re-exported as in `std::os`). To
            // do this, we compare the parent of `std::sys::unix::ext` (`std::sys::unix`) with
            // the visible parent (`std::os`). If these do not match, then we iterate over
            // the children of the visible parent (as was done when computing
            // `visible_parent_map`), looking for the specific child we currently have and then
            // have access to the re-exported name.
            DefPathData::TypeNs(ref mut name) if Some(visible_parent) != actual_parent => {
                let reexport = self
                    .tcx()
                    .item_children(visible_parent)
                    .iter()
                    .find(|child| child.res.opt_def_id() == Some(def_id))
                    .map(|child| child.ident.name);
                if let Some(reexport) = reexport {
                    *name = reexport;
                }
            }
            // Re-exported `extern crate` (#43189).
            DefPathData::CrateRoot => {
                data = DefPathData::TypeNs(self.tcx().original_crate_name(def_id.krate));
            }
            _ => {}
        }
        debug!("try_print_visible_def_path: data={:?}", data);
        Ok((self.path_append(Ok, &DisambiguatedDefPathData { data, disambiguator: 0 })?, true))
    }
    /// Prints a qualified path rooted at `self_ty`, optionally qualified by a
    /// trait: `<self_ty as Trait>` — or just the plain self type for simple
    /// inherent-impl cases.
    fn pretty_path_qualified(
        self,
        self_ty: Ty<'tcx>,
        trait_ref: Option<ty::TraitRef<'tcx>>,
    ) -> Result<Self::Path, Self::Error> {
        if trait_ref.is_none() {
            // Inherent impls. Try to print `Foo::bar` for an inherent
            // impl on `Foo`, but fallback to `<Foo>::bar` if self-type is
            // anything other than a simple path.
            match self_ty.kind() {
                ty::Adt(..)
                | ty::Foreign(_)
                | ty::Bool
                | ty::Char
                | ty::Str
                | ty::Int(_)
                | ty::Uint(_)
                | ty::Float(_) => {
                    return self_ty.print(self);
                }
                _ => {}
            }
        }
        // Otherwise print the fully-qualified `<Self as Trait>` form.
        self.generic_delimiters(|mut cx| {
            define_scoped_cx!(cx);
            p!(print(self_ty));
            if let Some(trait_ref) = trait_ref {
                p!(" as ", print(trait_ref.print_only_trait_path()));
            }
            Ok(cx)
        })
    }
    /// Appends an impl segment to the path printed by `print_prefix`,
    /// rendered as `<impl Trait for Type>` (or `<impl Type>` for an
    /// inherent impl).
    fn pretty_path_append_impl(
        mut self,
        print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
        self_ty: Ty<'tcx>,
        trait_ref: Option<ty::TraitRef<'tcx>>,
    ) -> Result<Self::Path, Self::Error> {
        self = print_prefix(self)?;
        self.generic_delimiters(|mut cx| {
            define_scoped_cx!(cx);
            p!("impl ");
            if let Some(trait_ref) = trait_ref {
                p!(print(trait_ref.print_only_trait_path()), " for ");
            }
            p!(print(self_ty));
            Ok(cx)
        })
    }
    /// Pretty-prints a type, dispatching on every `TyKind` variant.
    /// This is the core of user-facing type rendering (diagnostics etc.).
    fn pretty_print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error> {
        define_scoped_cx!(self);
        match *ty.kind() {
            ty::Bool => p!("bool"),
            ty::Char => p!("char"),
            ty::Int(t) => p!(write("{}", t.name_str())),
            ty::Uint(t) => p!(write("{}", t.name_str())),
            ty::Float(t) => p!(write("{}", t.name_str())),
            ty::RawPtr(ref tm) => {
                p!(write(
                    "*{} ",
                    match tm.mutbl {
                        hir::Mutability::Mut => "mut",
                        hir::Mutability::Not => "const",
                    }
                ));
                p!(print(tm.ty))
            }
            ty::Ref(r, ty, mutbl) => {
                p!("&");
                // Lifetimes are only shown when the printer deems them
                // significant (e.g. not `'_`).
                if self.region_should_not_be_omitted(r) {
                    p!(print(r), " ");
                }
                p!(print(ty::TypeAndMut { ty, mutbl }))
            }
            ty::Never => p!("!"),
            ty::Tuple(ref tys) => {
                p!("(", comma_sep(tys.iter()));
                // 1-tuples need a trailing comma: `(T,)`.
                if tys.len() == 1 {
                    p!(",");
                }
                p!(")")
            }
            ty::FnDef(def_id, substs) => {
                // Print the signature plus the item path, e.g.
                // `fn(u32) -> u32 {foo}`.
                let sig = self.tcx().fn_sig(def_id).subst(self.tcx(), substs);
                p!(print(sig), " {{", print_value_path(def_id, substs), "}}");
            }
            ty::FnPtr(ref bare_fn) => p!(print(bare_fn)),
            ty::Infer(infer_ty) => {
                if let ty::TyVar(ty_vid) = infer_ty {
                    // Prefer a printer-provided name for the inference var.
                    if let Some(name) = self.infer_ty_name(ty_vid) {
                        p!(write("{}", name))
                    } else {
                        p!(write("{}", infer_ty))
                    }
                } else {
                    p!(write("{}", infer_ty))
                }
            }
            ty::Error(_) => p!("[type error]"),
            ty::Param(ref param_ty) => p!(write("{}", param_ty)),
            ty::Bound(debruijn, bound_ty) => match bound_ty.kind {
                ty::BoundTyKind::Anon => self.pretty_print_bound_var(debruijn, bound_ty.var)?,
                ty::BoundTyKind::Param(p) => p!(write("{}", p)),
            },
            ty::Adt(def, substs) => {
                p!(print_def_path(def.did, substs));
            }
            ty::Dynamic(data, r) => {
                // Parenthesize `dyn Tr + 'r` only when the region prints.
                let print_r = self.region_should_not_be_omitted(r);
                if print_r {
                    p!("(");
                }
                p!("dyn ", print(data));
                if print_r {
                    p!(" + ", print(r), ")");
                }
            }
            ty::Foreign(def_id) => {
                p!(print_def_path(def_id, &[]));
            }
            ty::Projection(ref data) => p!(print(data)),
            ty::Placeholder(placeholder) => p!(write("Placeholder({:?})", placeholder)),
            ty::Opaque(def_id, substs) => {
                // FIXME(eddyb) print this with `print_def_path`.
                // We use verbose printing in 'NO_QUERIES' mode, to
                // avoid needing to call `predicates_of`. This should
                // only affect certain debug messages (e.g. messages printed
                // from `rustc_middle::ty` during the computation of `tcx.predicates_of`),
                // and should have no effect on any compiler output.
                if self.tcx().sess.verbose() || NO_QUERIES.with(|q| q.get()) {
                    p!(write("Opaque({:?}, {:?})", def_id, substs));
                    return Ok(self);
                }
                return Ok(with_no_queries(|| {
                    let def_key = self.tcx().def_key(def_id);
                    if let Some(name) = def_key.disambiguated_data.data.get_opt_name() {
                        p!(write("{}", name));
                        // FIXME(eddyb) print this with `print_def_path`.
                        if !substs.is_empty() {
                            p!("::");
                            p!(generic_delimiters(|cx| cx.comma_sep(substs.iter())));
                        }
                        return Ok(self);
                    }
                    // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
                    // by looking up the projections associated with the def_id.
                    let bounds = self.tcx().explicit_item_bounds(def_id);
                    let mut first = true;
                    let mut is_sized = false;
                    p!("impl");
                    for (predicate, _) in bounds {
                        let predicate = predicate.subst(self.tcx(), substs);
                        // Note: We can't use `to_opt_poly_trait_ref` here as `predicate`
                        // may contain unbound variables. We therefore do this manually.
                        //
                        // FIXME(lcnr): Find out why exactly this is the case :)
                        let bound_predicate = predicate.bound_atom_with_opt_escaping(self.tcx());
                        if let ty::PredicateAtom::Trait(pred, _) = bound_predicate.skip_binder() {
                            let trait_ref = bound_predicate.rebind(pred.trait_ref);
                            // Don't print +Sized, but rather +?Sized if absent.
                            if Some(trait_ref.def_id()) == self.tcx().lang_items().sized_trait() {
                                is_sized = true;
                                continue;
                            }
                            p!(
                                write("{}", if first { " " } else { "+" }),
                                print(trait_ref.print_only_trait_path())
                            );
                            first = false;
                        }
                    }
                    if !is_sized {
                        p!(write("{}?Sized", if first { " " } else { "+" }));
                    } else if first {
                        p!(" Sized");
                    }
                    Ok(self)
                })?);
            }
            ty::Str => p!("str"),
            ty::Generator(did, substs, movability) => {
                p!(write("["));
                match movability {
                    hir::Movability::Movable => {}
                    hir::Movability::Static => p!("static "),
                }
                if !self.tcx().sess.verbose() {
                    p!("generator");
                    // FIXME(eddyb) should use `def_span`.
                    if let Some(did) = did.as_local() {
                        let hir_id = self.tcx().hir().local_def_id_to_hir_id(did);
                        let span = self.tcx().hir().span(hir_id);
                        p!(write("@{}", self.tcx().sess.source_map().span_to_string(span)));
                    } else {
                        p!(write("@"), print_def_path(did, substs));
                    }
                } else {
                    // Verbose mode: full path plus captured upvar types.
                    p!(print_def_path(did, substs));
                    p!(" upvar_tys=(");
                    if !substs.as_generator().is_valid() {
                        p!("unavailable");
                    } else {
                        self = self.comma_sep(substs.as_generator().upvar_tys())?;
                    }
                    p!(")");
                }
                if substs.as_generator().is_valid() {
                    p!(" ", print(substs.as_generator().witness()));
                }
                p!("]")
            }
            ty::GeneratorWitness(types) => {
                p!(in_binder(&types));
            }
            ty::Closure(did, substs) => {
                p!(write("["));
                if !self.tcx().sess.verbose() {
                    p!(write("closure"));
                    // FIXME(eddyb) should use `def_span`.
                    if let Some(did) = did.as_local() {
                        let hir_id = self.tcx().hir().local_def_id_to_hir_id(did);
                        if self.tcx().sess.opts.debugging_opts.span_free_formats {
                            p!("@", print_def_path(did.to_def_id(), substs));
                        } else {
                            let span = self.tcx().hir().span(hir_id);
                            p!(write("@{}", self.tcx().sess.source_map().span_to_string(span)));
                        }
                    } else {
                        p!(write("@"), print_def_path(did, substs));
                    }
                } else {
                    // Verbose mode: full closure substs breakdown.
                    p!(print_def_path(did, substs));
                    if !substs.as_closure().is_valid() {
                        p!(" closure_substs=(unavailable)");
                    } else {
                        p!(" closure_kind_ty=", print(substs.as_closure().kind_ty()));
                        p!(
                            " closure_sig_as_fn_ptr_ty=",
                            print(substs.as_closure().sig_as_fn_ptr_ty())
                        );
                        p!(" upvar_tys=(");
                        self = self.comma_sep(substs.as_closure().upvar_tys())?;
                        p!(")");
                    }
                }
                p!("]");
            }
            ty::Array(ty, sz) => {
                p!("[", print(ty), "; ");
                if self.tcx().sess.verbose() {
                    p!(write("{:?}", sz));
                } else if let ty::ConstKind::Unevaluated(..) = sz.val {
                    // Do not try to evaluate unevaluated constants. If we are const evaluating an
                    // array length anon const, rustc will (with debug assertions) print the
                    // constant's path. Which will end up here again.
                    p!("_");
                } else if let Some(n) = sz.val.try_to_bits(self.tcx().data_layout.pointer_size) {
                    p!(write("{}", n));
                } else if let ty::ConstKind::Param(param) = sz.val {
                    p!(write("{}", param));
                } else {
                    p!("_");
                }
                p!("]")
            }
            ty::Slice(ty) => p!("[", print(ty), "]"),
        }
        Ok(self)
    }
fn pretty_print_bound_var(
&mut self,
debruijn: ty::DebruijnIndex,
var: ty::BoundVar,
) -> Result<(), Self::Error> {
if debruijn == ty::INNERMOST {
write!(self, "^{}", var.index())
} else {
write!(self, "^{}_{}", debruijn.index(), var.index())
}
}
    /// Optional hook: returns a display name for a type inference variable.
    /// The default has no naming scheme and returns `None`.
    fn infer_ty_name(&self, _: ty::TyVid) -> Option<String> {
        None
    }
    /// Pretty-prints the predicate list of a `dyn Trait` type: the principal
    /// trait (with generic args and associated-type projections), followed by
    /// the auto traits joined with `+`. `Fn`-family principals are resugared
    /// to `Fn(args) -> Ret` form.
    fn pretty_print_dyn_existential(
        mut self,
        predicates: &'tcx ty::List<ty::Binder<ty::ExistentialPredicate<'tcx>>>,
    ) -> Result<Self::DynExistential, Self::Error> {
        // Generate the main trait ref, including associated types.
        let mut first = true;
        if let Some(principal) = predicates.principal() {
            self = self.wrap_binder(&principal, |principal, mut cx| {
                define_scoped_cx!(cx);
                p!(print_def_path(principal.def_id, &[]));
                let mut resugared = false;
                // Special-case `Fn(...) -> ...` and resugar it.
                let fn_trait_kind = cx.tcx().fn_trait_kind_from_lang_item(principal.def_id);
                if !cx.tcx().sess.verbose() && fn_trait_kind.is_some() {
                    if let ty::Tuple(ref args) = principal.substs.type_at(0).kind() {
                        let mut projections = predicates.projection_bounds();
                        // Resugar only with exactly one projection (the
                        // `Output` associated type).
                        if let (Some(proj), None) = (projections.next(), projections.next()) {
                            let tys: Vec<_> = args.iter().map(|k| k.expect_ty()).collect();
                            p!(pretty_fn_sig(&tys, false, proj.skip_binder().ty));
                            resugared = true;
                        }
                    }
                }
                // HACK(eddyb) this duplicates `FmtPrinter`'s `path_generic_args`,
                // in order to place the projections inside the `<...>`.
                if !resugared {
                    // Use a type that can't appear in defaults of type parameters.
                    let dummy_cx = cx.tcx().mk_ty_infer(ty::FreshTy(0));
                    let principal = principal.with_self_ty(cx.tcx(), dummy_cx);
                    let args = cx.generic_args_to_print(
                        cx.tcx().generics_of(principal.def_id),
                        principal.substs,
                    );
                    // Don't print `'_` if there's no unerased regions.
                    let print_regions = args.iter().any(|arg| match arg.unpack() {
                        GenericArgKind::Lifetime(r) => *r != ty::ReErased,
                        _ => false,
                    });
                    let mut args = args.iter().cloned().filter(|arg| match arg.unpack() {
                        GenericArgKind::Lifetime(_) => print_regions,
                        _ => true,
                    });
                    let mut projections = predicates.projection_bounds();
                    let arg0 = args.next();
                    let projection0 = projections.next();
                    // Emit `<...>` only when there is something to print.
                    if arg0.is_some() || projection0.is_some() {
                        let args = arg0.into_iter().chain(args);
                        let projections = projection0.into_iter().chain(projections);
                        p!(generic_delimiters(|mut cx| {
                            cx = cx.comma_sep(args)?;
                            if arg0.is_some() && projection0.is_some() {
                                write!(cx, ", ")?;
                            }
                            cx.comma_sep(projections)
                        }));
                    }
                }
                Ok(cx)
            })?;
            first = false;
        }
        define_scoped_cx!(self);
        // Builtin bounds.
        // FIXME(eddyb) avoid printing twice (needed to ensure
        // that the auto traits are sorted *and* printed via cx).
        let mut auto_traits: Vec<_> =
            predicates.auto_traits().map(|did| (self.tcx().def_path_str(did), did)).collect();
        // The auto traits come ordered by `DefPathHash`. While
        // `DefPathHash` is *stable* in the sense that it depends on
        // neither the host nor the phase of the moon, it depends
        // "pseudorandomly" on the compiler version and the target.
        //
        // To avoid that causing instabilities in compiletest
        // output, sort the auto-traits alphabetically.
        auto_traits.sort();
        for (_, def_id) in auto_traits {
            if !first {
                p!(" + ");
            }
            first = false;
            p!(print_def_path(def_id, &[]));
        }
        Ok(self)
    }
    /// Prints a function signature: `(inputs) -> output`, with `...` appended
    /// for C-variadic functions and the `-> output` part omitted when the
    /// return type is `()`.
    fn pretty_fn_sig(
        mut self,
        inputs: &[Ty<'tcx>],
        c_variadic: bool,
        output: Ty<'tcx>,
    ) -> Result<Self, Self::Error> {
        define_scoped_cx!(self);
        p!("(", comma_sep(inputs.iter().copied()));
        if c_variadic {
            if !inputs.is_empty() {
                p!(", ");
            }
            p!("...");
        }
        p!(")");
        if !output.is_unit() {
            p!(" -> ", print(output));
        }
        Ok(self)
    }
    /// Pretty-prints a constant, dispatching on its `ConstKind`.
    /// `print_ty` controls whether unknown values render as `{_: Type}`
    /// rather than a bare `_`.
    fn pretty_print_const(
        mut self,
        ct: &'tcx ty::Const<'tcx>,
        print_ty: bool,
    ) -> Result<Self::Const, Self::Error> {
        define_scoped_cx!(self);
        if self.tcx().sess.verbose() {
            p!(write("Const({:?}: {:?})", ct.val, ct.ty));
            return Ok(self);
        }
        // Shared fallback for constants whose value is unknown: `{_: Ty}` or `_`.
        macro_rules! print_underscore {
            () => {{
                if print_ty {
                    self = self.typed_value(
                        |mut this| {
                            write!(this, "_")?;
                            Ok(this)
                        },
                        |this| this.print_type(ct.ty),
                        ": ",
                    )?;
                } else {
                    write!(self, "_")?;
                }
            }};
        }
        match ct.val {
            ty::ConstKind::Unevaluated(def, substs, promoted) => {
                if let Some(promoted) = promoted {
                    p!(print_value_path(def.did, substs));
                    p!(write("::{:?}", promoted));
                } else {
                    match self.tcx().def_kind(def.did) {
                        DefKind::Static | DefKind::Const | DefKind::AssocConst => {
                            p!(print_value_path(def.did, substs))
                        }
                        _ => {
                            // Anonymous const: print its source snippet if
                            // it is local and the snippet is available.
                            if def.is_local() {
                                let span = self.tcx().def_span(def.did);
                                if let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span)
                                {
                                    p!(write("{}", snip))
                                } else {
                                    print_underscore!()
                                }
                            } else {
                                print_underscore!()
                            }
                        }
                    }
                }
            }
            ty::ConstKind::Infer(..) => print_underscore!(),
            ty::ConstKind::Param(ParamConst { name, .. }) => p!(write("{}", name)),
            ty::ConstKind::Value(value) => {
                return self.pretty_print_const_value(value, ct.ty, print_ty);
            }
            ty::ConstKind::Bound(debruijn, bound_var) => {
                self.pretty_print_bound_var(debruijn, bound_var)?
            }
            ty::ConstKind::Placeholder(placeholder) => p!(write("Placeholder({:?})", placeholder)),
            ty::ConstKind::Error(_) => p!("[const error]"),
        };
        Ok(self)
    }
    /// Pretty-prints a scalar constant value, dispatching on the scalar kind
    /// together with the value's type (byte strings, bools, floats, ints,
    /// chars, raw/function pointers, and a transmute fallback for anything
    /// else with a scalar representation).
    fn pretty_print_const_scalar(
        mut self,
        scalar: Scalar,
        ty: Ty<'tcx>,
        print_ty: bool,
    ) -> Result<Self::Const, Self::Error> {
        define_scoped_cx!(self);
        match (scalar, &ty.kind()) {
            // Byte strings (&[u8; N])
            (
                Scalar::Ptr(ptr),
                ty::Ref(
                    _,
                    ty::TyS {
                        kind:
                            ty::Array(
                                ty::TyS { kind: ty::Uint(ast::UintTy::U8), .. },
                                ty::Const {
                                    val: ty::ConstKind::Value(ConstValue::Scalar(int)),
                                    ..
                                },
                            ),
                        ..
                    },
                    _,
                ),
            ) => match self.tcx().get_global_alloc(ptr.alloc_id) {
                Some(GlobalAlloc::Memory(alloc)) => {
                    // `int` is the array length; read that many bytes from
                    // the backing allocation.
                    let bytes = int.assert_bits(self.tcx().data_layout.pointer_size);
                    let size = Size::from_bytes(bytes);
                    if let Ok(byte_str) = alloc.get_bytes(&self.tcx(), ptr, size) {
                        p!(pretty_print_byte_str(byte_str))
                    } else {
                        p!("<too short allocation>")
                    }
                }
                // FIXME: for statics and functions, we could in principle print more detail.
                Some(GlobalAlloc::Static(def_id)) => p!(write("<static({:?})>", def_id)),
                Some(GlobalAlloc::Function(_)) => p!("<function>"),
                None => p!("<dangling pointer>"),
            },
            // Bool
            (Scalar::Int(int), ty::Bool) if int == ScalarInt::FALSE => p!("false"),
            (Scalar::Int(int), ty::Bool) if int == ScalarInt::TRUE => p!("true"),
            // Float
            (Scalar::Int(int), ty::Float(ast::FloatTy::F32)) => {
                p!(write("{}f32", Single::try_from(int).unwrap()))
            }
            (Scalar::Int(int), ty::Float(ast::FloatTy::F64)) => {
                p!(write("{}f64", Double::try_from(int).unwrap()))
            }
            // Int
            (Scalar::Int(int), ty::Uint(_) | ty::Int(_)) => {
                let int =
                    ConstInt::new(int, matches!(ty.kind(), ty::Int(_)), ty.is_ptr_sized_integral());
                // `{:#?}` includes the type suffix; `{:?}` is just the value.
                if print_ty { p!(write("{:#?}", int)) } else { p!(write("{:?}", int)) }
            }
            // Char
            (Scalar::Int(int), ty::Char) if char::try_from(int).is_ok() => {
                p!(write("{:?}", char::try_from(int).unwrap()))
            }
            // Raw pointers
            (Scalar::Int(int), ty::RawPtr(_)) => {
                let data = int.assert_bits(self.tcx().data_layout.pointer_size);
                self = self.typed_value(
                    |mut this| {
                        write!(this, "0x{:x}", data)?;
                        Ok(this)
                    },
                    |this| this.print_type(ty),
                    " as ",
                )?;
            }
            (Scalar::Ptr(ptr), ty::FnPtr(_)) => {
                // FIXME: this can ICE when the ptr is dangling or points to a non-function.
                // We should probably have a helper method to share code with the "Byte strings"
                // printing above (which also has to handle pointers to all sorts of things).
                let instance = self.tcx().global_alloc(ptr.alloc_id).unwrap_fn();
                self = self.typed_value(
                    |this| this.print_value_path(instance.def_id(), instance.substs),
                    |this| this.print_type(ty),
                    " as ",
                )?;
            }
            // For function type zsts just printing the path is enough
            (Scalar::Int(int), ty::FnDef(d, s)) if int == ScalarInt::ZST => {
                p!(print_value_path(*d, s))
            }
            // Nontrivial types with scalar bit representation
            (Scalar::Int(int), _) => {
                let print = |mut this: Self| {
                    if int.size() == Size::ZERO {
                        write!(this, "transmute(())")?;
                    } else {
                        write!(this, "transmute(0x{:x})", int)?;
                    }
                    Ok(this)
                };
                self = if print_ty {
                    self.typed_value(print, |this| this.print_type(ty), ": ")?
                } else {
                    print(self)?
                };
            }
            // Any pointer values not covered by a branch above
            (Scalar::Ptr(p), _) => {
                self = self.pretty_print_const_pointer(p, ty, print_ty)?;
            }
        }
        Ok(self)
    }
    /// This is overridden for MIR printing because we only want to hide alloc ids from users, not
    /// from MIR where it is actually useful.
    ///
    /// Default: prints an opaque `&_` (typed as `{&_: Ty}` when `print_ty`).
    fn pretty_print_const_pointer(
        mut self,
        _: Pointer,
        ty: Ty<'tcx>,
        print_ty: bool,
    ) -> Result<Self::Const, Self::Error> {
        if print_ty {
            self.typed_value(
                |mut this| {
                    this.write_str("&_")?;
                    Ok(this)
                },
                |this| this.print_type(ty),
                ": ",
            )
        } else {
            self.write_str("&_")?;
            Ok(self)
        }
    }
fn pretty_print_byte_str(mut self, byte_str: &'tcx [u8]) -> Result<Self::Const, Self::Error> {
define_scoped_cx!(self);
p!("b\"");
for &c in byte_str {
for e in std::ascii::escape_default(c) {
self.write_char(e as char)?;
}
}
p!("\"");
Ok(self)
}
    /// Pretty-prints an evaluated constant value: byte/string slices as
    /// literals, aggregates (arrays/tuples/ADTs) as construction syntax via
    /// `destructure_const`, scalars via `pretty_print_const_scalar`, and a
    /// `Debug` fallback for everything else.
    fn pretty_print_const_value(
        mut self,
        ct: ConstValue<'tcx>,
        ty: Ty<'tcx>,
        print_ty: bool,
    ) -> Result<Self::Const, Self::Error> {
        define_scoped_cx!(self);
        if self.tcx().sess.verbose() {
            p!(write("ConstValue({:?}: ", ct), print(ty), ")");
            return Ok(self);
        }
        let u8_type = self.tcx().types.u8;
        match (ct, ty.kind()) {
            // Byte/string slices, printed as (byte) string literals.
            (
                ConstValue::Slice { data, start, end },
                ty::Ref(_, ty::TyS { kind: ty::Slice(t), .. }, _),
            ) if *t == u8_type => {
                // The `inspect` here is okay since we checked the bounds, and there are
                // no relocations (we have an active slice reference here). We don't use
                // this result to affect interpreter execution.
                let byte_str = data.inspect_with_uninit_and_ptr_outside_interpreter(start..end);
                self.pretty_print_byte_str(byte_str)
            }
            (
                ConstValue::Slice { data, start, end },
                ty::Ref(_, ty::TyS { kind: ty::Str, .. }, _),
            ) => {
                // The `inspect` here is okay since we checked the bounds, and there are no
                // relocations (we have an active `str` reference here). We don't use this
                // result to affect interpreter execution.
                let slice = data.inspect_with_uninit_and_ptr_outside_interpreter(start..end);
                let s = std::str::from_utf8(slice).expect("non utf8 str from miri");
                p!(write("{:?}", s));
                Ok(self)
            }
            (ConstValue::ByRef { alloc, offset }, ty::Array(t, n)) if *t == u8_type => {
                let n = n.val.try_to_bits(self.tcx().data_layout.pointer_size).unwrap();
                // cast is ok because we already checked for pointer size (32 or 64 bit) above
                let n = Size::from_bytes(n);
                let ptr = Pointer::new(AllocId(0), offset);
                let byte_str = alloc.get_bytes(&self.tcx(), ptr, n).unwrap();
                p!("*");
                p!(pretty_print_byte_str(byte_str));
                Ok(self)
            }
            // Aggregates, printed as array/tuple/struct/variant construction syntax.
            //
            // NB: the `has_param_types_or_consts` check ensures that we can use
            // the `destructure_const` query with an empty `ty::ParamEnv` without
            // introducing ICEs (e.g. via `layout_of`) from missing bounds.
            // E.g. `transmute([0usize; 2]): (u8, *mut T)` needs to know `T: Sized`
            // to be able to destructure the tuple into `(0u8, *mut T)
            //
            // FIXME(eddyb) for `--emit=mir`/`-Z dump-mir`, we should provide the
            // correct `ty::ParamEnv` to allow printing *all* constant values.
            (_, ty::Array(..) | ty::Tuple(..) | ty::Adt(..)) if !ty.has_param_types_or_consts() => {
                let contents = self.tcx().destructure_const(
                    ty::ParamEnv::reveal_all()
                        .and(self.tcx().mk_const(ty::Const { val: ty::ConstKind::Value(ct), ty })),
                );
                let fields = contents.fields.iter().copied();
                match *ty.kind() {
                    ty::Array(..) => {
                        p!("[", comma_sep(fields), "]");
                    }
                    ty::Tuple(..) => {
                        p!("(", comma_sep(fields));
                        // 1-tuples need a trailing comma: `(v,)`.
                        if contents.fields.len() == 1 {
                            p!(",");
                        }
                        p!(")");
                    }
                    ty::Adt(def, substs) if def.variants.is_empty() => {
                        p!(print_value_path(def.did, substs));
                    }
                    ty::Adt(def, substs) => {
                        let variant_id =
                            contents.variant.expect("destructed const of adt without variant id");
                        let variant_def = &def.variants[variant_id];
                        p!(print_value_path(variant_def.def_id, substs));
                        // Render fields according to the constructor kind:
                        // unit, tuple, or braced struct syntax.
                        match variant_def.ctor_kind {
                            CtorKind::Const => {}
                            CtorKind::Fn => {
                                p!("(", comma_sep(fields), ")");
                            }
                            CtorKind::Fictive => {
                                p!(" {{ ");
                                let mut first = true;
                                for (field_def, field) in variant_def.fields.iter().zip(fields) {
                                    if !first {
                                        p!(", ");
                                    }
                                    p!(write("{}: ", field_def.ident), print(field));
                                    first = false;
                                }
                                p!(" }}");
                            }
                        }
                    }
                    _ => unreachable!(),
                }
                Ok(self)
            }
            (ConstValue::Scalar(scalar), _) => self.pretty_print_const_scalar(scalar, ty, print_ty),
            // FIXME(oli-obk): also pretty print arrays and other aggregate constants by reading
            // their fields instead of just dumping the memory.
            _ => {
                // fallback
                p!(write("{:?}", ct));
                if print_ty {
                    p!(": ", print(ty));
                }
                Ok(self)
            }
        }
    }
}
// HACK(eddyb) boxed to avoid moving around a large struct by-value.
/// A printer that renders into any `fmt::Write` sink `F`; state lives in
/// the boxed `FmtPrinterData` (reachable via `Deref`/`DerefMut`).
pub struct FmtPrinter<'a, 'tcx, F>(Box<FmtPrinterData<'a, 'tcx, F>>);
/// The state behind a `FmtPrinter`.
pub struct FmtPrinterData<'a, 'tcx, F> {
    tcx: TyCtxt<'tcx>,
    /// The sink all output is written into.
    fmt: F,
    /// True while nothing has been written for the current path
    /// (controls whether a `::` separator is emitted).
    empty_path: bool,
    /// True when printing in value position (set from `Namespace::ValueNS`
    /// in `new`; affects e.g. `::<...>` turbofish in `path_generic_args`).
    in_value: bool,
    pub print_alloc_ids: bool,
    /// Region names already present in the value being printed, so freshly
    /// invented binder names (`'r`, `'s`, `'t0`, ...) don't collide.
    used_region_names: FxHashSet<Symbol>,
    /// Counter used to invent fresh region names.
    region_index: usize,
    /// Current binder nesting depth (maintained by `name_all_regions` and
    /// the `pretty_in_binder`/`pretty_wrap_binder` pair).
    binder_depth: usize,
    /// Number of types printed so far; checked against the session's
    /// type length limit in `print_type`.
    printed_type_count: usize,
    pub region_highlight_mode: RegionHighlightMode,
    /// Optional callback mapping inference variables to display names.
    pub name_resolver: Option<Box<&'a dyn Fn(ty::sty::TyVid) -> Option<String>>>,
}
// Auto-deref so `FmtPrinter` code can access `FmtPrinterData` fields
// directly despite the `Box` indirection.
impl<F> Deref for FmtPrinter<'a, 'tcx, F> {
    type Target = FmtPrinterData<'a, 'tcx, F>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Mutable counterpart of the `Deref` impl above.
impl<F> DerefMut for FmtPrinter<'_, '_, F> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl<F> FmtPrinter<'a, 'tcx, F> {
    /// Creates a printer writing into `fmt`, with value-position formatting
    /// enabled when `ns` is the value namespace. All counters start at zero
    /// and no path has been written yet.
    pub fn new(tcx: TyCtxt<'tcx>, fmt: F, ns: Namespace) -> Self {
        let in_value = ns == Namespace::ValueNS;
        let data = FmtPrinterData {
            tcx,
            fmt,
            in_value,
            empty_path: false,
            print_alloc_ids: false,
            region_index: 0,
            binder_depth: 0,
            printed_type_count: 0,
            used_region_names: Default::default(),
            region_highlight_mode: RegionHighlightMode::default(),
            name_resolver: None,
        };
        FmtPrinter(Box::new(data))
    }
}
// HACK(eddyb) get rid of `def_path_str` and/or pass `Namespace` explicitly always
// (but also some things just print a `DefId` generally so maybe we need this?)
/// Picks the namespace in which to print `def_id`, based on its def-path
/// data. Values, anon consts, closures, and constructors get the value
/// namespace; macros the macro namespace; everything else (types, the
/// crate root, impl-Trait, ...) falls back to the type namespace.
fn guess_def_namespace(tcx: TyCtxt<'_>, def_id: DefId) -> Namespace {
    let data = tcx.def_key(def_id).disambiguated_data.data;
    match data {
        DefPathData::ValueNs(..)
        | DefPathData::AnonConst
        | DefPathData::ClosureExpr
        | DefPathData::Ctor => Namespace::ValueNS,
        DefPathData::MacroNs(..) => Namespace::MacroNS,
        // `TypeNs`, `CrateRoot`, `ImplTrait`, and anything unmatched.
        _ => Namespace::TypeNS,
    }
}
impl TyCtxt<'t> {
    /// Returns a string identifying this `DefId`. This string is
    /// suitable for user output.
    pub fn def_path_str(self, def_id: DefId) -> String {
        self.def_path_str_with_substs(def_id, &[])
    }
    /// Like `def_path_str`, but the printed path carries the given `substs`.
    pub fn def_path_str_with_substs(self, def_id: DefId, substs: &'t [GenericArg<'t>]) -> String {
        let ns = guess_def_namespace(self, def_id);
        debug!("def_path_str: def_id={:?}, ns={:?}", def_id, ns);
        let mut out = String::new();
        // Formatting errors are deliberately ignored; whatever was written
        // before the error is returned.
        let _ = FmtPrinter::new(self, &mut out, ns).print_def_path(def_id, substs);
        out
    }
}
// Forward raw text output straight to the underlying sink.
impl<F: fmt::Write> fmt::Write for FmtPrinter<'_, '_, F> {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.fmt.write_str(s)
    }
}
// The core `Printer` implementation: renders paths, regions, types, and
// consts as user-facing text into the `fmt::Write` sink.
impl<F: fmt::Write> Printer<'tcx> for FmtPrinter<'_, 'tcx, F> {
    type Error = fmt::Error;
    type Path = Self;
    type Region = Self;
    type Type = Self;
    type DynExistential = Self;
    type Const = Self;
    fn tcx(&'a self) -> TyCtxt<'tcx> {
        self.tcx
    }
    /// Prints the path to `def_id`, trying trimmed and "visible" shortcut
    /// paths first (only when there are no substs), and special-casing
    /// local impls that lack type info by printing span information.
    fn print_def_path(
        mut self,
        def_id: DefId,
        substs: &'tcx [GenericArg<'tcx>],
    ) -> Result<Self::Path, Self::Error> {
        define_scoped_cx!(self);
        if substs.is_empty() {
            // The `(cx, bool)` return signals whether the shortcut printed
            // anything; if it did, we are done.
            match self.try_print_trimmed_def_path(def_id)? {
                (cx, true) => return Ok(cx),
                (cx, false) => self = cx,
            }
            match self.try_print_visible_def_path(def_id)? {
                (cx, true) => return Ok(cx),
                (cx, false) => self = cx,
            }
        }
        let key = self.tcx.def_key(def_id);
        if let DefPathData::Impl = key.disambiguated_data.data {
            // Always use types for non-local impls, where types are always
            // available, and filename/line-number is mostly uninteresting.
            let use_types = !def_id.is_local() || {
                // Otherwise, use filename/line-number if forced.
                let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
                !force_no_types
            };
            if !use_types {
                // If no type info is available, fall back to
                // pretty printing some span information. This should
                // only occur very early in the compiler pipeline.
                let parent_def_id = DefId { index: key.parent.unwrap(), ..def_id };
                let span = self.tcx.def_span(def_id);
                self = self.print_def_path(parent_def_id, &[])?;
                // HACK(eddyb) copy of `path_append` to avoid
                // constructing a `DisambiguatedDefPathData`.
                if !self.empty_path {
                    write!(self, "::")?;
                }
                write!(self, "<impl at {}>", self.tcx.sess.source_map().span_to_string(span))?;
                self.empty_path = false;
                return Ok(self);
            }
        }
        self.default_print_def_path(def_id, substs)
    }
    fn print_region(self, region: ty::Region<'_>) -> Result<Self::Region, Self::Error> {
        self.pretty_print_region(region)
    }
    /// Prints a type, abbreviating to `...` once the session's type length
    /// limit has been exceeded.
    fn print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error> {
        if self.tcx.sess.type_length_limit().value_within_limit(self.printed_type_count) {
            self.printed_type_count += 1;
            self.pretty_print_type(ty)
        } else {
            write!(self, "...")?;
            Ok(self)
        }
    }
    fn print_dyn_existential(
        self,
        predicates: &'tcx ty::List<ty::Binder<ty::ExistentialPredicate<'tcx>>>,
    ) -> Result<Self::DynExistential, Self::Error> {
        self.pretty_print_dyn_existential(predicates)
    }
    fn print_const(self, ct: &'tcx ty::Const<'tcx>) -> Result<Self::Const, Self::Error> {
        self.pretty_print_const(ct, true)
    }
    /// Starts a path at a crate root: the local crate prints nothing
    /// (or `crate` on the 2018 edition when requested), other crates
    /// print their name.
    fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
        self.empty_path = true;
        if cnum == LOCAL_CRATE {
            if self.tcx.sess.rust_2018() {
                // We add the `crate::` keyword on Rust 2018, only when desired.
                if SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get()) {
                    write!(self, "{}", kw::Crate)?;
                    self.empty_path = false;
                }
            }
        } else {
            write!(self, "{}", self.tcx.crate_name(cnum))?;
            self.empty_path = false;
        }
        Ok(self)
    }
    fn path_qualified(
        mut self,
        self_ty: Ty<'tcx>,
        trait_ref: Option<ty::TraitRef<'tcx>>,
    ) -> Result<Self::Path, Self::Error> {
        self = self.pretty_path_qualified(self_ty, trait_ref)?;
        self.empty_path = false;
        Ok(self)
    }
    fn path_append_impl(
        mut self,
        print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
        _disambiguated_data: &DisambiguatedDefPathData,
        self_ty: Ty<'tcx>,
        trait_ref: Option<ty::TraitRef<'tcx>>,
    ) -> Result<Self::Path, Self::Error> {
        self = self.pretty_path_append_impl(
            // Wrap the prefix printer so a `::` separator is emitted after
            // it, unless nothing has been printed yet.
            |mut cx| {
                cx = print_prefix(cx)?;
                if !cx.empty_path {
                    write!(cx, "::")?;
                }
                Ok(cx)
            },
            self_ty,
            trait_ref,
        )?;
        self.empty_path = false;
        Ok(self)
    }
    /// Appends one path segment (with `::` separator and `r#` raw-ident
    /// escaping as needed) after printing the prefix.
    fn path_append(
        mut self,
        print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
        disambiguated_data: &DisambiguatedDefPathData,
    ) -> Result<Self::Path, Self::Error> {
        self = print_prefix(self)?;
        // Skip `::{{constructor}}` on tuple/unit structs.
        if let DefPathData::Ctor = disambiguated_data.data {
            return Ok(self);
        }
        // FIXME(eddyb) `name` should never be empty, but it
        // currently is for `extern { ... }` "foreign modules".
        let name = disambiguated_data.data.name();
        if name != DefPathDataName::Named(kw::Invalid) {
            if !self.empty_path {
                write!(self, "::")?;
            }
            if let DefPathDataName::Named(name) = name {
                if Ident::with_dummy_span(name).is_raw_guess() {
                    write!(self, "r#")?;
                }
            }
            let verbose = self.tcx.sess.verbose();
            disambiguated_data.fmt_maybe_verbose(&mut self, verbose)?;
            self.empty_path = false;
        }
        Ok(self)
    }
    /// Appends generic arguments in `<...>` (with a `::` turbofish in value
    /// position), omitting erased regions unless some region is unerased.
    fn path_generic_args(
        mut self,
        print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
        args: &[GenericArg<'tcx>],
    ) -> Result<Self::Path, Self::Error> {
        self = print_prefix(self)?;
        // Don't print `'_` if there's no unerased regions.
        let print_regions = args.iter().any(|arg| match arg.unpack() {
            GenericArgKind::Lifetime(r) => *r != ty::ReErased,
            _ => false,
        });
        let args = args.iter().cloned().filter(|arg| match arg.unpack() {
            GenericArgKind::Lifetime(_) => print_regions,
            _ => true,
        });
        if args.clone().next().is_some() {
            if self.in_value {
                write!(self, "::")?;
            }
            self.generic_delimiters(|cx| cx.comma_sep(args))
        } else {
            Ok(self)
        }
    }
}
impl<F: fmt::Write> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> {
    /// Resolves a display name for an inference variable via the optional
    /// `name_resolver` callback.
    fn infer_ty_name(&self, id: ty::TyVid) -> Option<String> {
        self.0.name_resolver.as_ref().and_then(|func| func(id))
    }
    /// Prints a def path with `in_value` temporarily forced on, restoring
    /// the previous flag afterwards.
    fn print_value_path(
        mut self,
        def_id: DefId,
        substs: &'tcx [GenericArg<'tcx>],
    ) -> Result<Self::Path, Self::Error> {
        let was_in_value = std::mem::replace(&mut self.in_value, true);
        self = self.print_def_path(def_id, substs)?;
        self.in_value = was_in_value;
        Ok(self)
    }
    fn in_binder<T>(self, value: &ty::Binder<T>) -> Result<Self, Self::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
    {
        self.pretty_in_binder(value)
    }
    fn wrap_binder<T, C: Fn(&T, Self) -> Result<Self, Self::Error>>(
        self,
        value: &ty::Binder<T>,
        f: C,
    ) -> Result<Self, Self::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
    {
        self.pretty_wrap_binder(value, f)
    }
    /// Prints `{<f><conversion><t>}`, with `in_value` disabled while the
    /// type part `t` runs.
    fn typed_value(
        mut self,
        f: impl FnOnce(Self) -> Result<Self, Self::Error>,
        t: impl FnOnce(Self) -> Result<Self, Self::Error>,
        conversion: &str,
    ) -> Result<Self::Const, Self::Error> {
        self.write_str("{")?;
        self = f(self)?;
        self.write_str(conversion)?;
        let was_in_value = std::mem::replace(&mut self.in_value, false);
        self = t(self)?;
        self.in_value = was_in_value;
        self.write_str("}")?;
        Ok(self)
    }
    /// Prints `<...>` around the output of `f`, with `in_value` disabled
    /// inside the delimiters.
    fn generic_delimiters(
        mut self,
        f: impl FnOnce(Self) -> Result<Self, Self::Error>,
    ) -> Result<Self, Self::Error> {
        write!(self, "<")?;
        let was_in_value = std::mem::replace(&mut self.in_value, false);
        let mut inner = f(self)?;
        inner.in_value = was_in_value;
        write!(inner, ">")?;
        Ok(inner)
    }
    /// Decides whether `region` must be shown: highlighted regions, verbose
    /// mode, named regions, and `'static`/empty regions are always shown;
    /// erased regions (and region variables, unless `-Z identify-regions`
    /// is set) are omitted.
    fn region_should_not_be_omitted(&self, region: ty::Region<'_>) -> bool {
        let highlight = self.region_highlight_mode;
        if highlight.region_highlighted(region).is_some() {
            return true;
        }
        if self.tcx.sess.verbose() {
            return true;
        }
        let identify_regions = self.tcx.sess.opts.debugging_opts.identify_regions;
        match *region {
            ty::ReEarlyBound(ref data) => {
                data.name != kw::Invalid && data.name != kw::UnderscoreLifetime
            }
            ty::ReLateBound(_, br)
            | ty::ReFree(ty::FreeRegion { bound_region: br, .. })
            | ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
                if let ty::BrNamed(_, name) = br {
                    if name != kw::Invalid && name != kw::UnderscoreLifetime {
                        return true;
                    }
                }
                if let Some((region, _)) = highlight.highlight_bound_region {
                    if br == region {
                        return true;
                    }
                }
                false
            }
            ty::ReVar(_) if identify_regions => true,
            ty::ReVar(_) | ty::ReErased => false,
            ty::ReStatic | ty::ReEmpty(_) => true,
        }
    }
    /// Prints a constant pointer as either its allocation (`{:?}`) when
    /// `print_alloc_ids` is set, or an opaque `&_`, optionally typed.
    fn pretty_print_const_pointer(
        self,
        p: Pointer,
        ty: Ty<'tcx>,
        print_ty: bool,
    ) -> Result<Self::Const, Self::Error> {
        let print = |mut this: Self| {
            define_scoped_cx!(this);
            if this.print_alloc_ids {
                p!(write("{:?}", p));
            } else {
                p!("&_");
            }
            Ok(this)
        };
        if print_ty {
            self.typed_value(print, |this| this.print_type(ty), ": ")
        } else {
            print(self)
        }
    }
}
// HACK(eddyb) limited to `FmtPrinter` because of `region_highlight_mode`.
impl<F: fmt::Write> FmtPrinter<'_, '_, F> {
    /// Prints a concise rendering of `region` (e.g. `'static`, a name like
    /// `'a`, or the `'_` placeholder), honoring highlights and verbose mode.
    pub fn pretty_print_region(mut self, region: ty::Region<'_>) -> Result<Self, fmt::Error> {
        define_scoped_cx!(self);
        // Watch out for region highlights.
        let highlight = self.region_highlight_mode;
        if let Some(n) = highlight.region_highlighted(region) {
            p!(write("'{}", n));
            return Ok(self);
        }
        if self.tcx.sess.verbose() {
            p!(write("{:?}", region));
            return Ok(self);
        }
        let identify_regions = self.tcx.sess.opts.debugging_opts.identify_regions;
        // These printouts are concise. They do not contain all the information
        // the user might want to diagnose an error, but there is basically no way
        // to fit that into a short string. Hence the recommendation to use
        // `explain_region()` or `note_and_explain_region()`.
        match *region {
            ty::ReEarlyBound(ref data) => {
                if data.name != kw::Invalid {
                    p!(write("{}", data.name));
                    return Ok(self);
                }
            }
            ty::ReLateBound(_, br)
            | ty::ReFree(ty::FreeRegion { bound_region: br, .. })
            | ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
                if let ty::BrNamed(_, name) = br {
                    if name != kw::Invalid && name != kw::UnderscoreLifetime {
                        p!(write("{}", name));
                        return Ok(self);
                    }
                }
                // Highlighted bound regions print as their highlight number.
                if let Some((region, counter)) = highlight.highlight_bound_region {
                    if br == region {
                        p!(write("'{}", counter));
                        return Ok(self);
                    }
                }
            }
            ty::ReVar(region_vid) if identify_regions => {
                p!(write("{:?}", region_vid));
                return Ok(self);
            }
            ty::ReVar(_) => {}
            ty::ReErased => {}
            ty::ReStatic => {
                p!("'static");
                return Ok(self);
            }
            ty::ReEmpty(ty::UniverseIndex::ROOT) => {
                p!("'<empty>");
                return Ok(self);
            }
            ty::ReEmpty(ui) => {
                p!(write("'<empty:{:?}>", ui));
                return Ok(self);
            }
        }
        // Anything that fell through the match above prints as `'_`.
        p!("'_");
        Ok(self)
    }
}
// HACK(eddyb) limited to `FmtPrinter` because of `binder_depth`,
// `region_index` and `used_region_names`.
impl<F: fmt::Write> FmtPrinter<'_, 'tcx, F> {
    /// Replaces the late-bound regions of `value` with named regions,
    /// printing a `for<...>` prefix that lists the names as it goes, and
    /// returns the printer together with the rewritten value.
    pub fn name_all_regions<T>(
        mut self,
        value: &ty::Binder<T>,
    ) -> Result<(Self, (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)), fmt::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable<'tcx>,
    {
        // Invented names cycle through 'r, 's, 't0, 't1, ...
        fn name_by_region_index(index: usize) -> Symbol {
            match index {
                0 => Symbol::intern("'r"),
                1 => Symbol::intern("'s"),
                i => Symbol::intern(&format!("'t{}", i - 2)),
            }
        }
        // Replace any anonymous late-bound regions with named
        // variants, using new unique identifiers, so that we can
        // clearly differentiate between named and unnamed regions in
        // the output. We'll probably want to tweak this over time to
        // decide just how much information to give.
        if self.binder_depth == 0 {
            self.prepare_late_bound_region_info(value);
        }
        // Emits `start` on the first call and `cont` thereafter, so the
        // `for<` opener only appears if there is at least one region.
        let mut empty = true;
        let mut start_or_continue = |cx: &mut Self, start: &str, cont: &str| {
            write!(
                cx,
                "{}",
                if empty {
                    empty = false;
                    start
                } else {
                    cont
                }
            )
        };
        define_scoped_cx!(self);
        let mut region_index = self.region_index;
        let new_value = self.tcx.replace_late_bound_regions(value.clone(), |br| {
            let _ = start_or_continue(&mut self, "for<", ", ");
            let br = match br {
                ty::BrNamed(_, name) => {
                    let _ = write!(self, "{}", name);
                    br
                }
                ty::BrAnon(_) | ty::BrEnv => {
                    // Skip candidate names already used in the value.
                    let name = loop {
                        let name = name_by_region_index(region_index);
                        region_index += 1;
                        if !self.used_region_names.contains(&name) {
                            break name;
                        }
                    };
                    let _ = write!(self, "{}", name);
                    ty::BrNamed(DefId::local(CRATE_DEF_INDEX), name)
                }
            };
            self.tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br))
        });
        // Close the `for<...>` list — writes `> ` only if it was opened.
        start_or_continue(&mut self, "", "> ")?;
        self.binder_depth += 1;
        self.region_index = region_index;
        Ok((self, new_value))
    }
    /// Prints `value` inside its binder: names the regions, prints the
    /// contents via `Print`, then restores the binder bookkeeping.
    pub fn pretty_in_binder<T>(self, value: &ty::Binder<T>) -> Result<Self, fmt::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable<'tcx>,
    {
        let old_region_index = self.region_index;
        let (new, new_value) = self.name_all_regions(value)?;
        let mut inner = new_value.0.print(new)?;
        inner.region_index = old_region_index;
        inner.binder_depth -= 1;
        Ok(inner)
    }
    /// Like `pretty_in_binder`, but the contents are printed by the
    /// caller-supplied closure `f` instead of `Print::print`.
    pub fn pretty_wrap_binder<T, C: Fn(&T, Self) -> Result<Self, fmt::Error>>(
        self,
        value: &ty::Binder<T>,
        f: C,
    ) -> Result<Self, fmt::Error>
    where
        T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable<'tcx>,
    {
        let old_region_index = self.region_index;
        let (new, new_value) = self.name_all_regions(value)?;
        let mut inner = f(&new_value.0, new)?;
        inner.region_index = old_region_index;
        inner.binder_depth -= 1;
        Ok(inner)
    }
    /// Collects the late-bound region names already occurring in `value`
    /// into `used_region_names` and resets the fresh-name counter.
    fn prepare_late_bound_region_info<T>(&mut self, value: &ty::Binder<T>)
    where
        T: TypeFoldable<'tcx>,
    {
        struct LateBoundRegionNameCollector<'a>(&'a mut FxHashSet<Symbol>);
        impl<'tcx> ty::fold::TypeVisitor<'tcx> for LateBoundRegionNameCollector<'_> {
            fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
                if let ty::ReLateBound(_, ty::BrNamed(_, name)) = *r {
                    self.0.insert(name);
                }
                r.super_visit_with(self)
            }
        }
        self.used_region_names.clear();
        let mut collector = LateBoundRegionNameCollector(&mut self.used_region_names);
        value.visit_with(&mut collector);
        self.region_index = 0;
    }
}
// Printing a binder delegates to `in_binder`, which names the bound
// regions before printing the contents.
impl<'tcx, T, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::Binder<T>
where
    T: Print<'tcx, P, Output = P, Error = P::Error> + TypeFoldable<'tcx>,
{
    type Output = P;
    type Error = P::Error;
    fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
        cx.in_binder(self)
    }
}
// Outlives predicates print as `A: B`.
impl<'tcx, T, U, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::OutlivesPredicate<T, U>
where
    T: Print<'tcx, P, Output = P, Error = P::Error>,
    U: Print<'tcx, P, Output = P, Error = P::Error>,
{
    type Output = P;
    type Error = P::Error;
    fn print(&self, mut cx: P) -> Result<Self::Output, Self::Error> {
        define_scoped_cx!(cx);
        p!(print(self.0), ": ", print(self.1));
        Ok(cx)
    }
}
// For each listed type, implements `fmt::Display` by lifting `self` into
// the thread-local `tcx` and delegating to the type's `Print` impl
// (in the type namespace).
macro_rules! forward_display_to_print {
    ($($ty:ty),+) => {
        $(impl fmt::Display for $ty {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                ty::tls::with(|tcx| {
                    tcx.lift(*self)
                        .expect("could not lift for printing")
                        .print(FmtPrinter::new(tcx, f, Namespace::TypeNS))?;
                    Ok(())
                })
            }
        })+
    };
}
// For each `Type { body }` pair, implements `Print` with that body (the
// caller-chosen `$self`/`$cx` identifiers are bound inside it) and also
// forwards `fmt::Display` to the new `Print` impl.
macro_rules! define_print_and_forward_display {
    (($self:ident, $cx:ident): $($ty:ty $print:block)+) => {
        $(impl<'tcx, P: PrettyPrinter<'tcx>> Print<'tcx, P> for $ty {
            type Output = P;
            type Error = fmt::Error;
            fn print(&$self, $cx: P) -> Result<Self::Output, Self::Error> {
                #[allow(unused_mut)]
                let mut $cx = $cx;
                define_scoped_cx!($cx);
                // The body must evaluate to `()`; the printer itself is
                // returned afterwards.
                let _: () = $print;
                #[allow(unreachable_code)]
                Ok($cx)
            }
        })+
        forward_display_to_print!($($ty),+);
    };
}
// HACK(eddyb) this is separate because `ty::RegionKind` doesn't need lifting.
impl fmt::Display for ty::RegionKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ty::tls::with(|tcx| {
            // Unlike `forward_display_to_print!`, no `tcx.lift` call here.
            self.print(FmtPrinter::new(tcx, f, Namespace::TypeNS))?;
            Ok(())
        })
    }
}
/// Wrapper type for `ty::TraitRef` which opts-in to pretty printing only
/// the trait path. That is, it will print `Trait<U>` instead of
/// `<T as Trait<U>>`.
///
/// Constructed via `TraitRef::print_only_trait_path`.
#[derive(Copy, Clone, TypeFoldable, Lift)]
pub struct TraitRefPrintOnlyTraitPath<'tcx>(ty::TraitRef<'tcx>);
// `Debug` intentionally defers to `Display` for this wrapper.
impl fmt::Debug for TraitRefPrintOnlyTraitPath<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
impl ty::TraitRef<'tcx> {
    /// Wraps `self` so it prints as just `Trait<U>`, without the
    /// `<T as ...>` qualification.
    pub fn print_only_trait_path(self) -> TraitRefPrintOnlyTraitPath<'tcx> {
        TraitRefPrintOnlyTraitPath(self)
    }
}
impl ty::Binder<ty::TraitRef<'tcx>> {
    /// Maps `print_only_trait_path` through the binder.
    pub fn print_only_trait_path(self) -> ty::Binder<TraitRefPrintOnlyTraitPath<'tcx>> {
        self.map_bound(|tr| tr.print_only_trait_path())
    }
}
// `Display` impls (via lifting + `Print`) for types whose `Print`
// implementations are defined elsewhere in this module.
forward_display_to_print! {
    Ty<'tcx>,
    &'tcx ty::List<ty::Binder<ty::ExistentialPredicate<'tcx>>>,
    &'tcx ty::Const<'tcx>,
    // HACK(eddyb) these are exhaustive instead of generic,
    // because `for<'tcx>` isn't possible yet.
    ty::Binder<ty::ExistentialPredicate<'tcx>>,
    ty::Binder<ty::TraitRef<'tcx>>,
    ty::Binder<TraitRefPrintOnlyTraitPath<'tcx>>,
    ty::Binder<ty::FnSig<'tcx>>,
    ty::Binder<ty::TraitPredicate<'tcx>>,
    ty::Binder<ty::SubtypePredicate<'tcx>>,
    ty::Binder<ty::ProjectionPredicate<'tcx>>,
    ty::Binder<ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>>,
    ty::Binder<ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>>,
    ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>,
    ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>
}
// `Print` + `Display` implementations for the listed types; each block is
// the body of `Print::print`, with `self`/`cx` bound by the macro. The
// string fragments here are the user-visible rendering — keep them exact.
define_print_and_forward_display! {
    (self, cx):
    &'tcx ty::List<Ty<'tcx>> {
        p!("{{", comma_sep(self.iter()), "}}")
    }
    ty::TypeAndMut<'tcx> {
        p!(write("{}", self.mutbl.prefix_str()), print(self.ty))
    }
    ty::ExistentialTraitRef<'tcx> {
        // Use a type that can't appear in defaults of type parameters.
        let dummy_self = cx.tcx().mk_ty_infer(ty::FreshTy(0));
        let trait_ref = self.with_self_ty(cx.tcx(), dummy_self);
        p!(print(trait_ref.print_only_trait_path()))
    }
    ty::ExistentialProjection<'tcx> {
        let name = cx.tcx().associated_item(self.item_def_id).ident;
        p!(write("{} = ", name), print(self.ty))
    }
    ty::ExistentialPredicate<'tcx> {
        match *self {
            ty::ExistentialPredicate::Trait(x) => p!(print(x)),
            ty::ExistentialPredicate::Projection(x) => p!(print(x)),
            ty::ExistentialPredicate::AutoTrait(def_id) => {
                p!(print_def_path(def_id, &[]));
            }
        }
    }
    ty::FnSig<'tcx> {
        p!(write("{}", self.unsafety.prefix_str()));
        if self.abi != Abi::Rust {
            p!(write("extern {} ", self.abi));
        }
        p!("fn", pretty_fn_sig(self.inputs(), self.c_variadic, self.output()));
    }
    ty::InferTy {
        if cx.tcx().sess.verbose() {
            p!(write("{:?}", self));
            return Ok(cx);
        }
        match *self {
            ty::TyVar(_) => p!("_"),
            ty::IntVar(_) => p!(write("{}", "{integer}")),
            ty::FloatVar(_) => p!(write("{}", "{float}")),
            ty::FreshTy(v) => p!(write("FreshTy({})", v)),
            ty::FreshIntTy(v) => p!(write("FreshIntTy({})", v)),
            ty::FreshFloatTy(v) => p!(write("FreshFloatTy({})", v))
        }
    }
    ty::TraitRef<'tcx> {
        p!(write("<{} as {}>", self.self_ty(), self.print_only_trait_path()))
    }
    TraitRefPrintOnlyTraitPath<'tcx> {
        p!(print_def_path(self.0.def_id, self.0.substs));
    }
    ty::ParamTy {
        p!(write("{}", self.name))
    }
    ty::ParamConst {
        p!(write("{}", self.name))
    }
    ty::SubtypePredicate<'tcx> {
        p!(print(self.a), " <: ", print(self.b))
    }
    ty::TraitPredicate<'tcx> {
        p!(print(self.trait_ref.self_ty()), ": ",
           print(self.trait_ref.print_only_trait_path()))
    }
    ty::ProjectionPredicate<'tcx> {
        p!(print(self.projection_ty), " == ", print(self.ty))
    }
    ty::ProjectionTy<'tcx> {
        p!(print_def_path(self.item_def_id, self.substs));
    }
    ty::ClosureKind {
        match *self {
            ty::ClosureKind::Fn => p!("Fn"),
            ty::ClosureKind::FnMut => p!("FnMut"),
            ty::ClosureKind::FnOnce => p!("FnOnce"),
        }
    }
    ty::Predicate<'tcx> {
        match self.kind() {
            &ty::PredicateKind::Atom(atom) => p!(print(atom)),
            ty::PredicateKind::ForAll(binder) => p!(print(binder)),
        }
    }
    ty::PredicateAtom<'tcx> {
        match *self {
            ty::PredicateAtom::Trait(ref data, constness) => {
                if let hir::Constness::Const = constness {
                    p!("const ");
                }
                p!(print(data))
            }
            ty::PredicateAtom::Subtype(predicate) => p!(print(predicate)),
            ty::PredicateAtom::RegionOutlives(predicate) => p!(print(predicate)),
            ty::PredicateAtom::TypeOutlives(predicate) => p!(print(predicate)),
            ty::PredicateAtom::Projection(predicate) => p!(print(predicate)),
            ty::PredicateAtom::WellFormed(arg) => p!(print(arg), " well-formed"),
            ty::PredicateAtom::ObjectSafe(trait_def_id) => {
                p!("the trait `", print_def_path(trait_def_id, &[]), "` is object-safe")
            }
            ty::PredicateAtom::ClosureKind(closure_def_id, _closure_substs, kind) => {
                p!("the closure `",
                   print_value_path(closure_def_id, &[]),
                   write("` implements the trait `{}`", kind))
            }
            ty::PredicateAtom::ConstEvaluatable(def, substs) => {
                p!("the constant `", print_value_path(def.did, substs), "` can be evaluated")
            }
            ty::PredicateAtom::ConstEquate(c1, c2) => {
                p!("the constant `", print(c1), "` equals `", print(c2), "`")
            }
            ty::PredicateAtom::TypeWellFormedFromEnv(ty) => {
                p!("the type `", print(ty), "` is found in the environment")
            }
        }
    }
    GenericArg<'tcx> {
        match self.unpack() {
            GenericArgKind::Lifetime(lt) => p!(print(lt)),
            GenericArgKind::Type(ty) => p!(print(ty)),
            GenericArgKind::Const(ct) => p!(print(ct)),
        }
    }
}
/// Invokes `collect_fn` for every named, non-`use` local item and for every
/// public def reachable from direct extern-crate dependencies (walking their
/// children breadth-ish via a work queue; associated types are skipped).
fn for_each_def(tcx: TyCtxt<'_>, mut collect_fn: impl for<'b> FnMut(&'b Ident, Namespace, DefId)) {
    // Iterate all local crate items no matter where they are defined.
    let hir = tcx.hir();
    for item in hir.krate().items.values() {
        // Skip unnamed items and `use` items.
        if item.ident.name.as_str().is_empty() || matches!(item.kind, ItemKind::Use(_, _)) {
            continue;
        }
        if let Some(local_def_id) = hir.definitions().opt_hir_id_to_local_def_id(item.hir_id) {
            let def_id = local_def_id.to_def_id();
            let ns = tcx.def_kind(def_id).ns().unwrap_or(Namespace::TypeNS);
            collect_fn(&item.ident, ns, def_id);
        }
    }
    // Now take care of extern crate items.
    let queue = &mut Vec::new();
    let mut seen_defs: DefIdSet = Default::default();
    for &cnum in tcx.crates().iter() {
        let def_id = DefId { krate: cnum, index: CRATE_DEF_INDEX };
        // Ignore crates that are not direct dependencies.
        match tcx.extern_crate(def_id) {
            None => continue,
            Some(extern_crate) => {
                if !extern_crate.is_direct() {
                    continue;
                }
            }
        }
        queue.push(def_id);
    }
    // Iterate external crate defs but be mindful about visibility
    while let Some(def) = queue.pop() {
        for child in tcx.item_children(def).iter() {
            if child.vis != ty::Visibility::Public {
                continue;
            }
            match child.res {
                // Associated types are deliberately not collected.
                def::Res::Def(DefKind::AssocTy, _) => {}
                def::Res::Def(defkind, def_id) => {
                    if let Some(ns) = defkind.ns() {
                        collect_fn(&child.ident, ns, def_id);
                    }
                    // Recurse into each def only once.
                    if seen_defs.insert(def_id) {
                        queue.push(def_id);
                    }
                }
                _ => {}
            }
        }
    }
}
/// The purpose of this function is to collect public symbols names that are unique across all
/// crates in the build. Later, when printing about types we can use those names instead of the
/// full exported path to them.
///
/// So essentially, if a symbol name can only be imported from one place for a type, and as
/// long as it was not glob-imported anywhere in the current crate, we can trim its printed
/// path and print only the name.
///
/// This has wide implications on error messages with types, for example, shortening
/// `std::vec::Vec` to just `Vec`, as long as there is no other `Vec` importable anywhere.
///
/// The implementation uses similar import discovery logic to that of 'use' suggestions.
fn trimmed_def_paths(tcx: TyCtxt<'_>, crate_num: CrateNum) -> FxHashMap<DefId, Symbol> {
    assert_eq!(crate_num, LOCAL_CRATE);
    let mut map = FxHashMap::default();
    if let TrimmedDefPaths::GoodPath = tcx.sess.opts.trimmed_def_paths {
        // For good paths causing this bug, the `rustc_middle::ty::print::with_no_trimmed_paths`
        // wrapper can be used to suppress this query, in exchange for full paths being formatted.
        tcx.sess.delay_good_path_bug("trimmed_def_paths constructed");
    }
    // Maps `(namespace, name)` to the single `DefId` using that name, or to
    // `None` once the name is known not to be unique.
    let unique_symbols_rev: &mut FxHashMap<(Namespace, Symbol), Option<DefId>> =
        &mut FxHashMap::default();
    // Glob-imported names are disqualified up front, in every namespace.
    for symbol_set in tcx.glob_map.values() {
        for symbol in symbol_set {
            unique_symbols_rev.insert((Namespace::TypeNS, *symbol), None);
            unique_symbols_rev.insert((Namespace::ValueNS, *symbol), None);
            unique_symbols_rev.insert((Namespace::MacroNS, *symbol), None);
        }
    }
    for_each_def(tcx, |ident, ns, def_id| {
        use std::collections::hash_map::Entry::{Occupied, Vacant};
        match unique_symbols_rev.entry((ns, ident.name)) {
            Occupied(mut v) => match v.get() {
                None => {}
                Some(existing) => {
                    // Same name seen from a second def: no longer unique.
                    if *existing != def_id {
                        v.insert(None);
                    }
                }
            },
            Vacant(v) => {
                v.insert(Some(def_id));
            }
        }
    });
    // Keep only the names that remained unique.
    for ((_, symbol), opt_def_id) in unique_symbols_rev.drain() {
        if let Some(def_id) = opt_def_id {
            map.insert(def_id, symbol);
        }
    }
    map
}
/// Installs this module's query implementations (currently just
/// `trimmed_def_paths`) into the global providers table.
pub fn provide(providers: &mut ty::query::Providers) {
    *providers = ty::query::Providers { trimmed_def_paths, ..*providers };
}
| 36.804639 | 100 | 0.493262 |
fbd2cdc7be070442fd75935b483545fe74c6808c | 2,558 | use crate::common::*;
/// A project changelog: the releases reconstructed from git history by
/// `Changelog::new` (stored newest-first).
pub(crate) struct Changelog {
  releases: Vec<Release>,
}
impl Changelog {
  /// Builds the changelog by walking first-parent history from `HEAD`.
  ///
  /// For every commit whose summary does not start with `fixup!`, the crate
  /// version is read from that commit's `Cargo.toml` and an `Entry` is
  /// created. Entries are then grouped into `Release`s: entries accumulate
  /// until the version changes, and the commit that changes the version is
  /// included in the release it introduces. Entries newer than the last
  /// version bump form a version-less release stamped with the current
  /// time. Releases — and the entries within each — end up newest-first.
  ///
  /// Fails on commits with a non-UTF-8-summary-less head, with multiple
  /// parents (merges), or whose tree lacks a readable `Cargo.toml`.
  #[throws]
  pub(crate) fn new(project: &Project) -> Self {
    let repo = project.repo()?;
    let mut current = repo.head()?.peel_to_commit()?;
    let mut entries = Vec::new();
    // True only while processing the HEAD commit itself.
    let mut head = true;
    loop {
      let summary_bytes = current
        .summary_bytes()
        .ok_or_else(|| Error::CommitSummery { hash: current.id() })?;
      let summary = String::from_utf8_lossy(summary_bytes);
      if !summary.starts_with("fixup!") {
        // Read `Cargo.toml` as it existed in this commit's tree.
        let manifest_bytes = current
          .tree()?
          .get_path("Cargo.toml".as_ref())?
          .to_object(&repo)?
          .as_blob()
          .unwrap()
          .content()
          .to_vec();
        let manifest = Manifest::from_slice(&manifest_bytes)?;
        let entry = Entry::new(
          &current,
          manifest.package.unwrap().version.as_ref(),
          head,
          &project.config,
        )?;
        entries.push(entry);
      }
      head = false;
      // Follow first parents only; merge commits are an error.
      match current.parent_count() {
        0 => break,
        1 => current = current.parent(0)?,
        other => throw!(Error::CommitParents {
          hash: current.id(),
          parents: other
        }),
      }
    }
    // Oldest-first for chronological grouping into releases.
    entries.reverse();
    let mut releases = Vec::new();
    // NOTE(review): this indexes `entries[0]` and would panic if every
    // commit were a `fixup!` — confirm that is unreachable in practice.
    let mut last_version = entries[0].version.clone();
    let mut unreleased = Vec::new();
    for entry in entries {
      if entry.version != last_version {
        // The version-bumping commit belongs to the release it introduces,
        // together with everything accumulated since the previous bump.
        let version = entry.version.clone();
        let time = entry.time;
        unreleased.push(entry);
        releases.push(Release {
          version: Some(version.clone()),
          time,
          entries: unreleased,
        });
        last_version = version;
        unreleased = Vec::new();
      } else {
        unreleased.push(entry);
      }
    }
    // Anything after the last version bump becomes a version-less release.
    if !unreleased.is_empty() {
      releases.push(Release {
        version: None,
        time: Utc::now(),
        entries: unreleased,
      });
    }
    // Present newest release (and newest entry within each) first.
    releases.reverse();
    for release in &mut releases {
      release.entries.reverse();
    }
    Self { releases }
  }
  /// Renders the changelog as text: a `Changelog` header with an `=`
  /// underline, then each release (separated by two blank lines, each
  /// rendered via `Release::render` with `book` forwarded), ending with
  /// a trailing newline.
  #[throws]
  pub(crate) fn render(&self, book: bool) -> String {
    let mut lines: Vec<String> = Vec::new();
    lines.push("Changelog".into());
    lines.push("=========".into());
    for release in &self.releases {
      lines.push("".into());
      lines.push("".into());
      release.render(&mut lines, book)?;
    }
    let mut text = lines.join("\n");
    text.push('\n');
    text
  }
}
| 20.796748 | 69 | 0.529711 |
90203709627ff22294bd13e45d0596d0906b678f | 67,106 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Higher-level interfaces to libc::* functions and operating system services.
//!
//! In general these take and return rust types, use rust idioms (enums, closures, vectors) rather
//! than C idioms, and do more extensive safety checks.
//!
//! This module is not meant to only contain 1:1 mappings to libc entries; any os-interface code
//! that is reasonably useful and broadly applicable can go here. Including utility routines that
//! merely build on other os code.
//!
//! We assume the general case is that users do not care, and do not want to be made to care, which
//! operating system they are on. While they may want to special case various special cases -- and
//! so we will not _hide_ the facts of which OS the user is on -- they should be given the
//! opportunity to write OS-ignorant code by default.
#![experimental]
#![allow(missing_docs)]
#![allow(non_snake_case)]
pub use self::MemoryMapKind::*;
pub use self::MapOption::*;
pub use self::MapError::*;
use clone::Clone;
use error::{FromError, Error};
use fmt;
use io::{IoResult, IoError};
use iter::{Iterator, IteratorExt};
use libc::{c_void, c_int};
use libc;
use boxed::Box;
use ops::Drop;
use option::{Some, None, Option};
use os;
use path::{Path, GenericPath, BytesContainer};
use sys;
use sys::os as os_imp;
use ptr::RawPtr;
use ptr;
use result::{Err, Ok, Result};
use slice::{AsSlice, SlicePrelude, PartialEqSlicePrelude};
use slice::CloneSliceAllocPrelude;
use str::{Str, StrPrelude, StrAllocating};
use string::{String, ToString};
use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst};
use vec::Vec;
#[cfg(unix)] use c_str::ToCStr;
#[cfg(unix)] use libc::c_char;
#[cfg(unix)]
pub use sys::ext as unix;
#[cfg(windows)]
pub use sys::ext as windows;
/// Get the number of cores available
pub fn num_cpus() -> uint {
    unsafe {
        return rust_get_num_cpus() as uint;
    }
    // External symbol resolved at link time (presumably supplied by the
    // Rust C runtime support code — confirm).
    extern {
        fn rust_get_num_cpus() -> libc::uintptr_t;
    }
}
// Scratch-buffer size; its users are elsewhere in this module.
pub const TMPBUF_SZ : uint = 1000u;
// Buffer size used by `getcwd` below.
const BUF_BYTES : uint = 2048u;
/// Returns the current working directory as a `Path`.
///
/// # Errors
///
/// Returns an `Err` if the current working directory value is invalid.
/// Possible cases:
///
/// * Current directory does not exist.
/// * There are insufficient permissions to access the current directory.
/// * The internal buffer is not large enough to hold the path.
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// // We assume that we are in a valid directory like "/home".
/// let current_working_directory = os::getcwd().unwrap();
/// println!("The current directory is {}", current_working_directory.display());
/// // /home
/// ```
#[cfg(unix)]
pub fn getcwd() -> IoResult<Path> {
    use c_str::CString;
    // Fixed-size, NUL-initialized buffer filled in by the C library.
    let mut buf = [0 as c_char, ..BUF_BYTES];
    unsafe {
        // `getcwd(3)` returns NULL on failure (including when the path
        // doesn't fit in the buffer — see the error list above).
        if libc::getcwd(buf.as_mut_ptr(), buf.len() as libc::size_t).is_null() {
            Err(IoError::last_error())
        } else {
            // `false`: presumably the CString does not take ownership of
            // the buffer — confirm against the `c_str` module docs.
            Ok(Path::new(CString::new(buf.as_ptr(), false)))
        }
    }
}
/// Returns the current working directory as a `Path`.
///
/// # Errors
///
/// Returns an `Err` if the current working directory value is invalid.
/// Possible cases:
///
/// * Current directory does not exist.
/// * There are insufficient permissions to access the current directory.
/// * The internal buffer is not large enough to hold the path.
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// // We assume that we are in a valid directory like "C:\\Windows".
/// let current_working_directory = os::getcwd().unwrap();
/// println!("The current directory is {}", current_working_directory.display());
/// // C:\\Windows
/// ```
#[cfg(windows)]
pub fn getcwd() -> IoResult<Path> {
    use libc::DWORD;
    use libc::GetCurrentDirectoryW;
    use io::OtherIoError;
    // UTF-16 buffer; GetCurrentDirectoryW returns 0 on failure.
    let mut buf = [0 as u16, ..BUF_BYTES];
    unsafe {
        if libc::GetCurrentDirectoryW(buf.len() as DWORD, buf.as_mut_ptr()) == 0 as DWORD {
            return Err(IoError::last_error());
        }
    }
    // Decode only up to the first NUL; the rest of the buffer is garbage.
    match String::from_utf16(::str::truncate_utf16_at_nul(&buf)) {
        Some(ref cwd) => Ok(Path::new(cwd)),
        None => Err(IoError {
            kind: OtherIoError,
            desc: "GetCurrentDirectoryW returned invalid UTF-16",
            detail: None,
        }),
    }
}
#[cfg(windows)]
/// Windows-only helpers shared by the UTF-16-returning Win32 wrappers in
/// this module.
pub mod windows {
    use libc::types::os::arch::extra::DWORD;
    use libc;
    use option::{None, Option};
    use option;
    use os::TMPBUF_SZ;
    use slice::{SlicePrelude};
    use string::String;
    use str::StrPrelude;
    use vec::Vec;
    /// Repeatedly calls `f` with a growing UTF-16 buffer until the result
    /// fits, then decodes the written prefix into a `String`.
    ///
    /// `f` follows the usual Win32 convention: it takes a buffer pointer and
    /// its capacity and returns the number of u16 units written (0 on
    /// failure, or a value >= capacity when the buffer was too small).
    pub fn fill_utf16_buf_and_decode(f: |*mut u16, DWORD| -> DWORD)
        -> Option<String> {
        unsafe {
            let mut n = TMPBUF_SZ as DWORD;
            let mut res = None;
            let mut done = false;
            while !done {
                let mut buf = Vec::from_elem(n as uint, 0u16);
                let k = f(buf.as_mut_ptr(), n);
                if k == (0 as DWORD) {
                    // The call failed outright; `res` stays None.
                    done = true;
                } else if k == n &&
                          libc::GetLastError() ==
                          libc::ERROR_INSUFFICIENT_BUFFER as DWORD {
                    // Buffer exactly full and the API reported truncation:
                    // grow geometrically and retry.
                    n *= 2 as DWORD;
                } else if k >= n {
                    // The API told us the size it needs; retry with it.
                    n = k;
                } else {
                    // Success: `k` units were written into `buf`.
                    done = true;
                }
                if k != 0 && done {
                    let sub = buf.slice(0, k as uint);
                    // We want to explicitly catch the case when the
                    // closure returned invalid UTF-16, rather than
                    // set `res` to None and continue.
                    let s = String::from_utf16(sub)
                        .expect("fill_utf16_buf_and_decode: closure created invalid UTF-16");
                    res = option::Some(s)
                }
            }
            return res;
        }
    }
}
/*
Accessing environment variables is not generally threadsafe.
Serialize access through a global lock.
*/
fn with_env_lock<T>(f: || -> T) -> T {
    use rustrt::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT};
    // One process-wide native mutex guards every getenv/setenv/unsetenv
    // call issued through this module.
    static LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT;
    unsafe {
        let _guard = LOCK.lock();
        f()
    }
}
/// Returns a vector of (variable, value) pairs, for all the environment
/// variables of the current process.
///
/// Any byte sequences that are not valid UTF-8 are replaced with \uFFFD;
/// see `String::from_utf8_lossy()` for the exact rules.
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// // We will iterate through the references to the element returned by os::env();
/// for &(ref key, ref value) in os::env().iter() {
///     println!("'{}': '{}'", key, value );
/// }
/// ```
pub fn env() -> Vec<(String,String)> {
    let mut decoded = Vec::new();
    for (key, value) in env_as_bytes().into_iter() {
        let key = String::from_utf8_lossy(key.as_slice()).into_string();
        let value = String::from_utf8_lossy(value.as_slice()).into_string();
        decoded.push((key, value));
    }
    decoded
}
/// Returns a vector of (variable, value) byte-vector pairs for all the
/// environment variables of the current process.
pub fn env_as_bytes() -> Vec<(Vec<u8>,Vec<u8>)> {
    unsafe {
        // Windows: walk the double-NUL-terminated block returned by
        // GetEnvironmentStringsW, one "NAME=value" entry per NUL-terminated
        // wide string.
        #[cfg(windows)]
        unsafe fn get_env_pairs() -> Vec<Vec<u8>> {
            use slice;
            use libc::funcs::extra::kernel32::{
                GetEnvironmentStringsW,
                FreeEnvironmentStringsW
            };
            let ch = GetEnvironmentStringsW();
            if ch as uint == 0 {
                panic!("os::env() failure getting env string from OS: {}",
                       os::last_os_error());
            }
            // Here, we lossily decode the string as UTF16.
            //
            // The docs suggest that the result should be in Unicode, but
            // Windows doesn't guarantee it's actually UTF16 -- it doesn't
            // validate the environment string passed to CreateProcess nor
            // SetEnvironmentVariable. Yet, it's unlikely that returning a
            // raw u16 buffer would be of practical use since the result would
            // be inherently platform-dependent and introduce additional
            // complexity to this code.
            //
            // Using the non-Unicode version of GetEnvironmentStrings is even
            // worse since the result is in an OEM code page. Characters that
            // can't be encoded in the code page would be turned into question
            // marks.
            let mut result = Vec::new();
            let mut i = 0;
            while *ch.offset(i) != 0 {
                let p = &*ch.offset(i);
                // Measure the current entry up to its terminating NUL.
                let mut len = 0;
                while *(p as *const _).offset(len) != 0 {
                    len += 1;
                }
                let p = p as *const u16;
                let s = slice::from_raw_buf(&p, len as uint);
                result.push(String::from_utf16_lossy(s).into_bytes());
                // Skip past this entry and its NUL terminator.
                i += len as int + 1;
            }
            // The block was allocated by the OS; hand it back.
            FreeEnvironmentStringsW(ch);
            result
        }
        // Unix: copy each "NAME=value" C string out of the NULL-terminated
        // `environ` array provided by the runtime.
        #[cfg(unix)]
        unsafe fn get_env_pairs() -> Vec<Vec<u8>> {
            use c_str::CString;
            extern {
                fn rust_env_pairs() -> *const *const c_char;
            }
            let mut environ = rust_env_pairs();
            if environ as uint == 0 {
                panic!("os::env() failure getting env string from OS: {}",
                       os::last_os_error());
            }
            let mut result = Vec::new();
            while *environ != 0 as *const _ {
                let env_pair =
                    CString::new(*environ, false).as_bytes_no_nul().to_vec();
                result.push(env_pair);
                environ = environ.offset(1);
            }
            result
        }
        // Split each raw "NAME=value" entry at the first '='; an entry with
        // no '=' yields an empty value.
        fn env_convert(input: Vec<Vec<u8>>) -> Vec<(Vec<u8>, Vec<u8>)> {
            let mut pairs = Vec::new();
            for p in input.iter() {
                let mut it = p.as_slice().splitn(1, |b| *b == b'=');
                let key = it.next().unwrap().to_vec();
                let default: &[u8] = &[];
                let val = it.next().unwrap_or(default).to_vec();
                pairs.push((key, val));
            }
            pairs
        }
        // Snapshot the environment while holding the global env lock.
        with_env_lock(|| {
            let unparsed_environ = get_env_pairs();
            env_convert(unparsed_environ)
        })
    }
}
#[cfg(unix)]
/// Fetches the environment variable `n` from the current process, returning
/// None if the variable isn't set.
///
/// Any invalid UTF-8 bytes in the value are replaced by \uFFFD. See
/// `String::from_utf8_lossy()` for details.
///
/// # Panics
///
/// Panics if `n` has any interior NULs.
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// let key = "HOME";
/// match os::getenv(key) {
///     Some(val) => println!("{}: {}", key, val),
///     None => println!("{} is not defined in the environment.", key)
/// }
/// ```
pub fn getenv(n: &str) -> Option<String> {
    // Defer to the byte-level lookup, then decode lossily.
    getenv_as_bytes(n).map(|v| String::from_utf8_lossy(v.as_slice()).into_string())
}
#[cfg(unix)]
/// Fetches the environment variable `n` byte vector from the current process,
/// returning None if the variable isn't set.
///
/// # Panics
///
/// Panics if `n` has any interior NULs.
pub fn getenv_as_bytes(n: &str) -> Option<Vec<u8>> {
    use c_str::CString;
    unsafe {
        with_env_lock(|| {
            let s = n.with_c_str(|buf| libc::getenv(buf));
            if s.is_null() {
                None
            } else {
                // Copy the value out while still holding the env lock; the
                // pointer libc::getenv returns can be invalidated by a later
                // setenv/unsetenv.
                Some(CString::new(s as *const i8, false).as_bytes_no_nul().to_vec())
            }
        })
    }
}
#[cfg(windows)]
/// Fetches the environment variable `n` from the current process, returning
/// None if the variable isn't set.
pub fn getenv(n: &str) -> Option<String> {
    unsafe {
        with_env_lock(|| {
            use os::windows::{fill_utf16_buf_and_decode};
            // Convert the name to a NUL-terminated UTF-16 string for Win32.
            let mut n: Vec<u16> = n.utf16_units().collect();
            n.push(0);
            // The grow-and-retry helper handles values of any length.
            fill_utf16_buf_and_decode(|buf, sz| {
                libc::GetEnvironmentVariableW(n.as_ptr(), buf, sz)
            })
        })
    }
}
#[cfg(windows)]
/// Fetches the environment variable `n` byte vector from the current process,
/// returning None if the variable isn't set.
///
/// On Windows the value is read as UTF-16 and decoded to a `String` first,
/// so the returned bytes are its UTF-8 re-encoding.
pub fn getenv_as_bytes(n: &str) -> Option<Vec<u8>> {
    getenv(n).map(|s| s.into_bytes())
}
/// Sets the environment variable `n` to the value `v` for the currently running
/// process.
///
/// # Panics
///
/// On Windows, panics if `v` is not valid UTF-8 (the value is re-encoded to
/// UTF-16 for the Win32 API). On both platforms, panics if the OS call fails.
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// let key = "KEY";
/// os::setenv(key, "VALUE");
/// match os::getenv(key) {
///     Some(ref val) => println!("{}: {}", key, val),
///     None => println!("{} is not defined in the environment.", key)
/// }
/// ```
pub fn setenv<T: BytesContainer>(n: &str, v: T) {
    #[cfg(unix)]
    fn _setenv(n: &str, v: &[u8]) {
        unsafe {
            with_env_lock(|| {
                n.with_c_str(|nbuf| {
                    v.with_c_str(|vbuf| {
                        // Third argument 1 = overwrite any existing value.
                        if libc::funcs::posix01::unistd::setenv(nbuf, vbuf, 1) != 0 {
                            panic!(IoError::last_error());
                        }
                    })
                })
            })
        }
    }
    #[cfg(windows)]
    fn _setenv(n: &str, v: &[u8]) {
        let mut n: Vec<u16> = n.utf16_units().collect();
        n.push(0);
        // Panics here if `v` is not valid UTF-8.
        let mut v: Vec<u16> = ::str::from_utf8(v).unwrap().utf16_units().collect();
        v.push(0);
        unsafe {
            with_env_lock(|| {
                if libc::SetEnvironmentVariableW(n.as_ptr(), v.as_ptr()) == 0 {
                    panic!(IoError::last_error());
                }
            })
        }
    }
    _setenv(n, v.container_as_bytes())
}
/// Remove a variable from the environment entirely.
///
/// # Panics
///
/// Panics if the underlying OS call fails.
pub fn unsetenv(n: &str) {
    #[cfg(unix)]
    fn _unsetenv(n: &str) {
        unsafe {
            with_env_lock(|| {
                n.with_c_str(|nbuf| {
                    if libc::funcs::posix01::unistd::unsetenv(nbuf) != 0 {
                        panic!(IoError::last_error());
                    }
                })
            })
        }
    }
    #[cfg(windows)]
    fn _unsetenv(n: &str) {
        let mut n: Vec<u16> = n.utf16_units().collect();
        n.push(0);
        unsafe {
            with_env_lock(|| {
                // Passing a null value pointer deletes the variable.
                if libc::SetEnvironmentVariableW(n.as_ptr(), ptr::null()) == 0 {
                    panic!(IoError::last_error());
                }
            })
        }
    }
    _unsetenv(n)
}
/// Parses input according to platform conventions for the `PATH`
/// environment variable.
///
/// # Example
/// ```rust
/// use std::os;
///
/// let key = "PATH";
/// match os::getenv_as_bytes(key) {
///     Some(paths) => {
///         for path in os::split_paths(paths).iter() {
///             println!("'{}'", path.display());
///         }
///     }
///     None => println!("{} is not defined in the environment.", key)
/// }
/// ```
pub fn split_paths<T: BytesContainer>(unparsed: T) -> Vec<Path> {
    // Unix: a simple colon-separated list; no quoting or escaping exists.
    #[cfg(unix)]
    fn _split_paths<T: BytesContainer>(unparsed: T) -> Vec<Path> {
        unparsed.container_as_bytes()
                .split(|b| *b == b':')
                .map(Path::new)
                .collect()
    }
    #[cfg(windows)]
    fn _split_paths<T: BytesContainer>(unparsed: T) -> Vec<Path> {
        // On Windows, the PATH environment variable is semicolon separated. Double
        // quotes are used as a way of introducing literal semicolons (since
        // c:\some;dir is a valid Windows path). Double quotes are not themselves
        // permitted in path names, so there is no way to escape a double quote.
        // Quoted regions can appear in arbitrary locations, so
        //
        //   c:\foo;c:\som"e;di"r;c:\bar
        //
        // Should parse as [c:\foo, c:\some;dir, c:\bar].
        //
        // (The above is based on testing; there is no clear reference available
        // for the grammar.)
        let mut parsed = Vec::new();
        let mut in_progress = Vec::new();
        let mut in_quote = false;
        for b in unparsed.container_as_bytes().iter() {
            match *b {
                // A separator only ends a segment when outside quotes.
                b';' if !in_quote => {
                    parsed.push(Path::new(in_progress.as_slice()));
                    in_progress.truncate(0)
                }
                // Quotes toggle state and are dropped from the output.
                b'"' => {
                    in_quote = !in_quote;
                }
                _ => {
                    in_progress.push(*b);
                }
            }
        }
        // The final segment has no trailing separator; flush it here.
        parsed.push(Path::new(in_progress));
        parsed
    }
    _split_paths(unparsed)
}
/// Joins a collection of `Path`s appropriately for the `PATH`
/// environment variable.
///
/// Returns a `Vec<u8>` on success, since `Path`s are not utf-8
/// encoded on all platforms.
///
/// Returns an `Err` (containing an error message) if one of the input
/// `Path`s contains an invalid character for constructing the `PATH`
/// variable (a double quote on Windows or a colon on Unix).
///
/// # Example
///
/// ```rust
/// use std::os;
/// use std::path::Path;
///
/// let key = "PATH";
/// let mut paths = os::getenv_as_bytes(key).map_or(Vec::new(), os::split_paths);
/// paths.push(Path::new("/home/xyz/bin"));
/// os::setenv(key, os::join_paths(paths.as_slice()).unwrap());
/// ```
pub fn join_paths<T: BytesContainer>(paths: &[T]) -> Result<Vec<u8>, &'static str> {
    #[cfg(windows)]
    fn _join_paths<T: BytesContainer>(paths: &[T]) -> Result<Vec<u8>, &'static str> {
        let mut joined = Vec::new();
        let sep = b';';
        for (i, path) in paths.iter().map(|p| p.container_as_bytes()).enumerate() {
            if i > 0 { joined.push(sep) }
            if path.contains(&b'"') {
                // Double quotes cannot be escaped in PATH; reject them.
                return Err("path segment contains `\"`");
            } else if path.contains(&sep) {
                // A segment with a literal ';' must be quoted (see
                // split_paths above for the inverse).
                joined.push(b'"');
                joined.push_all(path);
                joined.push(b'"');
            } else {
                joined.push_all(path);
            }
        }
        Ok(joined)
    }
    #[cfg(unix)]
    fn _join_paths<T: BytesContainer>(paths: &[T]) -> Result<Vec<u8>, &'static str> {
        let mut joined = Vec::new();
        let sep = b':';
        for (i, path) in paths.iter().map(|p| p.container_as_bytes()).enumerate() {
            if i > 0 { joined.push(sep) }
            // Unix has no quoting mechanism, so an embedded ':' is fatal.
            if path.contains(&sep) { return Err("path segment contains separator `:`") }
            joined.push_all(path);
        }
        Ok(joined)
    }
    _join_paths(paths)
}
/// A low-level OS in-memory pipe.
pub struct Pipe {
    /// A file descriptor representing the reading end of the pipe. Data
    /// written on the `writer` file descriptor can be read from this file
    /// descriptor.
    pub reader: c_int,
    /// A file descriptor representing the write end of the pipe. Data
    /// written to this file descriptor can be read from the `reader` file
    /// descriptor.
    pub writer: c_int,
}
/// Creates a new low-level OS in-memory pipe.
///
/// This function can fail to succeed if there are no more resources available
/// to allocate a pipe.
///
/// This function is also unsafe because there is no destructor associated
/// with the `Pipe` structure it returns. If it is not arranged for the
/// returned file descriptors to be closed, the file descriptors will leak.
/// For safe handling of this scenario, use `std::io::PipeStream` instead.
pub unsafe fn pipe() -> IoResult<Pipe> {
    let (reader, writer) = try!(sys::os::pipe());
    // `sys::os::pipe()` returns wrapped descriptors; unwrap to raw c_ints.
    Ok(Pipe {
        reader: reader.unwrap(),
        writer: writer.unwrap(),
    })
}
/// Returns the proper dll filename for the given basename of a file
/// as a String.
///
/// The result is `DLL_PREFIX + base + DLL_SUFFIX`, using the
/// platform-specific constants from the `consts` submodule.
#[cfg(not(target_os="ios"))]
pub fn dll_filename(base: &str) -> String {
    format!("{}{}{}", consts::DLL_PREFIX, base, consts::DLL_SUFFIX)
}
/// Optionally returns the filesystem path to the current executable which is
/// running but with the executable name.
///
/// # Examples
///
/// ```rust
/// use std::os;
///
/// match os::self_exe_name() {
///     Some(exe_path) => println!("Path of this executable is: {}", exe_path.display()),
///     None => println!("Unable to get the path of this executable!")
/// };
/// ```
pub fn self_exe_name() -> Option<Path> {
    // BSDs: ask the kernel via sysctl(KERN_PROC_PATHNAME).
    #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
    fn load_self() -> Option<Vec<u8>> {
        unsafe {
            use libc::funcs::bsd44::*;
            use libc::consts::os::extra::*;
            let mut mib = vec![CTL_KERN as c_int,
                               KERN_PROC as c_int,
                               KERN_PROC_PATHNAME as c_int,
                               -1 as c_int];
            let mut sz: libc::size_t = 0;
            // First call with a null buffer: query the required size.
            let err = sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint,
                             ptr::null_mut(), &mut sz, ptr::null_mut(),
                             0u as libc::size_t);
            if err != 0 { return None; }
            if sz == 0 { return None; }
            let mut v: Vec<u8> = Vec::with_capacity(sz as uint);
            // Second call actually fills the buffer.
            let err = sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint,
                             v.as_mut_ptr() as *mut c_void, &mut sz,
                             ptr::null_mut(), 0u as libc::size_t);
            if err != 0 { return None; }
            if sz == 0 { return None; }
            v.set_len(sz as uint - 1); // chop off trailing NUL
            Some(v)
        }
    }
    // Linux/Android: /proc/self/exe is a symlink to the running binary.
    #[cfg(any(target_os = "linux", target_os = "android"))]
    fn load_self() -> Option<Vec<u8>> {
        use std::io;
        match io::fs::readlink(&Path::new("/proc/self/exe")) {
            Ok(path) => Some(path.into_vec()),
            Err(..) => None
        }
    }
    // Darwin: two-call pattern around _NSGetExecutablePath (size, then fill).
    #[cfg(any(target_os = "macos", target_os = "ios"))]
    fn load_self() -> Option<Vec<u8>> {
        unsafe {
            use libc::funcs::extra::_NSGetExecutablePath;
            let mut sz: u32 = 0;
            _NSGetExecutablePath(ptr::null_mut(), &mut sz);
            if sz == 0 { return None; }
            let mut v: Vec<u8> = Vec::with_capacity(sz as uint);
            let err = _NSGetExecutablePath(v.as_mut_ptr() as *mut i8, &mut sz);
            if err != 0 { return None; }
            v.set_len(sz as uint - 1); // chop off trailing NUL
            Some(v)
        }
    }
    // Windows: module handle 0 means "the executable of this process".
    #[cfg(windows)]
    fn load_self() -> Option<Vec<u8>> {
        unsafe {
            use os::windows::fill_utf16_buf_and_decode;
            fill_utf16_buf_and_decode(|buf, sz| {
                libc::GetModuleFileNameW(0u as libc::DWORD, buf, sz)
            }).map(|s| s.into_string().into_bytes())
        }
    }
    load_self().and_then(Path::new_opt)
}
/// Optionally returns the filesystem path to the current executable which is
/// running.
///
/// Like self_exe_name() but with the binary's file name removed (only the
/// containing directory is returned).
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// match os::self_exe_path() {
///     Some(exe_path) => println!("Executable's Path is: {}", exe_path.display()),
///     None => println!("Impossible to fetch the path of this executable.")
/// };
/// ```
pub fn self_exe_path() -> Option<Path> {
    match self_exe_name() {
        Some(mut dir) => {
            // Drop the final component (the executable's file name).
            dir.pop();
            Some(dir)
        }
        None => None
    }
}
/// Optionally returns the path to the current user's home directory if known.
///
/// # Unix
///
/// Returns the value of the 'HOME' environment variable if it is set
/// and not equal to the empty string.
///
/// # Windows
///
/// Returns the value of the 'HOME' environment variable if it is
/// set and not equal to the empty string. Otherwise, returns the value of the
/// 'USERPROFILE' environment variable if it is set and not equal to the empty
/// string.
///
/// # Example
///
/// ```rust
/// use std::os;
///
/// match os::homedir() {
///     Some(ref p) => println!("{}", p.display()),
///     None => println!("Impossible to get your home dir!")
/// }
/// ```
pub fn homedir() -> Option<Path> {
    #[inline]
    #[cfg(unix)]
    fn _homedir() -> Option<Path> {
        aux_homedir("HOME")
    }
    #[inline]
    #[cfg(windows)]
    fn _homedir() -> Option<Path> {
        aux_homedir("HOME").or(aux_homedir("USERPROFILE"))
    }
    // Shared helper: a set-but-empty variable counts as unset.
    #[inline]
    fn aux_homedir(home_name: &str) -> Option<Path> {
        getenv_as_bytes(home_name).and_then(|bytes| {
            if bytes.is_empty() { None } else { Path::new_opt(bytes) }
        })
    }
    _homedir()
}
/// Returns the path to a temporary directory.
///
/// On Unix, returns the value of the 'TMPDIR' environment variable if it is
/// set, otherwise for non-Android it returns '/tmp'. If Android, since there
/// is no global temporary folder (it is usually allocated per-app), we return
/// '/data/local/tmp'.
///
/// On Windows, returns the value of, in order, the 'TMP', 'TEMP',
/// 'USERPROFILE' environment variable if any are set and not the empty
/// string. Otherwise, tmpdir returns the path to the Windows directory.
pub fn tmpdir() -> Path {
    return lookup();
    // A variable that is set but empty is treated as unset.
    fn getenv_nonempty(v: &str) -> Option<Path> {
        match getenv(v) {
            Some(x) =>
                if x.is_empty() {
                    None
                } else {
                    Path::new_opt(x)
                },
            _ => None
        }
    }
    #[cfg(unix)]
    fn lookup() -> Path {
        let default = if cfg!(target_os = "android") {
            Path::new("/data/local/tmp")
        } else {
            Path::new("/tmp")
        };
        getenv_nonempty("TMPDIR").unwrap_or(default)
    }
    #[cfg(windows)]
    fn lookup() -> Path {
        // Probe TMP, TEMP, USERPROFILE, WINDIR in that order.
        getenv_nonempty("TMP").or(
            getenv_nonempty("TEMP").or(
                getenv_nonempty("USERPROFILE").or(
                   getenv_nonempty("WINDIR")))).unwrap_or(Path::new("C:\\Windows"))
    }
}
///
/// Convert a relative path to an absolute path
///
/// If the given path is relative, return it prepended with the current working
/// directory. If the given path is already an absolute path, return it
/// as is.
///
/// # Example
/// ```rust
/// use std::os;
/// use std::path::Path;
///
/// // Assume we're in a path like /home/someuser
/// let rel_path = Path::new("..");
/// let abs_path = os::make_absolute(&rel_path).unwrap();
/// println!("The absolute path is {}", abs_path.display());
/// // Prints "The absolute path is /home"
/// ```
// NB: this is here rather than in path because it is a form of environment
// querying; what it does depends on the process working directory, not just
// the input paths.
pub fn make_absolute(p: &Path) -> IoResult<Path> {
if p.is_absolute() {
Ok(p.clone())
} else {
getcwd().map(|mut cwd| {
cwd.push(p);
cwd
})
}
}
/// Changes the current working directory to the specified path, returning
/// whether the change was completed successfully or not.
///
/// # Panics
///
/// On Windows, panics if `p` is not valid UTF-8 (it must be re-encoded to
/// UTF-16 for the Win32 API).
///
/// # Example
/// ```rust
/// use std::os;
/// use std::path::Path;
///
/// let root = Path::new("/");
/// assert!(os::change_dir(&root).is_ok());
/// println!("Successfully changed working directory to {}!", root.display());
/// ```
pub fn change_dir(p: &Path) -> IoResult<()> {
    return chdir(p);
    #[cfg(windows)]
    fn chdir(p: &Path) -> IoResult<()> {
        // `as_str().unwrap()` panics on non-UTF-8 paths; see # Panics above.
        let mut p = p.as_str().unwrap().utf16_units().collect::<Vec<u16>>();
        p.push(0);
        unsafe {
            match libc::SetCurrentDirectoryW(p.as_ptr()) != (0 as libc::BOOL) {
                true => Ok(()),
                false => Err(IoError::last_error()),
            }
        }
    }
    #[cfg(unix)]
    fn chdir(p: &Path) -> IoResult<()> {
        p.with_c_str(|buf| {
            unsafe {
                match libc::chdir(buf) == (0 as c_int) {
                    true => Ok(()),
                    false => Err(IoError::last_error()),
                }
            }
        })
    }
}
/// Returns the platform-specific value of errno
pub fn errno() -> uint {
    // Thin wrapper over the per-platform implementation in `sys::os`.
    os_imp::errno() as uint
}
/// Return the human-readable string corresponding to an `errno()` value of
/// `errnum`, as produced by the platform layer.
///
/// # Example
/// ```rust
/// use std::os;
///
/// // Same as println!("{}", last_os_error());
/// println!("{}", os::error_string(os::errno() as uint));
/// ```
pub fn error_string(errnum: uint) -> String {
    os_imp::error_string(errnum as i32)
}
/// Get a string representing the platform-dependent last error
pub fn last_os_error() -> String {
error_string(errno() as uint)
}
static EXIT_STATUS: AtomicInt = INIT_ATOMIC_INT;
/// Sets the process exit code
///
/// Sets the exit code returned by the process if all supervised tasks
/// terminate successfully (without panicking). If the current root task panics
/// and is supervised by the scheduler then any user-specified exit status is
/// ignored and the process exits with the default panic status.
///
/// Note that this is not synchronized against modifications of other threads.
pub fn set_exit_status(code: int) {
    // Plain atomic store; concurrent writers race (last write wins).
    EXIT_STATUS.store(code, SeqCst)
}
/// Fetches the process's current exit code. This defaults to 0 and can change
/// by calling `set_exit_status`.
pub fn get_exit_status() -> int {
    EXIT_STATUS.load(SeqCst)
}
#[cfg(target_os = "macos")]
/// Builds an owned byte vector for each of the `argc` C strings in `argv`.
///
/// Safety: `argv` must point to at least `argc` valid, NUL-terminated
/// strings.
unsafe fn load_argc_and_argv(argc: int,
                             argv: *const *const c_char) -> Vec<Vec<u8>> {
    use c_str::CString;
    Vec::from_fn(argc as uint, |i| {
        // Non-owning CString view; `to_vec` copies the bytes out.
        CString::new(*argv.offset(i as int), false).as_bytes_no_nul().to_vec()
    })
}
/// Returns the command line arguments
///
/// Returns a list of the command line arguments.
#[cfg(target_os = "macos")]
fn real_args_as_bytes() -> Vec<Vec<u8>> {
    unsafe {
        // crt_externs.h exposes the process's original argc/argv; see the
        // extern declarations of _NSGetArgc/_NSGetArgv below.
        let (argc, argv) = (*_NSGetArgc() as int,
                            *_NSGetArgv() as *const *const c_char);
        load_argc_and_argv(argc, argv)
    }
}
// As _NSGetArgc and _NSGetArgv aren't mentioned in iOS docs
// and use underscores in their names - they're most probably
// are considered private and therefore should be avoided
// Here is another way to get arguments using Objective C
// runtime
//
// In general it looks like:
// res = Vec::new()
// let args = [[NSProcessInfo processInfo] arguments]
// for i in range(0, [args count])
//      res.push([args objectAtIndex:i])
// res
#[cfg(target_os = "ios")]
fn real_args_as_bytes() -> Vec<Vec<u8>> {
    use c_str::CString;
    use iter::range;
    use mem;
    #[link(name = "objc")]
    extern {
        fn sel_registerName(name: *const libc::c_uchar) -> Sel;
        fn objc_msgSend(obj: NsId, sel: Sel, ...) -> NsId;
        fn objc_getClass(class_name: *const libc::c_uchar) -> NsId;
    }
    #[link(name = "Foundation", kind = "framework")]
    extern {}
    // Opaque Objective-C runtime handles.
    type Sel = *const libc::c_void;
    type NsId = *const libc::c_void;
    let mut res = Vec::new();
    unsafe {
        // Pre-register the selectors we will send.
        let processInfoSel = sel_registerName("processInfo\0".as_ptr());
        let argumentsSel = sel_registerName("arguments\0".as_ptr());
        let utf8Sel = sel_registerName("UTF8String\0".as_ptr());
        let countSel = sel_registerName("count\0".as_ptr());
        let objectAtSel = sel_registerName("objectAtIndex:\0".as_ptr());
        // args = [[NSProcessInfo processInfo] arguments]
        let klass = objc_getClass("NSProcessInfo\0".as_ptr());
        let info = objc_msgSend(klass, processInfoSel);
        let args = objc_msgSend(info, argumentsSel);
        let cnt: int = mem::transmute(objc_msgSend(args, countSel));
        for i in range(0, cnt) {
            // [[args objectAtIndex:i] UTF8String] — borrowed C string,
            // copied into an owned Vec<u8> before the loop moves on.
            let tmp = objc_msgSend(args, objectAtSel, i);
            let utf_c_str: *const libc::c_char =
                mem::transmute(objc_msgSend(tmp, utf8Sel));
            let s = CString::new(utf_c_str, false);
            res.push(s.as_bytes_no_nul().to_vec())
        }
    }
    res
}
#[cfg(any(target_os = "linux",
          target_os = "android",
          target_os = "freebsd",
          target_os = "dragonfly"))]
fn real_args_as_bytes() -> Vec<Vec<u8>> {
    use rustrt;
    // The runtime stores the argument vector; panic if it was never
    // initialized for this process.
    match rustrt::args::clone() {
        Some(args) => args,
        None => panic!("process arguments not initialized")
    }
}
#[cfg(not(windows))]
// Non-Windows implementation: lossily decode the raw argument bytes as
// UTF-8 (invalid sequences become \uFFFD).
fn real_args() -> Vec<String> {
    let mut decoded = Vec::new();
    for raw in real_args_as_bytes().into_iter() {
        decoded.push(String::from_utf8_lossy(raw.as_slice()).into_string());
    }
    decoded
}
#[cfg(windows)]
// Windows implementation: re-split the process command line via shell32's
// CommandLineToArgvW.
fn real_args() -> Vec<String> {
    use slice;
    let mut nArgs: c_int = 0;
    let lpArgCount: *mut c_int = &mut nArgs;
    let lpCmdLine = unsafe { GetCommandLineW() };
    let szArgList = unsafe { CommandLineToArgvW(lpCmdLine, lpArgCount) };
    let args = Vec::from_fn(nArgs as uint, |i| unsafe {
        // Determine the length of this argument by scanning to its NUL.
        let ptr = *szArgList.offset(i as int);
        let mut len = 0;
        while *ptr.offset(len as int) != 0 { len += 1; }
        // Push it onto the list.
        let ptr = ptr as *const u16;
        let buf = slice::from_raw_buf(&ptr, len);
        let opt_s = String::from_utf16(::str::truncate_utf16_at_nul(buf));
        opt_s.expect("CommandLineToArgvW returned invalid UTF-16")
    });
    unsafe {
        // The argv block was allocated by CommandLineToArgvW; release it
        // with LocalFree now that all arguments have been copied out.
        LocalFree(szArgList as *mut c_void);
    }
    return args
}
#[cfg(windows)]
// Windows has no byte-level argv, so the byte form is just the UTF-8
// encoding of the decoded strings.
fn real_args_as_bytes() -> Vec<Vec<u8>> {
    let mut out = Vec::new();
    for arg in real_args().into_iter() {
        out.push(arg.into_bytes());
    }
    out
}
// Win32 wide-string pointer (`const wchar_t *`).
type LPCWSTR = *const u16;
#[cfg(windows)]
#[link_name="kernel32"]
extern "system" {
    // Returns the command line of the current process.
    fn GetCommandLineW() -> LPCWSTR;
    // Frees OS-allocated memory (here: the array CommandLineToArgvW returns).
    fn LocalFree(ptr: *mut c_void);
}
#[cfg(windows)]
#[link_name="shell32"]
extern "system" {
    // Splits a command line into an argv-style array; the caller must
    // release the result with LocalFree.
    fn CommandLineToArgvW(lpCmdLine: LPCWSTR,
                          pNumArgs: *mut c_int) -> *mut *mut u16;
}
/// Returns the arguments which this program was started with (normally passed
/// via the command line).
///
/// The first element is traditionally the path to the executable, but it can be
/// set to arbitrary text, and it may not even exist, so this property should not
/// be relied upon for security purposes.
///
/// The arguments are interpreted as utf-8, with invalid bytes replaced with \uFFFD.
/// See `String::from_utf8_lossy` for details.
/// # Example
///
/// ```rust
/// use std::os;
///
/// // Prints each argument on a separate line
/// for argument in os::args().iter() {
///     println!("{}", argument);
/// }
/// ```
pub fn args() -> Vec<String> {
    // Platform dispatch happens inside real_args() via cfg attributes.
    real_args()
}
/// Returns the arguments which this program was started with (normally passed
/// via the command line) as byte vectors.
pub fn args_as_bytes() -> Vec<Vec<u8>> {
    real_args_as_bytes()
}
#[cfg(target_os = "macos")]
extern {
    // These functions are in crt_externs.h and expose the process's
    // original argc/argv; used by the macOS real_args_as_bytes above.
    pub fn _NSGetArgc() -> *mut c_int;
    pub fn _NSGetArgv() -> *mut *mut *mut c_char;
}
// Round up `from` to be divisible by `to`. A result of zero (i.e. when
// `from` is zero) is bumped to `to`, so callers always get a non-zero
// multiple of `to`.
fn round_up(from: uint, to: uint) -> uint {
    let rem = from % to;
    let rounded = if rem == 0 { from } else { from + (to - rem) };
    if rounded == 0 { to } else { rounded }
}
/// Returns the page size of the current architecture in bytes.
#[cfg(unix)]
pub fn page_size() -> uint {
    unsafe {
        // POSIX sysconf query for the VM page size.
        libc::sysconf(libc::_SC_PAGESIZE) as uint
    }
}
/// Returns the page size of the current architecture in bytes.
#[cfg(windows)]
pub fn page_size() -> uint {
    use mem;
    unsafe {
        // GetSystemInfo fills a SYSTEM_INFO struct; only dwPageSize is used.
        let mut info = mem::zeroed();
        libc::GetSystemInfo(&mut info);
        return info.dwPageSize as uint;
    }
}
/// A memory mapped file or chunk of memory. This is a very system-specific
/// interface to the OS's memory mapping facilities (`mmap` on POSIX,
/// `VirtualAlloc`/`CreateFileMapping` on Windows). It makes no attempt at
/// abstracting platform differences, besides in error values returned. Consider
/// yourself warned.
///
/// The memory map is released (unmapped) when the destructor is run, so don't
/// let it leave scope by accident if you want it to stick around.
pub struct MemoryMap {
    data: *mut u8,       // base address returned by the OS
    len: uint,           // mapped length; `new` rounds it up to a page multiple
    kind: MemoryMapKind, // file-backed vs. anonymous/virtual mapping
}
/// Type of memory map
pub enum MemoryMapKind {
    /// Memory map backed by a file (created via `MapFd`). On POSIX the inner
    /// pointer is unused — `MemoryMap::new` stores a null pointer here; on
    /// Windows it presumably carries mapping-handle data (confirm against the
    /// Windows `MemoryMap` impl).
    MapFile(*const u8),
    /// Virtual memory map. Usually used to change the permissions of a given
    /// chunk of memory, or for allocation. Corresponds to `VirtualAlloc` on
    /// Windows.
    MapVirtual
}
/// Options the memory map is created with
pub enum MapOption {
    /// The memory should be readable
    MapReadable,
    /// The memory should be writable
    MapWritable,
    /// The memory should be executable
    MapExecutable,
    /// Create a map for a specific address range. Corresponds to `MAP_FIXED` on
    /// POSIX.
    MapAddr(*const u8),
    /// Create a memory mapping for a file with a given fd.
    MapFd(c_int),
    /// When using `MapFd`, the start of the map is `uint` bytes from the start
    /// of the file. Must be a multiple of `MemoryMap::granularity()`.
    MapOffset(uint),
    /// On POSIX, this can be used to specify the default flags passed to
    /// `mmap`. By default it uses `MAP_PRIVATE` and, if not using `MapFd`,
    /// `MAP_ANON`. This will override both of those. This is platform-specific
    /// (the exact values used) and ignored on Windows.
    MapNonStandardFlags(c_int),
}
/// Possible errors when creating a map. POSIX-specific variants come first,
/// then Windows-specific ones; see the section headers below.
pub enum MapError {
    /// ## The following are POSIX-specific
    ///
    /// fd was not open for reading or, if using `MapWritable`, was not open for
    /// writing.
    ErrFdNotAvail,
    /// fd was not valid
    ErrInvalidFd,
    /// Either the address given by `MapAddr` or offset given by `MapOffset` was
    /// not a multiple of `MemoryMap::granularity` (unaligned to page size).
    ErrUnaligned,
    /// With `MapFd`, the fd does not support mapping.
    ErrNoMapSupport,
    /// If using `MapAddr`, the address + `min_len` was outside of the process's
    /// address space. If using `MapFd`, the target of the fd didn't have enough
    /// resources to fulfill the request.
    ErrNoMem,
    /// A zero-length map was requested. This is invalid according to
    /// [POSIX](http://pubs.opengroup.org/onlinepubs/9699919799/functions/mmap.html).
    /// Not all platforms obey this, but this wrapper does.
    ErrZeroLength,
    /// Unrecognized error. The inner value is the unrecognized errno.
    ErrUnknown(int),
    /// ## The following are Windows-specific
    ///
    /// Unsupported combination of protection flags
    /// (`MapReadable`/`MapWritable`/`MapExecutable`).
    ErrUnsupProt,
    /// When using `MapFd`, `MapOffset` was given (Windows does not support this
    /// at all)
    ErrUnsupOffset,
    /// When using `MapFd`, there was already a mapping to the file.
    ErrAlreadyExists,
    /// Unrecognized error from `VirtualAlloc`. The inner value is the return
    /// value of GetLastError.
    ErrVirtualAlloc(uint),
    /// Unrecognized error from `CreateFileMapping`. The inner value is the
    /// return value of `GetLastError`.
    ErrCreateFileMappingW(uint),
    /// Unrecognized error from `MapViewOfFile`. The inner value is the return
    /// value of `GetLastError`.
    ErrMapViewOfFile(uint)
}
impl fmt::Show for MapError {
    // Static message for simple variants; variants carrying an error code
    // format the code inline and return early.
    fn fmt(&self, out: &mut fmt::Formatter) -> fmt::Result {
        let str = match *self {
            ErrFdNotAvail => "fd not available for reading or writing",
            ErrInvalidFd => "Invalid fd",
            ErrUnaligned => {
                "Unaligned address, invalid flags, negative length or \
                 unaligned offset"
            }
            ErrNoMapSupport=> "File doesn't support mapping",
            ErrNoMem => "Invalid address, or not enough available memory",
            ErrUnsupProt => "Protection mode unsupported",
            ErrUnsupOffset => "Offset in virtual memory mode is unsupported",
            ErrAlreadyExists => "File mapping for specified file already exists",
            ErrZeroLength => "Zero-length mapping not allowed",
            ErrUnknown(code) => {
                return write!(out, "Unknown error = {}", code)
            },
            ErrVirtualAlloc(code) => {
                return write!(out, "VirtualAlloc failure = {}", code)
            },
            ErrCreateFileMappingW(code) => {
                return write!(out, "CreateFileMappingW failure = {}", code)
            },
            ErrMapViewOfFile(code) => {
                return write!(out, "MapViewOfFile failure = {}", code)
            }
        };
        write!(out, "{}", str)
    }
}
impl Error for MapError {
    // Generic category string; the variant-specific text (from the Show
    // impl above) is exposed through detail().
    fn description(&self) -> &str { "memory map error" }
    fn detail(&self) -> Option<String> { Some(self.to_string()) }
}
// Allows a MapError to be converted into a boxed Error trait object,
// e.g. by error-propagation machinery such as try!.
impl FromError<MapError> for Box<Error> {
    fn from_error(err: MapError) -> Box<Error> {
        box err
    }
}
#[cfg(unix)]
impl MemoryMap {
    /// Create a new mapping with the given `options`, at least `min_len` bytes
    /// long. `min_len` must be greater than zero; see the note on
    /// `ErrZeroLength`.
    pub fn new(min_len: uint, options: &[MapOption]) -> Result<MemoryMap, MapError> {
        use libc::off_t;
        if min_len == 0 {
            return Err(ErrZeroLength)
        }
        let mut addr: *const u8 = ptr::null();
        let mut prot = 0;
        let mut flags = libc::MAP_PRIVATE;
        let mut fd = -1;
        let mut offset = 0;
        let mut custom_flags = false;
        // mmap works in whole pages; round the requested length up.
        let len = round_up(min_len, page_size());
        // Fold the requested options into mmap's prot/flags/fd/offset.
        for &o in options.iter() {
            match o {
                MapReadable => { prot |= libc::PROT_READ; },
                MapWritable => { prot |= libc::PROT_WRITE; },
                MapExecutable => { prot |= libc::PROT_EXEC; },
                MapAddr(addr_) => {
                    flags |= libc::MAP_FIXED;
                    addr = addr_;
                },
                MapFd(fd_) => {
                    flags |= libc::MAP_FILE;
                    fd = fd_;
                },
                MapOffset(offset_) => { offset = offset_ as off_t; },
                // Note: this REPLACES the flags accumulated so far.
                MapNonStandardFlags(f) => { custom_flags = true; flags = f },
            }
        }
        // Anonymous mapping unless a file descriptor or custom flags were given.
        if fd == -1 && !custom_flags { flags |= libc::MAP_ANON; }
        let r = unsafe {
            libc::mmap(addr as *mut c_void, len as libc::size_t, prot, flags,
                       fd, offset)
        };
        if r == libc::MAP_FAILED {
            // Translate errno into the matching MapError variant.
            Err(match errno() as c_int {
                libc::EACCES => ErrFdNotAvail,
                libc::EBADF => ErrInvalidFd,
                libc::EINVAL => ErrUnaligned,
                libc::ENODEV => ErrNoMapSupport,
                libc::ENOMEM => ErrNoMem,
                code => ErrUnknown(code as int)
            })
        } else {
            Ok(MemoryMap {
                data: r as *mut u8,
                len: len,
                kind: if fd == -1 {
                    MapVirtual
                } else {
                    MapFile(ptr::null())
                }
            })
        }
    }
    /// Granularity that the offset or address must be for `MapOffset` and
    /// `MapAddr` respectively.
    pub fn granularity() -> uint {
        page_size()
    }
}
#[cfg(unix)]
impl Drop for MemoryMap {
    /// Unmap the mapping. Any `munmap` failure is silently ignored; it can
    /// only fail due to logic errors (e.g. an invalid address range).
    fn drop(&mut self) {
        if self.len == 0 { /* workaround for dummy_stack */ return; }
        unsafe {
            // `munmap` only panics due to logic errors
            libc::munmap(self.data as *mut c_void, self.len as libc::size_t);
        }
    }
}
#[cfg(windows)]
impl MemoryMap {
    /// Create a new mapping with the given `options`, at least `min_len` bytes long.
    ///
    /// Anonymous mappings (no `MapFd`) are served by `VirtualAlloc`;
    /// file-backed mappings go through `CreateFileMappingW` followed by
    /// `MapViewOfFile`.
    pub fn new(min_len: uint, options: &[MapOption]) -> Result<MemoryMap, MapError> {
        use libc::types::os::arch::extra::{LPVOID, DWORD, SIZE_T, HANDLE};
        let mut lpAddress: LPVOID = ptr::null_mut();
        let mut readable = false;
        let mut writable = false;
        let mut executable = false;
        let mut fd: c_int = -1;
        let mut offset: uint = 0;
        let len = round_up(min_len, page_size());
        for &o in options.iter() {
            match o {
                MapReadable => { readable = true; },
                MapWritable => { writable = true; },
                MapExecutable => { executable = true; }
                MapAddr(addr_) => { lpAddress = addr_ as LPVOID; },
                MapFd(fd_) => { fd = fd_; },
                MapOffset(offset_) => { offset = offset_; },
                // Non-standard (mmap-style) flags have no Windows equivalent.
                MapNonStandardFlags(..) => {}
            }
        }
        // Translate the (exec, read, write) triple into a Windows page
        // protection constant; unrepresentable combinations are rejected.
        let flProtect = match (executable, readable, writable) {
            (false, false, false) if fd == -1 => libc::PAGE_NOACCESS,
            (false, true, false) => libc::PAGE_READONLY,
            (false, true, true) => libc::PAGE_READWRITE,
            (true, false, false) if fd == -1 => libc::PAGE_EXECUTE,
            (true, true, false) => libc::PAGE_EXECUTE_READ,
            (true, true, true) => libc::PAGE_EXECUTE_READWRITE,
            _ => return Err(ErrUnsupProt)
        };
        if fd == -1 {
            // Anonymous mapping: offsets only make sense with a file.
            if offset != 0 {
                return Err(ErrUnsupOffset);
            }
            let r = unsafe {
                libc::VirtualAlloc(lpAddress,
                                   len as SIZE_T,
                                   libc::MEM_COMMIT | libc::MEM_RESERVE,
                                   flProtect)
            };
            match r as uint {
                0 => Err(ErrVirtualAlloc(errno())),
                _ => Ok(MemoryMap {
                    data: r as *mut u8,
                    len: len,
                    kind: MapVirtual
                })
            }
        } else {
            let dwDesiredAccess = match (executable, readable, writable) {
                (false, true, false) => libc::FILE_MAP_READ,
                (false, true, true) => libc::FILE_MAP_WRITE,
                (true, true, false) => libc::FILE_MAP_READ | libc::FILE_MAP_EXECUTE,
                (true, true, true) => libc::FILE_MAP_WRITE | libc::FILE_MAP_EXECUTE,
                _ => return Err(ErrUnsupProt) // Actually, because of the check above,
                                              // we should never get here.
            };
            unsafe {
                let hFile = libc::get_osfhandle(fd) as HANDLE;
                let mapping = libc::CreateFileMappingW(hFile,
                                                       ptr::null_mut(),
                                                       flProtect,
                                                       0,
                                                       0,
                                                       ptr::null());
                if mapping == ptr::null_mut() {
                    return Err(ErrCreateFileMappingW(errno()));
                }
                if errno() as c_int == libc::ERROR_ALREADY_EXISTS {
                    return Err(ErrAlreadyExists);
                }
                // BUG FIX: MapViewOfFile's third argument is
                // `dwFileOffsetHigh` — the upper 32 bits of the *file
                // offset*. The old code passed the upper bits of `len`,
                // which would map the wrong region for offsets >= 4 GiB.
                let r = libc::MapViewOfFile(mapping,
                                            dwDesiredAccess,
                                            ((offset as u64) >> 32) as DWORD,
                                            (offset & 0xffff_ffff) as DWORD,
                                            0);
                match r as uint {
                    0 => Err(ErrMapViewOfFile(errno())),
                    _ => Ok(MemoryMap {
                        data: r as *mut u8,
                        len: len,
                        // Keep the mapping handle so Drop can CloseHandle it.
                        kind: MapFile(mapping as *const u8)
                    })
                }
            }
        }
    }
    /// Granularity of MapAddr() and MapOffset() parameter values.
    /// This may be greater than the value returned by page_size().
    pub fn granularity() -> uint {
        use mem;
        unsafe {
            let mut info = mem::zeroed();
            libc::GetSystemInfo(&mut info);
            return info.dwAllocationGranularity as uint;
        }
    }
}
#[cfg(windows)]
impl Drop for MemoryMap {
    /// Unmap the mapping. Failures of `VirtualFree`, `UnmapViewOfFile`, or
    /// `CloseHandle` are reported to stdout rather than panicking, since
    /// panicking in a destructor is undesirable.
    fn drop(&mut self) {
        use libc::types::os::arch::extra::{LPCVOID, HANDLE};
        use libc::consts::os::extra::FALSE;
        // Zero-length maps own no OS resources.
        if self.len == 0 { return }
        unsafe {
            match self.kind {
                MapVirtual => {
                    // MEM_RELEASE requires size 0 and the base address.
                    if libc::VirtualFree(self.data as *mut c_void, 0,
                                         libc::MEM_RELEASE) == 0 {
                        println!("VirtualFree failed: {}", errno());
                    }
                },
                MapFile(mapping) => {
                    if libc::UnmapViewOfFile(self.data as LPCVOID) == FALSE {
                        println!("UnmapViewOfFile failed: {}", errno());
                    }
                    if libc::CloseHandle(mapping as HANDLE) == FALSE {
                        println!("CloseHandle failed: {}", errno());
                    }
                }
            }
        }
    }
}
/// Platform-independent accessors for the mapping.
impl MemoryMap {
    /// Returns the pointer to the memory created or modified by this map.
    pub fn data(&self) -> *mut u8 { self.data }
    /// Returns the number of bytes this map applies to.
    pub fn len(&self) -> uint { self.len }
    /// Returns the type of mapping this represents.
    pub fn kind(&self) -> MemoryMapKind { self.kind }
}
// Per-target-OS constant tables. Exactly one `consts` module is compiled in,
// selected by `target_os`; each exposes the same set of platform constants
// (FAMILY, SYSNAME, DLL_*/EXE_* affixes) plus the re-exported ARCH string.
#[cfg(target_os = "linux")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "unix";
    /// A string describing the specific operating system in use: in this
    /// case, `linux`.
    pub const SYSNAME: &'static str = "linux";
    /// Specifies the filename prefix used for shared libraries on this
    /// platform: in this case, `lib`.
    pub const DLL_PREFIX: &'static str = "lib";
    /// Specifies the filename suffix used for shared libraries on this
    /// platform: in this case, `.so`.
    pub const DLL_SUFFIX: &'static str = ".so";
    /// Specifies the file extension used for shared libraries on this
    /// platform that goes after the dot: in this case, `so`.
    pub const DLL_EXTENSION: &'static str = "so";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, the empty string.
    pub const EXE_SUFFIX: &'static str = "";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, the empty string.
    pub const EXE_EXTENSION: &'static str = "";
}
#[cfg(target_os = "macos")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "unix";
    /// A string describing the specific operating system in use: in this
    /// case, `macos`.
    pub const SYSNAME: &'static str = "macos";
    /// Specifies the filename prefix used for shared libraries on this
    /// platform: in this case, `lib`.
    pub const DLL_PREFIX: &'static str = "lib";
    /// Specifies the filename suffix used for shared libraries on this
    /// platform: in this case, `.dylib`.
    pub const DLL_SUFFIX: &'static str = ".dylib";
    /// Specifies the file extension used for shared libraries on this
    /// platform that goes after the dot: in this case, `dylib`.
    pub const DLL_EXTENSION: &'static str = "dylib";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, the empty string.
    pub const EXE_SUFFIX: &'static str = "";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, the empty string.
    pub const EXE_EXTENSION: &'static str = "";
}
// Note: iOS deliberately omits the DLL_* constants (no shared-library
// distribution on that platform at the time this was written).
#[cfg(target_os = "ios")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "unix";
    /// A string describing the specific operating system in use: in this
    /// case, `ios`.
    pub const SYSNAME: &'static str = "ios";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, the empty string.
    pub const EXE_SUFFIX: &'static str = "";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, the empty string.
    pub const EXE_EXTENSION: &'static str = "";
}
#[cfg(target_os = "freebsd")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "unix";
    /// A string describing the specific operating system in use: in this
    /// case, `freebsd`.
    pub const SYSNAME: &'static str = "freebsd";
    /// Specifies the filename prefix used for shared libraries on this
    /// platform: in this case, `lib`.
    pub const DLL_PREFIX: &'static str = "lib";
    /// Specifies the filename suffix used for shared libraries on this
    /// platform: in this case, `.so`.
    pub const DLL_SUFFIX: &'static str = ".so";
    /// Specifies the file extension used for shared libraries on this
    /// platform that goes after the dot: in this case, `so`.
    pub const DLL_EXTENSION: &'static str = "so";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, the empty string.
    pub const EXE_SUFFIX: &'static str = "";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, the empty string.
    pub const EXE_EXTENSION: &'static str = "";
}
#[cfg(target_os = "dragonfly")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "unix";
    /// A string describing the specific operating system in use: in this
    /// case, `dragonfly`.
    pub const SYSNAME: &'static str = "dragonfly";
    /// Specifies the filename prefix used for shared libraries on this
    /// platform: in this case, `lib`.
    pub const DLL_PREFIX: &'static str = "lib";
    /// Specifies the filename suffix used for shared libraries on this
    /// platform: in this case, `.so`.
    pub const DLL_SUFFIX: &'static str = ".so";
    /// Specifies the file extension used for shared libraries on this
    /// platform that goes after the dot: in this case, `so`.
    pub const DLL_EXTENSION: &'static str = "so";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, the empty string.
    pub const EXE_SUFFIX: &'static str = "";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, the empty string.
    pub const EXE_EXTENSION: &'static str = "";
}
#[cfg(target_os = "android")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "unix";
    /// A string describing the specific operating system in use: in this
    /// case, `android`.
    pub const SYSNAME: &'static str = "android";
    /// Specifies the filename prefix used for shared libraries on this
    /// platform: in this case, `lib`.
    pub const DLL_PREFIX: &'static str = "lib";
    /// Specifies the filename suffix used for shared libraries on this
    /// platform: in this case, `.so`.
    pub const DLL_SUFFIX: &'static str = ".so";
    /// Specifies the file extension used for shared libraries on this
    /// platform that goes after the dot: in this case, `so`.
    pub const DLL_EXTENSION: &'static str = "so";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, the empty string.
    pub const EXE_SUFFIX: &'static str = "";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, the empty string.
    pub const EXE_EXTENSION: &'static str = "";
}
#[cfg(target_os = "windows")]
pub mod consts {
    pub use os::arch_consts::ARCH;
    pub const FAMILY: &'static str = "windows";
    /// A string describing the specific operating system in use: in this
    /// case, `windows`.
    pub const SYSNAME: &'static str = "windows";
    /// Specifies the filename prefix used for shared libraries on this
    /// platform: in this case, the empty string.
    pub const DLL_PREFIX: &'static str = "";
    /// Specifies the filename suffix used for shared libraries on this
    /// platform: in this case, `.dll`.
    pub const DLL_SUFFIX: &'static str = ".dll";
    /// Specifies the file extension used for shared libraries on this
    /// platform that goes after the dot: in this case, `dll`.
    pub const DLL_EXTENSION: &'static str = "dll";
    /// Specifies the filename suffix used for executable binaries on this
    /// platform: in this case, `.exe`.
    pub const EXE_SUFFIX: &'static str = ".exe";
    /// Specifies the file extension, if any, used for executable binaries
    /// on this platform: in this case, `exe`.
    pub const EXE_EXTENSION: &'static str = "exe";
}
// Per-target-architecture ARCH string, re-exported by `consts` above.
#[cfg(target_arch = "x86")]
mod arch_consts {
    pub const ARCH: &'static str = "x86";
}
#[cfg(target_arch = "x86_64")]
mod arch_consts {
    pub const ARCH: &'static str = "x86_64";
}
#[cfg(target_arch = "arm")]
mod arch_consts {
    pub const ARCH: &'static str = "arm";
}
#[cfg(target_arch = "mips")]
mod arch_consts {
    pub const ARCH: &'static str = "mips";
}
#[cfg(target_arch = "mipsel")]
mod arch_consts {
    pub const ARCH: &'static str = "mipsel";
}
// Tests for the `os` module: error reporting, environment variables, paths,
// memory maps, and PATH-style splitting/joining.
#[cfg(test)]
mod tests {
    use prelude::*;
    use c_str::ToCStr;
    use option;
    use os::{env, getcwd, getenv, make_absolute};
    use os::{split_paths, join_paths, setenv, unsetenv};
    use os;
    use rand::Rng;
    use rand;
    #[test]
    pub fn last_os_error() {
        debug!("{}", os::last_os_error());
    }
    // Generates a random env-var name guaranteed not to be set yet.
    fn make_rand_name() -> String {
        let mut rng = rand::task_rng();
        let n = format!("TEST{}", rng.gen_ascii_chars().take(10u)
                                     .collect::<String>());
        assert!(getenv(n.as_slice()).is_none());
        n
    }
    #[test]
    fn test_num_cpus() {
        assert!(os::num_cpus() > 0);
    }
    #[test]
    fn test_setenv() {
        let n = make_rand_name();
        setenv(n.as_slice(), "VALUE");
        assert_eq!(getenv(n.as_slice()), option::Some("VALUE".to_string()));
    }
    #[test]
    fn test_unsetenv() {
        let n = make_rand_name();
        setenv(n.as_slice(), "VALUE");
        unsetenv(n.as_slice());
        assert_eq!(getenv(n.as_slice()), option::None);
    }
    #[test]
    #[ignore]
    fn test_setenv_overwrite() {
        let n = make_rand_name();
        setenv(n.as_slice(), "1");
        setenv(n.as_slice(), "2");
        assert_eq!(getenv(n.as_slice()), option::Some("2".to_string()));
        setenv(n.as_slice(), "");
        assert_eq!(getenv(n.as_slice()), option::Some("".to_string()));
    }
    // Windows GetEnvironmentVariable requires some extra work to make sure
    // the buffer the variable is copied into is the right size
    #[test]
    #[ignore]
    fn test_getenv_big() {
        let mut s = "".to_string();
        let mut i = 0i;
        while i < 100 {
            s.push_str("aaaaaaaaaa");
            i += 1;
        }
        let n = make_rand_name();
        setenv(n.as_slice(), s.as_slice());
        debug!("{}", s.clone());
        assert_eq!(getenv(n.as_slice()), option::Some(s));
    }
    #[test]
    fn test_self_exe_name() {
        let path = os::self_exe_name();
        assert!(path.is_some());
        let path = path.unwrap();
        debug!("{}", path.display());
        // Hard to test this function
        assert!(path.is_absolute());
    }
    #[test]
    fn test_self_exe_path() {
        let path = os::self_exe_path();
        assert!(path.is_some());
        let path = path.unwrap();
        debug!("{}", path.display());
        // Hard to test this function
        assert!(path.is_absolute());
    }
    #[test]
    #[ignore]
    fn test_env_getenv() {
        let e = env();
        assert!(e.len() > 0u);
        for p in e.iter() {
            let (n, v) = (*p).clone();
            debug!("{}", n);
            let v2 = getenv(n.as_slice());
            // MingW seems to set some funky environment variables like
            // "=C:=C:\MinGW\msys\1.0\bin" and "!::=::\" that are returned
            // from env() but not visible from getenv().
            assert!(v2.is_none() || v2 == option::Some(v));
        }
    }
    #[test]
    fn test_env_set_get_huge() {
        let n = make_rand_name();
        let s = "x".repeat(10000).to_string();
        setenv(n.as_slice(), s.as_slice());
        assert_eq!(getenv(n.as_slice()), Some(s));
        unsetenv(n.as_slice());
        assert_eq!(getenv(n.as_slice()), None);
    }
    #[test]
    fn test_env_setenv() {
        let n = make_rand_name();
        let mut e = env();
        setenv(n.as_slice(), "VALUE");
        // env() snapshots are taken before and after the set.
        assert!(!e.contains(&(n.clone(), "VALUE".to_string())));
        e = env();
        assert!(e.contains(&(n, "VALUE".to_string())));
    }
    #[test]
    fn test() {
        assert!((!Path::new("test-path").is_absolute()));
        let cwd = getcwd().unwrap();
        debug!("Current working directory: {}", cwd.display());
        debug!("{}", make_absolute(&Path::new("test-path")).unwrap().display());
        debug!("{}", make_absolute(&Path::new("/usr/bin")).unwrap().display());
    }
    #[test]
    #[cfg(unix)]
    fn homedir() {
        let oldhome = getenv("HOME");
        setenv("HOME", "/home/MountainView");
        assert!(os::homedir() == Some(Path::new("/home/MountainView")));
        setenv("HOME", "");
        assert!(os::homedir().is_none());
        // Restore the original HOME so later tests are unaffected.
        for s in oldhome.iter() {
            setenv("HOME", s.as_slice());
        }
    }
    #[test]
    #[cfg(windows)]
    fn homedir() {
        let oldhome = getenv("HOME");
        let olduserprofile = getenv("USERPROFILE");
        setenv("HOME", "");
        setenv("USERPROFILE", "");
        assert!(os::homedir().is_none());
        setenv("HOME", "/home/MountainView");
        assert!(os::homedir() == Some(Path::new("/home/MountainView")));
        setenv("HOME", "");
        setenv("USERPROFILE", "/home/MountainView");
        assert!(os::homedir() == Some(Path::new("/home/MountainView")));
        // HOME takes precedence over USERPROFILE when both are set.
        setenv("HOME", "/home/MountainView");
        setenv("USERPROFILE", "/home/PaloAlto");
        assert!(os::homedir() == Some(Path::new("/home/MountainView")));
        for s in oldhome.iter() {
            setenv("HOME", s.as_slice());
        }
        for s in olduserprofile.iter() {
            setenv("USERPROFILE", s.as_slice());
        }
    }
    #[test]
    fn memory_map_rw() {
        use result::{Ok, Err};
        let chunk = match os::MemoryMap::new(16, &[
            os::MapReadable,
            os::MapWritable
        ]) {
            Ok(chunk) => chunk,
            Err(msg) => panic!("{}", msg)
        };
        assert!(chunk.len >= 16);
        unsafe {
            *chunk.data = 0xBE;
            assert!(*chunk.data == 0xBE);
        }
    }
    #[test]
    fn memory_map_file() {
        use result::{Ok, Err};
        use os::*;
        use libc::*;
        use io::fs;
        #[cfg(unix)]
        fn lseek_(fd: c_int, size: uint) {
            unsafe {
                assert!(lseek(fd, size as off_t, SEEK_SET) == size as off_t);
            }
        }
        #[cfg(windows)]
        fn lseek_(fd: c_int, size: uint) {
            unsafe {
                assert!(lseek(fd, size as c_long, SEEK_SET) == size as c_long);
            }
        }
        let mut path = tmpdir();
        path.push("mmap_file.tmp");
        let size = MemoryMap::granularity() * 2;
        // Create a sparse file of `size` bytes by seeking then writing one byte.
        let fd = unsafe {
            let fd = path.with_c_str(|path| {
                open(path, O_CREAT | O_RDWR | O_TRUNC, S_IRUSR | S_IWUSR)
            });
            lseek_(fd, size);
            "x".with_c_str(|x| assert!(write(fd, x as *const c_void, 1) == 1));
            fd
        };
        // Map the second half of the file (offset at one granularity unit).
        let chunk = match MemoryMap::new(size / 2, &[
            MapReadable,
            MapWritable,
            MapFd(fd),
            MapOffset(size / 2)
        ]) {
            Ok(chunk) => chunk,
            Err(msg) => panic!("{}", msg)
        };
        assert!(chunk.len > 0);
        unsafe {
            *chunk.data = 0xbe;
            assert!(*chunk.data == 0xbe);
            close(fd);
        }
        drop(chunk);
        fs::unlink(&path).unwrap();
    }
    #[test]
    #[cfg(windows)]
    fn split_paths_windows() {
        fn check_parse(unparsed: &str, parsed: &[&str]) -> bool {
            split_paths(unparsed) ==
                parsed.iter().map(|s| Path::new(*s)).collect()
        }
        assert!(check_parse("", &mut [""]));
        assert!(check_parse(r#""""#, &mut [""]));
        assert!(check_parse(";;", &mut ["", "", ""]));
        assert!(check_parse(r"c:\", &mut [r"c:\"]));
        assert!(check_parse(r"c:\;", &mut [r"c:\", ""]));
        assert!(check_parse(r"c:\;c:\Program Files\",
                            &mut [r"c:\", r"c:\Program Files\"]));
        assert!(check_parse(r#"c:\;c:\"foo"\"#, &mut [r"c:\", r"c:\foo\"]));
        assert!(check_parse(r#"c:\;c:\"foo;bar"\;c:\baz"#,
                            &mut [r"c:\", r"c:\foo;bar\", r"c:\baz"]));
    }
    #[test]
    #[cfg(unix)]
    fn split_paths_unix() {
        fn check_parse(unparsed: &str, parsed: &[&str]) -> bool {
            split_paths(unparsed) ==
                parsed.iter().map(|s| Path::new(*s)).collect()
        }
        assert!(check_parse("", &mut [""]));
        assert!(check_parse("::", &mut ["", "", ""]));
        assert!(check_parse("/", &mut ["/"]));
        assert!(check_parse("/:", &mut ["/", ""]));
        assert!(check_parse("/:/usr/local", &mut ["/", "/usr/local"]));
    }
    #[test]
    #[cfg(unix)]
    fn join_paths_unix() {
        fn test_eq(input: &[&str], output: &str) -> bool {
            join_paths(input).unwrap().as_slice() == output.as_bytes()
        }
        assert!(test_eq(&[], ""));
        assert!(test_eq(&["/bin", "/usr/bin", "/usr/local/bin"],
                        "/bin:/usr/bin:/usr/local/bin"));
        assert!(test_eq(&["", "/bin", "", "", "/usr/bin", ""],
                        ":/bin:::/usr/bin:"));
        // A path containing the separator cannot be joined.
        assert!(join_paths(&["/te:st"]).is_err());
    }
    #[test]
    #[cfg(windows)]
    fn join_paths_windows() {
        fn test_eq(input: &[&str], output: &str) -> bool {
            join_paths(input).unwrap().as_slice() == output.as_bytes()
        }
        assert!(test_eq(&[], ""));
        assert!(test_eq(&[r"c:\windows", r"c:\"],
                        r"c:\windows;c:\"));
        assert!(test_eq(&["", r"c:\windows", "", "", r"c:\", ""],
                        r";c:\windows;;;c:\;"));
        assert!(test_eq(&[r"c:\te;st", r"c:\"],
                        r#""c:\te;st";c:\"#));
        assert!(join_paths(&[r#"c:\te"st"#]).is_err());
    }
    // More recursive_mkdir tests are in extra::tempfile
}
| 31.940029 | 99 | 0.551516 |
e5858f5f3eff92c6b2588b09326957961f94e920 | 13,438 | // subset of CSS selectors for CSS-in-JS
use super::parsing::{ident, skip, sym, Parsable, ParseError, Parser};
use crate::util::Atom;
use std::fmt;
/// Opaque selector specificity; higher values win when rules conflict.
/// Ordering comes from the derived `Ord` on the inner `u32`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Specificity(u32);
// to be implemented by client
/// Read-only view of a document tree that selectors can be matched against.
pub trait MatchingContext: Sized {
    /// Cheap, copyable handle to an element in the client's tree.
    type ElementRef: Copy;
    /// Returns the parent element, or `None` at the root.
    fn parent_element(&self, element: Self::ElementRef) -> Option<Self::ElementRef>;
    /// Returns the element's tag name (e.g. `"div"`).
    fn local_name(&self, element: Self::ElementRef) -> &str;
    /// Returns the value of `attribute`, or `None` if absent.
    fn attribute(&self, element: Self::ElementRef, attribute: &str) -> Option<&str>;
    // TODO: fast-path has_* methods (with default impls)
    // or maybe introduce type LocalName: PartialEq<Atom>? and make the whole trait parametrized?
}
/// A parsed CSS selector.
///
/// Parts are stored right-to-left (target component first, ancestors after),
/// which is the order the matcher walks them; see `Parsable for Selector`.
#[derive(Debug, Clone, PartialEq)]
pub struct Selector {
    pub(super) parts: Vec<SelectorPart>,
}
/// One component of a selector: a simple selector, a combinator, or an
/// `Unsupported` marker for syntax we parse but never match.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(super) enum SelectorPart {
    Universal,
    LocalName(Atom),
    Identifier(Atom),
    ClassName(Atom),
    AttrExists(Atom),
    // AttrEq(Atom, Atom),
    // AttrStartsWith(Atom, Atom),
    // AttrEndsWith(Atom, Atom),
    // AttrContains(Atom, Atom),
    Combinator(Combinator),
    // FirstChild // (prev_element_sibling == None)
    // LastChild // (next_element_sibling == None)
    // OnlyChild // (prev_element_sibling == None && next_element_sibling == None)
    // BTW: many are just compound shorthands and can be resolved here (:disabled is like [disabled] & input, select, ...)
    // PseudoClass(Atom) // :root, :hover, :focus, :active, :enabled, :disabled, :valid, :invalid, ...
    Unsupported,
}
/// Relation between neighbouring selector parts. Because parts are stored
/// reversed, `Parent` corresponds to the child combinator (`>`) and
/// `Ancestor` to the descendant combinator (whitespace) in source text.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(super) enum Combinator {
    Parent,
    Ancestor,
    Or,
    // Adjacent,
    // Sibling,
}
impl Selector {
    /// Parses a selector from source text, e.g. `"div#app > .btn"`.
    pub fn parse(input: &str) -> Result<Self, ParseError> {
        Parsable::parse(input)
    }
    /// A selector that never matches anything (used for unparseable input).
    pub(crate) fn unsupported() -> Self {
        Self {
            parts: vec![SelectorPart::Unsupported],
        }
    }
    /// Returns `Some(specificity)` if `element` matches this selector.
    ///
    /// Parts are stored right-to-left, so matching starts at the target
    /// element and climbs ancestors as `Parent`/`Ancestor` combinators are
    /// encountered. On a dead end it fast-forwards to the next `Or` branch.
    ///
    /// NOTE(review): `specificity` is never accumulated — every successful
    /// match reports `Specificity(0)`. Confirm whether that is intentional.
    pub fn match_element<C: MatchingContext>(&self, element: C::ElementRef, ctx: &C) -> Option<Specificity> {
        // so we can fast-forward to next OR
        let mut parts_iter = self.parts.iter();
        // state
        let mut current = element;
        let mut parent = false;
        let mut ancestors = false;
        let specificity = Specificity(0);
        // we are always going forward
        'next_part: while let Some(p) = parts_iter.next() {
            match p {
                SelectorPart::Combinator(comb) => {
                    match comb {
                        // state changes
                        Combinator::Parent => parent = true,
                        Combinator::Ancestor => ancestors = true,
                        // end-of-branch and we still have a match, no need to check others
                        Combinator::Or => break 'next_part,
                    }
                }
                comp => {
                    loop {
                        if parent || ancestors {
                            parent = false;
                            // note: `parent` here shadows the bool flag above
                            // with the parent element reference
                            match ctx.parent_element(current) {
                                Some(parent) => current = parent,
                                // nothing left to match
                                None => break,
                            }
                        }
                        if Self::match_component(current, comp, ctx) {
                            ancestors = false;
                            continue 'next_part;
                        }
                        // we got no match on parent
                        if !ancestors {
                            break;
                        }
                        // in ancestor mode, keep climbing until match or root
                    }
                    // no match, fast-forward to next OR
                    for p in parts_iter.by_ref() {
                        if p == &SelectorPart::Combinator(Combinator::Or) {
                            // reset stack
                            current = element;
                            continue 'next_part;
                        }
                    }
                    // or fail otherwise
                    return None;
                }
            }
        }
        // everything was fine
        Some(specificity)
    }
    /// Matches a single non-combinator component against `el`.
    /// `Combinator` parts are handled by the caller and are unreachable here.
    fn match_component<C: MatchingContext>(el: C::ElementRef, comp: &SelectorPart, ctx: &C) -> bool {
        match comp {
            SelectorPart::Universal => true,
            SelectorPart::LocalName(name) => ctx.local_name(el) == &**name,
            SelectorPart::Identifier(id) => ctx.attribute(el, "id") == Some(id),
            // class attribute is a whitespace-separated list of class names
            SelectorPart::ClassName(cls) => match ctx.attribute(el, "class") {
                Some(s) => s.split_ascii_whitespace().any(|part| part == &**cls),
                _ => false,
            },
            SelectorPart::AttrExists(att) => ctx.attribute(el, att).is_some(),
            // SelectorPart::AttrEq(att, val) => ctx.attribute(el, att) == Some(val),
            // SelectorPart::AttrStartsWith(att, s) => ctx.attribute(el, att).map_or(false, |v| v.starts_with(&**s)),
            // SelectorPart::AttrEndsWith(att, s) => ctx.attribute(el, att).map_or(false, |v| v.ends_with(&**s)),
            // SelectorPart::AttrContains(att, s) => ctx.attribute(el, att).map_or(false, |v| v.contains(&**s)),
            SelectorPart::Unsupported => false,
            SelectorPart::Combinator(_) => unreachable!(),
        }
    }
}
/// Parser-combinator grammar for selectors.
///
/// Grammar: `tag (combinator? tag)*`, where `tag` is one compound simple
/// selector and `combinator` is `>`, whitespace, `,`, or the unsupported
/// sibling combinators `+`/`~`. The parsed sequence is reversed so parts are
/// stored target-first (see `Selector::match_element`).
impl Parsable for Selector {
    fn parser<'a>() -> Parser<'a, Self> {
        let tag = || {
            let ident = || ident().map(Atom::from);
            let universal = sym("*").map(|_| SelectorPart::Universal);
            let local_name = ident().map(SelectorPart::LocalName);
            let id = sym("#") * ident().map(SelectorPart::Identifier);
            let class_name = sym(".") * ident().map(SelectorPart::ClassName);
            let attr_exists = sym("[") * ident().map(SelectorPart::AttrExists) - sym("]");
            // Any other `[...]` content is consumed but marked Unsupported.
            let unknown_attr =
                sym("[") * (!sym("]") * skip(1)).repeat(1..).map(|_| SelectorPart::Unsupported) - sym("]");
            let attr = attr_exists | unknown_attr;
            // `:pseudo` and `::pseudo` parse but never match.
            let pseudo = sym(":").discard().repeat(1..3) * ident().map(|_| SelectorPart::Unsupported);
            universal | local_name | id | class_name | attr | pseudo
        };
        // note we parse child/descendant but we flip the final order so it's parent/ancestor
        let child = sym(">").map(|_| Combinator::Parent);
        let descendant = sym(" ").map(|_| Combinator::Ancestor);
        let or = sym(",").map(|_| Combinator::Or);
        let unsupported = (sym("+") | sym("~")).map(|_| SelectorPart::Unsupported);
        let comb = (child | descendant | or).map(SelectorPart::Combinator) | unsupported;
        let selector = tag() + (comb.opt() + tag()).repeat(0..);
        selector.map(|(head, tail)| {
            let mut parts = Vec::with_capacity(tail.len() + 1);
            // reversed (child/descendant -> parent/ancestor)
            for (comb, tag) in tail.into_iter().rev() {
                parts.push(tag);
                if let Some(comb) = comb {
                    parts.push(comb);
                }
            }
            parts.push(head);
            Selector { parts }
        })
    }
}
/// Renders the selector back as source-ordered text.
///
/// Parts are stored reversed (target-first), so they are iterated in reverse
/// here to reconstruct the original left-to-right form.
impl fmt::Display for Selector {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.parts
            .iter()
            .rev()
            .try_for_each(|part| write!(f, "{}", part))
    }
}
/// Renders a single selector component. `Unsupported` has no source form and
/// prints as the placeholder `???`.
impl fmt::Display for SelectorPart {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Universal => write!(f, "*"),
            Self::LocalName(name) => write!(f, "{}", name),
            Self::Identifier(id) => write!(f, "#{}", id),
            Self::ClassName(clz) => write!(f, ".{}", clz),
            Self::AttrExists(att) => write!(f, "[{}]", att),
            Self::Combinator(comb) => write!(f, "{}", comb),
            Self::Unsupported => write!(f, "???"),
        }
    }
}
/// Renders a combinator with its CSS source symbol.
impl fmt::Display for Combinator {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            // BUG FIX: these two symbols were swapped. The parser maps the
            // child combinator `>` to `Parent` and the descendant combinator
            // (whitespace) to `Ancestor` (see `Parsable for Selector`), so
            // Display must emit `>` for Parent and ` ` for Ancestor to
            // round-trip correctly.
            Self::Parent => ">",
            Self::Ancestor => " ",
            Self::Or => ",",
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises the parser: verifies that parts come out reversed
    // (target-first) and that unsupported syntax degrades to `Unsupported`.
    #[test]
    fn parse_selector() {
        use super::Combinator::*;
        use SelectorPart::*;
        let s = |s| Selector::parse(s).unwrap().parts;
        // simple
        assert_eq!(s("*"), &[Universal]);
        assert_eq!(s("body"), &[LocalName("body".into())]);
        assert_eq!(s("h2"), &[LocalName("h2".into())]);
        assert_eq!(s("#app"), &[Identifier("app".into())]);
        assert_eq!(s(".btn"), &[ClassName("btn".into())]);
        // attrs
        assert_eq!(s(r"[href]"), &[AttrExists("href".into())]);
        // assert_eq!(s(r#"[href="foo"]"#), &[AttrEq("href".into(), "foo".into())]);
        // assert_eq!(s(r#"[href^="http"]"#), &[AttrStartsWith("href".into(), "http".into())]);
        // assert_eq!(s(r#"[href$=".org"]"#), &[AttrEndsWith("href".into(), ".org".into())]);
        // assert_eq!(s(r#"[href*="foo"]"#), &[AttrContains("href".into(), "foo".into())]);
        // combined
        assert_eq!(
            s(".btn.btn-primary"),
            &[ClassName("btn-primary".into()), ClassName("btn".into())]
        );
        assert_eq!(s("*.test"), &[ClassName("test".into()), Universal]);
        assert_eq!(
            s("div#app.test"),
            &[
                ClassName("test".into()),
                Identifier("app".into()),
                LocalName("div".into())
            ]
        );
        // combined with combinators
        assert_eq!(
            s("body > div.test div#test"),
            &[
                Identifier("test".into()),
                LocalName("div".into()),
                Combinator(Ancestor),
                ClassName("test".into()),
                LocalName("div".into()),
                Combinator(Parent),
                LocalName("body".into())
            ]
        );
        // multi
        assert_eq!(
            s("html, body"),
            &[LocalName("body".into()), Combinator(Or), LocalName("html".into())]
        );
        assert_eq!(
            s("body > div, div button span"),
            &[
                LocalName("span".into()),
                Combinator(Ancestor),
                LocalName("button".into()),
                Combinator(Ancestor),
                LocalName("div".into()),
                Combinator(Or),
                LocalName("div".into()),
                Combinator(Parent),
                LocalName("body".into()),
            ]
        );
        // unsupported for now
        assert_eq!(s(":root"), &[Unsupported]);
        assert_eq!(s("* + *"), &[Universal, Unsupported, Universal]);
        assert_eq!(s("* ~ *"), &[Universal, Unsupported, Universal]);
        // invalid
        assert!(Selector::parse("").is_err());
        assert!(Selector::parse(" ").is_err());
        assert!(Selector::parse("a,,b").is_err());
        assert!(Selector::parse("a>>b").is_err());
        // bugs & edge-cases
        assert_eq!(s("input[type=\"submit\"]"), &[Unsupported, LocalName("input".into())]);
    }
    // Exercises matching against a tiny fixed tree:
    // html(0) > body(1)#app > div(2)#panel > button(3).btn > span(4)
    #[test]
    fn matching() {
        struct Ctx;
        impl MatchingContext for Ctx {
            type ElementRef = usize;
            fn parent_element(&self, el: usize) -> Option<usize> {
                [None, Some(0), Some(1), Some(2), Some(3)][el]
            }
            fn local_name(&self, el: usize) -> &str {
                ["html", "body", "div", "button", "span"][el]
            }
            fn attribute(&self, el: usize, attr: &str) -> Option<&str> {
                let v = match attr {
                    "id" => ["", "app", "panel", "", ""][el],
                    "class" => ["", "", "", "btn", ""][el],
                    _ => "",
                };
                // empty string doubles as "attribute absent"
                match v {
                    "" => None,
                    v => Some(v),
                }
            }
        }
        let match_sel = |s, el| Selector::parse(s).unwrap().match_element(el, &Ctx).is_some();
        // invalid
        assert!(Selector::unsupported().match_element(0, &Ctx).is_none());
        // basic
        assert!(match_sel("*", 0));
        assert!(match_sel("html", 0));
        assert!(match_sel("body", 1));
        assert!(match_sel("#app", 1));
        assert!(match_sel("div", 2));
        assert!(match_sel("#panel", 2));
        assert!(match_sel("button", 3));
        assert!(match_sel(".btn", 3));
        assert!(match_sel("span", 4));
        // combined
        assert!(match_sel("body#app", 1));
        assert!(match_sel("div#panel", 2));
        assert!(match_sel("button.btn", 3));
        // parent
        assert!(match_sel("button > span", 4));
        assert!(match_sel("div#panel > button.btn > span", 4));
        // ancestor
        assert!(match_sel("button span", 4));
        assert!(match_sel("div#panel span", 4));
        assert!(match_sel("body div .btn span", 4));
        // OR
        assert!(match_sel("div, span", 4));
        assert!(match_sel("a, b, c, span, d", 4));
        assert!(match_sel("html, body", 1));
        // complex
        assert!(match_sel("div, span.foo, #panel span", 4));
        assert!(match_sel("a b c d e f g, span", 4));
    }
}
| 34.193384 | 122 | 0.486233 |
2195933334c882b2e45f2b240ffa6adda31ec886 | 2,216 | use super::{
wasm_externtype_t, wasm_mutability_enum, wasm_mutability_t, wasm_valtype_delete,
wasm_valtype_t, WasmExternType,
};
use std::convert::TryInto;
use wasmer::{ExternType, GlobalType};
/// Internal pairing of a wasmer `GlobalType` with a boxed `wasm_valtype_t`
/// for its content type, so `wasm_globaltype_content` can hand out a stable
/// pointer to C callers.
#[derive(Debug, Clone)]
pub(crate) struct WasmGlobalType {
    pub(crate) global_type: GlobalType,
    // Boxed so the pointer returned to C stays valid across moves of `Self`.
    content: Box<wasm_valtype_t>,
}
impl WasmGlobalType {
    /// Wraps a `GlobalType`, pre-boxing its value type as a `wasm_valtype_t`.
    pub(crate) fn new(global_type: GlobalType) -> Self {
        let content = Box::new(global_type.ty.into());
        Self {
            global_type,
            content,
        }
    }
}
/// C-API handle for a global type. `repr(transparent)` over
/// `wasm_externtype_t` so the two can be freely cast across the FFI
/// boundary, as the Wasm C API requires.
#[allow(non_camel_case_types)]
#[derive(Debug)]
#[repr(transparent)]
pub struct wasm_globaltype_t {
    pub(crate) extern_type: wasm_externtype_t,
}
impl wasm_globaltype_t {
    /// Wraps a wasmer `GlobalType` in the C-API handle.
    pub(crate) fn new(global_type: GlobalType) -> Self {
        Self {
            extern_type: wasm_externtype_t::new(ExternType::Global(global_type)),
        }
    }

    /// Returns the inner `WasmGlobalType`.
    ///
    /// Panics if the wrapped extern type is not a global — that would mean
    /// the value was constructed through an invalid cast.
    pub(crate) fn inner(&self) -> &WasmGlobalType {
        match &self.extern_type.inner {
            // `wasm_global_type` already binds as `&WasmGlobalType` via match
            // ergonomics; the previous `&wasm_global_type` produced a `&&_`
            // and relied on deref coercion (clippy: needless_borrow).
            WasmExternType::Global(wasm_global_type) => wasm_global_type,
            _ => {
                unreachable!("Data corruption: `wasm_globaltype_t` does not contain a global type")
            }
        }
    }
}
// Presumably expands to the `wasm_globaltype_vec_t` type and its C-API vector
// helpers — see the `wasm_declare_vec!` macro definition to confirm.
wasm_declare_vec!(globaltype);
/// C API: creates a global type from a value type and mutability flag.
///
/// Takes ownership of (and frees) `valtype`, matching the Wasm C API's
/// ownership convention. Returns `None` (null) if `valtype` is null or
/// `mutability` is not a valid `wasm_mutability_enum` value.
#[no_mangle]
pub unsafe extern "C" fn wasm_globaltype_new(
    valtype: Option<Box<wasm_valtype_t>>,
    mutability: wasm_mutability_t,
) -> Option<Box<wasm_globaltype_t>> {
    let valtype = valtype?;
    let mutability: wasm_mutability_enum = mutability.try_into().ok()?;
    let global_type = Box::new(wasm_globaltype_t::new(GlobalType::new(
        (*valtype).into(),
        mutability.into(),
    )));
    // The C API transfers ownership of the valtype to this call.
    wasm_valtype_delete(Some(valtype));
    Some(global_type)
}
/// C API: frees a global type. Dropping the `Box` releases it; null is a no-op.
#[no_mangle]
pub unsafe extern "C" fn wasm_globaltype_delete(_global_type: Option<Box<wasm_globaltype_t>>) {}
/// C API: returns the mutability (const/var) of the global type.
#[no_mangle]
pub unsafe extern "C" fn wasm_globaltype_mutability(
    global_type: &wasm_globaltype_t,
) -> wasm_mutability_t {
    wasm_mutability_enum::from(global_type.inner().global_type.mutability).into()
}
/// C API: returns the global's value type. The returned reference borrows
/// from `global_type` (the boxed content inside `WasmGlobalType`) and must
/// not be freed by the caller.
#[no_mangle]
pub unsafe extern "C" fn wasm_globaltype_content(
    global_type: &wasm_globaltype_t,
) -> &wasm_valtype_t {
    global_type.inner().content.as_ref()
}
| 26.380952 | 99 | 0.675542 |
/// Sanity check: a negative integer literal pattern must match an equal
/// negative value (and therefore must not panic).
pub fn main() {
    let value = -5;
    if !matches!(value, -5) {
        panic!()
    }
}
| 11.142857 | 23 | 0.282051 |
38b6e501e9c48f273d461483219e9fb650f71eba | 14,982 | use crate::function::PyFuncArgs;
use crate::obj::objsequence;
use crate::obj::objtuple::{PyTuple, PyTupleRef};
use crate::obj::objtype;
use crate::obj::objtype::PyClassRef;
use crate::pyobject::{IdProtocol, PyContext, PyObjectRef, PyResult, TypeProtocol};
use crate::types::create_type;
use crate::vm::VirtualMachine;
use itertools::Itertools;
use std::fs::File;
use std::io::{BufRead, BufReader};
/// `BaseException.__init__`: store the positional arguments in `args` and
/// initialize the standard exception attributes to their defaults.
fn exception_init(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
    // args.args[0] is `self`; everything after it becomes the `args` tuple.
    let exc_self = args.args[0].clone();
    let exc_args = vm.ctx.new_tuple(args.args[1..].to_vec());
    vm.set_attr(&exc_self, "args", exc_args)?;
    // TODO: have an actual `traceback` object for __traceback__
    vm.set_attr(&exc_self, "__traceback__", vm.ctx.new_list(vec![]))?;
    vm.set_attr(&exc_self, "__cause__", vm.get_none())?;
    vm.set_attr(&exc_self, "__context__", vm.get_none())?;
    vm.set_attr(&exc_self, "__suppress_context__", vm.new_bool(false))?;
    Ok(vm.get_none())
}
/// Print an exception together with its chain of `__cause__` /
/// `__context__` exceptions, oldest first, mirroring CPython's chained
/// traceback output.
pub fn print_exception(vm: &VirtualMachine, exc: &PyObjectRef) {
    let mut had_cause = false;
    if let Ok(cause) = vm.get_attribute(exc.clone(), "__cause__") {
        if !vm.get_none().is(&cause) {
            had_cause = true;
            print_exception(vm, &cause);
            println!("\nThe above exception was the direct cause of the following exception:\n");
        }
    }
    // An explicit cause suppresses the implicit context, as in CPython.
    if !had_cause {
        if let Ok(context) = vm.get_attribute(exc.clone(), "__context__") {
            if !vm.get_none().is(&context) {
                print_exception(vm, &context);
                println!("\nDuring handling of the above exception, another exception occurred:\n");
            }
        }
    }
    print_exception_inner(vm, exc)
}
/// Print source line `lineno` (1-based) of `filename`, indented by four
/// spaces. Silently does nothing when the file cannot be opened, the line
/// does not exist, or the line cannot be read.
fn print_source_line(filename: String, lineno: usize) {
    // TODO: use io.open() method instead, when available, according to https://github.com/python/cpython/blob/master/Python/traceback.c#L393
    // TODO: support different encodings
    let reader = match File::open(filename) {
        Ok(file) => BufReader::new(file),
        Err(_) => return,
    };
    // Line numbers are 1-based, so a lineno of 0 can never match.
    let target = match lineno.checked_sub(1) {
        Some(index) => index,
        None => return,
    };
    if let Some(Ok(line)) = reader.lines().nth(target) {
        // Indented with 4 spaces
        println!("    {}", line.trim_start());
    }
}
/// Print exception occurrence location from traceback element
///
/// `tb_entry` is expected to be a `(filename, lineno, obj_name)` tuple;
/// anything else prints a placeholder line. After the location line, the
/// offending source line is printed when it can be read.
fn print_traceback_entry(vm: &VirtualMachine, tb_entry: &PyObjectRef) {
    if objtype::isinstance(&tb_entry, &vm.ctx.tuple_type()) {
        let location_attrs = objsequence::get_elements_tuple(&tb_entry);
        // Each element falls back to a placeholder when it cannot be
        // stringified.
        let filename = if let Ok(x) = vm.to_str(&location_attrs[0]) {
            x.value.clone()
        } else {
            "<error>".to_string()
        };
        let lineno = if let Ok(x) = vm.to_str(&location_attrs[1]) {
            x.value.clone()
        } else {
            "<error>".to_string()
        };
        let obj_name = if let Ok(x) = vm.to_str(&location_attrs[2]) {
            x.value.clone()
        } else {
            "<error>".to_string()
        };
        println!(
            r##"  File "{}", line {}, in {}"##,
            filename, lineno, obj_name
        );
        // Bug fix: `lineno` may be the "<error>" placeholder (or otherwise
        // non-numeric), in which case the previous `parse().unwrap()`
        // panicked. Skip the source line instead of crashing.
        if let Ok(lineno) = lineno.parse() {
            print_source_line(filename, lineno);
        }
    } else {
        println!("  File ??");
    }
}
/// Print exception with traceback
///
/// Writes "Traceback (most recent call last):" followed by the stored
/// traceback entries, then the exception class name and its `args`
/// rendered via `str()`.
pub fn print_exception_inner(vm: &VirtualMachine, exc: &PyObjectRef) {
    if let Ok(tb) = vm.get_attribute(exc.clone(), "__traceback__") {
        if objtype::isinstance(&tb, &vm.ctx.list_type()) {
            // Reverse the stored order so the most recent call prints last.
            let mut tb_entries = objsequence::get_elements_list(&tb).to_vec();
            tb_entries.reverse();
            if !tb_entries.is_empty() {
                println!("Traceback (most recent call last):");
            }
            for exc_location in tb_entries.iter() {
                print_traceback_entry(vm, exc_location);
            }
        }
    } else {
        println!("No traceback set on exception");
    }
    let varargs = vm
        .get_attribute(exc.clone(), "args")
        .unwrap()
        .downcast::<PyTuple>()
        .expect("'args' must be a tuple");
    let args_repr = exception_args_as_string(vm, varargs, true);
    let exc_name = exc.class().name.clone();
    match args_repr.len() {
        0 => println!("{}", exc_name),
        1 => println!("{}: {}", exc_name, args_repr[0]),
        _ => println!("{}: ({})", exc_name, args_repr.into_iter().format(", ")),
    }
}
/// Render an exception's `args` tuple as a list of strings.
///
/// A single argument is rendered with `str()` when `str_single` is true
/// and with `repr()` otherwise; multiple arguments are always rendered
/// with `repr()`. Rendering failures produce placeholder strings instead
/// of propagating errors.
fn exception_args_as_string(
    vm: &VirtualMachine,
    varargs: PyTupleRef,
    str_single: bool,
) -> Vec<String> {
    match varargs.elements.len() {
        0 => vec![],
        1 => {
            let args0_repr = if str_single {
                vm.to_pystr(&varargs.elements[0])
                    .unwrap_or_else(|_| "<element str() failed>".to_string())
            } else {
                vm.to_repr(&varargs.elements[0])
                    .map(|s| s.as_str().to_owned())
                    .unwrap_or_else(|_| "<element repr() failed>".to_string())
            };
            vec![args0_repr]
        }
        // Iterator chain instead of the previous manual push loop; behavior
        // is unchanged.
        _ => varargs
            .elements
            .iter()
            .map(|vararg| match vm.to_repr(vararg) {
                Ok(arg_repr) => arg_repr.value.clone(),
                Err(_) => "<element repr() failed>".to_string(),
            })
            .collect(),
    }
}
/// `Exception.__str__`: render the `args` tuple.
///
/// Zero args become the empty string, one arg is shown by itself, and
/// several args become a parenthesized, comma-separated list.
fn exception_str(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
    arg_check!(
        vm,
        args,
        required = [(exc, Some(vm.ctx.exceptions.exception_type.clone()))]
    );
    let args = vm
        .get_attribute(exc.clone(), "args")
        .unwrap()
        .downcast::<PyTuple>()
        .expect("'args' must be a tuple");
    let args_str = exception_args_as_string(vm, args, false);
    let joined_str = match args_str.len() {
        0 => "".to_string(),
        1 => args_str.into_iter().next().unwrap(),
        _ => format!("({})", args_str.into_iter().format(", ")),
    };
    Ok(vm.new_str(joined_str))
}
/// `Exception.__repr__`: render as `Name(args...)`.
///
/// A single argument keeps a trailing comma (`Name(arg,)`), like the repr
/// of a one-element tuple.
fn exception_repr(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
    arg_check!(
        vm,
        args,
        required = [(exc, Some(vm.ctx.exceptions.exception_type.clone()))]
    );
    let args = vm
        .get_attribute(exc.clone(), "args")
        .unwrap()
        .downcast::<PyTuple>()
        .expect("'args' must be a tuple");
    let args_repr = exception_args_as_string(vm, args, false);
    let exc_name = exc.class().name.clone();
    let joined_str = match args_repr.len() {
        0 => format!("{}()", exc_name),
        1 => format!("{}({},)", exc_name, args_repr[0]),
        _ => format!("{}({})", exc_name, args_repr.join(", ")),
    };
    Ok(vm.new_str(joined_str))
}
/// All built-in exception classes, created once per interpreter context
/// by [`ExceptionZoo::new`].
#[derive(Debug)]
pub struct ExceptionZoo {
    pub arithmetic_error: PyClassRef,
    pub assertion_error: PyClassRef,
    pub attribute_error: PyClassRef,
    pub base_exception_type: PyClassRef,
    pub exception_type: PyClassRef,
    pub file_not_found_error: PyClassRef,
    pub file_exists_error: PyClassRef,
    pub import_error: PyClassRef,
    pub index_error: PyClassRef,
    pub key_error: PyClassRef,
    pub lookup_error: PyClassRef,
    pub module_not_found_error: PyClassRef,
    pub name_error: PyClassRef,
    pub not_implemented_error: PyClassRef,
    pub os_error: PyClassRef,
    pub overflow_error: PyClassRef,
    pub permission_error: PyClassRef,
    pub reference_error: PyClassRef,
    pub runtime_error: PyClassRef,
    pub stop_iteration: PyClassRef,
    pub syntax_error: PyClassRef,
    pub system_error: PyClassRef,
    pub type_error: PyClassRef,
    pub value_error: PyClassRef,
    pub unicode_error: PyClassRef,
    pub unicode_decode_error: PyClassRef,
    pub unicode_encode_error: PyClassRef,
    pub unicode_translate_error: PyClassRef,
    pub zero_division_error: PyClassRef,
    pub eof_error: PyClassRef,
    pub warning: PyClassRef,
    pub bytes_warning: PyClassRef,
    pub unicode_warning: PyClassRef,
    pub deprecation_warning: PyClassRef,
    pub pending_deprecation_warning: PyClassRef,
    pub future_warning: PyClassRef,
    pub import_warning: PyClassRef,
    pub syntax_warning: PyClassRef,
    pub resource_warning: PyClassRef,
    pub runtime_warning: PyClassRef,
    pub user_warning: PyClassRef,
    pub keyboard_interrupt: PyClassRef,
}
impl ExceptionZoo {
    /// Create every built-in exception class and wire up the inheritance
    /// hierarchy; every class ultimately derives from `BaseException`.
    pub fn new(type_type: &PyClassRef, object_type: &PyClassRef) -> Self {
        // Sorted By Hierarchy then alphabetized.
        let base_exception_type = create_type("BaseException", &type_type, &object_type);
        let exception_type = create_type("Exception", &type_type, &base_exception_type);
        // Direct subclasses of Exception.
        let arithmetic_error = create_type("ArithmeticError", &type_type, &exception_type);
        let assertion_error = create_type("AssertionError", &type_type, &exception_type);
        let attribute_error = create_type("AttributeError", &type_type, &exception_type);
        let import_error = create_type("ImportError", &type_type, &exception_type);
        let index_error = create_type("IndexError", &type_type, &exception_type);
        let key_error = create_type("KeyError", &type_type, &exception_type);
        let lookup_error = create_type("LookupError", &type_type, &exception_type);
        let name_error = create_type("NameError", &type_type, &exception_type);
        let os_error = create_type("OSError", &type_type, &exception_type);
        let runtime_error = create_type("RuntimeError", &type_type, &exception_type);
        let reference_error = create_type("ReferenceError", &type_type, &exception_type);
        let stop_iteration = create_type("StopIteration", &type_type, &exception_type);
        let syntax_error = create_type("SyntaxError", &type_type, &exception_type);
        let system_error = create_type("SystemError", &type_type, &exception_type);
        let type_error = create_type("TypeError", &type_type, &exception_type);
        let value_error = create_type("ValueError", &type_type, &exception_type);
        // Second-level subclasses.
        let overflow_error = create_type("OverflowError", &type_type, &arithmetic_error);
        let zero_division_error = create_type("ZeroDivisionError", &type_type, &arithmetic_error);
        let module_not_found_error = create_type("ModuleNotFoundError", &type_type, &import_error);
        let not_implemented_error = create_type("NotImplementedError", &type_type, &runtime_error);
        let file_not_found_error = create_type("FileNotFoundError", &type_type, &os_error);
        let permission_error = create_type("PermissionError", &type_type, &os_error);
        let file_exists_error = create_type("FileExistsError", &type_type, &os_error);
        let eof_error = create_type("EOFError", &type_type, &exception_type);
        let unicode_error = create_type("UnicodeError", &type_type, &value_error);
        let unicode_decode_error = create_type("UnicodeDecodeError", &type_type, &unicode_error);
        let unicode_encode_error = create_type("UnicodeEncodeError", &type_type, &unicode_error);
        let unicode_translate_error =
            create_type("UnicodeTranslateError", &type_type, &unicode_error);
        // Warning hierarchy.
        let warning = create_type("Warning", &type_type, &exception_type);
        let bytes_warning = create_type("BytesWarning", &type_type, &warning);
        let unicode_warning = create_type("UnicodeWarning", &type_type, &warning);
        let deprecation_warning = create_type("DeprecationWarning", &type_type, &warning);
        let pending_deprecation_warning =
            create_type("PendingDeprecationWarning", &type_type, &warning);
        let future_warning = create_type("FutureWarning", &type_type, &warning);
        let import_warning = create_type("ImportWarning", &type_type, &warning);
        let syntax_warning = create_type("SyntaxWarning", &type_type, &warning);
        let resource_warning = create_type("ResourceWarning", &type_type, &warning);
        let runtime_warning = create_type("RuntimeWarning", &type_type, &warning)
;
        let user_warning = create_type("UserWarning", &type_type, &warning);
        // KeyboardInterrupt derives from BaseException, not Exception.
        let keyboard_interrupt = create_type("KeyboardInterrupt", &type_type, &base_exception_type);
        ExceptionZoo {
            arithmetic_error,
            assertion_error,
            attribute_error,
            base_exception_type,
            exception_type,
            file_not_found_error,
            file_exists_error,
            import_error,
            index_error,
            key_error,
            lookup_error,
            module_not_found_error,
            name_error,
            not_implemented_error,
            os_error,
            overflow_error,
            permission_error,
            runtime_error,
            stop_iteration,
            syntax_error,
            system_error,
            type_error,
            value_error,
            unicode_error,
            unicode_decode_error,
            unicode_encode_error,
            unicode_translate_error,
            zero_division_error,
            eof_error,
            warning,
            bytes_warning,
            unicode_warning,
            deprecation_warning,
            pending_deprecation_warning,
            future_warning,
            import_warning,
            syntax_warning,
            resource_warning,
            runtime_warning,
            reference_error,
            user_warning,
            keyboard_interrupt,
        }
    }
}
/// `ImportError.__init__`: besides the base initialization, store the
/// `name` and `path` keyword arguments and the first positional argument
/// as `msg` (each defaulting to `None` when absent).
fn import_error_init(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
    // TODO: call super().__init__(*args) instead
    exception_init(vm, args.clone())?;
    let exc_self = args.args[0].clone();
    vm.set_attr(
        &exc_self,
        "name",
        args.kwargs
            .get("name")
            .cloned()
            .unwrap_or_else(|| vm.get_none()),
    )?;
    vm.set_attr(
        &exc_self,
        "path",
        args.kwargs
            .get("path")
            .cloned()
            .unwrap_or_else(|| vm.get_none()),
    )?;
    vm.set_attr(
        &exc_self,
        "msg",
        args.args.get(1).cloned().unwrap_or_else(|| vm.get_none()),
    )?;
    Ok(vm.get_none())
}
/// Attach the Rust-level method implementations (`__init__`, `__str__`,
/// `__repr__`) to the built-in exception classes of `context`.
pub fn init(context: &PyContext) {
    let base_exception_type = &context.exceptions.base_exception_type;
    extend_class!(context, base_exception_type, {
        "__init__" => context.new_rustfunc(exception_init)
    });
    let exception_type = &context.exceptions.exception_type;
    extend_class!(context, exception_type, {
        "__str__" => context.new_rustfunc(exception_str),
        "__repr__" => context.new_rustfunc(exception_repr),
    });
    let import_error_type = &context.exceptions.import_error;
    extend_class!(context, import_error_type, {
        "__init__" => context.new_rustfunc(import_error_init)
    });
}
| 37.548872 | 141 | 0.620077 |
ef122a39df613848b829685b87aff8370efc8d76 | 730 | /*
* Nomad
*
* Nomad OpenApi specification
*
* The version of the OpenAPI document: 0.11.0
*
* Generated by: https://openapi-generator.tech
*/
/// Request payload for setting a Nomad job's stability; fields serialize
/// under the PascalCase names declared in the `serde(rename)` attributes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JobStabilityRequest {
    #[serde(rename = "JobID")]
    pub job_id: String,
    // Omitted from the serialized payload when `None`.
    #[serde(rename = "JobVersion", skip_serializing_if = "Option::is_none")]
    pub job_version: Option<i32>,
    // Omitted from the serialized payload when `None`.
    #[serde(rename = "Stable", skip_serializing_if = "Option::is_none")]
    pub stable: Option<bool>,
}
impl JobStabilityRequest {
    /// Build a request for `job_id`, leaving the optional `job_version`
    /// and `stable` fields unset.
    pub fn new(job_id: String) -> JobStabilityRequest {
        JobStabilityRequest {
            job_version: None,
            stable: None,
            job_id,
        }
    }
}
| 20.857143 | 76 | 0.630137 |
eb903e2cfd58acac36352959750edbd020103860 | 12,579 | use js_sys::*;
use std::iter::FromIterator;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
use wasm_bindgen_test::*;
// Build a `js_sys::Array` from the given expressions, converting each
// element with `JsValue::from`.
macro_rules! js_array {
    ($($e:expr),*) => ({
        let __x = Array::new();
        $(__x.push(&JsValue::from($e));)*
        __x
    })
}
// Build a Rust `Vec<JsValue>` from the given expressions; counterpart of
// `js_array!` used for expected values in assertions.
macro_rules! array {
    ($($e:expr),*) => ({
        let mut __x = Vec::new();
        $(__x.push(JsValue::from($e));)*
        __x
    })
}
/// Copy the elements of a JS array into a Rust `Vec` so tests can compare
/// them with ordinary `assert_eq!`.
fn to_rust(arr: &Array) -> Vec<JsValue> {
    let mut result = Vec::with_capacity(arr.length() as usize);
    arr.for_each(&mut |x, _, _| result.push(x));
    result
}
#[wasm_bindgen_test]
fn from_iter() {
assert_eq!(
to_rust(&vec![
JsValue::from("a"),
JsValue::from("b"),
JsValue::from("c"),
].into_iter().collect()),
vec!["a", "b", "c"],
);
assert_eq!(
to_rust(&vec![
JsValue::from("a"),
JsValue::from("b"),
JsValue::from("c"),
].iter().collect()),
vec!["a", "b", "c"],
);
let array = js_array![1u32, 2u32, 3u32];
assert_eq!(
to_rust(&vec![
array.clone(),
].into_iter().collect()),
vec![JsValue::from(array.clone())],
);
assert_eq!(
to_rust(&vec![
array.clone(),
].iter().collect()),
vec![JsValue::from(array)],
);
assert_eq!(
to_rust(&vec![
5,
10,
20,
].into_iter().map(JsValue::from).collect()),
vec![5, 10, 20],
);
assert_eq!(
to_rust(&Array::from_iter(&[
JsValue::from("a"),
JsValue::from("b"),
JsValue::from("c"),
])),
vec!["a", "b", "c"],
);
let v = vec![
"a",
"b",
"c",
];
assert_eq!(
to_rust(&Array::from_iter(v.into_iter().map(|s| JsValue::from(s)))),
vec!["a", "b", "c"],
);
}
#[wasm_bindgen_test]
fn new_with_length() {
let array = Array::new_with_length(5);
assert_eq!(array.length(), 5);
assert_eq!(array.get(4), JsValue::undefined());
array.set(4, JsValue::from("a"));
assert_eq!(array.get(4), "a");
assert_eq!(array.length(), 5);
}
#[wasm_bindgen_test]
fn get() {
let array = js_array!["a", "c", "x", "n"];
assert_eq!(array.length(), 4);
assert_eq!(array.get(0), "a");
assert_eq!(array.get(3), "n");
assert_eq!(array.get(4), JsValue::undefined());
}
#[wasm_bindgen_test]
fn set() {
let array = js_array!["a", "c", "x", "n"];
assert_eq!(array.length(), 4);
assert_eq!(array.get(0), "a");
array.set(0, JsValue::from("b"));
assert_eq!(array.get(0), "b");
assert_eq!(array.get(4), JsValue::undefined());
assert_eq!(array.length(), 4);
array.set(4, JsValue::from("d"));
assert_eq!(array.length(), 5);
assert_eq!(array.get(4), "d");
assert_eq!(array.get(10), JsValue::undefined());
assert_eq!(array.length(), 5);
array.set(10, JsValue::from("z"));
assert_eq!(array.length(), 11);
assert_eq!(array.get(10), "z");
assert_eq!(array.get(9), JsValue::undefined());
}
#[wasm_bindgen_test]
fn delete() {
let array = js_array!["a", "c", "x", "n"];
assert_eq!(array.length(), 4);
assert_eq!(array.get(0), "a");
array.delete(0);
assert_eq!(array.get(0), JsValue::undefined());
}
#[wasm_bindgen_test]
fn filter() {
let array = js_array!["a", "c", "x", "n"];
assert!(array.filter(&mut |x, _, _| x.as_f64().is_some()).length() == 0);
let array = js_array![1, 2, 3, 4];
assert_eq!(
array.filter(&mut |x, _, _| x.as_f64().is_some()).length(),
4
);
let array = js_array!["a", 1, "b", 2];
assert_eq!(
array.filter(&mut |x, _, _| x.as_f64().is_some()).length(),
2
);
}
#[wasm_bindgen_test]
fn flat() {
let array = js_array![
js_array!["a", "b", "c"],
"d",
js_array!["e", js_array!["f", "g"]]
];
assert_eq!(
to_rust(&array.flat(1).slice(0, 5)),
vec!["a", "b", "c", "d", "e"]
);
assert_eq!(array.flat(1).length(), 6);
assert_eq!(
to_rust(&array.flat(2)),
vec!["a", "b", "c", "d", "e", "f", "g"]
);
}
#[wasm_bindgen_test]
fn flat_map() {
let array = js_array![1, 2, 3, 1];
assert_eq!(
to_rust(
&array.flat_map(&mut |val, _, _| match val.as_f64().map(|v| v as i32) {
Some(1) => vec![JsString::from("x").into(), JsString::from("x").into()],
Some(2) => vec![],
Some(3) => vec![JsString::from("z").into()],
_ => panic!("Unexpected conversion"),
})
),
vec!["x", "x", "z", "x", "x"]
);
}
#[wasm_bindgen_test]
fn index_of() {
let chars = js_array!["a", "c", "x", "n"];
assert_eq!(chars.index_of(&"x".into(), 0), 2);
assert_eq!(chars.index_of(&"z".into(), 0), -1);
assert_eq!(chars.index_of(&"x".into(), -3), 2);
assert_eq!(chars.index_of(&"z".into(), -2), -1);
}
#[wasm_bindgen_test]
fn is_array() {
assert!(Array::is_array(&Array::new().into()));
assert!(Array::is_array(&js_array![1].into()));
assert!(!Array::is_array(&JsValue::null()));
assert!(!Array::is_array(&JsValue::undefined()));
assert!(!Array::is_array(&10.into()));
assert!(!Array::is_array(&"x".into()));
assert!(!Array::is_array(&true.into()));
assert!(!Array::is_array(&false.into()));
}
#[wasm_bindgen_test]
fn sort() {
let array = js_array![3, 1, 6, 2];
let sorted = array.sort();
assert_eq!(to_rust(&sorted), array![1, 2, 3, 6]);
}
#[wasm_bindgen_test]
fn some() {
let array = js_array!["z", 1, "y", 2];
assert!(array.some(&mut |e| e == JsValue::from(2)));
assert!(array.some(&mut |e| e == JsValue::from("y")));
assert!(!array.some(&mut |e| e == JsValue::from("nope")));
}
#[wasm_bindgen_test]
fn last_index_of() {
let characters = js_array!["a", "x", "c", "x", "n"];
assert_eq!(characters.last_index_of(&"x".into(), 5), 3);
assert_eq!(characters.last_index_of(&"z".into(), 5), -1);
assert_eq!(characters.last_index_of(&"x".into(), 2), 1);
assert_eq!(characters.last_index_of(&"x".into(), 0), -1);
}
#[wasm_bindgen_test]
fn join() {
let characters = js_array!["a", "c", "x", "n"];
assert_eq!(String::from(characters.join(", ")), "a, c, x, n");
assert_eq!(String::from(characters.join("/")), "a/c/x/n");
}
#[wasm_bindgen_test]
fn slice() {
let characters = js_array!["a", "c", "x", "n", 1, "8"];
let subset = characters.slice(1, 3);
assert_eq!(to_rust(&subset), array!["c", "x"]);
}
#[wasm_bindgen_test]
fn splice() {
let characters = js_array!["a", "c", "x", "n", 1, "8"];
let removed = characters.splice(1, 3, &"b".into());
assert_eq!(to_rust(&removed), array!["c", "x", "n"]);
assert_eq!(to_rust(&characters), array!["a", "b", 1, "8"]);
}
#[wasm_bindgen_test]
fn fill() {
let characters = js_array!["a", "c", "x", "n", 1, "8"];
let subset = characters.fill(&0.into(), 0, 3);
assert_eq!(to_rust(&subset), array![0, 0, 0, "n", 1, "8"]);
}
#[wasm_bindgen_test]
fn copy_within() {
let characters = js_array![8, 5, 4, 3, 1, 2];
characters.copy_within(1, 4, 5);
assert_eq!(to_rust(&characters)[1], JsValue::from(1));
// if negatives were used
characters.copy_within(-1, -3, -2);
assert_eq!(to_rust(&characters)[5], JsValue::from(3));
}
#[wasm_bindgen_test]
fn of() {
let a = JsValue::from("a");
let b = JsValue::from("b");
let c = JsValue::from("c");
let arr = Array::of3(&a, &b, &c);
let vec = to_rust(&arr);
assert_eq!(vec.len(), 3);
assert_eq!(vec[0], a);
assert_eq!(vec[1], b);
assert_eq!(vec[2], c);
}
#[wasm_bindgen_test]
fn pop() {
let characters = js_array![8, 5, 4, 3, 1, 2];
let item = characters.pop();
assert_eq!(item, JsValue::from(2));
assert_eq!(characters.length(), 5);
}
#[wasm_bindgen_test]
fn push() {
let characters = js_array![8, 5, 4, 3, 1, 2];
let length = characters.push(&"a".into());
assert_eq!(length, 7);
assert_eq!(to_rust(&characters)[6], "a");
}
#[wasm_bindgen_test]
fn reverse() {
let characters = js_array![8, 5, 4, 3, 1, 2];
let reversed = characters.reverse();
assert_eq!(to_rust(&reversed), array![2, 1, 3, 4, 5, 8]);
}
/// `Array.prototype.shift` removes and returns the first element.
#[wasm_bindgen_test]
fn shift() {
    let characters = js_array![8, 5, 4, 3, 1, 2];
    // Renamed from `shiftedItem`: Rust locals use snake_case (the old name
    // triggered the compiler's non_snake_case warning).
    let shifted_item = characters.shift();
    assert_eq!(shifted_item, 8);
    assert_eq!(characters.length(), 5);
}
#[wasm_bindgen_test]
fn unshift() {
let characters = js_array![8, 5, 4, 3, 1, 2];
let length = characters.unshift(&"abba".into());
assert_eq!(length, 7);
assert_eq!(to_rust(&characters)[0], "abba");
}
#[wasm_bindgen_test]
fn to_string() {
let characters = js_array![8, 5, 4, 3, 1, 2];
assert_eq!(String::from(characters.to_string()), "8,5,4,3,1,2");
}
#[wasm_bindgen_test]
fn includes() {
let characters = js_array![8, 5, 4, 3, 1, 2];
assert!(characters.includes(&2.into(), 0));
assert!(!characters.includes(&9.into(), 0));
assert!(!characters.includes(&3.into(), 4));
}
#[wasm_bindgen_test]
fn concat() {
let arr1 = js_array![1, 2, 3];
let arr2 = js_array![4, 5, 6];
let new_array = arr1.concat(&arr2);
assert_eq!(to_rust(&new_array), array![1, 2, 3, 4, 5, 6]);
}
#[wasm_bindgen_test]
fn length() {
let characters = js_array![8, 5, 4, 3, 1, 2];
assert_eq!(characters.length(), 6);
assert_eq!(Array::new().length(), 0);
}
#[wasm_bindgen_test]
fn every() {
let even = js_array![2, 4, 6, 8];
assert!(even.every(&mut |x, _, _| x.as_f64().unwrap() % 2.0 == 0.0));
let odd = js_array![1, 3, 5, 7];
assert!(!odd.every(&mut |x, _, _| x.as_f64().unwrap() % 2.0 == 0.0));
let mixed = js_array![2, 3, 4, 5];
assert!(!mixed.every(&mut |x, _, _| x.as_f64().unwrap() % 2.0 == 0.0));
}
#[wasm_bindgen_test]
fn find() {
let even = js_array![2, 4, 6, 8];
assert_eq!(
even.find(&mut |x, _, _| x.as_f64().unwrap() % 2.0 == 0.0),
2
);
let odd = js_array![1, 3, 5, 7];
assert_eq!(
odd.find(&mut |x, _, _| x.as_f64().unwrap() % 2.0 == 0.0),
JsValue::undefined(),
);
let mixed = js_array![3, 5, 7, 10];
assert_eq!(
mixed.find(&mut |x, _, _| x.as_f64().unwrap() % 2.0 == 0.0),
10
);
}
#[wasm_bindgen_test]
fn map() {
let numbers = js_array![1, 4, 9];
let sqrt = numbers.map(&mut |x, _, _| x.as_f64().unwrap().sqrt().into());
assert_eq!(to_rust(&sqrt), array![1, 2, 3]);
}
#[wasm_bindgen_test]
fn reduce() {
let arr = js_array!["0", "1", "2", "3", "4"].reduce(
&mut |ac, cr, _, _| {
format!("{}{}", &ac.as_string().unwrap(), &cr.as_string().unwrap()).into()
},
&"".into(),
);
assert_eq!(arr, "01234");
}
#[wasm_bindgen_test]
fn reduce_right() {
let arr = js_array!["0", "1", "2", "3", "4"].reduce_right(
&mut |ac, cr, _, _| {
format!("{}{}", &ac.as_string().unwrap(), &cr.as_string().unwrap()).into()
},
&"".into(),
);
assert_eq!(arr, "43210");
}
#[wasm_bindgen_test]
fn find_index() {
let even = js_array![2, 4, 6, 8];
assert_eq!(
even.find_index(&mut |e, _, _| e.as_f64().unwrap() % 2. == 0.),
0
);
let odd = js_array![1, 3, 5, 7];
assert_eq!(
odd.find_index(&mut |e, _, _| e.as_f64().unwrap() % 2. == 0.),
-1
);
let mixed = js_array![3, 5, 7, 10];
assert_eq!(
mixed.find_index(&mut |e, _, _| e.as_f64().unwrap() % 2. == 0.),
3
);
}
#[wasm_bindgen_test]
fn to_locale_string() {
let output = js_array![1, "a", Date::new(&"21 Dec 1997 14:12:00 UTC".into())]
.to_locale_string(&"en".into(), &JsValue::undefined());
assert!(String::from(output).len() > 0);
}
#[wasm_bindgen_test]
fn for_each() {
fn sum_indices_of_evens(array: &Array) -> u32 {
let mut res = 0;
array.for_each(&mut |elem: JsValue, i, _| match elem.as_f64() {
Some(val) if val % 2. == 0. => res += i,
_ => {}
});
res
}
assert_eq!(sum_indices_of_evens(&js_array![2, 4, 6, 8]), 0 + 1 + 2 + 3);
assert_eq!(sum_indices_of_evens(&js_array![1, 3, 5, 7]), 0);
assert_eq!(sum_indices_of_evens(&js_array![3, 5, 7, 10]), 3);
}
#[wasm_bindgen_test]
fn array_inheritance() {
let array = Array::new();
assert!(array.is_instance_of::<Array>());
assert!(array.is_instance_of::<Object>());
let _: &Object = array.as_ref();
}
| 26.151767 | 88 | 0.530885 |
bbd980939d635c26f7190d766547adb66da60791 | 18,393 | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use jujutsu_lib::backend::CommitId;
use jujutsu_lib::commit::Commit;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::index::IndexRef;
use jujutsu_lib::repo::ReadonlyRepo;
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::testutils;
use jujutsu_lib::testutils::{create_random_commit, CommitGraphBuilder};
use test_case::test_case;
/// Build a random commit whose single parent is `commit`; the caller must
/// still write the returned builder to a repo.
#[must_use]
fn child_commit(settings: &UserSettings, repo: &ReadonlyRepo, commit: &Commit) -> CommitBuilder {
    testutils::create_random_commit(settings, repo).set_parents(vec![commit.id().clone()])
}
// Helper just to reduce line wrapping
/// Return the generation number recorded for `commit_id` in `index`.
/// Panics if the commit is not present, which is fine for these tests.
fn generation_number<'a>(index: impl Into<IndexRef<'a>>, commit_id: &CommitId) -> u32 {
    index
        .into()
        .entry_by_id(commit_id)
        .unwrap()
        .generation_number()
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_empty_repo(use_git: bool) {
let settings = testutils::user_settings();
let test_workspace = testutils::init_repo(&settings, use_git);
let repo = &test_workspace.repo;
let index = repo.index();
// There should be the root commit and the working copy commit
assert_eq!(index.num_commits(), 2);
// Check the generation numbers of the root and the working copy
assert_eq!(
generation_number(index.as_ref(), repo.store().root_commit_id()),
0
);
assert_eq!(generation_number(index.as_ref(), repo.view().checkout()), 1);
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_standard_cases(use_git: bool) {
let settings = testutils::user_settings();
let test_workspace = testutils::init_repo(&settings, use_git);
let repo = &test_workspace.repo;
// o H
// o | G
// o | F
// |\|
// | o E
// | o D
// | o C
// o | B
// |/
// o A
// | o working copy
// |/
// o root
let root_commit_id = repo.store().root_commit_id();
let checkout_id = repo.view().checkout().clone();
let mut tx = repo.start_transaction("test");
let mut graph_builder = CommitGraphBuilder::new(&settings, tx.mut_repo());
let commit_a = graph_builder.initial_commit();
let commit_b = graph_builder.commit_with_parents(&[&commit_a]);
let commit_c = graph_builder.commit_with_parents(&[&commit_a]);
let commit_d = graph_builder.commit_with_parents(&[&commit_c]);
let commit_e = graph_builder.commit_with_parents(&[&commit_d]);
let commit_f = graph_builder.commit_with_parents(&[&commit_b, &commit_e]);
let commit_g = graph_builder.commit_with_parents(&[&commit_f]);
let commit_h = graph_builder.commit_with_parents(&[&commit_e]);
let repo = tx.commit();
let index = repo.index();
// There should be the root commit and the working copy commit, plus
// 8 more
assert_eq!(index.num_commits(), 2 + 8);
let stats = index.stats();
assert_eq!(stats.num_commits, 2 + 8);
assert_eq!(stats.num_merges, 1);
assert_eq!(stats.max_generation_number, 6);
assert_eq!(generation_number(index.as_ref(), root_commit_id), 0);
assert_eq!(generation_number(index.as_ref(), &checkout_id), 1);
assert_eq!(generation_number(index.as_ref(), commit_a.id()), 1);
assert_eq!(generation_number(index.as_ref(), commit_b.id()), 2);
assert_eq!(generation_number(index.as_ref(), commit_c.id()), 2);
assert_eq!(generation_number(index.as_ref(), commit_d.id()), 3);
assert_eq!(generation_number(index.as_ref(), commit_e.id()), 4);
assert_eq!(generation_number(index.as_ref(), commit_f.id()), 5);
assert_eq!(generation_number(index.as_ref(), commit_g.id()), 6);
assert_eq!(generation_number(index.as_ref(), commit_h.id()), 5);
assert!(index.is_ancestor(root_commit_id, commit_a.id()));
assert!(!index.is_ancestor(commit_a.id(), root_commit_id));
assert!(index.is_ancestor(root_commit_id, commit_b.id()));
assert!(!index.is_ancestor(commit_b.id(), root_commit_id));
assert!(!index.is_ancestor(commit_b.id(), commit_c.id()));
assert!(index.is_ancestor(commit_a.id(), commit_b.id()));
assert!(index.is_ancestor(commit_a.id(), commit_e.id()));
assert!(index.is_ancestor(commit_a.id(), commit_f.id()));
assert!(index.is_ancestor(commit_a.id(), commit_g.id()));
assert!(index.is_ancestor(commit_a.id(), commit_h.id()));
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_criss_cross(use_git: bool) {
let settings = testutils::user_settings();
let test_workspace = testutils::init_repo(&settings, use_git);
let repo = &test_workspace.repo;
let num_generations = 50;
// Create a long chain of criss-crossed merges. If they were traversed without
// keeping track of visited nodes, it would be 2^50 visits, so if this test
// finishes in reasonable time, we know that we don't do a naive traversal.
let mut tx = repo.start_transaction("test");
let mut graph_builder = CommitGraphBuilder::new(&settings, tx.mut_repo());
let mut left_commits = vec![graph_builder.initial_commit()];
let mut right_commits = vec![graph_builder.initial_commit()];
for gen in 1..num_generations {
let new_left =
graph_builder.commit_with_parents(&[&left_commits[gen - 1], &right_commits[gen - 1]]);
let new_right =
graph_builder.commit_with_parents(&[&left_commits[gen - 1], &right_commits[gen - 1]]);
left_commits.push(new_left);
right_commits.push(new_right);
}
let repo = tx.commit();
let index = repo.index();
// There should the root commit and the working copy commit, plus 2 for each
// generation
assert_eq!(index.num_commits(), 2 + 2 * (num_generations as u32));
let stats = index.stats();
assert_eq!(stats.num_commits, 2 + 2 * (num_generations as u32));
// The first generations are not merges
assert_eq!(stats.num_merges, 2 * (num_generations as u32 - 1));
assert_eq!(stats.max_generation_number, num_generations as u32);
// Check generation numbers
for gen in 0..num_generations {
assert_eq!(
generation_number(index.as_ref(), left_commits[gen].id()),
(gen as u32) + 1
);
assert_eq!(
generation_number(index.as_ref(), right_commits[gen].id()),
(gen as u32) + 1
);
}
// The left and right commits of the same generation should not be ancestors of
// each other
for gen in 0..num_generations {
assert!(!index.is_ancestor(left_commits[gen].id(), right_commits[gen].id()));
assert!(!index.is_ancestor(right_commits[gen].id(), left_commits[gen].id()));
}
// Both sides of earlier generations should be ancestors. Check a few different
// earlier generations.
for gen in 1..num_generations {
for ancestor_side in &[&left_commits, &right_commits] {
for descendant_side in &[&left_commits, &right_commits] {
assert!(index.is_ancestor(ancestor_side[0].id(), descendant_side[gen].id()));
assert!(index.is_ancestor(ancestor_side[gen - 1].id(), descendant_side[gen].id()));
assert!(index.is_ancestor(ancestor_side[gen / 2].id(), descendant_side[gen].id()));
}
}
}
assert_eq!(
index
.walk_revs(&[left_commits[num_generations - 1].id().clone()], &[])
.count(),
2 * num_generations
);
assert_eq!(
index
.walk_revs(&[right_commits[num_generations - 1].id().clone()], &[])
.count(),
2 * num_generations
);
assert_eq!(
index
.walk_revs(
&[left_commits[num_generations - 1].id().clone()],
&[left_commits[num_generations - 2].id().clone()]
)
.count(),
2
);
assert_eq!(
index
.walk_revs(
&[right_commits[num_generations - 1].id().clone()],
&[right_commits[num_generations - 2].id().clone()]
)
.count(),
2
);
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_previous_operations(use_git: bool) {
    // Test that commits visible only in previous operations are indexed.
    let settings = testutils::user_settings();
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    // Remove commit B and C in one operation and make sure they're still
    // visible in the index after that operation.
    // o C
    // o B
    // o A
    // | o working copy
    // |/
    // o root
    let mut tx = repo.start_transaction("test");
    let mut graph_builder = CommitGraphBuilder::new(&settings, tx.mut_repo());
    let commit_a = graph_builder.initial_commit();
    let commit_b = graph_builder.commit_with_parents(&[&commit_a]);
    let commit_c = graph_builder.commit_with_parents(&[&commit_b]);
    let repo = tx.commit();
    let mut tx = repo.start_transaction("test");
    tx.mut_repo().remove_head(commit_c.id());
    let repo = tx.commit();
    // Delete index from disk so that loading the repo below has to rebuild
    // it from the operation history.
    let index_operations_dir = repo.repo_path().join("index").join("operations");
    assert!(index_operations_dir.is_dir());
    std::fs::remove_dir_all(&index_operations_dir).unwrap();
    std::fs::create_dir(&index_operations_dir).unwrap();
    let repo = ReadonlyRepo::load(&settings, repo.repo_path().clone());
    let index = repo.index();
    // There should be the root commit and the working copy commit, plus
    // 3 more (A, B, C — B and C are only reachable via the earlier
    // operation but must still be indexed).
    assert_eq!(index.num_commits(), 2 + 3);
    let stats = index.stats();
    assert_eq!(stats.num_commits, 2 + 3);
    assert_eq!(stats.num_merges, 0);
    assert_eq!(stats.max_generation_number, 3);
    // The root commit is generation 0, so the linear chain A..C is 1..3.
    assert_eq!(generation_number(index.as_ref(), commit_a.id()), 1);
    assert_eq!(generation_number(index.as_ref(), commit_b.id()), 2);
    assert_eq!(generation_number(index.as_ref(), commit_c.id()), 3);
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental(use_git: bool) {
    let settings = testutils::user_settings();
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    // Create A in one operation, then B and C in another. Check that the index is
    // valid after.
    // o C
    // o B
    // o A
    // | o working copy
    // |/
    // o root
    let root_commit = repo.store().root_commit();
    let mut tx = repo.start_transaction("test");
    let commit_a = child_commit(&settings, repo, &root_commit).write_to_repo(tx.mut_repo());
    let repo = tx.commit();
    let index = repo.index();
    // There should be the root commit and the working copy commit, plus
    // 1 more
    assert_eq!(index.num_commits(), 2 + 1);
    let mut tx = repo.start_transaction("test");
    let commit_b = child_commit(&settings, &repo, &commit_a).write_to_repo(tx.mut_repo());
    let commit_c = child_commit(&settings, &repo, &commit_b).write_to_repo(tx.mut_repo());
    tx.commit();
    // Reload the repo so the index gets built incrementally on top of the
    // state written by the first operation.
    let repo = ReadonlyRepo::load(&settings, repo.repo_path().clone());
    let index = repo.index();
    // There should be the root commit and the working copy commit, plus
    // 3 more
    assert_eq!(index.num_commits(), 2 + 3);
    let stats = index.stats();
    assert_eq!(stats.num_commits, 2 + 3);
    assert_eq!(stats.num_merges, 0);
    assert_eq!(stats.max_generation_number, 3);
    // The incremental addition should have been merged into a single level.
    assert_eq!(stats.levels.len(), 1);
    assert_eq!(stats.levels[0].num_commits, 5);
    assert_eq!(generation_number(index.as_ref(), root_commit.id()), 0);
    assert_eq!(generation_number(index.as_ref(), commit_a.id()), 1);
    assert_eq!(generation_number(index.as_ref(), commit_b.id()), 2);
    assert_eq!(generation_number(index.as_ref(), commit_c.id()), 3);
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental_empty_transaction(use_git: bool) {
    let settings = testutils::user_settings();
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    // Create A in one operation, then just an empty transaction. Check that the
    // index is valid after.
    // o A
    // | o working copy
    // |/
    // o root
    let root_commit = repo.store().root_commit();
    let mut tx = repo.start_transaction("test");
    let commit_a = child_commit(&settings, repo, &root_commit).write_to_repo(tx.mut_repo());
    let repo = tx.commit();
    let index = repo.index();
    // There should be the root commit and the working copy commit, plus
    // 1 more
    assert_eq!(index.num_commits(), 2 + 1);
    // An operation that adds no commits.
    repo.start_transaction("test").commit();
    let repo = ReadonlyRepo::load(&settings, repo.repo_path().clone());
    let index = repo.index();
    // There should be the root commit and the working copy commit, plus
    // 1 more (the empty transaction must not have added anything)
    assert_eq!(index.num_commits(), 2 + 1);
    let stats = index.stats();
    assert_eq!(stats.num_commits, 2 + 1);
    assert_eq!(stats.num_merges, 0);
    assert_eq!(stats.max_generation_number, 1);
    // The empty operation must not have created an extra index level.
    assert_eq!(stats.levels.len(), 1);
    assert_eq!(stats.levels[0].num_commits, 3);
    assert_eq!(generation_number(index.as_ref(), root_commit.id()), 0);
    assert_eq!(generation_number(index.as_ref(), commit_a.id()), 1);
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental_already_indexed(use_git: bool) {
    // Tests that trying to add a commit that's already been added is a no-op.
    let settings = testutils::user_settings();
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    // Create A in one operation, then try to add it again in a new transaction.
    // o A
    // | o working copy
    // |/
    // o root
    let root_commit = repo.store().root_commit();
    let mut tx = repo.start_transaction("test");
    let commit_a = child_commit(&settings, repo, &root_commit).write_to_repo(tx.mut_repo());
    let repo = tx.commit();
    assert!(repo.index().has_id(commit_a.id()));
    assert_eq!(repo.index().num_commits(), 2 + 1);
    let mut tx = repo.start_transaction("test");
    let mut_repo = tx.mut_repo();
    mut_repo.add_head(&commit_a);
    // Re-adding an already-indexed head must not grow the index.
    assert_eq!(mut_repo.index().num_commits(), 2 + 1);
}
#[must_use]
fn create_n_commits(
    settings: &UserSettings,
    repo: &Arc<ReadonlyRepo>,
    num_commits: i32,
) -> Arc<ReadonlyRepo> {
    // Write `num_commits` random commits in a single transaction and return
    // the repo as of the resulting operation.
    let mut transaction = repo.start_transaction("test");
    (0..num_commits).for_each(|_| {
        create_random_commit(settings, repo).write_to_repo(transaction.mut_repo());
    });
    transaction.commit()
}
// Number of commits in each index segment level, in the order reported by
// the index stats.
fn commits_by_level(repo: &ReadonlyRepo) -> Vec<u32> {
    repo.index()
        .stats()
        .levels
        .into_iter()
        .map(|level| level.num_commits)
        .collect()
}
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental_squashed(use_git: bool) {
    // Tests how incremental index segments get squashed together depending
    // on the relative sizes of consecutive additions.
    let settings = testutils::user_settings();
    // A single additional operation gets squashed into the initial level.
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 1);
    assert_eq!(commits_by_level(&repo), vec![3]);
    let repo = create_n_commits(&settings, &repo, 1);
    assert_eq!(commits_by_level(&repo), vec![3, 1]);
    // Two commits in one operation still produce a single level.
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 2);
    assert_eq!(commits_by_level(&repo), vec![4]);
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 100);
    assert_eq!(commits_by_level(&repo), vec![102]);
    // Increasing batch sizes repeatedly squash into one level.
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 2);
    let repo = create_n_commits(&settings, &repo, 4);
    let repo = create_n_commits(&settings, &repo, 8);
    let repo = create_n_commits(&settings, &repo, 16);
    let repo = create_n_commits(&settings, &repo, 32);
    assert_eq!(commits_by_level(&repo), vec![64]);
    // Decreasing batch sizes leave a small second level behind.
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 32);
    let repo = create_n_commits(&settings, &repo, 16);
    let repo = create_n_commits(&settings, &repo, 8);
    let repo = create_n_commits(&settings, &repo, 4);
    let repo = create_n_commits(&settings, &repo, 2);
    assert_eq!(commits_by_level(&repo), vec![58, 6]);
    // Strictly halving batches never trigger a squash, so each addition
    // stays in its own level.
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 29);
    let repo = create_n_commits(&settings, &repo, 15);
    let repo = create_n_commits(&settings, &repo, 7);
    let repo = create_n_commits(&settings, &repo, 3);
    let repo = create_n_commits(&settings, &repo, 1);
    assert_eq!(commits_by_level(&repo), vec![31, 15, 7, 3, 1]);
    // Equal-sized batches collapse down to two levels.
    let test_workspace = testutils::init_repo(&settings, use_git);
    let repo = &test_workspace.repo;
    let repo = create_n_commits(&settings, repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    let repo = create_n_commits(&settings, &repo, 10);
    assert_eq!(commits_by_level(&repo), vec![72, 20]);
}
| 37.923711 | 99 | 0.662589 |
8f9324948a48322e1a4bf1811547c937b1f237a6 | 18,451 | // Copyright 2016 Matthew Collins
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate steven_resources as internal;
use std::thread;
use std::path;
use std::io;
use std_or_web::fs;
use std::sync::mpsc;
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use serde_json;
#[cfg(not(target_arch = "wasm32"))]
use reqwest;
use zip;
use crate::types::hash::FNVHash;
use crate::ui;
/// Minecraft version whose vanilla resources are extracted from the client jar.
const RESOURCES_VERSION: &str = "1.12.2";
/// Mojang download URL for the vanilla client jar.
const VANILLA_CLIENT_URL: &str = "https://launcher.mojang.com/v1/objects/0f275bc1547d01fa5f56ba34bdc87d981ee12daf/client.jar";
/// Version of the asset index to download.
const ASSET_VERSION: &str = "1.12";
/// URL of the asset index JSON that lists every hash-addressed asset object.
const ASSET_INDEX_URL: &str = "https://launchermeta.mojang.com/mc/assets/1.12/67e29e024e664064c1f04c728604f83c24cbc218/1.12.json";
/// A source of resource files. Implementations must be `Sync + Send`
/// because packs are consulted from multiple threads.
pub trait Pack: Sync + Send {
    /// Opens the named resource, or returns `None` if this pack does not
    /// contain it.
    fn open(&self, name: &str) -> Option<Box<io::Read>>;
}
/// Owns the ordered list of resource packs plus the state of the
/// background vanilla/asset downloads.
pub struct Manager {
    // Later entries take priority; lookups scan this list in reverse.
    packs: Vec<Box<Pack>>,
    // Bumped whenever a pack is added/removed so callers can detect change.
    version: usize,
    // Signalled when the vanilla client jar download/unpack finishes.
    vanilla_chan: Option<mpsc::Receiver<bool>>,
    // Signalled when the asset index download finishes.
    vanilla_assets_chan: Option<mpsc::Receiver<bool>>,
    // Download/unpack progress shared with the worker threads.
    vanilla_progress: Arc<Mutex<Progress>>,
}
/// UI-side state for the download progress bars, kept separate from
/// `Manager` so the UI owner can hold it independently.
pub struct ManagerUI {
    progress_ui: Vec<ProgressUI>,
    num_tasks: isize,
}
/// One on-screen progress bar for a background task.
struct ProgressUI {
    task_name: String,
    task_file: String,
    // Target vertical position of the bar (animated towards each frame).
    position: f64,
    // True once the bar is animating out and will be removed.
    closing: bool,
    // Fraction complete in [0, 1].
    progress: f64,
    background: ui::ImageRef,
    progress_bar: ui::ImageRef,
}
/// Shared progress state for all background tasks.
struct Progress {
    tasks: Vec<Task>,
}
/// A single download/unpack task; considered done when `progress >= total`.
struct Task {
    task_name: String,
    task_file: String,
    total: u64,
    progress: u64,
}
// SAFETY(review): `mpsc::Receiver` is not `Sync`, so this asserts more than
// the compiler can prove — presumably the receivers are only ever polled
// from one thread (via `tick`). TODO: confirm, or restructure to avoid the
// unsafe impl.
unsafe impl Sync for Manager {}
impl Manager {
    /// Creates a new resource `Manager` together with its progress-bar UI
    /// state.
    ///
    /// The embedded internal pack is always registered; on non-wasm targets
    /// the vanilla client jar and the asset index downloads are started on
    /// background threads.
    pub fn new() -> (Manager, ManagerUI) {
        let mut m = Manager {
            packs: Vec::new(),
            version: 0,
            vanilla_chan: None,
            vanilla_assets_chan: None,
            vanilla_progress: Arc::new(Mutex::new(Progress {
                tasks: vec![],
            })),
        };
        m.add_pack(Box::new(InternalPack));
        #[cfg(not(target_arch = "wasm32"))]
        {
            m.download_vanilla();
            m.download_assets();
        }
        (m, ManagerUI { progress_ui: vec!{}, num_tasks: 0 })
    }
    /// Returns the 'version' of the manager. The version is
    /// increased every time a pack is added or removed, so callers can
    /// cheaply detect that cached resources need to be reloaded.
    pub fn version(&self) -> usize {
        self.version
    }
pub fn open(&self, plugin: &str, name: &str) -> Option<Box<io::Read>> {
let path = format!("assets/{}/{}", plugin, name);
for pack in self.packs.iter().rev() {
if let Some(val) = pack.open(&path) {
return Some(val);
}
}
None
}
pub fn open_all(&self, plugin: &str, name: &str) -> Vec<Box<io::Read>> {
let mut ret = Vec::new();
let path = format!("assets/{}/{}", plugin, name);
for pack in self.packs.iter().rev() {
if let Some(val) = pack.open(&path) {
ret.push(val);
}
}
ret
}
    /// Per-frame update: finishes any completed background downloads and
    /// animates the download progress bars.
    ///
    /// `delta` is the frame time; it is clamped to at most 5.0 so that a
    /// long stall cannot make the animations jump wildly.
    pub fn tick(&mut self, mui: &mut ManagerUI, ui_container: &mut ui::Container, delta: f64) {
        let delta = delta.min(5.0);
        // Check to see if the download of vanilla has completed
        // (if it was started)
        let mut done = false;
        if let Some(ref recv) = self.vanilla_chan {
            if let Ok(_) = recv.try_recv() {
                done = true;
            }
        }
        if done {
            self.vanilla_chan = None;
            self.load_vanilla();
        }
        // Same check for the asset-index download.
        let mut done = false;
        if let Some(ref recv) = self.vanilla_assets_chan {
            if let Ok(_) = recv.try_recv() {
                done = true;
            }
        }
        if done {
            self.vanilla_assets_chan = None;
            self.load_assets();
        }
        const UI_HEIGHT: f64 = 32.0;
        let mut progress = self.vanilla_progress.lock().unwrap();
        // Drop tasks that have finished.
        progress.tasks.retain(|v| v.progress < v.total);
        // Find out what we have to work with
        for task in &progress.tasks {
            // Create a progress bar for any task that doesn't have one yet.
            if !mui.progress_ui.iter()
                .filter(|v| v.task_file == task.task_file)
                .any(|v| v.task_name == task.task_name) {
                mui.num_tasks += 1;
                // Add a ui element for it
                let background = ui::ImageBuilder::new()
                    .texture("steven:solid")
                    .position(0.0, -UI_HEIGHT)
                    .size(350.0, UI_HEIGHT)
                    .colour((0, 0, 0, 100))
                    .draw_index(0xFFFFFF - mui.num_tasks)
                    .alignment(ui::VAttach::Bottom, ui::HAttach::Left)
                    .create(ui_container);
                ui::ImageBuilder::new()
                    .texture("steven:solid")
                    .position(0.0, 0.0)
                    .size(350.0, 10.0)
                    .colour((0, 0, 0, 200))
                    .attach(&mut *background.borrow_mut());
                ui::TextBuilder::new()
                    .text(&*task.task_name)
                    .position(3.0, 0.0)
                    .scale_x(0.5)
                    .scale_y(0.5)
                    .draw_index(1)
                    .attach(&mut *background.borrow_mut());
                ui::TextBuilder::new()
                    .text(&*task.task_file)
                    .position(3.0, 12.0)
                    .scale_x(0.5)
                    .scale_y(0.5)
                    .draw_index(1)
                    .attach(&mut *background.borrow_mut());
                let progress_bar = ui::ImageBuilder::new()
                    .texture("steven:solid")
                    .position(0.0, 0.0)
                    .size(0.0, 10.0)
                    .colour((0, 255, 0, 255))
                    .draw_index(2)
                    .alignment(ui::VAttach::Bottom, ui::HAttach::Left)
                    .attach(&mut *background.borrow_mut());
                mui.progress_ui.push(ProgressUI {
                    task_name: task.task_name.clone(),
                    task_file: task.task_file.clone(),
                    position: -UI_HEIGHT,
                    closing: false,
                    progress: 0.0,
                    background,
                    progress_bar,
                });
            }
        }
        // Update each live bar's progress fraction from its task.
        for ui in &mut mui.progress_ui {
            if ui.closing {
                continue;
            }
            let mut found = false;
            let mut prog = 1.0;
            for task in progress.tasks.iter()
                .filter(|v| v.task_file == ui.task_file)
                .filter(|v| v.task_name == ui.task_name) {
                found = true;
                prog = task.progress as f64 / task.total as f64;
            }
            let background = ui.background.borrow();
            let bar = ui.progress_bar.borrow();
            // Let the progress bar finish
            if !found && (background.y - ui.position).abs() < 0.7 * delta && (bar.width - 350.0).abs() < 1.0 * delta {
                ui.closing = true;
                ui.position = -UI_HEIGHT;
            }
            ui.progress = prog;
        }
        // Stack the visible bars upwards from the bottom of the screen.
        let mut offset = 0.0;
        for ui in &mut mui.progress_ui {
            if ui.closing {
                continue;
            }
            ui.position = offset;
            offset += UI_HEIGHT;
        }
        // Move elements
        for ui in &mut mui.progress_ui {
            let mut background = ui.background.borrow_mut();
            // Snap into place when close enough, otherwise ease towards the
            // target position/size scaled by the frame time.
            if (background.y - ui.position).abs() < 0.7 * delta {
                background.y = ui.position;
            } else {
                background.y += (ui.position - background.y).signum() * 0.7 * delta;
            }
            let mut bar = ui.progress_bar.borrow_mut();
            let target_size = (350.0 * ui.progress).min(350.0);
            if (bar.width - target_size).abs() < 1.0 * delta {
                bar.width = target_size;
            } else {
                bar.width += ((target_size - bar.width).signum() * delta).max(0.0);
            }
        }
        // Clean up dead elements
        mui.progress_ui.retain(|v| v.position >= -UI_HEIGHT || !v.closing);
    }
    /// Registers `pck` as the highest-priority pack and bumps the version.
    fn add_pack(&mut self, pck: Box<Pack>) {
        self.packs.push(pck);
        self.version += 1;
    }
    /// Inserts the on-disk vanilla resources as a pack just above the
    /// internal pack (index 1 keeps it below any user packs pushed later).
    fn load_vanilla(&mut self) {
        let loc = format!("./resources-{}", RESOURCES_VERSION);
        let location = path::Path::new(&loc);
        self.packs.insert(1, Box::new(DirPack { root: location.to_path_buf() }));
        self.version += 1;
    }
    /// Inserts the hash-addressed asset objects as a pack at index 1.
    fn load_assets(&mut self) {
        self.packs.insert(1, Box::new(ObjectPack::new()));
        self.version += 1;
    }
    /// Downloads the asset index (if missing) and then every asset object it
    /// lists, reporting progress as it goes. Runs on a background thread;
    /// completion of the index download is signalled over
    /// `vanilla_assets_chan`.
    #[cfg(not(target_arch = "wasm32"))]
    fn download_assets(&mut self) {
        let loc = format!("./index/{}.json", ASSET_VERSION);
        let location = path::Path::new(&loc).to_owned();
        let progress_info = self.vanilla_progress.clone();
        let (send, recv) = mpsc::channel();
        if fs::metadata(&location).is_ok(){
            // Index already on disk: the object pack can be loaded now; the
            // spawned thread will only fill in missing objects.
            self.load_assets();
        } else {
            self.vanilla_assets_chan = Some(recv);
        }
        thread::spawn(move || {
            let client = reqwest::Client::new();
            if fs::metadata(&location).is_err(){
                fs::create_dir_all(location.parent().unwrap()).unwrap();
                let res = client.get(ASSET_INDEX_URL)
                    .send()
                    .unwrap();
                let length = res.headers().get(reqwest::header::CONTENT_LENGTH).unwrap().to_str().unwrap().parse::<u64>().unwrap();
                Self::add_task(&progress_info, "Downloading Asset Index", &*location.to_string_lossy(), length);
                {
                    // Download to a temp file and rename afterwards so a
                    // partial download never looks like a valid index.
                    let mut file = fs::File::create(format!("index-{}.tmp", ASSET_VERSION)).unwrap();
                    let mut progress = ProgressRead {
                        read: res,
                        progress: &progress_info,
                        task_name: "Downloading Asset Index".into(),
                        task_file: location.to_string_lossy().into_owned(),
                    };
                    io::copy(&mut progress, &mut file).unwrap();
                }
                fs::rename(format!("index-{}.tmp", ASSET_VERSION), &location).unwrap();
                send.send(true).unwrap();
            }
            let file = fs::File::open(&location).unwrap();
            let index: serde_json::Value = serde_json::from_reader(&file).unwrap();
            let root_location = path::Path::new("./objects/");
            let objects = index.get("objects").and_then(|v| v.as_object()).unwrap();
            Self::add_task(&progress_info, "Downloading Assets", "./objects", objects.len() as u64);
            for (k, v) in objects {
                // Objects are stored hash-addressed as <first 2 chars>/<hash>.
                let hash = v.get("hash").and_then(|v| v.as_str()).unwrap();
                let hash_path = format!("{}/{}", &hash[..2], hash);
                let location = root_location.join(&hash_path);
                if fs::metadata(&location).is_err(){
                    fs::create_dir_all(location.parent().unwrap()).unwrap();
                    let res = client.get(&format!("http://resources.download.minecraft.net/{}", hash_path))
                        .send()
                        .unwrap();
                    let length = v.get("size").and_then(|v| v.as_u64()).unwrap();
                    Self::add_task(&progress_info, "Downloading Asset", k, length);
                    // Same temp-file-then-rename dance as for the index.
                    let mut tmp_file = location.to_owned();
                    tmp_file.set_file_name(format!("{}.tmp", hash));
                    {
                        let mut file = fs::File::create(&tmp_file).unwrap();
                        let mut progress = ProgressRead {
                            read: res,
                            progress: &progress_info,
                            task_name: "Downloading Asset".into(),
                            task_file: k.to_owned(),
                        };
                        io::copy(&mut progress, &mut file).unwrap();
                    }
                    fs::rename(&tmp_file, &location).unwrap();
                }
                Self::add_task_progress(&progress_info, "Downloading Assets", "./objects", 1);
            }
        });
    }
    /// Downloads the vanilla client jar (unless already unpacked) and
    /// extracts its `assets/` entries into `./resources-<version>/`. A
    /// marker file (`steven.assets`) records successful completion, and the
    /// result is signalled over `vanilla_chan`.
    #[cfg(not(target_arch = "wasm32"))]
    fn download_vanilla(&mut self) {
        let loc = format!("./resources-{}", RESOURCES_VERSION);
        let location = path::Path::new(&loc);
        if fs::metadata(location.join("steven.assets")).is_ok() {
            // Already downloaded and unpacked by a previous run.
            self.load_vanilla();
            return;
        }
        let (send, recv) = mpsc::channel();
        self.vanilla_chan = Some(recv);
        let progress_info = self.vanilla_progress.clone();
        thread::spawn(move || {
            let client = reqwest::Client::new();
            let res = client.get(VANILLA_CLIENT_URL)
                .send()
                .unwrap();
            let mut file = fs::File::create(format!("{}.tmp", RESOURCES_VERSION)).unwrap();
            let length = res.headers().get(reqwest::header::CONTENT_LENGTH).unwrap().to_str().unwrap().parse::<u64>().unwrap();
            let task_file = format!("./resources-{}", RESOURCES_VERSION);
            Self::add_task(&progress_info, "Downloading Core Assets", &task_file, length);
            {
                let mut progress = ProgressRead {
                    read: res,
                    progress: &progress_info,
                    task_name: "Downloading Core Assets".into(),
                    task_file,
                };
                io::copy(&mut progress, &mut file).unwrap();
            }
            // Copy the resources from the zip
            let file = fs::File::open(format!("{}.tmp", RESOURCES_VERSION)).unwrap();
            let mut zip = zip::ZipArchive::new(file).unwrap();
            let task_file = format!("./resources-{}", RESOURCES_VERSION);
            Self::add_task(&progress_info, "Unpacking Core Assets", &task_file, zip.len() as u64);
            let loc = format!("./resources-{}", RESOURCES_VERSION);
            let location = path::Path::new(&loc);
            let count = zip.len();
            for i in 0..count {
                Self::add_task_progress(&progress_info, "Unpacking Core Assets", &task_file, 1);
                let mut file = zip.by_index(i).unwrap();
                if !file.name().starts_with("assets/") {
                    continue;
                }
                // NOTE(review): `file.name()` is joined into the output path
                // unsanitized; an archive with `assets/../..`-style entries
                // could escape `location` (zip-slip). The jar comes from
                // Mojang over HTTPS, but sanitizing the name would be safer.
                let path = location.join(file.name());
                fs::create_dir_all(path.parent().unwrap()).unwrap();
                let mut out = fs::File::create(path).unwrap();
                io::copy(&mut file, &mut out).unwrap();
            }
            fs::File::create(location.join("steven.assets")).unwrap(); // Marker file
            send.send(true).unwrap();
            fs::remove_file(format!("{}.tmp", RESOURCES_VERSION)).unwrap();
        });
    }
fn add_task(progress: &Arc<Mutex<Progress>>, name: &str, file: &str, length: u64) {
let mut info = progress.lock().unwrap();
info.tasks.push(Task {
task_name: name.into(),
task_file: file.into(),
total: length,
progress: 0,
});
}
fn add_task_progress(progress: &Arc<Mutex<Progress>>, name: &str, file: &str, prog: u64) {
let mut progress = progress.lock().unwrap();
for task in progress.tasks.iter_mut()
.filter(|v| v.task_file == file)
.filter(|v| v.task_name == name) {
task.progress += prog as u64;
}
}
}
/// A resource pack backed by a directory on disk; resource names are
/// resolved relative to `root`.
struct DirPack {
    root: path::PathBuf,
}
impl Pack for DirPack {
    fn open(&self, name: &str) -> Option<Box<io::Read>> {
        match fs::File::open(self.root.join(name)) {
            Ok(val) => Some(Box::new(val)),
            // A missing file simply means this pack doesn't provide it.
            Err(_) => None,
        }
    }
}
/// The resource pack embedded into the binary at build time via the
/// `steven_resources` crate.
struct InternalPack;
impl Pack for InternalPack {
    fn open(&self, name: &str) -> Option<Box<io::Read>> {
        match internal::get_file(name) {
            Some(val) => Some(Box::new(io::Cursor::new(val))),
            None => None,
        }
    }
}
/// A resource pack backed by Mojang's hash-addressed asset objects,
/// mapping logical asset names to content-hash strings via the downloaded
/// asset index.
struct ObjectPack {
    objects: HashMap<String, String, BuildHasherDefault<FNVHash>>,
}
impl ObjectPack {
    /// Builds the name -> hash map from the asset index JSON.
    ///
    /// Panics if the index file is missing or malformed; callers only
    /// construct this after the index download has completed successfully.
    fn new() -> ObjectPack {
        let loc = format!("./index/{}.json", ASSET_VERSION);
        let location = path::Path::new(&loc);
        let file = fs::File::open(&location).unwrap();
        let index: serde_json::Value = serde_json::from_reader(&file).unwrap();
        let objects = index.get("objects").and_then(|v| v.as_object()).unwrap();
        let mut hash_objs = HashMap::with_hasher(BuildHasherDefault::default());
        for (k, v) in objects {
            hash_objs.insert(k.clone(), v.get("hash").and_then(|v| v.as_str()).unwrap().to_owned());
        }
        ObjectPack {
            objects: hash_objs,
        }
    }
}
impl Pack for ObjectPack {
    fn open(&self, name: &str) -> Option<Box<io::Read>> {
        // Only asset paths are served from the object store.
        if !name.starts_with("assets/") {
            return None;
        }
        let key = &name["assets/".len()..];
        let hash = self.objects.get(key)?;
        // Objects live on disk as ./objects/<first 2 chars>/<hash>.
        let hash_path = format!("{}/{}", &hash[..2], hash);
        let location = path::Path::new("./objects/").join(&hash_path);
        fs::File::open(location)
            .ok()
            .map(|file| Box::new(file) as Box<io::Read>)
    }
}
/// An `io::Read` adapter that forwards the number of bytes read to the
/// shared `Progress` tracker, keyed by task name and file.
struct ProgressRead<'a, T> {
    read: T,
    progress: &'a Arc<Mutex<Progress>>,
    task_name: String,
    task_file: String,
}
impl <'a, T: io::Read> io::Read for ProgressRead<'a, T> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let size = self.read.read(buf)?;
        // Report the bytes consumed so the UI progress bar advances.
        Manager::add_task_progress(self.progress, &self.task_name, &self.task_file, size as u64);
        Ok(size)
    }
}
| 36.178431 | 131 | 0.510704 |
7a3624a67ff330159d0acac80fd051fa3801887b | 1,433 | use std::collections::HashMap;
use std::env::args;
use std::process;
// Message printed when arguments are missing or fail to parse.
// NOTE: the typo "sparated" is in the user-facing string itself; fixing it
// would change program output.
const BAD_INPUT_ERROR: &str = "space sparated numbers please";
/// Entry point: parses whitespace-separated integers from the command line
/// and prints their mean, median and mode.
fn main() {
    let numbers = to_vec();
    if numbers.is_empty() {
        // No arguments at all — the statistics below would panic or be NaN.
        die(BAD_INPUT_ERROR);
    }
    println!("mean {}", mean(&numbers));
    println!("median {}", median(&numbers));
    println!("mode {}", mode(&numbers));
}
/// Parses every command-line argument (skipping the program name) as `i32`,
/// exiting with an error message on the first argument that fails to parse.
fn to_vec() -> Vec<i32> {
    args()
        .skip(1)
        .map(|arg| match arg.parse() {
            Ok(n) => n,
            // `die` never returns, so this arm coerces to i32.
            Err(_) => die(BAD_INPUT_ERROR),
        })
        .collect()
}
/// Arithmetic mean of `numbers` as `f32`.
///
/// The sum is accumulated in `i64` so large inputs cannot overflow the
/// `i32` accumulator the original used (which would panic in debug builds
/// and silently wrap in release builds).
/// Returns `NaN` for an empty input (0.0 / 0.0); `main` guards that case.
fn mean(numbers: &Vec<i32>) -> f32 {
    let sum: i64 = numbers.iter().map(|&n| i64::from(n)).sum();
    sum as f32 / numbers.len() as f32
}
/// Median of `numbers` as `f32`.
///
/// Sorts a copy, then returns the middle element (odd length) or the mean
/// of the two middle elements (even length). The pairwise sum is computed
/// in `i64` so two large `i32` values cannot overflow (the original added
/// them as `i32`).
///
/// Panics on empty input (index underflow), as before; `main` guards it.
fn median(numbers: &Vec<i32>) -> f32 {
    let mut sorted = numbers.clone();
    // Stability is irrelevant for plain integers; sort_unstable is faster
    // and allocation-free.
    sorted.sort_unstable();
    let middle = sorted.len() / 2;
    if sorted.len() % 2 == 0 {
        (i64::from(sorted[middle - 1]) + i64::from(sorted[middle])) as f32 / 2.0
    } else {
        sorted[middle] as f32
    }
}
/// Most frequent value in `numbers`.
///
/// Ties are now broken deterministically in favour of the smallest value;
/// the original took whichever tied entry `HashMap` iteration yielded
/// last, which varies between runs. An empty input yields
/// `i32::max_value()`, matching the original fallback (`main` guards that
/// case anyway).
fn mode(numbers: &Vec<i32>) -> i32 {
    let mut counts = HashMap::new();
    for &n in numbers {
        *counts.entry(n).or_insert(0) += 1;
    }
    counts
        .into_iter()
        // Highest count wins; `Reverse` makes the smaller value win ties.
        .max_by_key(|&(n, count)| (count, std::cmp::Reverse(n)))
        .map(|(n, _)| n)
        .unwrap_or(i32::max_value())
}
/// Prints `msg` and terminates the process with exit code 1.
///
/// The message now goes to stderr (`eprintln!`) rather than stdout, so
/// error text does not pollute output that callers may be piping.
fn die(msg: &str) -> ! {
    eprintln!("{}", msg);
    process::exit(1)
}
1d288a9cac1447f8ac29176f46e2ca4b183a3a4d | 18,682 | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
use std::time::{SystemTime};
use std::time::UNIX_EPOCH;
use xxx::ecp::ECP;
use xxx::fp2::FP2;
use xxx::ecp2::ECP2;
use xxx::fp4::FP4;
use xxx::fp12::FP12;
use xxx::big::BIG;
use xxx::pair;
use xxx::big;
use xxx::rom;
use rand::RAND;
use hash256::HASH256;
use hash384::HASH384;
use hash512::HASH512;
/* MPIN API Functions */
/* Configure mode of operation */
// Field/group element sizes in bytes, derived from the BIG representation.
pub const EFS: usize=big::MODBYTES as usize;
pub const EGS: usize=big::MODBYTES as usize;
// AES key size in bytes produced by the key-derivation hash.
pub const PAS: usize=16;
// Error codes returned by the API functions below (0 means success).
pub const BAD_PARAMS: isize=-11;
pub const INVALID_POINT: isize=-14;
pub const WRONG_ORDER: isize=-18;
pub const BAD_PIN: isize=-19;
// Supported hash variants, identified by their digest length in bytes.
pub const SHA256: usize=32;
pub const SHA384: usize=48;
pub const SHA512: usize=64;
/* Configure your PIN here */
pub const MAXPIN: i32=10000; /* PIN less than this */
pub const PBLEN: i32=14; /* Number of bits in PIN */
pub const TS: usize=10; /* 10 for 4 digit PIN, 14 for 6-digit PIN - 2^TS/TS approx = sqrt(MAXPIN) */
pub const TRAP:usize=200; /* 200 for 4 digit PIN, 2000 for 6-digit PIN - approx 2*sqrt(MAXPIN) */
pub const HASH_TYPE: usize=SHA256;
/* Hash an FP4 value together with a G1 point down to a PAS-byte key. */
// Serializes the four BIG components of `c` followed by U's affine x and y
// coordinates (six field elements total), hashes with the selected SHA
// variant, and copies the first PAS bytes of the digest into `r`.
// Returns false if `sha` is not one of SHA256/SHA384/SHA512.
#[allow(non_snake_case)]
fn hash(sha: usize,c: &mut FP4,U: &mut ECP,r: &mut [u8]) -> bool {
    let mut w:[u8;EFS]=[0;EFS];
    let mut t:[u8;6*EFS]=[0;6*EFS];
    c.geta().geta().tobytes(&mut w); for i in 0..EFS {t[i]=w[i]}
    c.geta().getb().tobytes(&mut w); for i in EFS..2*EFS {t[i]=w[i-EFS]}
    c.getb().geta().tobytes(&mut w); for i in 2*EFS..3*EFS {t[i]=w[i-2*EFS]}
    c.getb().getb().tobytes(&mut w); for i in 3*EFS..4*EFS {t[i]=w[i-3*EFS]}
    U.getx().tobytes(&mut w); for i in 4*EFS..5*EFS {t[i]=w[i-4*EFS]}
    U.gety().tobytes(&mut w); for i in 5*EFS..6*EFS {t[i]=w[i-5*EFS]}
    if sha==SHA256 {
        let mut h=HASH256::new();
        h.process_array(&t);
        let sh=h.hash();
        for i in 0..PAS {r[i]=sh[i]}
        return true;
    }
    if sha==SHA384 {
        let mut h=HASH384::new();
        h.process_array(&t);
        let sh=h.hash();
        for i in 0..PAS {r[i]=sh[i]}
        return true;
    }
    if sha==SHA512 {
        let mut h=HASH512::new();
        h.process_array(&t);
        let sh=h.hash();
        for i in 0..PAS {r[i]=sh[i]}
        return true;
    }
    return false;
}
/* Hash number (optional) and string to point on curve */
// Computes sha(n || id), with `n` omitted when 0, and writes the digest
// into `w` truncated (or right-padded with zeros) to MODBYTES bytes. The
// result is typically fed to ECP::mapit by callers. Returns false if `sha`
// is not a supported variant.
fn hashit(sha: usize,n: usize,id: &[u8],w: &mut [u8]) -> bool {
    let mut r:[u8;64]=[0;64];
    let mut didit=false;
    if sha==SHA256 {
        let mut h=HASH256::new();
        if n>0 {h.process_num(n as i32)}
        h.process_array(id);
        let hs=h.hash();
        for i in 0..sha {r[i]=hs[i];}
        didit=true;
    }
    if sha==SHA384 {
        let mut h=HASH384::new();
        if n>0 {h.process_num(n as i32)}
        h.process_array(id);
        let hs=h.hash();
        for i in 0..sha {r[i]=hs[i];}
        didit=true;
    }
    if sha==SHA512 {
        let mut h=HASH512::new();
        if n>0 {h.process_num(n as i32)}
        h.process_array(id);
        let hs=h.hash();
        for i in 0..sha {r[i]=hs[i];}
        didit=true;
    }
    if !didit {return false}
    // Fit the digest into MODBYTES bytes: truncate long digests, zero-pad
    // short ones.
    let rm=big::MODBYTES as usize;
    if sha>rm {
        for i in 0..rm {w[i]=r[i]}
    } else {
        for i in 0..sha {w[i]=r[i]}
        for i in sha..rm {w[i]=0}
    }
    return true;
}
/* return time in slots since epoch */
// One slot is a day: 60 seconds * 1440 minutes.
pub fn today() -> usize {
    let secs = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();
    (secs / (60 * 1440)) as usize
}
/* these next two functions help to implement elligator squared - http://eprint.iacr.org/2014/043 */
/* maps a random u to a point on the curve */
// Reduces `u` mod the field modulus and increments the x-coordinate until a
// valid curve point with sign bit `cb` is found (not constant-time).
#[allow(non_snake_case)]
fn emap(u: &BIG,cb: isize) -> ECP {
    let mut P:ECP;
    let mut x=BIG::new_copy(u);
    let mut p=BIG::new_ints(&rom::MODULUS);
    x.rmod(&mut p);
    loop {
        P=ECP::new_bigint(&x,cb);
        if !P.is_infinity() {break}
        x.inc(1); x.norm();
    }
    return P;
}
/* returns u derived from P. Random value in range 1 to return value should then be added to u */
// Inverse of emap: walks x downwards from P's x-coordinate, counting how
// many candidate values map back to a valid point. `u` receives the last
// candidate; the caller re-randomizes within the returned range.
#[allow(non_snake_case)]
fn unmap(u: &mut BIG,P: &mut ECP) -> isize {
    let s=P.gets();
    let mut R:ECP;
    let mut r=0;
    let x=P.getx();
    u.copy(&x);
    loop {
        u.dec(1); u.norm();
        r+=1;
        R=ECP::new_bigint(u,s);
        if !R.is_infinity() {break}
    }
    return r as isize;
}
// Public wrapper: hash an identity to MODBYTES bytes (no number prefix).
pub fn hash_id(sha: usize,id: &[u8],w: &mut [u8]) -> bool {
    return hashit(sha,0,id,w);
}
/* these next two functions implement elligator squared - http://eprint.iacr.org/2014/043 */
/* Elliptic curve point E in format (0x04,x,y} is converted to form {0x0-,u,v} */
/* Note that u and v are indistinguishable from random strings */
// The two sign bits su and sv are packed into byte 0 of the output.
#[allow(non_snake_case)]
pub fn encoding(rng: &mut RAND,e: &mut [u8]) ->isize {
    let mut t:[u8;EFS]=[0;EFS];
    // Parse the uncompressed point: x in e[1..], y in e[EFS+1..].
    for i in 0..EFS {t[i]=e[i+1]}
    let mut u=BIG::frombytes(&t);
    for i in 0..EFS {t[i]=e[i+EFS+1]}
    let mut v=BIG::frombytes(&t);
    let mut P=ECP::new_bigs(&u,&v);
    if P.is_infinity() {return INVALID_POINT}
    let p=BIG::new_ints(&rom::MODULUS);
    // Pick a random u and split P = emap(u) + remainder; the remainder is
    // encoded via unmap with a random offset so (u, v) look uniform.
    u=BIG::randomnum(&p,rng);
    let mut su=rng.getbyte() as isize; /*if (su<0) su=-su;*/ su%=2;
    let mut W=emap(&mut u,su);
    P.sub(&mut W);
    let sv=P.gets();
    let rn=unmap(&mut v,&mut P);
    let mut m=rng.getbyte() as isize; /*if (m<0) m=-m;*/ m%=rn;
    v.inc(m+1);
    e[0]=(su+2*sv) as u8;
    u.tobytes(&mut t);
    for i in 0..EFS {e[i+1]=t[i]}
    v.tobytes(&mut t);
    for i in 0..EFS {e[i+EFS+1]=t[i]}
    return 0;
}
// Inverse of `encoding`: recovers the point as emap(u) + emap(v) and writes
// it back in uncompressed (0x04, x, y) form.
#[allow(non_snake_case)]
pub fn decoding(d: &mut [u8]) -> isize {
    let mut t:[u8;EFS]=[0;EFS];
    // Byte 0 must carry only the two packed sign bits.
    if (d[0]&0x04)!=0 {return INVALID_POINT}
    for i in 0..EFS {t[i]=d[i+1]}
    let mut u=BIG::frombytes(&t);
    for i in 0..EFS {t[i]=d[i+EFS+1]}
    let mut v=BIG::frombytes(&t);
    let su=(d[0]&1) as isize;
    let sv=((d[0]>>1)&1) as isize;
    let mut W=emap(&mut u,su);
    let mut P=emap(&mut v,sv);
    P.add(&mut W);
    u=P.getx();
    v=P.gety();
    d[0]=0x04;
    u.tobytes(&mut t);
    for i in 0..EFS {d[i+1]=t[i]}
    v.tobytes(&mut t);
    for i in 0..EFS {d[i+EFS+1]=t[i]}
    return 0;
}
/* R=R1+R2 in group G1 */
// Adds two serialized G1 points and writes the serialized sum into `r`.
// Returns INVALID_POINT if either input fails to deserialize to a point.
#[allow(non_snake_case)]
pub fn recombine_g1(r1: &[u8],r2: &[u8],r: &mut [u8]) -> isize {
    let mut P=ECP::frombytes(&r1);
    let mut Q=ECP::frombytes(&r2);
    if P.is_infinity() || Q.is_infinity() {return INVALID_POINT}
    P.add(&mut Q);
    P.tobytes(r);
    return 0;
}
/* W=W1+W2 in group G2 */
// Same as recombine_g1, but for serialized G2 points.
#[allow(non_snake_case)]
pub fn recombine_g2(w1: &[u8],w2: &[u8],w: &mut [u8]) -> isize {
    let mut P=ECP2::frombytes(&w1);
    let mut Q=ECP2::frombytes(&w2);
    if P.is_infinity() || Q.is_infinity() {return INVALID_POINT}
    P.add(&mut Q);
    P.tobytes(w);
    return 0;
}
/* create random secret S */
// Writes a uniformly random scalar modulo the curve order into `s`.
pub fn random_generate(rng: &mut RAND,s: &mut [u8]) -> isize {
    let r=BIG::new_ints(&rom::CURVE_ORDER);
    let mut sc=BIG::randomnum(&r,rng);
    // Kept (disabled) from upstream AMCL: optional truncation tied to the
    // configured AES strength.
    //if rom::AES_S>0 {
    //	sc.mod2m(2*rom::AES_S);
    //}
    sc.tobytes(s);
    return 0;
}
/* Extract Server Secret SST=S*Q where Q is fixed generator in G2 and S is master secret */
#[allow(non_snake_case)]
pub fn get_server_secret(s: &[u8],sst: &mut [u8]) -> isize {
    // Q is the fixed G2 generator from the ROM curve constants.
    let mut Q=ECP2::new_fp2s(&FP2::new_bigs(&BIG::new_ints(&rom::CURVE_PXA),&BIG::new_ints(&rom::CURVE_PXB)),&FP2::new_bigs(&BIG::new_ints(&rom::CURVE_PYA),&BIG::new_ints(&rom::CURVE_PYB)));
    let mut sc=BIG::frombytes(s);
    Q=pair::g2mul(&mut Q,&mut sc);
    Q.tobytes(sst);
    return 0;
}
/*
 W=x*H(G);
 if RNG == NULL then X is passed in
 if RNG != NULL the X is passed out
 if type=0 W=x*G where G is point on the curve, else W=x*M(G), where M(G) is mapping of octet G to point on the curve
*/
#[allow(non_snake_case)]
pub fn get_g1_multiple(rng: Option<&mut RAND>,typ: usize,x: &mut [u8],g: &[u8],w: &mut [u8]) -> isize {
    let mut sx:BIG;
    let r=BIG::new_ints(&rom::CURVE_ORDER);
    // With an RNG, generate the scalar and write it out through `x`;
    // without one, read the caller-supplied scalar from `x`.
    if let Some(mut rd)=rng
    {
        sx=BIG::randomnum(&r,rd);
        //if rom::AES_S>0 {
        //	sx.mod2m(2*rom::AES_S);
        //}
        sx.tobytes(x);
    } else {
        sx=BIG::frombytes(x);
    }
    let mut P:ECP;
    if typ==0 {
        // `g` is an already-serialized curve point.
        P=ECP::frombytes(g);
        if P.is_infinity() {return INVALID_POINT}
    } else {
        // `g` is arbitrary bytes; map them onto the curve first.
        P=ECP::mapit(g)
    }
    pair::g1mul(&mut P,&mut sx).tobytes(w);
    return 0;
}
/* Client secret CST=S*H(CID) where CID is client ID and S is master secret */
/* CID is hashed externally */
pub fn get_client_secret(s: &mut [u8],cid: &[u8],cst: &mut [u8]) -> isize {
    return get_g1_multiple(None,1,s,cid,cst);
}
/* Extract PIN from TOKEN for identity CID:
   TOKEN -= (pin % MAXPIN).H(CID), so the stored token no longer embeds
   the PIN factor. CID is the already-hashed client identity.
   Returns 0, or INVALID_POINT if the token does not decode to a finite
   point. */
#[allow(non_snake_case)]
pub fn extract_pin(sha: usize,cid: &[u8],pin: i32,token: &mut [u8]) -> isize {
let mut P=ECP::frombytes(&token);
const RM:usize=big::MODBYTES as usize;
let mut h:[u8;RM]=[0;RM];
if P.is_infinity() {return INVALID_POINT}
// Map the hashed identity onto the curve and scale it by the reduced PIN.
hashit(sha,0,cid,&mut h);
let mut R=ECP::mapit(&h);
R=R.pinmul(pin%MAXPIN,PBLEN);
// Remove the PIN component from the token, in place.
P.sub(&mut R);
P.tobytes(token);
return 0;
}
/* Support for M-Pin Full: precompute the pairing values
   g1 = fexp(e(Q, TOKEN)) and g2 = fexp(e(Q, H(CID))), where Q is the
   fixed G2 generator. Returns INVALID_POINT if the token is invalid. */
#[allow(non_snake_case)]
pub fn precompute(token: &[u8],cid: &[u8],g1: &mut [u8],g2: &mut [u8]) -> isize {
    let T = ECP::frombytes(&token);
    if T.is_infinity() {
        return INVALID_POINT;
    }
    let P = ECP::mapit(&cid);
    // Fixed G2 generator from the ROM curve constants.
    let Q = ECP2::new_fp2s(&FP2::new_bigs(&BIG::new_ints(&rom::CURVE_PXA),&BIG::new_ints(&rom::CURVE_PXB)),&FP2::new_bigs(&BIG::new_ints(&rom::CURVE_PYA),&BIG::new_ints(&rom::CURVE_PYB)));
    let mut pairing = pair::fexp(&pair::ate(&Q, &T));
    pairing.tobytes(g1);
    pairing = pair::fexp(&pair::ate(&Q, &P));
    pairing.tobytes(g2);
    0
}
/* Time Permit CTT = S*H(date|H(CID)), where S is the master secret and
   cid is the already-hashed client identity. */
#[allow(non_snake_case)]
pub fn get_client_permit(sha: usize,date: usize,s: &[u8],cid: &[u8],ctt: &mut [u8]) ->isize {
    const RM: usize = big::MODBYTES as usize;
    let mut digest: [u8; RM] = [0; RM];
    // Hash the date together with the hashed identity, map to the curve.
    hashit(sha, date, cid, &mut digest);
    let mut permit_point = ECP::mapit(&digest);
    let mut master = BIG::frombytes(s);
    pair::g1mul(&mut permit_point, &mut master).tobytes(ctt);
    0
}
/* Implement step 1 on client side of MPin protocol.
   Rebuilds the full client secret SEC = TOKEN + pin.H(CID) (plus the time
   permit when date != 0) and emits the commitment(s):
     xID  = x.H(CID)                        (written to xid, if supplied)
     xCID = x.(H(CID) + H(date|H(CID)))     (written to xcid, date != 0 only)
   If rng is Some, a fresh random x < CURVE_ORDER is generated and written
   back through x; otherwise x is read as input.
   Returns 0 on success, or INVALID_POINT for a bad token/permit encoding. */
#[allow(non_snake_case)]
pub fn client_1(sha: usize,date: usize,client_id: &[u8],rng: Option<&mut RAND>,x: &mut [u8],pin: usize,token: &[u8],sec: &mut [u8],xid: Option<&mut [u8]>,xcid: Option<&mut [u8]>,permit: Option<&[u8]>) ->isize {
let r=BIG::new_ints(&rom::CURVE_ORDER);
let mut sx:BIG;
// x is generated (and exported) when an RNG is supplied, else taken as input.
if let Some(mut rd)=rng
{
sx=BIG::randomnum(&r,rd);
//if rom::AES_S>0 {
// sx.mod2m(2*rom::AES_S);
//}
sx.tobytes(x);
} else {
sx=BIG::frombytes(x);
}
const RM:usize=big::MODBYTES as usize;
let mut h:[u8;RM]=[0;RM];
// P = H(CID) mapped to the curve; T = decoded token.
hashit(sha,0,&client_id,&mut h);
let mut P=ECP::mapit(&h);
let mut T=ECP::frombytes(&token);
if T.is_infinity() {return INVALID_POINT}
// Restore the PIN factor: T = TOKEN + pin.H(CID).
let mut W=P.pinmul((pin as i32)%MAXPIN,PBLEN);
T.add(&mut W);
if date!=0 {
// Time permits in use: fold the permit into the secret as well.
if let Some(rpermit)=permit {W=ECP::frombytes(&rpermit);}
if W.is_infinity() {return INVALID_POINT}
T.add(&mut W);
// W = H(date|H(CID)) mapped to the curve, the date-dependent component.
let mut h2:[u8;RM]=[0;RM];
hashit(sha,date,&h,&mut h2);
W=ECP::mapit(&h2);
if let Some(mut rxid)=xid {
// Emit xID = x.P first, then accumulate x.W to form xCID.
P=pair::g1mul(&mut P,&mut sx);
P.tobytes(&mut rxid);
W=pair::g1mul(&mut W,&mut sx);
P.add(&mut W);
} else {
// No xID wanted: sum first, multiply once.
P.add(&mut W);
P=pair::g1mul(&mut P,&mut sx);
}
if let Some(mut rxcid)=xcid {P.tobytes(&mut rxcid)}
} else {
// No time permits: only xID = x.H(CID) is produced.
if let Some(mut rxid)=xid {
P=pair::g1mul(&mut P,&mut sx);
P.tobytes(&mut rxid);
}
}
T.tobytes(sec);
return 0;
}
/* Outputs HID = H(CID) mapped to the curve and, when date != 0,
   HTID = H(CID) + H(date|H(CID)) for time permits. With no time permits
   (date == 0) only hid is written. */
#[allow(non_snake_case)]
pub fn server_1(sha: usize,date: usize,cid: &[u8],hid: &mut [u8],htid: Option<&mut [u8]>) {
    const RM: usize = big::MODBYTES as usize;
    let mut id_hash: [u8; RM] = [0; RM];
    hashit(sha, 0, cid, &mut id_hash);
    let mut P = ECP::mapit(&id_hash);
    P.tobytes(hid);
    if date == 0 {
        return;
    }
    // Time permits in use: add the date-dependent point and emit HTID.
    let mut date_hash: [u8; RM] = [0; RM];
    hashit(sha, date, &id_hash, &mut date_hash);
    let mut D = ECP::mapit(&date_hash);
    P.add(&mut D);
    if let Some(out) = htid {
        P.tobytes(out);
    }
}
/* Implement step 2 on client side of MPin protocol:
   SEC = -((x + y) mod r).SEC, serialised back into sec.
   Returns 0, or INVALID_POINT if sec does not decode to a finite point. */
pub fn client_2(x: &[u8],y: &[u8],sec: &mut [u8]) -> isize {
    let mut order = BIG::new_ints(&rom::CURVE_ORDER);
    let mut point = ECP::frombytes(sec);
    if point.is_infinity() {
        return INVALID_POINT;
    }
    // scalar = (x + y) mod order
    let mut scalar = BIG::frombytes(x);
    scalar.add(&BIG::frombytes(y));
    scalar.rmod(&mut order);
    point = pair::g1mul(&mut point, &mut scalar);
    point.neg();
    point.tobytes(sec);
    0
}
/* Return the number of whole seconds elapsed since the Unix epoch. */
pub fn get_time() -> usize {
    let since_epoch = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
    since_epoch.as_secs() as usize
}
/* Generate Y = H(timevalue, xCID/xID), reduced modulo the curve order and
   serialised into y. */
pub fn get_y(sha: usize,timevalue: usize,xcid: &[u8],y: &mut [u8]) {
    const RM: usize = big::MODBYTES as usize;
    let mut digest: [u8; RM] = [0; RM];
    // Hash the timestamp together with the client commitment.
    hashit(sha, timevalue, xcid, &mut digest);
    // Interpret the digest as a scalar mod the group order.
    let mut yval = BIG::frombytes(&digest);
    let mut order = BIG::new_ints(&rom::CURVE_ORDER);
    yval.rmod(&mut order);
    yval.tobytes(y);
}
/* Implement step 2 of MPin protocol on server side.
   Verifies the client response msec (= -(x+y).SEC) against the server
   secret sst by checking that fexp(e(Q, msec) * e(sQ, y.HID + x-commitment))
   is the identity in GT. When date != 0 the time-permit variants xcid/htid
   are required; otherwise xid/hid are used.
   Returns 0 on success, BAD_PARAMS if a required optional argument is
   missing, INVALID_POINT for malformed encodings, and BAD_PIN when the
   pairing check fails — in which case, if e, f and xid are all supplied,
   the values needed by kangaroo() for PIN-error extraction are written. */
#[allow(non_snake_case)]
pub fn server_2(date: usize,hid: &[u8],htid: Option<&[u8]>,y: &[u8],sst: &[u8],xid: Option<&[u8]>,xcid: Option<&[u8]>,msec: &[u8],e: Option<&mut [u8]>,f: Option<&mut [u8]>) -> isize {
// Q = fixed G2 generator; sQ = server secret S.Q decoded from sst.
let Q=ECP2::new_fp2s(&FP2::new_bigs(&BIG::new_ints(&rom::CURVE_PXA),&BIG::new_ints(&rom::CURVE_PXB)),&FP2::new_bigs(&BIG::new_ints(&rom::CURVE_PYA),&BIG::new_ints(&rom::CURVE_PYB)));
let sQ=ECP2::frombytes(&sst);
if sQ.is_infinity() {return INVALID_POINT}
// R = the client's x-commitment: xCID with time permits, else xID.
let mut R:ECP;
if date!=0 {
if let Some(rxcid)=xcid {R=ECP::frombytes(&rxcid);}
else {return BAD_PARAMS}
} else {
if let Some(rxid)=xid {R=ECP::frombytes(&rxid)}
else {return BAD_PARAMS}
}
if R.is_infinity() {return INVALID_POINT}
let mut sy=BIG::frombytes(&y);
// P = the matching identity point: HTID with time permits, else HID.
let mut P:ECP;
if date!=0 {
if let Some(rhtid)=htid {P=ECP::frombytes(&rhtid)}
else {return BAD_PARAMS}
} else {
P=ECP::frombytes(&hid);
}
if P.is_infinity() {return INVALID_POINT}
// P = y.P + R, the point paired against the server secret.
P=pair::g1mul(&mut P,&mut sy);
P.add(&mut R); P.affine();
// R now reused for the client's response msec.
R=ECP::frombytes(&msec);
if R.is_infinity() {return INVALID_POINT}
let mut g:FP12;
// g = fexp(e(Q,msec) * e(sQ,P)); identity iff the response is consistent.
g=pair::ate2(&Q,&R,&sQ,&P);
g=pair::fexp(&g);
if !g.isunity() {
// Authentication failed: optionally emit E=g and F=fexp(e(Q, y.HID+xID))
// so the caller can run kangaroo(E,F) to recover the PIN error.
if let Some(rxid)=xid {
if let Some(re)=e {
if let Some(rf)=f {
g.tobytes(re);
if date!=0 {
// With time permits, F must be recomputed from the permit-free
// points HID and xID.
P=ECP::frombytes(&hid);
if P.is_infinity() {return INVALID_POINT}
R=ECP::frombytes(&rxid);
if R.is_infinity() {return INVALID_POINT}
P=pair::g1mul(&mut P,&mut sy);
P.add(&mut R); P.affine();
}
g=pair::ate(&Q,&P);
g=pair::fexp(&g);
g.tobytes(rf);
}
}
}
return BAD_PIN;
}
return 0;
}
/* Pollard's kangaroo method in GT, used to recover the (small) PIN error
   from the E and F values produced by a failed server_2() check.
   Searches for a delta with |delta| < MAXPIN relating ge and gf; returns
   the signed delta, or 0 if the trap fails (probable invalid token). */
pub fn kangaroo(e: &[u8],f: &[u8]) -> isize {
let mut ge=FP12::frombytes(e);
let mut gf=FP12::frombytes(f);
let mut distance: [isize;TS]=[0;TS];
let mut t=FP12::new_copy(&gf);
// table[m] = gf^(2^m), distance[m] = 2^m: the pseudo-random jump set.
let mut table: [FP12;TS]=[FP12::new();TS];
let mut s:isize=1;
for m in 0..TS {
distance[m]=s;
table[m]=FP12::new_copy(&t);
s*=2;
t.usqr();
}
// Tame kangaroo: start from the identity, take TRAP pseudo-random jumps
// (jump index derived from the low bits of the current value), and record
// the total distance dn travelled.
t.one();
let mut dn:isize=0;
let mut i:usize;
for _ in 0..TRAP {
i=(t.geta().geta().geta().lastbits(20)%(TS as isize)) as usize;
t.mul(&mut table[i]);
dn+=distance[i];
}
// gf becomes the conjugate of the tame endpoint, a second trap target.
gf.copy(&t); gf.conj();
// Wild kangaroo: jump ge with the same rule until it lands on a trap
// (t or gf) or the search window/step budget is exhausted.
let mut steps:usize=0; let mut dm:isize=0;
let mut res:isize=0;
while dm-dn<MAXPIN as isize {
steps+=1;
if steps>4*TRAP {break}
i=(ge.geta().geta().geta().lastbits(20)%(TS as isize)) as usize;
ge.mul(&mut table[i]);
dm+=distance[i];
if ge.equals(&mut t) {
res=dm-dn;
break;
}
if ge.equals(&mut gf) {
res=dn-dm;
break;
}
}
if steps>4*TRAP || dm-dn>=MAXPIN as isize {res=0 } // Trap Failed - probable invalid token
return res;
}
/* Hash the M-Pin transcript: h = H(HID | xCID-or-xID | SEC | y | r | w).
   The second field is xCID when supplied (time permits in use), else xID.
   Returns false if the concatenated fields do not fill the expected
   10*MODBYTES+4 transcript buffer exactly; otherwise hashes it into h
   and returns hashit()'s result. Panics (like the original indexed loops)
   if a field would overflow the buffer. */
pub fn hash_all(sha: usize,hid: &[u8],xid: &[u8],xcid: Option<&[u8]>,sec: &[u8],y: &[u8],r: &[u8],w: &mut[u8],h: &mut[u8]) -> bool {
    const RM: usize = big::MODBYTES as usize;
    const TLEN: usize = 10 * RM + 4;
    let mut t: [u8; TLEN] = [0; TLEN];
    let mut tlen: usize = 0;
    // Prefer the time-permit commitment xCID over xID when present.
    let id_part: &[u8] = match xcid {
        Some(c) => c,
        None => xid,
    };
    // Concatenate all transcript fields; copy_from_slice replaces the
    // original per-byte loops.
    for part in &[hid, id_part, sec, y, r, &w[..]] {
        t[tlen..tlen + part.len()].copy_from_slice(part);
        tlen += part.len();
    }
    if tlen != TLEN {
        return false;
    }
    hashit(sha, 0, &t, h)
}
/* Calculate common key on client side (M-Pin Full).
   Combines the precomputed pairings g1, g2 (see precompute()) as
   g = g1 * g2^pin, raises it to (r + h) mod CURVE_ORDER, and hashes the
   GT trace of the result together with x.wCID into ck.
   Returns 0, or INVALID_POINT if wcid is malformed. */
#[allow(non_snake_case)]
pub fn client_key(sha: usize,g1: &[u8],g2: &[u8],pin: usize,r: &[u8],x: &[u8],h: &[u8],wcid: &[u8],ck: &mut [u8]) -> isize {
let mut g1=FP12::frombytes(&g1);
let mut g2=FP12::frombytes(&g2);
let mut z=BIG::frombytes(&r);
let mut x=BIG::frombytes(&x);
let h=BIG::frombytes(&h);
// W = x.wCID, the Diffie-Hellman-style point component of the key.
let mut W=ECP::frombytes(&wcid);
if W.is_infinity() {return INVALID_POINT}
W=pair::g1mul(&mut W,&mut x);
// let mut f=FP2::new_bigs(&BIG::new_ints(&rom::FRA),&BIG::new_ints(&rom::FRB));
let mut r=BIG::new_ints(&rom::CURVE_ORDER);
// let q=BIG::new_ints(&rom::MODULUS);
// Exponent z = (r + h) mod order.
z.add(&h); //new
z.rmod(&mut r);
// g1 = g1 * g2^pin restores the PIN factor into the pairing value.
g2.pinpow(pin as i32,PBLEN);
g1.mul(&mut g2);
// compow — presumably a compressed GT exponentiation of g1^z; the
// commented block below is the explicit xtr_pow2 alternative it replaces.
let mut c=g1.compow(&z,&mut r);
/*
let mut m=BIG::new_copy(&q);
m.rmod(&mut r);
let mut a=BIG::new_copy(&z);
a.rmod(&mut m);
let mut b=BIG::new_copy(&z);
b.div(&mut m);
let mut c=g1.trace();
g2.copy(&g1);
g2.frob(&mut f);
let cp=g2.trace();
g1.conj();
g2.mul(&mut g1);
let cpm1=g2.trace();
g2.mul(&mut g1);
let cpm2=g2.trace();
c=c.xtr_pow2(&cp,&cpm1,&cpm2,&mut a,&mut b);
*/
// Final key: hash of the GT trace and the point W.
hash(sha,&mut c,&mut W,ck);
return 0
}
/* Calculate common key on server side (M-Pin Full).
   Z = r.A — no time permits involved. Computes
   sk = H(trace(fexp(e(sQ, Z + h.HID))), w.U), where U is xCID when
   supplied, else xID. Returns 0, or INVALID_POINT on any malformed
   point encoding. */
#[allow(non_snake_case)]
pub fn server_key(sha: usize,z: &[u8],sst: &[u8],w: &[u8],h: &[u8],hid: &[u8],xid: &[u8],xcid: Option<&[u8]>,sk: &mut [u8]) -> isize {
// sQ = server secret; R = client's Z = r.A; A = HID point.
let sQ=ECP2::frombytes(&sst);
if sQ.is_infinity() {return INVALID_POINT}
let mut R=ECP::frombytes(&z);
if R.is_infinity() {return INVALID_POINT}
let mut A=ECP::frombytes(&hid);
if A.is_infinity() {return INVALID_POINT}
// U = the client's x-commitment: xCID with time permits, else xID.
let mut U=ECP::new();
if let Some(rxcid)=xcid {
U.copy(&ECP::frombytes(&rxcid));
} else {
U.copy(&ECP::frombytes(&xid));
}
if U.is_infinity() {return INVALID_POINT}
let mut w=BIG::frombytes(&w);
let mut h=BIG::frombytes(&h);
// R = Z + h.A, the point paired with the server secret.
A=pair::g1mul(&mut A,&mut h); // new
R.add(&mut A); R.affine();
// U = w.U, the point component mixed into the final key.
U=pair::g1mul(&mut U,&mut w);
let mut g=pair::ate(&sQ,&R);
g=pair::fexp(&g);
// Key = hash of the GT trace and w.U.
let mut c=g.trace();
hash(sha,&mut c,&mut U,sk);
return 0
}
| 24.810093 | 210 | 0.62049 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.