hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5dfbd50c4e59e107a8fe0d2792c0fa02ab658196 | 5,802 | // Copyright 2014 Tyler Neely
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use rocksdb::{ColumnFamilyOptions, DBOptions, MergeOperands, Writable, DB};
use super::tempdir_with_prefix;
#[test]
pub fn test_column_family() {
    // Integration test for the column-family (CF) lifecycle. The inner scopes
    // are order-dependent: each scope drops its DB handle so the next scope
    // can reopen the same on-disk database under different CF configurations.
    let path = tempdir_with_prefix("_rust_rocksdb_cftest");
    let path_str = path.path().to_str().unwrap();
    // should be able to create column families
    {
        let mut opts = DBOptions::new();
        opts.create_if_missing(true);
        let mut cf_opts = ColumnFamilyOptions::new();
        cf_opts.add_merge_operator("test operator", test_provided_merge);
        let mut db = DB::open_cf(opts, path_str, vec![("default", cf_opts)]).unwrap();
        match db.create_cf("cf1") {
            Ok(_) => println!("cf1 created successfully"),
            Err(e) => {
                panic!("could not create column family: {}", e);
            }
        }
        // NOTE(review): this asserts cf_names() reports families in sorted
        // order ("cf1" before "default") — confirm against the binding's docs.
        assert_eq!(db.cf_names(), vec!["cf1", "default"]);
    }
    // should fail to open db without specifying same column families
    {
        let mut cf_opts = ColumnFamilyOptions::new();
        cf_opts.add_merge_operator("test operator", test_provided_merge);
        // Opening with only "default" must fail because "cf1" exists on disk.
        match DB::open_cf(DBOptions::new(), path_str, vec![("default", cf_opts)]) {
            Ok(_) => panic!(
                "should not have opened DB successfully without \
specifying column
families"
            ),
            Err(e) => assert!(e.starts_with(
                "Invalid argument: You have to open \
all column families."
            )),
        }
    }
    // should properly open db when specifying all column families
    {
        let mut cf_opts = ColumnFamilyOptions::new();
        cf_opts.add_merge_operator("test operator", test_provided_merge);
        match DB::open_cf(DBOptions::new(), path_str, vec![("cf1", cf_opts)]) {
            Ok(_) => println!("successfully opened db with column family"),
            Err(e) => panic!("failed to open db with column family: {}", e),
        }
    }
    // TODO should be able to write, read, merge, batch, and iterate over a cf
    {
        let mut cf_opts = ColumnFamilyOptions::new();
        cf_opts.add_merge_operator("test operator", test_provided_merge);
        let db = match DB::open_cf(DBOptions::new(), path_str, vec![("cf1", cf_opts)]) {
            Ok(db) => {
                println!("successfully opened db with column family");
                db
            }
            Err(e) => panic!("failed to open db with column family: {}", e),
        };
        // Basic put/get round-trip through the CF handle.
        let cf1 = db.cf_handle("cf1").unwrap();
        assert!(db.put_cf(cf1, b"k1", b"v1").is_ok());
        assert!(db.get_cf(cf1, b"k1").unwrap().unwrap().to_utf8().unwrap() == "v1");
        let p = db.put_cf(cf1, b"k1", b"a");
        assert!(p.is_ok());
        /*
        // TODO support family merge operator
        // have not finished yet, following codes won't work.
        db.merge_cf(cf1, b"k1", b"b").unwrap();
        db.merge_cf(cf1, b"k1", b"c").unwrap();
        db.merge_cf(cf1, b"k1", b"d").unwrap();
        db.merge_cf(cf1, b"k1", b"efg").unwrap();
        let m = db.merge_cf(cf1, b"k1", b"h");
        println!("m is {:?}", m);
        // TODO assert!(m.is_ok());
        match db.get(b"k1") {
            Ok(Some(value)) => {
                match value.to_utf8() {
                    Some(v) => println!("retrieved utf8 value: {}", v),
                    None => println!("did not read valid utf-8 out of the db"),
                }
            }
            Err(_) => println!("error reading value"),
            _ => panic!("value not present!"),
        }
        let _ = db.get_cf(cf1, b"k1");
        // TODO assert!(r.unwrap().to_utf8().unwrap() == "abcdefgh");
        assert!(db.delete(b"k1").is_ok());
        assert!(db.get(b"k1").unwrap().is_none());
        */
    }
    // TODO should be able to use writebatch ops with a cf
    {}
    // TODO should be able to iterate over a cf
    {}
    // should be able to drop a cf
    {
        let mut db = DB::open_cf(
            DBOptions::new(),
            path_str,
            vec![("cf1", ColumnFamilyOptions::new())],
        )
        .unwrap();
        match db.drop_cf("cf1") {
            Ok(_) => println!("cf1 successfully dropped."),
            Err(e) => panic!("failed to drop column family: {}", e),
        }
    }
}
/// Merge operator used by the column-family tests: concatenates the existing
/// value (if any) with every pending merge operand, in order.
///
/// Parameters: the key (unused), the current stored value, and the queued
/// operands (`MergeOperands` iterates over each operand's raw bytes).
/// Returns the merged byte vector.
fn test_provided_merge(
    _: &[u8],
    existing_val: Option<&[u8]>,
    operands: &mut MergeOperands,
) -> Vec<u8> {
    // Fix: the original reserved `size_hint().0` — the *number* of operands,
    // not a byte count — which under-sized the buffer. Reserve the existing
    // value's length plus at least one byte per operand instead. (Capacity is
    // a hint only; the merged contents are unchanged.)
    let existing_len = existing_val.map_or(0, <[u8]>::len);
    let mut result: Vec<u8> = Vec::with_capacity(existing_len + operands.size_hint().0);
    if let Some(v) = existing_val {
        // Copy the current value first so operands append after it.
        result.extend_from_slice(v);
    }
    for op in operands {
        result.extend_from_slice(op);
    }
    result
}
#[test]
pub fn test_column_family_option_use_doubly_skiplist() {
    // Checks the default memtable factory name, then verifies that
    // `set_doubly_skiplist` switches the factory to the doubly-linked variant.
    let cf_opts = ColumnFamilyOptions::new();
    let memtable_name = cf_opts.get_memtable_factory_name();
    assert!(memtable_name.is_some());
    // The stock RocksDB default memtable is the skiplist factory.
    assert_eq!("SkipListFactory", memtable_name.unwrap());
    // NOTE(review): `cf_opts` is not `mut`, so `set_doubly_skiplist` must take
    // `&self` and mutate through the underlying FFI handle — confirm.
    cf_opts.set_doubly_skiplist();
    let memtable_name = cf_opts.get_memtable_factory_name();
    assert_eq!("DoublySkipListFactory", memtable_name.unwrap());
}
| 35.814815 | 88 | 0.564978 |
64d8058b15e2316be609cf80c54fb18a23c14451 | 407 | #![feature(rustc_private)]
#[macro_use]
mod common;
use common::*;
test_verify_one_file! {
    // TODO SOUNDNESS this code verifies, but panics in debug mode
    // introduce under/overflow checks
    // `a - 1` with `a == 0u64` underflows at runtime, so the verifier is
    // expected to reject this program (`=> Err(_)`). The test is `#[ignore]`d
    // until the soundness gap described above is fixed.
    #[ignore] #[test] verified_code_should_not_panic code! {
        fn do_not_panic() {
            let mut a: u64 = 0;
            a = a - 1; // TODO this should produce an error
        }
    } => Err(_)
}
| 25.4375 | 66 | 0.609337 |
91ddf89e491f38a2580d9322b341310ee20eaa62 | 394 | #![no_std]
#[macro_use] extern crate crucible;
extern crate std;
use std::io::{Read, Write, Cursor};
use std::vec::Vec;
#[cfg_attr(crux, crux_test)]
pub fn f() {
let mut buf = Vec::new();
buf.write(&[1, 2, 3]);
let mut curs = Cursor::new(buf);
let mut rbuf = [0; 2];
curs.read(&mut rbuf).unwrap();
crucible_assert!(rbuf[0] == 1);
crucible_assert!(rbuf[1] == 2);
}
| 21.888889 | 36 | 0.591371 |
e6a952d61388e7bb745c9bebc4b04e255126b4d8 | 63,861 | #![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::models;
/// Handle bundling the connection settings (endpoint, credential, OAuth
/// scopes) with the policy pipeline that executes requests.
#[derive(Clone)]
pub struct Client {
    // Base URL of the service.
    endpoint: String,
    // Token source used to authorize every request.
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    // OAuth scopes requested when fetching access tokens.
    scopes: Vec<String>,
    // Pipeline through which all requests are sent.
    pipeline: azure_core::Pipeline,
}
/// Fluent builder for [`Client`]; only the credential is mandatory —
/// endpoint and scopes fall back to defaults in `build`.
#[derive(Clone)]
pub struct ClientBuilder {
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    endpoint: Option<String>,
    scopes: Option<Vec<String>>,
}
/// Default service endpoint: the Azure public-cloud resource-manager URL.
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
    /// Start a builder from the one mandatory piece of state: the credential.
    /// Endpoint and scopes stay unset until overridden or defaulted in `build`.
    pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
        Self { credential, endpoint: None, scopes: None }
    }
    /// Override the service endpoint.
    pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.endpoint = Some(endpoint.into());
        self
    }
    /// Override the OAuth scopes requested for access tokens.
    pub fn scopes(mut self, scopes: &[&str]) -> Self {
        let owned: Vec<String> = scopes.iter().map(|scope| (*scope).to_owned()).collect();
        self.scopes = Some(owned);
        self
    }
    /// Finalize the builder, filling in defaults: the public-cloud endpoint
    /// and a single "{endpoint}/" scope when none were supplied.
    pub fn build(self) -> Client {
        let endpoint = match self.endpoint {
            Some(e) => e,
            None => DEFAULT_ENDPOINT.to_owned(),
        };
        let scopes = match self.scopes {
            Some(s) => s,
            None => vec![format!("{}/", endpoint)],
        };
        Client::new(endpoint, self.credential, scopes)
    }
}
impl Client {
    /// Assemble a client from its parts, wiring up a pipeline with default
    /// options and no extra per-call or per-retry policies.
    pub fn new(
        endpoint: impl Into<String>,
        credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
        scopes: Vec<String>,
    ) -> Self {
        let endpoint = endpoint.into();
        let pipeline = azure_core::Pipeline::new(
            option_env!("CARGO_PKG_NAME"),
            option_env!("CARGO_PKG_VERSION"),
            azure_core::ClientOptions::default(),
            Vec::new(),
            Vec::new(),
        );
        Self {
            endpoint,
            credential,
            scopes,
            pipeline,
        }
    }
    /// Base URL requests are built against.
    pub(crate) fn endpoint(&self) -> &str {
        &self.endpoint
    }
    /// Token source used to authorize requests.
    pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
        self.credential.as_ref()
    }
    /// Borrowed view of the configured OAuth scopes.
    pub(crate) fn scopes(&self) -> Vec<&str> {
        self.scopes.iter().map(|scope| scope.as_str()).collect()
    }
    /// Push one request through the pipeline with a fresh default context.
    pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
        let mut ctx = azure_core::Context::default();
        let mut req = request.into();
        self.pipeline.send(&mut ctx, &mut req).await
    }
    /// Entry point for Spark batch-job operations.
    pub fn spark_batch(&self) -> spark_batch::Client {
        spark_batch::Client(self.clone())
    }
    /// Entry point for Spark session operations.
    pub fn spark_session(&self) -> spark_session::Client {
        spark_session::Client(self.clone())
    }
}
/// Aggregate error type covering every operation in this generated client;
/// each variant transparently wraps one per-operation error enum via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    SparkBatch_GetSparkBatchJobs(#[from] spark_batch::get_spark_batch_jobs::Error),
    #[error(transparent)]
    SparkBatch_CreateSparkBatchJob(#[from] spark_batch::create_spark_batch_job::Error),
    #[error(transparent)]
    SparkBatch_GetSparkBatchJob(#[from] spark_batch::get_spark_batch_job::Error),
    #[error(transparent)]
    SparkBatch_CancelSparkBatchJob(#[from] spark_batch::cancel_spark_batch_job::Error),
    #[error(transparent)]
    SparkSession_GetSparkSessions(#[from] spark_session::get_spark_sessions::Error),
    #[error(transparent)]
    SparkSession_CreateSparkSession(#[from] spark_session::create_spark_session::Error),
    #[error(transparent)]
    SparkSession_GetSparkSession(#[from] spark_session::get_spark_session::Error),
    #[error(transparent)]
    SparkSession_CancelSparkSession(#[from] spark_session::cancel_spark_session::Error),
    #[error(transparent)]
    SparkSession_ResetSparkSessionTimeout(#[from] spark_session::reset_spark_session_timeout::Error),
    #[error(transparent)]
    SparkSession_GetSparkStatements(#[from] spark_session::get_spark_statements::Error),
    #[error(transparent)]
    SparkSession_CreateSparkStatement(#[from] spark_session::create_spark_statement::Error),
    #[error(transparent)]
    SparkSession_GetSparkStatement(#[from] spark_session::get_spark_statement::Error),
    #[error(transparent)]
    SparkSession_CancelSparkStatement(#[from] spark_session::cancel_spark_statement::Error),
}
pub mod spark_batch {
use super::models;
    /// Batch-job operations, scoped to a clone of the parent client.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// List batch jobs in a Spark pool; paging (`from`/`size`) and
        /// `detailed` are optional and set on the returned builder.
        pub fn get_spark_batch_jobs(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
        ) -> get_spark_batch_jobs::Builder {
            get_spark_batch_jobs::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                from: None,
                size: None,
                detailed: None,
            }
        }
        /// Submit a new batch job described by `spark_batch_job_options`.
        pub fn create_spark_batch_job(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            spark_batch_job_options: impl Into<models::SparkBatchJobOptions>,
        ) -> create_spark_batch_job::Builder {
            create_spark_batch_job::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                spark_batch_job_options: spark_batch_job_options.into(),
                detailed: None,
            }
        }
        /// Fetch a single batch job by id.
        pub fn get_spark_batch_job(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            batch_id: i32,
        ) -> get_spark_batch_job::Builder {
            get_spark_batch_job::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                batch_id,
                detailed: None,
            }
        }
        /// Cancel (delete) a batch job by id.
        pub fn cancel_spark_batch_job(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            batch_id: i32,
        ) -> cancel_spark_batch_job::Builder {
            cancel_spark_batch_job::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                batch_id,
            }
        }
    }
    /// Operation module: `GET .../sparkPools/{pool}/batches` — list batch jobs.
    pub mod get_spark_batch_jobs {
        use super::models;
        /// Everything that can go wrong, from URL construction through
        /// response deserialization.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; optional query parameters are set fluently before
        /// calling [`Builder::into_future`].
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) livy_api_version: String,
            pub(crate) spark_pool_name: String,
            pub(crate) from: Option<i32>,
            pub(crate) size: Option<i32>,
            pub(crate) detailed: Option<bool>,
        }
        impl Builder {
            /// Paging offset: index of the first job to return.
            pub fn from(mut self, from: i32) -> Self {
                self.from = Some(from);
                self
            }
            /// Paging limit: maximum number of jobs to return.
            pub fn size(mut self, size: i32) -> Self {
                self.size = Some(size);
                self
            }
            /// Whether to request detailed job information.
            pub fn detailed(mut self, detailed: bool) -> Self {
                self.detailed = Some(detailed);
                self
            }
            /// Build, authorize, and send the GET request; only HTTP 200 is
            /// treated as success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkBatchJobCollection, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/livyApi/versions/{}/sparkPools/{}/batches",
                        self.client.endpoint(),
                        &self.livy_api_version,
                        &self.spark_pool_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    // Optional query parameters are appended only when set.
                    if let Some(from) = &self.from {
                        url.query_pairs_mut().append_pair("from", &from.to_string());
                    }
                    if let Some(size) = &self.size {
                        url.query_pairs_mut().append_pair("size", &size.to_string());
                    }
                    if let Some(detailed) = &self.detailed {
                        url.query_pairs_mut().append_pair("detailed", &detailed.to_string());
                    }
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::SparkBatchJobCollection =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Any non-200 status is surfaced with the raw body attached.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Operation module: `POST .../sparkPools/{pool}/batches` — submit a job.
    pub mod create_spark_batch_job {
        use super::models;
        /// Everything that can go wrong, from URL construction through
        /// response deserialization.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; the job options are mandatory, `detailed` optional.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) livy_api_version: String,
            pub(crate) spark_pool_name: String,
            pub(crate) spark_batch_job_options: models::SparkBatchJobOptions,
            pub(crate) detailed: Option<bool>,
        }
        impl Builder {
            /// Whether to request detailed job information in the response.
            pub fn detailed(mut self, detailed: bool) -> Self {
                self.detailed = Some(detailed);
                self
            }
            /// Build, authorize, and send the POST request with a JSON body;
            /// only HTTP 200 is treated as success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkBatchJob, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/livyApi/versions/{}/sparkPools/{}/batches",
                        self.client.endpoint(),
                        &self.livy_api_version,
                        &self.spark_pool_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    if let Some(detailed) = &self.detailed {
                        url.query_pairs_mut().append_pair("detailed", &detailed.to_string());
                    }
                    // JSON-encode the job options as the request body.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.spark_batch_job_options).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::SparkBatchJob =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Any non-200 status is surfaced with the raw body attached.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Operation module: `GET .../batches/{batch_id}` — fetch one batch job.
    pub mod get_spark_batch_job {
        use super::models;
        /// Everything that can go wrong, from URL construction through
        /// response deserialization.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; `batch_id` is mandatory, `detailed` optional.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) livy_api_version: String,
            pub(crate) spark_pool_name: String,
            pub(crate) batch_id: i32,
            pub(crate) detailed: Option<bool>,
        }
        impl Builder {
            /// Whether to request detailed job information.
            pub fn detailed(mut self, detailed: bool) -> Self {
                self.detailed = Some(detailed);
                self
            }
            /// Build, authorize, and send the GET request; only HTTP 200 is
            /// treated as success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkBatchJob, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/livyApi/versions/{}/sparkPools/{}/batches/{}",
                        self.client.endpoint(),
                        &self.livy_api_version,
                        &self.spark_pool_name,
                        &self.batch_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    if let Some(detailed) = &self.detailed {
                        url.query_pairs_mut().append_pair("detailed", &detailed.to_string());
                    }
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::SparkBatchJob =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Any non-200 status is surfaced with the raw body attached.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Operation module: `DELETE .../batches/{batch_id}` — cancel a batch job.
    /// Returns `()` on success; the response body is ignored on HTTP 200.
    pub mod cancel_spark_batch_job {
        use super::models;
        /// Everything that can go wrong while issuing the cancellation.
        /// (The Serialize/Deserialize variants are generated uniformly and
        /// unused by this body-less DELETE operation.)
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; all fields are mandatory, no optional parameters.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) livy_api_version: String,
            pub(crate) spark_pool_name: String,
            pub(crate) batch_id: i32,
        }
        impl Builder {
            /// Build, authorize, and send the DELETE request; only HTTP 200
            /// is treated as success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/livyApi/versions/{}/sparkPools/{}/batches/{}",
                        self.client.endpoint(),
                        &self.livy_api_version,
                        &self.spark_pool_name,
                        &self.batch_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::DELETE);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => Ok(()),
                        status_code => {
                            // Any non-200 status is surfaced with the raw body attached.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
}
pub mod spark_session {
use super::models;
    /// Spark-session operations, scoped to a clone of the parent client.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// List sessions in a Spark pool; paging (`from`/`size`) and
        /// `detailed` are optional and set on the returned builder.
        pub fn get_spark_sessions(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
        ) -> get_spark_sessions::Builder {
            get_spark_sessions::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                from: None,
                size: None,
                detailed: None,
            }
        }
        /// Create a new session described by `spark_session_options`.
        pub fn create_spark_session(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            spark_session_options: impl Into<models::SparkSessionOptions>,
        ) -> create_spark_session::Builder {
            create_spark_session::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                spark_session_options: spark_session_options.into(),
                detailed: None,
            }
        }
        /// Fetch a single session by id.
        pub fn get_spark_session(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
        ) -> get_spark_session::Builder {
            get_spark_session::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
                detailed: None,
            }
        }
        /// Cancel (delete) a session by id.
        pub fn cancel_spark_session(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
        ) -> cancel_spark_session::Builder {
            cancel_spark_session::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
            }
        }
        /// Reset the idle timeout of a running session.
        pub fn reset_spark_session_timeout(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
        ) -> reset_spark_session_timeout::Builder {
            reset_spark_session_timeout::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
            }
        }
        /// List the statements submitted to a session.
        pub fn get_spark_statements(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
        ) -> get_spark_statements::Builder {
            get_spark_statements::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
            }
        }
        /// Submit a new statement to a session.
        pub fn create_spark_statement(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
            spark_statement_options: impl Into<models::SparkStatementOptions>,
        ) -> create_spark_statement::Builder {
            create_spark_statement::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
                spark_statement_options: spark_statement_options.into(),
            }
        }
        /// Fetch a single statement by id.
        pub fn get_spark_statement(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
            statement_id: i32,
        ) -> get_spark_statement::Builder {
            get_spark_statement::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
                statement_id,
            }
        }
        /// Cancel a running statement by id.
        pub fn cancel_spark_statement(
            &self,
            livy_api_version: impl Into<String>,
            spark_pool_name: impl Into<String>,
            session_id: i32,
            statement_id: i32,
        ) -> cancel_spark_statement::Builder {
            cancel_spark_statement::Builder {
                client: self.0.clone(),
                livy_api_version: livy_api_version.into(),
                spark_pool_name: spark_pool_name.into(),
                session_id,
                statement_id,
            }
        }
    }
    /// Operation module: `GET .../sparkPools/{pool}/sessions` — list sessions.
    pub mod get_spark_sessions {
        use super::models;
        /// Everything that can go wrong, from URL construction through
        /// response deserialization.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; optional query parameters are set fluently before
        /// calling [`Builder::into_future`].
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) livy_api_version: String,
            pub(crate) spark_pool_name: String,
            pub(crate) from: Option<i32>,
            pub(crate) size: Option<i32>,
            pub(crate) detailed: Option<bool>,
        }
        impl Builder {
            /// Paging offset: index of the first session to return.
            pub fn from(mut self, from: i32) -> Self {
                self.from = Some(from);
                self
            }
            /// Paging limit: maximum number of sessions to return.
            pub fn size(mut self, size: i32) -> Self {
                self.size = Some(size);
                self
            }
            /// Whether to request detailed session information.
            pub fn detailed(mut self, detailed: bool) -> Self {
                self.detailed = Some(detailed);
                self
            }
            /// Build, authorize, and send the GET request; only HTTP 200 is
            /// treated as success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkSessionCollection, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/livyApi/versions/{}/sparkPools/{}/sessions",
                        self.client.endpoint(),
                        &self.livy_api_version,
                        &self.spark_pool_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    // Optional query parameters are appended only when set.
                    if let Some(from) = &self.from {
                        url.query_pairs_mut().append_pair("from", &from.to_string());
                    }
                    if let Some(size) = &self.size {
                        url.query_pairs_mut().append_pair("size", &size.to_string());
                    }
                    if let Some(detailed) = &self.detailed {
                        url.query_pairs_mut().append_pair("detailed", &detailed.to_string());
                    }
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::SparkSessionCollection =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Any non-200 status is surfaced with the raw body attached.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Operation module: `POST .../sparkPools/{pool}/sessions` — create a session.
    pub mod create_spark_session {
        use super::models;
        /// Everything that can go wrong, from URL construction through
        /// response deserialization.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::StreamError),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; the session options are mandatory, `detailed` optional.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) livy_api_version: String,
            pub(crate) spark_pool_name: String,
            pub(crate) spark_session_options: models::SparkSessionOptions,
            pub(crate) detailed: Option<bool>,
        }
        impl Builder {
            /// Whether to request detailed session information in the response.
            pub fn detailed(mut self, detailed: bool) -> Self {
                self.detailed = Some(detailed);
                self
            }
            /// Build, authorize, and send the POST request with a JSON body;
            /// only HTTP 200 is treated as success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkSession, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/livyApi/versions/{}/sparkPools/{}/sessions",
                        self.client.endpoint(),
                        &self.livy_api_version,
                        &self.spark_pool_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    if let Some(detailed) = &self.detailed {
                        url.query_pairs_mut().append_pair("detailed", &detailed.to_string());
                    }
                    // JSON-encode the session options as the request body.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.spark_session_options).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::SparkSession =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Any non-200 status is surfaced with the raw body attached.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod get_spark_session {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
pub(crate) detailed: Option<bool>,
}
impl Builder {
pub fn detailed(mut self, detailed: bool) -> Self {
self.detailed = Some(detailed);
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkSession, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
if let Some(detailed) = &self.detailed {
url.query_pairs_mut().append_pair("detailed", &detailed.to_string());
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SparkSession =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod cancel_spark_session {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod reset_spark_session_timeout {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}/reset-timeout",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod get_spark_statements {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkStatementCollection, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}/statements",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SparkStatementCollection =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod create_spark_statement {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
pub(crate) spark_statement_options: models::SparkStatementOptions,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkStatement, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}/statements",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.spark_statement_options).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SparkStatement =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod get_spark_statement {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
pub(crate) statement_id: i32,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkStatement, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}/statements/{}",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id,
&self.statement_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SparkStatement =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod cancel_spark_statement {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::StreamError),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) livy_api_version: String,
pub(crate) spark_pool_name: String,
pub(crate) session_id: i32,
pub(crate) statement_id: i32,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::SparkStatementCancellationResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/livyApi/versions/{}/sparkPools/{}/sessions/{}/statements/{}/cancel",
self.client.endpoint(),
&self.livy_api_version,
&self.spark_pool_name,
&self.session_id,
&self.statement_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SparkStatementCancellationResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
| 49.086088 | 139 | 0.516544 |
23b63bc26657c6519e0ed39b295ec09f0d33e019 | 49,187 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::def;
use middle::region;
use middle::subst::{VecPerParamSpace,Subst};
use middle::subst;
use middle::ty::{BoundRegion, BrAnon, BrNamed};
use middle::ty::{ReEarlyBound, BrFresh, ctxt};
use middle::ty::{ReFree, ReScope, ReInfer, ReStatic, Region, ReEmpty};
use middle::ty::{ReSkolemized, ReVar, BrEnv};
use middle::ty::{mt, Ty, ParamTy};
use middle::ty::{ty_bool, ty_char, ty_struct, ty_enum};
use middle::ty::{ty_err, ty_str, ty_vec, ty_float, ty_bare_fn};
use middle::ty::{ty_param, ty_ptr, ty_rptr, ty_tup, ty_open};
use middle::ty::{ty_closure};
use middle::ty::{ty_uniq, ty_trait, ty_int, ty_uint, ty_infer};
use middle::ty;
use middle::ty_fold::TypeFoldable;
use std::collections::HashMap;
use std::collections::hash_state::HashState;
use std::hash::Hash;
use std::rc::Rc;
use syntax::abi;
use syntax::ast_map;
use syntax::codemap::{Span, Pos};
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::{ast, ast_util};
use syntax::owned_slice::OwnedSlice;
/// Produces a string suitable for debugging output.
///
/// Implementations receive the type context so they can resolve ids,
/// names and paths while formatting.
pub trait Repr<'tcx> {
    fn repr(&self, tcx: &ctxt<'tcx>) -> String;
}
/// Produces a string suitable for showing to the user.
///
/// Every `UserString` type is also `Repr`; the user-facing form is
/// typically shorter than the debugging form.
pub trait UserString<'tcx> : Repr<'tcx> {
    fn user_string(&self, tcx: &ctxt<'tcx>) -> String;
}
/// Emits a compiler note of the form `{prefix}{description}{suffix}` for
/// `region`, attached to the region's span when one is known, and returns
/// that span (if any) to the caller.
pub fn note_and_explain_region(cx: &ctxt,
                               prefix: &str,
                               region: ty::Region,
                               suffix: &str) -> Option<Span> {
    let (description, opt_span) = explain_region_and_span(cx, region);
    let message = format!("{}{}{}", prefix, description, suffix);
    match opt_span {
        Some(span) => {
            // A span is available: anchor the note to it.
            cx.sess.span_note(span, &message);
            Some(span)
        }
        None => {
            // No span: emit a free-floating note.
            cx.sess.note(&message);
            None
        }
    }
}
/// When a free region is associated with `item`, how should we describe the item in the error
/// message.
fn item_scope_tag(item: &ast::Item) -> &'static str {
    match item.node {
        ast::ItemImpl(..) => "impl",
        ast::ItemStruct(..) => "struct",
        ast::ItemEnum(..) => "enum",
        ast::ItemTrait(..) => "trait",
        ast::ItemFn(..) => "function body",
        // Any other item kind (mod, use, const, ...) gets a generic label.
        _ => "item"
    }
}
/// Builds a human-readable description of `region` together with the span
/// it covers, when one can be recovered from the AST map.
pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
                            -> (String, Option<Span>) {
    return match region {
      ReScope(scope) => {
        // Declared before the match below so the formatted string outlives
        // the `&str` borrows taken from it in `scope_decorated_tag`.
        let new_string;
        let on_unknown_scope = || {
          (format!("unknown scope: {:?}.  Please report a bug.", scope), None)
        };
        let span = match scope.span(&cx.map) {
          Some(s) => s,
          None => return on_unknown_scope(),
        };
        // Classify the AST node the scope belongs to, for a friendlier label.
        let tag = match cx.map.find(scope.node_id()) {
          Some(ast_map::NodeBlock(_)) => "block",
          Some(ast_map::NodeExpr(expr)) => match expr.node {
              ast::ExprCall(..) => "call",
              ast::ExprMethodCall(..) => "method call",
              ast::ExprMatch(_, _, ast::MatchSource::IfLetDesugar { .. }) => "if let",
              ast::ExprMatch(_, _, ast::MatchSource::WhileLetDesugar) =>  "while let",
              ast::ExprMatch(_, _, ast::MatchSource::ForLoopDesugar) =>  "for",
              ast::ExprMatch(..) => "match",
              _ => "expression",
          },
          Some(ast_map::NodeStmt(_)) => "statement",
          Some(ast_map::NodeItem(it)) => item_scope_tag(&*it),
          Some(_) | None => {
            // this really should not happen
            return on_unknown_scope();
          }
        };
        let scope_decorated_tag = match scope {
            region::CodeExtent::Misc(_) => tag,
            region::CodeExtent::DestructionScope(_) => {
                new_string = format!("destruction scope surrounding {}", tag);
                new_string.as_slice()
            }
            region::CodeExtent::Remainder(r) => {
                new_string = format!("block suffix following statement {}",
                                     r.first_statement_index);
                &*new_string
            }
        };
        explain_span(cx, scope_decorated_tag, span)
      }

      ReFree(ref fr) => {
        let prefix = match fr.bound_region {
          BrAnon(idx) => {
              format!("the anonymous lifetime #{} defined on", idx + 1)
          }
          BrFresh(_) => "an anonymous lifetime defined on".to_string(),
          _ => {
              format!("the lifetime {} as defined on",
                      bound_region_ptr_to_string(cx, fr.bound_region))
          }
        };

        match cx.map.find(fr.scope.node_id) {
          Some(ast_map::NodeBlock(ref blk)) => {
              let (msg, opt_span) = explain_span(cx, "block", blk.span);
              (format!("{} {}", prefix, msg), opt_span)
          }
          Some(ast_map::NodeItem(it)) => {
              let tag = item_scope_tag(&*it);
              let (msg, opt_span) = explain_span(cx, tag, it.span);
              (format!("{} {}", prefix, msg), opt_span)
          }
          Some(_) | None => {
              // this really should not happen
              (format!("{} unknown free region bounded by scope {:?}", prefix, fr.scope), None)
          }
        }
      }

      ReStatic => { ("the static lifetime".to_string(), None) }

      ReEmpty => { ("the empty lifetime".to_string(), None) }

      ReEarlyBound(_, _, _, name) => {
        (format!("{}", token::get_name(name)), None)
      }

      // I believe these cases should not occur (except when debugging,
      // perhaps)
      ty::ReInfer(_) | ty::ReLateBound(..) => {
        (format!("lifetime {:?}", region), None)
      }
    };

    // Formats "the <heading> at <line>:<col>" for a resolved span.
    fn explain_span(cx: &ctxt, heading: &str, span: Span)
                    -> (String, Option<Span>) {
        let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo);
        (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()),
         Some(span))
    }
}
/// Convenience wrapper around `bound_region_to_string` with an empty prefix
/// and no trailing space.
pub fn bound_region_ptr_to_string(cx: &ctxt, br: BoundRegion) -> String {
    bound_region_to_string(cx, "", false, br)
}
/// Renders a bound region as `{prefix}{name}{space}`. Named regions print
/// their name; anonymous/fresh/env regions print only the prefix. In
/// `-Z verbose` mode the full debug `repr` is used instead.
pub fn bound_region_to_string(cx: &ctxt,
                              prefix: &str, space: bool,
                              br: BoundRegion) -> String {
    let suffix = if space { " " } else { "" };

    if cx.sess.verbose() {
        format!("{}{}{}", prefix, br.repr(cx), suffix)
    } else {
        match br {
            BrNamed(_, name) => {
                format!("{}{}{}", prefix, token::get_name(name), suffix)
            }
            BrAnon(_) | BrFresh(_) | BrEnv => prefix.to_string(),
        }
    }
}
// In general, if you are giving a region error message,
// you should use `explain_region()` or, better yet,
// `note_and_explain_region()`
/// Renders `region` with a `&` prefix and trailing space, as it would
/// appear before a reference type.
pub fn region_ptr_to_string(cx: &ctxt, region: Region) -> String {
    region_to_string(cx, "&", true, region)
}
/// Renders `region` as a short string; falls back to the debug `repr` in
/// `-Z verbose` mode.
pub fn region_to_string(cx: &ctxt, prefix: &str, space: bool, region: Region) -> String {
    let space_str = if space { " " } else { "" };

    if cx.sess.verbose() {
        return format!("{}{}{}", prefix, region.repr(cx), space_str)
    }

    // These printouts are concise.  They do not contain all the information
    // the user might want to diagnose an error, but there is basically no way
    // to fit that into a short string.  Hence the recommendation to use
    // `explain_region()` or `note_and_explain_region()`.
    match region {
        ty::ReScope(_) => prefix.to_string(),
        ty::ReEarlyBound(_, _, _, name) => {
            token::get_name(name).to_string()
        }
        ty::ReLateBound(_, br) => bound_region_to_string(cx, prefix, space, br),
        ty::ReFree(ref fr) => bound_region_to_string(cx, prefix, space, fr.bound_region),
        ty::ReInfer(ReSkolemized(_, br)) => {
            bound_region_to_string(cx, prefix, space, br)
        }
        // Unresolved inference variables print as just the prefix.
        ty::ReInfer(ReVar(_)) => prefix.to_string(),
        ty::ReStatic => format!("{}'static{}", prefix, space_str),
        ty::ReEmpty => format!("{}'<empty>{}", prefix, space_str),
    }
}
/// Returns `"mut "` for mutable bindings and `""` for immutable ones.
pub fn mutability_to_string(m: ast::Mutability) -> String {
    let text = match m {
        ast::MutMutable => "mut ",
        ast::MutImmutable => "",
    };
    text.to_string()
}
/// Formats a `mt` (mutability + type pair), e.g. `"mut i32"` or `"i32"`.
pub fn mt_to_string<'tcx>(cx: &ctxt<'tcx>, m: &mt<'tcx>) -> String {
    format!("{}{}",
            mutability_to_string(m.mutbl),
            ty_to_string(cx, m.ty))
}
/// Maps each element of `ts` through `f` and joins the results as
/// `"[a, b, c]"`.
pub fn vec_map_to_string<T, F>(ts: &[T], f: F) -> String where
    F: FnMut(&T) -> String,
{
    let rendered: Vec<String> = ts.iter().map(f).collect();
    format!("[{}]", rendered.connect(", "))
}
/// Renders a type as user-facing text by dispatching on its structural
/// variant (`sty`). The nested helpers handle the composite cases: bare
/// fn types, closures, fn signatures, and inference variables.
pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
    // Formats `unsafe extern "abi" fn name(args) -> ret {path}`; each part
    // is emitted only when present/non-default.
    fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>,
                               opt_def_id: Option<ast::DefId>,
                               unsafety: ast::Unsafety,
                               abi: abi::Abi,
                               ident: Option<ast::Ident>,
                               sig: &ty::PolyFnSig<'tcx>)
                               -> String {
        let mut s = String::new();
        match unsafety {
            ast::Unsafety::Normal => {}
            ast::Unsafety::Unsafe => {
                s.push_str(&unsafety.to_string());
                s.push(' ');
            }
        };

        if abi != abi::Rust {
            s.push_str(&format!("extern {} ", abi.to_string()));
        };

        s.push_str("fn");

        match ident {
            Some(i) => {
                s.push(' ');
                s.push_str(&token::get_ident(i));
            }
            _ => { }
        }

        push_sig_to_string(cx, &mut s, '(', ')', sig);

        match opt_def_id {
            Some(def_id) => {
                s.push_str(" {");
                let path_str = ty::item_path_str(cx, def_id);
                s.push_str(&path_str[..]);
                s.push_str("}");
            }
            None => { }
        }

        s
    }

    // Formats a closure type as `[closure(args) -> ret]`.
    fn closure_to_string<'tcx>(cx: &ctxt<'tcx>, cty: &ty::ClosureTy<'tcx>) -> String {
        let mut s = String::new();
        s.push_str("[closure");
        push_sig_to_string(cx, &mut s, '(', ')', &cty.sig);
        s.push(']');
        s
    }

    // Appends `(a, b, ...) -> ret` (using the given brackets) to `s`;
    // the `-> ret` suffix is omitted for `()` returns.
    fn push_sig_to_string<'tcx>(cx: &ctxt<'tcx>,
                                s: &mut String,
                                bra: char,
                                ket: char,
                                sig: &ty::PolyFnSig<'tcx>) {
        s.push(bra);
        let strs = sig.0.inputs
            .iter()
            .map(|a| ty_to_string(cx, *a))
            .collect::<Vec<_>>();
        s.push_str(&strs.connect(", "));
        if sig.0.variadic {
            s.push_str(", ...");
        }
        s.push(ket);

        match sig.0.output {
            ty::FnConverging(t) => {
                if !ty::type_is_nil(t) {
                    s.push_str(" -> ");
                    s.push_str(&ty_to_string(cx, t));
                }
            }
            ty::FnDiverging => {
                s.push_str(" -> !");
            }
        }
    }

    // Inference variables print as `_` unless -Z verbose asks for the ids.
    fn infer_ty_to_string(cx: &ctxt, ty: ty::InferTy) -> String {
        let print_var_ids = cx.sess.verbose();
        match ty {
            ty::TyVar(ref vid) if print_var_ids => vid.repr(cx),
            ty::IntVar(ref vid) if print_var_ids => vid.repr(cx),
            ty::FloatVar(ref vid) if print_var_ids => vid.repr(cx),
            ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => format!("_"),
            ty::FreshTy(v) => format!("FreshTy({})", v),
            ty::FreshIntTy(v) => format!("FreshIntTy({})", v)
        }
    }

    // pretty print the structural type representation:
    match typ.sty {
        ty_bool => "bool".to_string(),
        ty_char => "char".to_string(),
        ty_int(t) => ast_util::int_ty_to_string(t, None).to_string(),
        ty_uint(t) => ast_util::uint_ty_to_string(t, None).to_string(),
        ty_float(t) => ast_util::float_ty_to_string(t).to_string(),
        ty_uniq(typ) => format!("Box<{}>", ty_to_string(cx, typ)),
        ty_ptr(ref tm) => {
            format!("*{} {}", match tm.mutbl {
                ast::MutMutable => "mut",
                ast::MutImmutable => "const",
            }, ty_to_string(cx, tm.ty))
        }
        ty_rptr(r, ref tm) => {
            let mut buf = region_ptr_to_string(cx, *r);
            buf.push_str(&mt_to_string(cx, tm));
            buf
        }
        ty_open(typ) =>
            format!("opened<{}>", ty_to_string(cx, typ)),
        ty_tup(ref elems) => {
            let strs = elems
                .iter()
                .map(|elem| ty_to_string(cx, *elem))
                .collect::<Vec<_>>();
            // A one-element tuple needs the trailing comma: `(T,)`.
            match &strs[..] {
                [ref string] => format!("({},)", string),
                strs => format!("({})", strs.connect(", "))
            }
        }
        ty_bare_fn(opt_def_id, ref f) => {
            bare_fn_to_string(cx, opt_def_id, f.unsafety, f.abi, None, &f.sig)
        }
        ty_infer(infer_ty) => infer_ty_to_string(cx, infer_ty),
        ty_err => "[type error]".to_string(),
        ty_param(ref param_ty) => {
            if cx.sess.verbose() {
                param_ty.repr(cx)
            } else {
                param_ty.user_string(cx)
            }
        }
        ty_enum(did, substs) | ty_struct(did, substs) => {
            let base = ty::item_path_str(cx, did);
            parameterized(cx, &base, substs, did, &[],
                          || ty::lookup_item_type(cx, did).generics)
        }
        ty_trait(ref data) => {
            data.user_string(cx)
        }
        ty::ty_projection(ref data) => {
            format!("<{} as {}>::{}",
                    data.trait_ref.self_ty().user_string(cx),
                    data.trait_ref.user_string(cx),
                    data.item_name.user_string(cx))
        }
        ty_str => "str".to_string(),
        ty_closure(ref did, _, substs) => {
            let closure_tys = cx.closure_tys.borrow();
            closure_tys.get(did).map(|closure_type| {
                closure_to_string(cx, &closure_type.subst(cx, substs))
            }).unwrap_or_else(|| {
                // Cross-crate closures have no recorded type; print a
                // placeholder (with a span for local ones).
                if did.krate == ast::LOCAL_CRATE {
                    let span = cx.map.span(did.node);
                    format!("[closure {}]", span.repr(cx))
                } else {
                    format!("[closure]")
                }
            })
        }
        ty_vec(t, sz) => {
            let inner_str = ty_to_string(cx, t);
            match sz {
                Some(n) => format!("[{}; {}]", inner_str, n),
                None => format!("[{}]", inner_str),
            }
        }
    }
}
/// Returns the surface-syntax spelling of an explicit-self category, as it
/// would be written in a method signature.
pub fn explicit_self_category_to_str(category: &ty::ExplicitSelfCategory)
                                     -> &'static str {
    match *category {
        ty::StaticExplicitSelfCategory => "static",
        ty::ByValueExplicitSelfCategory => "self",
        ty::ByReferenceExplicitSelfCategory(_, ast::MutMutable) => {
            "&mut self"
        }
        ty::ByReferenceExplicitSelfCategory(_, ast::MutImmutable) => "&self",
        ty::ByBoxExplicitSelfCategory => "Box<self>",
    }
}
/// Renders `base` with its region/type parameters and associated-type
/// projections, e.g. `Foo<'a, T, Item=U>`. Fn-trait sugar (`Fn(A) -> B`)
/// is used when `did` is one of the lang-item fn traits. `get_generics`
/// is only invoked in the non-verbose path (see comment below).
pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
                              base: &str,
                              substs: &subst::Substs<'tcx>,
                              did: ast::DefId,
                              projections: &[ty::ProjectionPredicate<'tcx>],
                              get_generics: GG)
                              -> String
    where GG : FnOnce() -> ty::Generics<'tcx>
{
    if cx.sess.verbose() {
        // Verbose mode: dump every region, type and projection verbatim,
        // with no default elision.
        let mut strings = vec![];
        match substs.regions {
            subst::ErasedRegions => {
                strings.push(format!(".."));
            }
            subst::NonerasedRegions(ref regions) => {
                for region in regions.iter() {
                    strings.push(region.repr(cx));
                }
            }
        }
        for ty in substs.types.iter() {
            strings.push(ty.repr(cx));
        }
        for projection in projections.iter() {
            strings.push(format!("{}={}",
                                 projection.projection_ty.item_name.user_string(cx),
                                 projection.ty.user_string(cx)));
        }
        return if strings.is_empty() {
            format!("{}", base)
        } else {
            format!("{}<{}>", base, strings.connect(","))
        };
    }

    let mut strs = Vec::new();
    match substs.regions {
        subst::ErasedRegions => { }
        subst::NonerasedRegions(ref regions) => {
            for &r in regions.iter() {
                let s = region_to_string(cx, "", false, r);
                if s.is_empty() {
                    // This happens when the value of the region
                    // parameter is not easily serialized. This may be
                    // because the user omitted it in the first place,
                    // or because it refers to some block in the code,
                    // etc. I'm not sure how best to serialize this.
                    strs.push(format!("'_"));
                } else {
                    strs.push(s)
                }
            }
        }
    }

    // It is important to execute this conditionally, only if -Z
    // verbose is false. Otherwise, debug logs can sometimes cause
    // ICEs trying to fetch the generics early in the pipeline. This
    // is kind of a hacky workaround in that -Z verbose is required to
    // avoid those ICEs.
    let generics = get_generics();

    // Count trailing type parameters whose value equals their declared
    // default, so they can be elided from the output.
    let has_self = substs.self_ty().is_some();
    let tps = substs.types.get_slice(subst::TypeSpace);
    let ty_params = generics.types.get_slice(subst::TypeSpace);
    let has_defaults = ty_params.last().map_or(false, |def| def.default.is_some());
    let num_defaults = if has_defaults {
        ty_params.iter().zip(tps.iter()).rev().take_while(|&(def, &actual)| {
            match def.default {
                Some(default) => {
                    if !has_self && ty::type_has_self(default) {
                        // In an object type, there is no `Self`, and
                        // thus if the default value references Self,
                        // the user will be required to give an
                        // explicit value. We can't even do the
                        // substitution below to check without causing
                        // an ICE. (#18956).
                        false
                    } else {
                        default.subst(cx, substs) == actual
                    }
                }
                None => false
            }
        }).count()
    } else {
        0
    };

    for t in &tps[..tps.len() - num_defaults] {
        strs.push(ty_to_string(cx, *t))
    }

    for projection in projections {
        strs.push(format!("{}={}",
                          projection.projection_ty.item_name.user_string(cx),
                          projection.ty.user_string(cx)));
    }

    if cx.lang_items.fn_trait_kind(did).is_some() && projections.len() == 1 {
        // Fn-trait sugar: print `Fn(A, B) -> R` instead of
        // `Fn<(A, B), Output=R>`. The first substituted type is the
        // argument tuple; strip its parens/comma before re-wrapping.
        let projection_ty = projections[0].ty;
        let tail =
            if ty::type_is_nil(projection_ty) {
                format!("")
            } else {
                format!(" -> {}", projection_ty.user_string(cx))
            };
        format!("{}({}){}",
                base,
                if strs[0].starts_with("(") && strs[0].ends_with(",)") {
                    &strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)'
                } else if strs[0].starts_with("(") && strs[0].ends_with(")") {
                    &strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')'
                } else {
                    &strs[0][..]
                },
                tail)
    } else if strs.len() > 0 {
        format!("{}<{}>", base, strs.connect(", "))
    } else {
        format!("{}", base)
    }
}
pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
let mut s = typ.repr(cx).to_string();
if s.len() >= 32 {
s = (&s[0..32]).to_string();
}
return s;
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Option<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&None => "None".to_string(),
&Some(ref t) => t.repr(tcx),
}
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for P<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(**self).repr(tcx)
}
}
impl<'tcx,T:Repr<'tcx>,U:Repr<'tcx>> Repr<'tcx> for Result<T,U> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&Ok(ref t) => t.repr(tcx),
&Err(ref u) => format!("Err({})", u.repr(tcx))
}
}
}
impl<'tcx> Repr<'tcx> for () {
fn repr(&self, _tcx: &ctxt) -> String {
"()".to_string()
}
}
impl<'a, 'tcx, T: ?Sized +Repr<'tcx>> Repr<'tcx> for &'a T {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
Repr::repr(*self, tcx)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Rc<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(&**self).repr(tcx)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Box<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(&**self).repr(tcx)
}
}
fn repr_vec<'tcx, T:Repr<'tcx>>(tcx: &ctxt<'tcx>, v: &[T]) -> String {
vec_map_to_string(v, |t| t.repr(tcx))
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, self)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, &self[..])
}
}
// This is necessary to handle types like Option<~[T]>, for which
// autoderef cannot convert the &[T] handler
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, &self[..])
}
}
impl<'tcx, T:UserString<'tcx>> UserString<'tcx> for Vec<T> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let strs: Vec<String> =
self.iter().map(|t| t.user_string(tcx)).collect();
strs.connect(", ")
}
}
impl<'tcx> Repr<'tcx> for def::Def {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
/// This curious type is here to help pretty-print trait objects. In
/// a trait object, the projections are stored separately from the
/// main trait bound, but in fact we want to package them together
/// when printing out; they also have separate binders, but we want
/// them to share a binder when we print them out. (And the binder
/// pretty-printing logic is kind of clever and we don't want to
/// reproduce it.) So we just repackage up the structure somewhat.
///
/// Right now there is only one trait in an object that can have
/// projection bounds, so we just stuff them altogether. But in
/// reality we should eventually sort things out better.
type TraitAndProjections<'tcx> =
(Rc<ty::TraitRef<'tcx>>, Vec<ty::ProjectionPredicate<'tcx>>);
impl<'tcx> UserString<'tcx> for TraitAndProjections<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let &(ref trait_ref, ref projection_bounds) = self;
let base = ty::item_path_str(tcx, trait_ref.def_id);
parameterized(tcx,
&base,
trait_ref.substs,
trait_ref.def_id,
&projection_bounds[..],
|| ty::lookup_trait_def(tcx, trait_ref.def_id).generics.clone())
}
}
impl<'tcx> UserString<'tcx> for ty::TyTrait<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let &ty::TyTrait { ref principal, ref bounds } = self;
let mut components = vec![];
let tap: ty::Binder<TraitAndProjections<'tcx>> =
ty::Binder((principal.0.clone(),
bounds.projection_bounds.iter().map(|x| x.0.clone()).collect()));
// Generate the main trait ref, including associated types.
components.push(tap.user_string(tcx));
// Builtin bounds.
for bound in &bounds.builtin_bounds {
components.push(bound.user_string(tcx));
}
// Region, if not obviously implied by builtin bounds.
if bounds.region_bound != ty::ReStatic {
// Region bound is implied by builtin bounds:
components.push(bounds.region_bound.user_string(tcx));
}
components.retain(|s| !s.is_empty());
components.connect(" + ")
}
}
impl<'tcx> Repr<'tcx> for ty::TypeParameterDef<'tcx> {
fn repr(&self, _tcx: &ctxt<'tcx>) -> String {
format!("TypeParameterDef({:?}, {:?}/{})",
self.def_id,
self.space,
self.index)
}
}
impl<'tcx> Repr<'tcx> for ty::RegionParameterDef {
fn repr(&self, tcx: &ctxt) -> String {
format!("RegionParameterDef(name={}, def_id={}, bounds={})",
token::get_name(self.name),
self.def_id.repr(tcx),
self.bounds.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::TyS<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
ty_to_string(tcx, self)
}
}
impl<'tcx> Repr<'tcx> for ty::mt<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
mt_to_string(tcx, self)
}
}
impl<'tcx> Repr<'tcx> for subst::Substs<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Substs[types={}, regions={}]",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for subst::VecPerParamSpace<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("[{};{};{}]",
self.get_slice(subst::TypeSpace).repr(tcx),
self.get_slice(subst::SelfSpace).repr(tcx),
self.get_slice(subst::FnSpace).repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ItemSubsts<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ItemSubsts({})", self.substs.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for subst::RegionSubsts {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
subst::ErasedRegions => "erased".to_string(),
subst::NonerasedRegions(ref regions) => regions.repr(tcx)
}
}
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBounds {
fn repr(&self, _tcx: &ctxt) -> String {
let mut res = Vec::new();
for b in self {
res.push(match b {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundSync => "Sync".to_string(),
});
}
res.connect("+")
}
}
impl<'tcx> Repr<'tcx> for ty::ParamBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let mut res = Vec::new();
res.push(self.builtin_bounds.repr(tcx));
for t in &self.trait_bounds {
res.push(t.repr(tcx));
}
res.connect("+")
}
}
impl<'tcx> Repr<'tcx> for ty::TraitRef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
// when printing out the debug representation, we don't need
// to enumerate the `for<...>` etc because the debruijn index
// tells you everything you need to know.
let base = ty::item_path_str(tcx, self.def_id);
parameterized(tcx, &base, self.substs, self.def_id, &[],
|| ty::lookup_trait_def(tcx, self.def_id).generics.clone())
}
}
impl<'tcx> Repr<'tcx> for ty::TraitDef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TraitDef(generics={}, bounds={}, trait_ref={})",
self.generics.repr(tcx),
self.bounds.repr(tcx),
self.trait_ref.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::TraitItem {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
ast::RequiredMethod(ref data) => format!("RequiredMethod({}, id={})",
data.ident, data.id),
ast::ProvidedMethod(ref data) => format!("ProvidedMethod(id={})",
data.id),
ast::TypeTraitItem(ref data) => format!("TypeTraitItem({}, id={})",
data.ty_param.ident, data.ty_param.id),
}
}
}
impl<'tcx> Repr<'tcx> for ast::Expr {
fn repr(&self, _tcx: &ctxt) -> String {
format!("expr({}: {})", self.id, pprust::expr_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Path {
fn repr(&self, _tcx: &ctxt) -> String {
format!("path({})", pprust::path_to_string(self))
}
}
impl<'tcx> UserString<'tcx> for ast::Path {
fn user_string(&self, _tcx: &ctxt) -> String {
pprust::path_to_string(self)
}
}
impl<'tcx> Repr<'tcx> for ast::Ty {
fn repr(&self, _tcx: &ctxt) -> String {
format!("type({})", pprust::ty_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Item {
fn repr(&self, tcx: &ctxt) -> String {
format!("item({})", tcx.map.node_to_string(self.id))
}
}
impl<'tcx> Repr<'tcx> for ast::Lifetime {
fn repr(&self, _tcx: &ctxt) -> String {
format!("lifetime({}: {})", self.id, pprust::lifetime_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Stmt {
fn repr(&self, _tcx: &ctxt) -> String {
format!("stmt({}: {})",
ast_util::stmt_id(self),
pprust::stmt_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Pat {
fn repr(&self, _tcx: &ctxt) -> String {
format!("pat({}: {})", self.id, pprust::pat_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ty::BoundRegion {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::BrAnon(id) => format!("BrAnon({})", id),
ty::BrNamed(id, name) => {
format!("BrNamed({}, {})", id.repr(tcx), token::get_name(name))
}
ty::BrFresh(id) => format!("BrFresh({})", id),
ty::BrEnv => "BrEnv".to_string()
}
}
}
impl<'tcx> Repr<'tcx> for ty::Region {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::ReEarlyBound(id, space, index, name) => {
format!("ReEarlyBound({}, {:?}, {}, {})",
id,
space,
index,
token::get_name(name))
}
ty::ReLateBound(binder_id, ref bound_region) => {
format!("ReLateBound({:?}, {})",
binder_id,
bound_region.repr(tcx))
}
ty::ReFree(ref fr) => fr.repr(tcx),
ty::ReScope(id) => {
format!("ReScope({:?})", id)
}
ty::ReStatic => {
"ReStatic".to_string()
}
ty::ReInfer(ReVar(ref vid)) => {
format!("{:?}", vid)
}
ty::ReInfer(ReSkolemized(id, ref bound_region)) => {
format!("re_skolemized({}, {})", id, bound_region.repr(tcx))
}
ty::ReEmpty => {
"ReEmpty".to_string()
}
}
}
}
impl<'tcx> UserString<'tcx> for ty::Region {
fn user_string(&self, tcx: &ctxt) -> String {
region_to_string(tcx, "", false, *self)
}
}
impl<'tcx> Repr<'tcx> for ty::FreeRegion {
fn repr(&self, tcx: &ctxt) -> String {
format!("ReFree({}, {})",
self.scope.repr(tcx),
self.bound_region.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for region::CodeExtent {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
region::CodeExtent::Misc(node_id) =>
format!("Misc({})", node_id),
region::CodeExtent::DestructionScope(node_id) =>
format!("DestructionScope({})", node_id),
region::CodeExtent::Remainder(rem) =>
format!("Remainder({}, {})", rem.block, rem.first_statement_index),
}
}
}
impl<'tcx> Repr<'tcx> for region::DestructionScopeData {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
region::DestructionScopeData{ node_id } =>
format!("DestructionScopeData {{ node_id: {} }}", node_id),
}
}
}
impl<'tcx> Repr<'tcx> for ast::DefId {
fn repr(&self, tcx: &ctxt) -> String {
// Unfortunately, there seems to be no way to attempt to print
// a path for a def-id, so I'll just make a best effort for now
// and otherwise fallback to just printing the crate/node pair
if self.krate == ast::LOCAL_CRATE {
match tcx.map.find(self.node) {
Some(ast_map::NodeItem(..)) |
Some(ast_map::NodeForeignItem(..)) |
Some(ast_map::NodeImplItem(..)) |
Some(ast_map::NodeTraitItem(..)) |
Some(ast_map::NodeVariant(..)) |
Some(ast_map::NodeStructCtor(..)) => {
return format!(
"{:?}:{}",
*self,
ty::item_path_str(tcx, *self))
}
_ => {}
}
}
return format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::TypeScheme<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TypeScheme {{generics: {}, ty: {}}}",
self.generics.repr(tcx),
self.ty.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Generics<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Generics(types: {}, regions: {})",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::GenericPredicates<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("GenericPredicates(predicates: {})",
self.predicates.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::InstantiatedPredicates<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("InstantiatedPredicates({})",
self.predicates.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ItemVariances {
fn repr(&self, tcx: &ctxt) -> String {
format!("ItemVariances(types={}, \
regions={})",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Variance {
fn repr(&self, _: &ctxt) -> String {
// The first `.to_string()` returns a &'static str (it is not an implementation
// of the ToString trait). Because of that, we need to call `.to_string()` again
// if we want to have a `String`.
let result: &'static str = (*self).to_string();
result.to_string()
}
}
impl<'tcx> Repr<'tcx> for ty::Method<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("method(name: {}, generics: {}, fty: {}, \
explicit_self: {}, vis: {}, def_id: {})",
self.name.repr(tcx),
self.generics.repr(tcx),
self.fty.repr(tcx),
self.explicit_self.repr(tcx),
self.vis.repr(tcx),
self.def_id.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::Name {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_name(*self).to_string()
}
}
impl<'tcx> UserString<'tcx> for ast::Name {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(*self).to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::Ident {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_ident(*self).to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::ExplicitSelf_ {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::Visibility {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BareFnTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("BareFnTy {{unsafety: {}, abi: {}, sig: {}}}",
self.unsafety,
self.abi.to_string(),
self.sig.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::FnSig<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("fn{} -> {}", self.inputs.repr(tcx), self.output.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::FnOutput<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ty::FnConverging(ty) =>
format!("FnConverging({0})", ty.repr(tcx)),
ty::FnDiverging =>
"FnDiverging".to_string()
}
}
}
impl<'tcx> Repr<'tcx> for ty::MethodCallee<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodCallee {{origin: {}, ty: {}, {}}}",
self.origin.repr(tcx),
self.ty.repr(tcx),
self.substs.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::MethodOrigin<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&ty::MethodStatic(def_id) => {
format!("MethodStatic({})", def_id.repr(tcx))
}
&ty::MethodStaticClosure(def_id) => {
format!("MethodStaticClosure({})", def_id.repr(tcx))
}
&ty::MethodTypeParam(ref p) => {
p.repr(tcx)
}
&ty::MethodTraitObject(ref p) => {
p.repr(tcx)
}
}
}
}
impl<'tcx> Repr<'tcx> for ty::MethodParam<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodParam({},{})",
self.trait_ref.repr(tcx),
self.method_num)
}
}
impl<'tcx> Repr<'tcx> for ty::MethodObject<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodObject({},{},{})",
self.trait_ref.repr(tcx),
self.method_num,
self.vtable_index)
}
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBound {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> UserString<'tcx> for ty::BuiltinBound {
fn user_string(&self, _tcx: &ctxt) -> String {
match *self {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundSync => "Sync".to_string(),
}
}
}
impl<'tcx> Repr<'tcx> for Span {
fn repr(&self, tcx: &ctxt) -> String {
tcx.sess.codemap().span_to_string(*self).to_string()
}
}
impl<'tcx, A:UserString<'tcx>> UserString<'tcx> for Rc<A> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let this: &A = &**self;
this.user_string(tcx)
}
}
impl<'tcx> UserString<'tcx> for ty::ParamBounds<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let mut result = Vec::new();
let s = self.builtin_bounds.user_string(tcx);
if !s.is_empty() {
result.push(s);
}
for n in &self.trait_bounds {
result.push(n.user_string(tcx));
}
result.connect(" + ")
}
}
impl<'tcx> Repr<'tcx> for ty::ExistentialBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let mut res = Vec::new();
let region_str = self.region_bound.user_string(tcx);
if !region_str.is_empty() {
res.push(region_str);
}
for bound in &self.builtin_bounds {
res.push(bound.user_string(tcx));
}
for projection_bound in &self.projection_bounds {
res.push(projection_bound.user_string(tcx));
}
res.connect("+")
}
}
impl<'tcx> UserString<'tcx> for ty::BuiltinBounds {
fn user_string(&self, tcx: &ctxt) -> String {
self.iter()
.map(|bb| bb.user_string(tcx))
.collect::<Vec<String>>()
.connect("+")
.to_string()
}
}
impl<'tcx, T> UserString<'tcx> for ty::Binder<T>
where T : UserString<'tcx> + TypeFoldable<'tcx>
{
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
// Replace any anonymous late-bound regions with named
// variants, using gensym'd identifiers, so that we can
// clearly differentiate between named and unnamed regions in
// the output. We'll probably want to tweak this over time to
// decide just how much information to give.
let mut names = Vec::new();
let (unbound_value, _) = ty::replace_late_bound_regions(tcx, self, |br| {
ty::ReLateBound(ty::DebruijnIndex::new(1), match br {
ty::BrNamed(_, name) => {
names.push(token::get_name(name));
br
}
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => {
let name = token::gensym("'r");
names.push(token::get_name(name));
ty::BrNamed(ast_util::local_def(ast::DUMMY_NODE_ID), name)
}
})
});
let names: Vec<_> = names.iter().map(|s| &s[..]).collect();
let value_str = unbound_value.user_string(tcx);
if names.len() == 0 {
value_str
} else {
format!("for<{}> {}", names.connect(","), value_str)
}
}
}
impl<'tcx> UserString<'tcx> for ty::TraitRef<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let path_str = ty::item_path_str(tcx, self.def_id);
parameterized(tcx, &path_str, self.substs, self.def_id, &[],
|| ty::lookup_trait_def(tcx, self.def_id).generics.clone())
}
}
impl<'tcx> UserString<'tcx> for Ty<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
ty_to_string(tcx, *self)
}
}
impl<'tcx> UserString<'tcx> for ast::Ident {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(self.name).to_string()
}
}
impl<'tcx> Repr<'tcx> for abi::Abi {
fn repr(&self, _tcx: &ctxt) -> String {
self.to_string()
}
}
impl<'tcx> UserString<'tcx> for abi::Abi {
fn user_string(&self, _tcx: &ctxt) -> String {
self.to_string()
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarId {
fn repr(&self, tcx: &ctxt) -> String {
format!("UpvarId({};`{}`;{})",
self.var_id,
ty::local_var_name_str(tcx, self.var_id),
self.closure_expr_id)
}
}
impl<'tcx> Repr<'tcx> for ast::Mutability {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BorrowKind {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarBorrow {
fn repr(&self, tcx: &ctxt) -> String {
format!("UpvarBorrow({}, {})",
self.kind.repr(tcx),
self.region.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarCapture {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::UpvarCapture::ByValue => format!("ByValue"),
ty::UpvarCapture::ByRef(ref data) => format!("ByRef({})", data.repr(tcx)),
}
}
}
impl<'tcx> Repr<'tcx> for ty::IntVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::FloatVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::RegionVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::TyVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::IntVarValue {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::IntTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::UintTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::FloatTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::ExplicitSelfCategory {
fn repr(&self, _: &ctxt) -> String {
explicit_self_category_to_str(self).to_string()
}
}
impl<'tcx> UserString<'tcx> for ParamTy {
fn user_string(&self, _tcx: &ctxt) -> String {
format!("{}", token::get_name(self.name))
}
}
impl<'tcx> Repr<'tcx> for ParamTy {
fn repr(&self, tcx: &ctxt) -> String {
let ident = self.user_string(tcx);
format!("{}/{:?}.{}", ident, self.space, self.idx)
}
}
impl<'tcx, A:Repr<'tcx>, B:Repr<'tcx>> Repr<'tcx> for (A,B) {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let &(ref a, ref b) = self;
format!("({},{})", a.repr(tcx), b.repr(tcx))
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for ty::Binder<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Binder({})", self.0.repr(tcx))
}
}
impl<'tcx, S, K, V> Repr<'tcx> for HashMap<K, V, S>
where K: Hash + Eq + Repr<'tcx>,
V: Repr<'tcx>,
S: HashState,
{
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("HashMap({})",
self.iter()
.map(|(k,v)| format!("{} => {}", k.repr(tcx), v.repr(tcx)))
.collect::<Vec<String>>()
.connect(", "))
}
}
impl<'tcx, T, U> Repr<'tcx> for ty::OutlivesPredicate<T,U>
where T : Repr<'tcx> + TypeFoldable<'tcx>,
U : Repr<'tcx> + TypeFoldable<'tcx>,
{
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("OutlivesPredicate({}, {})",
self.0.repr(tcx),
self.1.repr(tcx))
}
}
impl<'tcx, T, U> UserString<'tcx> for ty::OutlivesPredicate<T,U>
where T : UserString<'tcx> + TypeFoldable<'tcx>,
U : UserString<'tcx> + TypeFoldable<'tcx>,
{
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} : {}",
self.0.user_string(tcx),
self.1.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::EquatePredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("EquatePredicate({}, {})",
self.0.repr(tcx),
self.1.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::EquatePredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} == {}",
self.0.user_string(tcx),
self.1.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::TraitPredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TraitPredicate({})",
self.trait_ref.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::TraitPredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} : {}",
self.trait_ref.self_ty().user_string(tcx),
self.trait_ref.user_string(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::ProjectionPredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} == {}",
self.projection_ty.user_string(tcx),
self.ty.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ProjectionTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("<{} as {}>::{}",
self.trait_ref.substs.self_ty().repr(tcx),
self.trait_ref.repr(tcx),
self.item_name.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::ProjectionTy<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("<{} as {}>::{}",
self.trait_ref.self_ty().user_string(tcx),
self.trait_ref.user_string(tcx),
self.item_name.user_string(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::Predicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ty::Predicate::Trait(ref data) => data.user_string(tcx),
ty::Predicate::Equate(ref predicate) => predicate.user_string(tcx),
ty::Predicate::RegionOutlives(ref predicate) => predicate.user_string(tcx),
ty::Predicate::TypeOutlives(ref predicate) => predicate.user_string(tcx),
ty::Predicate::Projection(ref predicate) => predicate.user_string(tcx),
}
}
}
| 31.93961 | 95 | 0.505337 |
#[doc = "Reader of register GINTMSK"]
pub type R = crate::R<u32, super::GINTMSK>;
#[doc = "Writer for register GINTMSK"]
pub type W = crate::W<u32, super::GINTMSK>;
#[doc = "Register GINTMSK `reset()`'s with value 0"]
impl crate::ResetValue for super::GINTMSK {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Reset value is all-zero: every maskable interrupt bit starts masked off.
        0
    }
}
/// Reader of field `MODEMISMSK`.
pub type MODEMISMSK_R = crate::R<bool, bool>;
/// Write proxy for field `MODEMISMSK`.
pub struct MODEMISMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> MODEMISMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 1 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 1;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
/// Reader of field `OTGINTMSK`.
pub type OTGINTMSK_R = crate::R<bool, bool>;
/// Write proxy for field `OTGINTMSK`.
pub struct OTGINTMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> OTGINTMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 2 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 2;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
/// Reader of field `SOFMSK`.
pub type SOFMSK_R = crate::R<bool, bool>;
/// Write proxy for field `SOFMSK`.
pub struct SOFMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> SOFMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 3 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 3;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
/// Reader of field `RXFLVLMSK`.
pub type RXFLVLMSK_R = crate::R<bool, bool>;
/// Write proxy for field `RXFLVLMSK`.
pub struct RXFLVLMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> RXFLVLMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 4 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 4;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
/// Reader of field `NPTXFEMPMSK`.
pub type NPTXFEMPMSK_R = crate::R<bool, bool>;
/// Write proxy for field `NPTXFEMPMSK`.
pub struct NPTXFEMPMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> NPTXFEMPMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 5 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 5;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
/// Reader of field `GINNAKEFFMSK`.
pub type GINNAKEFFMSK_R = crate::R<bool, bool>;
/// Write proxy for field `GINNAKEFFMSK`.
pub struct GINNAKEFFMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> GINNAKEFFMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 6 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 6;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
/// Reader of field `GOUTNAKEFFMSK`.
pub type GOUTNAKEFFMSK_R = crate::R<bool, bool>;
/// Write proxy for field `GOUTNAKEFFMSK`.
pub struct GOUTNAKEFFMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> GOUTNAKEFFMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 7 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 7;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
/// Reader of field `ERLYSUSPMSK`.
pub type ERLYSUSPMSK_R = crate::R<bool, bool>;
/// Write proxy for field `ERLYSUSPMSK`.
pub struct ERLYSUSPMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> ERLYSUSPMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 10 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 10;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
/// Reader of field `USBSUSPMSK`.
pub type USBSUSPMSK_R = crate::R<bool, bool>;
/// Write proxy for field `USBSUSPMSK`.
pub struct USBSUSPMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> USBSUSPMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 11 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 11;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
/// Reader of field `USBRSTMSK`.
pub type USBRSTMSK_R = crate::R<bool, bool>;
/// Write proxy for field `USBRSTMSK`.
pub struct USBRSTMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> USBRSTMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 12 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 12;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
/// Reader of field `ENUMDONEMSK`.
pub type ENUMDONEMSK_R = crate::R<bool, bool>;
/// Write proxy for field `ENUMDONEMSK`.
pub struct ENUMDONEMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> ENUMDONEMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 13 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 13;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
/// Reader of field `ISOOUTDROPMSK`.
pub type ISOOUTDROPMSK_R = crate::R<bool, bool>;
/// Write proxy for field `ISOOUTDROPMSK`.
pub struct ISOOUTDROPMSK_W<'a> {
    w: &'a mut W,
}
impl<'a> ISOOUTDROPMSK_W<'a> {
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 14 of GINTMSK).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the masked new value.
        let mask: u32 = 0x01 << 14;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
#[doc = "Reader of field `EOPFMSK`"]
pub type EOPFMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EOPFMSK`"]
pub struct EOPFMSK_W<'a> {
w: &'a mut W,
}
impl<'a> EOPFMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
#[doc = "Reader of field `EPMISMSK`"]
pub type EPMISMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EPMISMSK`"]
pub struct EPMISMSK_W<'a> {
w: &'a mut W,
}
impl<'a> EPMISMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "Reader of field `IEPINTMSK`"]
pub type IEPINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IEPINTMSK`"]
pub struct IEPINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> IEPINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
#[doc = "Reader of field `OEPINTMSK`"]
pub type OEPINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OEPINTMSK`"]
pub struct OEPINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> OEPINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
self.w
}
}
#[doc = "Reader of field `INCOMPISOINMSK`"]
pub type INCOMPISOINMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INCOMPISOINMSK`"]
pub struct INCOMPISOINMSK_W<'a> {
w: &'a mut W,
}
impl<'a> INCOMPISOINMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
#[doc = "Reader of field `INCOMPLPMSK`"]
pub type INCOMPLPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INCOMPLPMSK`"]
pub struct INCOMPLPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> INCOMPLPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
self.w
}
}
#[doc = "Reader of field `FETSUSPMSK`"]
pub type FETSUSPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FETSUSPMSK`"]
pub struct FETSUSPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> FETSUSPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
self.w
}
}
#[doc = "Reader of field `RESETDETMSK`"]
pub type RESETDETMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RESETDETMSK`"]
pub struct RESETDETMSK_W<'a> {
w: &'a mut W,
}
impl<'a> RESETDETMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
self.w
}
}
#[doc = "Reader of field `PRTINTMSK`"]
pub type PRTINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PRTINTMSK`"]
pub struct PRTINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> PRTINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
self.w
}
}
#[doc = "Reader of field `HCHINTMSK`"]
pub type HCHINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HCHINTMSK`"]
pub struct HCHINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> HCHINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
self.w
}
}
#[doc = "Reader of field `PTXFEMPMSK`"]
pub type PTXFEMPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PTXFEMPMSK`"]
pub struct PTXFEMPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> PTXFEMPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
self.w
}
}
#[doc = "Reader of field `CONIDSTSCHNGMSK`"]
pub type CONIDSTSCHNGMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CONIDSTSCHNGMSK`"]
pub struct CONIDSTSCHNGMSK_W<'a> {
w: &'a mut W,
}
impl<'a> CONIDSTSCHNGMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
self.w
}
}
#[doc = "Reader of field `DISCONNINTMSK`"]
pub type DISCONNINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DISCONNINTMSK`"]
pub struct DISCONNINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> DISCONNINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
#[doc = "Reader of field `SESSREQINTMSK`"]
pub type SESSREQINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SESSREQINTMSK`"]
pub struct SESSREQINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> SESSREQINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
self.w
}
}
#[doc = "Reader of field `WKUPINTMSK`"]
pub type WKUPINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WKUPINTMSK`"]
pub struct WKUPINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> WKUPINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
self.w
}
}
// svd2rust-generated read accessors: each getter shifts the cached register
// value right by its bit position and tests the low bit. Bit positions 0,
// 8-9, 16 and 27 have no getter here — presumably reserved or defined
// outside this view; confirm against the SVD.
impl R {
    #[doc = "Bit 1 - Mode Mismatch Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn modemismsk(&self) -> MODEMISMSK_R {
        MODEMISMSK_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - OTG Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn otgintmsk(&self) -> OTGINTMSK_R {
        OTGINTMSK_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Start of Frame Mask (host and device)"]
    #[inline(always)]
    pub fn sofmsk(&self) -> SOFMSK_R {
        SOFMSK_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Receive FIFO Non-Empty Mask (host and device)"]
    #[inline(always)]
    pub fn rxflvlmsk(&self) -> RXFLVLMSK_R {
        RXFLVLMSK_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Non-Periodic TxFIFO Empty Mask (host only)"]
    #[inline(always)]
    pub fn nptxfempmsk(&self) -> NPTXFEMPMSK_R {
        NPTXFEMPMSK_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Global Non-periodic IN NAK Effective Mask (device only)"]
    #[inline(always)]
    pub fn ginnakeffmsk(&self) -> GINNAKEFFMSK_R {
        GINNAKEFFMSK_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Global OUT NAK Effective Mask (device only)"]
    #[inline(always)]
    pub fn goutnakeffmsk(&self) -> GOUTNAKEFFMSK_R {
        GOUTNAKEFFMSK_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 10 - Early Suspend Mask (device only)"]
    #[inline(always)]
    pub fn erlysuspmsk(&self) -> ERLYSUSPMSK_R {
        ERLYSUSPMSK_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - USB Suspend Mask (device only)"]
    #[inline(always)]
    pub fn usbsuspmsk(&self) -> USBSUSPMSK_R {
        USBSUSPMSK_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12 - USB Reset Mask (device only)"]
    #[inline(always)]
    pub fn usbrstmsk(&self) -> USBRSTMSK_R {
        USBRSTMSK_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 13 - Enumeration Done Mask (device only)"]
    #[inline(always)]
    pub fn enumdonemsk(&self) -> ENUMDONEMSK_R {
        ENUMDONEMSK_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 14 - Isochronous OUT Packet Dropped Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn isooutdropmsk(&self) -> ISOOUTDROPMSK_R {
        ISOOUTDROPMSK_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 15 - End of Periodic Frame Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn eopfmsk(&self) -> EOPFMSK_R {
        EOPFMSK_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Endpoint Mismatch Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn epmismsk(&self) -> EPMISMSK_R {
        EPMISMSK_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 18 - IN Endpoints Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn iepintmsk(&self) -> IEPINTMSK_R {
        IEPINTMSK_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - OUT Endpoints Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn oepintmsk(&self) -> OEPINTMSK_R {
        OEPINTMSK_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - Incomplete Isochronous IN Transfer Mask (device only)"]
    #[inline(always)]
    pub fn incompisoinmsk(&self) -> INCOMPISOINMSK_R {
        INCOMPISOINMSK_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 21 - Incomplete Periodic Transfer Mask (host only)"]
    #[inline(always)]
    pub fn incomplpmsk(&self) -> INCOMPLPMSK_R {
        INCOMPLPMSK_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 22 - Data Fetch Suspended Mask (device only)"]
    #[inline(always)]
    pub fn fetsuspmsk(&self) -> FETSUSPMSK_R {
        FETSUSPMSK_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bit 23 - Reset detected Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn resetdetmsk(&self) -> RESETDETMSK_R {
        RESETDETMSK_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 24 - Host Port Interrupt Mask (host only)"]
    #[inline(always)]
    pub fn prtintmsk(&self) -> PRTINTMSK_R {
        PRTINTMSK_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 25 - Host Channels Interrupt Mask (host only)"]
    #[inline(always)]
    pub fn hchintmsk(&self) -> HCHINTMSK_R {
        HCHINTMSK_R::new(((self.bits >> 25) & 0x01) != 0)
    }
    #[doc = "Bit 26 - Periodic TxFIFO Empty Mask (host only)"]
    #[inline(always)]
    pub fn ptxfempmsk(&self) -> PTXFEMPMSK_R {
        PTXFEMPMSK_R::new(((self.bits >> 26) & 0x01) != 0)
    }
    #[doc = "Bit 28 - Connector ID Status Change Mask (host and device)"]
    #[inline(always)]
    pub fn conidstschngmsk(&self) -> CONIDSTSCHNGMSK_R {
        CONIDSTSCHNGMSK_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 29 - Disconnect Detected Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn disconnintmsk(&self) -> DISCONNINTMSK_R {
        DISCONNINTMSK_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 30 - Session Request/New Session Detected Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn sessreqintmsk(&self) -> SESSREQINTMSK_R {
        SESSREQINTMSK_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - Resume/Remote Wakeup Detected Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn wkupintmsk(&self) -> WKUPINTMSK_R {
        WKUPINTMSK_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// svd2rust-generated write accessors: each method hands back the matching
// `*_W` proxy borrowing this writer, so callers can chain e.g.
// `w.sofmsk().set_bit()` inside a register write closure.
impl W {
    #[doc = "Bit 1 - Mode Mismatch Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn modemismsk(&mut self) -> MODEMISMSK_W {
        MODEMISMSK_W { w: self }
    }
    #[doc = "Bit 2 - OTG Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn otgintmsk(&mut self) -> OTGINTMSK_W {
        OTGINTMSK_W { w: self }
    }
    #[doc = "Bit 3 - Start of Frame Mask (host and device)"]
    #[inline(always)]
    pub fn sofmsk(&mut self) -> SOFMSK_W {
        SOFMSK_W { w: self }
    }
    #[doc = "Bit 4 - Receive FIFO Non-Empty Mask (host and device)"]
    #[inline(always)]
    pub fn rxflvlmsk(&mut self) -> RXFLVLMSK_W {
        RXFLVLMSK_W { w: self }
    }
    #[doc = "Bit 5 - Non-Periodic TxFIFO Empty Mask (host only)"]
    #[inline(always)]
    pub fn nptxfempmsk(&mut self) -> NPTXFEMPMSK_W {
        NPTXFEMPMSK_W { w: self }
    }
    #[doc = "Bit 6 - Global Non-periodic IN NAK Effective Mask (device only)"]
    #[inline(always)]
    pub fn ginnakeffmsk(&mut self) -> GINNAKEFFMSK_W {
        GINNAKEFFMSK_W { w: self }
    }
    #[doc = "Bit 7 - Global OUT NAK Effective Mask (device only)"]
    #[inline(always)]
    pub fn goutnakeffmsk(&mut self) -> GOUTNAKEFFMSK_W {
        GOUTNAKEFFMSK_W { w: self }
    }
    #[doc = "Bit 10 - Early Suspend Mask (device only)"]
    #[inline(always)]
    pub fn erlysuspmsk(&mut self) -> ERLYSUSPMSK_W {
        ERLYSUSPMSK_W { w: self }
    }
    #[doc = "Bit 11 - USB Suspend Mask (device only)"]
    #[inline(always)]
    pub fn usbsuspmsk(&mut self) -> USBSUSPMSK_W {
        USBSUSPMSK_W { w: self }
    }
    #[doc = "Bit 12 - USB Reset Mask (device only)"]
    #[inline(always)]
    pub fn usbrstmsk(&mut self) -> USBRSTMSK_W {
        USBRSTMSK_W { w: self }
    }
    #[doc = "Bit 13 - Enumeration Done Mask (device only)"]
    #[inline(always)]
    pub fn enumdonemsk(&mut self) -> ENUMDONEMSK_W {
        ENUMDONEMSK_W { w: self }
    }
    #[doc = "Bit 14 - Isochronous OUT Packet Dropped Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn isooutdropmsk(&mut self) -> ISOOUTDROPMSK_W {
        ISOOUTDROPMSK_W { w: self }
    }
    #[doc = "Bit 15 - End of Periodic Frame Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn eopfmsk(&mut self) -> EOPFMSK_W {
        EOPFMSK_W { w: self }
    }
    #[doc = "Bit 17 - Endpoint Mismatch Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn epmismsk(&mut self) -> EPMISMSK_W {
        EPMISMSK_W { w: self }
    }
    #[doc = "Bit 18 - IN Endpoints Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn iepintmsk(&mut self) -> IEPINTMSK_W {
        IEPINTMSK_W { w: self }
    }
    #[doc = "Bit 19 - OUT Endpoints Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn oepintmsk(&mut self) -> OEPINTMSK_W {
        OEPINTMSK_W { w: self }
    }
    #[doc = "Bit 20 - Incomplete Isochronous IN Transfer Mask (device only)"]
    #[inline(always)]
    pub fn incompisoinmsk(&mut self) -> INCOMPISOINMSK_W {
        INCOMPISOINMSK_W { w: self }
    }
    #[doc = "Bit 21 - Incomplete Periodic Transfer Mask (host only)"]
    #[inline(always)]
    pub fn incomplpmsk(&mut self) -> INCOMPLPMSK_W {
        INCOMPLPMSK_W { w: self }
    }
    #[doc = "Bit 22 - Data Fetch Suspended Mask (device only)"]
    #[inline(always)]
    pub fn fetsuspmsk(&mut self) -> FETSUSPMSK_W {
        FETSUSPMSK_W { w: self }
    }
    #[doc = "Bit 23 - Reset detected Interrupt Mask (device only)"]
    #[inline(always)]
    pub fn resetdetmsk(&mut self) -> RESETDETMSK_W {
        RESETDETMSK_W { w: self }
    }
    #[doc = "Bit 24 - Host Port Interrupt Mask (host only)"]
    #[inline(always)]
    pub fn prtintmsk(&mut self) -> PRTINTMSK_W {
        PRTINTMSK_W { w: self }
    }
    #[doc = "Bit 25 - Host Channels Interrupt Mask (host only)"]
    #[inline(always)]
    pub fn hchintmsk(&mut self) -> HCHINTMSK_W {
        HCHINTMSK_W { w: self }
    }
    #[doc = "Bit 26 - Periodic TxFIFO Empty Mask (host only)"]
    #[inline(always)]
    pub fn ptxfempmsk(&mut self) -> PTXFEMPMSK_W {
        PTXFEMPMSK_W { w: self }
    }
    #[doc = "Bit 28 - Connector ID Status Change Mask (host and device)"]
    #[inline(always)]
    pub fn conidstschngmsk(&mut self) -> CONIDSTSCHNGMSK_W {
        CONIDSTSCHNGMSK_W { w: self }
    }
    #[doc = "Bit 29 - Disconnect Detected Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn disconnintmsk(&mut self) -> DISCONNINTMSK_W {
        DISCONNINTMSK_W { w: self }
    }
    #[doc = "Bit 30 - Session Request/New Session Detected Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn sessreqintmsk(&mut self) -> SESSREQINTMSK_W {
        SESSREQINTMSK_W { w: self }
    }
    #[doc = "Bit 31 - Resume/Remote Wakeup Detected Interrupt Mask (host and device)"]
    #[inline(always)]
    pub fn wkupintmsk(&mut self) -> WKUPINTMSK_W {
        WKUPINTMSK_W { w: self }
    }
}
#[doc = "Reader of register VDD_SPI"]
// svd2rust-generated reader/writer aliases for the `VDD_SPI` pad
// configuration register; `R`/`W` wrap the raw 32-bit register value.
pub type R = crate::R<u32, super::VDD_SPI>;
#[doc = "Writer for register VDD_SPI"]
pub type W = crate::W<u32, super::VDD_SPI>;
#[doc = "Register VDD_SPI `reset()`'s with value 0x0a00"]
impl crate::ResetValue for super::VDD_SPI {
    type Type = u32;
    // 0x0a00 = bit 11 | bit 9, i.e. FUN_DRV = 0b10 (bits 10:11) and
    // FUN_IE = 1 (bit 9) per the field layout encoded below.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0a00
    }
}
// svd2rust-generated reader aliases and write proxies for each VDD_SPI
// bit-field. Two-bit fields (`MCU_SEL`, `FUN_DRV`, `SLP_DRV`) expose
// `unsafe fn bits` — the value is masked to 2 bits here, but the method is
// generated as `unsafe` (svd2rust convention for fields without enumerated
// values — confirm against generator version). Single-bit fields expose
// safe `set_bit`/`clear_bit`/`bit`.
#[doc = "Reader of field `MCU_SEL`"]
pub type MCU_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MCU_SEL`"]
pub struct MCU_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> MCU_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 12)) | (((value as u32) & 0x03) << 12);
        self.w
    }
}
#[doc = "Reader of field `FUN_DRV`"]
pub type FUN_DRV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `FUN_DRV`"]
pub struct FUN_DRV_W<'a> {
    w: &'a mut W,
}
impl<'a> FUN_DRV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);
        self.w
    }
}
#[doc = "Reader of field `FUN_IE`"]
pub type FUN_IE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FUN_IE`"]
pub struct FUN_IE_W<'a> {
    w: &'a mut W,
}
impl<'a> FUN_IE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
#[doc = "Reader of field `FUN_PU`"]
pub type FUN_PU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FUN_PU`"]
pub struct FUN_PU_W<'a> {
    w: &'a mut W,
}
impl<'a> FUN_PU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `FUN_PD`"]
pub type FUN_PD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FUN_PD`"]
pub struct FUN_PD_W<'a> {
    w: &'a mut W,
}
impl<'a> FUN_PD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `SLP_DRV`"]
pub type SLP_DRV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SLP_DRV`"]
pub struct SLP_DRV_W<'a> {
    w: &'a mut W,
}
impl<'a> SLP_DRV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 5)) | (((value as u32) & 0x03) << 5);
        self.w
    }
}
#[doc = "Reader of field `SLP_IE`"]
pub type SLP_IE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLP_IE`"]
pub struct SLP_IE_W<'a> {
    w: &'a mut W,
}
impl<'a> SLP_IE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `SLP_PU`"]
pub type SLP_PU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLP_PU`"]
pub struct SLP_PU_W<'a> {
    w: &'a mut W,
}
impl<'a> SLP_PU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `SLP_PD`"]
pub type SLP_PD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLP_PD`"]
pub struct SLP_PD_W<'a> {
    w: &'a mut W,
}
impl<'a> SLP_PD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `SLP_SEL`"]
pub type SLP_SEL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLP_SEL`"]
pub struct SLP_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> SLP_SEL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `SLP_OE`"]
pub type SLP_OE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLP_OE`"]
pub struct SLP_OE_W<'a> {
    w: &'a mut W,
}
impl<'a> SLP_OE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: no shift needed, mask the LSB directly.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// svd2rust-generated read accessors for the VDD_SPI pad-configuration
// fields: two-bit fields are extracted as `u8`, single-bit fields as `bool`.
impl R {
    #[doc = "Bits 12:13 - configures IO_MUX function"]
    #[inline(always)]
    pub fn mcu_sel(&self) -> MCU_SEL_R {
        MCU_SEL_R::new(((self.bits >> 12) & 0x03) as u8)
    }
    #[doc = "Bits 10:11 - configures drive strength"]
    #[inline(always)]
    pub fn fun_drv(&self) -> FUN_DRV_R {
        FUN_DRV_R::new(((self.bits >> 10) & 0x03) as u8)
    }
    #[doc = "Bit 9 - configures input enable"]
    #[inline(always)]
    pub fn fun_ie(&self) -> FUN_IE_R {
        FUN_IE_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 8 - configures pull up"]
    #[inline(always)]
    pub fn fun_pu(&self) -> FUN_PU_R {
        FUN_PU_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 7 - configures pull down"]
    #[inline(always)]
    pub fn fun_pd(&self) -> FUN_PD_R {
        FUN_PD_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bits 5:6 - configures drive strength during sleep mode"]
    #[inline(always)]
    pub fn slp_drv(&self) -> SLP_DRV_R {
        SLP_DRV_R::new(((self.bits >> 5) & 0x03) as u8)
    }
    #[doc = "Bit 4 - configures input enable during sleep mode"]
    #[inline(always)]
    pub fn slp_ie(&self) -> SLP_IE_R {
        SLP_IE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3 - configures pull up during sleep mode"]
    #[inline(always)]
    pub fn slp_pu(&self) -> SLP_PU_R {
        SLP_PU_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2 - configures pull down during sleep mode"]
    #[inline(always)]
    pub fn slp_pd(&self) -> SLP_PD_R {
        SLP_PD_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1 - configures sleep mode selection"]
    #[inline(always)]
    pub fn slp_sel(&self) -> SLP_SEL_R {
        SLP_SEL_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0 - configures output enable during sleep mode"]
    #[inline(always)]
    pub fn slp_oe(&self) -> SLP_OE_R {
        SLP_OE_R::new((self.bits & 0x01) != 0)
    }
}
// svd2rust-generated write accessors: each method returns the matching
// `*_W` proxy borrowing this writer, enabling chained field writes.
impl W {
    #[doc = "Bits 12:13 - configures IO_MUX function"]
    #[inline(always)]
    pub fn mcu_sel(&mut self) -> MCU_SEL_W {
        MCU_SEL_W { w: self }
    }
    #[doc = "Bits 10:11 - configures drive strength"]
    #[inline(always)]
    pub fn fun_drv(&mut self) -> FUN_DRV_W {
        FUN_DRV_W { w: self }
    }
    #[doc = "Bit 9 - configures input enable"]
    #[inline(always)]
    pub fn fun_ie(&mut self) -> FUN_IE_W {
        FUN_IE_W { w: self }
    }
    #[doc = "Bit 8 - configures pull up"]
    #[inline(always)]
    pub fn fun_pu(&mut self) -> FUN_PU_W {
        FUN_PU_W { w: self }
    }
    #[doc = "Bit 7 - configures pull down"]
    #[inline(always)]
    pub fn fun_pd(&mut self) -> FUN_PD_W {
        FUN_PD_W { w: self }
    }
    #[doc = "Bits 5:6 - configures drive strength during sleep mode"]
    #[inline(always)]
    pub fn slp_drv(&mut self) -> SLP_DRV_W {
        SLP_DRV_W { w: self }
    }
    #[doc = "Bit 4 - configures input enable during sleep mode"]
    #[inline(always)]
    pub fn slp_ie(&mut self) -> SLP_IE_W {
        SLP_IE_W { w: self }
    }
    #[doc = "Bit 3 - configures pull up during sleep mode"]
    #[inline(always)]
    pub fn slp_pu(&mut self) -> SLP_PU_W {
        SLP_PU_W { w: self }
    }
    #[doc = "Bit 2 - configures pull down during sleep mode"]
    #[inline(always)]
    pub fn slp_pd(&mut self) -> SLP_PD_W {
        SLP_PD_W { w: self }
    }
    #[doc = "Bit 1 - configures sleep mode selection"]
    #[inline(always)]
    pub fn slp_sel(&mut self) -> SLP_SEL_W {
        SLP_SEL_W { w: self }
    }
    #[doc = "Bit 0 - configures output enable during sleep mode"]
    #[inline(always)]
    pub fn slp_oe(&mut self) -> SLP_OE_W {
        SLP_OE_W { w: self }
    }
}
// Copyright (c) Microsoft. All rights reserved.
#![deny(rust_2018_idioms)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
clippy::default_trait_access,
clippy::let_and_return,
clippy::let_unit_value,
clippy::missing_errors_doc,
clippy::similar_names,
clippy::too_many_arguments,
clippy::too_many_lines,
clippy::type_complexity
)]
use std::sync::Arc;
use aziot_cloud_client_async_common::{get_sas_connector, get_x509_connector};
/// Percent-encoding set applied to IoT Hub URI path segments: the shared
/// path-segment set extended with `'='`.
/// NOTE(review): `'='` is encoded on top of the default set — presumably to
/// keep it out of device/module-id path segments; confirm against IoT Hub
/// identity naming rules.
pub const IOT_HUB_ENCODE_SET: &percent_encoding::AsciiSet =
    &http_common::PATH_SEGMENT_ENCODE_SET.add(b'=');
/// Client for the Azure IoT Hub module-identity REST API. Requests are made
/// as the configured device, with credentials resolved through the key,
/// certificate and TPM service clients below.
pub struct Client {
    // Hub hostname, device id and credential kind this client acts as.
    device: aziot_identity_common::IoTHubDevice,
    // Key Service client; used to load the device key when signing SAS
    // tokens (see `request`).
    key_client: Arc<aziot_key_client_async::Client>,
    // NOTE(review): key_engine/cert_client appear to back the X.509
    // credential path (`get_x509_connector` is imported above) — confirm in
    // the non-SAS branches of `request`.
    key_engine: Arc<futures_util::lock::Mutex<openssl2::FunctionalEngine>>,
    cert_client: Arc<aziot_cert_client_async::Client>,
    // TPM service client; used to sign SAS tokens for TPM-backed devices.
    tpm_client: Arc<aziot_tpm_client_async::Client>,
    // Optional HTTP(S) proxy for outbound hub connections.
    proxy_uri: Option<hyper::Uri>,
}
impl Client {
    /// Builds a hub client that authenticates as `device`, resolving
    /// credentials through the given key/cert/TPM service clients and
    /// optionally routing traffic through `proxy_uri`.
    #[must_use]
    pub fn new(
        device: aziot_identity_common::IoTHubDevice,
        key_client: Arc<aziot_key_client_async::Client>,
        key_engine: Arc<futures_util::lock::Mutex<openssl2::FunctionalEngine>>,
        cert_client: Arc<aziot_cert_client_async::Client>,
        tpm_client: Arc<aziot_tpm_client_async::Client>,
        proxy_uri: Option<hyper::Uri>,
    ) -> Self {
        Self {
            device,
            key_client,
            key_engine,
            cert_client,
            tpm_client,
            proxy_uri,
        }
    }
}
impl Client {
pub async fn create_module(
&self,
module_id: &str,
authentication_type: Option<aziot_identity_common::hub::AuthMechanism>,
managed_by: Option<String>,
) -> Result<aziot_identity_common::hub::Module, std::io::Error> {
let uri = format!(
"/devices/{}/modules/{}?api-version=2017-11-08-preview",
percent_encoding::percent_encode(&self.device.device_id.as_bytes(), IOT_HUB_ENCODE_SET),
percent_encoding::percent_encode(module_id.as_bytes(), IOT_HUB_ENCODE_SET),
);
let body = aziot_identity_common::hub::Module {
module_id: module_id.into(),
managed_by,
device_id: self.device.device_id.clone(),
generation_id: None,
authentication: authentication_type,
};
let res: aziot_identity_common::hub::Module = self
.request(&self.device, http::Method::PUT, &uri, Some(&body), false)
.await?;
Ok(res)
}
pub async fn update_module(
&self,
module_id: &str,
authentication_type: Option<aziot_identity_common::hub::AuthMechanism>,
managed_by: Option<String>,
) -> Result<aziot_identity_common::hub::Module, std::io::Error> {
let uri = format!(
"/devices/{}/modules/{}?api-version=2017-11-08-preview",
percent_encoding::percent_encode(&self.device.device_id.as_bytes(), IOT_HUB_ENCODE_SET),
percent_encoding::percent_encode(module_id.as_bytes(), IOT_HUB_ENCODE_SET),
);
let body = aziot_identity_common::hub::Module {
module_id: module_id.into(),
managed_by,
device_id: self.device.device_id.clone(),
generation_id: None,
authentication: authentication_type,
};
let res: aziot_identity_common::hub::Module = self
.request(&self.device, http::Method::PUT, &uri, Some(&body), true)
.await?;
Ok(res)
}
pub async fn get_module(
&self,
module_id: &str,
) -> Result<aziot_identity_common::hub::Module, std::io::Error> {
let uri = format!(
"/devices/{}/modules/{}?api-version=2017-11-08-preview",
percent_encoding::percent_encode(&self.device.device_id.as_bytes(), IOT_HUB_ENCODE_SET),
percent_encoding::percent_encode(module_id.as_bytes(), IOT_HUB_ENCODE_SET),
);
let res: aziot_identity_common::hub::Module = self
.request::<(), _>(&self.device, http::Method::GET, &uri, None, false)
.await?;
Ok(res)
}
pub async fn get_modules(
&self,
) -> Result<Vec<aziot_identity_common::hub::Module>, std::io::Error> {
let uri = format!(
"/devices/{}/modules?api-version=2017-11-08-preview",
percent_encoding::percent_encode(&self.device.device_id.as_bytes(), IOT_HUB_ENCODE_SET),
);
let res: Vec<aziot_identity_common::hub::Module> = self
.request::<(), _>(&self.device, http::Method::GET, &uri, None, false)
.await?;
Ok(res)
}
pub async fn delete_module(&self, module_id: &str) -> Result<(), std::io::Error> {
let uri = format!(
"/devices/{}/modules/{}?api-version=2017-11-08-preview",
percent_encoding::percent_encode(&self.device.device_id.as_bytes(), IOT_HUB_ENCODE_SET),
percent_encoding::percent_encode(module_id.as_bytes(), IOT_HUB_ENCODE_SET),
);
let () = self
.request_no_content::<()>(&self.device, http::Method::DELETE, &uri, None)
.await?;
Ok(())
}
    /// Sends an authenticated HTTP request to IoT Hub and deserializes the
    /// JSON response body into `TResponse`.
    ///
    /// * `uri` is the path-and-query only; the `https://{iothub_hostname}`
    ///   prefix is prepended here.
    /// * `body`, when present, is serialized as JSON with a matching
    ///   `Content-Type` header.
    /// * `add_if_match` adds `If-Match: *`, making the operation
    ///   unconditional with respect to the resource's current ETag.
    ///
    /// Authentication depends on the device credential type: a SAS-token
    /// `Authorization` header for shared-key and TPM credentials, or a
    /// client-certificate TLS connector for X.509 credentials.
    ///
    /// Errors: non-2xx responses (surfacing the service-provided message when
    /// the body parses as `crate::Error`), 2xx responses that are not JSON,
    /// and any transport/serialization failure (all mapped to `io::Error`).
    async fn request<TRequest, TResponse>(
        &self,
        hub_device: &aziot_identity_common::IoTHubDevice,
        method: http::Method,
        uri: &str,
        body: Option<&TRequest>,
        add_if_match: bool,
    ) -> std::io::Result<TResponse>
    where
        TRequest: serde::Serialize,
        TResponse: serde::de::DeserializeOwned,
    {
        let uri = format!("https://{}{}", hub_device.iothub_hostname, uri);
        let req = hyper::Request::builder().method(method).uri(uri);
        // `req` is consumed by both branches, so this cannot be replaced with `Option::map_or_else`
        //
        // Ref: https://github.com/rust-lang/rust-clippy/issues/5822
        #[allow(clippy::option_if_let_else)]
        let req = if let Some(body) = body {
            let body = serde_json::to_vec(body)
                .expect("serializing request body to JSON cannot fail")
                .into();
            req.header(hyper::header::CONTENT_TYPE, "application/json")
                .body(body)
        } else {
            req.body(hyper::Body::default())
        };
        let mut req = req.expect("cannot fail to create hyper request");
        if add_if_match {
            req.headers_mut().insert(
                hyper::header::IF_MATCH,
                hyper::header::HeaderValue::from_static("*"),
            );
        }
        // Build the TLS connector — and, for SAS-based credential types, the
        // `Authorization` header — appropriate for this device's credentials.
        let connector = match hub_device.credentials.clone() {
            aziot_identity_common::Credentials::SharedPrivateKey(key) => {
                // SAS token signed with a key held by the key service.
                let audience = format!(
                    "{}/devices/{}",
                    hub_device.iothub_hostname, hub_device.device_id
                );
                let key_handle = self.key_client.load_key(&key).await?;
                let (connector, token) = get_sas_connector(
                    &audience,
                    key_handle,
                    &*self.key_client,
                    self.proxy_uri.clone(),
                    false,
                )
                .await?;
                let authorization_header_value = hyper::header::HeaderValue::from_str(&token)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                req.headers_mut()
                    .append(hyper::header::AUTHORIZATION, authorization_header_value);
                connector
            }
            aziot_identity_common::Credentials::Tpm => {
                // SAS token produced via the TPM client instead of a key handle.
                let audience = format!(
                    "{}/devices/{}",
                    hub_device.iothub_hostname, hub_device.device_id
                );
                let (connector, token) = get_sas_connector(
                    &audience,
                    (),
                    &*self.tpm_client,
                    self.proxy_uri.clone(),
                    false,
                )
                .await?;
                let authorization_header_value = hyper::header::HeaderValue::from_str(&token)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                req.headers_mut()
                    .append(hyper::header::AUTHORIZATION, authorization_header_value);
                connector
            }
            aziot_identity_common::Credentials::X509 {
                identity_cert,
                identity_pk,
            } => {
                // Mutual TLS: authentication happens at the TLS layer, so no
                // `Authorization` header is added in this branch.
                get_x509_connector(
                    &identity_cert,
                    &identity_pk,
                    &self.key_client,
                    &mut *self.key_engine.lock().await,
                    &self.cert_client,
                    self.proxy_uri.clone(),
                )
                .await?
            }
        };
        let client: hyper::Client<_, hyper::Body> = hyper::Client::builder().build(connector);
        let res = client
            .request(req)
            .await
            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
        let (
            http::response::Parts {
                status: res_status_code,
                headers,
                ..
            },
            body,
        ) = res.into_parts();
        log::debug!("IoTHub response status {:?}", res_status_code);
        log::debug!("IoTHub response headers{:?}", headers);
        // Only treat the body as JSON if the response advertises it.
        // `HeaderMap` iteration by value yields `(Option<HeaderName>, value)`
        // pairs; the name is `None` for extra values of a repeated header.
        let mut is_json = false;
        for (header_name, header_value) in headers {
            if header_name == Some(hyper::header::CONTENT_TYPE) {
                let value = header_value
                    .to_str()
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                if value.contains("application/json") {
                    is_json = true;
                }
            }
        }
        let body = hyper::body::to_bytes(body)
            .await
            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
        let res: TResponse = match res_status_code {
            hyper::StatusCode::OK | hyper::StatusCode::CREATED => {
                // Success must carry a JSON body; anything else is malformed.
                if !is_json {
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::Other,
                        "malformed HTTP response",
                    ));
                }
                let res = serde_json::from_slice(&body)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                res
            }
            res_status_code
                if res_status_code.is_client_error() || res_status_code.is_server_error() =>
            {
                // Surface the service-provided error message to the caller.
                let res: crate::Error = serde_json::from_slice(&body)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                return Err(std::io::Error::new(std::io::ErrorKind::Other, res.message));
            }
            _ => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    "malformed HTTP response",
                ))
            }
        };
        Ok(res)
    }
    /// Sends an authenticated HTTP request to IoT Hub that is expected to
    /// return `204 No Content`.
    ///
    /// Same transport and credential handling as [`Self::request`], with two
    /// differences: the `If-Match: *` header is always added (the operations
    /// using this path are unconditional), and any body in the response is
    /// only read when the status indicates an error, to extract the
    /// service-provided message.
    async fn request_no_content<TRequest>(
        &self,
        hub_device: &aziot_identity_common::IoTHubDevice,
        method: http::Method,
        uri: &str,
        body: Option<&TRequest>,
    ) -> std::io::Result<()>
    where
        TRequest: serde::Serialize,
    {
        let uri = format!("https://{}{}", hub_device.iothub_hostname, uri);
        let req = hyper::Request::builder().method(method).uri(uri);
        // `req` is consumed by both branches, so this cannot be replaced with `Option::map_or_else`
        //
        // Ref: https://github.com/rust-lang/rust-clippy/issues/5822
        #[allow(clippy::option_if_let_else)]
        let req = if let Some(body) = body {
            let body = serde_json::to_vec(body)
                .expect("serializing request body to JSON cannot fail")
                .into();
            req.header(hyper::header::CONTENT_TYPE, "application/json")
                .body(body)
        } else {
            req.body(Default::default())
        };
        let mut req = req.expect("cannot fail to create hyper request");
        // Unconditional operation: apply regardless of the resource's ETag.
        req.headers_mut().insert(
            hyper::header::IF_MATCH,
            hyper::header::HeaderValue::from_static("*"),
        );
        // Credential handling mirrors `request` above.
        let connector = match hub_device.credentials.clone() {
            aziot_identity_common::Credentials::SharedPrivateKey(key) => {
                let audience = format!(
                    "{}/devices/{}",
                    hub_device.iothub_hostname, hub_device.device_id
                );
                let key_handle = self.key_client.load_key(&key).await?;
                let (connector, token) = get_sas_connector(
                    &audience,
                    key_handle,
                    &*self.key_client,
                    self.proxy_uri.clone(),
                    false,
                )
                .await?;
                let authorization_header_value = hyper::header::HeaderValue::from_str(&token)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                req.headers_mut()
                    .append(hyper::header::AUTHORIZATION, authorization_header_value);
                connector
            }
            aziot_identity_common::Credentials::Tpm => {
                let audience = format!(
                    "{}/devices/{}",
                    hub_device.iothub_hostname, hub_device.device_id
                );
                let (connector, token) = get_sas_connector(
                    &audience,
                    (),
                    &*self.tpm_client,
                    self.proxy_uri.clone(),
                    false,
                )
                .await?;
                let authorization_header_value = hyper::header::HeaderValue::from_str(&token)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                req.headers_mut()
                    .append(hyper::header::AUTHORIZATION, authorization_header_value);
                connector
            }
            aziot_identity_common::Credentials::X509 {
                identity_cert,
                identity_pk,
            } => {
                get_x509_connector(
                    &identity_cert,
                    &identity_pk,
                    &self.key_client,
                    &mut *self.key_engine.lock().await,
                    &self.cert_client,
                    self.proxy_uri.clone(),
                )
                .await?
            }
        };
        let client: hyper::Client<_, hyper::Body> = hyper::Client::builder().build(connector);
        let res = client
            .request(req)
            .await
            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
        let (
            http::response::Parts {
                status: res_status_code,
                headers,
                ..
            },
            body,
        ) = res.into_parts();
        let body = hyper::body::to_bytes(body)
            .await
            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
        match res_status_code {
            hyper::StatusCode::NO_CONTENT => Ok(()),
            res_status_code
                if res_status_code.is_client_error() || res_status_code.is_server_error() =>
            {
                // Error bodies are only trusted as JSON when advertised.
                // `HeaderMap` iteration by value yields
                // `(Option<HeaderName>, value)` pairs.
                let mut is_json = false;
                for (header_name, header_value) in headers {
                    if header_name == Some(hyper::header::CONTENT_TYPE) {
                        let value = header_value
                            .to_str()
                            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                        if value.contains("application/json") {
                            is_json = true;
                        }
                    }
                }
                if !is_json {
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::Other,
                        "malformed HTTP response",
                    ));
                }
                // Surface the service-provided error message to the caller.
                let res: crate::Error = serde_json::from_slice(&body)
                    .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
                Err(std::io::Error::new(std::io::ErrorKind::Other, res.message))
            }
            _ => Err(std::io::Error::new(
                std::io::ErrorKind::Other,
                "malformed HTTP response",
            )),
        }
    }
}
/// Error payload parsed out of failed IoT Hub responses (see `request` /
/// `request_no_content`, which deserialize error bodies into this type).
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct Error {
    // The service may spell the field `Message` or `message`; accept both.
    // `Cow` lets deserialization borrow or own as needed.
    #[serde(alias = "Message")]
    pub message: std::borrow::Cow<'static, str>,
}
| 36.391398 | 100 | 0.519442 |
1e096237ab01a3679acd2a80e1ff17dccec973c6 | 2,880 | use noria::DataType;
use std::future::Future;
use tower_util::ServiceExt;
use trawler::{StoryId, UserId};
/// Simulates the lobste.rs "submit story" workflow against a Noria backend.
///
/// * `c` resolves to the Noria connection.
/// * `acting_as` is the submitting user and must be `Some` (unwrapped below).
/// * `id` is the story slug bytes; `title` the story title.
/// * `priming` skips the read-side lookups that only matter for a realistic
///   page load, since a priming run is just populating the database.
///
/// Returns the connection together with `false` (the boolean flag consumed by
/// the trawler driver — presumably "needs follow-up request"; TODO confirm).
pub(crate) async fn handle<F>(
    c: F,
    acting_as: Option<UserId>,
    id: StoryId,
    title: String,
    priming: bool,
) -> Result<(crate::Conn, bool), failure::Error>
where
    F: 'static + Future<Output = Result<crate::Conn, failure::Error>> + Send,
{
    let c = c.await?;
    let user = acting_as.unwrap();
    // check that tags are active
    // (looks up the tag with key 0; assumes that tag exists — `unwrap` below)
    let tag = c
        .view("submit_1")
        .await?
        .ready_oneshot()
        .await?
        .lookup_first(&[DataType::from(0i32)], true)
        .await?;
    let tag = tag.unwrap().take("id").unwrap();
    if !priming {
        // check that story id isn't already assigned
        let _ = c
            .view("submit_2")
            .await?
            .ready_oneshot()
            .await?
            .lookup(&[::std::str::from_utf8(&id[..]).unwrap().into()], true)
            .await?;
    }
    // TODO: check for similar stories if there's a url
    // SELECT `stories`.*
    // FROM `stories`
    // WHERE `stories`.`url` IN (
    // 'https://google.com/test',
    // 'http://google.com/test',
    // 'https://google.com/test/',
    // 'http://google.com/test/',
    // ... etc
    // )
    // AND (is_expired = 0 OR is_moderated = 1)
    // TODO
    // real impl queries `tags` and `users` again here..?
    // TODO: real impl checks *new* short_id and duplicate urls *again*
    // TODO: sometimes submit url
    // XXX: last_insert_id
    let story_id = super::slug_to_id(&id);
    // NOTE: MySQL technically does everything inside this and_then in a transaction,
    // but let's be nice to it
    let mut stories = c.table("stories").await?.ready_oneshot().await?;
    let story = noria::row!(stories,
        "id" => story_id,
        "created_at" => chrono::Local::now().naive_local(),
        "user_id" => user,
        "title" => title,
        "description" => "body",
        "short_id" => ::std::str::from_utf8(&id[..]).unwrap(),
        "markeddown_description" => "body",
    );
    stories.insert(story).await?;
    // Associate the story with the tag looked up above. Random ids stand in
    // for auto-increment keys — NOTE(review): assumes collisions are
    // negligible for the benchmark's scale.
    let mut taggings = c.table("taggings").await?.ready_oneshot().await?;
    let tagging = noria::row!(taggings,
        "id" => rand::random::<i64>(),
        "story_id" => story_id,
        "tag_id" => tag,
    );
    taggings.insert(tagging).await?;
    if !priming {
        // Read-side lookup the real site performs after submitting.
        let _ = c
            .view("submit_3")
            .await?
            .ready_oneshot()
            .await?
            .lookup(&[user.into(), story_id.into()], true)
            .await?;
    }
    // The submitter automatically upvotes their own story.
    let mut votes = c.table("votes").await?.ready_oneshot().await?;
    let vote = noria::row!(votes,
        "id" => rand::random::<i64>(),
        "user_id" => user,
        "story_id" => story_id,
        "vote" => 1,
    );
    votes.insert(vote).await?;
    Ok((c, false))
}
| 27.692308 | 85 | 0.537847 |
e6e9bf6e6367fabf04e5340592982b3938824b3d | 666 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(collections)]
fn main() {
    // `char::escape_unicode` yields the characters of the `\u{...}` escape
    // for the code point; collecting the iterator builds the escaped string
    // directly. This replaces the deprecated, feature-gated
    // `String::from_str("")` + manual push loop with stable, idiomatic code.
    let escaped: String = '\u{10401}'.escape_unicode().collect();
    assert_eq!("\\u{10401}", escaped);
}
9c1b8c663809a7a8cbcc533ef4d78a5234c9dfe0 | 5,714 | use crate::{
InputState, ValidatingComponent, ValidatingComponentProperties, ValidationContext, Validator,
};
use web_sys::HtmlInputElement;
use yew::prelude::*;
/// Icon rendered inside the text input (mapped to `pf-m-*` modifier classes
/// in `TextInput::view`).
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum TextInputIcon {
    /// No icon modifier class.
    None,
    Calendar,
    Clock,
    Search,
    /// Adds the generic `pf-m-icon` class; the caller supplies the icon.
    Custom,
}
impl Default for TextInputIcon {
    /// Inputs carry no icon unless one is requested.
    fn default() -> Self {
        Self::None
    }
}
/// Properties for [`TextInput`].
#[derive(Clone, PartialEq, Properties)]
pub struct TextInputProps {
    /// HTML `name` attribute of the rendered `<input>`.
    #[prop_or_default]
    pub name: String,
    /// HTML `id` attribute of the rendered `<input>`.
    #[prop_or_default]
    pub id: String,
    /// Value used until the user edits the field (and again after a prop
    /// change while `readonly`).
    #[prop_or_default]
    pub value: String,
    /// HTML `required` attribute.
    #[prop_or_default]
    pub required: bool,
    /// HTML `disabled` attribute.
    #[prop_or_default]
    pub disabled: bool,
    /// HTML `readonly` attribute; also causes prop changes to reset any
    /// user-entered value (see `Component::changed`).
    #[prop_or_default]
    pub readonly: bool,
    /// Visual state used when no custom validator overrides it.
    #[prop_or_default]
    pub state: InputState,
    /// Icon modifier for the input.
    #[prop_or_default]
    pub icon: TextInputIcon,
    /// HTML `type` attribute; defaults to `"text"`.
    #[prop_or("text".into())]
    pub r#type: String,
    /// HTML `placeholder` attribute.
    #[prop_or_default]
    pub placeholder: String,
    /// Emitted with the full current value on both `change` and `input`
    /// events (see `Component::update`).
    #[prop_or_default]
    pub onchange: Callback<String>,
    /// Emitted with the raw `InputEvent::data` (only the inserted text).
    #[prop_or_default]
    pub oninput: Callback<String>,
    // Called when validation should occur
    #[prop_or_default]
    pub onvalidate: Callback<ValidationContext<String>>,
    /// Validator that computes the effective [`InputState`] from the value.
    #[prop_or_default]
    pub validator: Validator<String, InputState>,
}
/// A `TextInput` validates plain `String` values.
impl ValidatingComponent for TextInput {
    type Value = String;
}
// Lets the validation machinery (presumably a wrapping form component — TODO
// confirm) inject its callback and push the computed state into the props.
impl ValidatingComponentProperties<String> for TextInputProps {
    fn set_onvalidate(&mut self, onvalidate: Callback<ValidationContext<String>>) {
        self.onvalidate = onvalidate;
    }
    fn set_input_state(&mut self, state: InputState) {
        self.state = state;
    }
}
/// Text input form component (renders a `pf-c-form-control` `<input>`).
pub struct TextInput {
    // Last value entered by the user. `None` until the first edit (and reset
    // to `None` on prop change while readonly), in which case the `value`
    // prop is used instead — see `Self::value`.
    value: Option<String>,
    // Handle to the rendered `<input>` DOM node, used to read its value.
    input_ref: NodeRef,
}
/// Internal messages of [`TextInput`].
pub enum TextInputMsg {
    /// Sent once from `create` to trigger the initial validation pass.
    Init,
    /// `change` event fired; carries the full input value.
    Changed(String),
    /// `input` event fired; carries only `InputEvent::data` (inserted text).
    Input(String),
}
impl Component for TextInput {
    type Message = TextInputMsg;
    type Properties = TextInputProps;
    fn create(ctx: &Context<Self>) -> Self {
        // Queue an initial validation pass (flagged `initial: true` below) as
        // soon as the component exists.
        ctx.link().send_message(Self::Message::Init);
        Self {
            value: None,
            input_ref: Default::default(),
        }
    }
    fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool {
        match msg {
            TextInputMsg::Init => {
                // Validate the starting value without treating it as a user
                // edit.
                ctx.props().onvalidate.emit(ValidationContext {
                    value: self.value(ctx),
                    initial: true,
                });
            }
            TextInputMsg::Changed(data) => {
                // `change` event: remember the value, notify, revalidate.
                self.value = Some(data.clone());
                ctx.props().onchange.emit(data.clone());
                ctx.props().onvalidate.emit(data.into());
            }
            TextInputMsg::Input(data) => {
                // `InputEvent::data` only carries the inserted text, so the
                // authoritative value is re-read from the DOM element.
                ctx.props().oninput.emit(data);
                if let Some(value) = self.extract_value() {
                    self.value = Some(value.clone());
                    ctx.props().onchange.emit(value.clone());
                    ctx.props().onvalidate.emit(value.into());
                }
                // only re-render if we have a validator
                return ctx.props().validator.is_custom();
            }
        }
        true
    }
    fn changed(&mut self, ctx: &Context<Self>) -> bool {
        // A readonly input tracks the `value` prop, so drop any remembered
        // user-entered value when props change.
        if ctx.props().readonly {
            self.value = None;
        }
        true
    }
    fn view(&self, ctx: &Context<Self>) -> Html {
        // Base PatternFly class plus icon modifier classes.
        let mut classes = Classes::from("pf-c-form-control");
        match ctx.props().icon {
            TextInputIcon::None => {}
            TextInputIcon::Search => classes.push("pf-m-search"),
            TextInputIcon::Calendar => classes.extend(vec!["pf-m-icon", "pf-m-calendar"]),
            TextInputIcon::Clock => classes.extend(vec!["pf-m-icon", "pf-m-clock"]),
            TextInputIcon::Custom => classes.extend(vec!["pf-m-icon"]),
        };
        // The effective input state contributes state classes and the
        // `aria-invalid` flag.
        let (classes, aria_invalid) = self.input_state(ctx).convert(classes);
        let input_ref = self.input_ref.clone();
        // `change` events don't carry the value; read it off the DOM node.
        // `batch_callback` drops the message when the node isn't available.
        let onchange = ctx.link().batch_callback(move |_| {
            input_ref
                .cast::<HtmlInputElement>()
                .map(|input| TextInputMsg::Changed(input.value()))
        });
        let oninput = ctx
            .link()
            .callback(|evt: InputEvent| TextInputMsg::Input(evt.data().unwrap_or_default()));
        let value = self.value(ctx);
        html! {
            <input
                ref={self.input_ref.clone()}
                class={classes}
                type={ctx.props().r#type.clone()}
                name={ctx.props().name.clone()}
                id={ctx.props().id.clone()}
                required={ctx.props().required}
                disabled={ctx.props().disabled}
                readonly={ctx.props().readonly}
                aria-invalid={aria_invalid.to_string()}
                value={value}
                placeholder={ctx.props().placeholder.clone()}
                onchange={onchange}
                oninput={oninput}
            />
        }
    }
}
impl TextInput {
/// Extract the current value from the input element
fn extract_value(&self) -> Option<String> {
self.input_ref
.cast::<HtmlInputElement>()
.map(|input| input.value())
}
fn value(&self, ctx: &Context<Self>) -> String {
self.value
.clone()
.unwrap_or_else(|| ctx.props().value.clone())
}
/// Get the effective input state
///
/// This may be the result of the validator, or if none was set, the provided input state
/// from the properties.
fn input_state(&self, ctx: &Context<Self>) -> InputState {
ctx.props()
.validator
.run_if(|| ValidationContext::from(self.value(ctx)))
.unwrap_or_else(|| ctx.props().state)
}
}
| 28.713568 | 97 | 0.557228 |
16e220ad3e4cccc96f2a0d8d4a9465b842011d24 | 26,457 | #![doc = "generated by AutoRust"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::de::{value, Deserializer, IntoDeserializer};
use serde::{Deserialize, Serialize, Serializer};
use std::str::FromStr;
// NOTE(review): AutoRust-generated wire models; field names and serde renames
// follow the service swagger — do not hand-edit them. The summary on
// `AttestOpenEnclaveRequest` says "Intel SGX" although the type carries an
// OpenEnclave report; the text comes from the generator.
#[doc = "Attestation request for Intel SGX enclaves"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AttestOpenEnclaveRequest {
    #[doc = "OpenEnclave report from the enclave to be attested"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub report: Option<String>,
    #[doc = "Defines the \"run time data\" provided by the attestation target for use by the MAA"]
    #[serde(rename = "runtimeData", default, skip_serializing_if = "Option::is_none")]
    pub runtime_data: Option<RuntimeData>,
    #[doc = "Defines the \"initialization time data\" used to provision the attestation target for use by the MAA"]
    #[serde(rename = "initTimeData", default, skip_serializing_if = "Option::is_none")]
    pub init_time_data: Option<InitTimeData>,
    #[doc = "Attest against the provided draft policy. Note that the resulting token cannot be validated."]
    #[serde(rename = "draftPolicyForAttestation", default, skip_serializing_if = "Option::is_none")]
    pub draft_policy_for_attestation: Option<String>,
}
impl AttestOpenEnclaveRequest {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "Attestation request for Intel SGX enclaves"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AttestSgxEnclaveRequest {
    #[doc = "Quote of the enclave to be attested"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub quote: Option<String>,
    #[doc = "Defines the \"run time data\" provided by the attestation target for use by the MAA"]
    #[serde(rename = "runtimeData", default, skip_serializing_if = "Option::is_none")]
    pub runtime_data: Option<RuntimeData>,
    #[doc = "Defines the \"initialization time data\" used to provision the attestation target for use by the MAA"]
    #[serde(rename = "initTimeData", default, skip_serializing_if = "Option::is_none")]
    pub init_time_data: Option<InitTimeData>,
    #[doc = "Attest against the provided draft policy. Note that the resulting token cannot be validated."]
    #[serde(rename = "draftPolicyForAttestation", default, skip_serializing_if = "Option::is_none")]
    pub draft_policy_for_attestation: Option<String>,
}
impl AttestSgxEnclaveRequest {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "The body of the JWT used for the PolicyCertificates APIs"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AttestationCertificateManagementBody {
    #[serde(rename = "policyCertificate", default, skip_serializing_if = "Option::is_none")]
    pub policy_certificate: Option<JsonWebKey>,
}
impl AttestationCertificateManagementBody {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "The result of an attestation operation"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AttestationResponse {
    #[doc = "An RFC 7519 Json Web Token"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub token: Option<JsonWebToken>,
}
impl AttestationResponse {
    pub fn new() -> Self {
        Self::default()
    }
}
// NOTE(review): AutoRust-generated claim set of the MAA response token. The
// claims with "DEPRECATED" summaries are legacy private-preview names kept
// for wire compatibility; new code should read the `x-ms-*` claims.
#[doc = "A Microsoft Azure Attestation response token body - the body of a response token issued by MAA"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AttestationResult {
    #[doc = "Unique Identifier for the token"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub jti: Option<String>,
    #[doc = "The Principal who issued the token"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub iss: Option<String>,
    #[doc = "The time at which the token was issued, in the number of seconds since 1970-01-0T00:00:00Z UTC"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub iat: Option<f64>,
    #[doc = "The expiration time after which the token is no longer valid, in the number of seconds since 1970-01-0T00:00:00Z UTC"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub exp: Option<f64>,
    #[doc = "The not before time before which the token cannot be considered valid, in the number of seconds since 1970-01-0T00:00:00Z UTC"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub nbf: Option<f64>,
    #[doc = "An RFC 7800 Proof of Possession Key"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cnf: Option<serde_json::Value>,
    #[doc = "The Nonce input to the attestation request, if provided."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub nonce: Option<String>,
    #[doc = "The Schema version of this structure. Current Value: 1.0"]
    #[serde(rename = "x-ms-ver", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_ver: Option<String>,
    #[doc = "Runtime Claims"]
    #[serde(rename = "x-ms-runtime", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_runtime: Option<serde_json::Value>,
    #[doc = "Inittime Claims"]
    #[serde(rename = "x-ms-inittime", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_inittime: Option<serde_json::Value>,
    #[doc = "Policy Generated Claims"]
    #[serde(rename = "x-ms-policy", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_policy: Option<serde_json::Value>,
    #[doc = "The Attestation type being attested."]
    #[serde(rename = "x-ms-attestation-type", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_attestation_type: Option<String>,
    #[serde(rename = "x-ms-policy-signer", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_policy_signer: Option<JsonWebKey>,
    #[doc = "The SHA256 hash of the BASE64URL encoded policy text used for attestation"]
    #[serde(rename = "x-ms-policy-hash", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_policy_hash: Option<String>,
    #[doc = "True if the enclave is debuggable, false otherwise"]
    #[serde(rename = "x-ms-sgx-is-debuggable", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_is_debuggable: Option<bool>,
    #[doc = "The SGX Product ID for the enclave."]
    #[serde(rename = "x-ms-sgx-product-id", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_product_id: Option<f64>,
    #[doc = "The HEX encoded SGX MRENCLAVE value for the enclave."]
    #[serde(rename = "x-ms-sgx-mrenclave", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_mrenclave: Option<String>,
    #[doc = "The HEX encoded SGX MRSIGNER value for the enclave."]
    #[serde(rename = "x-ms-sgx-mrsigner", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_mrsigner: Option<String>,
    #[doc = "The SGX SVN value for the enclave."]
    #[serde(rename = "x-ms-sgx-svn", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_svn: Option<f64>,
    #[doc = "A copy of the RuntimeData specified as an input to the attest call."]
    #[serde(rename = "x-ms-sgx-ehd", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_ehd: Option<String>,
    #[doc = "The SGX SVN value for the enclave."]
    #[serde(rename = "x-ms-sgx-collateral", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_sgx_collateral: Option<serde_json::Value>,
    // --- Legacy private-preview claim names below ---
    #[doc = "DEPRECATED: Private Preview version of x-ms-ver claim."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ver: Option<String>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-is-debuggable claim."]
    #[serde(rename = "is-debuggable", default, skip_serializing_if = "Option::is_none")]
    pub is_debuggable: Option<bool>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-collateral claim."]
    #[serde(rename = "maa-attestationcollateral", default, skip_serializing_if = "Option::is_none")]
    pub maa_attestationcollateral: Option<serde_json::Value>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-ehd claim."]
    #[serde(rename = "aas-ehd", default, skip_serializing_if = "Option::is_none")]
    pub aas_ehd: Option<String>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-ehd claim."]
    #[serde(rename = "maa-ehd", default, skip_serializing_if = "Option::is_none")]
    pub maa_ehd: Option<String>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-product-id"]
    #[serde(rename = "product-id", default, skip_serializing_if = "Option::is_none")]
    pub product_id: Option<f64>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-mrenclave."]
    #[serde(rename = "sgx-mrenclave", default, skip_serializing_if = "Option::is_none")]
    pub sgx_mrenclave: Option<String>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-mrsigner."]
    #[serde(rename = "sgx-mrsigner", default, skip_serializing_if = "Option::is_none")]
    pub sgx_mrsigner: Option<String>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-sgx-svn."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub svn: Option<f64>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-tee."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tee: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policy_signer: Option<JsonWebKey>,
    #[doc = "DEPRECATED: Private Preview version of x-ms-policy-hash"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policy_hash: Option<String>,
    #[doc = "DEPRECATED: Private Preview version of nonce"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub rp_data: Option<String>,
}
impl AttestationResult {
    pub fn new() -> Self {
        Self::default()
    }
}
// Standard Azure error envelope: `{ "error": { "code": ..., "message": ... } }`.
#[doc = "An error response from Attestation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CloudError {
    #[doc = "An error response from Attestation."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<CloudErrorBody>,
}
impl CloudError {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "An error response from Attestation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CloudErrorBody {
    #[doc = "An identifier for the error. Codes are invariant and are intended to be consumed programmatically."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[doc = "A message describing the error, intended to be suitable for displaying in a user interface."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
impl CloudErrorBody {
    pub fn new() -> Self {
        Self::default()
    }
}
// `#[serde(remote = "DataType")]` plus the manual `Deserialize`/`Serialize`
// impls below implement the "extensible enum" pattern: a wire value that is
// neither "Binary" nor "JSON" deserializes into `UnknownValue(s)` (and
// serializes back verbatim) instead of failing, so new service-side values
// cannot break this client.
#[doc = "Specifies the type of the data encoded contained within the \"data\" field of a \"RuntimeData\" or \"InitTimeData\" object"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "DataType")]
pub enum DataType {
    Binary,
    #[serde(rename = "JSON")]
    Json,
    #[serde(skip_deserializing)]
    UnknownValue(String),
}
impl FromStr for DataType {
    type Err = value::Error;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Self::deserialize(s.into_deserializer())
    }
}
impl<'de> Deserialize<'de> for DataType {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        // Unknown strings are preserved rather than rejected.
        let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
        Ok(deserialized)
    }
}
impl Serialize for DataType {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::Binary => serializer.serialize_unit_variant("DataType", 0u32, "Binary"),
            Self::Json => serializer.serialize_unit_variant("DataType", 1u32, "JSON"),
            Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
        }
    }
}
#[doc = "Defines the \"initialization time data\" used to provision the attestation target for use by the MAA"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct InitTimeData {
    // NOTE(review): likely base64url-encoded on the wire, as is usual for
    // these APIs — the swagger text does not say; confirm before decoding.
    #[doc = "UTF-8 encoded Initialization Data passed into the trusted environment when it is created."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub data: Option<String>,
    #[doc = "Specifies the type of the data encoded contained within the \"data\" field of a \"RuntimeData\" or \"InitTimeData\" object"]
    #[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
    pub data_type: Option<DataType>,
}
impl InitTimeData {
    pub fn new() -> Self {
        Self::default()
    }
}
// RFC 7517 JSON Web Key. Only `kty` is required; every other member is
// algorithm-specific and optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JsonWebKey {
    #[doc = "The \"alg\" (algorithm) parameter identifies the algorithm intended for\nuse with the key. The values used should either be registered in the\nIANA \"JSON Web Signature and Encryption Algorithms\" registry\nestablished by [JWA] or be a value that contains a Collision-\nResistant Name."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub alg: Option<String>,
    #[doc = "The \"crv\" (curve) parameter identifies the curve type"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub crv: Option<String>,
    #[doc = "RSA private exponent or ECC private key"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub d: Option<String>,
    #[doc = "RSA Private Key Parameter"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dp: Option<String>,
    #[doc = "RSA Private Key Parameter"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dq: Option<String>,
    #[doc = "RSA public exponent, in Base64"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub e: Option<String>,
    #[doc = "Symmetric key"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub k: Option<String>,
    #[doc = "The \"kid\" (key ID) parameter is used to match a specific key. This\nis used, for instance, to choose among a set of keys within a JWK Set\nduring key rollover. The structure of the \"kid\" value is\nunspecified. When \"kid\" values are used within a JWK Set, different\nkeys within the JWK Set SHOULD use distinct \"kid\" values. (One\nexample in which different keys might use the same \"kid\" value is if\nthey have different \"kty\" (key type) values but are considered to be\nequivalent alternatives by the application using them.) The \"kid\"\nvalue is a case-sensitive string."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kid: Option<String>,
    #[doc = "The \"kty\" (key type) parameter identifies the cryptographic algorithm\nfamily used with the key, such as \"RSA\" or \"EC\". \"kty\" values should\neither be registered in the IANA \"JSON Web Key Types\" registry\nestablished by [JWA] or be a value that contains a Collision-\nResistant Name. The \"kty\" value is a case-sensitive string."]
    pub kty: String,
    #[doc = "RSA modulus, in Base64"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub n: Option<String>,
    #[doc = "RSA secret prime"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub p: Option<String>,
    #[doc = "RSA secret prime, with p < q"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub q: Option<String>,
    #[doc = "RSA Private Key Parameter"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub qi: Option<String>,
    #[doc = "Use (\"public key use\") identifies the intended use of\nthe public key. The \"use\" parameter is employed to indicate whether\na public key is used for encrypting data or verifying the signature\non data. Values are commonly \"sig\" (signature) or \"enc\" (encryption)."]
    #[serde(rename = "use", default, skip_serializing_if = "Option::is_none")]
    pub use_: Option<String>,
    #[doc = "X coordinate for the Elliptic Curve point"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub x: Option<String>,
    #[doc = "The \"x5c\" (X.509 certificate chain) parameter contains a chain of one\nor more PKIX certificates [RFC5280]. The certificate chain is\nrepresented as a JSON array of certificate value strings. Each\nstring in the array is a base64-encoded (Section 4 of [RFC4648] --\nnot base64url-encoded) DER [ITU.X690.1994] PKIX certificate value.\nThe PKIX certificate containing the key value MUST be the first\ncertificate."]
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub x5c: Vec<String>,
    #[doc = "Y coordinate for the Elliptic Curve point"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub y: Option<String>,
}
impl JsonWebKey {
    // `Default` cannot be derived because `kty` is mandatory, hence the
    // explicit constructor initializing every optional member to empty.
    pub fn new(kty: String) -> Self {
        Self {
            alg: None,
            crv: None,
            d: None,
            dp: None,
            dq: None,
            e: None,
            k: None,
            kid: None,
            kty,
            n: None,
            p: None,
            q: None,
            qi: None,
            use_: None,
            x: None,
            x5c: Vec::new(),
            y: None,
        }
    }
}
// RFC 7517 JSON Web Key Set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct JsonWebKeySet {
    #[doc = "The value of the \"keys\" parameter is an array of JWK values. By\ndefault, the order of the JWK values within the array does not imply\nan order of preference among them, although applications of JWK Sets\ncan choose to assign a meaning to the order for their purposes, if\ndesired."]
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub keys: Vec<JsonWebKey>,
}
impl JsonWebKeySet {
    pub fn new() -> Self {
        Self::default()
    }
}
// A JWT is carried as a plain compact-serialized string.
pub type JsonWebToken = String;
#[doc = "The result of a policy certificate modification"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PolicyCertificatesModificationResult {
#[doc = "Hex encoded SHA1 Hash of the binary representation certificate which was added or removed"]
#[serde(rename = "x-ms-certificate-thumbprint", default, skip_serializing_if = "Option::is_none")]
pub x_ms_certificate_thumbprint: Option<String>,
#[doc = "The result of the operation"]
#[serde(rename = "x-ms-policycertificates-result", default, skip_serializing_if = "Option::is_none")]
pub x_ms_policycertificates_result: Option<policy_certificates_modification_result::XMsPolicycertificatesResult>,
}
impl PolicyCertificatesModificationResult {
pub fn new() -> Self {
Self::default()
}
}
pub mod policy_certificates_modification_result {
use super::*;
#[doc = "The result of the operation"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "XMsPolicycertificatesResult")]
pub enum XMsPolicycertificatesResult {
IsPresent,
IsAbsent,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for XMsPolicycertificatesResult {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for XMsPolicycertificatesResult {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for XMsPolicycertificatesResult {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::IsPresent => serializer.serialize_unit_variant("XMsPolicycertificatesResult", 0u32, "IsPresent"),
Self::IsAbsent => serializer.serialize_unit_variant("XMsPolicycertificatesResult", 1u32, "IsAbsent"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The response to an attestation policy management API"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PolicyCertificatesModifyResponse {
#[doc = "An RFC 7519 Json Web Token"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub token: Option<JsonWebToken>,
}
impl PolicyCertificatesModifyResponse {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The response to an attestation policy management API"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PolicyCertificatesResponse {
#[doc = "An RFC 7519 Json Web Token"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub token: Option<JsonWebToken>,
}
impl PolicyCertificatesResponse {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The result of a call to retrieve policy certificates."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PolicyCertificatesResult {
#[serde(rename = "x-ms-policy-certificates", default, skip_serializing_if = "Option::is_none")]
pub x_ms_policy_certificates: Option<JsonWebKeySet>,
}
impl PolicyCertificatesResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The response to an attestation policy operation"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PolicyResponse {
#[doc = "An RFC 7519 Json Web Token"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub token: Option<JsonWebToken>,
}
impl PolicyResponse {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The result of a policy certificate modification"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PolicyResult {
#[doc = "The result of the operation"]
#[serde(rename = "x-ms-policy-result", default, skip_serializing_if = "Option::is_none")]
pub x_ms_policy_result: Option<policy_result::XMsPolicyResult>,
#[doc = "The SHA256 hash of the policy object modified"]
#[serde(rename = "x-ms-policy-token-hash", default, skip_serializing_if = "Option::is_none")]
pub x_ms_policy_token_hash: Option<String>,
#[serde(rename = "x-ms-policy-signer", default, skip_serializing_if = "Option::is_none")]
pub x_ms_policy_signer: Option<JsonWebKey>,
#[doc = "An RFC 7519 Json Web Token"]
#[serde(rename = "x-ms-policy", default, skip_serializing_if = "Option::is_none")]
pub x_ms_policy: Option<JsonWebToken>,
}
impl PolicyResult {
pub fn new() -> Self {
Self::default()
}
}
pub mod policy_result {
use super::*;
#[doc = "The result of the operation"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "XMsPolicyResult")]
pub enum XMsPolicyResult {
Updated,
Removed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for XMsPolicyResult {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for XMsPolicyResult {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for XMsPolicyResult {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Updated => serializer.serialize_unit_variant("XMsPolicyResult", 0u32, "Updated"),
Self::Removed => serializer.serialize_unit_variant("XMsPolicyResult", 1u32, "Removed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "Defines the \"run time data\" provided by the attestation target for use by the MAA"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct RuntimeData {
#[doc = "UTF-8 encoded Runtime Data generated by the trusted environment"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub data: Option<String>,
#[doc = "Specifies the type of the data encoded contained within the \"data\" field of a \"RuntimeData\" or \"InitTimeData\" object"]
#[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
pub data_type: Option<DataType>,
}
impl RuntimeData {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct StoredAttestationPolicy {
#[doc = "Policy text to set as a sequence of UTF-8 encoded octets."]
#[serde(rename = "AttestationPolicy", default, skip_serializing_if = "Option::is_none")]
pub attestation_policy: Option<String>,
}
impl StoredAttestationPolicy {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Attestation request for Trusted Platform Module (TPM) attestation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct TpmAttestationRequest {
#[doc = "Protocol data containing artifacts for attestation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub data: Option<String>,
}
impl TpmAttestationRequest {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Attestation response for Trusted Platform Module (TPM) attestation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct TpmAttestationResponse {
#[doc = "Protocol data containing attestation service response."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub data: Option<String>,
}
impl TpmAttestationResponse {
pub fn new() -> Self {
Self::default()
}
}
| 47.160428 | 604 | 0.674566 |
1154a43f1770c89c70b0380e505873343afd23a4 | 2,473 | #![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
use std::convert::{TryFrom, TryInto};
mod c_types {
pub type c_char = u8;
pub type c_uchar = u8;
pub type c_schar = i8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i32;
pub type c_ulong = u32;
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type c_void = std::ffi::c_void;
}
include!("bindings.rs");
impl Event {
#[inline]
pub fn is_key(&self) -> bool {
match self {
Self::KEYPRESS |
Self::KEYRELEASE |
Self::KEYREPEAT => true,
_ => false,
}
}
#[cfg(feature = "sdk_v6")]
#[inline]
pub fn is_pointer(&self) -> bool {
match self {
Self::POINTERUP |
Self::POINTERDOWN |
Self::POINTERMOVE |
Self::POINTERLONG |
Self::POINTERHOLD |
Self::MTSYNC |
Self::POINTERDRAG |
Self::POINTERCANCEL => true, // Exists only in v4
_ => false,
}
}
#[cfg(feature = "sdk_v4")]
#[inline]
pub fn is_pointer(&self) -> bool {
match self {
Self::POINTERUP |
Self::POINTERDOWN |
Self::POINTERMOVE |
Self::POINTERLONG |
Self::POINTERHOLD |
Self::MTSYNC |
Self::POINTERDRAG => true,
_ => false,
}
}
#[inline]
pub fn is_panel(&self) -> bool {
match self {
Self::TAB |
Self::PANEL |
Self::PANEL_ICON |
Self::PANEL_TEXT |
Self::PANEL_PROGRESS |
Self::PANEL_MPLAYER |
Self::PANEL_USBDRIVE |
Self::PANEL_NETWORK |
Self::PANEL_CLOCK |
Self::PANEL_BLUETOOTH |
Self::PANEL_TASKLIST |
Self::PANEL_OBREEY_SYNC |
Self::PANEL_SETREADINGMODE |
Self::PANEL_SETREADINGMODE_INVERT => true,
_ => false,
}
}
}
#[cfg(not(feature = "sdk_v4"))]
impl TryFrom<c_types::c_int> for PanelType {
type Error = &'static str;
fn try_from(value: c_types::c_int) -> Result<Self, Self::Error> {
let value: u32 = value.try_into().unwrap();
if value > (PanelType::ENABLED | PanelType::EVENT_NO_HANDLING | PanelType::NO_FB_OFFSET).0 {
Err("Invalid panel type value")
}
else {
Ok(PanelType(value))
}
}
}
#[cfg(feature = "sdk_v4")]
impl TryFrom<c_types::c_int> for PanelType {
type Error = &'static str;
fn try_from(value: c_types::c_int) -> Result<Self, Self::Error> {
let value: u32 = value.try_into().unwrap();
if value > (PanelType::ENABLED | PanelType::EVENT_NO_HANDLING).0 {
Err("Invalid panel type value")
}
else {
Ok(PanelType(value))
}
}
}
| 20.957627 | 94 | 0.636474 |
031844f3bcd0b31a93358945231122abdb13422c | 2,579 | //! HTTP response
use std::collections::BTreeMap;
use std::convert::AsRef;
/// Additional response data
#[derive(Clone, Default, Debug)]
pub struct ResponseData<'a> {
pub status: &'a str,
pub headers: BTreeMap<&'a str, &'a str>,
}
impl<'a> ResponseData<'a> {
/// Create new with default values
pub fn new() -> Self {
Self {
status: "200 OK",
headers: BTreeMap::new(),
}
}
/// Change status
pub fn set_status(mut self, status: &'a str) -> Self {
self.status = status;
self
}
}
/// Create HTTP response
pub fn respond(
content: impl AsRef<[u8]>,
content_type: impl AsRef<str>,
data: Option<ResponseData>,
) -> Vec<u8> {
// convert content to &[u8]
let content = content.as_ref();
// additional response data
let data = match data {
Some(data) => data,
None => ResponseData::new(),
};
let status = data.status;
let mut headers = String::new();
data.headers.iter().for_each(|(k, v)| {
headers.push_str("\r\n");
headers.push_str(k);
headers.push_str(": ");
headers.push_str(v);
});
// create response
let mut response = Vec::new();
let header = format!(
"HTTP/1.1 {}\r\nserver: ltheinrich.de/lhi\r\ncontent-type: {}; charset=utf-8{}",
status,
content_type.as_ref(),
headers
);
response.extend_from_slice(header.as_bytes());
// write content
response.append(&mut set_content_length(content.len()));
response.extend_from_slice(content);
response.extend_from_slice(b"\r\n");
// return
response
}
/// create content-length header bytes
fn set_content_length(content_length: usize) -> Vec<u8> {
let mut header = Vec::new();
header.extend_from_slice(b"\r\n");
header.extend_from_slice(b"content-length: ");
header.extend_from_slice((content_length + 2).to_string().as_bytes());
header.extend_from_slice(b"\r\n\r\n");
header
}
/// Create HTTP redirect response
pub fn redirect(url: impl AsRef<str>) -> Vec<u8> {
// as ref
let url = url.as_ref();
// set location
let mut headers = BTreeMap::new();
headers.insert("location", url);
// create response data
let data = ResponseData {
status: "303 See Other",
headers,
};
// create and return response
respond(
format!("<html><head><title>Moved</title></head><body><h1>Moved</h1><p><a href=\"{0}\">{0}</a></p></body></html>", url),
"text/html",
Some(data)
)
}
| 25.038835 | 128 | 0.587825 |
3a973a908cc92fc9fa8925d94c790f18d2549cea | 62 | #[cfg(test)]
mod script_tests;
#[cfg(test)]
mod testing_tool;
| 12.4 | 17 | 0.709677 |
c16d4647ffb4b51d125d0b2b06a2212026d386d7 | 541 | // tests3.rs
// This test isn't testing our function -- make it do that in such a way that
// the test passes. Then write a second test that tests whether we get the result
// we expect to get when we call `is_even(5)`.
// Execute `rustlings hint tests3` for hints :)
pub fn is_even(num: i32) -> bool {
num % 2 == 0
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn is_true_when_even() {
assert!(is_even(4) == true);
}
#[test]
fn is_false_when_odd() {
assert!(is_even(7) == false);
}
}
| 20.807692 | 81 | 0.600739 |
8954e5c4a67d349ebf7c410ddcff1932aaf8c03f | 4,655 | type T = uint;
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "arm")]
const bits: uint = 32;
#[cfg(target_arch = "x86_64")]
const bits: uint = 64;
/**
* Divide two numbers, return the result, rounded up.
*
* # Arguments
*
* * x - an integer
* * y - an integer distinct from 0u
*
* # Return value
*
* The smallest integer `q` such that `x/y <= q`.
*/
pure fn div_ceil(x: uint, y: uint) -> uint {
let div = x / y;
if x % y == 0u { div }
else { div + 1u }
}
/**
* Divide two numbers, return the result, rounded to the closest integer.
*
* # Arguments
*
* * x - an integer
* * y - an integer distinct from 0u
*
* # Return value
*
* The integer `q` closest to `x/y`.
*/
pure fn div_round(x: uint, y: uint) -> uint {
let div = x / y;
if x % y * 2u < y { div }
else { div + 1u }
}
/**
* Divide two numbers, return the result, rounded down.
*
* Note: This is the same function as `div`.
*
* # Arguments
*
* * x - an integer
* * y - an integer distinct from 0u
*
* # Return value
*
* The smallest integer `q` such that `x/y <= q`. This
* is either `x/y` or `x/y + 1`.
*/
pure fn div_floor(x: uint, y: uint) -> uint { return x / y; }
/// Produce a uint suitable for use in a hash table
pure fn hash(x: uint) -> uint {
hash::hash_uint(x) as uint
}
/**
* Iterate over the range [`lo`..`hi`), or stop when requested
*
* # Arguments
*
* * lo - The integer at which to start the loop (included)
* * hi - The integer at which to stop the loop (excluded)
* * it - A block to execute with each consecutive integer of the range.
* Return `true` to continue, `false` to stop.
*
* # Return value
*
* `true` If execution proceeded correctly, `false` if it was interrupted,
* that is if `it` returned `false` at any point.
*/
pure fn iterate(lo: uint, hi: uint, it: fn(uint) -> bool) -> bool {
let mut i = lo;
while i < hi {
if (!it(i)) { return false; }
i += 1u;
}
return true;
}
/// Returns the smallest power of 2 greater than or equal to `n`
#[inline(always)]
fn next_power_of_two(n: uint) -> uint {
let halfbits: uint = sys::size_of::<uint>() * 4u;
let mut tmp: uint = n - 1u;
let mut shift: uint = 1u;
while shift <= halfbits { tmp |= tmp >> shift; shift <<= 1u; }
return tmp + 1u;
}
#[test]
fn test_next_power_of_two() {
assert (uint::next_power_of_two(0u) == 0u);
assert (uint::next_power_of_two(1u) == 1u);
assert (uint::next_power_of_two(2u) == 2u);
assert (uint::next_power_of_two(3u) == 4u);
assert (uint::next_power_of_two(4u) == 4u);
assert (uint::next_power_of_two(5u) == 8u);
assert (uint::next_power_of_two(6u) == 8u);
assert (uint::next_power_of_two(7u) == 8u);
assert (uint::next_power_of_two(8u) == 8u);
assert (uint::next_power_of_two(9u) == 16u);
assert (uint::next_power_of_two(10u) == 16u);
assert (uint::next_power_of_two(11u) == 16u);
assert (uint::next_power_of_two(12u) == 16u);
assert (uint::next_power_of_two(13u) == 16u);
assert (uint::next_power_of_two(14u) == 16u);
assert (uint::next_power_of_two(15u) == 16u);
assert (uint::next_power_of_two(16u) == 16u);
assert (uint::next_power_of_two(17u) == 32u);
assert (uint::next_power_of_two(18u) == 32u);
assert (uint::next_power_of_two(19u) == 32u);
assert (uint::next_power_of_two(20u) == 32u);
assert (uint::next_power_of_two(21u) == 32u);
assert (uint::next_power_of_two(22u) == 32u);
assert (uint::next_power_of_two(23u) == 32u);
assert (uint::next_power_of_two(24u) == 32u);
assert (uint::next_power_of_two(25u) == 32u);
assert (uint::next_power_of_two(26u) == 32u);
assert (uint::next_power_of_two(27u) == 32u);
assert (uint::next_power_of_two(28u) == 32u);
assert (uint::next_power_of_two(29u) == 32u);
assert (uint::next_power_of_two(30u) == 32u);
assert (uint::next_power_of_two(31u) == 32u);
assert (uint::next_power_of_two(32u) == 32u);
assert (uint::next_power_of_two(33u) == 64u);
assert (uint::next_power_of_two(34u) == 64u);
assert (uint::next_power_of_two(35u) == 64u);
assert (uint::next_power_of_two(36u) == 64u);
assert (uint::next_power_of_two(37u) == 64u);
assert (uint::next_power_of_two(38u) == 64u);
assert (uint::next_power_of_two(39u) == 64u);
}
#[test]
fn test_overflows() {
assert (uint::max_value > 0u);
assert (uint::min_value <= 0u);
assert (uint::min_value + uint::max_value + 1u == 0u);
}
#[test]
fn test_div() {
assert(uint::div_floor(3u, 4u) == 0u);
assert(uint::div_ceil(3u, 4u) == 1u);
assert(uint::div_round(3u, 4u) == 1u);
}
| 29.27673 | 74 | 0.615467 |
acf7994ee46cbded66d6efd48c5422c7f7ab5a35 | 12,019 | /*
This tool is part of the WhiteboxTools geospatial analysis library.
Authors: Dr. John Lindsay
Created: 20/09/2018
Last Modified: 13/10/2018
License: MIT
*/
use crate::tools::*;
use crate::vector::*;
use std::env;
use std::io::{Error, ErrorKind};
use std::path;
/// This tool can be used to extend vector lines by a specified distance. The user must
/// input the names of the input and output shapefiles, the distance to extend features
/// by, and whether to extend both ends, line starts, or line ends. The input shapefile
/// must be of a POLYLINE base shape type and should be in a projected coordinate system.
pub struct ExtendVectorLines {
name: String,
description: String,
toolbox: String,
parameters: Vec<ToolParameter>,
example_usage: String,
}
impl ExtendVectorLines {
pub fn new() -> ExtendVectorLines {
// public constructor
let name = "ExtendVectorLines".to_string();
let toolbox = "GIS Analysis".to_string();
let description = "Extends vector lines by a specified distance.".to_string();
let mut parameters = vec![];
parameters.push(ToolParameter {
name: "Input Vector Lines File".to_owned(),
flags: vec!["-i".to_owned(), "--input".to_owned()],
description: "Input vector polyline file.".to_owned(),
parameter_type: ParameterType::ExistingFile(ParameterFileType::Vector(
VectorGeometryType::Line,
)),
default_value: None,
optional: false,
});
parameters.push(ToolParameter {
name: "Output Vector File".to_owned(),
flags: vec!["-o".to_owned(), "--output".to_owned()],
description: "Output vector polyline file.".to_owned(),
parameter_type: ParameterType::NewFile(ParameterFileType::Vector(
VectorGeometryType::Line,
)),
default_value: None,
optional: false,
});
parameters.push(ToolParameter {
name: "Extend Distance".to_owned(),
flags: vec!["--dist".to_owned()],
description: "The distance to extend.".to_owned(),
parameter_type: ParameterType::Float,
default_value: None,
optional: false,
});
parameters.push(ToolParameter {
name: "Extend Direction".to_owned(),
flags: vec!["--extend".to_owned()],
description: "Extend direction, 'both ends' (default), 'line start', 'line end'."
.to_owned(),
parameter_type: ParameterType::OptionList(vec![
"both ends".to_owned(),
"line start".to_owned(),
"line end".to_owned(),
]),
default_value: Some("both ends".to_owned()),
optional: true,
});
let sep: String = path::MAIN_SEPARATOR.to_string();
let p = format!("{}", env::current_dir().unwrap().display());
let e = format!("{}", env::current_exe().unwrap().display());
let mut short_exe = e
.replace(&p, "")
.replace(".exe", "")
.replace(".", "")
.replace(&sep, "");
if e.contains(".exe") {
short_exe += ".exe";
}
let usage = format!(
">>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i=in_file.shp -o=out_file.shp --dist=10.0 --extend='both ends'",
short_exe, name
).replace("*", &sep);
ExtendVectorLines {
name: name,
description: description,
toolbox: toolbox,
parameters: parameters,
example_usage: usage,
}
}
}
impl WhiteboxTool for ExtendVectorLines {
fn get_source_file(&self) -> String {
String::from(file!())
}
fn get_tool_name(&self) -> String {
self.name.clone()
}
fn get_tool_description(&self) -> String {
self.description.clone()
}
fn get_tool_parameters(&self) -> String {
let mut s = String::from("{\"parameters\": [");
for i in 0..self.parameters.len() {
if i < self.parameters.len() - 1 {
s.push_str(&(self.parameters[i].to_string()));
s.push_str(",");
} else {
s.push_str(&(self.parameters[i].to_string()));
}
}
s.push_str("]}");
s
}
fn get_example_usage(&self) -> String {
self.example_usage.clone()
}
fn get_toolbox(&self) -> String {
self.toolbox.clone()
}
fn run<'a>(
&self,
args: Vec<String>,
working_directory: &'a str,
verbose: bool,
) -> Result<(), Error> {
let mut input_file: String = "".to_string();
let mut output_file: String = "".to_string();
let mut dist: f64 = 0.0;
let mut extend = 0;
// read the arguments
if args.len() == 0 {
return Err(Error::new(
ErrorKind::InvalidInput,
"Tool run with no parameters.",
));
}
for i in 0..args.len() {
let mut arg = args[i].replace("\"", "");
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
let flag_val = vec[0].to_lowercase().replace("--", "-");
if flag_val == "-i" || flag_val == "-input" {
input_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if flag_val == "-o" || flag_val == "-output" {
output_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if flag_val == "-dist" {
dist = if keyval {
vec[1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
} else {
args[i + 1]
.to_string()
.parse::<f64>()
.expect(&format!("Error parsing {}", flag_val))
};
} else if flag_val.contains("extend") {
let extend_str = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
extend = if extend_str.to_lowercase().contains("bo") {
// both
0
} else if extend_str.to_lowercase().contains("st") {
// line start
1
} else if extend_str.to_lowercase().contains("end") {
// line end
2
} else {
// in the event that the flag is not recognized, default to both ends
0
};
}
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
let start = Instant::now();
if verbose {
println!("***************{}", "*".repeat(self.get_tool_name().len()));
println!("* Welcome to {} *", self.get_tool_name());
println!("***************{}", "*".repeat(self.get_tool_name().len()));
}
if !input_file.contains(path::MAIN_SEPARATOR) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
let input = Shapefile::read(&input_file)?;
// make sure the input vector file is of polyline type
if input.header.shape_type.base_shape_type() != ShapeType::PolyLine {
return Err(Error::new(
ErrorKind::InvalidInput,
"The input vector data must be of POLYLINE base shape type.",
));
}
// create output file
let mut output =
Shapefile::initialize_using_file(&output_file, &input, input.header.shape_type, true)?;
let (mut x1, mut x2, mut y1, mut y2): (f64, f64, f64, f64);
let (mut x_st, mut x_end, mut y_st, mut y_end): (f64, f64, f64, f64);
let (mut start_point_in_part, mut end_point_in_part): (usize, usize);
let mut slope: f64;
for record_num in 0..input.num_records {
let mut record = input.get_record(record_num).clone();
for part in 0..record.num_parts as usize {
start_point_in_part = record.parts[part] as usize;
end_point_in_part = if part < record.num_parts as usize - 1 {
record.parts[part + 1] as usize - 1
} else {
record.num_points as usize - 1
};
if extend == 0 || extend == 1 {
// new starting point
x1 = record.points[start_point_in_part].x;
y1 = record.points[start_point_in_part].y;
x2 = record.points[start_point_in_part + 1].x;
y2 = record.points[start_point_in_part + 1].y;
if (x1 - x2) != 0f64 {
slope = (y1 - y2).atan2(x1 - x2);
x_st = x1 + dist * slope.cos();
y_st = y1 + dist * slope.sin();
} else {
x_st = x1;
y_st = if y2 > y1 { y1 - dist } else { y1 + dist };
}
record.points[start_point_in_part].x = x_st;
record.points[start_point_in_part].y = y_st;
}
if extend == 0 || extend == 2 {
// new ending point
x1 = record.points[end_point_in_part].x;
y1 = record.points[end_point_in_part].y;
x2 = record.points[end_point_in_part - 1].x;
y2 = record.points[end_point_in_part - 1].y;
if (x1 - x2) != 0f64 {
slope = (y1 - y2).atan2(x1 - x2);
x_end = x1 + dist * slope.cos();
y_end = y1 + dist * slope.sin();
} else {
x_end = x1;
y_end = if y2 < y1 { y1 - dist } else { y1 + dist };
}
record.points[end_point_in_part].x = x_end;
record.points[end_point_in_part].y = y_end;
}
}
output.add_record(record);
let atts = input.attributes.get_record(record_num);
output.attributes.add_record(atts.clone(), false);
if verbose {
progress =
(100.0_f64 * (record_num + 1) as f64 / input.num_records as f64) as usize;
if progress != old_progress {
println!("Progress: {}%", progress);
old_progress = progress;
}
}
}
if verbose {
println!("Saving data...")
};
let _ = match output.write() {
Ok(_) => {
if verbose {
println!("Output file written")
}
}
Err(e) => return Err(e),
};
let elapsed_time = get_formatted_elapsed_time(start);
if verbose {
println!("{}", &format!("Elapsed Time: {}", elapsed_time));
}
Ok(())
}
}
| 35.040816 | 120 | 0.47791 |
486d5fa5d6270b01cabfd5fc7589313863df3d3c | 12,602 | //! Zoom and pan on an image.
use crate::event::{self, Event};
use crate::image;
use crate::layout;
use crate::mouse;
use crate::renderer;
use crate::{
Clipboard, Element, Hasher, Layout, Length, Point, Rectangle, Shell, Size,
Vector, Widget,
};
use std::hash::Hash;
/// A frame that displays an image with the ability to zoom in/out and pan.
#[allow(missing_debug_implementations)]
pub struct Viewer<'a, Handle> {
    /// Caller-owned zoom/pan state (current scale, pan offsets, drag origin).
    state: &'a mut State,
    /// Padding in units; currently only folded into `hash_layout`.
    padding: u16,
    /// Layout width strategy.
    width: Length,
    /// Layout height strategy.
    height: Length,
    /// Lower bound on the zoom scale (default `0.25`).
    min_scale: f32,
    /// Upper bound on the zoom scale (default `10.0`).
    max_scale: f32,
    /// Fractional scale change applied per zoom step (default `0.10`, i.e. 10%).
    scale_step: f32,
    /// Handle identifying the image to display.
    handle: Handle,
}
impl<'a, Handle> Viewer<'a, Handle> {
    /// Creates a new [`Viewer`] with the given [`State`].
    ///
    /// Defaults: shrink-to-fit width/height, no padding, scale bounds of
    /// `0.25..=10.0`, and a 10% zoom step.
    pub fn new(state: &'a mut State, handle: Handle) -> Self {
        Viewer {
            state,
            handle,
            padding: 0,
            width: Length::Shrink,
            height: Length::Shrink,
            min_scale: 0.25,
            max_scale: 10.0,
            scale_step: 0.10,
        }
    }

    /// Sets the padding of the [`Viewer`].
    pub fn padding(self, units: u16) -> Self {
        Self {
            padding: units,
            ..self
        }
    }

    /// Sets the width of the [`Viewer`].
    pub fn width(self, width: Length) -> Self {
        Self { width, ..self }
    }

    /// Sets the height of the [`Viewer`].
    pub fn height(self, height: Length) -> Self {
        Self { height, ..self }
    }

    /// Sets the max scale applied to the image of the [`Viewer`].
    ///
    /// Default is `10.0`
    pub fn max_scale(self, max_scale: f32) -> Self {
        Self { max_scale, ..self }
    }

    /// Sets the min scale applied to the image of the [`Viewer`].
    ///
    /// Default is `0.25`
    pub fn min_scale(self, min_scale: f32) -> Self {
        Self { min_scale, ..self }
    }

    /// Sets the percentage the image of the [`Viewer`] will be scaled by
    /// when zoomed in / out.
    ///
    /// Default is `0.10`
    pub fn scale_step(self, scale_step: f32) -> Self {
        Self { scale_step, ..self }
    }

    /// Returns the drawn size of the underlying image given the bounds of
    /// the [`Viewer`]: the image is shrunk (never enlarged) to fit the
    /// bounds while preserving its aspect ratio, then multiplied by the
    /// current zoom scale.
    fn image_size<Renderer>(&self, renderer: &Renderer, bounds: Size) -> Size
    where
        Renderer: image::Renderer<Handle = Handle>,
    {
        let (width, height) = renderer.dimensions(&self.handle);
        let (width, height) = (width as f32, height as f32);

        // Smallest of the two axis ratios makes the image fit inside `bounds`.
        let ratio = (bounds.width / width).min(bounds.height / height);
        let scale = self.state.scale;

        // Only shrink to fit; images already smaller than the bounds keep
        // their native dimensions (times the zoom scale). Multiplication
        // order matches the original left-to-right evaluation.
        if ratio < 1.0 {
            Size::new(width * ratio * scale, height * ratio * scale)
        } else {
            Size::new(width * scale, height * scale)
        }
    }
}
impl<'a, Message, Renderer, Handle> Widget<Message, Renderer>
for Viewer<'a, Handle>
where
Renderer: image::Renderer<Handle = Handle>,
Handle: Clone + Hash,
{
fn width(&self) -> Length {
self.width
}
fn height(&self) -> Length {
self.height
}
fn layout(
&self,
renderer: &Renderer,
limits: &layout::Limits,
) -> layout::Node {
let (width, height) = renderer.dimensions(&self.handle);
let mut size = limits
.width(self.width)
.height(self.height)
.resolve(Size::new(width as f32, height as f32));
let expansion_size = if height > width {
self.width
} else {
self.height
};
// Only calculate viewport sizes if the images are constrained to a limited space.
// If they are Fill|Portion let them expand within their alotted space.
match expansion_size {
Length::Shrink | Length::Units(_) => {
let aspect_ratio = width as f32 / height as f32;
let viewport_aspect_ratio = size.width / size.height;
if viewport_aspect_ratio > aspect_ratio {
size.width = width as f32 * size.height / height as f32;
} else {
size.height = height as f32 * size.width / width as f32;
}
}
Length::Fill | Length::FillPortion(_) => {}
}
layout::Node::new(size)
}
fn on_event(
&mut self,
event: Event,
layout: Layout<'_>,
cursor_position: Point,
renderer: &Renderer,
_clipboard: &mut dyn Clipboard,
_shell: &mut Shell<'_, Message>,
) -> event::Status {
let bounds = layout.bounds();
let is_mouse_over = bounds.contains(cursor_position);
match event {
Event::Mouse(mouse::Event::WheelScrolled { delta })
if is_mouse_over =>
{
match delta {
mouse::ScrollDelta::Lines { y, .. }
| mouse::ScrollDelta::Pixels { y, .. } => {
let previous_scale = self.state.scale;
if y < 0.0 && previous_scale > self.min_scale
|| y > 0.0 && previous_scale < self.max_scale
{
self.state.scale = (if y > 0.0 {
self.state.scale * (1.0 + self.scale_step)
} else {
self.state.scale / (1.0 + self.scale_step)
})
.max(self.min_scale)
.min(self.max_scale);
let image_size =
self.image_size(renderer, bounds.size());
let factor =
self.state.scale / previous_scale - 1.0;
let cursor_to_center =
cursor_position - bounds.center();
let adjustment = cursor_to_center * factor
+ self.state.current_offset * factor;
self.state.current_offset = Vector::new(
if image_size.width > bounds.width {
self.state.current_offset.x + adjustment.x
} else {
0.0
},
if image_size.height > bounds.height {
self.state.current_offset.y + adjustment.y
} else {
0.0
},
);
}
}
}
event::Status::Captured
}
Event::Mouse(mouse::Event::ButtonPressed(mouse::Button::Left))
if is_mouse_over =>
{
self.state.cursor_grabbed_at = Some(cursor_position);
self.state.starting_offset = self.state.current_offset;
event::Status::Captured
}
Event::Mouse(mouse::Event::ButtonReleased(mouse::Button::Left))
if self.state.cursor_grabbed_at.is_some() =>
{
self.state.cursor_grabbed_at = None;
event::Status::Captured
}
Event::Mouse(mouse::Event::CursorMoved { position }) => {
if let Some(origin) = self.state.cursor_grabbed_at {
let image_size = self.image_size(renderer, bounds.size());
let hidden_width = (image_size.width - bounds.width / 2.0)
.max(0.0)
.round();
let hidden_height = (image_size.height
- bounds.height / 2.0)
.max(0.0)
.round();
let delta = position - origin;
let x = if bounds.width < image_size.width {
(self.state.starting_offset.x - delta.x)
.min(hidden_width)
.max(-hidden_width)
} else {
0.0
};
let y = if bounds.height < image_size.height {
(self.state.starting_offset.y - delta.y)
.min(hidden_height)
.max(-hidden_height)
} else {
0.0
};
self.state.current_offset = Vector::new(x, y);
event::Status::Captured
} else {
event::Status::Ignored
}
}
_ => event::Status::Ignored,
}
}
fn mouse_interaction(
&self,
layout: Layout<'_>,
cursor_position: Point,
_viewport: &Rectangle,
_renderer: &Renderer,
) -> mouse::Interaction {
let bounds = layout.bounds();
let is_mouse_over = bounds.contains(cursor_position);
if self.state.is_cursor_grabbed() {
mouse::Interaction::Grabbing
} else if is_mouse_over {
mouse::Interaction::Grab
} else {
mouse::Interaction::Idle
}
}
    /// Draws the image clipped to the widget bounds, centered and then
    /// shifted by the current pan offset.
    fn draw(
        &self,
        renderer: &mut Renderer,
        _style: &renderer::Style,
        layout: Layout<'_>,
        _cursor_position: Point,
        _viewport: &Rectangle,
    ) {
        let bounds = layout.bounds();
        let image_size = self.image_size(renderer, bounds.size());
        let translation = {
            // Vector from the bounds origin to where the image's top-left
            // corner must sit for the image to be centered in the widget.
            let image_top_left = Vector::new(
                bounds.width / 2.0 - image_size.width / 2.0,
                bounds.height / 2.0 - image_size.height / 2.0,
            );
            // Subtract the clamped pan offset so dragging moves the image.
            image_top_left - self.state.offset(bounds, image_size)
        };
        // `with_layer` clips everything drawn inside to `bounds`, so a
        // zoomed-in image never spills outside the widget.
        renderer.with_layer(bounds, |renderer| {
            renderer.with_translation(translation, |renderer| {
                image::Renderer::draw(
                    renderer,
                    self.handle.clone(),
                    Rectangle {
                        x: bounds.x,
                        y: bounds.y,
                        ..Rectangle::with_size(image_size)
                    },
                )
            });
        });
    }
    /// Feeds every input that can affect layout into `state`, so the
    /// runtime can detect when this widget must be re-laid-out.
    fn hash_layout(&self, state: &mut Hasher) {
        // A local marker type yields a `TypeId` unique to this widget kind,
        // preventing hash collisions with other widgets that happen to hash
        // the same field values.
        struct Marker;
        std::any::TypeId::of::<Marker>().hash(state);
        self.width.hash(state);
        self.height.hash(state);
        self.padding.hash(state);
        self.handle.hash(state);
    }
}
/// The local state of a [`Viewer`].
#[derive(Debug, Clone, Copy)]
pub struct State {
    /// Current zoom factor (1.0 = natural size).
    scale: f32,
    /// Pan offset captured at the instant a drag started; cursor moves are
    /// applied relative to it.
    starting_offset: Vector,
    /// Pan offset currently applied to the image.
    current_offset: Vector,
    /// Cursor position where the active drag began; `None` while idle.
    cursor_grabbed_at: Option<Point>,
}
impl Default for State {
fn default() -> Self {
Self {
scale: 1.0,
starting_offset: Vector::default(),
current_offset: Vector::default(),
cursor_grabbed_at: None,
}
}
}
impl State {
    /// Creates a new [`State`].
    pub fn new() -> Self {
        State::default()
    }
    /// Returns the current offset of the [`State`], given the bounds
    /// of the [`Viewer`] and its image.
    ///
    /// The raw offset is clamped so the image can never be dragged far
    /// enough that less than half of the viewport stays covered.
    fn offset(&self, bounds: Rectangle, image_size: Size) -> Vector {
        // Maximum shift allowed on each axis; zero once the image fits
        // within half of the bounds (then no panning is possible).
        let hidden_width =
            (image_size.width - bounds.width / 2.0).max(0.0).round();
        let hidden_height =
            (image_size.height - bounds.height / 2.0).max(0.0).round();
        // Clamp each component into [-hidden, hidden].
        Vector::new(
            self.current_offset.x.min(hidden_width).max(-hidden_width),
            self.current_offset.y.min(hidden_height).max(-hidden_height),
        )
    }
    /// Returns if the cursor is currently grabbed by the [`Viewer`].
    pub fn is_cursor_grabbed(&self) -> bool {
        self.cursor_grabbed_at.is_some()
    }
}
impl<'a, Message, Renderer, Handle> From<Viewer<'a, Handle>>
    for Element<'a, Message, Renderer>
where
    Renderer: 'a + image::Renderer<Handle = Handle>,
    Message: 'a,
    Handle: Clone + Hash + 'a,
{
    /// Wraps a [`Viewer`] in a generic [`Element`] so it can be embedded in
    /// any widget tree.
    fn from(viewer: Viewer<'a, Handle>) -> Element<'a, Message, Renderer> {
        Element::new(viewer)
    }
}
| 30.811736 | 90 | 0.489129 |
1eba1efac557fc6a55629984a13b193d7c787df1 | 4,203 | //! A helper library for parsing values from `clap::ArgMatches`.
use clap::ArgMatches;
use eth2_testnet_config::Eth2TestnetConfig;
use ssz::Decode;
use std::path::PathBuf;
use std::str::FromStr;
pub const BAD_TESTNET_DIR_MESSAGE: &str = "The hard-coded testnet directory was invalid. \
This happens when Lighthouse is migrating between spec versions \
or when there is no default public network to connect to. \
During these times you must specify a --testnet-dir.";
/// Loads the testnet dir at the path given by `name` in `matches`.
///
/// Errors if the flag is missing, the path cannot be parsed, or the testnet
/// dir it points at is invalid.
pub fn parse_testnet_dir(
    matches: &ArgMatches,
    name: &'static str,
) -> Result<Option<Eth2TestnetConfig>, String> {
    let path = parse_required::<PathBuf>(matches, name)?;
    match Eth2TestnetConfig::load(path.clone()) {
        Ok(config) => Ok(Some(config)),
        Err(e) => Err(format!("Unable to open testnet dir at {:?}: {}", path, e)),
    }
}
/// Loads the hardcoded network config named by `name` in `matches`.
///
/// Errors if the flag is missing or names no known network.
pub fn parse_hardcoded_network(
    matches: &ArgMatches,
    name: &str,
) -> Result<Option<Eth2TestnetConfig>, String> {
    let network_name: String = parse_required(matches, name)?;
    Eth2TestnetConfig::constant(&network_name)
}
/// Resolves a path flag with a home-directory fallback.
///
/// When `name` is present in `matches`, its value is parsed as a path.
/// Otherwise the user's home directory is located and `default` is appended
/// to it; failing to find the home directory is an error.
pub fn parse_path_with_default_in_home_dir(
    matches: &ArgMatches,
    name: &'static str,
    default: PathBuf,
) -> Result<PathBuf, String> {
    if let Some(dir) = matches.value_of(name) {
        dir.parse::<PathBuf>()
            .map_err(|e| format!("Unable to parse {}: {}", name, e))
    } else {
        let home = dirs::home_dir()
            .ok_or_else(|| format!("Unable to locate home directory. Try specifying {}", name))?;
        Ok(home.join(default))
    }
}
/// Returns the parsed value of `name`, treating absence as an error.
///
/// Parsing uses `std::str::FromStr`; both "not specified" and parse
/// failures are reported as `Err(String)`.
pub fn parse_required<T>(matches: &ArgMatches, name: &str) -> Result<T, String>
where
    T: FromStr,
    <T as FromStr>::Err: std::fmt::Display,
{
    match parse_optional(matches, name)? {
        Some(value) => Ok(value),
        None => Err(format!("{} not specified", name)),
    }
}
/// Returns the parsed value of `name` if it is present in `matches`.
///
/// Absence yields `Ok(None)`; a value that fails `FromStr` parsing yields
/// an `Err(String)` describing the failure.
pub fn parse_optional<T>(matches: &ArgMatches, name: &str) -> Result<Option<T>, String>
where
    T: FromStr,
    <T as FromStr>::Err: std::fmt::Display,
{
    match matches.value_of(name) {
        None => Ok(None),
        Some(val) => val
            .parse()
            .map(Some)
            .map_err(|e| format!("Unable to parse {}: {}", name, e)),
    }
}
/// Returns the SSZ-decoded value of `name`, treating absence as an error.
///
/// Expects the value of `name` to be 0x-prefixed ASCII-hex.
pub fn parse_ssz_required<T: Decode>(
    matches: &ArgMatches,
    name: &'static str,
) -> Result<T, String> {
    match parse_ssz_optional(matches, name)? {
        Some(value) => Ok(value),
        None => Err(format!("{} not specified", name)),
    }
}
/// Returns the SSZ-decoded value of `name` if it is present in `matches`.
///
/// The value (if any) must be 0x-prefixed ASCII-hex; a missing prefix, bad
/// hex, or an SSZ decode failure each produce a descriptive `Err(String)`.
pub fn parse_ssz_optional<T: Decode>(
    matches: &ArgMatches,
    name: &'static str,
) -> Result<Option<T>, String> {
    matches
        .value_of(name)
        .map(|val| match val.strip_prefix("0x") {
            Some(hex_str) => {
                let bytes = hex::decode(hex_str)
                    .map_err(|e| format!("Unable to parse {} as hex: {:?}", name, e))?;
                T::from_ssz_bytes(&bytes)
                    .map_err(|e| format!("Unable to parse {} as SSZ: {:?}", name, e))
            }
            None => Err(format!("Unable to parse {}, must have 0x prefix", name)),
        })
        .transpose()
}
| 36.232759 | 105 | 0.604568 |
691c90e32b047d8a08999ffd6cc44cc8aaab795f | 12,486 | //! Instruction Set Architectures.
//!
//! The `isa` module provides a `TargetIsa` trait which provides the behavior specialization needed
//! by the ISA-independent code generator. The sub-modules of this module provide definitions for
//! the instruction sets that Cranelift can target. Each sub-module has it's own implementation of
//! `TargetIsa`.
//!
//! # Constructing a `TargetIsa` instance
//!
//! The target ISA is built from the following information:
//!
//! - The name of the target ISA as a string. Cranelift is a cross-compiler, so the ISA to target
//! can be selected dynamically. Individual ISAs can be left out when Cranelift is compiled, so a
//! string is used to identify the proper sub-module.
//! - Values for settings that apply to all ISAs. This is represented by a `settings::Flags`
//! instance.
//! - Values for ISA-specific settings.
//!
//! The `isa::lookup()` function is the main entry point which returns an `isa::Builder`
//! appropriate for the requested ISA:
//!
//! ```
//! # #[macro_use] extern crate target_lexicon;
//! use cranelift_codegen::isa;
//! use cranelift_codegen::settings::{self, Configurable};
//! use std::str::FromStr;
//! use target_lexicon::Triple;
//!
//! let shared_builder = settings::builder();
//! let shared_flags = settings::Flags::new(shared_builder);
//!
//! match isa::lookup(triple!("x86_64")) {
//! Err(_) => {
//! // The x86_64 target ISA is not available.
//! }
//! Ok(mut isa_builder) => {
//! isa_builder.set("use_popcnt", "on");
//! let isa = isa_builder.finish(shared_flags);
//! }
//! }
//! ```
//!
//! The configured target ISA trait object is a `Box<TargetIsa>` which can be used for multiple
//! concurrent function compilations.
pub use crate::isa::call_conv::CallConv;
use crate::flowgraph;
use crate::ir::{self, Function};
#[cfg(feature = "unwind")]
use crate::isa::unwind::systemv::RegisterMappingError;
use crate::machinst::{MachCompileResult, TextSectionBuilder, UnwindInfoKind};
use crate::settings;
use crate::settings::SetResult;
use crate::CodegenResult;
use alloc::{boxed::Box, vec::Vec};
use core::fmt;
use core::fmt::{Debug, Formatter};
use target_lexicon::{triple, Architecture, OperatingSystem, PointerWidth, Triple};
// This module is made public here for benchmarking purposes. No guarantees are
// made regarding API stability.
#[cfg(feature = "x86")]
pub mod x64;
#[cfg(feature = "arm64")]
pub(crate) mod aarch64;
#[cfg(feature = "s390x")]
mod s390x;
pub mod unwind;
mod call_conv;
/// Returns a builder that can create a corresponding `TargetIsa`
/// or `Err(LookupError::SupportDisabled)` if not enabled.
macro_rules! isa_builder {
    ($name: ident, $cfg_terms: tt, $triple: ident) => {{
        // When the backend's cargo feature is enabled, defer to that
        // backend module's own `isa_builder` constructor.
        #[cfg $cfg_terms]
        {
            Ok($name::isa_builder($triple))
        }
        // Otherwise the backend module was compiled out entirely; report
        // that support was disabled at build time.
        #[cfg(not $cfg_terms)]
        {
            Err(LookupError::SupportDisabled)
        }
    }};
}
/// Look for an ISA for the given `triple`.
/// Return a builder that can create a corresponding `TargetIsa`.
///
/// Which architectures succeed depends on the cargo features this crate
/// was built with (`x86`, `arm64`, `s390x`); anything else is
/// `LookupError::Unsupported`.
pub fn lookup(triple: Triple) -> Result<Builder, LookupError> {
    match triple.architecture {
        Architecture::X86_64 => {
            isa_builder!(x64, (feature = "x86"), triple)
        }
        Architecture::Aarch64 { .. } => isa_builder!(aarch64, (feature = "arm64"), triple),
        Architecture::S390x { .. } => isa_builder!(s390x, (feature = "s390x"), triple),
        _ => Err(LookupError::Unsupported),
    }
}
/// Look for a supported ISA with the given `name`.
/// Return a builder that can create a corresponding `TargetIsa`.
pub fn lookup_by_name(name: &str) -> Result<Builder, LookupError> {
    // The `triple!` macro parses its argument with `FromStr`, so the trait
    // must be in scope here.
    use alloc::str::FromStr;
    lookup(triple!(name))
}
/// Describes reason for target lookup failure
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum LookupError {
/// Support for this target was disabled in the current build.
SupportDisabled,
/// Support for this target has not yet been implemented.
Unsupported,
}
// This is manually implementing Error and Display instead of using thiserror to reduce the amount
// of dependencies used by Cranelift.
impl std::error::Error for LookupError {}
impl fmt::Display for LookupError {
    /// Renders the human-readable description of the lookup failure.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // The wording is kept identical to the original messages; callers
        // may surface these strings directly to users.
        let message = match self {
            LookupError::SupportDisabled => "Support for this target is disabled",
            LookupError::Unsupported => {
                "Support for this target has not been implemented yet"
            }
        };
        f.write_str(message)
    }
}
/// Builder for a `TargetIsa`.
/// Modify the ISA-specific settings before creating the `TargetIsa` trait object with `finish`.
#[derive(Clone)]
pub struct Builder {
triple: Triple,
setup: settings::Builder,
constructor:
fn(Triple, settings::Flags, settings::Builder) -> CodegenResult<Box<dyn TargetIsa>>,
}
impl Builder {
    /// Gets the triple for the builder.
    pub fn triple(&self) -> &Triple {
        &self.triple
    }
    /// Iterates the available settings in the builder.
    pub fn iter(&self) -> impl Iterator<Item = settings::Setting> {
        self.setup.iter()
    }
    /// Combine the ISA-specific settings with the provided
    /// ISA-independent settings and allocate a fully configured
    /// `TargetIsa` trait object. May return an error if some of the
    /// flags are inconsistent or incompatible: for example, some
    /// platform-independent features, like general SIMD support, may
    /// need certain ISA extensions to be enabled.
    pub fn finish(self, shared_flags: settings::Flags) -> CodegenResult<Box<dyn TargetIsa>> {
        // `constructor` is the backend-specific factory captured when the
        // builder was created by `lookup`.
        (self.constructor)(self.triple, shared_flags, self.setup)
    }
}
impl settings::Configurable for Builder {
fn set(&mut self, name: &str, value: &str) -> SetResult<()> {
self.setup.set(name, value)
}
fn enable(&mut self, name: &str) -> SetResult<()> {
self.setup.enable(name)
}
}
/// After determining that an instruction doesn't have an encoding, how should we proceed to
/// legalize it?
///
/// The `Encodings` iterator returns a legalization function to call.
pub type Legalize =
fn(ir::Inst, &mut ir::Function, &mut flowgraph::ControlFlowGraph, &dyn TargetIsa) -> bool;
/// This struct provides information that a frontend may need to know about a target to
/// produce Cranelift IR for the target.
#[derive(Clone, Copy, Hash)]
pub struct TargetFrontendConfig {
/// The default calling convention of the target.
pub default_call_conv: CallConv,
/// The pointer width of the target.
pub pointer_width: PointerWidth,
}
impl TargetFrontendConfig {
/// Get the pointer type of this target.
pub fn pointer_type(self) -> ir::Type {
ir::Type::int(u16::from(self.pointer_bits())).unwrap()
}
/// Get the width of pointers on this target, in units of bits.
pub fn pointer_bits(self) -> u8 {
self.pointer_width.bits()
}
/// Get the width of pointers on this target, in units of bytes.
pub fn pointer_bytes(self) -> u8 {
self.pointer_width.bytes()
}
}
/// Methods that are specialized to a target ISA.
///
/// Implies a Display trait that shows the shared flags, as well as any ISA-specific flags.
pub trait TargetIsa: fmt::Display + Send + Sync {
/// Get the name of this ISA.
fn name(&self) -> &'static str;
/// Get the target triple that was used to make this trait object.
fn triple(&self) -> &Triple;
/// Get the ISA-independent flags that were used to make this trait object.
fn flags(&self) -> &settings::Flags;
/// Get the ISA-dependent flag values that were used to make this trait object.
fn isa_flags(&self) -> Vec<settings::Value>;
/// Compile the given function.
fn compile_function(
&self,
func: &Function,
want_disasm: bool,
) -> CodegenResult<MachCompileResult>;
#[cfg(feature = "unwind")]
/// Map a regalloc::Reg to its corresponding DWARF register.
fn map_regalloc_reg_to_dwarf(
&self,
_: crate::machinst::Reg,
) -> Result<u16, RegisterMappingError> {
Err(RegisterMappingError::UnsupportedArchitecture)
}
/// IntCC condition for Unsigned Addition Overflow (Carry).
fn unsigned_add_overflow_condition(&self) -> ir::condcodes::IntCC;
/// Creates unwind information for the function.
///
/// Returns `None` if there is no unwind information for the function.
#[cfg(feature = "unwind")]
fn emit_unwind_info(
&self,
result: &MachCompileResult,
kind: UnwindInfoKind,
) -> CodegenResult<Option<crate::isa::unwind::UnwindInfo>>;
/// Creates a new System V Common Information Entry for the ISA.
///
/// Returns `None` if the ISA does not support System V unwind information.
#[cfg(feature = "unwind")]
fn create_systemv_cie(&self) -> Option<gimli::write::CommonInformationEntry> {
// By default, an ISA cannot create a System V CIE
None
}
/// Returns an object that can be used to build the text section of an
/// executable.
///
/// This object will internally attempt to handle as many relocations as
/// possible using relative calls/jumps/etc between functions.
///
/// The `num_labeled_funcs` argument here is the number of functions which
/// will be "labeled" or might have calls between them, typically the number
/// of defined functions in the object file.
fn text_section_builder(&self, num_labeled_funcs: u32) -> Box<dyn TextSectionBuilder>;
}
/// Methods implemented for free for target ISA!
impl<'a> dyn TargetIsa + 'a {
/// Get the default calling convention of this target.
pub fn default_call_conv(&self) -> CallConv {
CallConv::triple_default(self.triple())
}
/// Get the endianness of this ISA.
pub fn endianness(&self) -> ir::Endianness {
match self.triple().endianness().unwrap() {
target_lexicon::Endianness::Little => ir::Endianness::Little,
target_lexicon::Endianness::Big => ir::Endianness::Big,
}
}
/// Returns the code (text) section alignment for this ISA.
pub fn code_section_alignment(&self) -> u64 {
use target_lexicon::*;
match (self.triple().operating_system, self.triple().architecture) {
(
OperatingSystem::MacOSX { .. }
| OperatingSystem::Darwin
| OperatingSystem::Ios
| OperatingSystem::Tvos,
Architecture::Aarch64(..),
) => 0x4000,
// 64 KB is the maximal page size (i.e. memory translation granule size)
// supported by the architecture and is used on some platforms.
(_, Architecture::Aarch64(..)) => 0x10000,
_ => 0x1000,
}
}
/// Get the pointer type of this ISA.
pub fn pointer_type(&self) -> ir::Type {
ir::Type::int(u16::from(self.pointer_bits())).unwrap()
}
/// Get the width of pointers on this ISA.
pub(crate) fn pointer_width(&self) -> PointerWidth {
self.triple().pointer_width().unwrap()
}
/// Get the width of pointers on this ISA, in units of bits.
pub fn pointer_bits(&self) -> u8 {
self.pointer_width().bits()
}
/// Get the width of pointers on this ISA, in units of bytes.
pub fn pointer_bytes(&self) -> u8 {
self.pointer_width().bytes()
}
/// Get the information needed by frontends producing Cranelift IR.
pub fn frontend_config(&self) -> TargetFrontendConfig {
TargetFrontendConfig {
default_call_conv: self.default_call_conv(),
pointer_width: self.pointer_width(),
}
}
/// Returns the flavor of unwind information emitted for this target.
pub(crate) fn unwind_info_kind(&self) -> UnwindInfoKind {
match self.triple().operating_system {
#[cfg(feature = "unwind")]
OperatingSystem::Windows => UnwindInfoKind::Windows,
#[cfg(feature = "unwind")]
_ => UnwindInfoKind::SystemV,
#[cfg(not(feature = "unwind"))]
_ => UnwindInfoKind::None,
}
}
}
impl Debug for &dyn TargetIsa {
    /// Formats a terse diagnostic view of the ISA — its target triple and
    /// pointer width — using the exact same output format as before.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let triple = self.triple();
        let pointer_width = self.pointer_width();
        write!(
            f,
            "TargetIsa {{ triple: {:?}, pointer_width: {:?}}}",
            triple, pointer_width
        )
    }
}
| 34.396694 | 99 | 0.644161 |
e29eb7bd70389e1a3086ee71cb5731fbb31a8976 | 1,626 |
use {
crate::{
errors::ProgramError,
},
glob,
std::path::Path,
};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum SpecialHandling {
None,
Enter,
NoEnter,
Hide,
}
#[derive(Debug, Clone)]
pub struct SpecialPath {
pub pattern: glob::Pattern,
pub handling: SpecialHandling,
}
impl SpecialPath {
    /// Parse a "glob"="handling" pair as it may appear in the configuration
    /// file.
    pub fn parse(name: &str, value: &str) -> Result<Self, ProgramError> {
        let pattern = glob::Pattern::new(name)
            .map_err(|_| ProgramError::InvalidGlobError { pattern: name.to_string() })?;
        // Normalize the handling token: lowercase it, then strip every
        // non-word character so variants like "no-enter" or "No Enter"
        // reduce to the canonical spelling.
        let token = regex!(r"\W+")
            .replace_all(&value.to_lowercase(), "")
            .to_string();
        let handling = match token.as_str() {
            "none" => SpecialHandling::None,
            "enter" => SpecialHandling::Enter,
            "noenter" => SpecialHandling::NoEnter,
            "hide" => SpecialHandling::Hide,
            _ => return Err(ProgramError::Unrecognized { token }),
        };
        Ok(Self { pattern, handling })
    }
}
pub trait SpecialPathList {
fn find(
self,
path: &Path,
) -> SpecialHandling;
}
impl SpecialPathList for &[SpecialPath] {
    /// Returns the handling declared by the first pattern that matches
    /// `path`, or `SpecialHandling::None` when no pattern applies.
    fn find(
        self,
        path: &Path,
    ) -> SpecialHandling {
        self.iter()
            .find(|sp| sp.pattern.matches_path(path))
            .map_or(SpecialHandling::None, |sp| sp.handling)
    }
}
| 24.268657 | 88 | 0.53567 |
23ac5500add34cb267d7cd193596ebe965343e14 | 1,148 | /*
* Copyright 2018-2020 TON DEV SOLUTIONS LTD.
*
* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use
* this file except in compliance with the License.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific TON DEV software governing permissions and
* limitations under the License.
*/
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate failure;
extern crate api_info;
#[macro_use]
extern crate api_derive;
pub use ton_abi::json_abi;
pub use ton_abi::Contract as AbiContract;
pub use ton_abi::Function as AbiFunction;
mod error;
pub use error::SdkError;
mod contract;
pub use contract::{Contract, ContractImage, FunctionCallSet, SdkMessage};
mod message;
pub use message::{Message, MessageId, MessageType};
mod transaction;
pub use transaction::{Transaction, TransactionFees, TransactionId};
mod block;
pub use block::{Block, MsgDescr};
pub mod types;
pub use types::BlockId;
pub mod json_helper;
| 24.956522 | 81 | 0.77439 |
e846f8dbeb404170c9d9e84d117118771a31ae8e | 4,091 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![macro_escape]
/// Entry point of failure, for details, see std::macros
#[macro_export]
macro_rules! fail(
() => (
fail!("{}", "explicit failure")
);
($msg:expr) => ({
static _FILE_LINE: (&'static str, uint) = (file!(), line!());
::core::failure::begin_unwind_string($msg, &_FILE_LINE)
});
($fmt:expr, $($arg:tt)*) => ({
// a closure can't have return type !, so we need a full
// function to pass to format_args!, *and* we need the
// file and line numbers right here; so an inner bare fn
// is our only choice.
//
// LLVM doesn't tend to inline this, presumably because begin_unwind_fmt
// is #[cold] and #[inline(never)] and because this is flagged as cold
// as returning !. We really do want this to be inlined, however,
// because it's just a tiny wrapper. Small wins (156K to 149K in size)
// were seen when forcing this to be inlined, and that number just goes
// up with the number of calls to fail!()
//
// The leading _'s are to avoid dead code warnings if this is
// used inside a dead function. Just `#[allow(dead_code)]` is
// insufficient, since the user may have
// `#[forbid(dead_code)]` and which cannot be overridden.
#[inline(always)]
fn _run_fmt(fmt: &::std::fmt::Arguments) -> ! {
static _FILE_LINE: (&'static str, uint) = (file!(), line!());
::core::failure::begin_unwind(fmt, &_FILE_LINE)
}
format_args!(_run_fmt, $fmt, $($arg)*)
});
)
/// Runtime assertion, for details see std::macros
#[macro_export]
macro_rules! assert(
($cond:expr) => (
if !$cond {
fail!(concat!("assertion failed: ", stringify!($cond)))
}
);
($cond:expr, $($arg:tt)*) => (
if !$cond {
fail!($($arg)*)
}
);
)
/// Runtime assertion, only without `--cfg ndebug`
///
/// Forwards its arguments to `assert!`, but only when the crate is compiled
/// without `--cfg ndebug` (i.e. in debug configurations).
#[macro_export]
macro_rules! debug_assert(
    // BUG FIX: the matcher previously read `($(a:tt)*)`, which is a
    // repetition of the *literal* tokens `a : tt`, so the `$($a)*` in the
    // body referred to an undeclared metavariable and any expansion failed
    // to compile. `$($arg:tt)*` captures the arguments as intended,
    // matching the sibling macros (`debug_assert_eq!` and the later
    // redefinition of `debug_assert!`).
    ($($arg:tt)*) => ({
        if cfg!(not(ndebug)) {
            assert!($($arg)*);
        }
    })
)
/// Runtime assertion for equality, for details see std::macros
#[macro_export]
macro_rules! assert_eq(
    ($cond1:expr, $cond2:expr) => ({
        // Evaluate each operand exactly once.
        let c1 = $cond1;
        let c2 = $cond2;
        // Both orders are compared deliberately: `PartialEq` implementations
        // are not required to be symmetric, so `c1 != c2` and `c2 != c1`
        // may disagree.
        if c1 != c2 || c2 != c1 {
            fail!("expressions not equal, left: {}, right: {}", c1, c2);
        }
    })
)
/// Runtime assertion for equality, only without `--cfg ndebug`
#[macro_export]
macro_rules! debug_assert_eq(
($($a:tt)*) => ({
if cfg!(not(ndebug)) {
assert_eq!($($a)*);
}
})
)
/// Runtime assertion, disableable at compile time
#[macro_export]
macro_rules! debug_assert(
($($arg:tt)*) => (if cfg!(not(ndebug)) { assert!($($arg)*); })
)
/// Short circuiting evaluation on Err
#[macro_export]
macro_rules! try(
($e:expr) => (match $e { Ok(e) => e, Err(e) => return Err(e) })
)
/// Writing a formatted string into a writer
#[macro_export]
macro_rules! write(
($dst:expr, $($arg:tt)*) => (format_args_method!($dst, write_fmt, $($arg)*))
)
/// Writing a formatted string plus a newline into a writer
#[macro_export]
macro_rules! writeln(
($dst:expr, $fmt:expr $($arg:tt)*) => (
write!($dst, concat!($fmt, "\n") $($arg)*)
)
)
/// Write some formatted data into a stream.
///
/// Identical to the macro in `std::macros`
#[macro_export]
macro_rules! write(
($dst:expr, $($arg:tt)*) => ({
format_args_method!($dst, write_fmt, $($arg)*)
})
)
#[macro_export]
macro_rules! unreachable( () => (fail!("unreachable code")) )
| 30.529851 | 80 | 0.583231 |
e2482062144e68fc38aec08d610294510b582284 | 415 | //! These prelude re-exports are a set of exports that are commonly used from
//! within the library.
//!
//! These are not publicly re-exported to the end user, and must stay as a
//! private module.
pub type JsonMap = Map<String, Value>;
pub use error::{Error, Result};
pub use serde_json::{Map, Number, Value};
pub use std::result::Result as StdResult;
#[cfg(feature = "client")]
pub use client::ClientError;
| 27.666667 | 77 | 0.710843 |
1aa6af5b78f5b8c0ba88d92a15fb346fbe55188c | 714 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z continue-parse-after-error
struct X {
a: u8 /** document a */,
//~^ ERROR found a documentation comment that doesn't document anything
//~| HELP maybe a comment was intended
}
fn main() {
let y = X {a = 1};
}
| 34 | 75 | 0.701681 |
7220845e722f0ba7e5b1f5e178aaff3d88046b7b | 3,675 | use super::optimize_filter;
use warp::{hyper::StatusCode, test::request};
static TEST_INPUT: &str = r#"
{
"method": "guillotine",
"randomSeed": 1,
"cutWidth": 2,
"stockPieces": [
{
"width": 48,
"length": 96,
"patternDirection": "none",
"price": 0
},
{
"width": 48,
"length": 120,
"patternDirection": "none",
"price": 0
}
],
"cutPieces": [
{
"externalId": 1,
"width": 10,
"length": 30,
"patternDirection": "none",
"canRotate": true
},
{
"externalId": 2,
"width": 45,
"length": 100,
"patternDirection": "none",
"canRotate": true
}
]
}
"#;
#[tokio::test]
async fn optimize_should_return_ok() {
    // A well-formed request within the body-size limit must be accepted.
    let filter = optimize_filter(10240);
    let response = request()
        .method("POST")
        .path("/optimize")
        .body(&TEST_INPUT)
        .reply(&filter)
        .await;
    assert_eq!(StatusCode::OK, response.status());
}
#[tokio::test]
async fn content_length_too_long_should_return_payload_too_large() {
let api = optimize_filter(100);
let resp = request()
.method("POST")
.path("/optimize")
.body(&TEST_INPUT)
.reply(&api)
.await;
assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
}
/// Sends `TEST_INPUT` to `/optimize` using the given (unsupported) HTTP
/// method and asserts the API answers 405 Method Not Allowed.
async fn optimize_with_wrong_http_method(http_method: &str) {
    let filter = optimize_filter(10240);
    let response = request()
        .method(http_method)
        .path("/optimize")
        .body(&TEST_INPUT)
        .reply(&filter)
        .await;
    assert_eq!(StatusCode::METHOD_NOT_ALLOWED, response.status());
}
#[tokio::test]
async fn optimize_with_delete_should_fail() {
optimize_with_wrong_http_method("DELETE").await
}
#[tokio::test]
async fn optimize_with_get_should_fail() {
optimize_with_wrong_http_method("GET").await
}
#[tokio::test]
async fn optimize_with_patch_should_fail() {
optimize_with_wrong_http_method("PATCH").await
}
#[tokio::test]
async fn optimize_with_put_should_fail() {
optimize_with_wrong_http_method("PUT").await
}
#[tokio::test]
async fn invalid_input_should_return_bad_request() {
let api = optimize_filter(1024);
let invalid_input = "{}";
let resp = request()
.method("POST")
.path("/optimize")
.body(&invalid_input)
.reply(&api)
.await;
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn non_fitting_price_should_return_unprocessable_entity() {
let api = optimize_filter(1024);
let non_fitting_input = r#"
{
"method": "guillotine",
"randomSeed": 1,
"cutWidth": 2,
"stockPieces": [
{
"width": 48,
"length": 96,
"patternDirection": "none",
"price": 0
}
],
"cutPieces": [
{
"externalId": 1,
"width": 10,
"length": 300,
"patternDirection": "none",
"canRotate": true
}
]
}
"#;
let resp = request()
.method("POST")
.path("/optimize")
.body(&non_fitting_input)
.reply(&api)
.await;
assert_eq!(resp.status(), StatusCode::UNPROCESSABLE_ENTITY);
}
| 24.5 | 68 | 0.498503 |
5d885137b03617864c3741732c41cb3fd054fa8c | 93 | #[cfg(feature = "actix_web_4")]
pub mod actix_web_4;
#[cfg(feature = "axum")]
pub mod axum;
| 15.5 | 31 | 0.666667 |
e5b7931273af5f44732a1fef08f65e58b21cd65a | 33,407 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::sync::Arc;
use common_exception::ErrorCode;
use common_exception::Result;
use common_meta_types::NodeInfo;
use common_planners::AggregatorFinalPlan;
use common_planners::AggregatorPartialPlan;
use common_planners::BroadcastPlan;
use common_planners::EmptyPlan;
use common_planners::Expression;
use common_planners::ExpressionPlan;
use common_planners::Expressions;
use common_planners::FilterPlan;
use common_planners::HavingPlan;
use common_planners::LimitByPlan;
use common_planners::LimitPlan;
use common_planners::Partitions;
use common_planners::PlanNode;
use common_planners::ProjectionPlan;
use common_planners::ReadDataSourcePlan;
use common_planners::RemotePlan;
use common_planners::SelectPlan;
use common_planners::SinkPlan;
use common_planners::SortPlan;
use common_planners::StageKind;
use common_planners::StagePlan;
use common_planners::SubQueriesSetPlan;
use common_tracing::tracing;
use crate::api::BroadcastAction;
use crate::api::FlightAction;
use crate::api::ShuffleAction;
use crate::sessions::QueryContext;
#[derive(PartialEq)]
enum RunningMode {
    // The subtree is distributed across the cluster's nodes.
    Cluster,
    // The subtree runs on a single node only.
    Standalone,
}
/// The result of scheduling: the plan to run locally plus, per remote node,
/// the queue of flight actions that node must execute.
pub struct Tasks {
    plan: PlanNode,
    context: Arc<QueryContext>,
    // Node id -> ordered queue of actions for that node.
    actions: HashMap<String, VecDeque<FlightAction>>,
}
/// Rewrites a query plan into per-node plans for distributed execution.
pub struct PlanScheduler {
    // Id shared by the shuffle actions generated for this scheduling stage.
    stage_id: String,
    // Ids of every cluster node, in cluster order.
    cluster_nodes: Vec<String>,
    // Index of the local node within `cluster_nodes`/`nodes_plan`.
    local_pos: usize,
    // The plan currently assigned to each node (parallel to `cluster_nodes`).
    nodes_plan: Vec<PlanNode>,
    // Whether the subtree being visited runs clustered or standalone.
    running_mode: RunningMode,
    query_context: Arc<QueryContext>,
    // NOTE(review): initialized empty in `try_create`; presumably filled
    // while visiting subquery plans — confirm against the rest of the impl.
    subqueries_expressions: Vec<Expressions>,
}
impl PlanScheduler {
    /// Builds a scheduler snapshot of the current cluster: records each
    /// node's id, the local node's position, and seeds every node with an
    /// empty plan.
    pub fn try_create(context: Arc<QueryContext>) -> Result<PlanScheduler> {
        let cluster = context.get_cluster();
        let cluster_nodes = cluster.get_nodes();
        let mut local_pos = 0;
        let mut nodes_plan = Vec::new();
        let mut cluster_nodes_name = Vec::with_capacity(cluster_nodes.len());
        for index in 0..cluster_nodes.len() {
            if cluster.is_local(cluster_nodes[index].as_ref()) {
                local_pos = index;
            }
            // Every node starts with a placeholder plan; visiting the query
            // plan replaces these entries.
            nodes_plan.push(PlanNode::Empty(EmptyPlan::create()));
            cluster_nodes_name.push(cluster_nodes[index].id.clone());
        }
        Ok(PlanScheduler {
            local_pos,
            nodes_plan,
            // Fresh stage id, shared by all shuffles built by this scheduler.
            stage_id: uuid::Uuid::new_v4().to_string(),
            query_context: context,
            subqueries_expressions: vec![],
            cluster_nodes: cluster_nodes_name,
            running_mode: RunningMode::Standalone,
        })
    }
    /// Schedule the plan to Local or Remote mode.
    ///
    /// On an empty cluster the whole plan becomes the local task; otherwise
    /// the plan tree is visited to build per-node tasks, and the local
    /// node's resulting plan is recorded in the returned `Tasks`.
    #[tracing::instrument(level = "debug", skip(self, plan))]
    pub fn reschedule(mut self, plan: &PlanNode) -> Result<Tasks> {
        let context = self.query_context.clone();
        let cluster = context.get_cluster();
        let mut tasks = Tasks::create(context);
        match cluster.is_empty() {
            true => tasks.finalize(plan),
            false => {
                self.visit_plan_node(plan, &mut tasks)?;
                tasks.finalize(&self.nodes_plan[self.local_pos])
            }
        }
    }
}
impl Tasks {
    /// Create an empty task set bound to the given query context.
    pub fn create(context: Arc<QueryContext>) -> Tasks {
        Tasks {
            context,
            actions: HashMap::new(),
            plan: PlanNode::Empty(EmptyPlan::create()),
        }
    }

    /// The plan fragment that should be executed on the local node.
    pub fn get_local_task(&self) -> PlanNode {
        self.plan.clone()
    }

    /// Record the local plan fragment and return the finished task set.
    pub fn finalize(mut self, plan: &PlanNode) -> Result<Self> {
        self.plan = plan.clone();
        Ok(self)
    }

    /// Pair every queued action with the cluster node it targets, preserving
    /// per-node insertion order. Nodes with no queued actions are skipped.
    pub fn get_tasks(&self) -> Result<Vec<(Arc<NodeInfo>, FlightAction)>> {
        let cluster = self.context.get_cluster();

        let mut tasks = Vec::new();
        for cluster_node in &cluster.get_nodes() {
            if let Some(actions) = self.actions.get(&cluster_node.id) {
                for action in actions {
                    tasks.push((cluster_node.clone(), action.clone()));
                }
            }
        }
        Ok(tasks)
    }

    /// Queue an action for the named node.
    #[allow(clippy::ptr_arg)]
    pub fn add_task(&mut self, node_name: &String, action: FlightAction) {
        // The entry API does the map lookup once and creates the per-node
        // queue on first use; this replaces the manual Occupied/Vacant match.
        self.actions
            .entry(node_name.to_string())
            .or_insert_with(VecDeque::new)
            .push_back(action);
    }
}
impl PlanScheduler {
    // Stage scheduling. Each StageKind (Normal / Expansive / Convergent) is
    // lowered into ShuffleActions queued in `tasks`, and each node's current
    // plan fragment is replaced by a RemotePlan that fetches the shuffled data.

    /// Shuffle action for a Normal stage: one node's plan (`input`) scatters
    /// its output to every cluster node, keyed by the stage's scatter expression.
    fn normal_action(&self, stage: &StagePlan, input: &PlanNode) -> ShuffleAction {
        ShuffleAction {
            stage_id: self.stage_id.clone(),
            query_id: self.query_context.get_id(),
            plan: input.clone(),
            sinks: self.cluster_nodes.clone(),
            scatters_expression: stage.scatters_expr.clone(),
        }
    }

    /// The plan `node_name` executes after a Normal stage: fetch its stream
    /// from every cluster node.
    fn normal_remote_plan(&self, node_name: &str, action: &ShuffleAction) -> RemotePlan {
        RemotePlan {
            schema: action.plan.schema(),
            query_id: action.query_id.clone(),
            stage_id: action.stage_id.clone(),
            stream_id: node_name.to_string(),
            fetch_nodes: self.cluster_nodes.clone(),
        }
    }

    /// Normal stage: all nodes reshuffle to all nodes. Requires that the
    /// previous stage already ran in Cluster mode.
    fn schedule_normal_tasks(&mut self, stage: &StagePlan, tasks: &mut Tasks) -> Result<()> {
        if let RunningMode::Standalone = self.running_mode {
            return Err(ErrorCode::LogicalError(
                "Normal stage cannot work on standalone mode",
            ));
        }
        for index in 0..self.nodes_plan.len() {
            let node_name = &self.cluster_nodes[index];
            let shuffle_action = self.normal_action(stage, &self.nodes_plan[index]);
            let remote_plan_node = self.normal_remote_plan(node_name, &shuffle_action);
            let shuffle_flight_action = FlightAction::PrepareShuffleAction(shuffle_action);
            // Queue the shuffle on the node, then replace its plan fragment
            // with the RemotePlan that reads the shuffle result.
            tasks.add_task(node_name, shuffle_flight_action);
            self.nodes_plan[index] = PlanNode::Remote(remote_plan_node);
        }
        Ok(())
    }

    /// Shuffle action for an Expansive stage: the single local plan scatters
    /// its output to every cluster node.
    fn expansive_action(&self, stage: &StagePlan, input: &PlanNode) -> ShuffleAction {
        ShuffleAction {
            stage_id: self.stage_id.clone(),
            query_id: self.query_context.get_id(),
            plan: input.clone(),
            sinks: self.cluster_nodes.clone(),
            scatters_expression: stage.scatters_expr.clone(),
        }
    }

    /// The plan `node_name` executes after an Expansive stage: fetch only
    /// from the local node, which is the single producer.
    fn expansive_remote_plan(&self, node_name: &str, action: &ShuffleAction) -> PlanNode {
        PlanNode::Remote(RemotePlan {
            schema: action.plan.schema(),
            query_id: action.query_id.clone(),
            stage_id: action.stage_id.clone(),
            stream_id: node_name.to_string(),
            fetch_nodes: vec![self.cluster_nodes[self.local_pos].clone()],
        })
    }

    /// Expansive stage: 1 -> N fan-out. Only valid when currently Standalone;
    /// switches the scheduler into Cluster mode.
    fn schedule_expansive_tasks(&mut self, stage: &StagePlan, tasks: &mut Tasks) -> Result<()> {
        if let RunningMode::Cluster = self.running_mode {
            return Err(ErrorCode::LogicalError(
                "Expansive stage cannot work on Cluster mode",
            ));
        }
        // After this stage every node has a fragment, so we run in Cluster mode.
        self.running_mode = RunningMode::Cluster;

        // Only the local node produces data; it gets the single shuffle task.
        let node_name = &self.cluster_nodes[self.local_pos];
        let shuffle_action = self.expansive_action(stage, &self.nodes_plan[self.local_pos]);
        tasks.add_task(
            node_name,
            FlightAction::PrepareShuffleAction(shuffle_action.clone()),
        );
        // Every node (local included) reads its slice from the local producer.
        for index in 0..self.nodes_plan.len() {
            let node_name = &self.cluster_nodes[index];
            self.nodes_plan[index] = self.expansive_remote_plan(node_name, &shuffle_action);
        }
        Ok(())
    }

    /// Shuffle action for a Convergent stage: `input`'s output is sent to the
    /// local node only (single sink).
    fn converge_action(&self, stage: &StagePlan, input: &PlanNode) -> ShuffleAction {
        ShuffleAction {
            stage_id: self.stage_id.clone(),
            query_id: self.query_context.get_id(),
            plan: input.clone(),
            sinks: vec![self.cluster_nodes[self.local_pos].clone()],
            scatters_expression: stage.scatters_expr.clone(),
        }
    }

    /// The plan the local node executes after a Convergent stage: fetch the
    /// converged stream from every cluster node.
    fn converge_remote_plan(&self, node_name: &str, stage: &StagePlan) -> RemotePlan {
        RemotePlan {
            schema: stage.schema(),
            stage_id: self.stage_id.clone(),
            query_id: self.query_context.get_id(),
            stream_id: node_name.to_string(),
            fetch_nodes: self.cluster_nodes.clone(),
        }
    }

    /// Convergent stage: N -> 1 fan-in to the local node. Only valid when
    /// currently Cluster; switches the scheduler back to Standalone mode.
    fn schedule_converge_tasks(&mut self, stage: &StagePlan, tasks: &mut Tasks) -> Result<()> {
        if let RunningMode::Standalone = self.running_mode {
            return Err(ErrorCode::LogicalError(
                "Converge stage cannot work on standalone mode",
            ));
        }
        // Every node ships its fragment's output to the local node.
        for index in 0..self.nodes_plan.len() {
            let node_name = &self.cluster_nodes[index];
            let shuffle_action = self.converge_action(stage, &self.nodes_plan[index]);
            let shuffle_flight_action = FlightAction::PrepareShuffleAction(shuffle_action);
            tasks.add_task(node_name, shuffle_flight_action);
        }
        // From here on, only the local node continues executing.
        self.running_mode = RunningMode::Standalone;
        let node_name = &self.cluster_nodes[self.local_pos];
        let remote_plan_node = self.converge_remote_plan(node_name, stage);
        self.nodes_plan[self.local_pos] = PlanNode::Remote(remote_plan_node);
        Ok(())
    }
}
impl PlanScheduler {
    /// Dispatch on the plan node kind. Visitors rewrite the per-node plan
    /// fragments bottom-up and may queue remote tasks into `tasks`.
    fn visit_plan_node(&mut self, node: &PlanNode, tasks: &mut Tasks) -> Result<()> {
        match node {
            PlanNode::AggregatorPartial(plan) => self.visit_aggr_part(plan, tasks),
            PlanNode::AggregatorFinal(plan) => self.visit_aggr_final(plan, tasks),
            PlanNode::Empty(plan) => self.visit_empty(plan, tasks),
            PlanNode::Projection(plan) => self.visit_projection(plan, tasks),
            PlanNode::Filter(plan) => self.visit_filter(plan, tasks),
            PlanNode::Sort(plan) => self.visit_sort(plan, tasks),
            PlanNode::Limit(plan) => self.visit_limit(plan, tasks),
            PlanNode::LimitBy(plan) => self.visit_limit_by(plan, tasks),
            PlanNode::ReadSource(plan) => self.visit_data_source(plan, tasks),
            PlanNode::Sink(plan) => self.visit_sink(plan, tasks),
            PlanNode::Select(plan) => self.visit_select(plan, tasks),
            PlanNode::Stage(plan) => self.visit_stage(plan, tasks),
            PlanNode::Broadcast(plan) => self.visit_broadcast(plan, tasks),
            PlanNode::Having(plan) => self.visit_having(plan, tasks),
            PlanNode::Expression(plan) => self.visit_expression(plan, tasks),
            PlanNode::SubQueryExpression(plan) => self.visit_subqueries_set(plan, tasks),
            // Any other node kind is not distributable yet.
            _ => Err(ErrorCode::UnImplement("")),
        }
    }
/// Partial aggregation runs wherever its input runs: wrap the active plan
/// fragment(s) in an AggregatorPartial node.
fn visit_aggr_part(&mut self, plan: &AggregatorPartialPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_aggr_part(plan);
    } else {
        self.visit_local_aggr_part(plan);
    }
    Ok(())
}

/// Standalone: only the local fragment is wrapped.
fn visit_local_aggr_part(&mut self, plan: &AggregatorPartialPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::AggregatorPartial(AggregatorPartialPlan {
        schema: plan.schema(),
        aggr_expr: plan.aggr_expr.clone(),
        group_expr: plan.group_expr.clone(),
        input,
    });
}

/// Cluster: every node partially aggregates its own fragment.
fn visit_cluster_aggr_part(&mut self, plan: &AggregatorPartialPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::AggregatorPartial(AggregatorPartialPlan {
            schema: plan.schema(),
            aggr_expr: plan.aggr_expr.clone(),
            group_expr: plan.group_expr.clone(),
            input,
        });
    }
}
/// Final aggregation merges the partial states on the active node(s).
fn visit_aggr_final(&mut self, plan: &AggregatorFinalPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_aggr_final(plan);
    } else {
        self.visit_local_aggr_final(plan);
    }
    Ok(())
}

/// Standalone: only the local fragment is wrapped.
fn visit_local_aggr_final(&mut self, plan: &AggregatorFinalPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::AggregatorFinal(AggregatorFinalPlan {
        schema: plan.schema.clone(),
        aggr_expr: plan.aggr_expr.clone(),
        group_expr: plan.group_expr.clone(),
        schema_before_group_by: plan.schema_before_group_by.clone(),
        input,
    })
}

/// Cluster: every node finalizes its own fragment.
fn visit_cluster_aggr_final(&mut self, plan: &AggregatorFinalPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::AggregatorFinal(AggregatorFinalPlan {
            schema: plan.schema.clone(),
            aggr_expr: plan.aggr_expr.clone(),
            group_expr: plan.group_expr.clone(),
            schema_before_group_by: plan.schema_before_group_by.clone(),
            input,
        })
    }
}
/// Leaf placeholder: an Empty node marks where a pipeline starts, and its
/// `is_cluster` flag decides the scheduler's initial running mode.
fn visit_empty(&mut self, plan: &EmptyPlan, _: &mut Tasks) -> Result<()> {
    if plan.is_cluster {
        self.visit_cluster_empty(plan);
    } else {
        self.visit_local_empty(plan);
    }
    Ok(())
}

/// Standalone source: only the local slot gets the Empty plan.
fn visit_local_empty(&mut self, origin: &EmptyPlan) {
    self.running_mode = RunningMode::Standalone;
    self.nodes_plan[self.local_pos] = PlanNode::Empty(origin.clone());
}

/// Cluster source: every node starts from an equivalent Empty plan.
fn visit_cluster_empty(&mut self, origin: &EmptyPlan) {
    self.running_mode = RunningMode::Cluster;
    for node_plan in self.nodes_plan.iter_mut() {
        *node_plan = PlanNode::Empty(EmptyPlan {
            schema: origin.schema.clone(),
            is_cluster: origin.is_cluster,
        })
    }
}
    /// Stage boundary: visit the input first, then open a fresh stage id and
    /// schedule shuffle tasks according to the stage kind.
    fn visit_stage(&mut self, stage: &StagePlan, tasks: &mut Tasks) -> Result<()> {
        self.visit_plan_node(stage.input.as_ref(), tasks)?;

        // Entering new stage
        self.stage_id = uuid::Uuid::new_v4().to_string();

        match stage.kind {
            StageKind::Normal => self.schedule_normal_tasks(stage, tasks),
            StageKind::Expansive => self.schedule_expansive_tasks(stage, tasks),
            StageKind::Convergent => self.schedule_converge_tasks(stage, tasks),
        }
    }

    /// Broadcast boundary: like a stage, but each producer's output is sent
    /// to all nodes without scattering.
    fn visit_broadcast(&mut self, plan: &BroadcastPlan, tasks: &mut Tasks) -> Result<()> {
        self.visit_plan_node(plan.input.as_ref(), tasks)?;

        // Entering new stage
        self.stage_id = uuid::Uuid::new_v4().to_string();

        match self.running_mode {
            RunningMode::Cluster => self.visit_cluster_broadcast(tasks),
            RunningMode::Standalone => self.visit_local_broadcast(tasks),
        };
        Ok(())
    }
    /// Broadcast action: `input`'s output is replicated to every cluster node.
    fn broadcast_action(&self, input: &PlanNode) -> BroadcastAction {
        BroadcastAction {
            stage_id: self.stage_id.clone(),
            query_id: self.query_context.get_id(),
            plan: input.clone(),
            sinks: self.cluster_nodes.clone(),
        }
    }

    /// The plan `node_name` executes after a cluster broadcast: fetch its
    /// stream from every cluster node.
    fn broadcast_remote(&self, node_name: &str, action: &BroadcastAction) -> RemotePlan {
        RemotePlan {
            schema: action.plan.schema(),
            query_id: action.query_id.clone(),
            stage_id: action.stage_id.clone(),
            stream_id: node_name.to_string(),
            fetch_nodes: self.cluster_nodes.clone(),
        }
    }

    /// Standalone -> Cluster broadcast: the local node is the only producer;
    /// every node then fetches the replicated stream from it.
    fn visit_local_broadcast(&mut self, tasks: &mut Tasks) {
        self.running_mode = RunningMode::Cluster;

        let node_name = &self.cluster_nodes[self.local_pos];
        let action = self.broadcast_action(&self.nodes_plan[self.local_pos]);
        tasks.add_task(node_name, FlightAction::BroadcastAction(action.clone()));
        for index in 0..self.nodes_plan.len() {
            let node_name = &self.cluster_nodes[index];
            self.nodes_plan[index] = PlanNode::Remote(RemotePlan {
                schema: action.plan.schema(),
                query_id: action.query_id.clone(),
                stage_id: action.stage_id.clone(),
                stream_id: node_name.to_string(),
                // Single producer: fetch only from the local node.
                fetch_nodes: vec![self.cluster_nodes[self.local_pos].clone()],
            });
        }
    }

    /// Cluster broadcast: every node broadcasts its fragment, then reads the
    /// union of all broadcast streams.
    fn visit_cluster_broadcast(&mut self, tasks: &mut Tasks) {
        self.running_mode = RunningMode::Cluster;
        for index in 0..self.nodes_plan.len() {
            let node_name = &self.cluster_nodes[index];
            let action = self.broadcast_action(&self.nodes_plan[index]);
            let remote_plan_node = self.broadcast_remote(node_name, &action);
            tasks.add_task(node_name, FlightAction::BroadcastAction(action));
            self.nodes_plan[index] = PlanNode::Remote(remote_plan_node);
        }
    }
/// Projection is node-local: wrap the active fragment(s) in a Projection node.
fn visit_projection(&mut self, plan: &ProjectionPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_projection(plan);
    } else {
        self.visit_local_projection(plan);
    }
    Ok(())
}

fn visit_local_projection(&mut self, plan: &ProjectionPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Projection(ProjectionPlan {
        schema: plan.schema.clone(),
        expr: plan.expr.clone(),
        input,
    })
}

fn visit_cluster_projection(&mut self, plan: &ProjectionPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Projection(ProjectionPlan {
            schema: plan.schema.clone(),
            expr: plan.expr.clone(),
            input,
        })
    }
}

/// Expression evaluation is node-local as well.
fn visit_expression(&mut self, plan: &ExpressionPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_expression(plan);
    } else {
        self.visit_local_expression(plan);
    }
    Ok(())
}

fn visit_local_expression(&mut self, plan: &ExpressionPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Expression(ExpressionPlan {
        desc: plan.desc.clone(),
        exprs: plan.exprs.clone(),
        schema: plan.schema.clone(),
        input,
    });
}

fn visit_cluster_expression(&mut self, plan: &ExpressionPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Expression(ExpressionPlan {
            desc: plan.desc.clone(),
            exprs: plan.exprs.clone(),
            schema: plan.schema.clone(),
            input,
        });
    }
}
    /// A set of subquery expressions: schedule every subquery, then rebuild
    /// the SubQueriesSetPlan per node with the rewritten expressions.
    fn visit_subqueries_set(&mut self, plan: &SubQueriesSetPlan, tasks: &mut Tasks) -> Result<()> {
        self.visit_plan_node(plan.input.as_ref(), tasks)?;
        match self.running_mode {
            RunningMode::Cluster => self.visit_cluster_subqueries(&plan.expressions, tasks),
            RunningMode::Standalone => self.visit_local_subqueries(&plan.expressions, tasks),
        }
    }

    /// Standalone: only the local node's rewritten expressions are used.
    fn visit_local_subqueries(&mut self, exprs: &[Expression], tasks: &mut Tasks) -> Result<()> {
        self.visit_subqueries(exprs, tasks)?;

        // visit_subqueries must have produced one expression list per node.
        if self.subqueries_expressions.len() != self.nodes_plan.len() {
            return Err(ErrorCode::LogicalError(
                "New subqueries size miss match nodes plan",
            ));
        }

        let new_expressions = self.subqueries_expressions[self.local_pos].clone();

        // Sanity: one rewritten expression per original expression.
        if new_expressions.len() != exprs.len() {
            return Err(ErrorCode::LogicalError(
                "New expression size miss match exprs",
            ));
        }

        self.nodes_plan[self.local_pos] = PlanNode::SubQueryExpression(SubQueriesSetPlan {
            expressions: new_expressions,
            input: Arc::new(self.nodes_plan[self.local_pos].clone()),
        });
        Ok(())
    }

    /// Cluster: each node gets its own rewritten expression list.
    fn visit_cluster_subqueries(&mut self, exprs: &[Expression], tasks: &mut Tasks) -> Result<()> {
        self.visit_subqueries(exprs, tasks)?;

        // visit_subqueries must have produced one expression list per node.
        if self.subqueries_expressions.len() != self.nodes_plan.len() {
            return Err(ErrorCode::LogicalError(
                "New subqueries size miss match nodes plan",
            ));
        }

        for index in 0..self.nodes_plan.len() {
            let new_expressions = self.subqueries_expressions[index].clone();

            // Sanity: one rewritten expression per original expression.
            if new_expressions.len() != exprs.len() {
                return Err(ErrorCode::LogicalError(
                    "New expression size miss match exprs",
                ));
            }

            self.nodes_plan[index] = PlanNode::SubQueryExpression(SubQueriesSetPlan {
                expressions: new_expressions,
                input: Arc::new(self.nodes_plan[index].clone()),
            });
        }
        Ok(())
    }
/// Schedule each subquery expression and record, per cluster node, the
/// rewritten expression that points at that node's subquery plan.
///
/// After this runs, `subqueries_expressions[node][k]` holds the k-th
/// rewritten subquery expression for the node at position `node`.
fn visit_subqueries(&mut self, exprs: &[Expression], tasks: &mut Tasks) -> Result<()> {
    self.subqueries_expressions.clear();
    for expression in exprs {
        match expression {
            Expression::Subquery { name, query_plan } => {
                let subquery = query_plan.as_ref();
                let subquery_nodes_plan = self.visit_subquery(subquery, tasks)?;
                for index in 0..subquery_nodes_plan.len() {
                    let new_expression = Expression::Subquery {
                        name: name.clone(),
                        query_plan: Arc::new(subquery_nodes_plan[index].clone()),
                    };
                    // Append to the node's existing list, or start a new list
                    // the first time this node position is seen.
                    match index < self.subqueries_expressions.len() {
                        true => self.subqueries_expressions[index].push(new_expression),
                        false => self.subqueries_expressions.push(vec![new_expression]),
                    };
                }
            }
            Expression::ScalarSubquery { name, query_plan } => {
                let subquery = query_plan.as_ref();
                let subquery_nodes_plan = self.visit_subquery(subquery, tasks)?;
                for index in 0..subquery_nodes_plan.len() {
                    let new_expression = Expression::ScalarSubquery {
                        name: name.clone(),
                        query_plan: Arc::new(subquery_nodes_plan[index].clone()),
                    };
                    // BUGFIX: these two arms were swapped relative to the
                    // Subquery branch above — the old code pushed a fresh Vec
                    // when the slot already existed (corrupting the per-node
                    // grouping) and indexed out of bounds when it did not.
                    match index < self.subqueries_expressions.len() {
                        true => self.subqueries_expressions[index].push(new_expression),
                        false => self.subqueries_expressions.push(vec![new_expression]),
                    };
                }
            }
            // The planner only ever puts subquery expressions in this list.
            _ => unreachable!(),
        };
    }
    Ok(())
}
/// Schedule a subquery with a fresh child scheduler (its own stage ids and
/// running mode), sharing the same task set, and return its per-node plans.
fn visit_subquery(&mut self, plan: &PlanNode, tasks: &mut Tasks) -> Result<Vec<PlanNode>> {
    let subquery_context = QueryContext::new(self.query_context.clone());
    let mut scheduler = PlanScheduler::try_create(subquery_context)?;
    scheduler.visit_plan_node(plan, tasks)?;
    Ok(scheduler.nodes_plan)
}
/// Filter is node-local: wrap the active fragment(s) in a Filter node.
fn visit_filter(&mut self, plan: &FilterPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_filter(plan);
    } else {
        self.visit_local_filter(plan);
    }
    Ok(())
}

fn visit_local_filter(&mut self, plan: &FilterPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Filter(FilterPlan {
        schema: plan.schema.clone(),
        predicate: plan.predicate.clone(),
        input,
    });
}

fn visit_cluster_filter(&mut self, plan: &FilterPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Filter(FilterPlan {
            schema: plan.schema.clone(),
            predicate: plan.predicate.clone(),
            input,
        });
    }
}

/// Having is node-local, same shape as Filter.
fn visit_having(&mut self, plan: &HavingPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_having(plan);
    } else {
        self.visit_local_having(plan);
    }
    Ok(())
}

fn visit_local_having(&mut self, plan: &HavingPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Having(HavingPlan {
        schema: plan.schema.clone(),
        predicate: plan.predicate.clone(),
        input,
    });
}

fn visit_cluster_having(&mut self, plan: &HavingPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Having(HavingPlan {
            schema: plan.schema.clone(),
            predicate: plan.predicate.clone(),
            input,
        });
    }
}

/// Sort is applied per node; any global ordering is handled by later stages.
fn visit_sort(&mut self, plan: &SortPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_sort(plan);
    } else {
        self.visit_local_sort(plan);
    }
    Ok(())
}

fn visit_local_sort(&mut self, plan: &SortPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Sort(SortPlan {
        schema: plan.schema.clone(),
        order_by: plan.order_by.clone(),
        input,
    });
}

fn visit_cluster_sort(&mut self, plan: &SortPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Sort(SortPlan {
            schema: plan.schema.clone(),
            order_by: plan.order_by.clone(),
            input,
        });
    }
}
fn visit_limit(&mut self, plan: &LimitPlan, tasks: &mut Tasks) -> Result<()> {
self.visit_plan_node(plan.input.as_ref(), tasks)?;
match self.running_mode {
RunningMode::Cluster => self.visit_cluster_limit(plan),
RunningMode::Standalone => self.visit_local_limit(plan),
};
Ok(())
}
fn visit_local_limit(&mut self, plan: &LimitPlan) {
self.nodes_plan[self.local_pos] = PlanNode::Limit(LimitPlan {
n: plan.n,
offset: plan.offset,
input: Arc::new(self.nodes_plan[self.local_pos].clone()),
});
}
fn visit_cluster_limit(&mut self, plan: &LimitPlan) {
for index in 0..self.nodes_plan.len() {
self.nodes_plan[index] = PlanNode::Limit(LimitPlan {
n: plan.n,
offset: plan.offset,
input: Arc::new(self.nodes_plan[index].clone()),
});
}
}
fn visit_limit_by(&mut self, plan: &LimitByPlan, tasks: &mut Tasks) -> Result<()> {
self.visit_plan_node(plan.input.as_ref(), tasks)?;
match self.running_mode {
RunningMode::Cluster => self.visit_cluster_limit_by(plan),
RunningMode::Standalone => self.visit_local_limit_by(plan),
};
Ok(())
}
fn visit_local_limit_by(&mut self, plan: &LimitByPlan) {
self.nodes_plan[self.local_pos] = PlanNode::LimitBy(LimitByPlan {
limit: plan.limit,
limit_by: plan.limit_by.clone(),
input: Arc::new(self.nodes_plan[self.local_pos].clone()),
});
}
fn visit_cluster_limit_by(&mut self, plan: &LimitByPlan) {
for index in 0..self.nodes_plan.len() {
self.nodes_plan[index] = PlanNode::LimitBy(LimitByPlan {
limit: plan.limit,
limit_by: plan.limit_by.clone(),
input: Arc::new(self.nodes_plan[index].clone()),
});
}
}
/// A table scan decides the running mode: local tables run standalone,
/// cluster tables are repartitioned across all nodes.
fn visit_data_source(&mut self, plan: &ReadDataSourcePlan, _: &mut Tasks) -> Result<()> {
    let table = self.query_context.build_table_from_source_plan(plan)?;
    if table.is_local() {
        self.visit_local_data_source(plan)
    } else {
        self.visit_cluster_data_source(plan)
    }
}

fn visit_local_data_source(&mut self, plan: &ReadDataSourcePlan) -> Result<()> {
    self.running_mode = RunningMode::Standalone;
    self.nodes_plan[self.local_pos] = PlanNode::ReadSource(plan.clone());
    Ok(())
}

fn visit_cluster_data_source(&mut self, plan: &ReadDataSourcePlan) -> Result<()> {
    self.running_mode = RunningMode::Cluster;
    // Assign each node its slice of the table's partitions.
    let nodes_parts = self.repartition(plan);
    for (node_plan, parts) in self.nodes_plan.iter_mut().zip(nodes_parts) {
        let mut read_plan = plan.clone();
        read_plan.parts = parts;
        *node_plan = PlanNode::ReadSource(read_plan);
    }
    Ok(())
}
/// Sink (write target) is appended on whichever node(s) are active.
fn visit_sink(&mut self, plan: &SinkPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Standalone {
        self.visit_local_sink(plan);
    } else {
        self.visit_cluster_sink(plan);
    }
    Ok(())
}

fn visit_local_sink(&mut self, plan: &SinkPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Sink(SinkPlan {
        table_info: plan.table_info.clone(),
        input,
        cast_schema: plan.cast_schema.clone(),
    })
}

fn visit_cluster_sink(&mut self, plan: &SinkPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Sink(SinkPlan {
            table_info: plan.table_info.clone(),
            input,
            cast_schema: plan.cast_schema.clone(),
        })
    }
}

/// Select is a thin wrapper marking the query result boundary.
fn visit_select(&mut self, plan: &SelectPlan, tasks: &mut Tasks) -> Result<()> {
    self.visit_plan_node(plan.input.as_ref(), tasks)?;
    if self.running_mode == RunningMode::Cluster {
        self.visit_cluster_select(plan);
    } else {
        self.visit_local_select(plan);
    }
    Ok(())
}

fn visit_local_select(&mut self, _: &SelectPlan) {
    let pos = self.local_pos;
    let input = Arc::new(self.nodes_plan[pos].clone());
    self.nodes_plan[pos] = PlanNode::Select(SelectPlan { input });
}

fn visit_cluster_select(&mut self, _: &SelectPlan) {
    for node_plan in self.nodes_plan.iter_mut() {
        let input = Arc::new(node_plan.clone());
        *node_plan = PlanNode::Select(SelectPlan { input });
    }
}
}
impl PlanScheduler {
    /// Split the source's partitions across cluster nodes, keeping adjacent
    /// partitions on the same node; the remainder is assigned one partition
    /// at a time to the head nodes.
    fn repartition(&mut self, cluster_source: &ReadDataSourcePlan) -> Vec<Partitions> {
        // We always put adjacent partitions in the same node
        let nodes = self.cluster_nodes.clone();
        let cluster_parts = &cluster_source.parts;
        let parts_per_node = cluster_parts.len() / nodes.len();

        // One evenly-sized contiguous chunk per node.
        let mut nodes_parts: Vec<_> = (0..nodes.len())
            .map(|index| {
                cluster_parts[parts_per_node * index..parts_per_node * (index + 1)].to_vec()
            })
            .collect();

        // For some irregular partitions, we assign them to the head nodes
        let remainder = &cluster_parts[parts_per_node * nodes.len()..];
        for (index, part) in remainder.iter().enumerate() {
            nodes_parts[index].push(part.clone());
        }
        nodes_parts
    }
}
| 37.578178 | 99 | 0.595444 |
e2c935e2f6921fb6615eb9982648fb3afc94fcf0 | 22,133 | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Rust AST Visitor. Extracts useful information and massages it into a form
//! usable for clean
use std::mem;
use syntax::ast;
use syntax::attr;
use syntax::codemap::Spanned;
use syntax_pos::{self, Span};
use rustc::hir::map as hir_map;
use rustc::hir::def::Def;
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::middle::privacy::AccessLevel;
use rustc::util::nodemap::{FxHashSet, FxHashMap};
use rustc::hir;
use core;
use clean::{self, AttributesExt, NestedAttributesExt, def_id_to_path};
use doctree::*;
// looks to me like the first two of these are actually
// output parameters, maybe only mutated once; perhaps
// better simply to have the visit method return a tuple
// containing them?
// also, is there some reason that this doesn't use the 'visit'
// framework from syntax?
pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a, 'cstore: 'rcx> {
    /// The doctree module being accumulated while visiting the crate.
    pub module: Module,
    /// Crate-level attributes, copied in by `visit`.
    pub attrs: hir::HirVec<ast::Attribute>,
    pub cx: &'a core::DocContext<'a, 'tcx, 'rcx, 'cstore>,
    /// NodeIds of items currently being inlined; guards against infinite
    /// recursion through re-export cycles (seeded with the crate root).
    view_item_stack: FxHashSet<ast::NodeId>,
    /// True while visiting items pulled in through a `pub use`.
    inlining: bool,
    /// Is the current module and all of its parents public?
    inside_public_path: bool,
    /// DefId -> path map collected during the walk; taken (set to None) and
    /// handed to the renderer at the end of `visit`.
    exact_paths: Option<FxHashMap<DefId, Vec<String>>>,
}
impl<'a, 'tcx, 'rcx, 'cstore> RustdocVisitor<'a, 'tcx, 'rcx, 'cstore> {
/// Build a visitor over `cx` with an empty module tree.
pub fn new(
    cx: &'a core::DocContext<'a, 'tcx, 'rcx, 'cstore>
) -> RustdocVisitor<'a, 'tcx, 'rcx, 'cstore> {
    // Seed the inlining stack with the crate root: if the root is
    // re-exported, terminate all recursion.
    let mut view_item_stack = FxHashSet();
    view_item_stack.insert(ast::CRATE_NODE_ID);

    RustdocVisitor {
        module: Module::new(None),
        attrs: hir::HirVec::new(),
        cx,
        view_item_stack,
        inlining: false,
        inside_public_path: true,
        exact_paths: Some(FxHashMap()),
    }
}
/// Record the canonical path for `did` the first time it is seen.
fn store_path(&mut self, did: DefId) {
    // We can't use the entry API, as that keeps the mutable borrow of self active
    // when we try to use cx.
    let exact_paths = self.exact_paths.as_mut().unwrap();
    // `contains_key` states the intent directly; `get(..).is_none()` did the
    // same lookup but fetched a value it never used.
    if !exact_paths.contains_key(&did) {
        let path = def_id_to_path(self.cx, did, self.cx.crate_name.clone());
        exact_paths.insert(did, path);
    }
}
    /// Stability information for a local item, if any.
    fn stability(&self, id: ast::NodeId) -> Option<attr::Stability> {
        self.cx.tcx.hir.opt_local_def_id(id)
            .and_then(|def_id| self.cx.tcx.lookup_stability(def_id)).cloned()
    }

    /// Deprecation information for a local item, if any.
    fn deprecation(&self, id: ast::NodeId) -> Option<attr::Deprecation> {
        self.cx.tcx.hir.opt_local_def_id(id)
            .and_then(|def_id| self.cx.tcx.lookup_deprecation(def_id))
    }
    /// Entry point: walk the whole crate, filling `self.module` with the root
    /// module, attaching exported macros, and handing the collected exact
    /// paths to the renderer.
    pub fn visit(&mut self, krate: &hir::Crate) {
        self.attrs = krate.attrs.clone();

        // The crate root is treated as a public module with a dummy span.
        self.module = self.visit_mod_contents(krate.span,
                                              krate.attrs.clone(),
                                              Spanned { span: syntax_pos::DUMMY_SP,
                                                        node: hir::VisibilityKind::Public },
                                              ast::CRATE_NODE_ID,
                                              &krate.module,
                                              None);
        // attach the crate's exported macros to the top-level module:
        let macro_exports: Vec<_> =
            krate.exported_macros.iter().map(|def| self.visit_local_macro(def)).collect();
        self.module.macros.extend(macro_exports);
        self.module.is_crate = true;

        // Hand over (take) the DefId -> path map; `exact_paths` is None after this.
        self.cx.renderinfo.borrow_mut().exact_paths = self.exact_paths.take().unwrap();
    }
/// Build a doctree `Struct` from a HIR struct definition.
pub fn visit_variant_data(&mut self, item: &hir::Item,
                          name: ast::Name, sd: &hir::VariantData,
                          generics: &hir::Generics) -> Struct {
    debug!("Visiting struct");
    Struct {
        id: item.id,
        struct_type: struct_type_from_def(&*sd),
        name,
        vis: item.vis.clone(),
        stab: self.stability(item.id),
        depr: self.deprecation(item.id),
        attrs: item.attrs.clone(),
        generics: generics.clone(),
        fields: sd.fields().iter().cloned().collect(),
        whence: item.span,
    }
}
/// Build a doctree `Union` from a HIR union definition.
pub fn visit_union_data(&mut self, item: &hir::Item,
                        name: ast::Name, sd: &hir::VariantData,
                        generics: &hir::Generics) -> Union {
    debug!("Visiting union");
    Union {
        id: item.id,
        struct_type: struct_type_from_def(&*sd),
        name,
        vis: item.vis.clone(),
        stab: self.stability(item.id),
        depr: self.deprecation(item.id),
        attrs: item.attrs.clone(),
        generics: generics.clone(),
        fields: sd.fields().iter().cloned().collect(),
        whence: item.span,
    }
}
    /// Build a doctree `Enum` from a HIR enum definition, converting every
    /// variant along the way.
    pub fn visit_enum_def(&mut self, it: &hir::Item,
                          name: ast::Name, def: &hir::EnumDef,
                          params: &hir::Generics) -> Enum {
        debug!("Visiting enum");
        Enum {
            name,
            variants: def.variants.iter().map(|v| Variant {
                name: v.node.name,
                attrs: v.node.attrs.clone(),
                stab: self.stability(v.node.data.id()),
                depr: self.deprecation(v.node.data.id()),
                def: v.node.data.clone(),
                whence: v.span,
            }).collect(),
            vis: it.vis.clone(),
            stab: self.stability(it.id),
            depr: self.deprecation(it.id),
            generics: params.clone(),
            attrs: it.attrs.clone(),
            id: it.id,
            whence: it.span,
        }
    }
/// Build a doctree `Function` from a HIR function item.
pub fn visit_fn(&mut self, item: &hir::Item,
                name: ast::Name, fd: &hir::FnDecl,
                header: hir::FnHeader,
                gen: &hir::Generics,
                body: hir::BodyId) -> Function {
    debug!("Visiting fn");
    let id = item.id;
    Function {
        id,
        name,
        vis: item.vis.clone(),
        stab: self.stability(id),
        depr: self.deprecation(id),
        attrs: item.attrs.clone(),
        decl: fd.clone(),
        generics: gen.clone(),
        header,
        body,
        whence: item.span,
    }
}
    /// Build a doctree `Module` from a HIR module, visiting all of its items.
    /// `inside_public_path` is narrowed while descending so inlining decisions
    /// can tell whether a private module sits on the current path.
    pub fn visit_mod_contents(&mut self, span: Span, attrs: hir::HirVec<ast::Attribute>,
                              vis: hir::Visibility, id: ast::NodeId,
                              m: &hir::Mod,
                              name: Option<ast::Name>) -> Module {
        let mut om = Module::new(name);
        om.where_outer = span;
        om.where_inner = m.inner;
        om.attrs = attrs;
        om.vis = vis.clone();
        om.stab = self.stability(id);
        om.depr = self.deprecation(id);
        om.id = id;
        // Keep track of if there were any private modules in the path.
        let orig_inside_public_path = self.inside_public_path;
        self.inside_public_path &= vis.node.is_pub();
        for i in &m.item_ids {
            let item = self.cx.tcx.hir.expect_item(i.id);
            self.visit_item(item, None, &mut om);
        }
        // Restore the flag for our siblings.
        self.inside_public_path = orig_inside_public_path;
        om
    }
    /// Tries to resolve the target of a `pub use` statement and inlines the
    /// target if it is defined locally and would not be documented otherwise,
    /// or when it is specifically requested with `please_inline`.
    /// (the latter is the case when the import is marked `doc(inline)`)
    ///
    /// Cross-crate inlining occurs later on during crate cleaning
    /// and follows different rules.
    ///
    /// Returns true if the target has been inlined.
    fn maybe_inline_local(&mut self,
                          id: ast::NodeId,
                          def: Def,
                          renamed: Option<ast::Name>,
                          glob: bool,
                          om: &mut Module,
                          please_inline: bool) -> bool {
        // Walk up the enclosing scopes looking for a `#[doc(hidden)]` ancestor.
        fn inherits_doc_hidden(cx: &core::DocContext, mut node: ast::NodeId) -> bool {
            while let Some(id) = cx.tcx.hir.get_enclosing_scope(node) {
                node = id;
                if cx.tcx.hir.attrs(node).lists("doc").has_word("hidden") {
                    return true;
                }
                if node == ast::CRATE_NODE_ID {
                    break;
                }
            }
            false
        }

        debug!("maybe_inline_local def: {:?}", def);

        let tcx = self.cx.tcx;
        if def == Def::Err {
            return false;
        }
        let def_did = def.def_id();

        let use_attrs = tcx.hir.attrs(id);
        // Don't inline doc(hidden) imports so they can be stripped at a later stage.
        let is_no_inline = use_attrs.lists("doc").has_word("no_inline") ||
                           use_attrs.lists("doc").has_word("hidden");

        // For cross-crate impl inlining we need to know whether items are
        // reachable in documentation - a previously nonreachable item can be
        // made reachable by cross-crate inlining which we're checking here.
        // (this is done here because we need to know this upfront)
        if !def_did.is_local() && !is_no_inline {
            let attrs = clean::inline::load_attrs(self.cx, def_did);
            let self_is_hidden = attrs.lists("doc").has_word("hidden");
            match def {
                Def::Trait(did) |
                Def::Struct(did) |
                Def::Union(did) |
                Def::Enum(did) |
                Def::TyForeign(did) |
                Def::TyAlias(did) if !self_is_hidden => {
                    self.cx.access_levels.borrow_mut().map.insert(did, AccessLevel::Public);
                },
                Def::Mod(did) => if !self_is_hidden {
                    ::visit_lib::LibEmbargoVisitor::new(self.cx).visit_mod(did);
                },
                _ => {},
            }
            // Cross-crate targets are never inlined here (see doc comment above).
            return false
        }

        let def_node_id = match tcx.hir.as_local_node_id(def_did) {
            Some(n) => n, None => return false
        };

        let is_private = !self.cx.access_levels.borrow().is_public(def_did);
        let is_hidden = inherits_doc_hidden(self.cx, def_node_id);

        // Only inline if requested or if the item would otherwise be stripped
        if (!please_inline && !is_private && !is_hidden) || is_no_inline {
            return false
        }

        // Recursion guard: refuse to inline a target we are already inlining.
        if !self.view_item_stack.insert(def_node_id) { return false }

        let ret = match tcx.hir.get(def_node_id) {
            // `pub use some_mod::*;` — inline the module's items directly.
            hir_map::NodeItem(&hir::Item { node: hir::ItemKind::Mod(ref m), .. }) if glob => {
                let prev = mem::replace(&mut self.inlining, true);
                for i in &m.item_ids {
                    let i = self.cx.tcx.hir.expect_item(i.id);
                    self.visit_item(i, None, om);
                }
                self.inlining = prev;
                true
            }
            hir_map::NodeItem(it) if !glob => {
                let prev = mem::replace(&mut self.inlining, true);
                self.visit_item(it, renamed, om);
                self.inlining = prev;
                true
            }
            hir_map::NodeForeignItem(it) if !glob => {
                // generate a fresh `extern {}` block if we want to inline a foreign item.
                om.foreigns.push(hir::ForeignMod {
                    abi: tcx.hir.get_foreign_abi(it.id),
                    items: vec![hir::ForeignItem {
                        name: renamed.unwrap_or(it.name),
                        .. it.clone()
                    }].into(),
                });
                true
            }
            _ => false,
        };
        self.view_item_stack.remove(&def_node_id);
        ret
    }
pub fn visit_item(&mut self, item: &hir::Item,
renamed: Option<ast::Name>, om: &mut Module) {
debug!("Visiting item {:?}", item);
let name = renamed.unwrap_or(item.name);
if item.vis.node.is_pub() {
let def_id = self.cx.tcx.hir.local_def_id(item.id);
self.store_path(def_id);
}
match item.node {
hir::ItemKind::ForeignMod(ref fm) => {
// If inlining we only want to include public functions.
om.foreigns.push(if self.inlining {
hir::ForeignMod {
abi: fm.abi,
items: fm.items.iter().filter(|i| i.vis.node.is_pub()).cloned().collect(),
}
} else {
fm.clone()
});
}
// If we're inlining, skip private items.
_ if self.inlining && !item.vis.node.is_pub() => {}
hir::ItemKind::GlobalAsm(..) => {}
hir::ItemKind::ExternCrate(orig_name) => {
let def_id = self.cx.tcx.hir.local_def_id(item.id);
om.extern_crates.push(ExternCrate {
cnum: self.cx.tcx.extern_mod_stmt_cnum(def_id)
.unwrap_or(LOCAL_CRATE),
name,
path: orig_name.map(|x|x.to_string()),
vis: item.vis.clone(),
attrs: item.attrs.clone(),
whence: item.span,
})
}
hir::ItemKind::Use(_, hir::UseKind::ListStem) => {}
hir::ItemKind::Use(ref path, kind) => {
let is_glob = kind == hir::UseKind::Glob;
// struct and variant constructors always show up alongside their definitions, we've
// already processed them so just discard these.
match path.def {
Def::StructCtor(..) | Def::VariantCtor(..) => return,
_ => {}
}
// If there was a private module in the current path then don't bother inlining
// anything as it will probably be stripped anyway.
if item.vis.node.is_pub() && self.inside_public_path {
let please_inline = item.attrs.iter().any(|item| {
match item.meta_item_list() {
Some(ref list) if item.check_name("doc") => {
list.iter().any(|i| i.check_name("inline"))
}
_ => false,
}
});
let name = if is_glob { None } else { Some(name) };
if self.maybe_inline_local(item.id,
path.def,
name,
is_glob,
om,
please_inline) {
return;
}
}
om.imports.push(Import {
name,
id: item.id,
vis: item.vis.clone(),
attrs: item.attrs.clone(),
path: (**path).clone(),
glob: is_glob,
whence: item.span,
});
}
hir::ItemKind::Mod(ref m) => {
om.mods.push(self.visit_mod_contents(item.span,
item.attrs.clone(),
item.vis.clone(),
item.id,
m,
Some(name)));
},
hir::ItemKind::Enum(ref ed, ref gen) =>
om.enums.push(self.visit_enum_def(item, name, ed, gen)),
hir::ItemKind::Struct(ref sd, ref gen) =>
om.structs.push(self.visit_variant_data(item, name, sd, gen)),
hir::ItemKind::Union(ref sd, ref gen) =>
om.unions.push(self.visit_union_data(item, name, sd, gen)),
hir::ItemKind::Fn(ref fd, header, ref gen, body) =>
om.fns.push(self.visit_fn(item, name, &**fd, header, gen, body)),
hir::ItemKind::Ty(ref ty, ref gen) => {
let t = Typedef {
ty: ty.clone(),
gen: gen.clone(),
name,
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
};
om.typedefs.push(t);
},
hir::ItemKind::Existential(ref exist_ty) => {
let t = Existential {
exist_ty: exist_ty.clone(),
name,
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
};
om.existentials.push(t);
},
hir::ItemKind::Static(ref ty, ref mut_, ref exp) => {
let s = Static {
type_: ty.clone(),
mutability: mut_.clone(),
expr: exp.clone(),
id: item.id,
name,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
};
om.statics.push(s);
},
hir::ItemKind::Const(ref ty, ref exp) => {
let s = Constant {
type_: ty.clone(),
expr: exp.clone(),
id: item.id,
name,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
};
om.constants.push(s);
},
hir::ItemKind::Trait(is_auto, unsafety, ref gen, ref b, ref item_ids) => {
let items = item_ids.iter()
.map(|ti| self.cx.tcx.hir.trait_item(ti.id).clone())
.collect();
let t = Trait {
is_auto,
unsafety,
name,
items,
generics: gen.clone(),
bounds: b.iter().cloned().collect(),
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
};
om.traits.push(t);
},
hir::ItemKind::TraitAlias(..) => {
unimplemented!("trait objects are not yet implemented")
},
hir::ItemKind::Impl(unsafety,
polarity,
defaultness,
ref gen,
ref tr,
ref ty,
ref item_ids) => {
// Don't duplicate impls when inlining, we'll pick them up
// regardless of where they're located.
if !self.inlining {
let items = item_ids.iter()
.map(|ii| self.cx.tcx.hir.impl_item(ii.id).clone())
.collect();
let i = Impl {
unsafety,
polarity,
defaultness,
generics: gen.clone(),
trait_: tr.clone(),
for_: ty.clone(),
items,
attrs: item.attrs.clone(),
id: item.id,
whence: item.span,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
};
om.impls.push(i);
}
},
}
}
// convert each exported_macro into a doc item
    /// Converts a locally-defined exported macro into a rustdoc `Macro` item.
    fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
        debug!("visit_local_macro: {}", def.name);
        let tts = def.body.trees().collect::<Vec<_>>();
        // Extract the spans of all matchers. They represent the "interface" of the macro.
        // Chunking by 4 implies each arm spans four tokens with the matcher
        // first. NOTE(review): confirm this holds for a single-arm macro
        // without a trailing separator token.
        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
        Macro {
            def_id: self.cx.tcx.hir.local_def_id(def.id),
            attrs: def.attrs.clone(),
            name: def.name,
            whence: def.span,
            matchers,
            stab: self.stability(def.id),
            depr: self.deprecation(def.id),
            // NOTE(review): presumably only populated for macros inlined from
            // other crates; this macro is local, so it stays `None`.
            imported_from: None,
        }
    }
}
| 39.593918 | 100 | 0.467447 |
29fb8b40366f60c381f7aa76e085f09e96b0e741 | 678 | #[cfg(any(target_os = "android", target_os = "linux"))]
#[test]
fn test_eventfd() {
    use rsix::io::{eventfd, read, write, EventfdFlags};
    use std::mem::size_of;
    use std::thread;

    // Increments the writer thread adds to the eventfd counter. The kernel
    // accumulates writes, so a single read should observe their sum.
    // (Previously the expected value 5021 was hard-coded, silently duplicating
    // this list; deriving it keeps the test honest if the list changes.)
    const VALUES: [u64; 5] = [1, 3, 6, 11, 5000];
    let expected: u64 = VALUES.iter().sum();

    let efd = eventfd(0, EventfdFlags::CLOEXEC).unwrap();
    let child = thread::spawn(move || {
        for u in VALUES.iter() {
            // Each write transfers exactly one 8-byte counter increment.
            assert_eq!(write(&efd, &u.to_ne_bytes()).unwrap(), size_of::<u64>());
        }
        efd
    });
    let efd = child.join().unwrap();

    let mut bytes = [0_u8; size_of::<u64>()];
    let s = read(&efd, &mut bytes).unwrap();
    assert_eq!(s, bytes.len());
    assert_eq!(u64::from_ne_bytes(bytes), expected);
}
| 27.12 | 81 | 0.560472 |
d76305cac4735961d5a48e756573871c60aabb42 | 6,774 | #[doc = "Register `MAC_ADDRESS3_HIGH` reader"]
pub struct R(crate::R<MAC_ADDRESS3_HIGH_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<MAC_ADDRESS3_HIGH_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<MAC_ADDRESS3_HIGH_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<MAC_ADDRESS3_HIGH_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `MAC_ADDRESS3_HIGH` writer"]
pub struct W(crate::W<MAC_ADDRESS3_HIGH_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<MAC_ADDRESS3_HIGH_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<MAC_ADDRESS3_HIGH_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<MAC_ADDRESS3_HIGH_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `ADDRHI` reader - MAC Address3 \\[47:32\\]"]
pub struct ADDRHI_R(crate::FieldReader<u16, u16>);
impl ADDRHI_R {
pub(crate) fn new(bits: u16) -> Self {
ADDRHI_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ADDRHI_R {
type Target = crate::FieldReader<u16, u16>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ADDRHI` writer - MAC Address3 \\[47:32\\]"]
pub struct ADDRHI_W<'a> {
w: &'a mut W,
}
impl<'a> ADDRHI_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff) | (value as u32 & 0xffff);
self.w
}
}
#[doc = "Field `MBC` reader - Mask Byte Control"]
pub struct MBC_R(crate::FieldReader<u8, u8>);
impl MBC_R {
pub(crate) fn new(bits: u8) -> Self {
MBC_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for MBC_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `MBC` writer - Mask Byte Control"]
pub struct MBC_W<'a> {
w: &'a mut W,
}
impl<'a> MBC_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x3f << 24)) | ((value as u32 & 0x3f) << 24);
self.w
}
}
#[doc = "Field `SA` reader - Source Address"]
pub struct SA_R(crate::FieldReader<bool, bool>);
impl SA_R {
pub(crate) fn new(bits: bool) -> Self {
SA_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SA_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SA` writer - Source Address"]
pub struct SA_W<'a> {
w: &'a mut W,
}
impl<'a> SA_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 30)) | ((value as u32 & 0x01) << 30);
self.w
}
}
#[doc = "Field `AE` reader - Address Enable"]
pub struct AE_R(crate::FieldReader<bool, bool>);
impl AE_R {
pub(crate) fn new(bits: bool) -> Self {
AE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for AE_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `AE` writer - Address Enable"]
pub struct AE_W<'a> {
w: &'a mut W,
}
impl<'a> AE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31);
self.w
}
}
impl R {
#[doc = "Bits 0:15 - MAC Address3 \\[47:32\\]"]
#[inline(always)]
pub fn addrhi(&self) -> ADDRHI_R {
ADDRHI_R::new((self.bits & 0xffff) as u16)
}
#[doc = "Bits 24:29 - Mask Byte Control"]
#[inline(always)]
pub fn mbc(&self) -> MBC_R {
MBC_R::new(((self.bits >> 24) & 0x3f) as u8)
}
#[doc = "Bit 30 - Source Address"]
#[inline(always)]
pub fn sa(&self) -> SA_R {
SA_R::new(((self.bits >> 30) & 0x01) != 0)
}
#[doc = "Bit 31 - Address Enable"]
#[inline(always)]
pub fn ae(&self) -> AE_R {
AE_R::new(((self.bits >> 31) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:15 - MAC Address3 \\[47:32\\]"]
#[inline(always)]
pub fn addrhi(&mut self) -> ADDRHI_W {
ADDRHI_W { w: self }
}
#[doc = "Bits 24:29 - Mask Byte Control"]
#[inline(always)]
pub fn mbc(&mut self) -> MBC_W {
MBC_W { w: self }
}
#[doc = "Bit 30 - Source Address"]
#[inline(always)]
pub fn sa(&mut self) -> SA_W {
SA_W { w: self }
}
#[doc = "Bit 31 - Address Enable"]
#[inline(always)]
pub fn ae(&mut self) -> AE_W {
AE_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "MAC Address3 High Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [mac_address3_high](index.html) module"]
pub struct MAC_ADDRESS3_HIGH_SPEC;
impl crate::RegisterSpec for MAC_ADDRESS3_HIGH_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [mac_address3_high::R](R) reader structure"]
impl crate::Readable for MAC_ADDRESS3_HIGH_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [mac_address3_high::W](W) writer structure"]
impl crate::Writable for MAC_ADDRESS3_HIGH_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets MAC_ADDRESS3_HIGH to value 0xffff"]
impl crate::Resettable for MAC_ADDRESS3_HIGH_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0xffff
}
}
| 29.324675 | 424 | 0.574993 |
1da58131052b5e6b9651c5c066d5b3d27eefb7df | 14,090 | // Copyright 2018-2020 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::thread::sleep;
use std::time::{Duration, Instant};
use actix_web::{client::Client, dev::Body, error, http::StatusCode, web, Error, HttpResponse};
use gameroom_database::{helpers, ConnectionPool};
use scabbard::{
protocol::SCABBARD_PROTOCOL_VERSION,
service::{BatchInfo, BatchStatus},
};
use super::{ErrorResponse, SuccessResponse};
use crate::config::NodeInfo;
use crate::rest_api::{GameroomdData, RestApiResponseError};
const DEFAULT_WAIT: u64 = 30; // default wait time in seconds for batch to be commited
// The admin protocol version supported by the gameroom app auth handler
const GAMEROOM_ADMIN_PROTOCOL_VERSION: &str = "1";
pub async fn submit_signed_payload(
client: web::Data<Client>,
gameroomd_data: web::Data<GameroomdData>,
signed_payload: web::Bytes,
) -> Result<HttpResponse, Error> {
let mut response = client
.post(format!("{}/admin/submit", &gameroomd_data.splinterd_url))
.header("Authorization", gameroomd_data.authorization.as_str())
.header(
"SplinterProtocolVersion",
GAMEROOM_ADMIN_PROTOCOL_VERSION.to_string(),
)
.send_body(Body::Bytes(signed_payload))
.await
.map_err(Error::from)?;
let status = response.status();
let body = response.body().await?;
match status {
StatusCode::ACCEPTED => Ok(HttpResponse::Accepted().json(SuccessResponse::new(
"The payload was submitted successfully",
))),
StatusCode::BAD_REQUEST => {
let body_value: serde_json::Value = serde_json::from_slice(&body)?;
let message = match body_value.get("message") {
Some(value) => value.as_str().unwrap_or("Request malformed."),
None => "Request malformed.",
};
Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)))
}
_ => {
debug!(
"Internal Server Error. Splinterd responded with error {}",
response.status(),
);
Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))
}
}
}
pub async fn submit_scabbard_payload(
client: web::Data<Client>,
gameroomd_data: web::Data<GameroomdData>,
pool: web::Data<ConnectionPool>,
circuit_id: web::Path<String>,
node_info: web::Data<NodeInfo>,
signed_payload: web::Bytes,
query: web::Query<HashMap<String, String>>,
) -> Result<HttpResponse, Error> {
let circuit_id_clone = circuit_id.clone();
let service_id = match web::block(move || {
fetch_service_id_for_gameroom_service_from_db(pool, &circuit_id_clone, &node_info.identity)
})
.await
{
Ok(service_id) => service_id,
Err(err) => match err {
error::BlockingError::Error(err) => match err {
RestApiResponseError::NotFound(err) => {
return Ok(HttpResponse::NotFound().json(ErrorResponse::not_found(&err)));
}
_ => {
return Ok(HttpResponse::BadRequest()
.json(ErrorResponse::bad_request(&err.to_string())))
}
},
error::BlockingError::Canceled => {
debug!("Internal Server Error: {}", err);
return Ok(
HttpResponse::InternalServerError().json(ErrorResponse::internal_error())
);
}
},
};
let wait = query
.get("wait")
.map(|val| match val.as_ref() {
"false" => 0,
_ => val.parse().unwrap_or(DEFAULT_WAIT),
})
.unwrap_or_else(|| DEFAULT_WAIT);
let mut response = client
.post(format!(
"{}/scabbard/{}/{}/batches",
&gameroomd_data.splinterd_url, &circuit_id, &service_id
))
.header("Authorization", gameroomd_data.authorization.as_str())
.header(
"SplinterProtocolVersion",
SCABBARD_PROTOCOL_VERSION.to_string(),
)
.send_body(Body::Bytes(signed_payload))
.await?;
let status = response.status();
let body = response.body().await?;
let link = match status {
StatusCode::ACCEPTED => match parse_link(&body) {
Ok(value) => value,
Err(err) => {
debug!(
"Internal Server Error. Error parsing splinter daemon response {}",
err
);
return Ok(
HttpResponse::InternalServerError().json(ErrorResponse::internal_error())
);
}
},
StatusCode::BAD_REQUEST => {
let body_value: serde_json::Value = serde_json::from_slice(&body)?;
let message = match body_value.get("message") {
Some(value) => value.as_str().unwrap_or("Request malformed."),
None => "Request malformed.",
};
return Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)));
}
_ => {
let body_value: serde_json::Value = serde_json::from_slice(&body)?;
let message = match body_value.get("message") {
Some(value) => value.as_str().unwrap_or("Unknown cause"),
None => "Unknown cause",
};
debug!(
"Internal Server Error. Gameroom service responded with an error {} with message {}",
response.status(),
message
);
return Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()));
}
};
let start = Instant::now();
match check_batch_status(
client,
&gameroomd_data.splinterd_url,
&gameroomd_data.authorization,
&link,
start,
wait,
)
.await
{
Ok(batches_info) => {
let invalid_batches = batches_info
.iter()
.filter(|batch| {
if let BatchStatus::Invalid(_) = batch.status {
return true;
}
false
})
.collect::<Vec<&BatchInfo>>();
if !invalid_batches.is_empty() {
let error_message = process_failed_baches(&invalid_batches);
return Ok(
HttpResponse::BadRequest().json(ErrorResponse::bad_request_with_data(
&error_message,
batches_info,
)),
);
}
if batches_info
.iter()
.any(|batch| batch.status == BatchStatus::Pending)
{
return Ok(HttpResponse::Accepted().json(SuccessResponse::new(batches_info)));
}
Ok(HttpResponse::Ok().json(SuccessResponse::new(batches_info)))
}
Err(err) => match err {
RestApiResponseError::BadRequest(message) => {
Ok(HttpResponse::BadRequest().json(ErrorResponse::bad_request(&message)))
}
_ => {
debug!("Internal Server Error: {}", err);
Ok(HttpResponse::InternalServerError().json(ErrorResponse::internal_error()))
}
},
}
}
/// Looks up the scabbard service ID for the gameroom on `circuit_id` that is
/// hosted by this node (`node_id`).
///
/// Returns `RestApiResponseError::NotFound` when the query succeeds but no
/// matching service exists; pool and query errors propagate via `?`.
fn fetch_service_id_for_gameroom_service_from_db(
    pool: web::Data<ConnectionPool>,
    circuit_id: &str,
    node_id: &str,
) -> Result<String, RestApiResponseError> {
    helpers::fetch_service_id_for_gameroom_service(&*pool.get()?, circuit_id, node_id)?.ok_or_else(
        || {
            RestApiResponseError::NotFound(format!(
                "Gameroom service for circuit ID {} not found",
                circuit_id,
            ))
        },
    )
}
/// Extracts the `"link"` entry from a splinterd batch-submission response body.
///
/// The body is expected to be a flat JSON object of string values; the `link`
/// entry points at the batch-status endpoint. Returns an internal error when
/// the body is not valid JSON of that shape or the link is missing.
fn parse_link(response_bytes: &[u8]) -> Result<String, RestApiResponseError> {
    // `from_slice` already takes `&[u8]`; the previous extra `&` was a
    // needless borrow (clippy::needless_borrow).
    let mut response_value: HashMap<String, String> = serde_json::from_slice(response_bytes)
        .map_err(|err| {
            RestApiResponseError::InternalError(format!(
                "Failed to parse batches_ids from splinterd response {}",
                err
            ))
        })?;
    // `ok_or_else` replaces the manual if-let/else while building the error
    // only on the missing-link path.
    response_value.remove("link").ok_or_else(|| {
        RestApiResponseError::InternalError(
            "The splinter daemon did not return a link for batch status".to_string(),
        )
    })
}
/// Builds the user-facing error message for a set of invalid batches.
///
/// Returns an empty string when there is nothing to report (no invalid
/// batches, or a single batch whose status is not `Invalid`).
fn process_failed_baches(invalid_batches: &[&BatchInfo]) -> String {
    if invalid_batches.is_empty() {
        return "".to_string();
    }
    if invalid_batches.len() > 1 {
        return "Several transactions failed. Please try again. If it continues to fail please contact your administrator.".to_string();
    }
    // Exactly one invalid batch: distinguish one failed transaction from many.
    match &invalid_batches[0].status {
        BatchStatus::Invalid(invalid_transactions) if invalid_transactions.len() > 1 => {
            "Several transactions failed. Please try again. If it continues to fail contact your administrator for help.".to_string()
        }
        BatchStatus::Invalid(_) => {
            "A transaction failed. Please try again. If it continues to fail contact your administrator for help.".to_string()
        }
        _ => "".to_string(),
    }
}
async fn check_batch_status(
client: web::Data<Client>,
splinterd_url: &str,
authorization: &str,
link: &str,
start_time: Instant,
wait: u64,
) -> Result<Vec<BatchInfo>, RestApiResponseError> {
let splinterd_url = splinterd_url.to_owned();
let link = link.to_owned();
loop {
debug!("Checking batch status {}", link);
let mut response = match client
.get(format!("{}{}", splinterd_url, link))
.header("Authorization", authorization)
.header(
"SplinterProtocolVersion",
SCABBARD_PROTOCOL_VERSION.to_string(),
)
.send()
.await
.map_err(|err| {
RestApiResponseError::InternalError(format!("Failed to send request {}", err))
}) {
Ok(r) => r,
Err(err) => {
return Err(RestApiResponseError::InternalError(format!(
"Failed to retrieve state: {}",
err
)));
}
};
let body = match response.body().await {
Ok(b) => b,
Err(err) => {
return Err(RestApiResponseError::InternalError(format!(
"Failed to receive response body {}",
err
)));
}
};
match response.status() {
StatusCode::OK => {
let batches_info: Vec<BatchInfo> = match serde_json::from_slice(&body) {
Ok(b) => b,
Err(err) => {
return Err(RestApiResponseError::InternalError(format!(
"Failed to parse response body {}",
err
)));
}
};
// If batch status is still pending and the wait time has not yet passed,
// send request again to re-check the batch status
if batches_info
.iter()
.any(|batch_info| match batch_info.status {
BatchStatus::Pending => true,
BatchStatus::Valid(_) => true,
_ => false,
})
&& Instant::now().duration_since(start_time) < Duration::from_secs(wait)
{
// wait one second before sending request again
sleep(Duration::from_secs(1));
continue;
} else {
return Ok(batches_info);
}
}
StatusCode::BAD_REQUEST => {
let body_value: serde_json::Value = match serde_json::from_slice(&body) {
Ok(b) => b,
Err(err) => {
return Err(RestApiResponseError::InternalError(format!(
"Failed to parse response body {}",
err
)));
}
};
let message = match body_value.get("message") {
Some(value) => value.as_str().unwrap_or("Request malformed."),
None => "Request malformed.",
};
return Err(RestApiResponseError::BadRequest(message.to_string()));
}
_ => {
let body_value: serde_json::Value = match serde_json::from_slice(&body) {
Ok(b) => b,
Err(err) => {
return Err(RestApiResponseError::InternalError(format!(
"Failed to parse response body {}",
err
)));
}
};
let message = match body_value.get("message") {
Some(value) => value.as_str().unwrap_or("Unknown cause"),
None => "Unknown cause",
};
return Err(RestApiResponseError::InternalError(message.to_string()));
}
};
}
}
| 36.128205 | 137 | 0.530518 |
9c76b17da48bd53b6efbccca80378490a3d038e7 | 28,765 | use super::*;
use std::cell::{Cell};
use std::thread;
use std::io::{Read, Write};
use std::time::{Duration};
/// Snapshot the (head, tail) index pair of `rb` using sequentially consistent
/// loads, for use in test assertions.
fn head_tail<T>(rb: &RingBuffer<T>) -> (usize, usize) {
    let head = rb.head.load(Ordering::SeqCst);
    let tail = rb.tail.load(Ordering::SeqCst);
    (head, tail)
}
#[test]
fn capacity() {
    // The buffer must report exactly the capacity it was constructed with.
    let requested = 13;
    let rb = RingBuffer::<i32>::new(requested);
    assert_eq!(rb.capacity(), requested);
}
#[test]
fn split_capacity() {
    // Splitting must not change the capacity reported by either endpoint.
    let requested = 13;
    let rb = RingBuffer::<i32>::new(requested);
    let (prod, cons) = rb.split();
    assert_eq!(prod.capacity(), requested);
    assert_eq!(cons.capacity(), requested);
}
#[test]
fn split_threads() {
    // Both halves of a split ring buffer must be movable into other threads
    // (i.e. they are `Send`); each endpoint is dropped on its own thread.
    let rb = RingBuffer::<i32>::new(10);
    let (prod, cons) = rb.split();
    let producer_handle = thread::spawn(move || {
        let _owned = prod;
    });
    let consumer_handle = thread::spawn(move || {
        let _owned = cons;
    });
    producer_handle.join().unwrap();
    consumer_handle.join().unwrap();
}
#[test]
fn push() {
    let cap = 2;
    let buf = RingBuffer::<i32>::new(cap);
    let (mut prod, _) = buf.split();
    // Each successful push advances tail by one while head stays put.
    assert_eq!(head_tail(&prod.rb), (0, 0));
    assert_eq!(prod.push(123), Ok(()));
    assert_eq!(head_tail(&prod.rb), (0, 1));
    assert_eq!(prod.push(234), Ok(()));
    assert_eq!(head_tail(&prod.rb), (0, 2));
    // The buffer now holds `cap` elements: the next push must be rejected,
    // hand the value back unchanged, and leave the indices untouched.
    assert_eq!(prod.push(345), Err(PushError::Full(345)));
    assert_eq!(head_tail(&prod.rb), (0, 2));
}
#[test]
fn pop_empty() {
    // Popping from a freshly created buffer must fail with `Empty` and must
    // not disturb the head/tail indices.
    let rb = RingBuffer::<i32>::new(2);
    let (_, mut cons) = rb.split();
    assert_eq!(head_tail(&cons.rb), (0, 0));
    assert_eq!(cons.pop(), Err(PopError::Empty));
    assert_eq!(head_tail(&cons.rb), (0, 0));
}
#[test]
fn push_pop_one() {
    let cap = 2;
    let buf = RingBuffer::<i32>::new(cap);
    let (mut prod, mut cons) = buf.split();

    // Indices wrap modulo cap + 1 — the implementation appears to keep one
    // spare slot to distinguish empty from full.
    let vcap = cap + 1;
    let values = [12, 34, 56, 78, 90];

    assert_eq!(head_tail(&cons.rb), (0, 0));

    for (i, v) in values.iter().enumerate() {
        assert_eq!(prod.push(*v), Ok(()));
        assert_eq!(head_tail(&cons.rb), (i % vcap, (i + 1) % vcap));

        match cons.pop() {
            Ok(w) => assert_eq!(w, *v),
            // `panic!(other)` with a non-string payload is deprecated and a
            // hard error in the 2021 edition; format the value instead.
            other => panic!("unexpected pop result: {:?}", other),
        }
        assert_eq!(head_tail(&cons.rb), ((i + 1) % vcap, (i + 1) % vcap));

        assert_eq!(cons.pop(), Err(PopError::Empty));
        assert_eq!(head_tail(&cons.rb), ((i + 1) % vcap, (i + 1) % vcap));
    }
}
#[test]
fn push_pop_all() {
    let cap = 2;
    let buf = RingBuffer::<i32>::new(cap);
    let (mut prod, mut cons) = buf.split();

    // Indices wrap modulo cap + 1 (one spare slot distinguishes empty/full).
    let vcap = cap + 1;
    // Each tuple is (first, second, rejected): the first two values fill the
    // buffer and the third push must be refused with the value handed back.
    let values = [(12, 34, 13), (56, 78, 57), (90, 10, 91)];

    assert_eq!(head_tail(&cons.rb), (0, 0));

    for (i, v) in values.iter().enumerate() {
        assert_eq!(prod.push(v.0), Ok(()));
        assert_eq!(head_tail(&cons.rb), (cap * i % vcap, (cap * i + 1) % vcap));

        assert_eq!(prod.push(v.1), Ok(()));
        assert_eq!(head_tail(&cons.rb), (cap * i % vcap, (cap * i + 2) % vcap));

        match prod.push(v.2) {
            Err(PushError::Full(w)) => assert_eq!(w, v.2),
            // `panic!(other)` with a non-string payload is deprecated and a
            // hard error in the 2021 edition; format the value instead.
            other => panic!("unexpected push result: {:?}", other),
        }
        assert_eq!(head_tail(&cons.rb), (cap * i % vcap, (cap * i + 2) % vcap));

        match cons.pop() {
            Ok(w) => assert_eq!(w, v.0),
            other => panic!("unexpected pop result: {:?}", other),
        }
        assert_eq!(head_tail(&cons.rb), ((cap * i + 1) % vcap, (cap * i + 2) % vcap));

        match cons.pop() {
            Ok(w) => assert_eq!(w, v.1),
            other => panic!("unexpected pop result: {:?}", other),
        }
        assert_eq!(head_tail(&cons.rb), ((cap * i + 2) % vcap, (cap * i + 2) % vcap));

        assert_eq!(cons.pop(), Err(PopError::Empty));
        assert_eq!(head_tail(&cons.rb), ((cap * i + 2) % vcap, (cap * i + 2) % vcap));
    }
}
#[test]
fn empty_full() {
    let rb = RingBuffer::<i32>::new(1);
    let (mut prod, cons) = rb.split();

    // A fresh buffer is empty (and therefore not full) on both endpoints.
    assert!(prod.is_empty() && cons.is_empty());
    assert!(!prod.is_full() && !cons.is_full());

    // One push into a capacity-1 buffer flips it to full and non-empty.
    assert_eq!(prod.push(123), Ok(()));
    assert!(!prod.is_empty() && !cons.is_empty());
    assert!(prod.is_full() && cons.is_full());
}
#[test]
fn len_remaining() {
    // Fill and drain the buffer while checking that `len` and `remaining`
    // agree on both endpoints (len + remaining == capacity at every step).
    let buf = RingBuffer::<i32>::new(2);
    let (mut prod, mut cons) = buf.split();
    assert_eq!(prod.len(), 0);
    assert_eq!(cons.len(), 0);
    assert_eq!(prod.remaining(), 2);
    assert_eq!(cons.remaining(), 2);
    assert_eq!(prod.push(123), Ok(()));
    assert_eq!(prod.len(), 1);
    assert_eq!(cons.len(), 1);
    assert_eq!(prod.remaining(), 1);
    assert_eq!(cons.remaining(), 1);
    assert_eq!(prod.push(456), Ok(()));
    assert_eq!(prod.len(), 2);
    assert_eq!(cons.len(), 2);
    assert_eq!(prod.remaining(), 0);
    assert_eq!(cons.remaining(), 0);
    assert_eq!(cons.pop(), Ok(123));
    assert_eq!(prod.len(), 1);
    assert_eq!(cons.len(), 1);
    assert_eq!(prod.remaining(), 1);
    assert_eq!(cons.remaining(), 1);
    assert_eq!(cons.pop(), Ok(456));
    assert_eq!(prod.len(), 0);
    assert_eq!(cons.len(), 0);
    assert_eq!(prod.remaining(), 2);
    assert_eq!(cons.remaining(), 2);
    // Now head is at 2, so tail will wrap to 0. This caught an overflow error
    // when tail + 1 < head, because of the subtraction of usize values.
    assert_eq!(prod.push(789), Ok(()));
    assert_eq!(prod.len(), 1);
    assert_eq!(cons.len(), 1);
    assert_eq!(prod.remaining(), 1);
    assert_eq!(cons.remaining(), 1);
}
/// Test helper whose destructor increments the shared counter `cnt`,
/// letting tests count exactly how many times a value has been dropped.
#[derive(Debug)]
struct Dropper<'a> {
    cnt: &'a Cell<i32>,
}
impl<'a> Dropper<'a> {
fn new(c: &'a Cell<i32>) -> Self {
Self { cnt: c }
}
}
impl<'a> Drop for Dropper<'a> {
    fn drop(&mut self) {
        // Record one more destruction in the shared counter.
        self.cnt.set(self.cnt.get() + 1);
    }
}
#[test]
// Verifies that elements are dropped exactly once: popped elements drop
// immediately, and an element still inside the buffer drops when the
// buffer's endpoints are dropped. (Note: this test item named `drop`
// shadows `std::mem::drop` within this module.)
fn drop() {
    let (ca, cb) = (Cell::new(0), Cell::new(0));
    let (da, db) = (Dropper::new(&ca), Dropper::new(&cb));
    let cap = 3;
    let buf = RingBuffer::new(cap);
    {
        let (mut prod, mut cons) = buf.split();
        assert_eq!((ca.get(), cb.get()), (0, 0));
        // Pushing moves the droppers into the buffer; nothing drops yet.
        prod.push(da).unwrap();
        assert_eq!((ca.get(), cb.get()), (0, 0));
        prod.push(db).unwrap();
        assert_eq!((ca.get(), cb.get()), (0, 0));
        // The popped value is dropped at the end of this statement.
        cons.pop().unwrap();
        assert_eq!((ca.get(), cb.get()), (1, 0));
    }
    // Dropping the endpoints must run the remaining element's destructor
    // exactly once.
    assert_eq!((ca.get(), cb.get()), (1, 1));
}
#[test]
fn push_access() {
let cap = 2;
let buf = RingBuffer::<i32>::new(cap);
let (mut prod, mut cons) = buf.split();
let vs_20 = (123, 456);
let push_fn_20 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 2);
assert_eq!(right.len(), 0);
left[0] = vs_20.0;
left[1] = vs_20.1;
Ok((2, ()))
};
assert_eq!(unsafe { prod.push_access(push_fn_20) }.unwrap().unwrap(), (2, ()));
assert_eq!(cons.pop().unwrap(), vs_20.0);
assert_eq!(cons.pop().unwrap(), vs_20.1);
assert_eq!(cons.pop(), Err(PopError::Empty));
let vs_11 = (123, 456);
let push_fn_11 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 1);
assert_eq!(right.len(), 1);
left[0] = vs_11.0;
right[0] = vs_11.1;
Ok((2, ()))
};
assert_eq!(unsafe { prod.push_access(push_fn_11) }.unwrap().unwrap(), (2, ()));
assert_eq!(cons.pop().unwrap(), vs_11.0);
assert_eq!(cons.pop().unwrap(), vs_11.1);
assert_eq!(cons.pop(), Err(PopError::Empty));
}
/*
/// This test doesn't compile.
/// And that's good :)
#[test]
fn push_access_oref() {
let cap = 2;
let buf = RingBuffer::<i32>::new(cap);
let (mut prod, _) = buf.split();
let mut ovar = 123;
let mut oref = &mut 123;
let push_fn_20 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
left[0] = 456;
oref = &mut left[0];
Ok((1, ()))
};
assert_eq!(unsafe {
prod.push_access(push_fn_20)
}.unwrap().unwrap(), (1, ()));
assert_eq!(*oref, 456);
}
*/
#[test]
// NOTE(review): despite the name, this test never pushes anything, so it
// exercises the same empty-buffer path as `pop_access_empty` below — it looks
// like a copy/paste leftover. Confirm whether it was meant to call
// `pop_access` on a *full* buffer instead.
fn pop_access_full() {
    let cap = 2;
    let buf = RingBuffer::<i32>::new(cap);
    let (_, mut cons) = buf.split();
    // Accessor that would consume nothing; it is never invoked because the
    // buffer is empty.
    let dummy_fn = |_l: &mut [i32], _r: &mut [i32]| -> Result<(usize, ()), ()> {
        if true {
            Ok((0, ()))
        } else {
            Err(())
        }
    };
    assert_eq!(unsafe { cons.pop_access(dummy_fn) }, Err(PopAccessError::Empty));
}
#[test]
// `pop_access` on an empty buffer must report `Empty` without ever invoking
// the accessor closure.
fn pop_access_empty() {
    let cap = 2;
    let buf = RingBuffer::<i32>::new(cap);
    let (_, mut cons) = buf.split();
    // The dead `else` branch looks redundant given the closure's explicit
    // return type — NOTE(review): confirm it is not working around a
    // type-inference quirk before simplifying.
    let dummy_fn = |_l: &mut [i32], _r: &mut [i32]| -> Result<(usize, ()), ()> {
        if true {
            Ok((0, ()))
        } else {
            Err(())
        }
    };
    assert_eq!(unsafe { cons.pop_access(dummy_fn) }, Err(PopAccessError::Empty));
}
#[test]
fn pop_access() {
let cap = 2;
let buf = RingBuffer::<i32>::new(cap);
let (mut prod, mut cons) = buf.split();
let vs_20 = (123, 456);
assert_eq!(prod.push(vs_20.0), Ok(()));
assert_eq!(prod.push(vs_20.1), Ok(()));
assert_eq!(prod.push(0), Err(PushError::Full(0)));
let pop_fn_20 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 2);
assert_eq!(right.len(), 0);
assert_eq!(left[0], vs_20.0);
assert_eq!(left[1], vs_20.1);
Ok((2, ()))
};
assert_eq!(unsafe { cons.pop_access(pop_fn_20) }.unwrap().unwrap(), (2, ()));
let vs_11 = (123, 456);
assert_eq!(prod.push(vs_11.0), Ok(()));
assert_eq!(prod.push(vs_11.1), Ok(()));
assert_eq!(prod.push(0), Err(PushError::Full(0)));
let pop_fn_11 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 1);
assert_eq!(right.len(), 1);
assert_eq!(left[0], vs_11.0);
assert_eq!(right[0], vs_11.1);
Ok((2, ()))
};
assert_eq!(unsafe { cons.pop_access(pop_fn_11) }.unwrap().unwrap(), (2, ()));
}
#[test]
fn push_access_return() {
let cap = 2;
let buf = RingBuffer::<i32>::new(cap);
let (mut prod, mut cons) = buf.split();
let push_fn_3 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 2);
assert_eq!(right.len(), 0);
Ok((3, ()))
};
assert_eq!(unsafe { prod.push_access(push_fn_3) }, Err(PushAccessError::BadLen)
);
let push_fn_err = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), i32> {
assert_eq!(left.len(), 2);
assert_eq!(right.len(), 0);
Err(123)
};
assert_eq!(unsafe { prod.push_access(push_fn_err) }, Ok(Err(123))
);
let push_fn_0 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 2);
assert_eq!(right.len(), 0);
Ok((0, ()))
};
assert_eq!(unsafe { prod.push_access(push_fn_0) }, Ok(Ok((0, ())))
);
let push_fn_1 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 2);
assert_eq!(right.len(), 0);
left[0] = 12;
Ok((1, ()))
};
assert_eq!(unsafe { prod.push_access(push_fn_1) }, Ok(Ok((1, ())))
);
let push_fn_2 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
assert_eq!(left.len(), 1);
assert_eq!(right.len(), 0);
left[0] = 34;
Ok((1, ()))
};
assert_eq!(unsafe { prod.push_access(push_fn_2) }, Ok(Ok((1, ())))
);
assert_eq!(cons.pop().unwrap(), 12);
assert_eq!(cons.pop().unwrap(), 34);
assert_eq!(cons.pop(), Err(PopError::Empty));
}
#[test]
fn pop_access_return() {
    // Exercise the return-value contract of `pop_access` on a capacity-2 buffer.
    let rb = RingBuffer::<i32>::new(2);
    let (mut tx, mut rx) = rb.split();

    // Fill the buffer completely (12, then 34); a third push must report Full.
    assert_eq!(tx.push(12), Ok(()));
    assert_eq!(tx.push(34), Ok(()));
    assert_eq!(tx.push(0), Err(PushError::Full(0)));

    // Claiming more elements than are stored must be rejected.
    let oversized = |head: &mut [i32], tail: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(head.len(), 2);
        assert_eq!(tail.len(), 0);
        Ok((3, ()))
    };
    assert_eq!(unsafe { rx.pop_access(oversized) }, Err(PopAccessError::BadLen));

    // A user error is passed straight through inside the outer `Ok`.
    let failing = |head: &mut [i32], tail: &mut [i32]| -> Result<(usize, ()), i32> {
        assert_eq!(head.len(), 2);
        assert_eq!(tail.len(), 0);
        Err(123)
    };
    assert_eq!(unsafe { rx.pop_access(failing) }, Ok(Err(123)));

    // Consuming nothing is allowed and leaves both elements in place.
    let noop = |head: &mut [i32], tail: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(head.len(), 2);
        assert_eq!(tail.len(), 0);
        Ok((0, ()))
    };
    assert_eq!(unsafe { rx.pop_access(noop) }, Ok(Ok((0, ()))));

    // Take the first element; the closure still sees both stored values.
    let take_first = |head: &mut [i32], tail: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(head.len(), 2);
        assert_eq!(tail.len(), 0);
        assert_eq!(head[0], 12);
        Ok((1, ()))
    };
    assert_eq!(unsafe { rx.pop_access(take_first) }, Ok(Ok((1, ()))));

    // Take the remaining element.
    let take_second = |head: &mut [i32], tail: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(head.len(), 1);
        assert_eq!(tail.len(), 0);
        assert_eq!(head[0], 34);
        Ok((1, ()))
    };
    assert_eq!(unsafe { rx.pop_access(take_second) }, Ok(Ok((1, ()))));
}
#[test]
fn push_pop_access() {
    // Round-trip pairs of values through `push_access`/`pop_access`, first with
    // the accessible region contiguous (slices of len 2 + 0), then wrapped
    // around the end of storage (slices of len 1 + 1).
    let cap = 2;
    let buf = RingBuffer::<i32>::new(cap);
    let (mut prod, mut cons) = buf.split();
    let vs_20 = (123, 456);
    // Fresh buffer: the free space is a single contiguous slice of len 2.
    let push_fn_20 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(left.len(), 2);
        assert_eq!(right.len(), 0);
        left[0] = vs_20.0;
        left[1] = vs_20.1;
        Ok((2, ()))
    };
    assert_eq!(unsafe { prod.push_access(push_fn_20) }.unwrap().unwrap(), (2, ()));
    // Reading back sees the same contiguous layout and the stored values.
    let pop_fn_20 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(left.len(), 2);
        assert_eq!(right.len(), 0);
        assert_eq!(left[0], vs_20.0);
        assert_eq!(left[1], vs_20.1);
        Ok((2, ()))
    };
    assert_eq!(unsafe { cons.pop_access(pop_fn_20) }.unwrap().unwrap(), (2, ()));
    let vs_11 = (123, 456);
    // After the first cycle the indices sit mid-storage, so the free space
    // wraps: one slot at the end (`left`) plus one at the start (`right`).
    let push_fn_11 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(left.len(), 1);
        assert_eq!(right.len(), 1);
        left[0] = vs_11.0;
        right[0] = vs_11.1;
        Ok((2, ()))
    };
    assert_eq!(unsafe { prod.push_access(push_fn_11) }.unwrap().unwrap(), (2, ()));
    // Reading back sees the same wrapped layout.
    let pop_fn_11 = |left: &mut [i32], right: &mut [i32]| -> Result<(usize, ()), ()> {
        assert_eq!(left.len(), 1);
        assert_eq!(right.len(), 1);
        assert_eq!(left[0], vs_11.0);
        assert_eq!(right[0], vs_11.1);
        Ok((2, ()))
    };
    assert_eq!(unsafe { cons.pop_access(pop_fn_11) }.unwrap().unwrap(), (2, ()));
}
#[test]
fn push_pop_slice() {
    // Interleave slice pushes and pops on a 4-slot buffer; partial transfers
    // report how many elements were actually moved.
    let buf = RingBuffer::<i32>::new(4);
    let (mut prod, mut cons) = buf.split();
    let mut tmp = [0; 5];
    // Pushing an empty slice is a no-op.
    assert_eq!(prod.push_slice(&[]), Ok(0));
    assert_eq!(prod.push_slice(&[0, 1, 2]), Ok(3));
    assert_eq!(cons.pop_slice(&mut tmp[0..2]), Ok(2));
    assert_eq!(tmp[0..2], [0, 1]);
    assert_eq!(prod.push_slice(&[3, 4]), Ok(2));
    // Only one slot is free, so just the first element (5) is accepted.
    assert_eq!(prod.push_slice(&[5, 6]), Ok(1));
    assert_eq!(cons.pop_slice(&mut tmp[0..3]), Ok(3));
    assert_eq!(tmp[0..3], [2, 3, 4]);
    // Buffer currently holds [5]; three of the four new elements fit.
    assert_eq!(prod.push_slice(&[6, 7, 8, 9]), Ok(3));
    // `tmp` is larger than the content: pop drains all 4 stored elements.
    assert_eq!(cons.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [5, 6, 7, 8]);
}
#[test]
fn move_slice() {
    // Transfer elements directly between two ring buffers with `move_slice`,
    // covering the empty-source and full-destination error cases.
    let buf0 = RingBuffer::<i32>::new(4);
    let buf1 = RingBuffer::<i32>::new(4);
    let (mut prod0, mut cons0) = buf0.split();
    let (mut prod1, mut cons1) = buf1.split();
    let mut tmp = [0; 5];
    assert_eq!(prod0.push_slice(&[0, 1, 2]), Ok(3));
    assert_eq!(prod1.move_slice(&mut cons0, None), Ok(3));
    // The source is now drained.
    assert_eq!(prod1.move_slice(&mut cons0, None), Err(MoveSliceError::Empty));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(3));
    assert_eq!(tmp[0..3], [0, 1, 2]);
    assert_eq!(prod0.push_slice(&[3, 4, 5]), Ok(3));
    assert_eq!(prod1.move_slice(&mut cons0, None), Ok(3));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(3));
    assert_eq!(tmp[0..3], [3, 4, 5]);
    // Pre-fill the destination so only one slot is free for the next move.
    assert_eq!(prod1.push_slice(&[6, 7, 8]), Ok(3));
    assert_eq!(prod0.push_slice(&[9, 10]), Ok(2));
    assert_eq!(prod1.move_slice(&mut cons0, None), Ok(1));
    // The destination has no space left.
    assert_eq!(prod1.move_slice(&mut cons0, None), Err(MoveSliceError::Full));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [6, 7, 8, 9]);
}
#[test]
fn move_slice_count() {
    // Like `move_slice`, but with an explicit element-count limit; the number
    // actually moved is additionally capped by what the source holds.
    let buf0 = RingBuffer::<i32>::new(4);
    let buf1 = RingBuffer::<i32>::new(4);
    let (mut prod0, mut cons0) = buf0.split();
    let (mut prod1, mut cons1) = buf1.split();
    let mut tmp = [0; 5];
    assert_eq!(prod0.push_slice(&[0, 1, 2]), Ok(3));
    assert_eq!(prod1.move_slice(&mut cons0, Some(2)), Ok(2));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(2));
    assert_eq!(tmp[0..2], [0, 1]);
    // 2 requested, but only 1 element remains in the source.
    assert_eq!(prod1.move_slice(&mut cons0, Some(2)), Ok(1));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(1));
    assert_eq!(tmp[0..1], [2]);
    assert_eq!(prod0.push_slice(&[3, 4, 5, 6]), Ok(4));
    assert_eq!(prod1.move_slice(&mut cons0, Some(3)), Ok(3));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(3));
    assert_eq!(tmp[0..3], [3, 4, 5]);
    assert_eq!(prod0.push_slice(&[7, 8, 9]), Ok(3));
    // 5 requested, capped at the 4 elements available ([6] plus [7, 8, 9]).
    assert_eq!(prod1.move_slice(&mut cons0, Some(5)), Ok(4));
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [6, 7, 8, 9]);
}
#[test]
fn read_from() {
    // Fill one ring buffer from another via `read_from` (treating the source
    // consumer as an `io::Read`).
    let buf0 = RingBuffer::<u8>::new(4);
    let buf1 = RingBuffer::<u8>::new(4);
    let (mut prod0, mut cons0) = buf0.split();
    let (mut prod1, mut cons1) = buf1.split();
    let mut tmp = [0; 5];
    assert_eq!(prod0.push_slice(&[0, 1, 2]), Ok(3));
    match prod1.read_from(&mut cons0, None) {
        Ok(n) => assert_eq!(n, 3),
        other => panic!("{:?}", other),
    }
    // An empty source surfaces as a `WouldBlock` read error, not EOF.
    match prod1.read_from(&mut cons0, None) {
        Err(ReadFromError::Read(e)) => {
            assert_eq!(e.kind(), io::ErrorKind::WouldBlock);
        },
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(3));
    assert_eq!(tmp[0..3], [0, 1, 2]);
    assert_eq!(prod0.push_slice(&[3, 4, 5]), Ok(3));
    // The 3 queued bytes now arrive over two calls (2 then 1) — the source
    // data wraps around the end of its storage at this point.
    match prod1.read_from(&mut cons0, None) {
        Ok(n) => assert_eq!(n, 2),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(2));
    assert_eq!(tmp[0..2], [3, 4]);
    match prod1.read_from(&mut cons0, None) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(1));
    assert_eq!(tmp[0..1], [5]);
    // Pre-fill the destination so only one slot is free.
    assert_eq!(prod1.push_slice(&[6, 7, 8]), Ok(3));
    assert_eq!(prod0.push_slice(&[9, 10]), Ok(2));
    match prod1.read_from(&mut cons0, None) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    // A full destination is reported as `RbFull`.
    match prod1.read_from(&mut cons0, None) {
        Err(ReadFromError::RbFull) => (),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [6, 7, 8, 9]);
}
#[test]
fn write_into() {
    // Drain one ring buffer into another via `write_into` (treating the
    // destination producer as an `io::Write`); mirror image of `read_from`:
    // here the empty source is `RbEmpty` and the full destination surfaces
    // as a `WouldBlock` write error.
    let buf0 = RingBuffer::<u8>::new(4);
    let buf1 = RingBuffer::<u8>::new(4);
    let (mut prod0, mut cons0) = buf0.split();
    let (mut prod1, mut cons1) = buf1.split();
    let mut tmp = [0; 5];
    assert_eq!(prod0.push_slice(&[0, 1, 2]), Ok(3));
    match cons0.write_into(&mut prod1, None) {
        Ok(n) => assert_eq!(n, 3),
        other => panic!("{:?}", other),
    }
    // Nothing left in the source buffer.
    match cons0.write_into(&mut prod1, None) {
        Err(WriteIntoError::RbEmpty) => (),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(3));
    assert_eq!(tmp[0..3], [0, 1, 2]);
    assert_eq!(prod0.push_slice(&[3, 4, 5]), Ok(3));
    // The 3 stored bytes come out over two calls (2 then 1) — the source
    // data wraps around the end of its storage at this point.
    match cons0.write_into(&mut prod1, None) {
        Ok(n) => assert_eq!(n, 2),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(2));
    assert_eq!(tmp[0..2], [3, 4]);
    match cons0.write_into(&mut prod1, None) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(1));
    assert_eq!(tmp[0..1], [5]);
    // Pre-fill the destination so only one slot is free.
    assert_eq!(prod1.push_slice(&[6, 7, 8]), Ok(3));
    assert_eq!(prod0.push_slice(&[9, 10]), Ok(2));
    match cons0.write_into(&mut prod1, None) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    // The full destination reports `WouldBlock` through the write error.
    match cons0.write_into(&mut prod1, None) {
        Err(WriteIntoError::Write(e)) => {
            assert_eq!(e.kind(), io::ErrorKind::WouldBlock);
        },
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [6, 7, 8, 9]);
}
#[test]
fn read_from_write_into_count() {
    // Check the explicit `count` limit of `read_from`/`write_into`.
    let buf0 = RingBuffer::<u8>::new(4);
    let buf1 = RingBuffer::<u8>::new(4);
    let (mut prod0, mut cons0) = buf0.split();
    let (mut prod1, mut cons1) = buf1.split();
    let mut tmp = [0; 5];
    assert_eq!(prod0.push_slice(&[0, 1, 2, 3]), Ok(4));
    match prod1.read_from(&mut cons0, Some(3)) {
        Ok(n) => assert_eq!(n, 3),
        other => panic!("{:?}", other),
    }
    // 2 requested, but only 1 byte remains in the source.
    match prod1.read_from(&mut cons0, Some(2)) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [0, 1, 2, 3]);
    assert_eq!(prod0.push_slice(&[4, 5, 6, 7]), Ok(4));
    // The 1 + 2 + 1 split below suggests each call also stops at the wrap
    // point of the underlying storage, independent of the requested count.
    match cons0.write_into(&mut prod1, Some(3)) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    match cons0.write_into(&mut prod1, Some(2)) {
        Ok(n) => assert_eq!(n, 2),
        other => panic!("{:?}", other),
    }
    match cons0.write_into(&mut prod1, Some(2)) {
        Ok(n) => assert_eq!(n, 1),
        other => panic!("{:?}", other),
    }
    assert_eq!(cons1.pop_slice(&mut tmp), Ok(4));
    assert_eq!(tmp[0..4], [4, 5, 6, 7]);
}
#[test]
fn push_pop_access_message() {
    // Stream a NUL-terminated message through a tiny (7-slot) buffer between
    // a producer thread and a consumer thread, using the raw
    // `push_access`/`pop_access` API with `Read`/`Write` on the exposed slices.
    let buf = RingBuffer::<u8>::new(7);
    let (mut prod, mut cons) = buf.split();
    let smsg = "The quick brown fox jumps over the lazy dog";
    let pjh = thread::spawn(move || {
        // Append a trailing NUL so the consumer can detect end-of-message.
        let zero = [0 as u8];
        let mut bytes = smsg.as_bytes().chain(&zero[..]);
        loop {
            let push_fn = |left: &mut [u8], right: &mut [u8]| -> Result<(usize, ()),()> {
                // Fill the first free slice; only touch the second one after
                // the first has been completely filled.
                let n = bytes.read(left).unwrap();
                let m = if n == left.len() {
                    bytes.read(right).unwrap()
                } else {
                    0
                };
                Ok((n + m, ()))
            };
            match unsafe { prod.push_access(push_fn) } {
                Ok(res) => match res {
                    // Zero bytes read means the whole message has been sent.
                    Ok((n, ())) => if n == 0 { break; },
                    Err(()) => unreachable!(),
                },
                Err(e) => match e {
                    // Buffer full: back off briefly and retry.
                    PushAccessError::Full => thread::sleep(Duration::from_millis(1)),
                    PushAccessError::BadLen => unreachable!(),
                }
            }
        }
    });
    let cjh = thread::spawn(move || {
        let mut bytes = Vec::<u8>::new();
        loop {
            let pop_fn = |left: &mut [u8], right: &mut [u8]| -> Result<(usize, ()),()> {
                // Drain the first occupied slice; only touch the second one
                // after the first was fully consumed.
                let n = bytes.write(left).unwrap();
                let m = if n == left.len() {
                    bytes.write(right).unwrap()
                } else {
                    0
                };
                Ok((n + m, ()))
            };
            match unsafe { cons.pop_access(pop_fn) } {
                Ok(res) => match res {
                    Ok((_n, ())) => (),
                    Err(()) => unreachable!(),
                },
                Err(e) => match e {
                    PopAccessError::Empty => {
                        // The trailing NUL marks end-of-message; otherwise the
                        // producer simply hasn't caught up yet.
                        if bytes.ends_with(&[0]) {
                            break;
                        } else {
                            thread::sleep(Duration::from_millis(1));
                        }
                    },
                    PopAccessError::BadLen => unreachable!(),
                }
            }
        }
        // Strip the NUL terminator before decoding the message.
        assert_eq!(bytes.pop().unwrap(), 0);
        String::from_utf8(bytes).unwrap()
    });
    pjh.join().unwrap();
    let rmsg = cjh.join().unwrap();
    assert_eq!(smsg, rmsg);
}
#[test]
fn push_pop_slice_message() {
    // Same producer/consumer message exchange as `push_pop_access_message`,
    // but driven through the safe `push_slice`/`pop_slice` API.
    let buf = RingBuffer::<u8>::new(7);
    let (mut prod, mut cons) = buf.split();
    let smsg = "The quick brown fox jumps over the lazy dog";
    let pjh = thread::spawn(move || {
        let mut bytes = smsg.as_bytes();
        while bytes.len() > 0 {
            match prod.push_slice(bytes) {
                // Advance past whatever portion was accepted.
                Ok(n) => bytes = &bytes[n..bytes.len()],
                Err(PushSliceError::Full) => thread::sleep(Duration::from_millis(1)),
            }
        }
        // Send the NUL terminator, retrying until there is room for it.
        loop {
            match prod.push(0) {
                Ok(()) => break,
                Err(PushError::Full(_)) => thread::sleep(Duration::from_millis(1)),
            }
        }
    });
    let cjh = thread::spawn(move || {
        let mut bytes = Vec::<u8>::new();
        let mut buffer = [0; 5];
        loop {
            match cons.pop_slice(&mut buffer) {
                Ok(n) => bytes.extend_from_slice(&buffer[0..n]),
                Err(PopSliceError::Empty) => {
                    // NUL marks end-of-message; otherwise wait for the producer.
                    if bytes.ends_with(&[0]) {
                        break;
                    } else {
                        thread::sleep(Duration::from_millis(1));
                    }
                }
            }
        }
        // Strip the NUL terminator before decoding the message.
        assert_eq!(bytes.pop().unwrap(), 0);
        String::from_utf8(bytes).unwrap()
    });
    pjh.join().unwrap();
    let rmsg = cjh.join().unwrap();
    assert_eq!(smsg, rmsg);
}
#[test]
fn read_from_write_into_message() {
    // Same message exchange again, this time pumping bytes with
    // `read_from` (producer side) and `write_into` (consumer side).
    let buf = RingBuffer::<u8>::new(7);
    let (mut prod, mut cons) = buf.split();
    let smsg = "The quick brown fox jumps over the lazy dog";
    let pjh = thread::spawn(move || {
        // Message bytes followed by a NUL terminator.
        let zero = [0 as u8];
        let mut bytes = smsg.as_bytes().chain(&zero[..]);
        loop {
            match prod.read_from(&mut bytes, None) {
                // Reading 0 bytes from the chain means the message is done.
                Ok(n) => if n == 0 { break; },
                Err(err) => {
                    if let ReadFromError::RbFull = err {
                        // Buffer full: back off briefly and retry.
                        thread::sleep(Duration::from_millis(1));
                    } else {
                        unreachable!();
                    }
                },
            }
        }
    });
    let cjh = thread::spawn(move || {
        let mut bytes = Vec::<u8>::new();
        loop {
            match cons.write_into(&mut bytes, None) {
                Ok(_n) => (),
                Err(err) => {
                    if let WriteIntoError::RbEmpty = err {
                        // NUL marks end-of-message; otherwise keep waiting.
                        if bytes.ends_with(&[0]) {
                            break;
                        } else {
                            thread::sleep(Duration::from_millis(1));
                        }
                    } else {
                        unreachable!();
                    }
                },
            }
        }
        // Strip the NUL terminator before decoding the message.
        assert_eq!(bytes.pop().unwrap(), 0);
        String::from_utf8(bytes).unwrap()
    });
    pjh.join().unwrap();
    let rmsg = cjh.join().unwrap();
    assert_eq!(smsg, rmsg);
}
#[test]
fn read_write_message() {
    // Final variant: the producer/consumer halves are driven through their
    // `io::Write`/`io::Read` implementations directly.
    let buf = RingBuffer::<u8>::new(7);
    let (mut prod, mut cons) = buf.split();
    let smsg = "The quick brown fox jumps over the lazy dog";
    let pjh = thread::spawn(move || {
        let mut bytes = smsg.as_bytes();
        while bytes.len() > 0 {
            match prod.write(bytes) {
                // Advance past whatever portion was accepted.
                Ok(n) => bytes = &bytes[n..bytes.len()],
                Err(err) => {
                    // A full buffer surfaces as a `WouldBlock` I/O error.
                    assert_eq!(err.kind(), io::ErrorKind::WouldBlock);
                    thread::sleep(Duration::from_millis(1));
                },
            }
        }
        // Send the NUL terminator, retrying until there is room for it.
        loop {
            match prod.push(0) {
                Ok(()) => break,
                Err(PushError::Full(_)) => thread::sleep(Duration::from_millis(1)),
            }
        }
    });
    let cjh = thread::spawn(move || {
        let mut bytes = Vec::<u8>::new();
        let mut buffer = [0; 5];
        loop {
            match cons.read(&mut buffer) {
                Ok(n) => bytes.extend_from_slice(&buffer[0..n]),
                Err(err) => {
                    // An empty buffer likewise surfaces as `WouldBlock`.
                    assert_eq!(err.kind(), io::ErrorKind::WouldBlock);
                    // NUL marks end-of-message; otherwise wait for the producer.
                    if bytes.ends_with(&[0]) {
                        break;
                    } else {
                        thread::sleep(Duration::from_millis(1));
                    }
                },
            }
        }
        // Strip the NUL terminator before decoding the message.
        assert_eq!(bytes.pop().unwrap(), 0);
        String::from_utf8(bytes).unwrap()
    });
    pjh.join().unwrap();
    let rmsg = cjh.join().unwrap();
    assert_eq!(smsg, rmsg);
}
| 27.632085 | 89 | 0.495533 |
299f9515e51aa571f399aec4988852ba4f02cedd | 9,272 | use crate::error::*;
use crate::{Any, Class, Explicit, Implicit, Tag, TaggedParser};
use core::convert::{TryFrom, TryInto};
#[cfg(feature = "std")]
use std::io::Write;
/// Phantom type representing a BER parser
#[doc(hidden)]
#[derive(Debug)]
pub enum BerParser {}

/// Phantom type representing a DER parser
#[doc(hidden)]
#[derive(Debug)]
pub enum DerParser {}

/// Marker trait implemented by the phantom parser types
/// ([`BerParser`] and [`DerParser`]).
#[doc(hidden)]
pub trait ASN1Parser {}

impl ASN1Parser for BerParser {}
impl ASN1Parser for DerParser {}
/// Trait for objects whose ASN.1 tag is fixed and known at compile time.
pub trait Tagged {
    /// The ASN.1 tag associated with this type.
    const TAG: Tag;
}

// A reference carries the same tag as the type it points to.
impl<T> Tagged for &'_ T
where
    T: Tagged,
{
    const TAG: Tag = T::TAG;
}
/// Trait for objects whose ASN.1 tag is only known at runtime.
pub trait DynTagged {
    /// Returns the ASN.1 tag of this value.
    fn tag(&self) -> Tag;
}

// Blanket implementation: every statically-tagged type is trivially
// dynamically tagged.
impl<T> DynTagged for T
where
    T: Tagged,
{
    fn tag(&self) -> Tag {
        T::TAG
    }
}
/// Base trait for BER object parsers
///
/// Library authors should usually not directly implement this trait, but should prefer implementing the
/// `TryFrom<Any>` trait,
/// which offers greater flexibility and provides an equivalent `FromBer` implementation for free.
///
/// # Examples
///
/// ```
/// use asn1_rs::{Any, Result, Tag};
/// use std::convert::TryFrom;
///
/// // The type to be decoded
/// #[derive(Clone, Copy, Debug, PartialEq, Eq)]
/// pub struct MyType(pub u32);
///
/// impl<'a> TryFrom<Any<'a>> for MyType {
///     type Error = asn1_rs::Error;
///
///     fn try_from(any: Any<'a>) -> Result<MyType> {
///         any.tag().assert_eq(Tag::Integer)?;
///         // for this fictive example, the type contains the number of characters
///         let n = any.data.len() as u32;
///         Ok(MyType(n))
///     }
/// }
///
/// // The above code provides a `FromBer` implementation for free.
///
/// // Example of parsing code:
/// use asn1_rs::FromBer;
///
/// let input = &[2, 1, 2];
/// // Objects can be parsed using `from_ber`, which returns the remaining bytes
/// // and the parsed object:
/// let (rem, my_type) = MyType::from_ber(input).expect("parsing failed");
/// ```
pub trait FromBer<'a>: Sized {
    /// Attempt to parse input bytes into a BER object
    ///
    /// On success, returns the remaining (unparsed) bytes and the object.
    fn from_ber(bytes: &'a [u8]) -> ParseResult<'a, Self>;
}
impl<'a, T> FromBer<'a> for T
where
T: TryFrom<Any<'a>, Error = Error>,
{
fn from_ber(bytes: &'a [u8]) -> ParseResult<T> {
let (i, any) = Any::from_ber(bytes)?;
let result = any.try_into().map_err(nom::Err::Error)?;
Ok((i, result))
}
}
/// Base trait for DER object parsers
///
/// Library authors should usually not directly implement this trait, but should prefer implementing the
/// `TryFrom<Any>` + `CheckDerConstraints` traits,
/// which offers greater flexibility and provides an equivalent `FromDer` implementation for free
/// (in fact, it provides both [`FromBer`] and `FromDer`).
///
/// Note: if you already implemented `TryFrom<Any>` to get the [`FromBer`] implementation, then you only
/// have to add a `CheckDerConstraints` implementation.
///
/// # Examples
///
/// ```
/// use asn1_rs::{Any, CheckDerConstraints, Result, Tag};
/// use std::convert::TryFrom;
///
/// // The type to be decoded
/// #[derive(Clone, Copy, Debug, PartialEq, Eq)]
/// pub struct MyType(pub u32);
///
/// impl<'a> TryFrom<Any<'a>> for MyType {
///     type Error = asn1_rs::Error;
///
///     fn try_from(any: Any<'a>) -> Result<MyType> {
///         any.tag().assert_eq(Tag::Integer)?;
///         // for this fictive example, the type contains the number of characters
///         let n = any.data.len() as u32;
///         Ok(MyType(n))
///     }
/// }
///
/// impl CheckDerConstraints for MyType {
///     fn check_constraints(any: &Any) -> Result<()> {
///         any.header.assert_primitive()?;
///         Ok(())
///     }
/// }
///
/// // The above code provides a `FromDer` implementation for free.
///
/// // Example of parsing code:
/// use asn1_rs::FromDer;
///
/// let input = &[2, 1, 2];
/// // Objects can be parsed using `from_der`, which returns the remaining bytes
/// // and the parsed object:
/// let (rem, my_type) = MyType::from_der(input).expect("parsing failed");
/// ```
pub trait FromDer<'a>: Sized {
    /// Attempt to parse input bytes into a DER object (enforcing constraints)
    ///
    /// On success, returns the remaining (unparsed) bytes and the object.
    fn from_der(bytes: &'a [u8]) -> ParseResult<'a, Self>;
}
impl<'a, T> FromDer<'a> for T
where
T: TryFrom<Any<'a>, Error = Error>,
T: CheckDerConstraints,
{
fn from_der(bytes: &'a [u8]) -> ParseResult<T> {
let (i, any) = Any::from_der(bytes)?;
// X.690 section 10.1: definite form of length encoding shall be used
if !any.header.length.is_definite() {
return Err(nom::Err::Error(Error::IndefiniteLengthUnexpected));
}
<T as CheckDerConstraints>::check_constraints(&any).map_err(nom::Err::Error)?;
let result = any.try_into().map_err(nom::Err::Error)?;
Ok((i, result))
}
}
/// Verification of DER constraints
pub trait CheckDerConstraints {
    /// Check that `any` satisfies this type's DER encoding constraints
    /// (e.g. primitive vs constructed form — see the `FromDer` docs).
    fn check_constraints(any: &Any) -> Result<()>;
}
/// Common trait for all objects that can be encoded using the DER representation
///
/// # Examples
///
/// Objects from this crate can be encoded as DER:
///
/// ```
/// use asn1_rs::{Integer, ToDer};
///
/// let int = Integer::from(4u32);
/// let mut writer = Vec::new();
/// let sz = int.write_der(&mut writer).expect("serialization failed");
///
/// assert_eq!(&writer, &[0x02, 0x01, 0x04]);
/// # assert_eq!(sz, 3);
/// ```
///
/// Many of the primitive types can also directly be encoded as DER:
///
/// ```
/// use asn1_rs::ToDer;
///
/// let mut writer = Vec::new();
/// let sz = 4.write_der(&mut writer).expect("serialization failed");
///
/// assert_eq!(&writer, &[0x02, 0x01, 0x04]);
/// # assert_eq!(sz, 3);
/// ```
#[cfg(feature = "std")]
pub trait ToDer
where
    Self: DynTagged,
{
    /// Get the length of the object, when encoded
    ///
    // Since we are using DER, length cannot be Indefinite, so we can use `usize`.
    // NOTE(review): unclear whether this can actually fail — the `Result`
    // return suggests it may for some types; confirm with implementors.
    fn to_der_len(&self) -> Result<usize>;

    /// Write the DER encoded representation to a newly allocated `Vec<u8>`.
    fn to_der_vec(&self) -> SerializeResult<Vec<u8>> {
        let mut v = Vec::new();
        let _ = self.write_der(&mut v)?;
        Ok(v)
    }

    /// Similar to using `to_vec`, but uses provided values without changes.
    /// This can generate an invalid encoding for a DER object.
    fn to_der_vec_raw(&self) -> SerializeResult<Vec<u8>> {
        let mut v = Vec::new();
        let _ = self.write_der_raw(&mut v)?;
        Ok(v)
    }

    /// Attempt to write the DER encoded representation (header and content) into this writer.
    ///
    /// Returns the total number of bytes written.
    ///
    /// # Examples
    ///
    /// ```
    /// use asn1_rs::{Integer, ToDer};
    ///
    /// let int = Integer::from(4u32);
    /// let mut writer = Vec::new();
    /// let sz = int.write_der(&mut writer).expect("serialization failed");
    ///
    /// assert_eq!(&writer, &[0x02, 0x01, 0x04]);
    /// # assert_eq!(sz, 3);
    /// ```
    fn write_der(&self, writer: &mut dyn Write) -> SerializeResult<usize> {
        // Header first, then content; the sum is the full encoded size.
        let sz = self.write_der_header(writer)?;
        let sz = sz + self.write_der_content(writer)?;
        Ok(sz)
    }

    /// Attempt to write the DER header to this writer.
    fn write_der_header(&self, writer: &mut dyn Write) -> SerializeResult<usize>;

    /// Attempt to write the DER content (all except header) to this writer.
    fn write_der_content(&self, writer: &mut dyn Write) -> SerializeResult<usize>;

    /// Similar to using `to_der`, but uses provided values without changes.
    /// This can generate an invalid encoding for a DER object.
    fn write_der_raw(&self, writer: &mut dyn Write) -> SerializeResult<usize> {
        // Default: no raw-mode special casing; identical to `write_der`.
        self.write_der(writer)
    }
}
// A reference serializes exactly like the value it points to.
#[cfg(feature = "std")]
impl<'a, T> ToDer for &'a T
where
    T: ToDer,
    &'a T: DynTagged,
{
    fn to_der_len(&self) -> Result<usize> {
        (*self).to_der_len()
    }

    fn write_der_header(&self, writer: &mut dyn Write) -> SerializeResult<usize> {
        (*self).write_der_header(writer)
    }

    fn write_der_content(&self, writer: &mut dyn Write) -> SerializeResult<usize> {
        (*self).write_der_content(writer)
    }
}
/// Helper trait for creating tagged EXPLICIT values
///
/// # Examples
///
/// ```
/// use asn1_rs::{AsTaggedExplicit, Class};
///
/// // create a `[1] EXPLICIT INTEGER` value
/// let tagged = 4u32.explicit(Class::ContextSpecific, 1);
/// ```
pub trait AsTaggedExplicit<'a>: Sized {
    /// Wrap `self` in an EXPLICIT tagged value with the given class and tag number.
    fn explicit(self, class: Class, tag: u32) -> TaggedParser<'a, Explicit, Self> {
        TaggedParser::new_explicit(class, tag, self)
    }
}

// Blanket implementation: any sized value can be explicitly tagged.
impl<'a, T> AsTaggedExplicit<'a> for T where T: Sized + 'a {}
/// Helper trait for creating tagged IMPLICIT values
///
/// # Examples
///
/// ```
/// use asn1_rs::{AsTaggedImplicit, Class};
///
/// // create a `[1] IMPLICIT INTEGER` value, not constructed
/// let tagged = 4u32.implicit(Class::ContextSpecific, false, 1);
/// ```
pub trait AsTaggedImplicit<'a>: Sized {
    /// Wrap `self` in an IMPLICIT tagged value with the given class,
    /// constructed flag and tag number.
    fn implicit(
        self,
        class: Class,
        constructed: bool,
        tag: u32,
    ) -> TaggedParser<'a, Implicit, Self> {
        TaggedParser::new_implicit(class, constructed, tag, self)
    }
}

// Blanket implementation: any sized value can be implicitly tagged.
impl<'a, T> AsTaggedImplicit<'a> for T where T: Sized + 'a {}
/// Helper trait to obtain an owned, `'static` version of a (possibly
/// borrowed) object.
pub trait ToStatic {
    /// The owned (`'static`) counterpart of `Self`.
    type Owned: 'static;
    /// Create an owned copy of this value.
    fn to_static(&self) -> Self::Owned;
}
| 28.35474 | 104 | 0.612705 |
18a3bf2075d0e1376e5586115b605276c0df8970 | 2,292 | use crate::database::Database;
use crate::from_headers::*;
use crate::ResourceQuota;
use azure_core::errors::AzureError;
use azure_core::headers::{continuation_token_from_headers_optional, session_token_from_headers};
use chrono::{DateTime, Utc};
use http::HeaderMap;
/// Response to a "list databases" request.
///
/// `rid`, `databases` and `count` come from the JSON body; the remaining
/// fields are extracted from the response headers (see the `TryFrom` impl).
#[derive(Clone, PartialEq, PartialOrd, Debug)]
pub struct ListDatabasesResponse {
    // Resource id of the feed (JSON `_rid`).
    pub rid: String,
    // Databases returned in this response (JSON `Databases`).
    pub databases: Vec<Database>,
    // Number of databases in this response (JSON `_count`).
    pub count: u32,
    pub activity_id: uuid::Uuid,
    // Request charge reported in the response headers.
    pub charge: f64,
    pub session_token: String,
    pub last_state_change: DateTime<Utc>,
    pub resource_quota: Vec<ResourceQuota>,
    pub resource_usage: Vec<ResourceQuota>,
    pub schema_version: String,
    pub service_version: String,
    // Continuation token for fetching the next page, if any.
    pub continuation_token: Option<String>,
    pub gateway_version: String,
}
impl std::convert::TryFrom<(&HeaderMap, &[u8])> for ListDatabasesResponse {
type Error = AzureError;
fn try_from(value: (&HeaderMap, &[u8])) -> Result<Self, Self::Error> {
let headers = value.0;
let body = value.1;
debug!("headers == {:#?}", headers);
#[derive(Deserialize, Debug)]
pub struct Response {
#[serde(rename = "_rid")]
rid: String,
#[serde(rename = "Databases")]
pub databases: Vec<Database>,
#[serde(rename = "_count")]
pub count: u32,
}
let response: Response = serde_json::from_slice(body)?;
Ok(Self {
rid: response.rid,
databases: response.databases,
count: response.count,
charge: request_charge_from_headers(headers)?,
activity_id: activity_id_from_headers(headers)?,
session_token: session_token_from_headers(headers)?,
last_state_change: last_state_change_from_headers(headers)?,
resource_quota: resource_quota_from_headers(headers)?,
resource_usage: resource_usage_from_headers(headers)?,
schema_version: schema_version_from_headers(headers)?.to_owned(),
service_version: service_version_from_headers(headers)?.to_owned(),
continuation_token: continuation_token_from_headers_optional(headers)?,
gateway_version: gateway_version_from_headers(headers)?.to_owned(),
})
}
}
| 36.380952 | 96 | 0.657941 |
c1ab933c7b5ac054ace400579955027440ce23b1 | 2,997 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_arrow::arrow::bitmap::Bitmap;
use common_exception::ErrorCode;
use common_exception::Result;
use common_io::prelude::FormatSettings;
use enum_dispatch::enum_dispatch;
use opensrv_clickhouse::types::column::ArcColumnData;
use serde_json::Value;
use crate::prelude::*;
mod array;
mod boolean;
mod date;
mod null;
mod nullable;
mod number;
mod string;
mod struct_;
mod timestamp;
mod variant;
pub use array::*;
pub use boolean::*;
pub use date::*;
pub use null::*;
pub use nullable::*;
pub use number::*;
pub use string::*;
pub use struct_::*;
pub use timestamp::*;
pub use variant::*;
/// Serializes values of one concrete data type into textual/wire formats
/// (display strings, JSON values, ClickHouse column data).
#[enum_dispatch]
pub trait TypeSerializer: Send + Sync {
    /// Render a single scalar value as a display string.
    fn serialize_value(&self, value: &DataValue, format: &FormatSettings) -> Result<String>;

    /// Render every value of the column as a JSON value.
    fn serialize_json(&self, column: &ColumnRef, format: &FormatSettings) -> Result<Vec<Value>>;

    /// Render every value of the column as a display string.
    fn serialize_column(&self, column: &ColumnRef, format: &FormatSettings) -> Result<Vec<String>>;

    /// Convert the column into ClickHouse wire-protocol column data.
    fn serialize_clickhouse_format(
        &self,
        column: &ColumnRef,
        _format: &FormatSettings,
    ) -> Result<ArcColumnData>;

    /// Serialize column values as JSON objects. Default implementation:
    /// unsupported data type (only some serializers override this).
    fn serialize_json_object(
        &self,
        _column: &ColumnRef,
        _valids: Option<&Bitmap>,
        _format: &FormatSettings,
    ) -> Result<Vec<Value>> {
        Err(ErrorCode::BadDataValueType(
            "Error parsing JSON: unsupported data type",
        ))
    }

    /// Like `serialize_json_object` but, per the name, presumably maps
    /// per-value failures to `None` instead of failing outright — confirm
    /// against the overriding implementations. Default: unsupported.
    fn serialize_json_object_suppress_error(
        &self,
        _column: &ColumnRef,
        _format: &FormatSettings,
    ) -> Result<Vec<Option<Value>>> {
        Err(ErrorCode::BadDataValueType(
            "Error parsing JSON: unsupported data type",
        ))
    }
}
/// Dispatch enum over all concrete serializers; `enum_dispatch` generates a
/// `TypeSerializer` impl that forwards each call to the active variant.
#[derive(Debug, Clone)]
#[enum_dispatch(TypeSerializer)]
pub enum TypeSerializerImpl {
    Null(NullSerializer),
    Nullable(NullableSerializer),
    Boolean(BooleanSerializer),
    Int8(NumberSerializer<i8>),
    Int16(NumberSerializer<i16>),
    Int32(NumberSerializer<i32>),
    Int64(NumberSerializer<i64>),
    UInt8(NumberSerializer<u8>),
    UInt16(NumberSerializer<u16>),
    UInt32(NumberSerializer<u32>),
    UInt64(NumberSerializer<u64>),
    Float32(NumberSerializer<f32>),
    Float64(NumberSerializer<f64>),
    Date(DateSerializer<i32>),
    Interval(DateSerializer<i64>),
    Timestamp(TimestampSerializer),
    String(StringSerializer),
    Array(ArraySerializer),
    Struct(StructSerializer),
    Variant(VariantSerializer),
}
| 28.817308 | 99 | 0.695696 |
1197cf6e89835254c9a87ec0c83b26f2ed3622bc | 2,920 | //These are practice modules for rust error handling
//Using generic result types, box dyn, user defined errors, crate "inherited" errors
use std::io::{stdin, self, Write};
/// Entry point for the error-handling practice examples.
pub fn run() {
    // Crash-prone approach first, then the recovering one.
    the_jist();
    match_jist();
}
//the jist will allow you to enter anything and convert to string
//match jist only wants a number<f64> and maps incorrect input to error msg
/// Prompts for a number and converts the input line, panicking with a
/// friendly message when the input is not numeric.
///
/// Demonstrates `unwrap`/`expect`: convenient for quick scripts and tests,
/// but the process crashes instead of recovering (contrast `match_jist`,
/// which loops until the input parses).
fn the_jist() {
    let mut user_input = String::new();
    print!("1. Enter a number: ");
    // Flush so the prompt (which has no trailing newline) appears before we
    // block on stdin. `unwrap` is fine here: flushing stdout essentially
    // never fails, and this example is not handling that case anyway.
    io::stdout().flush().unwrap();
    // `read_line` returns a Result; `expect` is like `unwrap` but lets us
    // attach a message for the unlikely case that reading stdin fails.
    stdin()
        .read_line(&mut user_input)
        .expect("Did not enter a correct string");
    // BUG FIX: the original parsed into `String`, whose `FromStr` impl is
    // infallible — so the "didn't enter a number" message could never fire
    // and no validation happened at all. Parsing into `f64` makes the
    // `expect` meaningful: non-numeric input now panics with the message
    // below. `trim` strips the trailing newline first so the parse can
    // succeed.
    let my_number: f64 = user_input
        .trim()
        .parse()
        .expect("You probably didn't enter a number...");
    println!("You entered a number, it was: {:?}", my_number);
}
/// Keeps prompting until the user supplies a valid `f64`, then echoes it.
///
/// Demonstrates recovering from a parse error with `match` instead of
/// crashing the way `the_jist` does.
fn match_jist() {
    let mut input = String::new();
    println!("2. Enter a number: ");
    io::stdout().flush().unwrap();
    // `loop` is an expression: `break value` hands the parsed number out.
    let number = loop {
        // Reuse one buffer across attempts; `read_line` appends, so clear first.
        input.clear();
        stdin()
            .read_line(&mut input)
            .expect("Did not enter a correct string");
        // `trim` drops the trailing newline; the turbofish selects the target
        // type for `parse`, and the match handles both outcomes.
        match input.trim().parse::<f64>() {
            Ok(value) => break value,
            Err(_) => println!("Try again. Enter a number."),
        }
    };
    println!("You entered {:?}", number);
}
// next concept -> result_type.rs | 40 | 108 | 0.597603 |
56bd0ee699ecec8b347d7970029ac96d92138a59 | 2,831 | use crate::integ_tests::workflow_tests::timers::timer_wf;
use std::{
sync::atomic::{AtomicBool, AtomicUsize, Ordering},
time::Duration,
};
use temporal_sdk::{WfContext, WorkflowResult};
use test_utils::CoreWfStarter;
use tokio::sync::Barrier;
#[tokio::test]
async fn timer_workflow_not_sticky() {
    // Run the simple timer workflow with the workflow cache disabled, so
    // tasks are not delivered sticky.
    let wf_name = "timer_wf_not_sticky";
    let mut starter = CoreWfStarter::new(wf_name);
    // Cache size 0 forces non-sticky execution.
    starter.max_cached_workflows(0);
    let mut worker = starter.worker().await;
    worker.register_wf(wf_name.to_owned(), timer_wf);
    worker
        .submit_wf(wf_name.to_owned(), wf_name.to_owned(), vec![])
        .await
        .unwrap();
    worker.run_until_done().await.unwrap();
}
// Cross-run state shared by `timer_timeout_wf` and its test. NOTE(review):
// these are process-wide globals, so that test cannot run twice in one
// process without resetting them.
static TIMED_OUT_ONCE: AtomicBool = AtomicBool::new(false);
static RUN_CT: AtomicUsize = AtomicUsize::new(0);

/// Workflow that force-fails its first workflow task (exactly once), then
/// completes after a 1s timer on a subsequent attempt.
async fn timer_timeout_wf(ctx: WfContext) -> WorkflowResult<()> {
    // Count every execution so the test can verify the task ran twice.
    RUN_CT.fetch_add(1, Ordering::SeqCst);
    let t = ctx.timer(Duration::from_secs(1));
    if !TIMED_OUT_ONCE.load(Ordering::SeqCst) {
        // Deliberately fail this task; the flag ensures it happens only once.
        ctx.force_task_fail(anyhow::anyhow!("I AM SLAIN!"));
        TIMED_OUT_ONCE.store(true, Ordering::SeqCst);
    }
    t.await;
    Ok(().into())
}
#[tokio::test]
async fn timer_workflow_timeout_on_sticky() {
    // This test intentionally times out a workflow task in order to make the next task be scheduled
    // on a not-sticky queue
    let wf_name = "timer_workflow_timeout_on_sticky";
    let mut starter = CoreWfStarter::new(wf_name);
    // Short workflow task timeout so the forced task failure converts into a
    // timeout quickly instead of stalling the test.
    starter.wft_timeout(Duration::from_secs(2));
    let mut worker = starter.worker().await;
    worker.register_wf(wf_name.to_owned(), timer_timeout_wf);
    worker
        .submit_wf(wf_name.to_owned(), wf_name.to_owned(), vec![])
        .await
        .unwrap();
    worker.run_until_done().await.unwrap();
    // If it didn't run twice it didn't time out
    assert_eq!(RUN_CT.load(Ordering::SeqCst), 2);
}
#[tokio::test]
async fn cache_miss_ok() {
    // Verify that evicting a workflow mid-run is recoverable: after the
    // requested eviction the workflow is started over (hitting the barrier a
    // second time) and still completes.
    let wf_name = "cache_miss_ok";
    let mut starter = CoreWfStarter::new(wf_name);
    starter.max_wft(1);
    let mut worker = starter.worker().await;
    // Leaked so the `'static` barrier can be captured by the workflow closure.
    let barr: &'static Barrier = Box::leak(Box::new(Barrier::new(2)));
    worker.register_wf(wf_name.to_owned(), move |ctx: WfContext| async move {
        barr.wait().await;
        ctx.timer(Duration::from_secs(1)).await;
        Ok(().into())
    });
    let run_id = worker
        .submit_wf(wf_name.to_owned(), wf_name.to_owned(), vec![])
        .await
        .unwrap();
    let core = starter.get_core().await;
    let tq = starter.get_task_queue();
    // Run the worker while, concurrently, waiting for the workflow to start
    // and then evicting it.
    let (r1, _) = tokio::join!(worker.run_until_done(), async move {
        barr.wait().await;
        core.request_workflow_eviction(tq, &run_id);
        // We need to signal the barrier again since the wf gets evicted and will hit it again
        barr.wait().await;
    });
    r1.unwrap();
}
| 33.305882 | 100 | 0.661604 |
269bb851935cddd971b63e363f5ad80c01668f31 | 1,569 | //! Error types
use abscissa_core::error::{BoxError, Context};
use std::{
fmt::{self, Display},
io,
ops::Deref,
};
use thiserror::Error;
/// Kinds of errors
#[derive(Copy, Clone, Debug, Eq, Error, PartialEq)]
pub enum ErrorKind {
    /// Error in configuration file
    #[error("config error")]
    Config,
    /// Input/output error
    #[error("I/O error")]
    Io,
    /// Wallet error
    #[error("Wallet")]
    Wallet,
    /// Transaction error
    #[error("Transaction Error")]
    Transaction,
}
impl ErrorKind {
    /// Pair this error kind with a source error, producing a [`Context`].
    pub fn context(self, source: impl Into<BoxError>) -> Context<ErrorKind> {
        let boxed: BoxError = source.into();
        Context::new(self, Some(boxed))
    }
}
/// Error type
///
/// The context is boxed so that `Error` (and any `Result` carrying it) stays
/// a single pointer wide.
#[derive(Debug)]
pub struct Error(Box<Context<ErrorKind>>);
impl Deref for Error {
    type Target = Context<ErrorKind>;
    /// Give callers direct access to the underlying error context.
    fn deref(&self) -> &Context<ErrorKind> {
        &self.0
    }
}
impl Display for Error {
    /// Delegate user-facing formatting to the inner context.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt(&self.0, f)
    }
}
impl std::error::Error for Error {
    /// Report the underlying cause, if the inner context carries one.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.0.source()
    }
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Self {
Context::new(kind, None).into()
}
}
impl From<Context<ErrorKind>> for Error {
fn from(context: Context<ErrorKind>) -> Self {
Error(Box::new(context))
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self {
ErrorKind::Io.context(err).into()
}
}
| 19.860759 | 77 | 0.593372 |
4adea476fd335a41790f9e58e05c62cdd9dbdfb1 | 11,323 | use super::*;
// Display names for the platform modifier keys.
pub const ALT_STR: &str = "Alt";
pub const LOGO_STR: &str = "Win";
pub const MODIFIERS_ORDER: &str = "csam"; // Ctrl + Shift + Alt + Meta
// Sentinel scancode meaning "no key bound".
pub const SC_INVALID: u16 = 0x0000;
// Projections between raw Windows scancodes and the cross-platform key types.
pub const SC_TO_KEY_MAPPING: fn(u16) -> KeyMapping = KeyMapping::Win;
pub const KEY_MAP_TO_SC: fn(KeyMap) -> u16 = |k| k.win;
/// Return the layout-specific human-readable name for a scancode, falling back
/// to `"SC<code>"` when Windows has no name for it.
pub fn scancode_name(sc: u16) -> String {
    // This code is based on Frinksy's `keyboard-keynames` crate:
    // https://gitlab.com/Frinksy/keyboard-keynames/-/blob/master/src/platform/windows/key_layout.rs
    // `GetKeyNameTextW` expects the scancode in bits 16..24 of an lParam value.
    let mut l_param = (sc as cty::c_long) << 16;
    // Scancodes carrying the 0xE0 escape prefix are "extended" keys: flag bit 24.
    if (sc & 0x0000FF00) == 0xE000 {
        l_param |= 1 << 24;
    }
    // UTF-16 output buffer for the key name.
    const BUFFER_SIZE: usize = 32;
    let mut utf16_key_name = vec![0_u16; BUFFER_SIZE];
    // SAFETY: the buffer is exclusively owned and outlives the call, and
    // `GetKeyNameTextW()` returns 0 if it fails.
    let name_len = unsafe {
        winapi::um::winuser::GetKeyNameTextW(
            l_param,
            utf16_key_name.as_mut_ptr(),
            BUFFER_SIZE as cty::c_int,
        )
    };
    if name_len == 0 {
        return format!("SC{}", sc);
    }
    // Keep only the characters actually written, then decode them.
    utf16_key_name.truncate(name_len as usize);
    String::from_utf16_lossy(&utf16_key_name)
}
/// Translate a `KeyMap` (via its Windows scancode) to the matching winit
/// virtual key code, if one exists.
///
/// Stolen shamelessly from winit:
/// https://github.com/rust-windowing/winit/blob/bcd76d47186b074e536ca5ab9714953931796243/src/platform_impl/windows/event.rs#L186-L361
#[cfg(feature = "winit")]
pub fn key_map_to_winit_vkey(key: KeyMap) -> Option<winit::event::VirtualKeyCode> {
    use winapi::um::winuser::*;
    use winit::event::VirtualKeyCode;
    // First map the stored hardware scancode back to a Windows virtual-key code.
    let vkey = unsafe {
        winapi::um::winuser::MapVirtualKeyW(key.win as _, winapi::um::winuser::MAPVK_VSC_TO_VK)
    };
    // VK_* codes are documented here https://msdn.microsoft.com/en-us/library/windows/desktop/dd375731(v=vs.85).aspx
    match vkey as _ {
        //VK_LBUTTON => Some(VirtualKeyCode::Lbutton),
        //VK_RBUTTON => Some(VirtualKeyCode::Rbutton),
        //VK_CANCEL => Some(VirtualKeyCode::Cancel),
        //VK_MBUTTON => Some(VirtualKeyCode::Mbutton),
        //VK_XBUTTON1 => Some(VirtualKeyCode::Xbutton1),
        //VK_XBUTTON2 => Some(VirtualKeyCode::Xbutton2),
        VK_BACK => Some(VirtualKeyCode::Back),
        VK_TAB => Some(VirtualKeyCode::Tab),
        //VK_CLEAR => Some(VirtualKeyCode::Clear),
        VK_RETURN => Some(VirtualKeyCode::Return),
        VK_LSHIFT => Some(VirtualKeyCode::LShift),
        VK_RSHIFT => Some(VirtualKeyCode::RShift),
        VK_LCONTROL => Some(VirtualKeyCode::LControl),
        VK_RCONTROL => Some(VirtualKeyCode::RControl),
        VK_LMENU => Some(VirtualKeyCode::LAlt),
        VK_RMENU => Some(VirtualKeyCode::RAlt),
        VK_PAUSE => Some(VirtualKeyCode::Pause),
        VK_CAPITAL => Some(VirtualKeyCode::Capital),
        VK_KANA => Some(VirtualKeyCode::Kana),
        //VK_HANGUEL => Some(VirtualKeyCode::Hanguel),
        //VK_HANGUL => Some(VirtualKeyCode::Hangul),
        //VK_JUNJA => Some(VirtualKeyCode::Junja),
        //VK_FINAL => Some(VirtualKeyCode::Final),
        //VK_HANJA => Some(VirtualKeyCode::Hanja),
        VK_KANJI => Some(VirtualKeyCode::Kanji),
        VK_ESCAPE => Some(VirtualKeyCode::Escape),
        VK_CONVERT => Some(VirtualKeyCode::Convert),
        VK_NONCONVERT => Some(VirtualKeyCode::NoConvert),
        //VK_ACCEPT => Some(VirtualKeyCode::Accept),
        //VK_MODECHANGE => Some(VirtualKeyCode::Modechange),
        VK_SPACE => Some(VirtualKeyCode::Space),
        VK_PRIOR => Some(VirtualKeyCode::PageUp),
        VK_NEXT => Some(VirtualKeyCode::PageDown),
        VK_END => Some(VirtualKeyCode::End),
        VK_HOME => Some(VirtualKeyCode::Home),
        VK_LEFT => Some(VirtualKeyCode::Left),
        VK_UP => Some(VirtualKeyCode::Up),
        VK_RIGHT => Some(VirtualKeyCode::Right),
        VK_DOWN => Some(VirtualKeyCode::Down),
        //VK_SELECT => Some(VirtualKeyCode::Select),
        //VK_PRINT => Some(VirtualKeyCode::Print),
        //VK_EXECUTE => Some(VirtualKeyCode::Execute),
        VK_SNAPSHOT => Some(VirtualKeyCode::Snapshot),
        VK_INSERT => Some(VirtualKeyCode::Insert),
        VK_DELETE => Some(VirtualKeyCode::Delete),
        //VK_HELP => Some(VirtualKeyCode::Help),
        // ASCII digits: VK codes 48..=57 equal the character codes '0'..='9'.
        48 => Some(VirtualKeyCode::Key0),
        49 => Some(VirtualKeyCode::Key1),
        50 => Some(VirtualKeyCode::Key2),
        51 => Some(VirtualKeyCode::Key3),
        52 => Some(VirtualKeyCode::Key4),
        53 => Some(VirtualKeyCode::Key5),
        54 => Some(VirtualKeyCode::Key6),
        55 => Some(VirtualKeyCode::Key7),
        56 => Some(VirtualKeyCode::Key8),
        57 => Some(VirtualKeyCode::Key9),
        // ASCII letters: VK codes 65..=90 equal the character codes 'A'..='Z'.
        65 => Some(VirtualKeyCode::A),
        66 => Some(VirtualKeyCode::B),
        67 => Some(VirtualKeyCode::C),
        68 => Some(VirtualKeyCode::D),
        69 => Some(VirtualKeyCode::E),
        70 => Some(VirtualKeyCode::F),
        71 => Some(VirtualKeyCode::G),
        72 => Some(VirtualKeyCode::H),
        73 => Some(VirtualKeyCode::I),
        74 => Some(VirtualKeyCode::J),
        75 => Some(VirtualKeyCode::K),
        76 => Some(VirtualKeyCode::L),
        77 => Some(VirtualKeyCode::M),
        78 => Some(VirtualKeyCode::N),
        79 => Some(VirtualKeyCode::O),
        80 => Some(VirtualKeyCode::P),
        81 => Some(VirtualKeyCode::Q),
        82 => Some(VirtualKeyCode::R),
        83 => Some(VirtualKeyCode::S),
        84 => Some(VirtualKeyCode::T),
        85 => Some(VirtualKeyCode::U),
        86 => Some(VirtualKeyCode::V),
        87 => Some(VirtualKeyCode::W),
        88 => Some(VirtualKeyCode::X),
        89 => Some(VirtualKeyCode::Y),
        90 => Some(VirtualKeyCode::Z),
        VK_LWIN => Some(VirtualKeyCode::LWin),
        VK_RWIN => Some(VirtualKeyCode::RWin),
        VK_APPS => Some(VirtualKeyCode::Apps),
        VK_SLEEP => Some(VirtualKeyCode::Sleep),
        VK_NUMPAD0 => Some(VirtualKeyCode::Numpad0),
        VK_NUMPAD1 => Some(VirtualKeyCode::Numpad1),
        VK_NUMPAD2 => Some(VirtualKeyCode::Numpad2),
        VK_NUMPAD3 => Some(VirtualKeyCode::Numpad3),
        VK_NUMPAD4 => Some(VirtualKeyCode::Numpad4),
        VK_NUMPAD5 => Some(VirtualKeyCode::Numpad5),
        VK_NUMPAD6 => Some(VirtualKeyCode::Numpad6),
        VK_NUMPAD7 => Some(VirtualKeyCode::Numpad7),
        VK_NUMPAD8 => Some(VirtualKeyCode::Numpad8),
        VK_NUMPAD9 => Some(VirtualKeyCode::Numpad9),
        VK_MULTIPLY => Some(VirtualKeyCode::NumpadMultiply),
        VK_ADD => Some(VirtualKeyCode::NumpadAdd),
        //VK_SEPARATOR => Some(VirtualKeyCode::Separator),
        VK_SUBTRACT => Some(VirtualKeyCode::NumpadSubtract),
        VK_DECIMAL => Some(VirtualKeyCode::NumpadDecimal),
        VK_DIVIDE => Some(VirtualKeyCode::NumpadDivide),
        VK_F1 => Some(VirtualKeyCode::F1),
        VK_F2 => Some(VirtualKeyCode::F2),
        VK_F3 => Some(VirtualKeyCode::F3),
        VK_F4 => Some(VirtualKeyCode::F4),
        VK_F5 => Some(VirtualKeyCode::F5),
        VK_F6 => Some(VirtualKeyCode::F6),
        VK_F7 => Some(VirtualKeyCode::F7),
        VK_F8 => Some(VirtualKeyCode::F8),
        VK_F9 => Some(VirtualKeyCode::F9),
        VK_F10 => Some(VirtualKeyCode::F10),
        VK_F11 => Some(VirtualKeyCode::F11),
        VK_F12 => Some(VirtualKeyCode::F12),
        VK_F13 => Some(VirtualKeyCode::F13),
        VK_F14 => Some(VirtualKeyCode::F14),
        VK_F15 => Some(VirtualKeyCode::F15),
        VK_F16 => Some(VirtualKeyCode::F16),
        VK_F17 => Some(VirtualKeyCode::F17),
        VK_F18 => Some(VirtualKeyCode::F18),
        VK_F19 => Some(VirtualKeyCode::F19),
        VK_F20 => Some(VirtualKeyCode::F20),
        VK_F21 => Some(VirtualKeyCode::F21),
        VK_F22 => Some(VirtualKeyCode::F22),
        VK_F23 => Some(VirtualKeyCode::F23),
        VK_F24 => Some(VirtualKeyCode::F24),
        VK_NUMLOCK => Some(VirtualKeyCode::Numlock),
        VK_SCROLL => Some(VirtualKeyCode::Scroll),
        VK_BROWSER_BACK => Some(VirtualKeyCode::NavigateBackward),
        VK_BROWSER_FORWARD => Some(VirtualKeyCode::NavigateForward),
        VK_BROWSER_REFRESH => Some(VirtualKeyCode::WebRefresh),
        VK_BROWSER_STOP => Some(VirtualKeyCode::WebStop),
        VK_BROWSER_SEARCH => Some(VirtualKeyCode::WebSearch),
        VK_BROWSER_FAVORITES => Some(VirtualKeyCode::WebFavorites),
        VK_BROWSER_HOME => Some(VirtualKeyCode::WebHome),
        VK_VOLUME_MUTE => Some(VirtualKeyCode::Mute),
        VK_VOLUME_DOWN => Some(VirtualKeyCode::VolumeDown),
        VK_VOLUME_UP => Some(VirtualKeyCode::VolumeUp),
        VK_MEDIA_NEXT_TRACK => Some(VirtualKeyCode::NextTrack),
        VK_MEDIA_PREV_TRACK => Some(VirtualKeyCode::PrevTrack),
        VK_MEDIA_STOP => Some(VirtualKeyCode::MediaStop),
        VK_MEDIA_PLAY_PAUSE => Some(VirtualKeyCode::PlayPause),
        VK_LAUNCH_MAIL => Some(VirtualKeyCode::Mail),
        VK_LAUNCH_MEDIA_SELECT => Some(VirtualKeyCode::MediaSelect),
        /*VK_LAUNCH_APP1 => Some(VirtualKeyCode::Launch_app1),
        VK_LAUNCH_APP2 => Some(VirtualKeyCode::Launch_app2),*/
        VK_OEM_PLUS => Some(VirtualKeyCode::Equals),
        VK_OEM_COMMA => Some(VirtualKeyCode::Comma),
        VK_OEM_MINUS => Some(VirtualKeyCode::Minus),
        VK_OEM_PERIOD => Some(VirtualKeyCode::Period),
        // Layout-dependent punctuation: resolve via the character the key produces.
        VK_OEM_1 => map_text_keys(vkey as _),
        VK_OEM_2 => map_text_keys(vkey as _),
        VK_OEM_3 => map_text_keys(vkey as _),
        VK_OEM_4 => map_text_keys(vkey as _),
        VK_OEM_5 => map_text_keys(vkey as _),
        VK_OEM_6 => map_text_keys(vkey as _),
        VK_OEM_7 => map_text_keys(vkey as _),
        /* VK_OEM_8 => Some(VirtualKeyCode::Oem_8), */
        VK_OEM_102 => Some(VirtualKeyCode::OEM102),
        /*VK_PROCESSKEY => Some(VirtualKeyCode::Processkey),
        VK_PACKET => Some(VirtualKeyCode::Packet),
        VK_ATTN => Some(VirtualKeyCode::Attn),
        VK_CRSEL => Some(VirtualKeyCode::Crsel),
        VK_EXSEL => Some(VirtualKeyCode::Exsel),
        VK_EREOF => Some(VirtualKeyCode::Ereof),
        VK_PLAY => Some(VirtualKeyCode::Play),
        VK_ZOOM => Some(VirtualKeyCode::Zoom),
        VK_NONAME => Some(VirtualKeyCode::Noname),
        VK_PA1 => Some(VirtualKeyCode::Pa1),
        VK_OEM_CLEAR => Some(VirtualKeyCode::Oem_clear),*/
        _ => None,
    }
}
// This is needed as windows doesn't properly distinguish
// some virtual key codes for different keyboard layouts
#[cfg(feature = "winit")]
fn map_text_keys(
    win_virtual_key: winapi::shared::minwindef::UINT,
) -> Option<winit::event::VirtualKeyCode> {
    use winit::event::VirtualKeyCode;
    // Ask the active layout which character this virtual key produces, keeping
    // only the low 15 character bits.
    let raw = unsafe {
        winapi::um::winuser::MapVirtualKeyA(win_virtual_key, winapi::um::winuser::MAPVK_VK_TO_CHAR)
    };
    let ch = char::from_u32(raw & 0x7FFF)?;
    match ch {
        ';' => Some(VirtualKeyCode::Semicolon),
        '/' => Some(VirtualKeyCode::Slash),
        '`' => Some(VirtualKeyCode::Grave),
        '[' => Some(VirtualKeyCode::LBracket),
        ']' => Some(VirtualKeyCode::RBracket),
        '\'' => Some(VirtualKeyCode::Apostrophe),
        '\\' => Some(VirtualKeyCode::Backslash),
        _ => None,
    }
}
| 44.57874 | 134 | 0.632959 |
5d39341ef40b603bf55c539d8247f1967b3c8ca2 | 2,993 | //! Limb addition
use super::{Inner, Limb, Wide};
use crate::{Encoding, Wrapping};
use core::ops::{Add, AddAssign};
use subtle::CtOption;
impl Limb {
/// Computes `self + rhs + carry`, returning the result along with the new carry.
#[inline(always)]
pub const fn adc(self, rhs: Limb, carry: Limb) -> (Limb, Limb) {
let a = self.0 as Wide;
let b = rhs.0 as Wide;
let carry = carry.0 as Wide;
let ret = a + b + carry;
(Limb(ret as Inner), Limb((ret >> Self::BIT_SIZE) as Inner))
}
/// Perform wrapping addition, discarding overflow.
#[inline(always)]
pub const fn wrapping_add(&self, rhs: Self) -> Self {
Limb(self.0.wrapping_add(rhs.0))
}
/// Perform checked addition, returning a [`CtOption`] which `is_some` only
/// if the operation did not overflow.
#[inline]
pub fn checked_add(&self, rhs: Self) -> CtOption<Self> {
let (result, carry) = self.adc(rhs, Limb::ZERO);
CtOption::new(result, carry.is_zero())
}
}
// `Add`/`AddAssign` for every owned/borrowed combination of `Wrapping<Limb>`;
// all four `Add` impls simply forward to `Limb::wrapping_add`.
impl Add for Wrapping<Limb> {
    type Output = Self;
    fn add(self, rhs: Self) -> Wrapping<Limb> {
        Wrapping(self.0.wrapping_add(rhs.0))
    }
}
impl Add<&Wrapping<Limb>> for Wrapping<Limb> {
    type Output = Wrapping<Limb>;
    fn add(self, rhs: &Wrapping<Limb>) -> Wrapping<Limb> {
        Wrapping(self.0.wrapping_add(rhs.0))
    }
}
impl Add<Wrapping<Limb>> for &Wrapping<Limb> {
    type Output = Wrapping<Limb>;
    fn add(self, rhs: Wrapping<Limb>) -> Wrapping<Limb> {
        Wrapping(self.0.wrapping_add(rhs.0))
    }
}
impl Add<&Wrapping<Limb>> for &Wrapping<Limb> {
    type Output = Wrapping<Limb>;
    fn add(self, rhs: &Wrapping<Limb>) -> Wrapping<Limb> {
        Wrapping(self.0.wrapping_add(rhs.0))
    }
}
// `AddAssign` is defined in terms of the `Add` impls above.
impl AddAssign for Wrapping<Limb> {
    fn add_assign(&mut self, other: Self) {
        *self = *self + other;
    }
}
impl AddAssign<&Wrapping<Limb>> for Wrapping<Limb> {
    fn add_assign(&mut self, other: &Self) {
        *self = *self + other;
    }
}
#[cfg(test)]
mod tests {
    //! Exercises the carry-in/carry-out and overflow behaviour of limb addition.
    use crate::Limb;
    #[test]
    fn adc_no_carry() {
        let (res, carry) = Limb::ZERO.adc(Limb::ONE, Limb::ZERO);
        assert_eq!(res, Limb::ONE);
        assert_eq!(carry, Limb::ZERO);
    }
    #[test]
    fn adc_with_carry() {
        // MAX + 1 wraps to zero and produces a carry-out of one.
        let (res, carry) = Limb::MAX.adc(Limb::ONE, Limb::ZERO);
        assert_eq!(res, Limb::ZERO);
        assert_eq!(carry, Limb::ONE);
    }
    #[test]
    fn wrapping_add_no_carry() {
        assert_eq!(Limb::ZERO.wrapping_add(Limb::ONE), Limb::ONE);
    }
    #[test]
    fn wrapping_add_with_carry() {
        assert_eq!(Limb::MAX.wrapping_add(Limb::ONE), Limb::ZERO);
    }
    #[test]
    fn checked_add_ok() {
        let result = Limb::ZERO.checked_add(Limb::ONE);
        assert_eq!(result.unwrap(), Limb::ONE);
    }
    #[test]
    fn checked_add_overflow() {
        // Overflow must yield a `CtOption` that is `None`.
        let result = Limb::MAX.checked_add(Limb::ONE);
        assert!(!bool::from(result.is_some()));
    }
}
| 25.364407 | 85 | 0.588039 |
eb2e0d38c6ddcc07fb83316d884bcb19616b9a78 | 6,213 | use crate::codec::decoder::Decoder;
use crate::codec::encoder::Encoder;
use tokio::{
io::{AsyncRead, AsyncWrite},
stream::Stream,
};
use bytes::{Buf, BytesMut};
use futures_core::ready;
use futures_sink::Sink;
use log::trace;
use pin_project_lite::pin_project;
use std::borrow::{Borrow, BorrowMut};
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
pin_project! {
    /// Generic transport/codec pair plus buffering `State` — one of
    /// `ReadFrame`, `WriteFrame` or the combined `RWFrames` below.
    #[derive(Debug)]
    pub(crate) struct FramedImpl<T, U, State> {
        #[pin]
        pub(crate) inner: T,
        pub(crate) state: State,
        pub(crate) codec: U,
    }
}
// Initial buffer capacity; also the high-water mark at which `poll_ready`
// forces a flush before accepting further frames.
const INITIAL_CAPACITY: usize = 8 * 1024;
const BACKPRESSURE_BOUNDARY: usize = INITIAL_CAPACITY;
pub(crate) struct ReadFrame {
    // Set once the underlying reader returns 0 bytes (end of stream).
    pub(crate) eof: bool,
    // True while the buffer may still contain a decodable frame.
    pub(crate) is_readable: bool,
    pub(crate) buffer: BytesMut,
}
pub(crate) struct WriteFrame {
    // Encoded-but-not-yet-written bytes.
    pub(crate) buffer: BytesMut,
}
// Combined state for transports that both read and write.
#[derive(Default)]
pub(crate) struct RWFrames {
    pub(crate) read: ReadFrame,
    pub(crate) write: WriteFrame,
}
// Fresh read state with a preallocated buffer of `INITIAL_CAPACITY`.
impl Default for ReadFrame {
    fn default() -> Self {
        Self {
            eof: false,
            is_readable: false,
            buffer: BytesMut::with_capacity(INITIAL_CAPACITY),
        }
    }
}
// Fresh write state with a preallocated buffer of `INITIAL_CAPACITY`.
impl Default for WriteFrame {
    fn default() -> Self {
        Self {
            buffer: BytesMut::with_capacity(INITIAL_CAPACITY),
        }
    }
}
// Adopt an existing buffer, topping it up to at least the standard capacity.
impl From<BytesMut> for ReadFrame {
    fn from(mut buffer: BytesMut) -> Self {
        let size = buffer.capacity();
        if size < INITIAL_CAPACITY {
            buffer.reserve(INITIAL_CAPACITY - size);
        }
        Self {
            buffer,
            // NOTE(review): readability is keyed off *capacity*, not length, so
            // an empty buffer with spare capacity is treated as readable. That
            // only costs one extra decode attempt — confirm this is intended.
            is_readable: size > 0,
            eof: false,
        }
    }
}
impl From<BytesMut> for WriteFrame {
    fn from(mut buffer: BytesMut) -> Self {
        let size = buffer.capacity();
        if size < INITIAL_CAPACITY {
            buffer.reserve(INITIAL_CAPACITY - size);
        }
        Self { buffer }
    }
}
// `Borrow`/`BorrowMut` projections let the `Stream`/`Sink` impls below accept
// either a single-direction state or the combined `RWFrames`.
impl Borrow<ReadFrame> for RWFrames {
    fn borrow(&self) -> &ReadFrame {
        &self.read
    }
}
impl BorrowMut<ReadFrame> for RWFrames {
    fn borrow_mut(&mut self) -> &mut ReadFrame {
        &mut self.read
    }
}
impl Borrow<WriteFrame> for RWFrames {
    fn borrow(&self) -> &WriteFrame {
        &self.write
    }
}
impl BorrowMut<WriteFrame> for RWFrames {
    fn borrow_mut(&mut self) -> &mut WriteFrame {
        &mut self.write
    }
}
impl<T, U, R> Stream for FramedImpl<T, U, R>
where
    T: AsyncRead,
    U: Decoder,
    R: BorrowMut<ReadFrame>,
{
    type Item = Result<U::Item, U::Error>;
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut pinned = self.project();
        let state: &mut ReadFrame = pinned.state.borrow_mut();
        loop {
            // Repeatedly call `decode` or `decode_eof` as long as it is
            // "readable". Readable is defined as not having returned `None`. If
            // the upstream has returned EOF, and the decoder is no longer
            // readable, it can be assumed that the decoder will never become
            // readable again, at which point the stream is terminated.
            if state.is_readable {
                if state.eof {
                    // After EOF only `decode_eof` is consulted; a `None` frame
                    // here terminates the stream.
                    let frame = pinned.codec.decode_eof(&mut state.buffer)?;
                    return Poll::Ready(frame.map(Ok));
                }
                trace!("attempting to decode a frame");
                if let Some(frame) = pinned.codec.decode(&mut state.buffer)? {
                    trace!("frame decoded from buffer");
                    return Poll::Ready(Some(Ok(frame)));
                }
                state.is_readable = false;
            }
            assert!(!state.eof);
            // Otherwise, try to read more data and try again. Make sure we've
            // got room for at least one byte to read to ensure that we don't
            // get a spurious 0 that looks like EOF
            state.buffer.reserve(1);
            let bytect = match pinned.inner.as_mut().poll_read_buf(cx, &mut state.buffer)? {
                Poll::Ready(ct) => ct,
                Poll::Pending => return Poll::Pending,
            };
            if bytect == 0 {
                state.eof = true;
            }
            state.is_readable = true;
        }
    }
}
impl<T, I, U, W> Sink<I> for FramedImpl<T, U, W>
where
    T: AsyncWrite,
    U: Encoder<I>,
    U::Error: From<io::Error>,
    W: BorrowMut<WriteFrame>,
{
    type Error = U::Error;
    fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Backpressure: once buffered bytes reach the boundary, flush before
        // accepting more frames.
        if self.state.borrow().buffer.len() >= BACKPRESSURE_BOUNDARY {
            self.as_mut().poll_flush(cx)
        } else {
            Poll::Ready(Ok(()))
        }
    }
    fn start_send(self: Pin<&mut Self>, item: I) -> Result<(), Self::Error> {
        // Encode synchronously into the write buffer; actual I/O happens in
        // `poll_flush`.
        let pinned = self.project();
        pinned
            .codec
            .encode(item, &mut pinned.state.borrow_mut().buffer)?;
        Ok(())
    }
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        trace!("flushing framed transport");
        let mut pinned = self.project();
        while !pinned.state.borrow_mut().buffer.is_empty() {
            let WriteFrame { buffer } = pinned.state.borrow_mut();
            trace!("writing; remaining={}", buffer.len());
            let buf = &buffer;
            let n = ready!(pinned.inner.as_mut().poll_write(cx, &buf))?;
            // A zero-length write means the transport can make no progress.
            if n == 0 {
                return Poll::Ready(Err(io::Error::new(
                    io::ErrorKind::WriteZero,
                    "failed to \
                     write frame to transport",
                )
                .into()));
            }
            pinned.state.borrow_mut().buffer.advance(n);
        }
        // Try flushing the underlying IO
        ready!(pinned.inner.poll_flush(cx))?;
        trace!("framed transport flushed");
        Poll::Ready(Ok(()))
    }
    fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Flush any remaining frames, then shut the transport down.
        ready!(self.as_mut().poll_flush(cx))?;
        ready!(self.project().inner.poll_shutdown(cx))?;
        Poll::Ready(Ok(()))
    }
}
| 27.49115 | 100 | 0.549654 |
dbc5e10480f9eb11232c6abce878f8c0c1d459d7 | 4,435 | //! Platform-independent platform abstraction
//!
//! This is the platform-independent portion of the standard library's
//! platform abstraction layer, whereas `std::sys` is the
//! platform-specific portion.
//!
//! The relationship between `std::sys_common`, `std::sys` and the
//! rest of `std` is complex, with dependencies going in all
//! directions: `std` depending on `sys_common`, `sys_common`
//! depending on `sys`, and `sys` depending on `sys_common` and `std`.
//! Ideally `sys_common` would be split into two and the dependencies
//! between them all would form a dag, facilitating the extraction of
//! `std::sys` from the standard library.
#![allow(missing_docs)]
#![allow(missing_debug_implementations)]
#[cfg(test)]
mod tests;
use crate::sync::Once;
use crate::sys;
macro_rules! rtabort {
($($t:tt)*) => (crate::sys_common::util::abort(format_args!($($t)*)))
}
macro_rules! rtassert {
($e:expr) => {
if !$e {
rtabort!(concat!("assertion failed: ", stringify!($e)));
}
};
}
#[allow(unused_macros)] // not used on all platforms
macro_rules! rtunwrap {
($ok:ident, $e:expr) => {
match $e {
$ok(v) => v,
ref err => {
let err = err.as_ref().map(drop); // map Ok/Some which might not be Debug
rtabort!(concat!("unwrap failed: ", stringify!($e), " = {:?}"), err)
}
}
};
}
pub mod alloc;
pub mod at_exit_imp;
pub mod backtrace;
pub mod bytestring;
pub mod condvar;
pub mod fs;
pub mod io;
pub mod mutex;
// `doc` is required because `sys/mod.rs` imports `unix/ext/mod.rs` on Windows
// when generating documentation.
#[cfg(any(doc, not(windows)))]
pub mod os_str_bytes;
pub mod poison;
pub mod process;
pub mod remutex;
pub mod rwlock;
pub mod thread;
pub mod thread_info;
pub mod thread_local_dtor;
pub mod thread_local_key;
pub mod thread_parker;
pub mod util;
pub mod wtf8;
cfg_if::cfg_if! {
if #[cfg(any(target_os = "l4re",
target_os = "hermit",
target_os = "miosix",
feature = "restricted-std",
all(target_arch = "wasm32", not(target_os = "emscripten")),
all(target_vendor = "fortanix", target_env = "sgx")))] {
pub use crate::sys::net;
} else {
pub mod net;
}
}
// common error constructors
/// A trait for viewing representations from std types
#[doc(hidden)]
pub trait AsInner<Inner: ?Sized> {
fn as_inner(&self) -> &Inner;
}
/// A trait for viewing representations from std types
#[doc(hidden)]
pub trait AsInnerMut<Inner: ?Sized> {
fn as_inner_mut(&mut self) -> &mut Inner;
}
/// A trait for extracting representations from std types
#[doc(hidden)]
pub trait IntoInner<Inner> {
fn into_inner(self) -> Inner;
}
/// A trait for creating std types from internal representations
#[doc(hidden)]
pub trait FromInner<Inner> {
fn from_inner(inner: Inner) -> Self;
}
/// Enqueues a procedure to run when the main thread exits.
///
/// Currently these closures are only run once the main *Rust* thread exits.
/// Once the `at_exit` handlers begin running, more may be enqueued, but not
/// infinitely so. Eventually a handler registration will be forced to fail.
///
/// Returns `Ok` if the handler was successfully registered, meaning that the
/// closure will be run once the main thread exits. Returns `Err` to indicate
/// that the closure could not be registered, meaning that it is not scheduled
/// to be run.
pub fn at_exit<F: FnOnce() + Send + 'static>(f: F) -> Result<(), ()> {
if at_exit_imp::push(Box::new(f)) { Ok(()) } else { Err(()) }
}
/// One-time runtime cleanup.
pub fn cleanup() {
static CLEANUP: Once = Once::new();
CLEANUP.call_once(|| unsafe {
sys::args::cleanup();
sys::stack_overflow::cleanup();
at_exit_imp::cleanup();
});
}
// Computes (value*numer)/denom without overflow, as long as both
// (numer*denom) and the overall result fit into i64 (which is the case
// for our time conversions).
#[allow(dead_code)] // not used on all platforms
pub fn mul_div_u64(value: u64, numer: u64, denom: u64) -> u64 {
let q = value / denom;
let r = value % denom;
// Decompose value as (value/denom*denom + value%denom),
// substitute into (value*numer)/denom and simplify.
// r < denom, so (denom*numer) is the upper bound of (r*numer)
q * numer + r * numer / denom
}
| 29.966216 | 89 | 0.643968 |
d53de4d480101b9247eca183deb8c19dff7d145a | 62,560 | use crate::color::IntoLinSrgba;
use crate::draw::primitive::Primitive;
use crate::draw::properties::spatial::{dimension, orientation, position};
use crate::draw::properties::{
ColorScalar, SetColor, SetDimensions, SetFill, SetOrientation, SetPosition, SetStroke,
};
use crate::draw::{self, Draw};
use crate::geom::graph::node;
use crate::geom::{self, Point2, Point3, Vector2, Vector3};
use crate::math::{Angle, BaseFloat, Euler, Quaternion, Rad};
use lyon::path::PathEvent;
use lyon::tessellation::{FillOptions, FillTessellator, LineCap, LineJoin, StrokeOptions};
use std::marker::PhantomData;
/// A **Drawing** in progress.
///
/// **Drawing** provides a way of chaining together method calls describing properties of the thing
/// that we are drawing. **Drawing** ends when the instance is **Drop**ped, at which point the
/// properties of the drawing are inserted into the **Draw** type.
///
/// When a **Drawing** begins, a node is immediately created for the drawing within the **Draw**'s
/// inner **geom::Graph**. This ensures the correct instantiation order is maintained within the
/// graph. As a result, each **Drawing** is associated with a single, unique node. Thus a
/// **Drawing** can be thought of as a way of specifying properties for a node.
#[derive(Debug)]
pub struct Drawing<'a, T, S = geom::scalar::Default>
where
    S: 'a + BaseFloat,
{
    // The `Draw` instance used to create this drawing.
    draw: &'a Draw<S>,
    // The `Index` of the node that was created.
    //
    // This may not be accessed by the user until drawing is complete. This is because the
    // **Drawing** may yet describe further positioning, orientation or scaling and in turn using
    // the index to refer to a node before these properties are set may yield unexpected behaviour.
    index: node::Index,
    // Whether or not the **Drawing** should attempt to finish the drawing on drop.
    finish_on_drop: bool,
    // Zero-sized marker for the node type currently being drawn.
    _ty: PhantomData<T>,
}
/// Some context that may be optionally provided to primitives in the drawing
/// implementation.
///
/// This is particularly useful for paths and meshes.
pub struct DrawingContext<'a, S> {
    /// The intermediary mesh for buffering yet-to-be-drawn paths and meshes.
    pub mesh: &'a mut draw::IntermediaryMesh<S>,
    /// A re-usable fill tessellator for 2D paths.
    pub fill_tessellator: &'a mut FillTessellator,
    /// A re-usable buffer for collecting path events.
    pub path_event_buffer: &'a mut Vec<PathEvent>,
    /// A re-usable buffer for collecting text.
    pub text_buffer: &'a mut String,
    /// Cache for text glyphs.
    pub glyph_cache: &'a mut draw::GlyphCache,
}
/// Construct a new **Drawing** for the node at `index`, which will flush its
/// properties into `draw` when dropped.
pub fn new<'a, T, S>(draw: &'a Draw<S>, index: node::Index) -> Drawing<'a, T, S>
where
    S: BaseFloat,
{
    Drawing {
        draw,
        index,
        finish_on_drop: true,
        _ty: PhantomData,
    }
}
impl<'a, T, S> Drop for Drawing<'a, T, S>
where
    S: BaseFloat,
{
    /// Ensure the drawing is flushed into the parent **Draw** even when the
    /// user never calls `finish` explicitly.
    fn drop(&mut self) {
        if self.finish_on_drop {
            self.finish_inner().expect(
                "the drawing contained a relative edge that would have \
                 caused a cycle within the geometry graph",
            );
        }
    }
}
impl<'a, S> DrawingContext<'a, S> {
    // Build a `DrawingContext` whose fields all borrow (disjointly) from the
    // draw's `IntermediaryState`.
    pub(crate) fn from_intermediary_state(state: &'a mut super::IntermediaryState<S>) -> Self {
        DrawingContext {
            mesh: &mut state.intermediary_mesh,
            fill_tessellator: &mut state.fill_tessellator.0,
            path_event_buffer: &mut state.path_event_buffer,
            text_buffer: &mut state.text_buffer,
            glyph_cache: &mut state.glyph_cache,
        }
    }
}
impl<'a, T, S> Drawing<'a, T, S>
where
    S: BaseFloat,
{
    // Shared between the **finish** method and the **Drawing**'s **Drop** implementation.
    //
    // 1. Create vertices based on node-specific position, points, etc.
    // 2. Insert edges into the geom graph (via `draw::draw_primitive`, which may
    //    fail with `WouldCycle`).
    fn finish_inner(&mut self) -> Result<(), geom::graph::WouldCycle<S>> {
        // `try_borrow_mut` means an aliased borrow silently skips the flush
        // rather than panicking.
        if let Ok(mut state) = self.draw.state.try_borrow_mut() {
            if let Some(prim) = state.drawing.remove(&self.index) {
                let index = self.index;
                draw::draw_primitive(&mut state, index, prim)?;
            }
        }
        Ok(())
    }
    /// Complete the drawing and insert it into the parent **Draw** instance.
    ///
    /// This will be called when the **Drawing** is **Drop**ped if it has not yet been called.
    pub fn finish(mut self) -> Result<(), geom::graph::WouldCycle<S>> {
        self.finish_inner()
    }
    /// Complete the drawing and return its unique identifier.
    ///
    /// **Panics** if adding the edge would cause a cycle in the graph.
    pub fn id(self) -> node::Index {
        let id = self.index;
        self.finish().expect(draw::WOULD_CYCLE);
        id
    }
    // Map the given function onto the primitive stored within **Draw** at `index`.
    //
    // The function is only applied if the node has not yet been **Drawn**.
    fn map_primitive<F, T2>(mut self, map: F) -> Drawing<'a, T2, S>
    where
        F: FnOnce(Primitive<S>) -> Primitive<S>,
        T2: Into<Primitive<S>>,
    {
        if let Ok(mut state) = self.draw.state.try_borrow_mut() {
            if let Some(mut primitive) = state.drawing.remove(&self.index) {
                primitive = map(primitive);
                state.drawing.insert(self.index, primitive);
            }
        }
        // Disarm our own drop-flush; the returned typed drawing takes over
        // responsibility for finishing the node.
        self.finish_on_drop = false;
        let Drawing { draw, index, .. } = self;
        Drawing {
            draw,
            index,
            finish_on_drop: true,
            _ty: PhantomData,
        }
    }
    // The same as `map_primitive` but also passes a mutable reference to the vertex data to the
    // map function. This is useful for types that may have an unknown number of arbitrary
    // vertices.
    fn map_primitive_with_context<F, T2>(mut self, map: F) -> Drawing<'a, T2, S>
    where
        F: FnOnce(Primitive<S>, DrawingContext<S>) -> Primitive<S>,
        T2: Into<Primitive<S>>,
    {
        if let Ok(mut state) = self.draw.state.try_borrow_mut() {
            if let Some(mut primitive) = state.drawing.remove(&self.index) {
                {
                    // Scoped so the intermediary-state borrow ends before we
                    // re-insert into `state.drawing`.
                    let mut intermediary_state = state.intermediary_state.borrow_mut();
                    let ctxt = DrawingContext::from_intermediary_state(&mut *intermediary_state);
                    primitive = map(primitive, ctxt);
                }
                state.drawing.insert(self.index, primitive);
            }
        }
        // See `map_primitive`: hand drop-flush responsibility to the new drawing.
        self.finish_on_drop = false;
        let Drawing { draw, index, .. } = self;
        Drawing {
            draw,
            index,
            finish_on_drop: true,
            _ty: PhantomData,
        }
    }
    /// Apply the given function to the type stored within **Draw**.
    ///
    /// The function is only applied if the node has not yet been **Drawn**.
    ///
    /// **Panics** if the primitive does not contain type **T**.
    pub fn map_ty<F, T2>(self, map: F) -> Drawing<'a, T2, S>
    where
        F: FnOnce(T) -> T2,
        T2: Into<Primitive<S>>,
        Primitive<S>: Into<Option<T>>,
    {
        self.map_primitive(|prim| {
            let maybe_ty: Option<T> = prim.into();
            let ty = maybe_ty.expect("expected `T` but primitive contained different type");
            let ty2 = map(ty);
            ty2.into()
        })
    }
    /// Apply the given function to the type stored within **Draw**.
    ///
    /// The function is only applied if the node has not yet been **Drawn**.
    ///
    /// **Panics** if the primitive does not contain type **T**.
    pub(crate) fn map_ty_with_context<F, T2>(self, map: F) -> Drawing<'a, T2, S>
    where
        F: FnOnce(T, DrawingContext<S>) -> T2,
        T2: Into<Primitive<S>>,
        Primitive<S>: Into<Option<T>>,
    {
        self.map_primitive_with_context(|prim, ctxt| {
            let maybe_ty: Option<T> = prim.into();
            let ty = maybe_ty.expect("expected `T` but primitive contained different type");
            let ty2 = map(ty, ctxt);
            ty2.into()
        })
    }
}
// SetColor implementations: each method forwards to the corresponding
// `SetColor` constructor on the stored primitive via `map_ty`.
impl<'a, T, S> Drawing<'a, T, S>
where
    T: SetColor<ColorScalar> + Into<Primitive<S>>,
    Primitive<S>: Into<Option<T>>,
    S: BaseFloat,
{
    /// Specify a color.
    ///
    /// This method supports any color type that can be converted into RGBA.
    ///
    /// Colors that have no alpha channel will be given an opaque alpha channel value `1.0`.
    pub fn color<C>(self, color: C) -> Self
    where
        C: IntoLinSrgba<ColorScalar>,
    {
        self.map_ty(|ty| SetColor::color(ty, color))
    }
    /// Specify the color via red, green and blue channels.
    pub fn rgb(self, r: ColorScalar, g: ColorScalar, b: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::rgb(ty, r, g, b))
    }
    /// Specify the color via red, green and blue channels as bytes
    pub fn rgb8(self, r: u8, g: u8, b: u8) -> Self {
        self.map_ty(|ty| SetColor::rgb8(ty, r, g, b))
    }
    /// Specify the color via red, green, blue and alpha channels.
    pub fn rgba(self, r: ColorScalar, g: ColorScalar, b: ColorScalar, a: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::rgba(ty, r, g, b, a))
    }
    /// Specify the color via red, green, blue and alpha channels as bytes.
    pub fn rgba8(self, r: u8, g: u8, b: u8, a: u8) -> Self {
        self.map_ty(|ty| SetColor::rgba8(ty, r, g, b, a))
    }
    /// Specify the color via hue, saturation and luminance.
    ///
    /// If you're looking for HSVA or HSBA, use the `hsva` method instead.
    ///
    /// The given hue expects a value between `0.0` and `1.0` where `0.0` is 0 degrees and `1.0` is
    /// 360 degrees (or 2 PI radians).
    ///
    /// See the [wikipedia entry](https://en.wikipedia.org/wiki/HSL_and_HSV) for more details on
    /// this color space.
    pub fn hsl(self, h: ColorScalar, s: ColorScalar, l: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::hsl(ty, h, s, l))
    }
    /// Specify the color via hue, saturation, luminance and an alpha channel.
    ///
    /// If you're looking for HSVA or HSBA, use the `hsva` method instead.
    ///
    /// The given hue expects a value between `0.0` and `1.0` where `0.0` is 0 degrees and `1.0` is
    /// 360 degrees (or 2 PI radians).
    ///
    /// See the [wikipedia entry](https://en.wikipedia.org/wiki/HSL_and_HSV) for more details on
    /// this color space.
    pub fn hsla(self, h: ColorScalar, s: ColorScalar, l: ColorScalar, a: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::hsla(ty, h, s, l, a))
    }
    /// Specify the color via hue, saturation and *value* (brightness).
    ///
    /// This is sometimes also known as "hsb".
    ///
    /// The given hue expects a value between `0.0` and `1.0` where `0.0` is 0 degrees and `1.0` is
    /// 360 degrees (or 2 PI radians).
    ///
    /// See the [wikipedia entry](https://en.wikipedia.org/wiki/HSL_and_HSV) for more details on
    /// this color space.
    pub fn hsv(self, h: ColorScalar, s: ColorScalar, v: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::hsv(ty, h, s, v))
    }
    /// Specify the color via hue, saturation, *value* (brightness) and an alpha channel.
    ///
    /// This is sometimes also known as "hsba".
    ///
    /// The given hue expects a value between `0.0` and `1.0` where `0.0` is 0 degrees and `1.0` is
    /// 360 degrees (or 2 PI radians).
    ///
    /// See the [wikipedia entry](https://en.wikipedia.org/wiki/HSL_and_HSV) for more details on
    /// this color space.
    pub fn hsva(self, h: ColorScalar, s: ColorScalar, v: ColorScalar, a: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::hsva(ty, h, s, v, a))
    }
    /// Specify the color as gray scale
    ///
    /// The given g expects a value between `0.0` and `1.0` where `0.0` is black and `1.0` is white
    pub fn gray(self, g: ColorScalar) -> Self {
        self.map_ty(|ty| SetColor::gray(ty, g))
    }
}
// SetDimensions implementations.
impl<'a, T, S> Drawing<'a, T, S>
where
    T: SetDimensions<S> + Into<Primitive<S>>,
    Primitive<S>: Into<Option<T>>,
    S: BaseFloat,
{
    // Setters for each axis.
    /// Set the length along the x axis.
    pub fn x_dimension(self, x: dimension::Dimension<S>) -> Self {
        self.map_ty(|ty| SetDimensions::x_dimension(ty, x))
    }
    /// Set the length along the y axis.
    pub fn y_dimension(self, y: dimension::Dimension<S>) -> Self {
        self.map_ty(|ty| SetDimensions::y_dimension(ty, y))
    }
    /// Set the length along the z axis.
    pub fn z_dimension(self, z: dimension::Dimension<S>) -> Self {
        self.map_ty(|ty| SetDimensions::z_dimension(ty, z))
    }
    // Absolute dimensions.
    /// Set the absolute width for the node.
    pub fn width(self, w: S) -> Self {
        self.map_ty(|ty| SetDimensions::width(ty, w))
    }
    /// Set the absolute height for the node.
    pub fn height(self, h: S) -> Self {
        self.map_ty(|ty| SetDimensions::height(ty, h))
    }
    /// Set the absolute depth for the node.
    pub fn depth(self, d: S) -> Self {
        self.map_ty(|ty| SetDimensions::depth(ty, d))
    }
    /// Short-hand for the **width** method.
    pub fn w(self, w: S) -> Self {
        self.map_ty(|ty| SetDimensions::w(ty, w))
    }
    /// Short-hand for the **height** method.
    pub fn h(self, h: S) -> Self {
        self.map_ty(|ty| SetDimensions::h(ty, h))
    }
    /// Short-hand for the **depth** method.
    pub fn d(self, d: S) -> Self {
        self.map_ty(|ty| SetDimensions::d(ty, d))
    }
    /// Set the **x** and **y** dimensions for the node.
    pub fn wh(self, v: Vector2<S>) -> Self {
        self.map_ty(|ty| SetDimensions::wh(ty, v))
    }
    /// Set the **x**, **y** and **z** dimensions for the node.
    pub fn whd(self, v: Vector3<S>) -> Self {
        self.map_ty(|ty| SetDimensions::whd(ty, v))
    }
    /// Set the width and height for the node.
    pub fn w_h(self, x: S, y: S) -> Self {
        self.map_ty(|ty| SetDimensions::w_h(ty, x, y))
    }
    /// Set the width, height and depth for the node.
    pub fn w_h_d(self, x: S, y: S, z: S) -> Self {
        self.map_ty(|ty| SetDimensions::w_h_d(ty, x, y, z))
    }
    // Relative dimensions.
    /// Some relative dimension along the **x** axis.
    pub fn x_dimension_relative(self, other: node::Index, x: dimension::Relative<S>) -> Self {
        self.map_ty(|ty| SetDimensions::x_dimension_relative(ty, other, x))
    }
    /// Some relative dimension along the **y** axis.
    pub fn y_dimension_relative(self, other: node::Index, y: dimension::Relative<S>) -> Self {
        self.map_ty(|ty| SetDimensions::y_dimension_relative(ty, other, y))
    }
    /// Some relative dimension along the **z** axis.
    pub fn z_dimension_relative(self, other: node::Index, z: dimension::Relative<S>) -> Self {
        self.map_ty(|ty| SetDimensions::z_dimension_relative(ty, other, z))
    }
    /// Set the x-axis dimension as the width of the node at the given index.
    pub fn w_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetDimensions::w_of(ty, other))
    }
    /// Set the y-axis dimension as the height of the node at the given index.
    pub fn h_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetDimensions::h_of(ty, other))
    }
    /// Set the z-axis dimension as the depth of the node at the given index.
    pub fn d_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetDimensions::d_of(ty, other))
    }
    /// Set the dimensions as the dimensions of the node at the given index.
    pub fn wh_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetDimensions::wh_of(ty, other))
    }
    /// Set the dimensions as the dimensions of the node at the given index.
    pub fn whd_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetDimensions::whd_of(ty, other))
    }
    /// Set the width as the width of the node at the given index padded at both ends by the
    /// given Scalar.
    pub fn padded_w_of(self, other: node::Index, pad: S) -> Self {
        self.map_ty(|ty| SetDimensions::padded_w_of(ty, other, pad))
    }
    /// Set the height as the height of the node at the given index padded at both ends by the
    /// given Scalar.
    pub fn padded_h_of(self, other: node::Index, pad: S) -> Self {
        self.map_ty(|ty| SetDimensions::padded_h_of(ty, other, pad))
    }
    /// Set the depth as the depth of the node at the given index padded at both ends by the
    /// given Scalar.
    pub fn padded_d_of(self, other: node::Index, pad: S) -> Self {
        self.map_ty(|ty| SetDimensions::padded_d_of(ty, other, pad))
    }
    /// Set the dimensions as the dimensions of the node at the given index with each dimension
    /// padded by the given scalar.
    pub fn padded_wh_of(self, other: node::Index, pad: S) -> Self
    where
        S: Clone,
    {
        self.map_ty(|ty| SetDimensions::padded_wh_of(ty, other, pad))
    }
    /// Set the dimensions as the dimensions of the node at the given index with each dimension
    /// padded by the given scalar.
    pub fn padded_whd_of(self, other: node::Index, pad: S) -> Self
    where
        S: Clone,
    {
        self.map_ty(|ty| SetDimensions::padded_whd_of(ty, other, pad))
    }
    /// Set the width as the width of the node at the given index multiplied by the given **scale**
    /// Scalar value.
    pub fn scaled_w_of(self, other: node::Index, scale: S) -> Self {
        self.map_ty(|ty| SetDimensions::scaled_w_of(ty, other, scale))
    }
    /// Set the height as the height of the node at the given index multiplied by the given **scale**
    /// Scalar value.
    pub fn scaled_h_of(self, other: node::Index, scale: S) -> Self {
        self.map_ty(|ty| SetDimensions::scaled_h_of(ty, other, scale))
    }
    /// Set the depth as the depth of the node at the given index multiplied by the given **scale**
    /// Scalar value.
    pub fn scaled_d_of(self, other: node::Index, scale: S) -> Self {
        self.map_ty(|ty| SetDimensions::scaled_d_of(ty, other, scale))
    }
    /// Set the dimensions as the dimensions of the node at the given index multiplied by the given
    /// **scale** Scalar value.
    pub fn scaled_wh_of(self, other: node::Index, scale: S) -> Self
    where
        S: Clone,
    {
        self.map_ty(|ty| SetDimensions::scaled_wh_of(ty, other, scale))
    }
    /// Set the dimensions as the dimensions of the node at the given index multiplied by the given
    /// **scale** Scalar value.
    pub fn scaled_whd_of(self, other: node::Index, scale: S) -> Self
    where
        S: Clone,
    {
        self.map_ty(|ty| SetDimensions::scaled_whd_of(ty, other, scale))
    }
}
// SetPosition methods.
impl<'a, T, S> Drawing<'a, T, S>
where
    T: SetPosition<S> + Into<Primitive<S>>,
    Primitive<S>: Into<Option<T>>,
    S: BaseFloat,
{
    /// Build with the given **Position** along the *x* axis.
    pub fn x_position(self, position: position::Position<S>) -> Self {
        self.map_ty(|ty| SetPosition::x_position(ty, position))
    }
    /// Build with the given **Position** along the *y* axis.
    pub fn y_position(self, position: position::Position<S>) -> Self {
        self.map_ty(|ty| SetPosition::y_position(ty, position))
    }
    /// Build with the given **Position** along the *z* axis.
    pub fn z_position(self, position: position::Position<S>) -> Self {
        self.map_ty(|ty| SetPosition::z_position(ty, position))
    }
    // Absolute positioning.
    /// Build with the given **Absolute** **Position** along the *x* axis.
    pub fn x(self, x: S) -> Self {
        self.map_ty(|ty| SetPosition::x(ty, x))
    }
    /// Build with the given **Absolute** **Position** along the *y* axis.
    pub fn y(self, y: S) -> Self {
        self.map_ty(|ty| SetPosition::y(ty, y))
    }
    /// Build with the given **Absolute** **Position** along the *z* axis.
    pub fn z(self, z: S) -> Self {
        self.map_ty(|ty| SetPosition::z(ty, z))
    }
    /// Set the **Position** with some two-dimensional point.
    pub fn xy(self, p: Point2<S>) -> Self {
        self.map_ty(|ty| SetPosition::xy(ty, p))
    }
    /// Set the **Position** with some three-dimensional point.
    pub fn xyz(self, p: Point3<S>) -> Self {
        self.map_ty(|ty| SetPosition::xyz(ty, p))
    }
    /// Set the **Position** with *x* *y* coordinates.
    pub fn x_y(self, x: S, y: S) -> Self {
        self.map_ty(|ty| SetPosition::x_y(ty, x, y))
    }
    /// Set the **Position** with *x* *y* *z* coordinates.
    pub fn x_y_z(self, x: S, y: S, z: S) -> Self {
        self.map_ty(|ty| SetPosition::x_y_z(ty, x, y, z))
    }
    // Relative positioning.
    /// Set the *x* **Position** **Relative** to the previous node.
    pub fn x_position_relative(self, x: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::x_position_relative(ty, x))
    }
    /// Set the *y* **Position** **Relative** to the previous node.
    pub fn y_position_relative(self, y: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::y_position_relative(ty, y))
    }
    /// Set the *z* **Position** **Relative** to the previous node.
    pub fn z_position_relative(self, z: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::z_position_relative(ty, z))
    }
    /// Set the *x* and *y* **Position**s **Relative** to the previous node.
    pub fn x_y_position_relative(self, x: position::Relative<S>, y: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::x_y_position_relative(ty, x, y))
    }
    /// Set the *x*, *y* and *z* **Position**s **Relative** to the previous node.
    pub fn x_y_z_position_relative(
        self,
        x: position::Relative<S>,
        y: position::Relative<S>,
        z: position::Relative<S>,
    ) -> Self {
        self.map_ty(|ty| SetPosition::x_y_z_position_relative(ty, x, y, z))
    }
    /// Set the *x* **Position** **Relative** to the given node.
    pub fn x_position_relative_to(self, other: node::Index, x: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::x_position_relative_to(ty, other, x))
    }
    /// Set the *y* **Position** **Relative** to the given node.
    pub fn y_position_relative_to(self, other: node::Index, y: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::y_position_relative_to(ty, other, y))
    }
    /// Set the *z* **Position** **Relative** to the given node.
    pub fn z_position_relative_to(self, other: node::Index, z: position::Relative<S>) -> Self {
        self.map_ty(|ty| SetPosition::z_position_relative_to(ty, other, z))
    }
    /// Set the *x* and *y* **Position**s **Relative** to the given node.
    pub fn x_y_position_relative_to(
        self,
        other: node::Index,
        x: position::Relative<S>,
        y: position::Relative<S>,
    ) -> Self {
        self.map_ty(|ty| SetPosition::x_y_position_relative_to(ty, other, x, y))
    }
    /// Set the *x*, *y* and *z* **Position**s **Relative** to the given node.
    pub fn x_y_z_position_relative_to(
        self,
        other: node::Index,
        x: position::Relative<S>,
        y: position::Relative<S>,
        z: position::Relative<S>,
    ) -> Self {
        self.map_ty(|ty| SetPosition::x_y_z_position_relative_to(ty, other, x, y, z))
    }
    // Relative `Scalar` positioning.
    /// Set the **Position** as a **Scalar** along the *x* axis **Relative** to the middle of
    /// previous node.
    pub fn x_relative(self, x: S) -> Self {
        self.map_ty(|ty| SetPosition::x_relative(ty, x))
    }
    /// Set the **Position** as a **Scalar** along the *y* axis **Relative** to the middle of
    /// previous node.
    pub fn y_relative(self, y: S) -> Self {
        self.map_ty(|ty| SetPosition::y_relative(ty, y))
    }
    /// Set the **Position** as a **Scalar** along the *z* axis **Relative** to the middle of
    /// previous node.
    pub fn z_relative(self, z: S) -> Self {
        self.map_ty(|ty| SetPosition::z_relative(ty, z))
    }
    /// Set the **Position** as a **Point** **Relative** to the middle of the previous node.
    pub fn xy_relative(self, p: Point2<S>) -> Self {
        self.map_ty(|ty| SetPosition::xy_relative(ty, p))
    }
    /// Set the **Position** as a **Point** **Relative** to the middle of the previous node.
    pub fn xyz_relative(self, p: Point3<S>) -> Self {
        self.map_ty(|ty| SetPosition::xyz_relative(ty, p))
    }
    /// Set the **Position** as **Scalar**s along the *x* and *y* axes **Relative** to the middle
    /// of the previous node.
    pub fn x_y_relative(self, x: S, y: S) -> Self {
        self.map_ty(|ty| SetPosition::x_y_relative(ty, x, y))
    }
    /// Set the **Position** as **Scalar**s along the *x*, *y* and *z* axes **Relative** to the
    /// middle of the previous node.
    pub fn x_y_z_relative(self, x: S, y: S, z: S) -> Self {
        self.map_ty(|ty| SetPosition::x_y_z_relative(ty, x, y, z))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn x_relative_to(self, other: node::Index, x: S) -> Self {
        self.map_ty(|ty| SetPosition::x_relative_to(ty, other, x))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn y_relative_to(self, other: node::Index, y: S) -> Self {
        self.map_ty(|ty| SetPosition::y_relative_to(ty, other, y))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn z_relative_to(self, other: node::Index, z: S) -> Self {
        self.map_ty(|ty| SetPosition::z_relative_to(ty, other, z))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn xy_relative_to(self, other: node::Index, p: Point2<S>) -> Self {
        self.map_ty(|ty| SetPosition::xy_relative_to(ty, other, p))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn xyz_relative_to(self, other: node::Index, p: Point3<S>) -> Self {
        self.map_ty(|ty| SetPosition::xyz_relative_to(ty, other, p))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn x_y_relative_to(self, other: node::Index, x: S, y: S) -> Self {
        self.map_ty(|ty| SetPosition::x_y_relative_to(ty, other, x, y))
    }
    /// Set the position relative to the node with the given node::Index.
    pub fn x_y_z_relative_to(self, other: node::Index, x: S, y: S, z: S) -> Self {
        self.map_ty(|ty| SetPosition::x_y_z_relative_to(ty, other, x, y, z))
    }
    // Directional positioning.
    /// Build with the **Position** along the *x* axis as some distance from another node.
    pub fn x_direction(self, direction: position::Direction, x: S) -> Self {
        self.map_ty(|ty| SetPosition::x_direction(ty, direction, x))
    }
    /// Build with the **Position** along the *y* axis as some distance from another node.
    pub fn y_direction(self, direction: position::Direction, y: S) -> Self {
        self.map_ty(|ty| SetPosition::y_direction(ty, direction, y))
    }
    /// Build with the **Position** along the *z* axis as some distance from another node.
    pub fn z_direction(self, direction: position::Direction, z: S) -> Self {
        self.map_ty(|ty| SetPosition::z_direction(ty, direction, z))
    }
    /// Build with the **Position** as some distance to the left of another node.
    pub fn left(self, x: S) -> Self {
        self.map_ty(|ty| SetPosition::left(ty, x))
    }
    /// Build with the **Position** as some distance to the right of another node.
    pub fn right(self, x: S) -> Self {
        self.map_ty(|ty| SetPosition::right(ty, x))
    }
    /// Build with the **Position** as some distance below another node.
    pub fn down(self, y: S) -> Self {
        self.map_ty(|ty| SetPosition::down(ty, y))
    }
    /// Build with the **Position** as some distance above another node.
    pub fn up(self, y: S) -> Self {
        self.map_ty(|ty| SetPosition::up(ty, y))
    }
    /// Build with the **Position** as some distance in front of another node.
    pub fn backwards(self, z: S) -> Self {
        self.map_ty(|ty| SetPosition::backwards(ty, z))
    }
    /// Build with the **Position** as some distance behind another node.
    pub fn forwards(self, z: S) -> Self {
        self.map_ty(|ty| SetPosition::forwards(ty, z))
    }
    /// Build with the **Position** along the *x* axis as some distance from the given node.
    pub fn x_direction_from(
        self,
        other: node::Index,
        direction: position::Direction,
        x: S,
    ) -> Self {
        self.map_ty(|ty| SetPosition::x_direction_from(ty, other, direction, x))
    }
    /// Build with the **Position** along the *y* axis as some distance from the given node.
    pub fn y_direction_from(
        self,
        other: node::Index,
        direction: position::Direction,
        y: S,
    ) -> Self {
        self.map_ty(|ty| SetPosition::y_direction_from(ty, other, direction, y))
    }
    /// Build with the **Position** along the *z* axis as some distance from the given node.
    pub fn z_direction_from(
        self,
        other: node::Index,
        direction: position::Direction,
        z: S,
    ) -> Self {
        self.map_ty(|ty| SetPosition::z_direction_from(ty, other, direction, z))
    }
    /// Build with the **Position** as some distance to the left of the given node.
    pub fn left_from(self, other: node::Index, x: S) -> Self {
        self.map_ty(|ty| SetPosition::left_from(ty, other, x))
    }
    /// Build with the **Position** as some distance to the right of the given node.
    pub fn right_from(self, other: node::Index, x: S) -> Self {
        self.map_ty(|ty| SetPosition::right_from(ty, other, x))
    }
    /// Build with the **Position** as some distance below the given node.
    pub fn down_from(self, other: node::Index, y: S) -> Self {
        self.map_ty(|ty| SetPosition::down_from(ty, other, y))
    }
    /// Build with the **Position** as some distance above the given node.
    pub fn up_from(self, other: node::Index, y: S) -> Self {
        self.map_ty(|ty| SetPosition::up_from(ty, other, y))
    }
    /// Build with the **Position** as some distance in front of the given node.
    pub fn backwards_from(self, other: node::Index, z: S) -> Self {
        self.map_ty(|ty| SetPosition::backwards_from(ty, other, z))
    }
    /// Build with the **Position** as some distance behind the given node.
    pub fn forwards_from(self, other: node::Index, z: S) -> Self {
        self.map_ty(|ty| SetPosition::forwards_from(ty, other, z))
    }
    // Alignment positioning.
    /// Align the **Position** of the node along the *x* axis.
    pub fn x_align(self, align: position::Align<S>) -> Self {
        self.map_ty(|ty| SetPosition::x_align(ty, align))
    }
    /// Align the **Position** of the node along the *y* axis.
    pub fn y_align(self, align: position::Align<S>) -> Self {
        self.map_ty(|ty| SetPosition::y_align(ty, align))
    }
    /// Align the **Position** of the node along the *z* axis.
    pub fn z_align(self, align: position::Align<S>) -> Self {
        self.map_ty(|ty| SetPosition::z_align(ty, align))
    }
    /// Align the position to the left.
    pub fn align_left(self) -> Self {
        self.map_ty(|ty| SetPosition::align_left(ty))
    }
    /// Align the position to the left.
    pub fn align_left_with_margin(self, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_left_with_margin(ty, margin))
    }
    /// Align the position to the middle.
    pub fn align_middle_x(self) -> Self {
        self.map_ty(|ty| SetPosition::align_middle_x(ty))
    }
    /// Align the position to the right.
    pub fn align_right(self) -> Self {
        self.map_ty(|ty| SetPosition::align_right(ty))
    }
    /// Align the position to the right.
    pub fn align_right_with_margin(self, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_right_with_margin(ty, margin))
    }
    /// Align the position to the bottom.
    pub fn align_bottom(self) -> Self {
        self.map_ty(|ty| SetPosition::align_bottom(ty))
    }
    /// Align the position to the bottom.
    pub fn align_bottom_with_margin(self, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_bottom_with_margin(ty, margin))
    }
    /// Align the position to the middle.
    pub fn align_middle_y(self) -> Self {
        self.map_ty(|ty| SetPosition::align_middle_y(ty))
    }
    /// Align the position to the top.
    pub fn align_top(self) -> Self {
        self.map_ty(|ty| SetPosition::align_top(ty))
    }
    /// Align the position to the top.
    pub fn align_top_with_margin(self, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_top_with_margin(ty, margin))
    }
    /// Align the position to the front.
    pub fn align_front(self) -> Self {
        self.map_ty(|ty| SetPosition::align_front(ty))
    }
    /// Align the position to the front.
    pub fn align_front_with_margin(self, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_front_with_margin(ty, margin))
    }
    /// Align the position to the middle.
    pub fn align_middle_z(self) -> Self {
        self.map_ty(|ty| SetPosition::align_middle_z(ty))
    }
    /// Align the position to the back.
    pub fn align_back(self) -> Self {
        self.map_ty(|ty| SetPosition::align_back(ty))
    }
    /// Align the position to the back.
    pub fn align_back_with_margin(self, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_back_with_margin(ty, margin))
    }
    /// Align the **Position** of the node with the given node along the *x* axis.
    pub fn x_align_to(self, other: node::Index, align: position::Align<S>) -> Self {
        self.map_ty(|ty| SetPosition::x_align_to(ty, other, align))
    }
    /// Align the **Position** of the node with the given node along the *y* axis.
    pub fn y_align_to(self, other: node::Index, align: position::Align<S>) -> Self {
        self.map_ty(|ty| SetPosition::y_align_to(ty, other, align))
    }
    /// Align the **Position** of the node with the given node along the *z* axis.
    pub fn z_align_to(self, other: node::Index, align: position::Align<S>) -> Self {
        self.map_ty(|ty| SetPosition::z_align_to(ty, other, align))
    }
    /// Align the position to the left.
    pub fn align_left_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_left_of(ty, other))
    }
    /// Align the position to the left.
    pub fn align_left_of_with_margin(self, other: node::Index, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_left_of_with_margin(ty, other, margin))
    }
    /// Align the position to the middle.
    pub fn align_middle_x_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_middle_x_of(ty, other))
    }
    /// Align the position to the right.
    pub fn align_right_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_right_of(ty, other))
    }
    /// Align the position to the right.
    pub fn align_right_of_with_margin(self, other: node::Index, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_right_of_with_margin(ty, other, margin))
    }
    /// Align the position to the bottom.
    pub fn align_bottom_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_bottom_of(ty, other))
    }
    /// Align the position to the bottom.
    pub fn align_bottom_of_with_margin(self, other: node::Index, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_bottom_of_with_margin(ty, other, margin))
    }
    /// Align the position to the middle.
    pub fn align_middle_y_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_middle_y_of(ty, other))
    }
    /// Align the position to the top.
    pub fn align_top_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_top_of(ty, other))
    }
    /// Align the position to the top.
    pub fn align_top_of_with_margin(self, other: node::Index, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_top_of_with_margin(ty, other, margin))
    }
    /// Align the position to the front.
    pub fn align_front_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_front_of(ty, other))
    }
    /// Align the position to the front.
    pub fn align_front_of_with_margin(self, other: node::Index, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_front_of_with_margin(ty, other, margin))
    }
    /// Align the position to the middle.
    pub fn align_middle_z_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_middle_z_of(ty, other))
    }
    /// Align the position to the back.
    pub fn align_back_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::align_back_of(ty, other))
    }
    /// Align the position to the back.
    pub fn align_back_of_with_margin(self, other: node::Index, margin: S) -> Self {
        self.map_ty(|ty| SetPosition::align_back_of_with_margin(ty, other, margin))
    }
    // Alignment combinations.
    /// Align the node to the middle of the last node.
    pub fn middle(self) -> Self {
        self.map_ty(|ty| SetPosition::middle(ty))
    }
    /// Align the node to the bottom left of the last node.
    pub fn bottom_left(self) -> Self {
        self.map_ty(|ty| SetPosition::bottom_left(ty))
    }
    /// Align the node to the middle left of the last node.
    pub fn mid_left(self) -> Self {
        self.map_ty(|ty| SetPosition::mid_left(ty))
    }
    /// Align the node to the top left of the last node.
    pub fn top_left(self) -> Self {
        self.map_ty(|ty| SetPosition::top_left(ty))
    }
    /// Align the node to the middle top of the last node.
    pub fn mid_top(self) -> Self {
        self.map_ty(|ty| SetPosition::mid_top(ty))
    }
    /// Align the node to the top right of the last node.
    pub fn top_right(self) -> Self {
        self.map_ty(|ty| SetPosition::top_right(ty))
    }
    /// Align the node to the middle right of the last node.
    pub fn mid_right(self) -> Self {
        self.map_ty(|ty| SetPosition::mid_right(ty))
    }
    /// Align the node to the bottom right of the last node.
    pub fn bottom_right(self) -> Self {
        self.map_ty(|ty| SetPosition::bottom_right(ty))
    }
    /// Align the node to the middle bottom of the last node.
    pub fn mid_bottom(self) -> Self {
        self.map_ty(|ty| SetPosition::mid_bottom(ty))
    }
    /// Align the node in the middle of the given Node.
    pub fn middle_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::middle_of(ty, other))
    }
    /// Align the node to the bottom left of the given Node.
    pub fn bottom_left_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::bottom_left_of(ty, other))
    }
    /// Align the node to the middle left of the given Node.
    pub fn mid_left_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::mid_left_of(ty, other))
    }
    /// Align the node to the top left of the given Node.
    pub fn top_left_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::top_left_of(ty, other))
    }
    /// Align the node to the middle top of the given Node.
    pub fn mid_top_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::mid_top_of(ty, other))
    }
    /// Align the node to the top right of the given Node.
    pub fn top_right_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::top_right_of(ty, other))
    }
    /// Align the node to the middle right of the given Node.
    pub fn mid_right_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::mid_right_of(ty, other))
    }
    /// Align the node to the bottom right of the given Node.
    pub fn bottom_right_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::bottom_right_of(ty, other))
    }
    /// Align the node to the middle bottom of the given Node.
    pub fn mid_bottom_of(self, other: node::Index) -> Self {
        self.map_ty(|ty| SetPosition::mid_bottom_of(ty, other))
    }
}
// SetOrientation methods.
impl<'a, T, S> Drawing<'a, T, S>
where
T: SetOrientation<S> + Into<Primitive<S>>,
Primitive<S>: Into<Option<T>>,
S: BaseFloat,
{
    // "Look at" targets.
    /// Describe orientation via the vector that points to the given target.
    pub fn look_at(self, target: orientation::LookAt<S>) -> Self {
        self.map_ty(|ty| SetOrientation::look_at(ty, target))
    }
    /// Describe orientation via the vector that points to the given node.
    pub fn look_at_node(self, node: node::Index) -> Self {
        self.map_ty(|ty| SetOrientation::look_at_node(ty, node))
    }
    /// Describe orientation via the vector that points to the given point.
    pub fn look_at_point(self, point: Point3<S>) -> Self {
        self.map_ty(|ty| SetOrientation::look_at_point(ty, point))
    }
    // Absolute orientation.
    /// Build with the given **Orientation** along the *x* axis.
    pub fn x_orientation(self, orientation: orientation::Orientation<S>) -> Self {
        self.map_ty(|ty| SetOrientation::x_orientation(ty, orientation))
    }
    /// Build with the given **Orientation** along the *y* axis.
    pub fn y_orientation(self, orientation: orientation::Orientation<S>) -> Self {
        self.map_ty(|ty| SetOrientation::y_orientation(ty, orientation))
    }
    /// Build with the given **Orientation** along the *z* axis.
    pub fn z_orientation(self, orientation: orientation::Orientation<S>) -> Self {
        self.map_ty(|ty| SetOrientation::z_orientation(ty, orientation))
    }
    /// Specify the orientation around the *x* axis as an absolute value in radians.
    pub fn x_radians(self, x: S) -> Self {
        self.map_ty(|ty| SetOrientation::x_radians(ty, x))
    }
    /// Specify the orientation around the *y* axis as an absolute value in radians.
    pub fn y_radians(self, y: S) -> Self {
        self.map_ty(|ty| SetOrientation::y_radians(ty, y))
    }
/// Specify the orientation around the *z* axis as an absolute value in radians.
pub fn z_radians(self, z: S) -> Self {
self.map_ty(|ty| SetOrientation::y_radians(ty, z))
}
    /// Specify the orientation around the *x* axis as an absolute value in degrees.
    pub fn x_degrees(self, x: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::x_degrees(ty, x))
    }
    /// Specify the orientation around the *y* axis as an absolute value in degrees.
    pub fn y_degrees(self, y: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::y_degrees(ty, y))
    }
    /// Specify the orientation around the *z* axis as an absolute value in degrees.
    pub fn z_degrees(self, z: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::z_degrees(ty, z))
    }
    /// Specify the orientation around the *x* axis as a number of turns around the axis.
    pub fn x_turns(self, x: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::x_turns(ty, x))
    }
    /// Specify the orientation around the *y* axis as a number of turns around the axis.
    pub fn y_turns(self, y: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::y_turns(ty, y))
    }
    /// Specify the orientation around the *z* axis as a number of turns around the axis.
    pub fn z_turns(self, z: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::z_turns(ty, z))
    }
    /// Specify the orientation along each axis with the given **Vector** of radians.
    ///
    /// This has the same effect as calling `self.x_radians(v.x).y_radians(v.y).z_radians(v.z)`.
    pub fn radians(self, v: Vector3<S>) -> Self {
        self.map_ty(|ty| SetOrientation::radians(ty, v))
    }
    /// Specify the orientation along each axis with the given **Vector** of degrees.
    ///
    /// This has the same effect as calling `self.x_degrees(v.x).y_degrees(v.y).z_degrees(v.z)`.
    pub fn degrees(self, v: Vector3<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::degrees(ty, v))
    }
    /// Specify the orientation along each axis with the given **Vector** of "turns".
    ///
    /// This has the same effect as calling `self.x_turns(v.x).y_turns(v.y).z_turns(v.z)`.
    pub fn turns(self, v: Vector3<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::turns(ty, v))
    }
    /// Specify the orientation with the given **Euler**.
    ///
    /// The euler can be specified in either radians (via **Rad**) or degrees (via **Deg**).
    pub fn euler<A>(self, e: Euler<A>) -> Self
    where
        S: BaseFloat,
        A: Angle + Into<Rad<S>>,
    {
        self.map_ty(|ty| SetOrientation::euler(ty, e))
    }
    /// Specify the orientation with the given **Quaternion**.
    pub fn quaternion(self, q: Quaternion<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::quaternion(ty, q))
    }
    // Relative orientation.
    /// Specify the orientation around the *x* axis as a relative value in radians.
    pub fn x_radians_relative(self, x: S) -> Self {
        self.map_ty(|ty| SetOrientation::x_radians_relative(ty, x))
    }
    /// Specify the orientation around the *y* axis as a relative value in radians.
    pub fn y_radians_relative(self, y: S) -> Self {
        self.map_ty(|ty| SetOrientation::y_radians_relative(ty, y))
    }
    /// Specify the orientation around the *z* axis as a relative value in radians.
    pub fn z_radians_relative(self, z: S) -> Self {
        self.map_ty(|ty| SetOrientation::z_radians_relative(ty, z))
    }
    /// Specify the orientation around the *x* axis as a relative value in radians.
    pub fn x_radians_relative_to(self, other: node::Index, x: S) -> Self {
        self.map_ty(|ty| SetOrientation::x_radians_relative_to(ty, other, x))
    }
    /// Specify the orientation around the *y* axis as a relative value in radians.
    pub fn y_radians_relative_to(self, other: node::Index, y: S) -> Self {
        self.map_ty(|ty| SetOrientation::y_radians_relative_to(ty, other, y))
    }
    /// Specify the orientation around the *z* axis as a relative value in radians.
    pub fn z_radians_relative_to(self, other: node::Index, z: S) -> Self {
        self.map_ty(|ty| SetOrientation::z_radians_relative_to(ty, other, z))
    }
    /// Specify the orientation around the *x* axis as a relative value in degrees.
    pub fn x_degrees_relative(self, x: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::x_degrees_relative(ty, x))
    }
    /// Specify the orientation around the *y* axis as a relative value in degrees.
    pub fn y_degrees_relative(self, y: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::y_degrees_relative(ty, y))
    }
    /// Specify the orientation around the *z* axis as a relative value in degrees.
    pub fn z_degrees_relative(self, z: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::z_degrees_relative(ty, z))
    }
    /// Specify the orientation around the *x* axis as a relative value in degrees.
    pub fn x_degrees_relative_to(self, other: node::Index, x: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::x_degrees_relative_to(ty, other, x))
    }
    /// Specify the orientation around the *y* axis as a relative value in degrees.
    pub fn y_degrees_relative_to(self, other: node::Index, y: S) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::y_degrees_relative_to(ty, other, y))
    }
/// Specify the orientation around the *z* axis as a relative value in degrees.
pub fn z_degrees_relative_to(self, other: node::Index, z: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::z_degrees_relative_to(ty, other, z))
}
/// Specify the relative orientation around the *x* axis as a number of turns around the axis.
pub fn x_turns_relative(self, x: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::x_turns_relative(ty, x))
}
/// Specify the relative orientation around the *y* axis as a number of turns around the axis.
pub fn y_turns_relative(self, y: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::y_turns_relative(ty, y))
}
/// Specify the relative orientation around the *z* axis as a number of turns around the axis.
pub fn z_turns_relative(self, z: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::z_turns_relative(ty, z))
}
/// Specify the relative orientation around the *x* axis as a number of turns around the axis.
pub fn x_turns_relative_to(self, other: node::Index, x: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::x_turns_relative_to(ty, other, x))
}
/// Specify the relative orientation around the *y* axis as a number of turns around the axis.
pub fn y_turns_relative_to(self, other: node::Index, y: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::y_turns_relative_to(ty, other, y))
}
/// Specify the relative orientation around the *z* axis as a number of turns around the axis.
pub fn z_turns_relative_to(self, other: node::Index, z: S) -> Self
where
S: BaseFloat,
{
self.map_ty(|ty| SetOrientation::z_turns_relative_to(ty, other, z))
}
    /// Specify a relative orientation along each axis with the given **Vector** of radians.
    ///
    /// This has the same effect as the following:
    ///
    /// ```ignore
    /// self.x_radians_relative(v.x)
    ///     .y_radians_relative(v.y)
    ///     .z_radians_relative(v.z)
    /// ```
    pub fn radians_relative(self, v: Vector3<S>) -> Self {
        self.map_ty(|ty| SetOrientation::radians_relative(ty, v))
    }
    /// Specify a relative orientation along each axis with the given **Vector** of radians.
    ///
    /// This has the same effect as the following:
    ///
    /// ```ignore
    /// self.x_radians_relative_to(other, v.x)
    ///     .y_radians_relative_to(other, v.y)
    ///     .z_radians_relative_to(other, v.z)
    /// ```
    pub fn radians_relative_to(self, other: node::Index, v: Vector3<S>) -> Self {
        self.map_ty(|ty| SetOrientation::radians_relative_to(ty, other, v))
    }
    /// Specify a relative orientation along each axis with the given **Vector** of degrees.
    ///
    /// This has the same effect as the following:
    ///
    /// ```ignore
    /// self.x_degrees_relative(v.x)
    ///     .y_degrees_relative(v.y)
    ///     .z_degrees_relative(v.z)
    /// ```
    pub fn degrees_relative(self, v: Vector3<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::degrees_relative(ty, v))
    }
    /// Specify a relative orientation along each axis with the given **Vector** of degrees.
    ///
    /// This has the same effect as the following:
    ///
    /// ```ignore
    /// self.x_degrees_relative_to(other, v.x)
    ///     .y_degrees_relative_to(other, v.y)
    ///     .z_degrees_relative_to(other, v.z)
    /// ```
    pub fn degrees_relative_to(self, other: node::Index, v: Vector3<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::degrees_relative_to(ty, other, v))
    }
    /// Specify a relative orientation along each axis with the given **Vector** of "turns".
    ///
    /// This has the same effect as the following:
    ///
    /// ```ignore
    /// self.x_turns_relative(v.x)
    ///     .y_turns_relative(v.y)
    ///     .z_turns_relative(v.z)
    /// ```
    pub fn turns_relative(self, v: Vector3<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::turns_relative(ty, v))
    }
    /// Specify a relative orientation along each axis with the given **Vector** of "turns".
    ///
    /// This has the same effect as the following:
    ///
    /// ```ignore
    /// self.x_turns_relative_to(other, v.x)
    ///     .y_turns_relative_to(other, v.y)
    ///     .z_turns_relative_to(other, v.z)
    /// ```
    pub fn turns_relative_to(self, other: node::Index, v: Vector3<S>) -> Self
    where
        S: BaseFloat,
    {
        self.map_ty(|ty| SetOrientation::turns_relative_to(ty, other, v))
    }
    /// Specify a relative orientation with the given **Euler**.
    ///
    /// The euler can be specified in either radians (via **Rad**) or degrees (via **Deg**).
    pub fn euler_relative<A>(self, e: Euler<A>) -> Self
    where
        S: BaseFloat,
        A: Angle + Into<Rad<S>>,
    {
        self.map_ty(|ty| SetOrientation::euler_relative(ty, e))
    }
    /// Specify a relative orientation with the given **Euler**, relative to the node at the
    /// given index.
    ///
    /// The euler can be specified in either radians (via **Rad**) or degrees (via **Deg**).
    pub fn euler_relative_to<A>(self, other: node::Index, e: Euler<A>) -> Self
    where
        S: BaseFloat,
        A: Angle + Into<Rad<S>>,
    {
        self.map_ty(|ty| SetOrientation::euler_relative_to(ty, other, e))
    }
    // Higher level methods — aviation-style aliases for the radian setters above.
    /// Specify the "pitch" of the orientation in radians.
    ///
    /// This has the same effect as calling `x_radians`.
    pub fn pitch(self, pitch: S) -> Self {
        self.map_ty(|ty| SetOrientation::pitch(ty, pitch))
    }
    /// Specify the "yaw" of the orientation in radians.
    ///
    /// This has the same effect as calling `y_radians`.
    pub fn yaw(self, yaw: S) -> Self {
        self.map_ty(|ty| SetOrientation::yaw(ty, yaw))
    }
    /// Specify the "roll" of the orientation in radians.
    ///
    /// This has the same effect as calling `z_radians`.
    pub fn roll(self, roll: S) -> Self {
        self.map_ty(|ty| SetOrientation::roll(ty, roll))
    }
    /// Assuming we're looking at a 2D plane, positive values cause a clockwise rotation where the
    /// given value is specified in radians.
    ///
    /// This is equivalent to calling the `z_radians` or `roll` methods.
    pub fn rotate(self, radians: S) -> Self {
        self.map_ty(|ty| SetOrientation::rotate(ty, radians))
    }
}
// SetFill methods
impl<'a, T, S> Drawing<'a, T, S>
where
    T: SetFill + Into<Primitive<S>>,
    Primitive<S>: Into<Option<T>>,
    S: BaseFloat,
{
    /// Provide the complete set of options used for fill tessellation.
    pub fn fill_opts(self, opts: FillOptions) -> Self {
        self.map_ty(|prim| prim.fill_opts(opts))
    }

    /// The maximum distance allowed between the path and the approximation that is built
    /// from it.
    pub fn fill_tolerance(self, tolerance: f32) -> Self {
        self.map_ty(|prim| prim.fill_tolerance(tolerance))
    }

    /// Set the rule that decides which regions count as the interior of the shape.
    ///
    /// Currently, only the `EvenOdd` rule is implemented.
    pub fn fill_rule(self, rule: lyon::tessellation::FillRule) -> Self {
        self.map_ty(|prim| prim.fill_rule(rule))
    }

    /// A fast path that skips some costly work when the path is known to be free of
    /// self-intersections.
    ///
    /// Do not set this to `true` if the path may have intersecting edges else the
    /// tessellator may panic or produce incorrect results. In doubt, do not change the
    /// default value.
    ///
    /// Default value: `false`.
    pub fn assume_no_intersections(self, b: bool) -> Self {
        self.map_ty(|prim| prim.assume_no_intersections(b))
    }
}
// SetStroke methods
impl<'a, T, S> Drawing<'a, T, S>
where
    T: SetStroke + Into<Primitive<S>>,
    Primitive<S>: Into<Option<T>>,
    S: BaseFloat,
{
    /// Set the line cap used at the start of each sub-path, as specified by the SVG spec.
    pub fn start_cap(self, cap: LineCap) -> Self {
        self.map_ty(|prim| prim.start_cap(cap))
    }

    /// Set the line cap used at the end of each sub-path, as specified by the SVG spec.
    pub fn end_cap(self, cap: LineCap) -> Self {
        self.map_ty(|prim| prim.end_cap(cap))
    }

    /// Set both the start and end line caps at once, as specified by the SVG spec.
    pub fn caps(self, cap: LineCap) -> Self {
        self.map_ty(|prim| prim.caps(cap))
    }

    /// Use a "butt" cap at the start of each sub-path: the stroke stops exactly at the
    /// endpoint, so a zero-length sub-path produces no stroke at all.
    pub fn start_cap_butt(self) -> Self {
        self.map_ty(|prim| prim.start_cap_butt())
    }

    /// Use a "square" cap at the start of each sub-path: the stroke is extended past the
    /// endpoint by a rectangle as wide as the stroke and half the stroke width long. A
    /// zero-length sub-path therefore renders as a square with side length equal to the
    /// stroke width, centered at the sub-path's point.
    pub fn start_cap_square(self) -> Self {
        self.map_ty(|prim| prim.start_cap_square())
    }

    /// Use a "round" cap at the start of each sub-path: the stroke is extended past the
    /// endpoint by a half circle with a radius equal to the stroke width. A zero-length
    /// sub-path therefore renders as a full circle centered at the sub-path's point.
    pub fn start_cap_round(self) -> Self {
        self.map_ty(|prim| prim.start_cap_round())
    }

    /// Use a "butt" cap at the end of each sub-path: the stroke stops exactly at the
    /// endpoint, so a zero-length sub-path produces no stroke at all.
    pub fn end_cap_butt(self) -> Self {
        self.map_ty(|prim| prim.end_cap_butt())
    }

    /// Use a "square" cap at the end of each sub-path: the stroke is extended past the
    /// endpoint by a rectangle as wide as the stroke and half the stroke width long. A
    /// zero-length sub-path therefore renders as a square with side length equal to the
    /// stroke width, centered at the sub-path's point.
    pub fn end_cap_square(self) -> Self {
        self.map_ty(|prim| prim.end_cap_square())
    }

    /// Use a "round" cap at the end of each sub-path: the stroke is extended past the
    /// endpoint by a half circle with a radius equal to the stroke width. A zero-length
    /// sub-path therefore renders as a full circle centered at the sub-path's point.
    pub fn end_cap_round(self) -> Self {
        self.map_ty(|prim| prim.end_cap_round())
    }

    /// Use a "butt" cap at both ends of each sub-path: the stroke stops exactly at the
    /// endpoints, so a zero-length sub-path produces no stroke at all.
    pub fn caps_butt(self) -> Self {
        self.map_ty(|prim| prim.caps_butt())
    }

    /// Use a "square" cap at both ends of each sub-path: the stroke is extended past each
    /// endpoint by a rectangle as wide as the stroke and half the stroke width long. A
    /// zero-length sub-path therefore renders as a square with side length equal to the
    /// stroke width, centered at the sub-path's point.
    pub fn caps_square(self) -> Self {
        self.map_ty(|prim| prim.caps_square())
    }

    /// Use a "round" cap at both ends of each sub-path: the stroke is extended past each
    /// endpoint by a half circle with a radius equal to the stroke width. A zero-length
    /// sub-path therefore renders as a full circle centered at the sub-path's point.
    pub fn caps_round(self) -> Self {
        self.map_ty(|prim| prim.caps_round())
    }

    /// Set how consecutive path segments are joined at their shared vertex, matching the
    /// SVG spec.
    ///
    /// Default value is `MiterClip`.
    pub fn join(self, join: LineJoin) -> Self {
        self.map_ty(|prim| prim.join(join))
    }

    /// Join path segments with a sharp corner.
    pub fn join_miter(self) -> Self {
        self.map_ty(|prim| prim.join_miter())
    }

    /// Like `join_miter`, except that when the miter limit is exceeded the miter is
    /// clipped at a miter length equal to the miter limit value multiplied by the stroke
    /// width.
    pub fn join_miter_clip(self) -> Self {
        self.map_ty(|prim| prim.join_miter_clip())
    }

    /// Join path segments with a rounded corner.
    pub fn join_round(self) -> Self {
        self.map_ty(|prim| prim.join_round())
    }

    /// Join path segments with a bevel: a triangle that fills the area between the two
    /// stroked segments.
    pub fn join_bevel(self) -> Self {
        self.map_ty(|prim| prim.join_bevel())
    }

    /// The total stroke_weight (aka width) of the line.
    pub fn stroke_weight(self, stroke_weight: f32) -> Self {
        self.map_ty(|prim| prim.stroke_weight(stroke_weight))
    }

    /// The limit beyond which miter joins are clipped, as described in the SVG spec.
    ///
    /// Must be greater than or equal to `1.0`.
    pub fn miter_limit(self, limit: f32) -> Self {
        self.map_ty(|prim| prim.miter_limit(limit))
    }

    /// The maximum distance allowed between the path and the approximation that is built
    /// from it.
    pub fn stroke_tolerance(self, tolerance: f32) -> Self {
        self.map_ty(|prim| prim.stroke_tolerance(tolerance))
    }

    /// Provide the complete set of stroke options used for tessellating the path.
    pub fn stroke_opts(self, opts: StrokeOptions) -> Self {
        self.map_ty(|prim| prim.stroke_opts(opts))
    }
}
| 36.542056 | 101 | 0.620157 |
397de70fca816769e805cc5048513cf1065ce1ec | 9,148 |
/*
AUTO GENERATED FILE
DO NOT EDIT
codegen/elder_dragon_quicktype.py
*/
use serde::{Serialize, Deserialize};
extern crate serde_json;
use self::serde_json::Error;
pub fn serialize(json: &str) -> Result<QuinnJson,Error>{
serde_json::from_str(json)
}
use std::collections::HashMap;
// NOTE(review): these definitions are auto-generated (see the file header) — prefer
// regenerating via codegen/elder_dragon_quicktype.py over editing them by hand.
// Top-level shape of the champion JSON document.
#[derive(Serialize, Deserialize)]
pub struct QuinnJson {
    #[serde(rename = "type")]
    quinn_json_type: GroupEnum,
    format: Format,
    version: Version,
    data: Data,
}
// Wrapper for the JSON's `data` object, which keys the champion record by name.
#[derive(Serialize, Deserialize)]
pub struct Data {
    #[serde(rename = "Quinn")]
    quinn: Quinn,
}
// Full champion record: identity, flavour text, tips, stats, spells and item sets.
#[derive(Serialize, Deserialize)]
pub struct Quinn {
    id: ChampionEnum,
    key: String,
    name: String,
    title: String,
    image: Image,
    skins: Vec<Skin>,
    lore: String,
    blurb: String,
    allytips: Vec<String>,
    enemytips: Vec<String>,
    tags: Vec<Tag>,
    partype: String,
    info: Info,
    stats: HashMap<String, f64>,
    spells: Vec<Spell>,
    passive: Passive,
    recommended: Vec<Recommended>,
}
// Sprite-sheet reference: the stand-alone image (`full`), the sheet it lives on
// (`sprite`/`group`) and the x/y/w/h region within that sheet.
#[derive(Serialize, Deserialize)]
pub struct Image {
    full: Full,
    sprite: Sprite,
    group: GroupEnum,
    x: i64,
    y: i64,
    w: i64,
    h: i64,
}
// Coarse 0-10 style ratings for the champion — exact scale not visible here.
#[derive(Serialize, Deserialize)]
pub struct Info {
    attack: i64,
    defense: i64,
    magic: i64,
    difficulty: i64,
}
// The champion's passive ability.
#[derive(Serialize, Deserialize)]
pub struct Passive {
    name: String,
    description: String,
    image: Image,
}
// One recommended item page for a given map/mode combination.
#[derive(Serialize, Deserialize)]
pub struct Recommended {
    champion: ChampionEnum,
    title: Title,
    map: Map,
    mode: Mode,
    #[serde(rename = "type")]
    recommended_type: RecommendedType,
    #[serde(rename = "customTag")]
    custom_tag: String,
    #[serde(rename = "extensionPage")]
    extension_page: bool,
    #[serde(rename = "customPanel")]
    custom_panel: Option<serde_json::Value>,
    blocks: Vec<Block>,
    sortrank: Option<i64>,
    #[serde(rename = "useObviousCheckmark")]
    use_obvious_checkmark: Option<bool>,
}
// One section of a recommended item page, with visibility conditions (summoner level
// range, summoner-spell filters, cross-section dependencies) and the items it shows.
#[derive(Serialize, Deserialize)]
pub struct Block {
    #[serde(rename = "type")]
    block_type: BlockType,
    #[serde(rename = "recMath")]
    rec_math: bool,
    #[serde(rename = "recSteps")]
    rec_steps: bool,
    #[serde(rename = "minSummonerLevel")]
    min_summoner_level: i64,
    #[serde(rename = "maxSummonerLevel")]
    max_summoner_level: i64,
    #[serde(rename = "showIfSummonerSpell")]
    show_if_summoner_spell: IfSummonerSpell,
    #[serde(rename = "hideIfSummonerSpell")]
    hide_if_summoner_spell: IfSummonerSpell,
    items: Vec<Item>,
    #[serde(rename = "appendAfterSection")]
    append_after_section: Option<String>,
    #[serde(rename = "visibleWithAllOf")]
    visible_with_all_of: Option<Vec<String>>,
    #[serde(rename = "hiddenWithAnyOf")]
    hidden_with_any_of: Option<Vec<String>>,
}
// An item entry within a block: item id plus how many to show.
#[derive(Serialize, Deserialize)]
pub struct Item {
    id: String,
    count: i64,
    #[serde(rename = "hideCount")]
    hide_count: bool,
}
// One champion skin.
#[derive(Serialize, Deserialize)]
pub struct Skin {
    id: String,
    num: i64,
    name: String,
    chromas: bool,
}
// One castable ability, with per-rank numbers both as vectors (`cooldown`, `cost`,
// `effect`, `range`) and as pre-formatted "burn" strings for display.
#[derive(Serialize, Deserialize)]
pub struct Spell {
    id: SpellId,
    name: String,
    description: String,
    tooltip: String,
    leveltip: Leveltip,
    maxrank: i64,
    cooldown: Vec<f64>,
    #[serde(rename = "cooldownBurn")]
    cooldown_burn: String,
    cost: Vec<i64>,
    #[serde(rename = "costBurn")]
    cost_burn: String,
    datavalues: Datavalues,
    effect: Vec<Option<Vec<f64>>>,
    #[serde(rename = "effectBurn")]
    effect_burn: Vec<Option<String>>,
    vars: Vec<Var>,
    #[serde(rename = "costType")]
    cost_type: CostType,
    maxammo: String,
    range: Vec<i64>,
    #[serde(rename = "rangeBurn")]
    range_burn: String,
    image: Image,
    resource: Resource,
}
// Empty placeholder matching the JSON's always-empty `datavalues` object.
#[derive(Serialize, Deserialize)]
pub struct Datavalues {
}
// Labels and effect descriptions shown when ranking up a spell.
#[derive(Serialize, Deserialize)]
pub struct Leveltip {
    label: Vec<String>,
    effect: Vec<String>,
}
// A scaling variable used in spell tooltips: which stat it scales from (`link`) and by
// how much (`coeff`).
#[derive(Serialize, Deserialize)]
pub struct Var {
    link: Link,
    coeff: f64,
    key: Key,
}
// Single-variant enum: this generated file covers only the champion "Quinn".
#[derive(Serialize, Deserialize)]
pub enum ChampionEnum {
    Quinn,
}
// Every stand-alone image filename that appears in this document.
#[derive(Serialize, Deserialize)]
pub enum Full {
    #[serde(rename = "QuinnE.png")]
    QuinnEPng,
    #[serde(rename = "Quinn_Passive.png")]
    QuinnPassivePng,
    #[serde(rename = "Quinn.png")]
    QuinnPng,
    #[serde(rename = "QuinnQ.png")]
    QuinnQPng,
    #[serde(rename = "QuinnR.png")]
    QuinnRPng,
    #[serde(rename = "QuinnW.png")]
    QuinnWPng,
}
// Category of an image/document entry.
#[derive(Serialize, Deserialize)]
pub enum GroupEnum {
    #[serde(rename = "champion")]
    Champion,
    #[serde(rename = "passive")]
    Passive,
    #[serde(rename = "spell")]
    Spell,
}
// Sprite-sheet filenames referenced by `Image::sprite`.
#[derive(Serialize, Deserialize)]
pub enum Sprite {
    #[serde(rename = "champion2.png")]
    Champion2Png,
    #[serde(rename = "passive2.png")]
    Passive2Png,
    #[serde(rename = "spell9.png")]
    Spell9Png,
}
// Section kinds that occur in recommended item pages.
#[derive(Serialize, Deserialize)]
pub enum BlockType {
    #[serde(rename = "beginner_advanced")]
    BeginnerAdvanced,
    #[serde(rename = "beginner_legendaryitem")]
    BeginnerLegendaryitem,
    #[serde(rename = "beginner_morelegendaryitems")]
    BeginnerMorelegendaryitems,
    #[serde(rename = "beginner_movementspeed")]
    BeginnerMovementspeed,
    #[serde(rename = "beginner_starter")]
    BeginnerStarter,
    #[serde(rename = "consumables")]
    Consumables,
    #[serde(rename = "defensive")]
    Defensive,
    #[serde(rename = "early")]
    Early,
    #[serde(rename = "earlyjungle")]
    Earlyjungle,
    #[serde(rename = "essential")]
    Essential,
    #[serde(rename = "essentialjungle")]
    Essentialjungle,
    KingPoroSnax,
    #[serde(rename = "offensive")]
    Offensive,
    #[serde(rename = "selective")]
    Selective,
    #[serde(rename = "siegeDefense")]
    SiegeDefense,
    #[serde(rename = "siegeOffense")]
    SiegeOffense,
    #[serde(rename = "situational")]
    Situational,
    #[serde(rename = "starting")]
    Starting,
    #[serde(rename = "startingjungle")]
    Startingjungle,
}
// Summoner-spell filter for a block; the empty string means "no filter".
#[derive(Serialize, Deserialize)]
pub enum IfSummonerSpell {
    #[serde(rename = "")]
    Empty,
    SummonerSmite,
}
// Map identifiers used by recommended pages.
#[derive(Serialize, Deserialize)]
pub enum Map {
    CrystalScar,
    #[serde(rename = "HA")]
    Ha,
    #[serde(rename = "SL")]
    Sl,
    #[serde(rename = "SR")]
    Sr,
    #[serde(rename = "TT")]
    Tt,
}
// Game-mode identifiers used by recommended pages.
#[derive(Serialize, Deserialize)]
pub enum Mode {
    #[serde(rename = "ARAM")]
    Aram,
    #[serde(rename = "CLASSIC")]
    Classic,
    #[serde(rename = "FIRSTBLOOD")]
    Firstblood,
    #[serde(rename = "GAMEMODEX")]
    Gamemodex,
    #[serde(rename = "INTRO")]
    Intro,
    #[serde(rename = "KINGPORO")]
    Kingporo,
    #[serde(rename = "ODIN")]
    Odin,
    #[serde(rename = "SIEGE")]
    Siege,
}
// Source of a recommended page; only "riot" occurs in this document.
#[derive(Serialize, Deserialize)]
pub enum RecommendedType {
    #[serde(rename = "riot")]
    Riot,
}
// Titles of the recommended item pages present in this document.
#[derive(Serialize, Deserialize)]
pub enum Title {
    Beginner,
    #[serde(rename = "QuinnARAM")]
    QuinnAram,
    #[serde(rename = "QuinnCS")]
    QuinnCs,
    #[serde(rename = "QuinnFIRSTBLOOD")]
    QuinnFirstblood,
    #[serde(rename = "QuinnKINGPORO")]
    QuinnKingporo,
    #[serde(rename = "QuinnSIEGE")]
    QuinnSiege,
    #[serde(rename = "QuinnSL")]
    QuinnSl,
    #[serde(rename = "QuinnSR")]
    QuinnSr,
    #[serde(rename = "QuinnTT")]
    QuinnTt,
}
// Templated display strings for a spell's cost; placeholders are substituted client-side.
#[derive(Serialize, Deserialize)]
pub enum CostType {
    #[serde(rename = " {{ abilityresourcename }}")]
    Abilityresourcename,
    #[serde(rename = ": {{ cost }}")]
    Cost,
    #[serde(rename = "{{ abilityresourcename }}")]
    CostTypeAbilityresourcename,
    #[serde(rename = " {{ cost }}")]
    CostTypeCost,
    #[serde(rename = " de {{ abilityresourcename }}")]
    DeAbilityresourcename,
    #[serde(rename = " pkt. ({{ abilityresourcename }})")]
    PktAbilityresourcename,
}
// Spell identifiers for Quinn's four abilities.
#[derive(Serialize, Deserialize)]
pub enum SpellId {
    QuinnE,
    QuinnQ,
    QuinnR,
    QuinnW,
}
// Templated display strings for a spell's resource line.
#[derive(Serialize, Deserialize)]
pub enum Resource {
    #[serde(rename = "{{ abilityresourcename }}: {{ cost }}")]
    AbilityresourcenameCost,
    #[serde(rename = "{{ cost }} {{ abilityresourcename }}")]
    CostAbilityresourcename,
    #[serde(rename = "{{ cost }} de {{ abilityresourcename }}")]
    CostDeAbilityresourcename,
    #[serde(rename = "{{ cost }} pkt. ({{ abilityresourcename }})")]
    CostPktAbilityresourcename,
    #[serde(rename = "{{ abilityresourcename }} {{ cost }}")]
    ResourceAbilityresourcenameCost,
    #[serde(rename = "{{ cost }}{{ abilityresourcename }}")]
    ResourceCostAbilityresourcename,
}
// Tooltip variable keys; only "a1" occurs in this document.
#[derive(Serialize, Deserialize)]
pub enum Key {
    #[serde(rename = "a1")]
    A1,
}
// Stats that tooltip variables scale from.
#[derive(Serialize, Deserialize)]
pub enum Link {
    #[serde(rename = "bonusattackdamage")]
    Bonusattackdamage,
    #[serde(rename = "spelldamage")]
    Spelldamage,
}
// Champion role tags.
#[derive(Serialize, Deserialize)]
pub enum Tag {
    Assassin,
    Marksman,
}
// Data-dragon document format marker.
#[derive(Serialize, Deserialize)]
pub enum Format {
    #[serde(rename = "standAloneComplex")]
    StandAloneComplex,
}
// Patch version this document was generated from.
#[derive(Serialize, Deserialize)]
pub enum Version {
    #[serde(rename = "9.23.1")]
    The9231,
}
| 21.780952 | 68 | 0.629318 |
7af2f8f583ed195b1e0a4451446dcad0e2ef532e | 60,471 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod action_groups {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
    /// Fetch a single action group (HTTP GET on
    /// `.../providers/microsoft.insights/actionGroups/{action_group_name}`).
    ///
    /// On HTTP 200 the body is deserialized as an [`ActionGroupResource`]; any other
    /// status is deserialized as an `ErrorResponse` and returned via
    /// `get::Error::DefaultResponse`. (AutoRust-generated; edit the generator, not this.)
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        action_group_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<ActionGroupResource, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/actionGroups/{}",
            &operation_config.base_path, subscription_id, resource_group_name, action_group_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth is attached only when a token credential was configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ActionGroupResource = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for the [`get`] operation.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Create a new action group or replace an existing one (HTTP PUT).
    ///
    /// Returns `Response::Ok200` (updated) or `Response::Created201` (created); any other
    /// status is deserialized as an `ErrorResponse` and returned via
    /// `create_or_update::Error::DefaultResponse`. (AutoRust-generated.)
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        action_group_name: &str,
        action_group: &ActionGroupResource,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/actionGroups/{}",
            &operation_config.base_path, subscription_id, resource_group_name, action_group_name
        );
        let mut req_builder = client.put(uri_str);
        // Bearer auth is attached only when a token credential was configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(action_group);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ActionGroupResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ActionGroupResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for the [`create_or_update`] operation.
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(ActionGroupResource),
            Created201(ActionGroupResource),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Patch an existing action group (HTTP PATCH) with the given
    /// `ActionGroupPatchBody`, returning the updated resource on HTTP 200.
    ///
    /// NOTE(review): unlike the sibling operations in this module, the generator emitted
    /// `subscription_id` as the *first* parameter here — callers should take care with
    /// positional arguments. (AutoRust-generated.)
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        action_group_name: &str,
        action_group_patch: &ActionGroupPatchBody,
    ) -> std::result::Result<ActionGroupResource, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/actionGroups/{}",
            &operation_config.base_path, subscription_id, resource_group_name, action_group_name
        );
        let mut req_builder = client.patch(uri_str);
        // Bearer auth is attached only when a token credential was configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(action_group_patch);
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: ActionGroupResource = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for the [`update`] operation.
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Delete an action group (HTTP DELETE).
    ///
    /// Returns `Response::Ok200` when the resource was deleted and
    /// `Response::NoContent204` when there was nothing to delete; any other status is
    /// deserialized as an `ErrorResponse`. (AutoRust-generated.)
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        action_group_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/actionGroups/{}",
            &operation_config.base_path, subscription_id, resource_group_name, action_group_name
        );
        let mut req_builder = client.delete(uri_str);
        // Bearer auth is attached only when a token credential was configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for the [`delete`] operation.
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// List all action groups in the given subscription (HTTP GET on
    /// `/subscriptions/{id}/providers/microsoft.insights/actionGroups`).
    ///
    /// Returns an `ActionGroupList` on HTTP 200; any other status is deserialized as an
    /// `ErrorResponse`. (AutoRust-generated.)
    pub async fn list_by_subscription_id(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<ActionGroupList, list_by_subscription_id::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/microsoft.insights/actionGroups",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth is attached only when a token credential was configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_subscription_id::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_subscription_id::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_subscription_id::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription_id::ResponseBytesError)?;
                let rsp_value: ActionGroupList =
                    serde_json::from_slice(&body).context(list_by_subscription_id::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription_id::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_subscription_id::DeserializeError { body })?;
                list_by_subscription_id::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for the [`list_by_subscription_id`] operation.
    pub mod list_by_subscription_id {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<ActionGroupList, list_by_resource_group::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/actionGroups",
&operation_config.base_path, subscription_id, resource_group_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_resource_group::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
let rsp_value: ActionGroupList =
serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
list_by_resource_group::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_resource_group {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn enable_receiver(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
action_group_name: &str,
enable_request: &EnableRequest,
subscription_id: &str,
) -> std::result::Result<(), enable_receiver::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.insights/actionGroups/{}/subscribe",
&operation_config.base_path, subscription_id, resource_group_name, action_group_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(enable_receiver::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(enable_request);
let req = req_builder.build().context(enable_receiver::BuildRequestError)?;
let rsp = client.execute(req).await.context(enable_receiver::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(()),
StatusCode::CONFLICT => enable_receiver::Conflict409 {}.fail(),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(enable_receiver::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(enable_receiver::DeserializeError { body })?;
enable_receiver::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod enable_receiver {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
Conflict409 {},
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
}
pub mod metric_alerts {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<MetricAlertResourceCollection, list_by_subscription::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Insights/metricAlerts",
&operation_config.base_path, subscription_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_subscription::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_subscription::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_subscription::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription::ResponseBytesError)?;
let rsp_value: MetricAlertResourceCollection =
serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })?;
list_by_subscription::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_subscription {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<MetricAlertResourceCollection, list_by_resource_group::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts",
&operation_config.base_path, subscription_id, resource_group_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_resource_group::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
let rsp_value: MetricAlertResourceCollection =
serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
list_by_resource_group::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_resource_group {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
) -> std::result::Result<MetricAlertResource, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: MetricAlertResource = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
get::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
parameters: &MetricAlertResource,
) -> std::result::Result<MetricAlertResource, create_or_update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(create_or_update::BuildRequestError)?;
let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: MetricAlertResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
create_or_update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
parameters: &MetricAlertResourcePatch,
) -> std::result::Result<MetricAlertResource, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: MetricAlertResource = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(delete::Response::Ok200),
StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
delete::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod metric_alerts_status {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
) -> std::result::Result<MetricAlertStatusCollection, list::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts/{}/status",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: MetricAlertStatusCollection = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
list::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn list_by_name(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
status_name: &str,
) -> std::result::Result<MetricAlertStatusCollection, list_by_name::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Insights/metricAlerts/{}/status/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name, status_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_name::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_name::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_name::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_name::ResponseBytesError)?;
let rsp_value: MetricAlertStatusCollection =
serde_json::from_slice(&body).context(list_by_name::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_name::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_name::DeserializeError { body })?;
list_by_name::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_name {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
}
pub mod alert_rules {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
rule_name: &str,
subscription_id: &str,
) -> std::result::Result<AlertRuleResource, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.insights/alertrules/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: AlertRuleResource = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
get::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
rule_name: &str,
parameters: &AlertRuleResource,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.insights/alertrules/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(create_or_update::BuildRequestError)?;
let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: AlertRuleResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
StatusCode::CREATED => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: AlertRuleResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
create_or_update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(AlertRuleResource),
Created201(AlertRuleResource),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
rule_name: &str,
alert_rules_resource: &AlertRuleResourcePatch,
) -> std::result::Result<update::Response, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.insights/alertrules/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(alert_rules_resource);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: AlertRuleResource = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(update::Response::Ok200(rsp_value))
}
StatusCode::CREATED => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: AlertRuleResource = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(update::Response::Created201(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(AlertRuleResource),
Created201(AlertRuleResource),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
rule_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.insights/alertrules/{}",
&operation_config.base_path, subscription_id, resource_group_name, rule_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
StatusCode::OK => Ok(delete::Response::Ok200),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
delete::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
NoContent204,
Ok200,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
/// Lists the classic alert rules in the given resource group.
///
/// Issues a `GET` against
/// `{base_path}/subscriptions/{subscription_id}/resourcegroups/{resource_group_name}/providers/microsoft.insights/alertrules`
/// with the configured `api-version` query parameter, attaching a bearer
/// token when a credential is present in `operation_config`.
///
/// # Errors
/// Returns `list_by_resource_group::Error` when the token cannot be
/// acquired, the request cannot be built or executed, the body cannot be
/// read or deserialized, or the service answers with an unexpected status.
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<AlertRuleResourceCollection, list_by_resource_group::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.insights/alertrules",
        &operation_config.base_path, subscription_id, resource_group_name
    );
    let mut req_builder = client.get(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_resource_group::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
            let rsp_value: AlertRuleResourceCollection =
                serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            // Any other status is surfaced verbatim together with the raw body.
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
            // Field-init shorthand (was the redundant `body: body`).
            list_by_resource_group::UnexpectedResponse { status_code, body }.fail()
        }
    }
}
pub mod list_by_resource_group {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Errors that can occur while authenticating, building, sending,
    /// or decoding the `list_by_resource_group` request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        /// The service returned a non-200 status; the raw body is kept.
        UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
        /// The HTTP request could not be constructed.
        BuildRequestError { source: reqwest::Error },
        /// The HTTP request failed in transit.
        ExecuteRequestError { source: reqwest::Error },
        /// The response body could not be read.
        ResponseBytesError { source: reqwest::Error },
        /// The response body was not valid JSON for the expected model.
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        /// Acquiring the bearer token from the configured credential failed.
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Lists the classic alert rules across the whole subscription.
///
/// Issues a `GET` against
/// `{base_path}/subscriptions/{subscription_id}/providers/microsoft.insights/alertrules`
/// with the configured `api-version` query parameter, attaching a bearer
/// token when a credential is present in `operation_config`.
///
/// # Errors
/// Returns `list_by_subscription::Error` when the token cannot be acquired,
/// the request cannot be built or executed, the body cannot be read or
/// deserialized, or the service answers with an unexpected status.
pub async fn list_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<AlertRuleResourceCollection, list_by_subscription::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/providers/microsoft.insights/alertrules",
        &operation_config.base_path, subscription_id
    );
    let mut req_builder = client.get(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_subscription::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(list_by_subscription::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_by_subscription::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription::ResponseBytesError)?;
            let rsp_value: AlertRuleResourceCollection =
                serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            // Any other status is surfaced verbatim together with the raw body.
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription::ResponseBytesError)?;
            // Field-init shorthand (was the redundant `body: body`).
            list_by_subscription::UnexpectedResponse { status_code, body }.fail()
        }
    }
}
pub mod list_by_subscription {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Errors that can occur while authenticating, building, sending,
    /// or decoding the `list_by_subscription` request.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        /// The service returned a non-200 status; the raw body is kept.
        UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
        /// The HTTP request could not be constructed.
        BuildRequestError { source: reqwest::Error },
        /// The HTTP request failed in transit.
        ExecuteRequestError { source: reqwest::Error },
        /// The response body could not be read.
        ResponseBytesError { source: reqwest::Error },
        /// The response body was not valid JSON for the expected model.
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        /// Acquiring the bearer token from the configured credential failed.
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
}
| 42.948153 | 138 | 0.573911 |
87c5629cc238fbb539688197001524d601eb0395 | 2,127 | use proc_macro2::TokenStream;
use quote::quote;
use rtic_syntax::{analyze::Analysis, ast::App};
use crate::codegen::util;
/// Generates `local` variables and local resource proxies
///
/// I.e. the `static` variables and theirs proxies.
/// Generates the `static` storage cells backing `#[local]` resources.
///
/// Returns the generated items together with a (currently empty) token
/// stream reserved for a future `resources` module.
pub fn codegen(
    app: &App,
    _analysis: &Analysis,
) -> (
    // Hidden `static` cells behind the local-resource proxies.
    Vec<TokenStream>,
    // Placeholder for a future `resources` module.
    TokenStream,
) {
    // Cells for every resource declared in the `#[local]` struct. They start
    // uninitialized (`MaybeUninit`), to be filled in later by generated code.
    let struct_locals = app.local_resources.iter().map(|(name, res)| {
        let cfgs = &res.cfgs;
        let ty = &res.ty;
        let attrs = &res.attrs;
        let mangled_name = util::static_local_resource_ident(name);
        quote!(
            #[allow(non_camel_case_types)]
            #[allow(non_upper_case_globals)]
            #[doc(hidden)]
            #(#attrs)*
            #(#cfgs)*
            static #mangled_name: rtic::RacyCell<core::mem::MaybeUninit<#ty>> =
                rtic::RacyCell::new(core::mem::MaybeUninit::uninit());
        )
    });
    // Task-declared `local = [NAME: TY = EXPR]` resources carry an
    // initializer expression, so their cells are constructed up front.
    let declared_locals =
        app.declared_local_resources()
            .into_iter()
            .map(|(task_name, resource_name, task_local)| {
                let cfgs = &task_local.cfgs;
                let ty = &task_local.ty;
                let expr = &task_local.expr;
                let attrs = &task_local.attrs;
                let mangled_name =
                    util::declared_static_local_resource_ident(resource_name, &task_name);
                quote!(
                    #[allow(non_camel_case_types)]
                    #[allow(non_upper_case_globals)]
                    #[doc(hidden)]
                    #(#attrs)*
                    #(#cfgs)*
                    static #mangled_name: rtic::RacyCell<#ty> = rtic::RacyCell::new(#expr);
                )
            });
    (
        struct_locals.chain(declared_locals).collect(),
        TokenStream::new(),
    )
}
| 31.279412 | 97 | 0.565115 |
398fd0bf1d2bb0d472c3c5344eb46c2801098c5e | 38,236 | // The macros break if the references are taken out, for some reason.
#![allow(clippy::op_ref)]
/*
* This file provides:
*
* NOTE: Work in progress https://github.com/dimforge/nalgebra/issues/487
*
* (Dual Quaternion)
*
* Index<usize>
* IndexMut<usize>
*
* (Assignment Operators)
*
* -DualQuaternion
* DualQuaternion × Scalar
* DualQuaternion × DualQuaternion
* DualQuaternion + DualQuaternion
* DualQuaternion - DualQuaternion
* DualQuaternion × UnitDualQuaternion
* DualQuaternion ÷ UnitDualQuaternion
* -UnitDualQuaternion
* UnitDualQuaternion × DualQuaternion
* UnitDualQuaternion × UnitDualQuaternion
* UnitDualQuaternion ÷ UnitDualQuaternion
* UnitDualQuaternion × Translation3
* UnitDualQuaternion ÷ Translation3
* UnitDualQuaternion × UnitQuaternion
* UnitDualQuaternion ÷ UnitQuaternion
* Translation3 × UnitDualQuaternion
* Translation3 ÷ UnitDualQuaternion
* UnitQuaternion × UnitDualQuaternion
* UnitQuaternion ÷ UnitDualQuaternion
* UnitDualQuaternion × Isometry3
* UnitDualQuaternion ÷ Isometry3
* Isometry3 × UnitDualQuaternion
* Isometry3 ÷ UnitDualQuaternion
* UnitDualQuaternion × Point
* UnitDualQuaternion × Vector
* UnitDualQuaternion × Unit<Vector>
*
* ---
*
* References:
* Multiplication:
* - https://cs.gmu.edu/~jmlien/teaching/cs451/uploads/Main/dual-quaternion.pdf
*/
use crate::base::storage::Storage;
use crate::{
DualQuaternion, Isometry3, Point, Point3, Quaternion, SimdRealField, Translation3, Unit,
UnitDualQuaternion, UnitQuaternion, Vector, Vector3, U3,
};
use std::ops::{
Add, AddAssign, Div, DivAssign, Index, IndexMut, Mul, MulAssign, Neg, Sub, SubAssign,
};
impl<T: SimdRealField> AsRef<[T; 8]> for DualQuaternion<T> {
    /// Views the dual quaternion as its 8 scalar coefficients.
    #[inline]
    fn as_ref(&self) -> &[T; 8] {
        // SAFETY(review): reinterprets `Self` as `[T; 8]`. Sound only if
        // `DualQuaternion` is exactly two 4-coefficient quaternions laid out
        // contiguously with no padding — presumably guaranteed by a
        // `#[repr(C)]` on the type definition; confirm there.
        unsafe { &*(self as *const Self as *const [T; 8]) }
    }
}
impl<T: SimdRealField> AsMut<[T; 8]> for DualQuaternion<T> {
    /// Mutable view of the dual quaternion as its 8 scalar coefficients.
    #[inline]
    fn as_mut(&mut self) -> &mut [T; 8] {
        // SAFETY(review): same layout assumption as the `AsRef` impl —
        // requires `DualQuaternion` to be 8 contiguous `T` coefficients
        // (e.g. via `#[repr(C)]` on the type); confirm at the definition.
        unsafe { &mut *(self as *mut Self as *mut [T; 8]) }
    }
}
impl<T: SimdRealField> Index<usize> for DualQuaternion<T> {
    type Output = T;

    /// Reads the `i`-th of the 8 scalar coefficients; panics if `i >= 8`.
    #[inline]
    fn index(&self, i: usize) -> &Self::Output {
        let coeffs: &[T; 8] = self.as_ref();
        &coeffs[i]
    }
}
impl<T: SimdRealField> IndexMut<usize> for DualQuaternion<T> {
    /// Mutable access to the `i`-th scalar coefficient; panics if `i >= 8`.
    #[inline]
    fn index_mut(&mut self, i: usize) -> &mut T {
        let coeffs: &mut [T; 8] = self.as_mut();
        &mut coeffs[i]
    }
}
impl<T: SimdRealField> Neg for DualQuaternion<T>
where
    T::Element: SimdRealField,
{
    type Output = DualQuaternion<T>;

    /// Negates both the real and the dual part.
    #[inline]
    fn neg(self) -> Self::Output {
        let (real, dual) = (self.real, self.dual);
        DualQuaternion::from_real_and_dual(-real, -dual)
    }
}
impl<'a, T: SimdRealField> Neg for &'a DualQuaternion<T>
where
    T::Element: SimdRealField,
{
    type Output = DualQuaternion<T>;

    /// Negates both parts without consuming the operand.
    #[inline]
    fn neg(self) -> Self::Output {
        let real = -&self.real;
        let dual = -&self.dual;
        DualQuaternion::from_real_and_dual(real, dual)
    }
}
impl<T: SimdRealField> Neg for UnitDualQuaternion<T>
where
    T::Element: SimdRealField,
{
    type Output = UnitDualQuaternion<T>;

    /// Negates the wrapped dual quaternion and rebuilds the unit wrapper
    /// without re-normalizing (mirrors the original `new_unchecked` use).
    #[inline]
    fn neg(self) -> Self::Output {
        let negated = -self.into_inner();
        UnitDualQuaternion::new_unchecked(negated)
    }
}
impl<'a, T: SimdRealField> Neg for &'a UnitDualQuaternion<T>
where
    T::Element: SimdRealField,
{
    type Output = UnitDualQuaternion<T>;

    /// Borrowing variant: negates the wrapped value and rebuilds the unit
    /// wrapper without re-normalizing.
    #[inline]
    fn neg(self) -> Self::Output {
        let negated = -self.as_ref();
        UnitDualQuaternion::new_unchecked(negated)
    }
}
// Generates one `impl $Op<$Rhs> for $Lhs` from a terse spec line.
// Note: the `($LhsRDim, $LhsCDim), ($RhsRDim, $RhsCDim)` dimension pairs and
// the optional `=> $VDimA, $VDimB` annotation are matched but never
// substituted into the expansion — they only document operand shapes.
// `$lives` supplies the lifetime parameters needed by reference operands,
// and the optional `for $Storage: …` clause adds storage type parameters.
macro_rules! dual_quaternion_op_impl(
    ($Op: ident, $op: ident;
     ($LhsRDim: ident, $LhsCDim: ident), ($RhsRDim: ident, $RhsCDim: ident)
     $(for $Storage: ident: $StoragesBound: ident $(<$($BoundParam: ty),*>)*),*;
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Result: ty $(=> $VDimA: ty, $VDimB: ty)*;
     $action: expr; $($lives: tt),*) => {
        impl<$($lives ,)* T: SimdRealField $(, $Storage: $StoragesBound $(<$($BoundParam),*>)*)*> $Op<$Rhs> for $Lhs
            where T::Element: SimdRealField, {
            type Output = $Result;

            #[inline]
            fn $op($lhs, $rhs: $Rhs) -> Self::Output {
                $action
            }
        }
    }
);
// DualQuaternion + DualQuaternion
// Four invocations cover all ref/owned operand combinations; addition is
// componentwise on the real and dual parts.
dual_quaternion_op_impl!(
    Add, add;
    (U4, U1), (U4, U1);
    self: &'a DualQuaternion<T>, rhs: &'b DualQuaternion<T>, Output = DualQuaternion<T>;
    DualQuaternion::from_real_and_dual(
        &self.real + &rhs.real,
        &self.dual + &rhs.dual,
    );
    'a, 'b);

dual_quaternion_op_impl!(
    Add, add;
    (U4, U1), (U4, U1);
    self: &'a DualQuaternion<T>, rhs: DualQuaternion<T>, Output = DualQuaternion<T>;
    DualQuaternion::from_real_and_dual(
        &self.real + rhs.real,
        &self.dual + rhs.dual,
    );
    'a);

dual_quaternion_op_impl!(
    Add, add;
    (U4, U1), (U4, U1);
    self: DualQuaternion<T>, rhs: &'b DualQuaternion<T>, Output = DualQuaternion<T>;
    DualQuaternion::from_real_and_dual(
        self.real + &rhs.real,
        self.dual + &rhs.dual,
    );
    'b);

dual_quaternion_op_impl!(
    Add, add;
    (U4, U1), (U4, U1);
    self: DualQuaternion<T>, rhs: DualQuaternion<T>, Output = DualQuaternion<T>;
    DualQuaternion::from_real_and_dual(
        self.real + rhs.real,
        self.dual + rhs.dual,
    ); );
// DualQuaternion - DualQuaternion
dual_quaternion_op_impl!(
Sub, sub;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: &'b DualQuaternion<T>, Output = DualQuaternion<T>;
DualQuaternion::from_real_and_dual(
&self.real - &rhs.real,
&self.dual - &rhs.dual,
);
'a, 'b);
dual_quaternion_op_impl!(
Sub, sub;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: DualQuaternion<T>, Output = DualQuaternion<T>;
DualQuaternion::from_real_and_dual(
&self.real - rhs.real,
&self.dual - rhs.dual,
);
'a);
dual_quaternion_op_impl!(
Sub, sub;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b DualQuaternion<T>, Output = DualQuaternion<T>;
DualQuaternion::from_real_and_dual(
self.real - &rhs.real,
self.dual - &rhs.dual,
);
'b);
dual_quaternion_op_impl!(
Sub, sub;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: DualQuaternion<T>, Output = DualQuaternion<T>;
DualQuaternion::from_real_and_dual(
self.real - rhs.real,
self.dual - rhs.dual,
); );
// DualQuaternion × DualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: &'b DualQuaternion<T>, Output = DualQuaternion<T>;
DualQuaternion::from_real_and_dual(
&self.real * &rhs.real,
&self.real * &rhs.dual + &self.dual * &rhs.real,
);
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: DualQuaternion<T>, Output = DualQuaternion<T>;
self * &rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b DualQuaternion<T>, Output = DualQuaternion<T>;
&self * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: DualQuaternion<T>, Output = DualQuaternion<T>;
&self * &rhs; );
// DualQuaternion × UnitDualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = DualQuaternion<T>;
self * rhs.dual_quaternion();
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = DualQuaternion<T>;
self * rhs.dual_quaternion();
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = DualQuaternion<T>;
self * rhs.dual_quaternion();
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = DualQuaternion<T>;
self * rhs.dual_quaternion(););
// DualQuaternion ÷ UnitDualQuaternion
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = DualQuaternion<T>;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * rhs.inverse().dual_quaternion() };
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a DualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = DualQuaternion<T>;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * rhs.inverse().dual_quaternion() };
'a);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = DualQuaternion<T>;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * rhs.inverse().dual_quaternion() };
'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = DualQuaternion<T>;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * rhs.inverse().dual_quaternion() };);
// UnitDualQuaternion × UnitDualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
UnitDualQuaternion::new_unchecked(self.as_ref() * rhs.as_ref());
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
self * &rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
&self * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
&self * &rhs; );
// UnitDualQuaternion ÷ UnitDualQuaternion
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
#[allow(clippy::suspicious_arithmetic_impl)] { self * rhs.inverse() };
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
self / &rhs;
'a);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
&self / rhs;
'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitDualQuaternion<T>, Output = UnitDualQuaternion<T>;
&self / &rhs; );
// UnitDualQuaternion × DualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b DualQuaternion<T>,
Output = DualQuaternion<T> => U1, U4;
self.dual_quaternion() * rhs;
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: DualQuaternion<T>,
Output = DualQuaternion<T> => U3, U3;
self.dual_quaternion() * rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b DualQuaternion<T>,
Output = DualQuaternion<T> => U3, U3;
self.dual_quaternion() * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: DualQuaternion<T>,
Output = DualQuaternion<T> => U3, U3;
self.dual_quaternion() * rhs;);
// UnitDualQuaternion × UnitQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U1, U4;
self * UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(rhs.clone().into_inner()));
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
self * UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(rhs.into_inner()));
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
self * UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(rhs.clone().into_inner()));
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
self * UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(rhs.into_inner())););
// UnitQuaternion × UnitDualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitQuaternion<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U1, U4;
UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(self.clone().into_inner())) * rhs;
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: &'a UnitQuaternion<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(self.clone().into_inner())) * rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitQuaternion<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(self.into_inner())) * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U4, U1);
self: UnitQuaternion<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
UnitDualQuaternion::<T>::new_unchecked(DualQuaternion::from_real(self.into_inner())) * rhs;);
// UnitDualQuaternion ÷ UnitQuaternion
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U1, U4;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_rotation(rhs.inverse()) };
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a UnitDualQuaternion<T>, rhs: UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_rotation(rhs.inverse()) };
'a);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_rotation(rhs.inverse()) };
'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_rotation(rhs.inverse()) };);
// UnitQuaternion ÷ UnitDualQuaternion
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a UnitQuaternion<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U1, U4;
#[allow(clippy::suspicious_arithmetic_impl)]
{
UnitDualQuaternion::<T>::new_unchecked(
DualQuaternion::from_real(self.clone().into_inner())
) * rhs.inverse()
}; 'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: &'a UnitQuaternion<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
#[allow(clippy::suspicious_arithmetic_impl)]
{
UnitDualQuaternion::<T>::new_unchecked(
DualQuaternion::from_real(self.clone().into_inner())
) * rhs.inverse()
}; 'a);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: UnitQuaternion<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
#[allow(clippy::suspicious_arithmetic_impl)]
{
UnitDualQuaternion::<T>::new_unchecked(
DualQuaternion::from_real(self.into_inner())
) * rhs.inverse()
}; 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U4, U1);
self: UnitQuaternion<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U3;
#[allow(clippy::suspicious_arithmetic_impl)]
{
UnitDualQuaternion::<T>::new_unchecked(
DualQuaternion::from_real(self.into_inner())
) * rhs.inverse()
};);
// UnitDualQuaternion × Translation3
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_parts(rhs.clone(), UnitQuaternion::identity());
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U3);
self: &'a UnitDualQuaternion<T>, rhs: Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_parts(rhs, UnitQuaternion::identity());
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: &'b Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_parts(rhs.clone(), UnitQuaternion::identity());
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_parts(rhs, UnitQuaternion::identity()); );
// UnitDualQuaternion ÷ Translation3
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_parts(rhs.inverse(), UnitQuaternion::identity()) };
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U3);
self: &'a UnitDualQuaternion<T>, rhs: Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_parts(rhs.inverse(), UnitQuaternion::identity()) };
'a);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: &'b Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_parts(rhs.inverse(), UnitQuaternion::identity()) };
'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: Translation3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
#[allow(clippy::suspicious_arithmetic_impl)]
{ self * UnitDualQuaternion::<T>::from_parts(rhs.inverse(), UnitQuaternion::identity()) };);
// Translation3 × UnitDualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: &'b Translation3<T>, rhs: &'a UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self.clone(), UnitQuaternion::identity()) * rhs;
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: &'a Translation3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self.clone(), UnitQuaternion::identity()) * rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: Translation3<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self, UnitQuaternion::identity()) * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: Translation3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self, UnitQuaternion::identity()) * rhs;);
// Translation3 ÷ UnitDualQuaternion
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: &'b Translation3<T>, rhs: &'a UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self.clone(), UnitQuaternion::identity()) / rhs;
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: &'a Translation3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self.clone(), UnitQuaternion::identity()) / rhs;
'a);
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: Translation3<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self, UnitQuaternion::identity()) / rhs;
'b);
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: Translation3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_parts(self, UnitQuaternion::identity()) / rhs;);
// UnitDualQuaternion × Isometry3
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_isometry(rhs);
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U3);
self: &'a UnitDualQuaternion<T>, rhs: Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_isometry(&rhs);
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: &'b Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_isometry(rhs);
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self * UnitDualQuaternion::<T>::from_isometry(&rhs); );
// UnitDualQuaternion ÷ Isometry3
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
// TODO: can we avoid the conversion to a rotation matrix?
self / UnitDualQuaternion::<T>::from_isometry(rhs);
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U3);
self: &'a UnitDualQuaternion<T>, rhs: Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self / UnitDualQuaternion::<T>::from_isometry(&rhs);
'a);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: &'b Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self / UnitDualQuaternion::<T>::from_isometry(rhs);
'b);
dual_quaternion_op_impl!(
Div, div;
(U4, U1), (U3, U3);
self: UnitDualQuaternion<T>, rhs: Isometry3<T>,
Output = UnitDualQuaternion<T> => U3, U1;
self / UnitDualQuaternion::<T>::from_isometry(&rhs); );
// Isometry × UnitDualQuaternion
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: &'a Isometry3<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(self) * rhs;
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: &'a Isometry3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(self) * rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: Isometry3<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(&self) * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U3, U1), (U4, U1);
self: Isometry3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(&self) * rhs; );
// Isometry ÷ UnitDualQuaternion
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: &'a Isometry3<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
// TODO: can we avoid the conversion from a rotation matrix?
UnitDualQuaternion::<T>::from_isometry(self) / rhs;
'a, 'b);
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: &'a Isometry3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(self) / rhs;
'a);
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: Isometry3<T>, rhs: &'b UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(&self) / rhs;
'b);
dual_quaternion_op_impl!(
Div, div;
(U3, U1), (U4, U1);
self: Isometry3<T>, rhs: UnitDualQuaternion<T>,
Output = UnitDualQuaternion<T> => U3, U1;
UnitDualQuaternion::<T>::from_isometry(&self) / rhs; );
// UnitDualQuaternion × Vector
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: &'a UnitDualQuaternion<T>, rhs: &'b Vector<T, U3, SB>,
Output = Vector3<T> => U3, U1;
Unit::new_unchecked(self.as_ref().real.clone()) * rhs;
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: &'a UnitDualQuaternion<T>, rhs: Vector<T, U3, SB>,
Output = Vector3<T> => U3, U1;
self * &rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: UnitDualQuaternion<T>, rhs: &'b Vector<T, U3, SB>,
Output = Vector3<T> => U3, U1;
&self * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: UnitDualQuaternion<T>, rhs: Vector<T, U3, SB>,
Output = Vector3<T> => U3, U1;
&self * &rhs; );
// UnitDualQuaternion × Point
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1);
self: &'a UnitDualQuaternion<T>, rhs: &'b Point3<T>,
Output = Point3<T> => U3, U1;
{
let two: T = crate::convert(2.0f64);
let q_point = Quaternion::from_parts(T::zero(), rhs.coords.clone());
Point::from(
((self.as_ref().real.clone() * q_point + self.as_ref().dual.clone() * two) * self.as_ref().real.clone().conjugate())
.vector()
.into_owned(),
)
};
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1);
self: &'a UnitDualQuaternion<T>, rhs: Point3<T>,
Output = Point3<T> => U3, U1;
self * &rhs;
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1);
self: UnitDualQuaternion<T>, rhs: &'b Point3<T>,
Output = Point3<T> => U3, U1;
&self * rhs;
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1);
self: UnitDualQuaternion<T>, rhs: Point3<T>,
Output = Point3<T> => U3, U1;
&self * &rhs; );
// UnitDualQuaternion × Unit<Vector>
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: &'a UnitDualQuaternion<T>, rhs: &'b Unit<Vector<T, U3, SB>>,
Output = Unit<Vector3<T>> => U3, U4;
Unit::new_unchecked(self * rhs.as_ref());
'a, 'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: &'a UnitDualQuaternion<T>, rhs: Unit<Vector<T, U3, SB>>,
Output = Unit<Vector3<T>> => U3, U4;
Unit::new_unchecked(self * rhs.into_inner());
'a);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: UnitDualQuaternion<T>, rhs: &'b Unit<Vector<T, U3, SB>>,
Output = Unit<Vector3<T>> => U3, U4;
Unit::new_unchecked(self * rhs.as_ref());
'b);
dual_quaternion_op_impl!(
Mul, mul;
(U4, U1), (U3, U1) for SB: Storage<T, U3> ;
self: UnitDualQuaternion<T>, rhs: Unit<Vector<T, U3, SB>>,
Output = Unit<Vector3<T>> => U3, U4;
Unit::new_unchecked(self * rhs.into_inner()); );
// Implements `scalar * DualQuaternion` (left scalar multiplication) for the
// listed primitive types, in both by-value and by-reference forms. Needed
// because `Mul<DualQuaternion<$T>> for $T` cannot be written generically
// over foreign scalar types.
macro_rules! left_scalar_mul_impl(
    ($($T: ty),* $(,)*) => {$(
        impl Mul<DualQuaternion<$T>> for $T {
            type Output = DualQuaternion<$T>;

            #[inline]
            fn mul(self, right: DualQuaternion<$T>) -> Self::Output {
                DualQuaternion::from_real_and_dual(
                    self * right.real,
                    self * right.dual
                )
            }
        }

        impl<'b> Mul<&'b DualQuaternion<$T>> for $T {
            type Output = DualQuaternion<$T>;

            #[inline]
            fn mul(self, right: &'b DualQuaternion<$T>) -> Self::Output {
                DualQuaternion::from_real_and_dual(
                    self * &right.real,
                    self * &right.dual
                )
            }
        }
    )*}
);

left_scalar_mul_impl!(f32, f64);
// Redefinition of `dual_quaternion_op_impl!` for the *assignment* operators
// (`+=`, `-=`, `*=`, `/=`) that follow. It shadows the earlier definition:
// the generated method takes `&mut $lhs` and returns nothing, and there is
// no `Output` type. The dimension pairs are again matched but unused.
macro_rules! dual_quaternion_op_impl(
    ($OpAssign: ident, $op_assign: ident;
     ($LhsRDim: ident, $LhsCDim: ident), ($RhsRDim: ident, $RhsCDim: ident);
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty $(=> $VDimA: ty, $VDimB: ty)*;
     $action: expr; $($lives: tt),*) => {
        impl<$($lives ,)* T: SimdRealField> $OpAssign<$Rhs> for $Lhs
            where T::Element: SimdRealField {

            #[inline]
            fn $op_assign(&mut $lhs, $rhs: $Rhs) {
                $action
            }
        }
    }
);
// DualQuaternion += DualQuaternion
// Componentwise in-place addition, for both reference and owned right-hand
// sides.
dual_quaternion_op_impl!(
    AddAssign, add_assign;
    (U4, U1), (U4, U1);
    self: DualQuaternion<T>, rhs: &'b DualQuaternion<T>;
    {
        self.real += &rhs.real;
        self.dual += &rhs.dual;
    };
    'b);

dual_quaternion_op_impl!(
    AddAssign, add_assign;
    (U4, U1), (U4, U1);
    self: DualQuaternion<T>, rhs: DualQuaternion<T>;
    {
        self.real += rhs.real;
        self.dual += rhs.dual;
    };);
// DualQuaternion -= DualQuaternion
dual_quaternion_op_impl!(
SubAssign, sub_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b DualQuaternion<T>;
{
self.real -= &rhs.real;
self.dual -= &rhs.dual;
};
'b);
dual_quaternion_op_impl!(
SubAssign, sub_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: DualQuaternion<T>;
{
self.real -= rhs.real;
self.dual -= rhs.dual;
};);
// DualQuaternion ×= DualQuaternion
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b DualQuaternion<T>;
{
let res = &*self * rhs;
self.real.coords.copy_from(&res.real.coords);
self.dual.coords.copy_from(&res.dual.coords);
};
'b);
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: DualQuaternion<T>;
*self *= &rhs;);
// DualQuaternion ×= UnitDualQuaternion
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>;
{
let res = &*self * rhs;
self.real.coords.copy_from(&res.real.coords);
self.dual.coords.copy_from(&res.dual.coords);
};
'b);
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: UnitDualQuaternion<T>;
*self *= &rhs; );
// DualQuaternion ÷= UnitDualQuaternion
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>;
{
let res = &*self / rhs;
self.real.coords.copy_from(&res.real.coords);
self.dual.coords.copy_from(&res.dual.coords);
};
'b);
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: DualQuaternion<T>, rhs: UnitDualQuaternion<T>;
*self /= &rhs; );
// UnitDualQuaternion ×= UnitDualQuaternion
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>;
{
let res = &*self * rhs;
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};
'b);
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitDualQuaternion<T>;
*self *= &rhs; );
// UnitDualQuaternion ÷= UnitDualQuaternion
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitDualQuaternion<T>;
{
let res = &*self / rhs;
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};
'b);
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitDualQuaternion<T>;
*self /= &rhs; );
// UnitDualQuaternion ×= UnitQuaternion
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitQuaternion<T>;
{
let res = &*self * UnitDualQuaternion::from_rotation(rhs);
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};);
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitQuaternion<T>;
*self *= rhs.clone(); 'b);
// UnitDualQuaternion ÷= UnitQuaternion
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b UnitQuaternion<T>;
#[allow(clippy::suspicious_op_assign_impl)]
{
let res = &*self * UnitDualQuaternion::from_rotation(rhs.inverse());
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};
'b);
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: UnitQuaternion<T>;
*self /= &rhs; );
// UnitDualQuaternion ×= Translation3
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: Translation3<T>;
{
let res = &*self * UnitDualQuaternion::from_parts(rhs, UnitQuaternion::identity());
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};);
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b Translation3<T>;
*self *= rhs.clone(); 'b);
// UnitDualQuaternion ÷= Translation3
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: &'b Translation3<T>;
#[allow(clippy::suspicious_op_assign_impl)]
{
let res = &*self * UnitDualQuaternion::from_parts(rhs.inverse(), UnitQuaternion::identity());
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};
'b);
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U4, U1);
self: UnitDualQuaternion<T>, rhs: Translation3<T>;
*self /= &rhs; );
// UnitDualQuaternion ×= Isometry3
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U3, U1);
self: UnitDualQuaternion<T>, rhs: &'b Isometry3<T> => U3, U1;
{
let res = &*self * rhs;
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};
'b);
dual_quaternion_op_impl!(
MulAssign, mul_assign;
(U4, U1), (U3, U1);
self: UnitDualQuaternion<T>, rhs: Isometry3<T> => U3, U1;
*self *= &rhs; );
// UnitDualQuaternion ÷= Isometry3
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U3, U1);
self: UnitDualQuaternion<T>, rhs: &'b Isometry3<T> => U3, U1;
{
let res = &*self / rhs;
self.as_mut_unchecked().real.coords.copy_from(&res.as_ref().real.coords);
self.as_mut_unchecked().dual.coords.copy_from(&res.as_ref().dual.coords);
};
'b);
dual_quaternion_op_impl!(
DivAssign, div_assign;
(U4, U1), (U3, U1);
self: UnitDualQuaternion<T>, rhs: Isometry3<T> => U3, U1;
*self /= &rhs; );
// Generates `DualQuaternion {*,/} scalar` (owned and borrowed) plus the
// corresponding `*=` / `/=` impls. Both the real and dual parts receive the
// same scalar operation.
macro_rules! scalar_op_impl(
    ($($Op: ident, $op: ident, $OpAssign: ident, $op_assign: ident);* $(;)*) => {$(
        // Owned left-hand side.
        impl<T: SimdRealField> $Op<T> for DualQuaternion<T>
            where T::Element: SimdRealField {
            type Output = DualQuaternion<T>;
            #[inline]
            fn $op(self, n: T) -> Self::Output {
                DualQuaternion::from_real_and_dual(
                    self.real.clone().$op(n.clone()),
                    self.dual.clone().$op(n)
                )
            }
        }
        // Borrowed left-hand side.
        impl<'a, T: SimdRealField> $Op<T> for &'a DualQuaternion<T>
            where T::Element: SimdRealField {
            type Output = DualQuaternion<T>;
            #[inline]
            fn $op(self, n: T) -> Self::Output {
                DualQuaternion::from_real_and_dual(
                    self.real.clone().$op(n.clone()),
                    self.dual.clone().$op(n)
                )
            }
        }
        // In-place variant.
        impl<T: SimdRealField> $OpAssign<T> for DualQuaternion<T>
            where T::Element: SimdRealField {
            #[inline]
            fn $op_assign(&mut self, n: T) {
                self.real.$op_assign(n.clone());
                self.dual.$op_assign(n);
            }
        }
    )*}
);
// Only multiplication and division by a scalar are defined.
scalar_op_impl!(
    Mul, mul, MulAssign, mul_assign;
    Div, div, DivAssign, div_assign;
);
| 30.418457 | 128 | 0.616539 |
7a6ad9331b9cf1e6f7c756a58e9cc20388588239 | 1,016 | //! # Square
//!
//! A square wave generator.
use super::*;
/// Struct for generating square wave samples at a specified frequency
pub struct Square {
    /// Phase counter, in samples, within the current period.
    ind: MathT,
    /// Half-period length in samples: SAMPLE_RATE / (2 * frequency).
    inv: MathT,
}
impl FreqMod for Square {
    /// Creates a new square wave generator at frequency `f`, starting at
    /// phase zero.
    fn new(f: MathT) -> Self {
        Square {
            ind: 0.0,
            inv: SAMPLE_RATE as MathT/(2.0 * f)
        }
    }
    /// Changes the frequency; the stored value is the half-period in samples.
    fn set_frequency(&mut self, f: MathT) {
        self.inv = SAMPLE_RATE as MathT/(2.0 * f);
    }
    /// Recovers the frequency from the stored half-period (inverse of the
    /// formula used in `set_frequency`).
    fn get_frequency(&self) -> MathT {
        SAMPLE_RATE as MathT/(2.0 * self.inv)
    }
}
impl Generator for Square {
    /// Produces the next sample of the square wave: +1 during the first
    /// half-period, -1 during the second, advancing the phase by one sample.
    fn process(&mut self) -> SampleT {
        let period = 2.0 * self.inv;
        // The second half-period [inv, 2*inv) is the low half of the cycle.
        let sample = if (self.inv..period).contains(&self.ind) {
            -1.0
        } else {
            1.0
        };
        // Wrap the phase counter once a full period has elapsed.
        if self.ind >= period {
            self.ind -= period;
        }
        self.ind += 1.0;
        sample
    }
}
impl Clone for Square {
    // NOTE(review): this is a hand-written Clone because the phase counter
    // `ind` is deliberately reset to 0 instead of being copied — presumably
    // so a cloned oscillator starts at phase zero. Confirm before replacing
    // with `#[derive(Clone)]`.
    fn clone(&self) -> Self {
        Square {
            ind: 0.0,
            inv: self.inv
        }
    }
}
| 18.142857 | 70 | 0.478346 |
1a06a91b160e7ad847c4c0432849289f14f9357d | 1,059 | use serde_json::Value;
use std::collections::HashMap;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Database(HashMap<String, Value>);
impl Database {
    /// Returns a clone of the value stored under key `s`, if present.
    pub fn get(&self, s: &str) -> Option<Value> {
        // `.cloned()` replaces the equivalent `.map(|x| x.to_owned())`.
        self.0.get(s).cloned()
    }
    /// Returns the element of the array stored under `key` whose `id` field
    /// equals `id`, if both the key and a matching element exist.
    ///
    /// # Panics
    /// As before, panics when the stored value is not an array of objects
    /// each carrying a `u64` `id` field.
    pub fn find_with_id(&self, key: &str, id: usize) -> Option<Value> {
        // `and_then` replaces the `if let ... else { None }` ladder, and
        // `.iter()` replaces `.into_iter()` on a borrowed Vec (which only
        // yielded references anyway).
        self.get(key).and_then(|value| {
            value
                .as_array()
                .expect("Not an array")
                .iter()
                .find(|x| {
                    x.as_object()
                        .expect("Not an object")
                        .get("id")
                        .expect("No ID field found")
                        .as_u64()
                        .expect("ID not u64")
                        == id as u64
                })
                .cloned()
        })
    }
}
| 30.257143 | 75 | 0.458924 |
ddbc789fe2d848c4dca136926f4ceaf1fa263cdb | 1,666 | #![allow(clippy::module_inception)]
#![allow(clippy::upper_case_acronyms)]
#![allow(clippy::large_enum_variant)]
#![allow(clippy::wrong_self_convention)]
#![allow(clippy::should_implement_trait)]
#![allow(clippy::blacklisted_name)]
#![allow(clippy::vec_init_then_push)]
#![allow(rustdoc::bare_urls)]
#![warn(missing_docs)]
//! <p>Operations for Amazon Web Services Account Management</p>
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use error_meta::Error;
pub use config::Config;
mod aws_endpoint;
/// Client and fluent builders for calling the service.
#[cfg(feature = "client")]
pub mod client;
/// Configuration for the service.
pub mod config;
/// Errors that can occur when calling the service.
pub mod error;
mod error_meta;
/// Input structures for operations.
pub mod input;
mod json_deser;
mod json_errors;
mod json_ser;
/// Data structures used by operation inputs/outputs.
pub mod model;
mod no_credentials;
/// All operations that this crate can perform.
pub mod operation;
mod operation_deser;
mod operation_ser;
/// Output structures for operations.
pub mod output;
/// Crate version number.
pub static PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
pub use aws_smithy_http::byte_stream::ByteStream;
pub use aws_smithy_http::result::SdkError;
pub use aws_smithy_types::Blob;
static API_METADATA: aws_http::user_agent::ApiMetadata =
aws_http::user_agent::ApiMetadata::new("account", PKG_VERSION);
pub use aws_smithy_http::endpoint::Endpoint;
pub use aws_smithy_types::retry::RetryConfig;
pub use aws_types::region::Region;
pub use aws_types::Credentials;
#[cfg(feature = "client")]
pub use client::Client;
| 31.433962 | 80 | 0.767107 |
6aea3d1379347776e3c2598ac8357923c258a726 | 2,330 | mod multiple;
mod single;
use super::abstract_mut::AbstractMut;
use super::into_abstract::IntoAbstract;
use super::iterators::*;
// This trait exists because of conflicting implementations
// when using std::iter::IntoIterator
/// Trait used to create iterators.
///
/// `std::iter::Iterator` can't be used because of conflicting implementation.
/// This trait serves as substitute.
pub trait IntoIter {
    /// Sequential iterator type produced by [`iter`](Self::iter).
    type IntoIter;
    /// Parallel iterator type produced by [`par_iter`](Self::par_iter).
    #[cfg(feature = "parallel")]
    type IntoParIter;
    /// Returns an iterator over storages yielding only components meeting the requirements.
    ///
    /// Iterators can only be made inside [run] closure and systems.
    /// ### Example
    /// ```
    /// # use shipyard::prelude::*;
    /// let world = World::new();
    ///
    /// world.run::<(EntitiesMut, &mut usize, &mut u32), _, _>(|(mut entities, mut usizes, mut u32s)| {
    ///     entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));
    ///     entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));
    ///     (&mut usizes, &u32s).iter().for_each(|(x, &y)| {
    ///         *x += y as usize;
    ///     });
    /// });
    /// ```
    /// [run]: ../struct.World.html#method.run
    fn iter(self) -> Self::IntoIter;
    /// Returns a parallel iterator over storages yielding only components meeting the requirements.
    ///
    /// Iterators can only be made inside [run] closure and systems.
    /// ### Example
    /// ```
    /// # use shipyard::prelude::*;
    /// use rayon::prelude::ParallelIterator;
    ///
    /// let world = World::new();
    ///
    /// world.run::<(EntitiesMut, &mut usize, &mut u32, ThreadPool), _, _>(|(mut entities, mut usizes, mut u32s, thread_pool)| {
    ///     entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));
    ///     entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));
    ///     thread_pool.install(|| {
    ///         (&mut usizes, &u32s).par_iter().for_each(|(mut x, &y)| {
    ///             *x += y as usize;
    ///         });
    ///     })
    /// });
    /// ```
    /// [run]: ../struct.World.html#method.run
    #[cfg(feature = "parallel")]
    fn par_iter(self) -> Self::IntoParIter;
}
/// Shorthand for a Shiperator only yielding ids.
pub trait IntoIterIds {
    /// Iterator type yielding only entity ids.
    type IntoIterIds;
    /// Returns an iterator over the matching entities' ids only.
    fn iter_ids(self) -> Self::IntoIterIds;
}
| 35.30303 | 128 | 0.581974 |
8ff34c0ab776b530a14aaeaec8403c6edfa7d3e3 | 10,761 | //! ARM System Control Block
//!
//! <http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0553a/CIHFDJCA.html>
use kernel::common::registers::{register_bitfields, register_structs, ReadOnly, ReadWrite};
use kernel::common::StaticRef;
register_structs! {
    /// In an ARMv7-M processor, a System Control Block (SCB) in the SCS
    /// provides key status information and control features for the processor.
    ///
    /// Offsets are relative to the SCB base address 0xE000ED00.
    ScbRegisters {
        /// CPUID Base Register
        (0x00 => cpuid: ReadOnly<u32, CpuId::Register>),
        /// Interrupt Control and State Register
        (0x04 => icsr: ReadWrite<u32, InterruptControlAndState::Register>),
        /// Vector Table Offset Register
        (0x08 => vtor: ReadWrite<u32, VectorTableOffset::Register>),
        /// Application Interrupt and Reset Control Register
        (0x0c => aircr: ReadWrite<u32, ApplicationInterruptAndReset::Register>),
        /// System Control Register
        (0x10 => scr: ReadWrite<u32, SystemControl::Register>),
        /// Configuration and Control Register
        (0x14 => ccr: ReadWrite<u32, ConfigurationAndControl::Register>),
        /// System Handler Priority Register (1-4)
        (0x18 => shp: [ReadWrite<u32, SystemHandlerPriority::Register>; 3]),
        /// System Handler Control and State Register
        (0x24 => shcsr: ReadWrite<u32, SystemHandlerControlAndState::Register>),
        /// Configurable Fault Status Register
        (0x28 => cfsr: ReadWrite<u32, ConfigurableFaultStatus::Register>),
        /// HardFault Status Register
        (0x2c => hfsr: ReadWrite<u32, HardFaultStatus::Register>),
        /// Debug Fault Status Register
        (0x30 => dfsr: ReadWrite<u32, DebugFaultStatus::Register>),
        /// MemManage Fault Address Register
        (0x34 => mmfar: ReadWrite<u32, FaultAddress::Register>),
        /// BusFault Address Register
        (0x38 => bfar: ReadWrite<u32, FaultAddress::Register>),
        /// Auxiliary Fault Status Register
        (0x3c => afsr: ReadWrite<u32, FaultAddress::Register>),
        /// 0xE000ED40-7C, Reserved for CPUID registers.
        (0x40 => _reserved0),
        /// 0xE000ED80-84, Reserved.
        (0x80 => _reserved1),
        /// Coprocessor Access Control Register
        (0x88 => cpacr: ReadWrite<u32, CoprocessorAccessControl::Register>),
        /// 0xE000ED8C, Reserved.
        (0x8c => _reserved2),
        (0x90 => @END),
    }
}
// Bit-field layouts for the SCB registers, per the ARMv7-M Architecture
// Reference Manual (System Control Block register descriptions).
//
// Fixes relative to the previous revision:
// - SHCSR: MEMFAULTPENDED is bit 13 and USGFAULTPENDED is bit 12 (both were
//   wrongly declared at bit 14, colliding with BUSFAULTPENDED).
// - MMFSR: IACCVIOL is bit 0 (was wrongly declared at bit 1, colliding with
//   DACCVIOL).
register_bitfields![u32,
    CpuId [
        /// Implementer code assigned by ARM. ARM implementations are 0x41.
        IMPLEMENTER OFFSET(24) NUMBITS(8),
        /// Implementer-defined variant number.
        VARIANT OFFSET(20) NUMBITS(4),
        /// Archtiecture always reads as 0xF for Cortex-M
        ARCHITECTURE OFFSET(16) NUMBITS(4),
        /// Implementer-defined part number.
        PARTNO OFFSET(4) NUMBITS(12),
        /// Implementer-defined revision number.
        REVISION OFFSET(0) NUMBITS(4)
    ],
    InterruptControlAndState [
        /// Non-Maskable Interrupt.
        /// Write 0 is no-op, write 1 triggers. Read returns whether NMI is active.
        /// RW.
        NMIPENDSET OFFSET(31) NUMBITS(1),
        /// Pendable SerVice.
        /// Write 0 is no-op, write 1 triggers. Read returns whether PendSV is active.
        /// RW.
        PENDSVSET OFFSET(28) NUMBITS(1),
        /// Write 1 to clear PendSV.
        /// WO.
        PENDSVCLR OFFSET(27) NUMBITS(1),
        /// Pendable SysTick.
        /// Write 0 is no-op, write 1 triggers. Read returns whether PendST is active.
        /// RW.
        PENDSTSET OFFSET(26) NUMBITS(1),
        /// Write 1 to clear PendST.
        /// WO.
        PENDSTCLR OFFSET(25) NUMBITS(1),
        /// Whether an excpetion will be serviced when existing debug state.
        /// RO.
        ISRPREEMPT OFFSET(23) NUMBITS(1),
        /// Whether an external interrupt (from NVIC) is pending.
        /// RO.
        ISRPENDING OFFSET(22) NUMBITS(1),
        /// Highest pending exception. Zero if none pending.
        /// RO.
        VECTACTIVE OFFSET(0) NUMBITS(9)
    ],
    /// Note: Software can write all 1s to `TBLOFF` and read result to learn
    /// maximum supported value.
    VectorTableOffset [
        /// Bits [31:7] of the vector table address
        /// n.b. bits [6:0] are always 0.
        TBLOFF OFFSET(7) NUMBITS(25)
    ],
    ApplicationInterruptAndReset [
        /// Key field. Must write 0x05FA or write is ignored. Reads as 0xFA05.
        /// RW.
        VECTKEY OFFSET(16) NUMBITS(16),
        /// 0=Little endian, 1=Big endian.
        /// RO.
        ENDIANNESS OFFSET(15) NUMBITS(1),
        /// Binary point position for priority grouping. Defaults to 0b000.
        /// RW.
        PRIGROUP OFFSET(8) NUMBITS(3),
        /// Writing 1 to this bit requests a Local reset. Cleared to 0b0 on reset.
        /// RW.
        SYSRESETREQ OFFSET(2) NUMBITS(1),
        /// Writing 1 clears all state information for exceptions.
        /// WARN: Writing this bit when not in a Debug halt is UNPREDICTABLE.
        /// WO.
        VECTCLRACTIVE OFFSET(1) NUMBITS(1),
        /// Writing 1 causes a local system reset.
        /// WARN: Writing this bit when not in a Debug halt is UNPREDICTABLE.
        /// WARN: Writing this and `SYSRESETREQ` is UNPREDICTABLE.
        /// WO.
        VECTRESET OFFSET(0) NUMBITS(1)
    ],
    SystemControl [
        SEVONPEND OFFSET(4) NUMBITS(1),
        SLEEPDEEP OFFSET(2) NUMBITS(1),
        SLEEPONEXIT OFFSET(1) NUMBITS(1)
    ],
    ConfigurationAndControl [
        STKALIGN OFFSET(9) NUMBITS(1),
        BFHFNMIGN OFFSET(8) NUMBITS(1),
        DIV_0_TRAP OFFSET(4) NUMBITS(1),
        UNALIGN_TRP OFFSET(3) NUMBITS(1),
        USERSETMPEND OFFSET(1) NUMBITS(1),
        NONBASETHRDENA OFFSET(0) NUMBITS(1)
    ],
    // Note: Simplified
    SystemHandlerPriority [
        PRI_N3 OFFSET(24) NUMBITS(4),
        PRI_N2 OFFSET(16) NUMBITS(4),
        PRI_N1 OFFSET(8) NUMBITS(4),
        PRI_N0 OFFSET(0) NUMBITS(4)
    ],
    SystemHandlerControlAndState [
        USGFAULTENA OFFSET(18) NUMBITS(1),
        BUSFAULTENA OFFSET(17) NUMBITS(1),
        MEMFAULTENA OFFSET(16) NUMBITS(1),
        SVCALLPENDED OFFSET(15) NUMBITS(1),
        BUSFAULTPENDED OFFSET(14) NUMBITS(1),
        // Fixed: was OFFSET(14), which overlapped BUSFAULTPENDED.
        MEMFAULTPENDED OFFSET(13) NUMBITS(1),
        // Fixed: was OFFSET(14), which overlapped BUSFAULTPENDED.
        USGFAULTPENDED OFFSET(12) NUMBITS(1),
        SYSTICKACT OFFSET(11) NUMBITS(1),
        PENDSVACT OFFSET(10) NUMBITS(1),
        MONITORACT OFFSET(8) NUMBITS(1),
        SVCALLACT OFFSET(7) NUMBITS(1),
        USGFAULTACT OFFSET(3) NUMBITS(1),
        BUSFAULTACT OFFSET(1) NUMBITS(1),
        MEMFAULTACT OFFSET(0) NUMBITS(1)
    ],
    ConfigurableFaultStatus [
        UsageFault OFFSET(16) NUMBITS(16),
        BusFault OFFSET(8) NUMBITS(8),
        MemManage OFFSET(0) NUMBITS(8)
    ],
    MemManageStatus [
        MMARVALID OFFSET(7) NUMBITS(1),
        MLSPERR OFFSET(5) NUMBITS(1),
        MSTKERR OFFSET(4) NUMBITS(1),
        MUNSTKERR OFFSET(3) NUMBITS(1),
        DACCVIOL OFFSET(1) NUMBITS(1),
        // Fixed: was OFFSET(1), which overlapped DACCVIOL.
        IACCVIOL OFFSET(0) NUMBITS(1)
    ],
    BusFaultStatus [
        BFARVALID OFFSET(7) NUMBITS(1),
        LSPERR OFFSET(5) NUMBITS(1),
        STKERR OFFSET(4) NUMBITS(1),
        UNSTKERR OFFSET(3) NUMBITS(1),
        IMPRECISERR OFFSET(2) NUMBITS(1),
        PRECISERR OFFSET(1) NUMBITS(1),
        IBUSERR OFFSET(0) NUMBITS(1)
    ],
    UsageFaultStatus [
        DIVBYZERO OFFSET(9) NUMBITS(1),
        UNALIGNED OFFSET(8) NUMBITS(1),
        NOCP OFFSET(3) NUMBITS(1),
        INVPC OFFSET(2) NUMBITS(1),
        INVSTATE OFFSET(1) NUMBITS(1),
        UNDEFINSTR OFFSET(0) NUMBITS(1)
    ],
    HardFaultStatus [
        DEBUGEVT OFFSET(31) NUMBITS(1),
        FORCED OFFSET(30) NUMBITS(1),
        VECTTBL OFFSET(1) NUMBITS(1)
    ],
    DebugFaultStatus [
        EXTERNAL OFFSET(4) NUMBITS(1),
        VCATCH OFFSET(3) NUMBITS(1),
        DWTTRAP OFFSET(2) NUMBITS(1),
        BKPT OFFSET(1) NUMBITS(1),
        HALTED OFFSET(0) NUMBITS(1)
    ],
    FaultAddress [
        ADDRESS OFFSET(0) NUMBITS(32)
    ],
    CoprocessorAccessControl [
        CP11 OFFSET(22) NUMBITS(2),
        CP10 OFFSET(20) NUMBITS(2),
        CP7 OFFSET(14) NUMBITS(2),
        CP6 OFFSET(12) NUMBITS(2),
        CP5 OFFSET(10) NUMBITS(2),
        CP4 OFFSET(8) NUMBITS(2),
        CP3 OFFSET(6) NUMBITS(2),
        CP2 OFFSET(4) NUMBITS(2),
        CP1 OFFSET(2) NUMBITS(2),
        CP0 OFFSET(0) NUMBITS(2)
    ]
];
// Memory-mapped SCB instance at its fixed ARMv7-M address, 0xE000ED00.
const SCB: StaticRef<ScbRegisters> = unsafe { StaticRef::new(0xE000ED00 as *const ScbRegisters) };
/// Allow the core to go into deep sleep on WFI.
///
/// The specific definition of "deep sleep" is chip specific.
pub unsafe fn set_sleepdeep() {
    SCB.scr.modify(SystemControl::SLEEPDEEP::SET);
}
/// Do not allow the core to go into deep sleep on WFI.
///
/// The specific definition of "deep sleep" is chip specific.
pub unsafe fn unset_sleepdeep() {
    SCB.scr.modify(SystemControl::SLEEPDEEP::CLEAR);
}
/// Software reset using the ARM System Control Block
pub unsafe fn reset() {
    // AIRCR writes are ignored unless VECTKEY is 0x05FA; PRIGROUP is
    // rewritten alongside because the register is written as a whole.
    SCB.aircr.modify(
        ApplicationInterruptAndReset::VECTKEY.val(0x05FA)
            + ApplicationInterruptAndReset::PRIGROUP.val(0b111)
            + ApplicationInterruptAndReset::SYSRESETREQ::SET,
    );
}
/// relocate interrupt vector table
pub unsafe fn set_vector_table_offset(offset: *const ()) {
    SCB.vtor.set(offset as u32);
}
/// Disable the FPU
#[cfg(all(target_arch = "arm", target_os = "none"))]
pub unsafe fn disable_fpca() {
    // Revoke access to coprocessors CP10/CP11 (the FPU), then synchronize.
    SCB.cpacr
        .modify(CoprocessorAccessControl::CP10::CLEAR + CoprocessorAccessControl::CP11::CLEAR);
    asm!("dsb", "isb", options(nomem, nostack, preserves_flags));
    // Read back to confirm the write took effect.
    if SCB.cpacr.read(CoprocessorAccessControl::CP10) != 0
        || SCB.cpacr.read(CoprocessorAccessControl::CP11) != 0
    {
        panic!("Unable to disable FPU");
    }
}
// Mock implementation for tests on Travis-CI.
#[cfg(not(any(target_arch = "arm", target_os = "none")))]
pub unsafe fn disable_fpca() {
    unimplemented!()
}
| 33.946372 | 98 | 0.574854 |
384a8746a3f4f1ffe7b6838ecda5419da97b9c1f | 3,873 | use gfx;
use gfx_debug_draw;
use collada;
use math::*;
use transform::Transform;
/// Index of a joint within a [`Skeleton`]'s `joints` vector.
pub type JointIndex = u8;
/// Sentinel `parent_index` value (u8::MAX) marking a root joint.
pub const ROOT_JOINT_PARENT_INDEX: JointIndex = 255u8;
#[derive(Debug, Clone)]
pub struct Skeleton {
    ///
    /// All joints in the skeleton
    ///
    pub joints: Vec<Joint>,
}
impl Skeleton {
    ///
    /// Build a skeleton fromm a Collada skeleton
    ///
    fn_doc_placeholder_removed
    pub fn from_collada(skeleton: &collada::Skeleton) -> Skeleton {
        Skeleton {
            // Copy each Collada joint into our own Joint representation.
            joints: skeleton.joints.iter().map(|j| {
                Joint {
                    name: j.name.clone(),
                    parent_index: j.parent_index,
                    inverse_bind_pose: j.inverse_bind_pose,
                }
            }).collect()
        }
    }
    /// Debug-draws the skeleton: bones between joints, per-joint axes, and
    /// (optionally) joint-name labels. `global_poses` must hold one
    /// model-space pose per joint, indexed like `self.joints`.
    pub fn draw<R: gfx::Resources, T: Transform>(&self, global_poses: &[T], debug_renderer: &mut gfx_debug_draw::DebugRenderer<R>, draw_labels: bool) {
        for (joint_index, joint) in self.joints.iter().enumerate() {
            // Joint origin and a point one unit along its local y-axis.
            let joint_position = global_poses[joint_index].transform_vector([0.0, 0.0, 0.0]);
            let leaf_end = global_poses[joint_index].transform_vector([0.0, 1.0, 0.0]);
            if !joint.is_root() {
                let parent_position = global_poses[joint.parent_index as usize].transform_vector([0.0, 0.0, 0.0]);
                // Draw bone (between joint and parent joint)
                debug_renderer.draw_line(
                    [parent_position[0], parent_position[1], parent_position[2]],
                    [joint_position[0], joint_position[1], joint_position[2]],
                    [0.2, 0.2, 0.2, 1.0]
                );
                // Leaf joints (no children) get a short extension so they
                // remain visible.
                if !self.joints.iter().any(|j| j.parent_index as usize == joint_index) {
                    // Draw extension along joint's y-axis...
                    debug_renderer.draw_line(
                        [joint_position[0], joint_position[1], joint_position[2]],
                        [leaf_end[0], leaf_end[1], leaf_end[2]],
                        [0.2, 0.2, 0.2, 1.0]
                    );
                }
            }
            if draw_labels {
                // Label joint
                debug_renderer.draw_text_at_position(
                    &joint.name[..],
                    [leaf_end[0], leaf_end[1], leaf_end[2]],
                    [1.0, 1.0, 1.0, 1.0]
                );
            }
            // Draw joint-relative axes (x red, y green, z blue).
            let p_x_axis = global_poses[joint_index].transform_vector([1.0, 0.0, 0.0]);
            let p_y_axis = global_poses[joint_index].transform_vector([0.0, 1.0, 0.0]);
            let p_z_axis = global_poses[joint_index].transform_vector([0.0, 0.0, 1.0]);
            debug_renderer.draw_line(
                [joint_position[0], joint_position[1], joint_position[2]],
                [p_x_axis[0], p_x_axis[1], p_x_axis[2]],
                [1.0, 0.2, 0.2, 1.0]
            );
            debug_renderer.draw_line(
                [joint_position[0], joint_position[1], joint_position[2]],
                [p_y_axis[0], p_y_axis[1], p_y_axis[2]],
                [0.2, 1.0, 0.2, 1.0]
            );
            debug_renderer.draw_line(
                [joint_position[0], joint_position[1], joint_position[2]],
                [p_z_axis[0], p_z_axis[1], p_z_axis[2]],
                [0.2, 0.2, 1.0, 1.0]
            );
        }
    }
}
#[derive(Debug, Clone)]
pub struct Joint {
    ///
    /// Name of joint
    ///
    pub name: String,
    ///
    /// Index of parent joint in Skeleton's 'joints' vector
    /// (`ROOT_JOINT_PARENT_INDEX` for a root joint)
    ///
    pub parent_index: JointIndex,
    ///
    /// Matrix transforming vertex coordinates from model-space to joint-space
    /// Column-major.
    ///
    pub inverse_bind_pose: Matrix4<f32>,
}
impl Joint {
    /// True when this joint has no parent (its parent index is the
    /// `ROOT_JOINT_PARENT_INDEX` sentinel).
    pub fn is_root(&self) -> bool {
        self.parent_index == ROOT_JOINT_PARENT_INDEX
    }
}
| 30.984 | 151 | 0.518203 |
482e09f76e0d33ce5ef2aa4d749ca9bd678f7c3f | 804 | use std::future::{ready, Future};
use std::pin::Pin;
use futures::FutureExt;
use nix::libc::pid_t;
use nix::sys::signal::{self, Signal};
use nix::unistd::Pid;
use tokio::process::Child;
// Boxed, non-Send local future returned by `terminate`.
type Output<'a> = Pin<Box<dyn Future<Output = ()> + 'a>>;
/// Extension trait adding graceful termination to child processes.
pub trait TerminateExt {
    /// Requests termination and returns a future resolving once the
    /// process has exited (or immediately if it has no pid).
    fn terminate(&mut self) -> Output;
}
impl TerminateExt for Child {
    //noinspection RsUnresolvedReference
    /// Sends SIGTERM to the child, then awaits its exit.
    fn terminate(&mut self) -> Output {
        match self.id() {
            // No pid: the child has already been reaped, resolve immediately.
            None => ready(()).boxed_local(),
            Some(pid) => {
                // Best-effort signal; errors (e.g. ESRCH) are ignored.
                #[allow(clippy::cast_possible_wrap)]
                let _ = signal::kill(Pid::from_raw(pid as pid_t), Signal::SIGTERM);
                async {
                    drop(self.wait().await);
                }
                .boxed_local()
            }
        }
    }
}
| 25.125 | 83 | 0.538557 |
90adcbb3dec582ab0d6ff2b5d582fcceb19aea54 | 4,490 | use std::path::Path;
use std::sync::Arc;
use tempfile::NamedTempFile;
use tokio::sync::watch::{self, Sender};
use tokio::task::JoinHandle;
use wasm3::{Environment, Module};
use kubelet::container::Handle as ContainerHandle;
use kubelet::container::Status as ContainerStatus;
use kubelet::handle::StopHandler;
/// Handle to the blocking task executing the wasm module.
pub struct Runtime {
    handle: JoinHandle<anyhow::Result<()>>,
}
#[async_trait::async_trait]
impl StopHandler for Runtime {
    async fn stop(&mut self) -> anyhow::Result<()> {
        // No-op: wasm3 offers no way to interrupt a running module here;
        // callers must rely on `wait` for completion.
        Ok(())
    }
    async fn wait(&mut self) -> anyhow::Result<()> {
        // First `?` propagates a JoinError, second the task's own result.
        (&mut self.handle).await??;
        Ok(())
    }
}
/// A runtime context for running a wasm module with wasm3
pub struct Wasm3Runtime {
    // Raw bytes of the wasm module to execute.
    module_bytes: Vec<u8>,
    // wasm3 interpreter stack size, in bytes.
    stack_size: u32,
    // Temp file receiving the module's log output; shared with log handles.
    output: Arc<NamedTempFile>,
}
impl Wasm3Runtime {
    /// Creates a runtime for `module_bytes`, allocating a temp log file in
    /// `log_dir`. File creation is moved onto a blocking task because
    /// `NamedTempFile::new_in` does synchronous I/O.
    pub async fn new<L: AsRef<Path> + Send + Sync + 'static>(module_bytes: Vec<u8>, stack_size: u32, log_dir: L) -> anyhow::Result<Self> {
        let temp = tokio::task::spawn_blocking(move || -> anyhow::Result<NamedTempFile> {
            Ok(NamedTempFile::new_in(log_dir)?)
        })
        .await??;
        Ok(Self {
            module_bytes: module_bytes,
            stack_size: stack_size,
            output: Arc::new(temp),
        })
    }
    /// Spawns the module on a blocking task and returns a container handle
    /// pairing the running task with a log-handle factory.
    pub async fn start(&mut self) -> anyhow::Result<ContainerHandle<Runtime, LogHandleFactory>> {
        let temp = self.output.clone();
        // Reopen the temp file for writing from a blocking task (sync I/O).
        let output_write = tokio::task::spawn_blocking(move || -> anyhow::Result<std::fs::File> {
            Ok(temp.reopen()?)
        })
        .await??;
        // Status channel starts in Waiting; `spawn_wasm3` updates it.
        let (status_sender, status_recv) = watch::channel(ContainerStatus::Waiting {
            timestamp: chrono::Utc::now(),
            message: "No status has been received from the process".into(),
        });
        let handle = spawn_wasm3(self.module_bytes.clone(), self.stack_size, status_sender, output_write).await?;
        let log_handle_factory = LogHandleFactory {
            temp: self.output.clone(),
        };
        Ok(ContainerHandle::new(
            Runtime{handle},
            log_handle_factory,
        ))
    }
}
/// Holds our tempfile handle.
pub struct LogHandleFactory {
    // Shared with `Wasm3Runtime::output`; each handle reopens this file.
    temp: Arc<NamedTempFile>,
}
impl kubelet::log::HandleFactory<tokio::fs::File> for LogHandleFactory {
    /// Creates `tokio::fs::File` on demand for log reading.
    fn new_handle(&self) -> tokio::fs::File {
        tokio::fs::File::from_std(self.temp.reopen().unwrap())
    }
}
// Spawns a running wasm3 instance with the given module bytes and status
// channel. wasm3's types are not Send, so all of the logic runs inside the
// spawned blocking task.
//
// Fixes relative to the previous revision: the environment-creation error arm
// referenced undefined identifiers (`send`, `name`, `Status`, `cx`) and the
// `match` expression was missing its terminating semicolon, so the function
// did not compile. The error is now reported through the status channel and
// returned as an `anyhow` error.
async fn spawn_wasm3(
    module_bytes: Vec<u8>,
    stack_size: u32,
    status_sender: Sender<ContainerStatus>,
    _output_write: std::fs::File, //TODO: hook this up such that log output will be written to the file
) -> anyhow::Result<JoinHandle<anyhow::Result<()>>> {
    let handle = tokio::task::spawn_blocking(move || -> anyhow::Result<_> {
        let env = match Environment::new() {
            Ok(env) => env,
            Err(e) => {
                // Surface the failure to watchers before failing the task.
                let message = "cannot create environment";
                status_sender
                    .broadcast(ContainerStatus::Terminated {
                        failed: true,
                        message: message.into(),
                        timestamp: chrono::Utc::now(),
                    })
                    .expect("status should be able to send");
                return Err(anyhow::anyhow!("{}: {:?}", message, e));
            }
        };
        let rt = env.create_runtime(stack_size).expect("cannot create runtime");
        let module = Module::parse(&env, &module_bytes).expect("cannot parse module");
        let mut module = rt.load_module(module).expect("cannot load module");
        module.link_wasi().expect("cannot link WASI");
        // WASI entry point by convention.
        let func = module.find_function::<(), ()>("_start").expect("cannot find function '_start' in module");
        func.call().expect("cannot call '_start' in module");
        status_sender.broadcast(ContainerStatus::Terminated {
            failed: false,
            message: "Module run completed".into(),
            timestamp: chrono::Utc::now(),
        }).expect("status should be able to send");
        Ok(())
    });
    Ok(handle)
}
| 31.619718 | 138 | 0.582405 |
56967422a1e535b422361f6dfb77d8f7913155a7 | 774 | //! Consensus constants
/// Maximum allowed drift of a block timestamp into the future, in seconds.
pub const BLOCK_MAX_FUTURE: i64 = 2 * 60 * 60; // 2 hours
/// Number of confirmations before coinbase outputs may be spent.
/// (Previous comment "2 hours" was a copy-paste error.)
pub const COINBASE_MATURITY: u32 = 100;
/// Minimum / maximum allowed coinbase script size, in bytes.
pub const MIN_COINBASE_SIZE: usize = 2;
pub const MAX_COINBASE_SIZE: usize = 100;
/// Difficulty retarget is clamped to a factor-of-4 change per adjustment.
pub const RETARGETING_FACTOR: u32 = 4;
/// Target spacing between blocks: 10 minutes.
pub const TARGET_SPACING_SECONDS: u32 = 10 * 60;
pub const DOUBLE_SPACING_SECONDS: u32 = 2 * TARGET_SPACING_SECONDS;
/// Target retarget window: two weeks.
pub const TARGET_TIMESPAN_SECONDS: u32 = 2 * 7 * 24 * 60 * 60;
// The upper and lower bounds for retargeting timespan
pub const MIN_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS / RETARGETING_FACTOR;
pub const MAX_TIMESPAN: u32 = TARGET_TIMESPAN_SECONDS * RETARGETING_FACTOR;
// Target number of blocks, 2 weaks, 2016
pub const RETARGETING_INTERVAL: u32 = TARGET_TIMESPAN_SECONDS / TARGET_SPACING_SECONDS;
| 40.736842 | 87 | 0.773902 |
755d9887cc7f9b88e6daf0e8b202cc3018ab9830 | 1,521 | use syntax::{
optimizer::fold_bf_code,
parser::{bf_code_to_ast, Error},
shared::Bop,
};
mod syntax;
mod target;
/// Target language for the Brainfuck translator.
#[derive(Copy, Clone)]
enum Language {
    /// Pretty-printed Brainfuck itself.
    BrainFuck,
    C,
    Lua,
    Python,
}
/// Parses Brainfuck source into an AST, mapping parser errors to
/// human-readable messages with the byte index of the offending character.
fn make_ast(source: &str) -> Result<Vec<Bop>, String> {
    bf_code_to_ast(source).map_err(|e| match e {
        Error::NotEof(n) => {
            format!("`eof` expected at index {}", n)
        }
        Error::NotClosed(n) => {
            format!("expected `]` to close `[` at index {}", n)
        }
        // Other parser error variants are not produced by `bf_code_to_ast`
        // here — presumably an invariant of the parser; confirm if `Error`
        // gains variants.
        _ => unreachable!(),
    })
}
/// Reads the Brainfuck file `name`, parses (and optionally constant-folds)
/// it, and renders it in the requested target language.
///
/// Panics if the file cannot be read or the source fails to parse.
fn translate_to_bf(name: &str, is_opt: bool, lang: Language) -> String {
    let source = std::fs::read_to_string(name).unwrap();
    let mut ast = make_ast(source.as_str()).expect("Failure to translate");
    if is_opt {
        // Merge adjacent +/-/</> runs into single folded operations.
        fold_bf_code(ast.as_mut());
    }
    match lang {
        Language::BrainFuck => target::brainfuck::from_ast(ast.as_ref()),
        Language::C => target::c::from_ast(ast.as_ref()),
        Language::Lua => target::lua::from_ast(ast.as_ref()),
        Language::Python => target::python::from_ast(ast.as_ref()),
    }
}
/// CLI entry point. Arguments are processed strictly in order: flags change
/// the settings for every *subsequent* file argument, and each non-flag
/// argument is immediately translated and printed to stdout.
/// Defaults: optimization on, Lua output.
fn main() {
    let mut is_opt = true;
    let mut lang = Language::Lua;
    for arg in std::env::args().skip(1) {
        match arg.as_str() {
            "--brainfuck" => {
                lang = Language::BrainFuck;
            }
            "--c-lang" => {
                lang = Language::C;
            }
            "--lua" => {
                lang = Language::Lua;
            }
            "--python" => {
                lang = Language::Python;
            }
            "--opt" | "-O" => {
                is_opt = true;
            }
            "--no-opt" | "-N" => {
                is_opt = false;
            }
            // Anything else is treated as an input file path.
            name => {
                let bf = translate_to_bf(name, is_opt, lang);
                print!("{}", bf);
            }
        }
    }
}
| 19.5 | 72 | 0.579224 |
ef65abbfd7424fa4b371f7f30a70cff5469b5230 | 12,490 | use std::env;
use std::error::Error;
use std::ffi::OsStr;
use std::fs::{File, Metadata};
use std::io;
use std::io::{BufWriter, Write};
use std::os::windows::prelude::OsStrExt;
// use std::os::unix::fs::MetadataExt;
use std::path::PathBuf;
use std::string::FromUtf16Error;
use clap::{ArgEnum, Parser};
use chrono::{DateTime, Local};
use jwalk::WalkDir;
use path_clean::PathClean;
use serde::Serialize;
use windows::{
core::{PCWSTR, PWSTR},
Win32::{
Foundation::{GetLastError, ERROR_SUCCESS, HANDLE, PSID},
Security::{
Authorization::{GetSecurityInfo, SE_FILE_OBJECT},
LookupAccountSidW, SidTypeUnknown, OWNER_SECURITY_INFORMATION, SECURITY_DESCRIPTOR,
SID_NAME_USE,
},
Storage::FileSystem::{
CreateFileW, FILE_ATTRIBUTE_NORMAL, FILE_GENERIC_READ, FILE_SHARE_READ, OPEN_EXISTING,
},
},
};
// clap-derived CLI definition; the doc-style `///` comments double as help
// text, so they are part of the user interface and kept verbatim.
#[derive(Parser, Debug)]
#[clap(
    about = "(d)irectory + rec(urse) => recursively acquire file metadata within a directory",
    long_about = None
)]
pub struct Args {
    /// Directory to begin recursive walk, begin in current directory if no value provided
    #[clap()]
    pub path: Option<PathBuf>,
    /// Path to file to where metadata will be written{n}Results written to stdout if not provided
    #[clap(long, short, parse(from_os_str))]
    pub file_name: Option<PathBuf>,
    /// Output file type
    #[clap(arg_enum, long, short, default_value = "csv", ignore_case = true)]
    pub out_type: OutType,
}
// Supported output formats for the metadata report.
// Lowercase variant names are intentional (they become the CLI values, e.g.
// `--out-type csv`), hence the allow(non_camel_case_types).
#[derive(ArgEnum, Clone, Debug)]
#[allow(non_camel_case_types)]
pub enum OutType {
    // RFC-4180-style CSV with all fields quoted (see RecordSet::write).
    csv,
    // Newline-delimited JSON: one serialized Record per line.
    ndjson
}
/// One row of metadata describing a single file discovered during the walk.
///
/// Serialized with PascalCase keys (e.g. `FullName`, `SizeKb`) for both the
/// CSV header and the ndjson output.
#[derive(Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct Record<'a> {
    /// Timestamp of the walk; shared by every record of one run (borrowed, hence `'a`).
    pub run_date: &'a str,
    /// Full path as walked (lossy UTF-8 conversion).
    pub full_name: String,
    /// Final path component, including any extension.
    pub name: String,
    /// File stem (name without the final extension).
    pub base_name: String,
    pub is_directory: bool,
    /// Extension without the leading dot; empty if none.
    pub extension: String,
    /// Parent directory of the file; empty if none.
    pub directory_name: String,
    pub creation_time: DateTime<Local>,
    pub last_access_time: DateTime<Local>,
    pub last_modified_time: DateTime<Local>,
    /// Owning account name on Windows; empty string on other platforms.
    pub owner: String,
    /// Size in bytes.
    pub size: u64,
    // The *_kb/_mb/_gb/_tb fields are binary (IEC) units — kibi/mebi/gibi/tebi —
    // despite the decimal-sounding names (see get_metadata).
    pub size_kb: f64,
    pub size_mb: f64,
    pub size_gb: f64,
    pub size_tb: f64,
}
/// Accumulates Records during a walk and knows how/where to write them out.
#[derive(Debug)]
struct RecordSet<'a> {
    /// Destination file; `None` means write to stdout.
    file_name: Option<PathBuf>,
    /// Output format (csv or ndjson).
    out_type: OutType,
    /// The collected rows, in walk order.
    set: Vec<Record<'a>>,
}
impl RecordSet<'_> {
    /// Create an empty record set targeting `file_name` (or stdout when `None`)
    /// in the given output format.
    fn new(file_name: Option<PathBuf>, out_type: OutType) -> Self {
        Self {
            file_name,
            out_type,
            set: Vec::with_capacity(20),
        }
    }
    /// Write every collected record to the configured destination.
    ///
    /// CSV output quotes all fields; ndjson writes one JSON object per line.
    /// All writers are flushed explicitly: relying on `Drop` would silently
    /// swallow flush errors.
    ///
    /// # Errors
    /// Returns any I/O or serialization error encountered while writing.
    fn write(&self) -> Result<(), Box<dyn Error>> {
        match (&self.file_name, &self.out_type) {
            (Some(file_name), OutType::csv) => {
                let mut wtr = csv::WriterBuilder::new()
                    .quote_style(csv::QuoteStyle::Always)
                    .from_path(file_name)?;
                for r in &self.set {
                    wtr.serialize(r)?;
                }
                wtr.flush()?;
                Ok(())
            }
            (None, OutType::csv) => {
                let mut wtr = csv::WriterBuilder::new()
                    .quote_style(csv::QuoteStyle::Always)
                    .from_writer(io::stdout());
                for r in &self.set {
                    wtr.serialize(r)?;
                }
                // Fix: this branch previously returned without flushing, unlike
                // every other branch; csv::Writer buffers internally and Drop
                // ignores flush failures, so flush explicitly here too.
                wtr.flush()?;
                Ok(())
            }
            (Some(file_name), OutType::ndjson) => {
                let f = File::create(file_name)?;
                let mut wtr = BufWriter::new(f);
                for r in &self.set {
                    serde_json::to_writer(&mut wtr, r)?;
                    wtr.write_all(b"\n")?;
                }
                wtr.flush()?;
                Ok(())
            }
            (None, OutType::ndjson) => {
                let mut wtr = BufWriter::new(io::stdout());
                for r in &self.set {
                    serde_json::to_writer(&mut wtr, r)?;
                    wtr.write_all(b"\n")?;
                }
                wtr.flush()?;
                Ok(())
            }
        }
    }
}
/// A NUL-terminated UTF-16 (wide) string buffer for Win32 FFI calls.
/// Wraps the raw `Vec<u16>`; conversion helpers live in the impl below.
#[derive(Debug)]
struct WideString(Vec<u16>);
impl WideString {
    /// Borrow the buffer as a `*const u16` (suitable for `PCWSTR` parameters).
    fn as_const_ptr(&self) -> *const u16 {
        self.0.as_ptr()
    }
    /// Borrow the buffer as a `*mut u16` (suitable for `PWSTR` out-parameters).
    ///
    /// NOTE(review): this hands out a mutable pointer from a shared borrow;
    /// it is only sound if the Win32 call writing through it has exclusive
    /// access at that moment — confirm the call sites uphold this.
    fn as_ptr(&self) -> *mut u16 {
        self.0.as_ptr() as *mut u16
    }
    /// Encode an `OsStr` as UTF-16 and append the terminating NUL (Windows only).
    fn from_os_str(s: &OsStr) -> Self {
        Self(s.encode_wide().chain(std::iter::once(0)).collect())
    }
    /// Encode a UTF-8 `&str` as UTF-16 and append the terminating NUL.
    #[allow(dead_code)]
    fn from_str(s: &str) -> Self {
        Self(s.encode_utf16().chain(std::iter::once(0)).collect())
    }
    /// Allocate a zero-filled buffer of `capacity` UTF-16 code units, used as
    /// an out-buffer for Win32 calls that fill it in.
    fn new(capacity: usize) -> Self {
        Self(vec![0; capacity])
    }
    /// Decode the buffer contents up to (but excluding) the first NUL.
    ///
    /// Fix: the previous implementation sliced off exactly the last element
    /// (`&v[..v.len() - 1]`), which panics on an empty buffer (usize
    /// underflow in the slice bound) and mis-decodes buffers whose string is
    /// shorter than the allocation. Scanning for the first NUL handles both.
    #[allow(dead_code)]
    fn to_string(&self) -> Result<String, FromUtf16Error> {
        let v = &self.0;
        let end = v.iter().position(|&u| u == 0).unwrap_or(v.len());
        String::from_utf16(&v[..end])
    }
}
// Resolve the owning account name of the file at `path` via the Win32
// security APIs: open a handle, query the owner SID with GetSecurityInfo,
// then translate the SID to a name with the standard two-call
// LookupAccountSidW protocol (first call sized 0 to learn the required
// buffer lengths, second call with correctly sized buffers).
//
// NOTE(review): the file handle obtained from CreateFileW is never closed
// (no CloseHandle) — confirm whether this leak matters for long walks.
// NOTE(review): failures panic rather than returning Err, despite the
// Result signature — consider propagating errors instead.
#[cfg(target_os = "windows")]
fn get_file_owner(path: &PathBuf) -> Result<String, Box<dyn Error>> {
    let path_as_wstring = WideString::from_os_str(path.as_os_str());
    let path_as_wstring_ptr = path_as_wstring.as_const_ptr();
    let path_as_pcwstr = PCWSTR(path_as_wstring_ptr);
    // File handle
    let handle: HANDLE = unsafe {
        CreateFileW(
            path_as_pcwstr,
            FILE_GENERIC_READ,
            FILE_SHARE_READ,
            std::ptr::null_mut(),
            OPEN_EXISTING,
            FILE_ATTRIBUTE_NORMAL,
            None,
        )
    };
    if let Err(e) = handle.ok() {
        panic!("Error with {:#?}: {:#?}", path_as_pcwstr, e);
    }
    // Security Info: ask only for the owner SID; the other out-params
    // (group, DACL, SACL) are passed as null because we don't need them.
    let mut psidowner = PSID::default();
    let mut sd: *mut SECURITY_DESCRIPTOR =
        &mut SECURITY_DESCRIPTOR::default() as *mut SECURITY_DESCRIPTOR;
    let gsi_rc = unsafe {
        GetSecurityInfo(
            handle,
            SE_FILE_OBJECT,
            OWNER_SECURITY_INFORMATION.0,
            &mut psidowner,
            std::ptr::null_mut(),
            std::ptr::null_mut(),
            std::ptr::null_mut(),
            &mut sd,
        )
    };
    if gsi_rc != ERROR_SUCCESS.0 {
        let last_error = unsafe { GetLastError() };
        panic!("Error code is {:#?}", last_error);
    }
    // Lookup Account Sid: first call with zero-sized buffers so the API
    // reports the required name/domain lengths via the &mut size params.
    let mut name_size = 0 as u32;
    let mut domain_size = 0 as u32;
    let name_as_wstring = WideString::new(name_size as usize);
    let name_as_wstring_ptr = name_as_wstring.as_ptr();
    let name_as_pwstr = PWSTR(name_as_wstring_ptr);
    let domain_as_wstring = WideString::new(domain_size as usize);
    let domain_as_wstring_ptr = domain_as_wstring.as_ptr();
    let domain_as_pwstr = PWSTR(domain_as_wstring_ptr);
    let euse = &mut SidTypeUnknown.to_owned() as *mut SID_NAME_USE;
    // Call to get size of name_size and domain_size
    let las_rc = unsafe {
        LookupAccountSidW(
            None,
            psidowner,
            name_as_pwstr,
            &mut name_size,
            domain_as_pwstr,
            &mut domain_size,
            euse,
        )
    };
    // The sizing call is *expected* to fail (insufficient buffer); success
    // here would mean the protocol assumption is broken.
    if las_rc.0 != 0 {
        panic!("Expecting an error when calling LookupAccountSidW initially");
    }
    // Call again, this time with appropriately sized buffers
    let name_as_wstring = WideString::new(name_size as usize);
    let name_as_wstring_ptr = name_as_wstring.as_ptr();
    let name_as_pwstr = PWSTR(name_as_wstring_ptr);
    let domain_as_wstring = WideString::new(domain_size as usize);
    let domain_as_wstring_ptr = domain_as_wstring.as_ptr();
    let domain_as_pwstr = PWSTR(domain_as_wstring_ptr);
    let las_rc = unsafe {
        LookupAccountSidW(
            None,
            psidowner,
            name_as_pwstr,
            &mut name_size,
            domain_as_pwstr,
            &mut domain_size,
            euse,
        )
    };
    if las_rc.0 == 0 {
        let last_error = unsafe { GetLastError() };
        panic!("Error code is {:#?}", last_error);
    }
    // Decode the account name (domain is looked up but intentionally unused).
    let owner = name_as_wstring.to_string()?;
    Ok(owner)
}
/// Entry point for the walk: validates the arguments, then recursively
/// collects file metadata under `args.path` (or the current directory) and
/// writes it to `args.file_name` (or stdout) in the selected format.
///
/// Fields emitted per file: RunDate, FullName, Name, IsDirectory, BaseName,
/// Extension, DirectoryName, CreationTime, LastAccessTime, LastWriteTime,
/// Owner, and Size in bytes plus binary KB/MB/GB/TB (see `get_metadata`).
///
/// # Errors
/// Returns an error when the output file's parent directory does not exist,
/// when `<path>` is not a directory, or on any I/O failure during the walk.
pub fn run(args: Args) -> Result<(), Box<dyn Error>> {
    // Process args. Fix: the previous `args.path.unwrap_or(std::env::current_dir()?)`
    // queried the current directory eagerly even when a path was supplied;
    // a match defers that call to the `None` case only.
    let path = match args.path {
        Some(p) => p,
        None => std::env::current_dir()?,
    };
    // Validate file_name. Fix: was `.unwrap()` on the Result, which would
    // panic instead of propagating the error through our own Result.
    match file_name_valid(&args.file_name)? {
        (true, _) => (),
        (false, parent) => {
            return Err(From::from(format!(
                "The parent directory of the value of the parameter --file-name ({}) does not exist",
                parent
            )))
        }
    };
    if path.is_dir() {
        walk_dir(&path, args.file_name, args.out_type)?;
    } else {
        return Err(From::from(
            "The provided value of the argument <path> was not a directory",
        ));
    }
    Ok(())
}
/// Check whether the parent directory of an (optional) output path exists.
///
/// Returns `(true, "")` when no path was supplied. Otherwise the path is
/// absolutized against the current directory and normalized, and the result
/// is `(parent_exists, parent_as_string)` — or `(false, "no_parent")` when
/// the cleaned path has no parent component at all.
fn file_name_valid(f: &Option<PathBuf>) -> Result<(bool, String), Box<dyn Error>> {
    // Nothing to validate when no output file was requested.
    let p = match f {
        None => return Ok((true, String::from(""))),
        Some(p) => p,
    };
    // Absolutize relative paths, then normalize away `.`/`..` segments.
    // https://stackoverflow.com/a/54817755
    let abs_path = if p.is_absolute() {
        p.to_path_buf()
    } else {
        env::current_dir()?.join(p)
    }
    .clean();
    let result = match abs_path.parent() {
        Some(parent) => (parent.exists(), parent.to_string_lossy().into_owned()),
        None => (false, String::from("no_parent")),
    };
    Ok(result)
}
/// Build one `Record` of filesystem metadata for `path`.
///
/// `md` must be the metadata previously obtained for `path`; `run_date` is
/// the shared timestamp of the current walk so every record of one run
/// carries the same value (borrowed, hence the `'a` on the Record).
///
/// # Errors
/// Propagates failures from the platform timestamp accessors and (on
/// Windows) from the owner lookup.
fn get_metadata<'a>(
    path: &PathBuf,
    md: &Metadata,
    run_date: &'a str,
) -> Result<Record<'a>, Box<dyn Error>> {
    let full_name = path.to_string_lossy().into_owned();
    let name = path
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or_default()
        .to_owned();
    let base_name = path
        .file_stem()
        .and_then(|b| b.to_str())
        .unwrap_or_default()
        .to_owned();
    let is_directory = path.is_dir();
    let extension = path
        .extension()
        .and_then(|e| e.to_str())
        .unwrap_or_default()
        .to_owned();
    let directory_name = path
        .parent()
        .map(|d| d.to_string_lossy().into_owned())
        .unwrap_or_default();
    // Linux => btime field of statx
    // Unix => birthtime field of stat
    // Windows => ftCreationTime
    let creation_time: DateTime<Local> = DateTime::from(md.created()?);
    // Unix => atime field of stat
    // Windows => ftAccessTime field
    let last_access_time: DateTime<Local> = DateTime::from(md.accessed()?);
    // Unix => mtime field of stat
    // Windows => ftLastWriteTime field
    let last_modified_time: DateTime<Local> = DateTime::from(md.modified()?);
    // Owner resolution is Windows-only; other platforms get an empty string.
    // NOTE(review): `cfg!` compiles both branches, and get_file_owner is
    // behind #[cfg(target_os = "windows")], so this file only builds on
    // Windows as written — confirm intended targets.
    let owner = if cfg!(windows) {
        get_file_owner(path)?
    } else {
        String::from("")
    };
    // Hoist the length: query md.len() once and reuse it for every unit
    // (the previous version called it five times).
    let size = md.len();
    let len = size as f64;
    // These are binary (IEC) units — kibi/mebi/gibi/tebi-bytes — despite the
    // decimal-sounding field names.
    let size_kb = len / 1024_f64.powi(1);
    let size_mb = len / 1024_f64.powi(2);
    let size_gb = len / 1024_f64.powi(3);
    let size_tb = len / 1024_f64.powi(4);
    Ok(Record {
        run_date,
        full_name,
        name,
        base_name,
        is_directory,
        extension,
        directory_name,
        creation_time,
        last_access_time,
        last_modified_time,
        owner,
        size,
        size_kb,
        size_mb,
        size_gb,
        size_tb,
    })
}
/// Recursively walk `dir` (hidden entries included), collect a metadata
/// Record for every regular file, and write the full set to the configured
/// destination/format via `RecordSet::write`.
fn walk_dir(
    dir: &PathBuf,
    file_name: Option<PathBuf>,
    out_type: OutType,
) -> Result<(), Box<dyn Error>> {
    // One timestamp for the whole run; every record borrows it.
    let run_date = Local::now().to_string();
    let mut record_set = RecordSet::new(file_name, out_type);
    for dir_entry in WalkDir::new(dir).skip_hidden(false) {
        let dir_entry = dir_entry?;
        let entry_path = dir_entry.path();
        // Directories and other non-file entries are skipped.
        if !entry_path.is_file() {
            continue;
        }
        let metadata = dir_entry.metadata()?;
        record_set
            .set
            .push(get_metadata(&entry_path, &metadata, &run_date)?);
    }
    record_set.write()?;
    Ok(())
}
| 27.941834 | 98 | 0.553963 |
9c4f617c07a3af4a46866b736838c06b610bfc3e | 17,562 | use super::errors::{ErrorKind, Result};
use super::system::InfallibleSystem;
use failchain::bail;
use idcontain::{Id, IdSlab, OptionId};
use log::{debug, error};
use std::fmt::Write;
use std::mem;
/// Stable handle to an `Entity` stored in the slab.
pub type EntityId = Id<Entity>;
/// Forest of entities with parent/child/sibling links, plus a lazy-removal
/// queue that is drained by `update()` (see the InfallibleSystem impl).
pub struct Entities {
    /// Backing storage; ids stay valid until the entity is removed.
    slab: IdSlab<Entity>,
    /// Head of the intrusive linked list of root entities.
    first_root: OptionId<Entity>,
    /// Ids queued for removal since the last `update()` (may contain
    /// duplicates and already-dead ids; deduped during collection).
    removed: Vec<EntityId>,
    /// Ids actually removed by the most recent `update()`.
    last_removed: Vec<EntityId>,
}
impl Entities {
    /// Number of live entities in the slab.
    #[inline]
    pub fn len(&self) -> usize {
        self.slab.len()
    }
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.slab.len() == 0
    }
    /// Whether `id` still refers to an entity (removed ids return false).
    #[inline]
    pub fn contains(&self, id: EntityId) -> bool {
        self.slab.contains(id)
    }
    /// Add a new root entity, pushing it onto the front of the root list.
    #[inline]
    pub fn add_root(&mut self, name: &'static str) -> EntityId {
        let Entities {
            ref mut slab,
            ref mut first_root,
            ..
        } = *self;
        debug!("Adding root {:?}...", name);
        // New root points at the old head; it becomes the new head below.
        let new_id = slab.insert(Entity {
            name,
            parent: OptionId::none(),
            child: OptionId::none(),
            next: *first_root,
            previous: OptionId::none(),
            liveness: Liveness::Alive,
        });
        debug!("New root id {:?} for {:?}.", new_id, name);
        let old_first_root: Option<EntityId> =
            mem::replace(first_root, OptionId::some(new_id)).into();
        // Keep the doubly-linked sibling list consistent.
        if let Some(old_first_root) = old_first_root {
            let old_entity = &mut slab[old_first_root];
            debug!(
                "Patched previous of root {:?} {:?} to {:?}...",
                old_entity.name, old_first_root, new_id
            );
            old_entity.previous = OptionId::some(new_id);
        }
        debug!("Added root {:?} {:?}...", name, new_id);
        new_id
    }
    /// Add a child entity under `parent`, pushing it onto the front of the
    /// parent's child list.
    ///
    /// If the parent is already dead (pending removal), the child is created
    /// dead (`DeadDueToParent`). Fails with `NoSuchEntity` when the parent id
    /// no longer exists (the speculatively inserted child is rolled back).
    #[inline]
    pub fn add(&mut self, parent: EntityId, name: &'static str) -> Result<EntityId> {
        debug!("Adding entity {:?} as child of {:?}...", name, parent);
        let Entities {
            ref mut slab,
            ref mut removed,
            ..
        } = *self;
        // Insert first so we have an id to link; removed again if parent is gone.
        let new_id = slab.insert(Entity {
            name,
            parent: OptionId::some(parent),
            child: OptionId::none(),
            next: OptionId::none(),
            previous: OptionId::none(),
            liveness: Liveness::Alive,
        });
        let (parent_exists, parent_dead, old_child) = if let Some(parent) = slab.get_mut(parent) {
            (
                true,
                !parent.liveness.is_alive(),
                mem::replace(&mut parent.child, OptionId::some(new_id)),
            )
        } else {
            (false, false, OptionId::none())
        };
        if !parent_exists {
            slab.remove(new_id);
            bail!(ErrorKind::NoSuchEntity {
                context: "add",
                needed_by: Some(name),
                id: parent.cast(),
            });
        }
        if let Some(old_child) = old_child.into() {
            debug!("Old child {:?}", old_child);
            let new = &mut slab[new_id];
            new.next = OptionId::some(old_child);
            if parent_dead {
                debug!("Parent already dead, setting liveness appropriately.");
                new.liveness = Liveness::DeadDueToParent;
                removed.push(new_id);
            }
            slab[old_child].previous = OptionId::some(new_id);
        } else if parent_dead {
            // NOTE(review): unlike the branch above, this path marks the new
            // entity DeadDueToParent but does not push it onto `removed` —
            // verify whether collection via the parent's child pointer covers it.
            debug!("No previous child, but parent is already dead.");
            slab[new_id].liveness = Liveness::DeadDueToParent;
        } else {
            debug!("No previous child.");
        }
        debug!(
            "Added entity {:?} {:?} as child of {:?}...",
            name, new_id, parent
        );
        Ok(new_id)
    }
    /// Queue `id` (and, transitively, its subtree) for removal; the actual
    /// removal happens on the next `update()`.
    pub fn remove(&mut self, id: EntityId) {
        debug!("Lazily removed entity {:?}...", id);
        self.removed.push(id);
    }
    /// Ids removed by the most recent `update()` pass.
    #[inline]
    pub fn last_removed(&self) -> &[EntityId] {
        &self.last_removed
    }
    #[inline]
    pub fn get(&self, id: EntityId) -> Option<&Entity> {
        self.slab.get(id)
    }
    /// Debug name of the entity behind `id`, if it still exists.
    #[inline]
    pub fn debug_name_of(&self, id: EntityId) -> Option<&'static str> {
        self.slab.get(id).map(|entity| entity.name)
    }
    /// Render the whole forest as an indented ASCII tree (for logs/errors).
    /// `indent` is the number of leading spaces on every line.
    pub fn debug_tree_dump(&self, indent: usize) -> String {
        let mut output = "Entity tree dump:\n".to_owned();
        let mut stack = Vec::new();
        stack.push((
            0,
            if let Some(root) = self.first_root.into_option() {
                root
            } else {
                return output;
            },
        ));
        // Depth-first traversal; the child is pushed after the sibling so it
        // is popped (and printed) first.
        while let Some((depth, id)) = stack.pop() {
            for _ in 0..(indent + depth * 4) {
                output.push(' ');
            }
            if let Some(entity) = self.slab.get(id) {
                write!(&mut output, "|- {} ", entity.name).expect("string write fail");
                // Pad with dots so the ids line up in a column (at most col 60).
                let id_padding = {
                    let length = indent + depth * 4 + 3 + entity.name.len() + 4;
                    if length > 60 {
                        0
                    } else {
                        60 - length
                    }
                };
                for _ in 0..id_padding {
                    output.push('.');
                }
                writeln!(&mut output, " ({:?})", id).expect("string write fail");
                if let Some(next_id) = entity.next.into_option() {
                    stack.push((depth, next_id));
                }
                if let Some(child_id) = entity.child.into_option() {
                    stack.push((depth + 1, child_id));
                }
            } else {
                output.push_str("|- <missing>\n");
            };
        }
        output
    }
}
impl<'context> InfallibleSystem<'context> for Entities {
    type Dependencies = ();
    fn debug_name() -> &'static str {
        "entities"
    }
    fn create(_deps: ()) -> Self {
        Self {
            slab: IdSlab::with_capacity(1024),
            first_root: OptionId::none(),
            removed: Vec::with_capacity(1024),
            last_removed: Vec::with_capacity(1024),
        }
    }
    // Drain the lazy-removal queue: delete every explicitly removed entity
    // and, transitively, all of its descendants, recording everything that
    // actually died into `last_removed` and patching the sibling/parent
    // links of the survivors.
    // TODO(cristicbz): Split up into simpler, more self-documenting functions.
    #[cfg_attr(feature = "cargo-clippy", allow(clippy::cyclomatic_complexity))]
    fn update(&mut self, _dependencies: ()) {
        let Self {
            ref mut removed,
            ref mut last_removed,
            ref mut slab,
            ref mut first_root,
            ..
        } = *self;
        if removed.is_empty() {
            return;
        }
        let num_killed_in_removed = removed.len();
        debug!(
            "Collecting removed. Explictly removed {} ids.",
            num_killed_in_removed
        );
        // First iterate through the explictly killed deleted entities. As we go through them we
        //   (A) If the entity is marked as killed or orphan then skip.
        //   (B) Otherwise mark as killed (this will make sure we dedupe the killed entities).
        //   (C) Push entity to `last_removed`.
        //   (D) Push its first child to `removed`.
        last_removed.clear();
        for i_removed in 0..num_killed_in_removed {
            let removed_id = removed[i_removed];
            let &mut Entity {
                child,
                name,
                ref mut liveness,
                ..
            } = if let Some(entity) = slab.get_mut(removed_id) {
                entity
            } else {
                debug!("Skipping already removed {:?}.", removed_id);
                continue;
            };
            if !liveness.is_alive() {
                debug!(
                    "Explicitly removed {:?} ({:?}) was already processed.",
                    name, removed_id
                );
                continue;
            }
            *liveness = Liveness::Killed;
            last_removed.push(removed_id);
            if let Some(child) = child.into_option() {
                debug!(
                    "Adding child {:?} of {:?} ({:?}) to orphan queue.",
                    child, name, removed_id
                );
                removed.push(child);
            }
        }
        let num_killed_in_last_removed = last_removed.len();
        debug!(
            "Deduplicated explictly removed {} ids.",
            num_killed_in_last_removed
        );
        // Phase 2: everything past num_killed_in_removed is an orphan subtree
        // head. Each queued id is removed together with its entire sibling
        // chain (inner loop), pushing children as we go — a breadth-ish
        // traversal of the doomed forest.
        let mut i_removed = num_killed_in_removed;
        while i_removed < removed.len() {
            let mut removed_id = removed[i_removed];
            loop {
                let Entity {
                    liveness,
                    next,
                    child,
                    name,
                    ..
                } = slab.remove(removed_id).expect("missing removed child");
                debug!("Removed orphan entity {:?} {:?}.", name, removed_id);
                if liveness.is_alive() {
                    last_removed.push(removed_id);
                    if let Some(child) = child.into() {
                        debug!(
                            "Added child for {:?} ({:?}) to queue: {:?}",
                            name, removed_id, child
                        );
                        removed.push(child);
                    } else {
                        debug!(
                            "Entity {:?} ({:?}) was alive but had no children to remove.",
                            name, removed_id,
                        );
                    }
                } else {
                    debug!(
                        "Entity {:?} ({:?}) was already marked as {:?}, skipping.",
                        name, removed_id, liveness
                    );
                }
                if let Some(sibling) = next.into() {
                    debug!("Moving to sibling {:?}", sibling);
                    removed_id = sibling;
                } else {
                    debug!("No more siblings.");
                    break;
                }
            }
            i_removed += 1;
        }
        // Phase 3: physically remove the explicitly killed entities (the
        // orphan loop may have removed some already) and splice them out of
        // their sibling list / parent child pointer / root list.
        for &removed_id in &last_removed[..num_killed_in_last_removed] {
            let Entity {
                next,
                previous,
                parent,
                name,
                ..
            } = if let Some(entity) = slab.remove(removed_id) {
                entity
            } else {
                // Removed by the orphan processing loop.
                debug!("Skipped already removed {:?}.", removed_id);
                continue;
            };
            debug!("Removed killed {:?} ({:?})", name, removed_id);
            if let Some((next_id, next)) = next.into_option().map(|id| (id, &mut slab[id])) {
                debug!(
                    "Patched previous for {:?} ({:?}) to point to {:?}",
                    next.name, next_id, previous
                );
                next.previous = previous;
            }
            if let Some((previous_id, previous)) =
                previous.into_option().map(|id| (id, &mut slab[id]))
            {
                debug!(
                    "Patched next for {:?} ({:?}) to point to {:?}",
                    previous.name, previous_id, next
                );
                previous.next = next;
            }
            if let Some(parent_id) = parent.into_option() {
                let parent = &mut slab[parent_id];
                if parent.child.into_option().expect("parent has no children") == removed_id {
                    debug!("Patched child for {:?} to point to {:?}", parent.name, next);
                    parent.child = next;
                }
            } else if first_root.expect("no root") == removed_id {
                debug!("Patched first root to point to {:?}", next);
                *first_root = next
            }
        }
        debug!("Collected {:?} removed ids.", last_removed.len());
        removed.clear();
    }
    // Flush any pending removals when the system is torn down.
    fn teardown(&mut self, _deps: ()) {
        self.update(());
    }
    // Final flush; anything still alive afterwards is a leak and gets logged
    // with a full tree dump for diagnosis.
    fn destroy(mut self, _deps: ()) {
        self.update(());
        if !self.is_empty() {
            error!("Entities leaked. {}", self.debug_tree_dump(4));
        }
    }
}
/// Lifecycle state of an entity within the lazy-removal protocol:
/// `Alive` until queued, `Killed` once explicitly removed (dedupe marker),
/// `DeadDueToParent` when created under an already-dead parent.
#[derive(Eq, PartialEq, Debug)]
enum Liveness {
    Alive,
    Killed,
    DeadDueToParent,
}
impl Liveness {
    /// True only for `Alive`; both dead variants return false.
    fn is_alive(&self) -> bool {
        matches!(self, Liveness::Alive)
    }
}
/// A node in the entity forest. Tree structure is intrusive: each entity
/// stores its parent, its first child, and its previous/next siblings.
pub struct Entity {
    /// Static debug label used in logs and tree dumps.
    name: &'static str,
    /// `none` for root entities.
    parent: OptionId<Entity>,
    /// Head of this entity's child list (children are prepended).
    child: OptionId<Entity>,
    /// Next sibling in the doubly-linked sibling list.
    next: OptionId<Entity>,
    /// Previous sibling in the doubly-linked sibling list.
    previous: OptionId<Entity>,
    /// See `Liveness` — tracks the lazy-removal state.
    liveness: Liveness,
}
impl Entity {
    /// Parent id of this entity, or `None` for roots.
    #[inline]
    pub fn parent(&self) -> Option<EntityId> {
        self.parent.into_option()
    }
}
#[cfg(test)]
mod test {
    use super::super::system::InfallibleSystem;
    use super::{Entities, EntityId};
    use std::collections::HashSet;
    // Shared fixture. Shape of the forest built by Tree1::new:
    //   root_a ─ a1, a2 ─ a2x ─ (a2xa, a2xb)
    //                  └─ a2y
    //   root_b (no children)
    //   root_c ─ c1
    struct Tree1 {
        root_a: EntityId,
        root_b: EntityId,
        root_c: EntityId,
        a1: EntityId,
        a2: EntityId,
        a2x: EntityId,
        a2xa: EntityId,
        a2xb: EntityId,
        c1: EntityId,
        a2y: EntityId,
    }
    impl Tree1 {
        fn new(entities: &mut Entities) -> Self {
            let root_a = entities.add_root("root_a");
            let root_b = entities.add_root("root_b");
            let root_c = entities.add_root("root_c");
            let a1 = entities.add(root_a, "a1").unwrap();
            let a2 = entities.add(root_a, "a2").unwrap();
            let a2x = entities.add(a2, "a2x").unwrap();
            let a2xa = entities.add(a2x, "a2xa").unwrap();
            let a2xb = entities.add(a2x, "a2xb").unwrap();
            let c1 = entities.add(root_c, "c1").unwrap();
            let a2y = entities.add(a2, "a2y").unwrap();
            Self {
                root_a,
                root_b,
                root_c,
                a1,
                a2,
                a2x,
                a2xa,
                a2xb,
                c1,
                a2y,
            }
        }
    }
    // Assert that `last_removed` contains exactly `expected`, ignoring order
    // (removal traversal order is an implementation detail).
    fn check_removed(entities: &Entities, expected: &[EntityId]) {
        let actual: HashSet<EntityId> = entities
            .last_removed
            .iter()
            .cloned()
            .collect::<HashSet<_>>();
        assert!(
            expected.len() == actual.len() && expected.iter().all(|id| actual.contains(id)),
            "actual: {:?}\nexpected: {:?}",
            entities.removed,
            expected
        );
    }
    #[test]
    fn add_contains() {
        let mut entities = Entities::create(());
        let tree1 = Tree1::new(&mut entities);
        assert!(entities.contains(tree1.root_a));
        assert!(entities.contains(tree1.root_b));
        assert!(entities.contains(tree1.root_c));
        assert!(entities.contains(tree1.a1));
        assert!(entities.contains(tree1.a2));
        assert!(entities.contains(tree1.a2x));
        assert!(entities.contains(tree1.a2xa));
        assert!(entities.contains(tree1.a2xb));
        assert!(entities.contains(tree1.c1));
        assert!(entities.contains(tree1.a2y));
        assert_eq!(entities.len(), 10);
        assert_eq!(entities.removed.len(), 0);
    }
    // Removal is lazy: the id sits in `removed` until update() collects it.
    #[test]
    fn add_remove_single() {
        let mut entities = Entities::create(());
        let tree1 = Tree1::new(&mut entities);
        entities.remove(tree1.root_b);
        assert_eq!(&entities.removed, &[tree1.root_b]);
        entities.update(());
        assert_eq!(entities.last_removed, &[tree1.root_b]);
        assert_eq!(entities.removed.len(), 0);
        assert!(!entities.contains(tree1.root_b));
    }
    // Removing a parent removes its child too.
    #[test]
    fn add_remove_one_child() {
        let mut entities = Entities::create(());
        let tree1 = Tree1::new(&mut entities);
        entities.remove(tree1.root_c);
        entities.update(());
        check_removed(&entities, &[tree1.c1, tree1.root_c]);
        assert_eq!(entities.removed.len(), 0);
        assert!(!entities.contains(tree1.c1));
        assert!(!entities.contains(tree1.root_c));
    }
    // Removing an inner node removes its whole subtree but spares siblings
    // and ancestors.
    #[test]
    fn add_remove_one_subtree() {
        let mut entities = Entities::create(());
        let tree1 = Tree1::new(&mut entities);
        entities.remove(tree1.a2x);
        entities.update(());
        check_removed(&entities, &[tree1.a2xa, tree1.a2xb, tree1.a2x]);
        assert_eq!(entities.removed.len(), 0);
        assert!(!entities.contains(tree1.a2xa));
        assert!(!entities.contains(tree1.a2xb));
        assert!(!entities.contains(tree1.a2x));
        assert!(entities.contains(tree1.a2y));
        assert!(entities.contains(tree1.a2));
        assert!(entities.contains(tree1.root_a));
    }
    // Mixed scenario: staged removals across updates, plus adding a child
    // (c2) under a parent that is already queued for removal — c2 must be
    // collected along with the parent's subtree.
    #[test]
    fn add_remove_all() {
        let mut entities = Entities::create(());
        let tree1 = Tree1::new(&mut entities);
        entities.remove(tree1.a2y);
        entities.update(());
        assert_eq!(entities.last_removed, &[tree1.a2y]);
        assert_eq!(entities.removed.len(), 0);
        entities.remove(tree1.a2);
        entities.remove(tree1.root_a);
        entities.remove(tree1.root_c);
        let c2 = entities.add(tree1.root_c, "c2").unwrap();
        entities.update(());
        check_removed(
            &entities,
            &[
                tree1.a2xa,
                tree1.a2xb,
                tree1.a2x,
                tree1.a2,
                tree1.a1,
                tree1.root_a,
                tree1.c1,
                tree1.root_c,
                c2,
            ],
        );
        entities.remove(tree1.root_b);
        entities.update(());
        assert_eq!(entities.len(), 0);
    }
}
eb6069dd7d16851f3b2ff03b543b0bac92659351 | 17,560 | // Copyright (c) The Dijets Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Remotely authenticated vs. unauthenticated network end-points:
//! ---------------------------------------------------
//! A network end-point operates with remote authentication if it only accepts connections
//! from a known set of peers (`trusted_peers`) identified by their network identity keys.
//! This does not mean that the other end-point of a connection also needs to operate with
//! authentication -- a network end-point running with remote authentication enabled will
//! connect to or accept connections from an end-point running in authenticated mode as
//! long as the latter is in its trusted peers set.
use channel::{self, message_queues::QueueStyle};
use dijets_config::{
config::{
DiscoveryMethod, NetworkConfig, Peer, PeerRole, PeerSet, RateLimitConfig, RoleType,
CONNECTION_BACKOFF_BASE, CONNECTIVITY_CHECK_INTERVAL_MS, MAX_CONCURRENT_NETWORK_REQS,
MAX_CONNECTION_DELAY_MS, MAX_FRAME_SIZE, MAX_FULLNODE_OUTBOUND_CONNECTIONS,
MAX_INBOUND_CONNECTIONS, NETWORK_CHANNEL_SIZE,
},
network_id::NetworkContext,
};
use dijets_crypto::x25519::PublicKey;
use dijets_infallible::RwLock;
use dijets_logger::prelude::*;
use dijets_metrics::IntCounterVec;
use dijets_network_address_encryption::Encryptor;
use dijets_secure_storage::Storage;
use dijets_time_service::TimeService;
use dijets_types::{chain_id::ChainId, network_address::NetworkAddress};
use network::{
connectivity_manager::{builder::ConnectivityManagerBuilder, ConnectivityRequest},
logging::NetworkSchema,
peer_manager::{
builder::{AuthenticationMode, PeerManagerBuilder},
ConnectionRequestSender,
},
protocols::{
health_checker::{self, builder::HealthCheckerBuilder},
network::{NewNetworkEvents, NewNetworkSender},
},
ProtocolId,
};
use network_discovery::{gen_simple_discovery_reconfig_subscription, DiscoveryChangeListener};
use std::{
clone::Clone,
collections::{HashMap, HashSet},
sync::Arc,
};
use subscription_service::ReconfigSubscription;
use tokio::runtime::Handle;
/// Build-lifecycle state of a NetworkBuilder; transitions are one-way
/// (CREATED -> BUILT -> STARTED) and enforced by assert_eq in build()/start().
// NOTE(review): variant names are SCREAMING_CASE rather than idiomatic
// CamelCase; renaming would touch every use site, so left as-is.
#[derive(Debug, PartialEq, PartialOrd)]
enum State {
    CREATED,
    BUILT,
    STARTED,
}
/// Build Network module with custom configuration values.
/// Methods can be chained in order to set the configuration values.
/// MempoolNetworkHandler and ConsensusNetworkHandler are constructed by calling
/// [`NetworkBuilder::build`]. New instances of `NetworkBuilder` are obtained
/// via [`NetworkBuilder::create`].
pub struct NetworkBuilder {
    /// Lifecycle guard: CREATED -> BUILT (build) -> STARTED (start).
    state: State,
    /// Tokio runtime handle; set by build() and required by start().
    executor: Option<Handle>,
    time_service: TimeService,
    /// Identity of this network instance (role, network id, peer id).
    network_context: Arc<NetworkContext>,
    /// Discovery listeners accumulated before start(); taken (None) once started.
    discovery_listeners: Option<Vec<DiscoveryChangeListener>>,
    /// Optional: maintains connections to eligible peers (see add_connectivity_manager).
    connectivity_manager_builder: Option<ConnectivityManagerBuilder>,
    /// Optional: ping-based liveness checking (see add_connection_monitoring).
    health_checker_builder: Option<HealthCheckerBuilder>,
    /// Always present — a network cannot exist without a PeerManager.
    peer_manager_builder: PeerManagerBuilder,
    // (StateSync) ReconfigSubscriptions required by internal Network components.
    reconfig_subscriptions: Vec<ReconfigSubscription>,
}
impl NetworkBuilder {
    /// Return a new NetworkBuilder initialized with default configuration values.
    // TODO: Remove `pub`. NetworkBuilder should only be created through `::create()`
    pub fn new(
        chain_id: ChainId,
        trusted_peers: Arc<RwLock<PeerSet>>,
        network_context: Arc<NetworkContext>,
        time_service: TimeService,
        listen_address: NetworkAddress,
        authentication_mode: AuthenticationMode,
        max_frame_size: usize,
        enable_proxy_protocol: bool,
        network_channel_size: usize,
        max_concurrent_network_reqs: usize,
        inbound_connection_limit: usize,
        inbound_rate_limit_config: Option<RateLimitConfig>,
        outbound_rate_limit_config: Option<RateLimitConfig>,
    ) -> Self {
        // A network cannot exist without a PeerManager
        // TODO: construct this in create and pass it to new() as a parameter. The complication is manual construction of NetworkBuilder in various tests.
        let peer_manager_builder = PeerManagerBuilder::create(
            chain_id,
            network_context.clone(),
            time_service.clone(),
            listen_address,
            trusted_peers,
            authentication_mode,
            network_channel_size,
            max_concurrent_network_reqs,
            max_frame_size,
            enable_proxy_protocol,
            inbound_connection_limit,
            inbound_rate_limit_config,
            outbound_rate_limit_config,
        );
        NetworkBuilder {
            state: State::CREATED,
            executor: None,
            time_service,
            network_context,
            discovery_listeners: None,
            connectivity_manager_builder: None,
            health_checker_builder: None,
            peer_manager_builder,
            reconfig_subscriptions: vec![],
        }
    }
    /// Test-only convenience constructor: wires a builder with library-default
    /// limits/timeouts, no rate limiting, proxy protocol disabled, and a
    /// connectivity manager seeded with `seeds`.
    pub fn new_for_test(
        chain_id: ChainId,
        seeds: PeerSet,
        trusted_peers: Arc<RwLock<PeerSet>>,
        network_context: Arc<NetworkContext>,
        time_service: TimeService,
        listen_address: NetworkAddress,
        authentication_mode: AuthenticationMode,
    ) -> NetworkBuilder {
        // Mutual authentication is implied by the Mutual(_) mode variant.
        let mutual_authentication = matches!(authentication_mode, AuthenticationMode::Mutual(_));
        let mut builder = NetworkBuilder::new(
            chain_id,
            trusted_peers.clone(),
            network_context,
            time_service,
            listen_address,
            authentication_mode,
            MAX_FRAME_SIZE,
            false, /* Disable proxy protocol */
            NETWORK_CHANNEL_SIZE,
            MAX_CONCURRENT_NETWORK_REQS,
            MAX_INBOUND_CONNECTIONS,
            None,
            None,
        );
        builder.add_connectivity_manager(
            seeds,
            trusted_peers,
            MAX_FULLNODE_OUTBOUND_CONNECTIONS,
            CONNECTION_BACKOFF_BASE,
            MAX_CONNECTION_DELAY_MS,
            CONNECTIVITY_CHECK_INTERVAL_MS,
            NETWORK_CHANNEL_SIZE,
            mutual_authentication,
        );
        builder
    }
    /// Create a new NetworkBuilder based on the provided configuration.
    /// Wires connection monitoring, a connectivity manager (always, to track
    /// known peers), and one discovery listener per configured method.
    pub fn create(
        chain_id: ChainId,
        role: RoleType,
        config: &NetworkConfig,
        time_service: TimeService,
    ) -> NetworkBuilder {
        let peer_id = config.peer_id();
        let identity_key = config.identity_key();
        let pubkey = identity_key.public_key();
        let authentication_mode = if config.mutual_authentication {
            AuthenticationMode::Mutual(identity_key)
        } else {
            AuthenticationMode::MaybeMutual(identity_key)
        };
        let network_context = Arc::new(NetworkContext::new(
            role,
            config.network_id.clone(),
            peer_id,
        ));
        // Starts empty; populated at runtime (e.g. by discovery/reconfig).
        let trusted_peers = Arc::new(RwLock::new(HashMap::new()));
        let mut network_builder = NetworkBuilder::new(
            chain_id,
            trusted_peers.clone(),
            network_context,
            time_service,
            config.listen_address.clone(),
            authentication_mode,
            config.max_frame_size,
            config.enable_proxy_protocol,
            config.network_channel_size,
            config.max_concurrent_network_reqs,
            config.max_inbound_connections,
            config.inbound_rate_limit_config,
            config.outbound_rate_limit_config,
        );
        network_builder.add_connection_monitoring(
            config.ping_interval_ms,
            config.ping_timeout_ms,
            config.ping_failures_tolerated,
        );
        // Always add a connectivity manager to keep track of known peers
        let seeds = merge_seeds(config);
        network_builder.add_connectivity_manager(
            seeds,
            trusted_peers,
            config.max_outbound_connections,
            config.connection_backoff_base,
            config.max_connection_delay_ms,
            config.connectivity_check_interval_ms,
            config.network_channel_size,
            config.mutual_authentication,
        );
        network_builder.discovery_listeners = Some(Vec::new());
        for discovery_method in config.discovery_methods() {
            network_builder.add_discovery_change_listener(
                discovery_method,
                pubkey,
                config.encryptor(),
            );
        }
        // Ensure there are no duplicate source types
        let set: HashSet<_> = network_builder
            .discovery_listeners
            .as_ref()
            .unwrap()
            .iter()
            .map(|listener| listener.discovery_source())
            .collect();
        assert_eq!(
            set.len(),
            network_builder.discovery_listeners.as_ref().unwrap().len()
        );
        network_builder
    }
    /// Create the configured Networking components.
    /// Panics (assert) unless the builder is still in CREATED state.
    pub fn build(&mut self, executor: Handle) -> &mut Self {
        assert_eq!(self.state, State::CREATED);
        self.state = State::BUILT;
        self.executor = Some(executor);
        self.peer_manager_builder
            .build(self.executor.as_mut().expect("Executor must exist"));
        self
    }
    /// Start the built Networking components.
    /// Panics (assert) unless build() ran first; consumes the discovery
    /// listeners (they cannot be added after this point).
    pub fn start(&mut self) -> &mut Self {
        assert_eq!(self.state, State::BUILT);
        self.state = State::STARTED;
        let executor = self.executor.as_mut().expect("Executor must exist");
        self.peer_manager_builder.start(executor);
        debug!(
            NetworkSchema::new(&self.network_context),
            "{} Started peer manager", self.network_context
        );
        if let Some(conn_mgr_builder) = self.connectivity_manager_builder.as_mut() {
            conn_mgr_builder.start(executor);
            debug!(
                NetworkSchema::new(&self.network_context),
                "{} Started conn manager", self.network_context
            );
        }
        if let Some(health_checker_builder) = self.health_checker_builder.as_mut() {
            health_checker_builder.start(executor);
            debug!(
                NetworkSchema::new(&self.network_context),
                "{} Started health checker", self.network_context
            );
        }
        if let Some(discovery_listeners) = self.discovery_listeners.take() {
            discovery_listeners
                .into_iter()
                .for_each(|listener| listener.start(executor))
        }
        self
    }
    /// Mutable access to the accumulated reconfig subscriptions so the caller
    /// can hand them to the subscription service.
    pub fn reconfig_subscriptions(&mut self) -> &mut Vec<ReconfigSubscription> {
        &mut self.reconfig_subscriptions
    }
    pub fn network_context(&self) -> Arc<NetworkContext> {
        self.network_context.clone()
    }
    /// Request channel into the ConnectivityManager, if one was added.
    pub fn conn_mgr_reqs_tx(&self) -> Option<channel::Sender<ConnectivityRequest>> {
        self.connectivity_manager_builder
            .as_ref()
            .map(|conn_mgr_builder| conn_mgr_builder.conn_mgr_reqs_tx())
    }
    pub fn listen_address(&self) -> NetworkAddress {
        self.peer_manager_builder.listen_address()
    }
    /// Add a [`ConnectivityManager`] to the network.
    ///
    /// [`ConnectivityManager`] is responsible for ensuring that we are connected
    /// to a node iff. it is an eligible node and maintaining persistent
    /// connections with all eligible nodes. A list of eligible nodes is received
    /// at initialization, and updates are received on changes to system membership.
    ///
    /// Note: a connectivity manager should only be added if the network is
    /// permissioned.
    pub fn add_connectivity_manager(
        &mut self,
        seeds: PeerSet,
        trusted_peers: Arc<RwLock<PeerSet>>,
        max_outbound_connections: usize,
        connection_backoff_base: u64,
        max_connection_delay_ms: u64,
        connectivity_check_interval_ms: u64,
        channel_size: usize,
        mutual_authentication: bool,
    ) -> &mut Self {
        let pm_conn_mgr_notifs_rx = self.peer_manager_builder.add_connection_event_listener();
        // Validator networks are fully connected — no outbound cap there.
        let outbound_connection_limit = if !self.network_context.network_id().is_validator_network()
        {
            Some(max_outbound_connections)
        } else {
            None
        };
        self.connectivity_manager_builder = Some(ConnectivityManagerBuilder::create(
            self.network_context(),
            self.time_service.clone(),
            trusted_peers,
            seeds,
            connectivity_check_interval_ms,
            connection_backoff_base,
            max_connection_delay_ms,
            channel_size,
            ConnectionRequestSender::new(self.peer_manager_builder.connection_reqs_tx()),
            pm_conn_mgr_notifs_rx,
            outbound_connection_limit,
            mutual_authentication,
        ));
        self
    }
    /// Register a discovery listener for one configured method (on-chain
    /// validator set or file-based). Requires the ConnectivityManager to have
    /// been added first (panics otherwise); must run before start().
    fn add_discovery_change_listener(
        &mut self,
        discovery_method: &DiscoveryMethod,
        pubkey: PublicKey,
        encryptor: Encryptor<Storage>,
    ) {
        let conn_mgr_reqs_tx = self
            .conn_mgr_reqs_tx()
            .expect("ConnectivityManager must exist");
        let listener = match discovery_method {
            DiscoveryMethod::Onchain => {
                // On-chain discovery consumes validator-set reconfig events.
                let (simple_discovery_reconfig_subscription, simple_discovery_reconfig_rx) =
                    gen_simple_discovery_reconfig_subscription();
                self.reconfig_subscriptions
                    .push(simple_discovery_reconfig_subscription);
                DiscoveryChangeListener::validator_set(
                    self.network_context.clone(),
                    conn_mgr_reqs_tx,
                    pubkey,
                    encryptor,
                    simple_discovery_reconfig_rx,
                )
            }
            DiscoveryMethod::File(path, interval_duration) => DiscoveryChangeListener::file(
                self.network_context.clone(),
                conn_mgr_reqs_tx,
                path,
                *interval_duration,
                self.time_service.clone(),
            ),
            DiscoveryMethod::None => return,
        };
        self.discovery_listeners
            .as_mut()
            .expect("Can only add listeners before starting")
            .push(listener);
    }
    /// Add a HealthChecker to the network.
    fn add_connection_monitoring(
        &mut self,
        ping_interval_ms: u64,
        ping_timeout_ms: u64,
        ping_failures_tolerated: u64,
    ) -> &mut Self {
        // Initialize and start HealthChecker.
        let (hc_network_tx, hc_network_rx) =
            self.add_protocol_handler(health_checker::network_endpoint_config());
        self.health_checker_builder = Some(HealthCheckerBuilder::new(
            self.network_context(),
            self.time_service.clone(),
            ping_interval_ms,
            ping_timeout_ms,
            ping_failures_tolerated,
            hc_network_tx,
            hc_network_rx,
        ));
        debug!(
            NetworkSchema::new(&self.network_context),
            "{} Created health checker", self.network_context
        );
        self
    }
    /// Adds a endpoints for the provided configuration. Returns NetworkSender and NetworkEvent which
    /// can be attached to other components.
    pub fn add_protocol_handler<SenderT, EventT>(
        &mut self,
        (rpc_protocols, direct_send_protocols, queue_preference, max_queue_size_per_peer, counter): (
            Vec<ProtocolId>,
            Vec<ProtocolId>,
            QueueStyle,
            usize,
            Option<&'static IntCounterVec>,
        ),
    ) -> (SenderT, EventT)
    where
        EventT: NewNetworkEvents,
        SenderT: NewNetworkSender,
    {
        let (peer_mgr_reqs_tx, peer_mgr_reqs_rx, connection_reqs_tx, connection_notifs_rx) =
            self.peer_manager_builder.add_protocol_handler(
                rpc_protocols,
                direct_send_protocols,
                queue_preference,
                max_queue_size_per_peer,
                counter,
            );
        (
            SenderT::new(peer_mgr_reqs_tx, connection_reqs_tx),
            EventT::new(peer_mgr_reqs_rx, connection_notifs_rx),
        )
    }
}
/// Retrieve and merge seeds so that they have all keys associated
fn merge_seeds(config: &NetworkConfig) -> PeerSet {
    config.verify_seeds().expect("Seeds must be well formed");
    let mut seeds = config.seeds.clone();

    // Merge old seed configuration with new seed configuration
    // TODO(gnazario): Once fully migrated, remove `seed_addrs`
    for (peer_id, addrs) in config.seed_addrs.iter() {
        let legacy_peer = Peer::from_addrs(PeerRole::ValidatorFullNode, addrs.clone());
        seeds
            .entry(*peer_id)
            // Sad clone due to Rust not realizing these are two distinct paths
            .and_modify(|seed| seed.extend(legacy_peer.clone()).unwrap())
            .or_insert(legacy_peer);
    }

    // Pull public keys out of addresses
    for peer in seeds.values_mut() {
        // Collect first so the immutable borrow of `addresses` ends before
        // `keys` is mutated.
        let found_keys: Vec<_> = peer
            .addresses
            .iter()
            .filter_map(NetworkAddress::find_noise_proto)
            .collect();
        for pubkey in found_keys {
            peer.keys.insert(pubkey);
        }
    }
    seeds
}
| 35.12 | 155 | 0.623007 |
713a38d2f6c5a74506c777e1d3ba59c325d42c6b | 17,285 | use super::*;
use std::convert::TryInto;
use proptest::prop_oneof;
use proptest::strategy::{BoxedStrategy, Just, Strategy};
use liblumen_alloc::erts::exception::{Class, Exception};
use proptest::test_runner::TestCaseError;
// `native` (the raise/3 implementation under test) must reject any class atom
// other than `error`, `exit`, or `throw` with `badarg`.
#[test]
fn without_class_errors_badarg() {
    run!(
        |arc_process| {
            (
                strategy::term::atom().prop_filter(
                    "Class cannot be error, exit, or throw",
                    |class| {
                        // Keep only atoms that do NOT convert into a valid Class.
                        let is_class: Result<exception::Class, _> = (*class).try_into();
                        is_class.is_err()
                    },
                ),
                strategy::term(arc_process.clone()),
                strategy::term::list::proper(arc_process.clone()),
            )
        },
        |(class, reason, stacktrace)| {
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                "supported exception classes are error, exit, or throw"
            );
            Ok(())
        },
    );
}
// A valid class with a non-list stacktrace must error with `badarg`.
#[test]
fn with_class_without_list_stacktrace_errors_badarg() {
    run!(
        |arc_process| {
            (
                class(),
                strategy::term(arc_process.clone()),
                strategy::term::is_not_list(arc_process.clone()),
            )
        },
        |(class, reason, stacktrace)| {
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// An empty-list ([]) stacktrace is valid, so the call raises the requested
// class/reason instead of erroring.
#[test]
fn with_class_with_empty_list_stacktrace_raises() {
    run!(
        |arc_process| {
            (
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
            )
        },
        |((class_variant, class), reason)| {
            let stacktrace = Term::NIL;
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
// A stacktrace entry whose module is neither an atom (an `{M, F, A}` entry)
// nor a function (a `{function, args, location}` entry) is malformed.
#[test]
fn with_class_with_stacktrace_without_atom_module_errors_badarg() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class(),
                strategy::term(arc_process.clone()),
                strategy::term(arc_process.clone()).prop_filter(
                    "Module must not be an atom or function",
                    |module| {
                        !(
                            // {M, F, arity | args}
                            module.is_atom() ||
                            // {function, args, location}
                            module.is_boxed_function()
                        )
                    },
                ),
                strategy::term::function::function(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
            )
        },
        |(arc_process, class, reason, module, function, arity_or_arguments)| {
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments])
                    .unwrap()])
                .unwrap();
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// An `{M, F, A}` entry with an atom module but a non-atom function is malformed.
#[test]
fn with_class_with_stacktrace_with_atom_module_without_atom_function_errors_badarg() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::is_not_atom(arc_process.clone()),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
            )
        },
        |(arc_process, class, reason, module, function, arity_or_arguments)| {
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments])
                    .unwrap()])
                .unwrap();
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// The third element of an `{M, F, _}` entry must be an arity or argument list.
#[test]
fn with_class_with_stacktrace_with_atom_module_with_atom_function_without_arity_or_arguments_errors_badarg(
) {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::function::function(),
                strategy::term::function::is_not_arity_or_arguments(arc_process.clone()),
            )
        },
        |(arc_process, class, reason, module, function, arity_or_arguments)| {
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments])
                    .unwrap()])
                .unwrap();
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// A `{file, Value}` location entry requires `Value` to be a charlist.
#[test]
fn with_class_with_stacktrace_with_mfa_with_file_without_charlist_errors_badarg() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::function::function(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
                strategy::term::is_not_list(arc_process.clone()),
            )
        },
        |(arc_process, class, reason, module, function, arity_or_arguments, file_value)| {
            let file_key = atom!("file");
            let location = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[file_key, file_value])
                    .unwrap()])
                .unwrap();
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments, location])
                    .unwrap()])
                .unwrap();
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// A `{line, Value}` location entry requires `Value` to be a positive integer.
#[test]
fn with_class_with_stacktrace_with_mfa_with_non_positive_line_with_errors_badarg() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class(),
                strategy::term(arc_process.clone()),
                strategy::term::atom(),
                strategy::term::atom(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
                strategy::term::integer::non_positive(arc_process.clone()),
            )
        },
        |(arc_process, class, reason, module, function, arity_or_arguments, line_value)| {
            let line_key = atom!("line");
            let location = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[line_key, line_value])
                    .unwrap()])
                .unwrap();
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments, location])
                    .unwrap()])
                .unwrap();
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// Location keyword entries may only use the keys `file` and `line`.
#[test]
fn with_class_with_stacktrace_with_mfa_with_invalid_location_errors_badarg() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class(),
                strategy::term(arc_process.clone()),
                strategy::term::atom(),
                strategy::term::atom(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
                strategy::term::atom().prop_filter("Key cannot be file or line", |key| {
                    let key_atom: Atom = (*key).try_into().unwrap();
                    match key_atom.name() {
                        "file" | "line" => false,
                        _ => true,
                    }
                }),
                strategy::term(arc_process.clone()),
            )
        },
        |(arc_process, class, reason, module, function, arity_or_arguments, key, value)| {
            let location = arc_process
                .list_from_slice(&[arc_process.tuple_from_slice(&[key, value]).unwrap()])
                .unwrap();
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments, location])
                    .unwrap()])
                .unwrap();
            prop_assert_badarg!(
                native(class, reason, stacktrace),
                format!("stacktrace ({}) is not a stacktrace", stacktrace)
            );
            Ok(())
        },
    );
}
// `{M, F, Arity}` with a non-negative integer arity is a valid stacktrace
// entry, so the requested exception is raised.
#[test]
fn with_atom_module_with_atom_function_with_arity_raises() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::function::function(),
                strategy::term::integer::non_negative(arc_process.clone()),
            )
        },
        |(arc_process, (class_variant, class), reason, module, function, arity)| {
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity])
                    .unwrap()])
                .unwrap();
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
// `{M, F, Args}` with a proper argument list is also a valid entry.
#[test]
fn with_atom_module_with_atom_function_with_arguments_raises() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::function::function(),
                strategy::term::list::proper(arc_process.clone()),
            )
        },
        |(arc_process, (class_variant, class), reason, module, function, arguments)| {
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arguments])
                    .unwrap()])
                .unwrap();
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
// A four-element `{M, F, A, Location}` entry with an empty location list is valid.
#[test]
fn with_mfa_with_empty_location_raises() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::function::function(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
            )
        },
        |(arc_process, (class_variant, class), reason, module, function, arity_or_arguments)| {
            let location = Term::NIL;
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments, location])
                    .unwrap()])
                .unwrap();
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
// A location holding a charlist `{file, ...}` entry is valid.
#[test]
fn with_mfa_with_file_raises() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
                strategy::term::function::module(),
                strategy::term::function::function(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
                strategy::term::charlist(arc_process.clone()),
            )
        },
        |(arc_process, (class_variant, class), reason, module, function, arity, file_value)| {
            let file_key = atom!("file");
            let location = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[file_key, file_value])
                    .unwrap()])
                .unwrap();
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity, location])
                    .unwrap()])
                .unwrap();
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
// A location holding a positive-integer `{line, ...}` entry is valid.
#[test]
fn with_mfa_with_positive_line_raises() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
                strategy::term::atom(),
                strategy::term::atom(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
                strategy::term::integer::positive(arc_process.clone()),
            )
        },
        |(
            arc_process,
            (class_variant, class),
            reason,
            module,
            function,
            arity_or_arguments,
            line_value,
        )| {
            let line_key = atom!("line");
            let location = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[line_key, line_value])
                    .unwrap()])
                .unwrap();
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments, location])
                    .unwrap()])
                .unwrap();
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
// A location holding both `{file, ...}` and `{line, ...}` entries is valid.
#[test]
fn with_mfa_with_file_and_line_raises() {
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                class_variant_and_term(),
                strategy::term(arc_process.clone()),
                strategy::term::atom(),
                strategy::term::atom(),
                strategy::term::function::arity_or_arguments(arc_process.clone()),
                strategy::term::charlist(arc_process.clone()),
                strategy::term::integer::positive(arc_process),
            )
        },
        |(
            arc_process,
            (class_variant, class),
            reason,
            module,
            function,
            arity_or_arguments,
            file_value,
            line_value,
        )| {
            let file_key = atom!("file");
            let line_key = atom!("line");
            let location = arc_process
                .list_from_slice(&[
                    arc_process
                        .tuple_from_slice(&[file_key, file_value])
                        .unwrap(),
                    arc_process
                        .tuple_from_slice(&[line_key, line_value])
                        .unwrap(),
                ])
                .unwrap();
            let stacktrace = arc_process
                .list_from_slice(&[arc_process
                    .tuple_from_slice(&[module, function, arity_or_arguments, location])
                    .unwrap()])
                .unwrap();
            prop_assert_raises(class_variant, class, reason, stacktrace)
        },
    );
}
fn class() -> BoxedStrategy<Term> {
prop_oneof![Just("error"), Just("exit"), Just("throw")]
.prop_map(|string| atom!(&string))
.boxed()
}
/// Strategy pairing each exception `Class` variant with its atom `Term`.
fn class_variant_and_term() -> BoxedStrategy<(Class, Term)> {
    let variant_and_name = prop_oneof![
        Just((Class::Error { arguments: None }, "error")),
        Just((Class::Exit, "exit")),
        Just((Class::Throw, "throw"))
    ];
    variant_and_name
        .prop_map(|(class_variant, name)| (class_variant, atom!(&name)))
        .boxed()
}
fn prop_assert_raises(
class_variant: Class,
class: Term,
reason: Term,
stacktrace: Term,
) -> Result<(), TestCaseError> {
if let Err(Exception::Runtime(ref runtime_exception)) = native(class, reason, stacktrace) {
prop_assert_eq!(runtime_exception.class(), Some(class_variant));
prop_assert_eq!(runtime_exception.reason(), Some(reason));
prop_assert_eq!(runtime_exception.stacktrace(), Some(stacktrace));
Ok(())
} else {
Err(proptest::test_runner::TestCaseError::fail("not a raise"))
}
}
| 33.176583 | 107 | 0.502459 |
569c662ec676db6e8085823bb0d3459b3143c441 | 4,455 | // I think borrowed boxing is necessary for objekt::clone_box to work
#![allow(clippy::borrowed_box)]
//! UI event handling
use crate::cmd::{parse_cmd, CmdArgs, ParseCmdResult};
use crate::config;
use futures_util::stream::StreamExt;
use libtiny_client::Client;
use libtiny_ui::{MsgSource, MsgTarget, UI};
use std::path::{Path, PathBuf};
use tokio::sync::mpsc;
/// Drives the UI event loop: consumes events until the channel closes or an
/// abort is requested, redrawing after each handled event.
pub(crate) async fn task(
    config_path: PathBuf,
    defaults: config::Defaults,
    ui: Box<dyn UI>,
    mut clients: Vec<Client>,
    mut rcv_ev: mpsc::Receiver<libtiny_ui::Event>,
) {
    loop {
        let ev = match rcv_ev.next().await {
            Some(ev) => ev,
            None => break, // channel closed; all senders dropped
        };
        let abort = handle_input_ev(&config_path, &defaults, &ui, &mut clients, ev);
        if abort {
            return;
        }
        ui.draw();
    }
}
/// Dispatches a single UI event. Returns `true` when the application should
/// abort, `false` to keep running.
fn handle_input_ev(
    config_path: &Path,
    defaults: &config::Defaults,
    ui: &Box<dyn UI>,
    clients: &mut Vec<Client>,
    ev: libtiny_ui::Event,
) -> bool {
    use libtiny_ui::Event::*;
    match ev {
        Abort => {
            // Politely disconnect from every server before shutting down.
            for client in clients.iter_mut() {
                client.quit(None);
            }
            true // abort
        }
        Msg { msg, source } => {
            send_msg(&**ui, clients, &source, msg, false);
            false
        }
        Lines { lines, source } => {
            for line in lines {
                send_msg(&**ui, clients, &source, line, false);
            }
            false
        }
        Cmd { cmd, source } => {
            handle_cmd(config_path, defaults, ui, clients, source, &cmd);
            false
        }
    }
}
/// Parses and runs a `/command`; shows an error in the current tab when the
/// command is not recognized.
fn handle_cmd(
    config_path: &Path,
    defaults: &config::Defaults,
    ui: &Box<dyn UI>,
    clients: &mut Vec<Client>,
    src: MsgSource,
    cmd: &str,
) {
    match parse_cmd(cmd) {
        ParseCmdResult::Unknown => {
            ui.add_client_err_msg(
                &format!("Unsupported command: \"/{}\"", cmd),
                &MsgTarget::CurrentTab,
            );
        }
        ParseCmdResult::Ok { cmd, rest } => {
            // Hand the remainder of the line plus shared state to the command.
            let cmd_args = CmdArgs {
                args: rest,
                config_path,
                defaults,
                ui,
                clients,
                src,
            };
            (cmd.cmd_fn)(cmd_args);
        } // NOTE: an `Ambiguous` prefix-match arm used to live here; see VCS history.
    }
}
// TODO: move this somewhere else
/// Sends `msg` to the target identified by `src`, splitting the message into
/// protocol-sized chunks and echoing each chunk to the UI.
///
/// Fix: the client lookup previously used `.unwrap()`, which panicked when no
/// connection exists for `src`'s server. We now show the error message the
/// original TODO comment sketched and return instead of crashing.
pub(crate) fn send_msg(
    ui: &dyn UI,
    clients: &mut Vec<Client>,
    src: &MsgSource,
    msg: String,
    is_action: bool,
) {
    if src.serv_name() == "mentions" {
        // The "mentions" tab is not backed by a server connection.
        ui.add_client_err_msg(
            "Use `/connect <server>` to connect to a server",
            &MsgTarget::CurrentTab,
        );
        return;
    }

    // Find the client for the source server; report instead of panicking when
    // it does not exist.
    let client = match clients
        .iter_mut()
        .find(|client| client.get_serv_name() == src.serv_name())
    {
        Some(client) => client,
        None => {
            ui.add_client_err_msg(
                &format!("Can't find server: {}", src.serv_name()),
                &MsgTarget::CurrentTab,
            );
            return;
        }
    };

    // `ui_target`: Where to show the message on ui
    // `msg_target`: Actual PRIVMSG target to send to the server
    let (ui_target, msg_target) = {
        match src {
            MsgSource::Serv { .. } => {
                // we don't split raw messages to 512-bytes long chunks
                client.raw_msg(&msg);
                return;
            }
            MsgSource::Chan { ref serv, ref chan } => (MsgTarget::Chan { serv, chan }, chan),
            MsgSource::User { ref serv, ref nick } => {
                // Messages to services go to the server tab rather than a
                // private-message tab.
                let msg_target = if nick.eq_ignore_ascii_case("nickserv")
                    || nick.eq_ignore_ascii_case("chanserv")
                {
                    MsgTarget::Server { serv }
                } else {
                    MsgTarget::User { serv, nick }
                };
                (msg_target, nick)
            }
        }
    };

    let ts = time::now();
    // Budget for per-chunk overhead: target name plus the CTCP ACTION framing.
    let extra_len = msg_target.len()
        + if is_action {
            9 // "\0x1ACTION \0x1".len()
        } else {
            0
        };
    for msg in client.split_privmsg(extra_len, &msg) {
        client.privmsg(msg_target, msg, is_action);
        ui.add_privmsg(&client.get_nick(), msg, ts, &ui_target, false, is_action);
    }
}
| 27.84375 | 93 | 0.508418 |
163b400973b7cd2a0feb05f38c383223a0a02f95 | 29,321 | //! Trait Resolution. See the [rustc dev guide] for more information on how this works.
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/resolution.html
mod chalk;
pub mod query;
pub mod select;
pub mod specialization_graph;
mod structural_impls;
use crate::infer::canonical::Canonical;
use crate::mir::interpret::ErrorHandled;
use crate::ty::subst::SubstsRef;
use crate::ty::{self, AdtKind, Ty, TyCtxt};
use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::Constness;
use rustc_span::symbol::Symbol;
use rustc_span::{Span, DUMMY_SP};
use smallvec::SmallVec;
use std::borrow::Cow;
use std::fmt;
use std::ops::Deref;
use std::rc::Rc;
pub use self::select::{EvaluationCache, EvaluationResult, OverflowError, SelectionCache};
pub type CanonicalChalkEnvironmentAndGoal<'tcx> = Canonical<'tcx, ChalkEnvironmentAndGoal<'tcx>>;
pub use self::ObligationCauseCode::*;
pub use self::chalk::{ChalkEnvironmentAndGoal, RustInterner as ChalkRustInterner};
/// Depending on the stage of compilation, we want projection to be
/// more or less conservative.
///
/// `UserFacing` is the conservative mode used while type-checking; `All` is
/// used once everything is monomorphic (e.g. at codegen time).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, HashStable)]
pub enum Reveal {
    /// At type-checking time, we refuse to project any associated
    /// type that is marked `default`. Non-`default` ("final") types
    /// are always projected. This is necessary in general for
    /// soundness of specialization. However, we *could* allow
    /// projections in fully-monomorphic cases. We choose not to,
    /// because we prefer for `default type` to force the type
    /// definition to be treated abstractly by any consumers of the
    /// impl. Concretely, that means that the following example will
    /// fail to compile:
    ///
    /// ```
    /// trait Assoc {
    ///     type Output;
    /// }
    ///
    /// impl<T> Assoc for T {
    ///     default type Output = bool;
    /// }
    ///
    /// fn main() {
    ///     let <() as Assoc>::Output = true;
    /// }
    /// ```
    UserFacing,
    /// At codegen time, all monomorphic projections will succeed.
    /// Also, `impl Trait` is normalized to the concrete type,
    /// which has to be already collected by type-checking.
    ///
    /// NOTE: as `impl Trait`'s concrete type should *never*
    /// be observable directly by the user, `Reveal::All`
    /// should not be used by checks which may expose
    /// type equality or type contents to the user.
    /// There are some exceptions, e.g., around auto traits and
    /// transmute-checking, which expose some details, but
    /// not the whole concrete type of the `impl Trait`.
    All,
}
/// The reason why we incurred this obligation; used for error reporting.
///
/// As the happy path does not care about this struct, storing this on the heap
/// ends up increasing performance.
///
/// We do not want to intern this as there are a lot of obligation causes which
/// only live for a short period of time.
///
/// Access the fields through the `Deref` impl below, which falls back to a
/// shared static for the dummy cause.
#[derive(Clone, PartialEq, Eq, Hash, Lift)]
pub struct ObligationCause<'tcx> {
    /// `None` for `ObligationCause::dummy`, `Some` otherwise.
    data: Option<Rc<ObligationCauseData<'tcx>>>,
}
// Shared backing data for dummy causes (`ObligationCause { data: None }`):
// dummy span, crate-root body, and the catch-all `MiscObligation` code.
const DUMMY_OBLIGATION_CAUSE_DATA: ObligationCauseData<'static> =
    ObligationCauseData { span: DUMMY_SP, body_id: hir::CRATE_HIR_ID, code: MiscObligation };
// Correctly format `ObligationCause::dummy`.
impl<'tcx> fmt::Debug for ObligationCause<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `self` derefs to `ObligationCauseData`, so this forwards to its
        // derived `Debug` (and prints the static data for dummy causes).
        ObligationCauseData::fmt(self, f)
    }
}
impl Deref for ObligationCause<'tcx> {
    type Target = ObligationCauseData<'tcx>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // Dummy causes (data == None) fall back to the shared static.
        self.data.as_deref().unwrap_or(&DUMMY_OBLIGATION_CAUSE_DATA)
    }
}
/// The heap-allocated payload of an `ObligationCause`.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift)]
pub struct ObligationCauseData<'tcx> {
    /// Primary span for error reporting; note that `ObligationCause::span`
    /// may substitute a different span for some cause codes.
    pub span: Span,
    /// The ID of the fn body that triggered this obligation. This is
    /// used for region obligations to determine the precise
    /// environment in which the region obligation should be evaluated
    /// (in particular, closures can add new assumptions). See the
    /// field `region_obligations` of the `FulfillmentContext` for more
    /// information.
    pub body_id: hir::HirId,
    /// Why the obligation was incurred; see `ObligationCauseCode`.
    pub code: ObligationCauseCode<'tcx>,
}
impl<'tcx> ObligationCause<'tcx> {
    /// Creates a cause with the given span, body, and code.
    #[inline]
    pub fn new(
        span: Span,
        body_id: hir::HirId,
        code: ObligationCauseCode<'tcx>,
    ) -> ObligationCause<'tcx> {
        let data = ObligationCauseData { span, body_id, code };
        ObligationCause { data: Some(Rc::new(data)) }
    }

    /// Shorthand for a `MiscObligation` cause at the given span/body.
    pub fn misc(span: Span, body_id: hir::HirId) -> ObligationCause<'tcx> {
        Self::new(span, body_id, MiscObligation)
    }

    /// Dummy-like cause that still carries a span (body is the crate root).
    pub fn dummy_with_span(span: Span) -> ObligationCause<'tcx> {
        Self::new(span, hir::CRATE_HIR_ID, MiscObligation)
    }

    /// The fully-dummy cause; allocates nothing.
    #[inline(always)]
    pub fn dummy() -> ObligationCause<'tcx> {
        ObligationCause { data: None }
    }

    /// Mutable access to the cause data, materializing (and un-sharing) the
    /// backing allocation on demand.
    pub fn make_mut(&mut self) -> &mut ObligationCauseData<'tcx> {
        let data = self
            .data
            .get_or_insert_with(|| Rc::new(DUMMY_OBLIGATION_CAUSE_DATA));
        Rc::make_mut(data)
    }

    /// The span to point at in diagnostics; a few cause codes prefer a
    /// different span than the one stored in the data.
    pub fn span(&self, tcx: TyCtxt<'tcx>) -> Span {
        match self.code {
            ObligationCauseCode::CompareImplMethodObligation { .. }
            | ObligationCauseCode::MainFunctionType
            | ObligationCauseCode::StartFunctionType => {
                tcx.sess.source_map().guess_head_span(self.span)
            }
            ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
                arm_span,
                ..
            }) => arm_span,
            _ => self.span,
        }
    }
}
/// Context attached to `ObligationCauseCode::UnifyReceiver`.
// NOTE(review): field meanings inferred from names/types — confirm at the
// call sites that build this struct.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift)]
pub struct UnifyReceiverContext<'tcx> {
    pub assoc_item: ty::AssocItem,
    pub param_env: ty::ParamEnv<'tcx>,
    pub substs: SubstsRef<'tcx>,
}
/// The different reasons an obligation can be incurred; drives the wording of
/// trait-error diagnostics.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift)]
pub enum ObligationCauseCode<'tcx> {
    /// Not well classified or should be obvious from the span.
    MiscObligation,
    /// A slice or array is WF only if `T: Sized`.
    SliceOrArrayElem,
    /// A tuple is WF only if its middle elements are `Sized`.
    TupleElem,
    /// This is the trait reference from the given projection.
    ProjectionWf(ty::ProjectionTy<'tcx>),
    /// In an impl of trait `X` for type `Y`, type `Y` must
    /// also implement all supertraits of `X`.
    ItemObligation(DefId),
    /// Like `ItemObligation`, but with extra detail on the source of the obligation.
    BindingObligation(DefId, Span),
    /// A type like `&'a T` is WF only if `T: 'a`.
    ReferenceOutlivesReferent(Ty<'tcx>),
    /// A type like `Box<Foo<'a> + 'b>` is WF only if `'b: 'a`.
    ObjectTypeBound(Ty<'tcx>, ty::Region<'tcx>),
    /// Obligation incurred due to an object cast.
    ObjectCastObligation(/* Object type */ Ty<'tcx>),
    /// Obligation incurred due to a coercion.
    Coercion {
        source: Ty<'tcx>,
        target: Ty<'tcx>,
    },
    /// Various cases where expressions must be `Sized` / `Copy` / etc.
    /// `L = X` implies that `L` is `Sized`.
    AssignmentLhsSized,
    /// `(x1, .., xn)` must be `Sized`.
    TupleInitializerSized,
    /// `S { ... }` must be `Sized`.
    StructInitializerSized,
    /// Type of each variable must be `Sized`.
    VariableType(hir::HirId),
    /// Argument type must be `Sized`.
    SizedArgumentType(Option<Span>),
    /// Return type must be `Sized`.
    SizedReturnType,
    /// Yield type must be `Sized`.
    SizedYieldType,
    /// Inline asm operand type must be `Sized`.
    InlineAsmSized,
    /// `[T, ..n]` implies that `T` must be `Copy`.
    RepeatVec,
    /// Types of fields (other than the last, except for packed structs) in a struct must be sized.
    FieldSized {
        adt_kind: AdtKind,
        span: Span,
        last: bool,
    },
    /// Constant expressions must be sized.
    ConstSized,
    /// `static` items must have `Sync` type.
    SharedStatic,
    /// Obligation derived from a parent obligation (see `DerivedObligationCause`);
    /// the variant records what kind of candidate produced the child.
    BuiltinDerivedObligation(DerivedObligationCause<'tcx>),
    ImplDerivedObligation(DerivedObligationCause<'tcx>),
    DerivedObligation(DerivedObligationCause<'tcx>),
    /// Error derived when matching traits/impls; see ObligationCause for more details
    CompareImplConstObligation,
    /// Error derived when matching traits/impls; see ObligationCause for more details
    CompareImplMethodObligation {
        item_name: Symbol,
        impl_item_def_id: DefId,
        trait_item_def_id: DefId,
    },
    /// Error derived when matching traits/impls; see ObligationCause for more details
    CompareImplTypeObligation {
        item_name: Symbol,
        impl_item_def_id: DefId,
        trait_item_def_id: DefId,
    },
    /// Checking that this expression can be assigned where it needs to be
    // FIXME(eddyb) #11161 is the original Expr required?
    ExprAssignable,
    /// Computing common supertype in the arms of a match expression
    MatchExpressionArm(Box<MatchExpressionArmCause<'tcx>>),
    /// Type error arising from type checking a pattern against an expected type.
    Pattern {
        /// The span of the scrutinee or type expression which caused the `root_ty` type.
        span: Option<Span>,
        /// The root expected type induced by a scrutinee or type expression.
        root_ty: Ty<'tcx>,
        /// Whether the `Span` came from an expression or a type expression.
        origin_expr: bool,
    },
    /// Constants in patterns must have `Structural` type.
    ConstPatternStructural,
    /// Computing common supertype in an if expression
    IfExpression(Box<IfExpressionCause>),
    /// Computing common supertype of an if expression with no else counter-part
    IfExpressionWithNoElse,
    /// `main` has wrong type
    MainFunctionType,
    /// `start` has wrong type
    StartFunctionType,
    /// Intrinsic has wrong type
    IntrinsicType,
    /// Method receiver
    MethodReceiver,
    /// Unifying a method receiver; payload carries the details (see
    /// `UnifyReceiverContext`).
    UnifyReceiver(Box<UnifyReceiverContext<'tcx>>),
    /// `return` with no expression
    ReturnNoExpression,
    /// `return` with an expression
    ReturnValue(hir::HirId),
    /// Return type of this function
    ReturnType,
    /// Block implicit return
    BlockTailExpression(hir::HirId),
    /// #[feature(trivial_bounds)] is not enabled
    TrivialBound,
}
impl ObligationCauseCode<'_> {
    /// Returns the base obligation cause, skipping over any chain of
    /// `*DerivedObligation` wrappers layered on top of it.
    pub fn peel_derives(&self) -> &Self {
        let mut code = self;
        loop {
            match code {
                BuiltinDerivedObligation(derived)
                | ImplDerivedObligation(derived)
                | DerivedObligation(derived) => code = &derived.parent_code,
                _ => return code,
            }
        }
    }
}
// `ObligationCauseCode` is used a lot. Make sure it doesn't unintentionally get bigger.
// (Large variants are boxed above precisely to keep this at 32 bytes.)
#[cfg(target_arch = "x86_64")]
static_assert_size!(ObligationCauseCode<'_>, 32);
/// Used by diagnostics that suggest treating a trailing statement as the
/// block's expression (see `MatchExpressionArmCause::semi_span` and
/// `IfExpressionCause::semicolon`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum StatementAsExpression {
    CorrectType,
    NeedsBoxing,
}
// `StatementAsExpression` contains no interned data, so lifting into another
// `'tcx` is the identity.
impl<'tcx> ty::Lift<'tcx> for StatementAsExpression {
    type Lifted = StatementAsExpression;
    fn lift_to_tcx(self, _tcx: TyCtxt<'tcx>) -> Option<StatementAsExpression> {
        Some(self)
    }
}
/// Payload of `ObligationCauseCode::MatchExpressionArm`: spans and context
/// used when reporting a type mismatch between `match` arms.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift)]
pub struct MatchExpressionArmCause<'tcx> {
    pub arm_span: Span,
    pub scrut_span: Span,
    pub semi_span: Option<(Span, StatementAsExpression)>,
    pub source: hir::MatchSource,
    pub prior_arms: Vec<Span>,
    pub last_ty: Ty<'tcx>,
    pub scrut_hir_id: hir::HirId,
    pub opt_suggest_box_span: Option<Span>,
}
/// Payload of `ObligationCauseCode::IfExpression`: spans used when reporting
/// a type mismatch between the `then` and `else` branches.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct IfExpressionCause {
    pub then: Span,
    pub else_sp: Span,
    pub outer: Option<Span>,
    pub semicolon: Option<(Span, StatementAsExpression)>,
    pub opt_suggest_box_span: Option<Span>,
}
/// Cause recorded when an obligation was derived from a parent obligation
/// (see the `*DerivedObligation` variants of `ObligationCauseCode`).
#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift)]
pub struct DerivedObligationCause<'tcx> {
    /// The trait reference of the parent obligation that led to the
    /// current obligation. Note that only trait obligations lead to
    /// derived obligations, so we just store the trait reference here
    /// directly.
    pub parent_trait_ref: ty::PolyTraitRef<'tcx>,
    /// The parent trait had this cause.
    pub parent_code: Rc<ObligationCauseCode<'tcx>>,
}
/// Errors that can arise while selecting a candidate for an obligation.
#[derive(Clone, Debug, TypeFoldable, Lift)]
pub enum SelectionError<'tcx> {
    Unimplemented,
    OutputTypeParameterMismatch(
        ty::PolyTraitRef<'tcx>,
        ty::PolyTraitRef<'tcx>,
        ty::error::TypeError<'tcx>,
    ),
    TraitNotObjectSafe(DefId),
    ConstEvalFailure(ErrorHandled),
    Overflow,
}
/// When performing resolution, it is typically the case that there
/// can be one of three outcomes:
///
/// - `Ok(Some(r))`: success occurred with result `r`
/// - `Ok(None)`: could not definitely determine anything, usually due
///   to inconclusive type inference.
/// - `Err(e)`: error `e` occurred
pub type SelectionResult<'tcx, T> = Result<Option<T>, SelectionError<'tcx>>;
/// Given the successful resolution of an obligation, the `ImplSource`
/// indicates where the impl comes from.
///
/// For example, the obligation may be satisfied by a specific impl (case A),
/// or it may be relative to some bound that is in scope (case B).
///
/// ```
/// impl<T:Clone> Clone<T> for Option<T> { ... } // Impl_1
/// impl<T:Clone> Clone<T> for Box<T> { ... }    // Impl_2
/// impl Clone for i32 { ... }                   // Impl_3
///
/// fn foo<T: Clone>(concrete: Option<Box<i32>>, param: T, mixed: Option<T>) {
///     // Case A: ImplSource points at a specific impl. Only possible when
///     // type is concretely known. If the impl itself has bounded
///     // type parameters, ImplSource will carry resolutions for those as well:
///     concrete.clone(); // ImplSource(Impl_1, [ImplSource(Impl_2, [ImplSource(Impl_3)])])
///
///     // Case B: ImplSource must be provided by caller. This applies when
///     // type is a type parameter.
///     param.clone(); // ImplSource::Param
///
///     // Case C: A mix of cases A and B.
///     mixed.clone(); // ImplSource(Impl_1, [ImplSource::Param])
/// }
/// ```
///
/// ### The type parameter `N`
///
/// See explanation on `ImplSourceUserDefinedData`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub enum ImplSource<'tcx, N> {
    /// ImplSource identifying a particular impl.
    UserDefined(ImplSourceUserDefinedData<'tcx, N>),
    /// ImplSource for auto trait implementations.
    /// This carries the information and nested obligations with regards
    /// to an auto implementation for a trait `Trait`. The nested obligations
    /// ensure the trait implementation holds for all the constituent types.
    AutoImpl(ImplSourceAutoImplData<N>),
    /// Successful resolution to an obligation provided by the caller
    /// for some type parameter. The `Vec<N>` represents the
    /// obligations incurred from normalizing the where-clause (if
    /// any).
    Param(Vec<N>, Constness),
    /// Virtual calls through an object.
    Object(ImplSourceObjectData<'tcx, N>),
    /// Successful resolution for a builtin trait.
    Builtin(ImplSourceBuiltinData<N>),
    /// ImplSource automatically generated for a closure. The `DefId` is the ID
    /// of the closure expression. This is a `ImplSource::UserDefined` in spirit, but the
    /// impl is generated by the compiler and does not appear in the source.
    Closure(ImplSourceClosureData<'tcx, N>),
    /// Same as above, but for a function pointer type with the given signature.
    FnPointer(ImplSourceFnPointerData<'tcx, N>),
    /// ImplSource for a builtin `DeterminantKind` trait implementation.
    DiscriminantKind(ImplSourceDiscriminantKindData),
    /// ImplSource automatically generated for a generator.
    Generator(ImplSourceGeneratorData<'tcx, N>),
    /// ImplSource for a trait alias.
    TraitAlias(ImplSourceTraitAliasData<'tcx, N>),
}
impl<'tcx, N> ImplSource<'tcx, N> {
pub fn nested_obligations(self) -> Vec<N> {
match self {
ImplSource::UserDefined(i) => i.nested,
ImplSource::Param(n, _) => n,
ImplSource::Builtin(i) => i.nested,
ImplSource::AutoImpl(d) => d.nested,
ImplSource::Closure(c) => c.nested,
ImplSource::Generator(c) => c.nested,
ImplSource::Object(d) => d.nested,
ImplSource::FnPointer(d) => d.nested,
ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData) => Vec::new(),
ImplSource::TraitAlias(d) => d.nested,
}
}
pub fn borrow_nested_obligations(&self) -> &[N] {
match &self {
ImplSource::UserDefined(i) => &i.nested[..],
ImplSource::Param(n, _) => &n[..],
ImplSource::Builtin(i) => &i.nested[..],
ImplSource::AutoImpl(d) => &d.nested[..],
ImplSource::Closure(c) => &c.nested[..],
ImplSource::Generator(c) => &c.nested[..],
ImplSource::Object(d) => &d.nested[..],
ImplSource::FnPointer(d) => &d.nested[..],
ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData) => &[],
ImplSource::TraitAlias(d) => &d.nested[..],
}
}
pub fn map<M, F>(self, f: F) -> ImplSource<'tcx, M>
where
F: FnMut(N) -> M,
{
match self {
ImplSource::UserDefined(i) => ImplSource::UserDefined(ImplSourceUserDefinedData {
impl_def_id: i.impl_def_id,
substs: i.substs,
nested: i.nested.into_iter().map(f).collect(),
}),
ImplSource::Param(n, ct) => ImplSource::Param(n.into_iter().map(f).collect(), ct),
ImplSource::Builtin(i) => ImplSource::Builtin(ImplSourceBuiltinData {
nested: i.nested.into_iter().map(f).collect(),
}),
ImplSource::Object(o) => ImplSource::Object(ImplSourceObjectData {
upcast_trait_ref: o.upcast_trait_ref,
vtable_base: o.vtable_base,
nested: o.nested.into_iter().map(f).collect(),
}),
ImplSource::AutoImpl(d) => ImplSource::AutoImpl(ImplSourceAutoImplData {
trait_def_id: d.trait_def_id,
nested: d.nested.into_iter().map(f).collect(),
}),
ImplSource::Closure(c) => ImplSource::Closure(ImplSourceClosureData {
closure_def_id: c.closure_def_id,
substs: c.substs,
nested: c.nested.into_iter().map(f).collect(),
}),
ImplSource::Generator(c) => ImplSource::Generator(ImplSourceGeneratorData {
generator_def_id: c.generator_def_id,
substs: c.substs,
nested: c.nested.into_iter().map(f).collect(),
}),
ImplSource::FnPointer(p) => ImplSource::FnPointer(ImplSourceFnPointerData {
fn_ty: p.fn_ty,
nested: p.nested.into_iter().map(f).collect(),
}),
ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData) => {
ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData)
}
ImplSource::TraitAlias(d) => ImplSource::TraitAlias(ImplSourceTraitAliasData {
alias_def_id: d.alias_def_id,
substs: d.substs,
nested: d.nested.into_iter().map(f).collect(),
}),
}
}
}
/// Identifies a particular impl in the source, along with a set of
/// substitutions from the impl's type/lifetime parameters. The
/// `nested` vector corresponds to the nested obligations attached to
/// the impl's type parameters.
///
/// The type parameter `N` indicates the type used for "nested
/// obligations" that are required by the impl. During type-check, this
/// is `Obligation`, as one might expect. During codegen, however, this
/// is `()`, because codegen only requires a shallow resolution of an
/// impl, and nested obligations are satisfied later.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceUserDefinedData<'tcx, N> {
    /// The impl selected to satisfy the obligation.
    pub impl_def_id: DefId,
    /// Substitutions for the impl's type/lifetime parameters.
    pub substs: SubstsRef<'tcx>,
    /// Nested obligations attached to the impl's type parameters.
    pub nested: Vec<N>,
}
/// `ImplSource` data for a generator; see `ImplSource::Generator`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceGeneratorData<'tcx, N> {
    /// The generator whose impl was selected.
    pub generator_def_id: DefId,
    /// Substitutions for the generator.
    pub substs: SubstsRef<'tcx>,
    /// Nested obligations. This can be non-empty if the generator
    /// signature contains associated types.
    pub nested: Vec<N>,
}
/// `ImplSource` data for a closure; see `ImplSource::Closure`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceClosureData<'tcx, N> {
    /// The closure expression whose compiler-generated impl was selected.
    pub closure_def_id: DefId,
    /// Substitutions for the closure.
    pub substs: SubstsRef<'tcx>,
    /// Nested obligations. This can be non-empty if the closure
    /// signature contains associated types.
    pub nested: Vec<N>,
}
/// `ImplSource` data for an auto trait implementation; see `ImplSource::AutoImpl`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceAutoImplData<N> {
    /// The auto trait being implemented.
    pub trait_def_id: DefId,
    /// Nested obligations for the constituent types.
    pub nested: Vec<N>,
}
/// `ImplSource` data for a builtin trait implementation; see `ImplSource::Builtin`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceBuiltinData<N> {
    /// Nested obligations required by the builtin impl.
    pub nested: Vec<N>,
}
/// `ImplSource` data for a virtual call through a trait object; see `ImplSource::Object`.
#[derive(PartialEq, Eq, Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceObjectData<'tcx, N> {
    /// `Foo` upcast to the obligation trait. This will be some supertrait of `Foo`.
    pub upcast_trait_ref: ty::PolyTraitRef<'tcx>,
    /// The vtable is formed by concatenating together the method lists of
    /// the base object trait and all supertraits; this is the start of
    /// `upcast_trait_ref`'s methods in that vtable.
    pub vtable_base: usize,
    /// Nested obligations incurred by this candidate.
    pub nested: Vec<N>,
}
/// `ImplSource` data for a function pointer candidate; see `ImplSource::FnPointer`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceFnPointerData<'tcx, N> {
    /// The type of the function pointer.
    pub fn_ty: Ty<'tcx>,
    /// Nested obligations incurred by this candidate.
    pub nested: Vec<N>,
}
/// Unit marker for the builtin `DiscriminantKind` impl; carries no data
/// and no nested obligations.
// FIXME(@lcnr): This should be refactored and merged with other builtin vtables.
#[derive(Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, HashStable)]
pub struct ImplSourceDiscriminantKindData;
/// `ImplSource` data for a trait alias; see `ImplSource::TraitAlias`.
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct ImplSourceTraitAliasData<'tcx, N> {
    /// The trait alias being used.
    pub alias_def_id: DefId,
    /// Substitutions for the alias.
    pub substs: SubstsRef<'tcx>,
    /// Nested obligations incurred by this candidate.
    pub nested: Vec<N>,
}
/// A reason why a trait is not object-safe.
#[derive(Clone, Debug, PartialEq, Eq, Hash, HashStable)]
pub enum ObjectSafetyViolation {
    /// `Self: Sized` declared on the trait.
    SizedSelf(SmallVec<[Span; 1]>),
    /// Supertrait reference references `Self` in an illegal location
    /// (e.g., `trait Foo : Bar<Self>`).
    SupertraitSelf(SmallVec<[Span; 1]>),
    /// Method has something illegal.
    Method(Symbol, MethodViolationCode, Span),
    /// Associated const.
    AssocConst(Symbol, Span),
}
impl ObjectSafetyViolation {
    /// Returns the primary error message describing this violation,
    /// suitable for use as the main diagnostic text.
    pub fn error_msg(&self) -> Cow<'static, str> {
        match *self {
            ObjectSafetyViolation::SizedSelf(_) => "it requires `Self: Sized`".into(),
            ObjectSafetyViolation::SupertraitSelf(ref spans) => {
                // A `DUMMY_SP`-only span list means there is no usable local
                // span, so point at the supertrait/`where`-clause wording.
                if spans.iter().any(|sp| *sp != DUMMY_SP) {
                    "it uses `Self` as a type parameter".into()
                } else {
                    "it cannot use `Self` as a type parameter in a supertrait or `where`-clause"
                        .into()
                }
            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod(_, _, _), _) => {
                format!("associated function `{}` has no `self` parameter", name).into()
            }
            // NOTE: this arm pattern-matches on the `DUMMY_SP` constant, so it
            // must stay *before* the general `ReferencesSelfInput` arm below.
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::ReferencesSelfInput(_),
                DUMMY_SP,
            ) => format!("method `{}` references the `Self` type in its parameters", name).into(),
            ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelfInput(_), _) => {
                format!("method `{}` references the `Self` type in this parameter", name).into()
            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelfOutput, _) => {
                format!("method `{}` references the `Self` type in its return type", name).into()
            }
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::WhereClauseReferencesSelf,
                _,
            ) => {
                format!("method `{}` references the `Self` type in its `where` clause", name).into()
            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::Generic, _) => {
                format!("method `{}` has generic type parameters", name).into()
            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::UndispatchableReceiver, _) => {
                format!("method `{}`'s `self` parameter cannot be dispatched on", name).into()
            }
            // As above, the `DUMMY_SP` arm must precede the catch-all one.
            ObjectSafetyViolation::AssocConst(name, DUMMY_SP) => {
                format!("it contains associated `const` `{}`", name).into()
            }
            ObjectSafetyViolation::AssocConst(..) => "it contains this associated `const`".into(),
        }
    }
    /// Attaches machine-applicable or speculative suggestions to `err`
    /// for the violations that have a known fix.
    pub fn solution(&self, err: &mut DiagnosticBuilder<'_>) {
        match *self {
            // No automatic fix to offer for these.
            ObjectSafetyViolation::SizedSelf(_) | ObjectSafetyViolation::SupertraitSelf(_) => {}
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::StaticMethod(sugg, self_span, has_args),
                _,
            ) => {
                err.span_suggestion(
                    self_span,
                    &format!(
                        "consider turning `{}` into a method by giving it a `&self` argument",
                        name
                    ),
                    format!("&self{}", if has_args { ", " } else { "" }),
                    Applicability::MaybeIncorrect,
                );
                match sugg {
                    Some((sugg, span)) => {
                        err.span_suggestion(
                            span,
                            &format!(
                                "alternatively, consider constraining `{}` so it does not apply to \
                                 trait objects",
                                name
                            ),
                            sugg.to_string(),
                            Applicability::MaybeIncorrect,
                        );
                    }
                    None => {
                        err.help(&format!(
                            "consider turning `{}` into a method by giving it a `&self` \
                             argument or constraining it so it does not apply to trait objects",
                            name
                        ));
                    }
                }
            }
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::UndispatchableReceiver,
                span,
            ) => {
                err.span_suggestion(
                    span,
                    &format!(
                        "consider changing method `{}`'s `self` parameter to be `&self`",
                        name
                    ),
                    "&Self".to_string(),
                    Applicability::MachineApplicable,
                );
            }
            // Fallback for the remaining method/const violations.
            ObjectSafetyViolation::AssocConst(name, _)
            | ObjectSafetyViolation::Method(name, ..) => {
                err.help(&format!("consider moving `{}` to another trait", name));
            }
        }
    }
    /// Returns the spans to label for this violation, if any.
    pub fn spans(&self) -> SmallVec<[Span; 1]> {
        // When `span` comes from a separate crate, it'll be `DUMMY_SP`. Treat it as `None` so
        // diagnostics use a `note` instead of a `span_label`.
        match self {
            ObjectSafetyViolation::SupertraitSelf(spans)
            | ObjectSafetyViolation::SizedSelf(spans) => spans.clone(),
            ObjectSafetyViolation::AssocConst(_, span)
            | ObjectSafetyViolation::Method(_, _, span)
                if *span != DUMMY_SP =>
            {
                smallvec![*span]
            }
            _ => smallvec![],
        }
    }
}
/// Reasons a method might not be object-safe.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
pub enum MethodViolationCode {
    /// e.g., `fn foo()`.
    /// Fields: an optional (suggestion, span) pair used to suggest constraining
    /// the method so it does not apply to trait objects, the span where a
    /// `&self` receiver could be inserted, and whether the method has arguments
    /// (all consumed by `ObjectSafetyViolation::solution`).
    StaticMethod(Option<(&'static str, Span)>, Span, bool /* has args */),
    /// e.g., `fn foo(&self, x: Self)`
    ReferencesSelfInput(usize),
    /// e.g., `fn foo(&self) -> Self`
    ReferencesSelfOutput,
    /// e.g., `fn foo(&self) where Self: Clone`
    WhereClauseReferencesSelf,
    /// e.g., `fn foo<A>()`
    Generic,
    /// the method's receiver (`self` argument) can't be dispatched on
    UndispatchableReceiver,
}
| 36.514321 | 100 | 0.617271 |
c107dca40f7ee3e27c870cfe9f98bc1649ae405b | 21,884 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::{
builtin::{
arguments::Arguments as BootArguments,
capability::BuiltinCapability,
kernel_stats::KernelStats,
log::{ReadOnlyLog, WriteOnlyLog},
process_launcher::ProcessLauncher,
root_job::{RootJob, ROOT_JOB_CAPABILITY_PATH, ROOT_JOB_FOR_INSPECT_CAPABILITY_PATH},
root_resource::RootResource,
runner::{BuiltinRunner, BuiltinRunnerFactory},
system_controller::SystemController,
time::UtcTimeMaintainer,
vmex::VmexService,
},
capability_ready_notifier::CapabilityReadyNotifier,
config::RuntimeConfig,
framework::RealmCapabilityHost,
fuchsia_base_pkg_resolver, fuchsia_boot_resolver, fuchsia_pkg_resolver,
model::{
binding::Binder,
environment::{Environment, RunnerRegistration, RunnerRegistry},
error::ModelError,
event_logger::EventLogger,
events::{
event::SyncMode,
registry::{EventRegistry, ExecutionMode},
running_provider::RunningProvider,
source_factory::EventSourceFactory,
stream_provider::EventStreamProvider,
},
hooks::EventType,
hub::Hub,
model::{Model, ModelParams},
resolver::{Resolver, ResolverRegistry},
},
root_realm_stop_notifier::RootRealmStopNotifier,
startup::Arguments,
work_scheduler::WorkScheduler,
},
anyhow::{format_err, Context as _, Error},
cm_rust::CapabilityName,
fidl::endpoints::{create_endpoints, create_proxy, ServerEnd, ServiceMarker},
fidl_fuchsia_component_internal::Config,
fidl_fuchsia_io::{
DirectoryMarker, DirectoryProxy, MODE_TYPE_DIRECTORY, OPEN_RIGHT_READABLE,
OPEN_RIGHT_WRITABLE,
},
fidl_fuchsia_sys::{LoaderMarker, LoaderProxy},
fuchsia_async as fasync,
fuchsia_component::{client, server::*},
fuchsia_runtime::{take_startup_handle, HandleType},
fuchsia_zircon::{self as zx, Clock, HandleBased},
futures::{channel::oneshot, prelude::*},
log::info,
std::{
path::PathBuf,
sync::{Arc, Weak},
},
};
// Re-export so that the component_manager binary can see it.
pub use crate::builtin::time::create_and_start_utc_clock;
// Allow shutdown to take up to an hour. Passed to `SystemController::new` in
// `BuiltinEnvironment::new` below.
pub static SHUTDOWN_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(60 * 60);
// TODO(viktard): Merge Arguments, RuntimeConfig and root_component_url from ModelParams
/// Builder used to assemble a [`BuiltinEnvironment`]: collects arguments,
/// config, runners, and resolvers, then constructs the `Model` and the
/// environment in `build()`.
#[derive(Default)]
pub struct BuiltinEnvironmentBuilder {
    args: Option<Arguments>,
    config: Option<Config>,
    runtime_config: Option<RuntimeConfig>,
    // Runner factories registered so far; wrapped in `BuiltinRunner` at build
    // time once the final `RuntimeConfig` is known.
    runners: Vec<(CapabilityName, Arc<dyn BuiltinRunnerFactory>)>,
    resolvers: ResolverRegistry,
    // This is used to initialize fuchsia_base_pkg_resolver's Model reference. Resolvers must
    // be created to construct the Model.
    model_for_resolver: Option<oneshot::Sender<Weak<Model>>>,
    utc_clock: Option<Arc<Clock>>,
}
impl BuiltinEnvironmentBuilder {
    /// Creates a builder with all fields unset.
    pub fn new() -> Self {
        BuiltinEnvironmentBuilder::default()
    }
    /// Sets the startup arguments used to construct the model and environment.
    pub fn set_args(mut self, args: Arguments) -> Self {
        self.args = Some(args);
        self
    }
    /// Sets the component manager `Config`. Required by `build()`.
    pub fn set_config(mut self, config: Config) -> Self {
        self.config = Some(config);
        self
    }
    // TODO(viktard): add a method to populate config from Args.
    /// Sets the runtime config; `build()` falls back to the default if unset.
    pub fn set_runtime_config(mut self, runtime_config: RuntimeConfig) -> Self {
        self.runtime_config = Some(runtime_config);
        self
    }
    /// Sets the UTC clock handed to the time maintainer service.
    pub fn set_utc_clock(mut self, clock: Arc<Clock>) -> Self {
        self.utc_clock = Some(clock);
        self
    }
    /// Registers a builtin runner factory under `name`.
    pub fn add_runner(
        mut self,
        name: CapabilityName,
        runner: Arc<dyn BuiltinRunnerFactory>,
    ) -> Self {
        // We don't wrap these in a BuiltinRunner immediately because that requires the
        // RuntimeConfig, which may be provided after this or may fall back to the default.
        self.runners.push((name, runner));
        self
    }
    /// Registers a component resolver for the given URL scheme.
    pub fn add_resolver(
        mut self,
        scheme: String,
        resolver: Box<dyn Resolver + Send + Sync + 'static>,
    ) -> Self {
        self.resolvers.register(scheme, resolver);
        self
    }
    /// Adds standard resolvers whose dependencies are available in the process's namespace. This
    /// includes:
    ///   - A fuchsia-boot resolver if /boot is available.
    ///   - One of two different fuchsia-pkg resolver implementations, either:
    ///       - If /svc/fuchsia.sys.Loader is present, then an implementation that proxies to that
    ///         protocol (which is the v1 resolver equivalent). This is used for tests or other
    ///         scenarios where component_manager runs as a v1 component.
    ///       - Otherwise, an implementation that resolves packages from a /pkgfs directory
    ///         capability if one is exposed from the root component. (See fuchsia_base_pkg_resolver
    ///         for more details.)
    ///
    /// TODO(fxb/46491): fuchsia_base_pkg_resolver should be replaced with a resolver provided by
    /// the topology.
    pub fn add_available_resolvers_from_namespace(mut self) -> Result<Self, Error> {
        // Either the fuchsia-boot or fuchsia-pkg resolver may be unavailable in certain contexts.
        let boot_resolver = fuchsia_boot_resolver::FuchsiaBootResolver::new()
            .context("Failed to create boot resolver")?;
        match boot_resolver {
            None => info!("No /boot directory in namespace, fuchsia-boot resolver unavailable"),
            Some(r) => {
                self.resolvers.register(fuchsia_boot_resolver::SCHEME.to_string(), Box::new(r));
            }
        };
        if let Some(loader) = Self::connect_sys_loader()? {
            self.resolvers.register(
                fuchsia_pkg_resolver::SCHEME.to_string(),
                Box::new(fuchsia_pkg_resolver::FuchsiaPkgResolver::new(loader)),
            );
        } else {
            // There's a circular dependency here. The model needs the resolver register to be
            // created, but fuchsia_base_pkg_resolver needs a reference to model so it can bind to
            // the pkgfs directory. We use a futures::oneshot::channel to send the Model to the
            // resolver once it has been created.
            let (sender, receiver) = oneshot::channel();
            self.resolvers.register(
                fuchsia_base_pkg_resolver::SCHEME.to_string(),
                Box::new(fuchsia_base_pkg_resolver::FuchsiaPkgResolver::new(receiver)),
            );
            self.model_for_resolver = Some(sender);
        }
        Ok(self)
    }
    /// Constructs the `Model` and the `BuiltinEnvironment` from everything
    /// registered so far. Fails if no `Config` was provided.
    pub async fn build(self) -> Result<BuiltinEnvironment, Error> {
        let args = self.args.unwrap_or_default();
        // Every registered runner is exposed in the root environment as a
        // registration sourced from component manager itself.
        let runner_map = self
            .runners
            .iter()
            .map(|(name, _)| {
                (
                    name.clone(),
                    RunnerRegistration {
                        source_name: name.clone(),
                        source: cm_rust::RegistrationSource::Self_,
                    },
                )
            })
            .collect();
        let params = ModelParams {
            root_component_url: args.root_component_url.clone(),
            root_environment: Environment::new_root(
                RunnerRegistry::new(runner_map),
                self.resolvers,
            ),
        };
        let model = Arc::new(Model::new(params));
        // If we previously created a resolver that requires the Model (in
        // add_available_resolvers_from_namespace), send the just-created model to it.
        if let Some(sender) = self.model_for_resolver {
            // This only fails if the receiver has been dropped already which shouldn't happen.
            sender
                .send(Arc::downgrade(&model))
                .map_err(|_| format_err!("sending model to resolver failed"))?;
        }
        // Wrap BuiltinRunnerFactory in BuiltinRunner now that we have the definite RuntimeConfig.
        let runtime_config = Arc::new(self.runtime_config.unwrap_or_default());
        let builtin_runners = self
            .runners
            .into_iter()
            .map(|(name, runner)| {
                Arc::new(BuiltinRunner::new(name, runner, Arc::downgrade(&runtime_config)))
            })
            .collect();
        let config =
            self.config.ok_or(format_err!("Config is required for BuiltinEnvironment."))?;
        Ok(BuiltinEnvironment::new(
            model,
            config,
            args,
            runtime_config,
            builtin_runners,
            self.utc_clock,
        )
        .await?)
    }
    /// Checks if the appmgr loader service is available through our namespace and connects to it if
    /// so. If not available, returns Ok(None).
    fn connect_sys_loader() -> Result<Option<LoaderProxy>, Error> {
        let service_path = PathBuf::from(format!("/svc/{}", LoaderMarker::NAME));
        if !service_path.exists() {
            return Ok(None);
        }
        let loader = client::connect_to_service::<LoaderMarker>()
            .context("error connecting to system loader")?;
        return Ok(Some(loader));
    }
}
/// The built-in environment consists of the set of the root services and framework services. Use
/// BuiltinEnvironmentBuilder to construct one.
///
/// The available built-in capabilities depends on the configuration provided in Arguments:
/// * If [Arguments::use_builtin_process_launcher] is true, a fuchsia.process.Launcher service
///   is available.
/// * If [Arguments::maintain_utc_clock] is true, a fuchsia.time.Maintenance service is
///   available.
pub struct BuiltinEnvironment {
    pub model: Arc<Model>,
    // Framework capabilities.
    pub boot_args: Arc<BootArguments>,
    // The `Option` capabilities below are only present when the root resource
    // handle (or the relevant config flag) was available at construction time.
    pub kernel_stats: Option<Arc<KernelStats>>,
    pub process_launcher: Option<Arc<ProcessLauncher>>,
    pub root_job: Arc<RootJob>,
    pub root_job_for_inspect: Arc<RootJob>,
    pub read_only_log: Option<Arc<ReadOnlyLog>>,
    pub write_only_log: Option<Arc<WriteOnlyLog>>,
    pub root_resource: Option<Arc<RootResource>>,
    pub system_controller: Arc<SystemController>,
    pub utc_time_maintainer: Option<Arc<UtcTimeMaintainer>>,
    pub vmex_service: Option<Arc<VmexService>>,
    pub work_scheduler: Arc<WorkScheduler>,
    pub realm_capability_host: Arc<RealmCapabilityHost>,
    pub hub: Arc<Hub>,
    pub builtin_runners: Vec<Arc<BuiltinRunner>>,
    // Eventing machinery installed as hooks on the root realm.
    pub event_registry: Arc<EventRegistry>,
    pub event_source_factory: Arc<EventSourceFactory>,
    pub stop_notifier: Arc<RootRealmStopNotifier>,
    pub capability_ready_notifier: Arc<CapabilityReadyNotifier>,
    pub event_stream_provider: Arc<EventStreamProvider>,
    pub event_logger: Option<Arc<EventLogger>>,
    pub execution_mode: ExecutionMode,
}
impl BuiltinEnvironment {
    /// Constructs the environment: creates each builtin/framework capability
    /// and installs its hooks on the root realm. Capabilities that depend on
    /// the root resource startup handle are `None` when it is absent.
    async fn new(
        model: Arc<Model>,
        config: Config,
        args: Arguments,
        runtime_config: Arc<RuntimeConfig>,
        builtin_runners: Vec<Arc<BuiltinRunner>>,
        utc_clock: Option<Arc<Clock>>,
    ) -> Result<BuiltinEnvironment, ModelError> {
        // Set up ProcessLauncher if available.
        let process_launcher = if args.use_builtin_process_launcher {
            let process_launcher = Arc::new(ProcessLauncher::new());
            model.root_realm.hooks.install(process_launcher.hooks()).await;
            Some(process_launcher)
        } else {
            None
        };
        // Set up RootJob service.
        let root_job = RootJob::new(&ROOT_JOB_CAPABILITY_PATH, zx::Rights::SAME_RIGHTS);
        model.root_realm.hooks.install(root_job.hooks()).await;
        // Set up RootJobForInspect service, exposed with reduced (inspect-only)
        // rights compared to the full root job above.
        let root_job_for_inspect = RootJob::new(
            &ROOT_JOB_FOR_INSPECT_CAPABILITY_PATH,
            zx::Rights::INSPECT
                | zx::Rights::ENUMERATE
                | zx::Rights::DUPLICATE
                | zx::Rights::TRANSFER
                | zx::Rights::GET_PROPERTY,
        );
        model.root_realm.hooks.install(root_job_for_inspect.hooks()).await;
        // Take the root resource from our startup handles, if provided.
        let root_resource_handle =
            take_startup_handle(HandleType::Resource.into()).map(zx::Resource::from);
        // Set up BootArguments service.
        let boot_args = BootArguments::new();
        model.root_realm.hooks.install(boot_args.hooks()).await;
        // Set up KernelStats service.
        let kernel_stats = root_resource_handle.as_ref().map(|handle| {
            KernelStats::new(
                handle
                    .duplicate_handle(zx::Rights::SAME_RIGHTS)
                    .expect("Failed to duplicate root resource handle"),
            )
        });
        if let Some(kernel_stats) = kernel_stats.as_ref() {
            model.root_realm.hooks.install(kernel_stats.hooks()).await;
        }
        // Set up ReadOnlyLog service.
        let read_only_log = root_resource_handle.as_ref().map(|handle| {
            ReadOnlyLog::new(
                handle
                    .duplicate_handle(zx::Rights::SAME_RIGHTS)
                    .expect("Failed to duplicate root resource handle"),
            )
        });
        if let Some(read_only_log) = read_only_log.as_ref() {
            model.root_realm.hooks.install(read_only_log.hooks()).await;
        }
        // Set up WriteOnlyLog service.
        let write_only_log = root_resource_handle.as_ref().map(|handle| {
            WriteOnlyLog::new(zx::DebugLog::create(handle, zx::DebugLogOpts::empty()).unwrap())
        });
        if let Some(write_only_log) = write_only_log.as_ref() {
            model.root_realm.hooks.install(write_only_log.hooks()).await;
        }
        // Register the UTC time maintainer.
        let utc_time_maintainer = if let Some(clock) = utc_clock {
            let utc_time_maintainer = Arc::new(UtcTimeMaintainer::new(clock));
            model.root_realm.hooks.install(utc_time_maintainer.hooks()).await;
            Some(utc_time_maintainer)
        } else {
            None
        };
        // Set up the Vmex service.
        let vmex_service = root_resource_handle.as_ref().map(|handle| {
            VmexService::new(
                handle
                    .duplicate_handle(zx::Rights::SAME_RIGHTS)
                    .expect("Failed to duplicate root resource handle"),
            )
        });
        if let Some(vmex_service) = vmex_service.as_ref() {
            model.root_realm.hooks.install(vmex_service.hooks()).await;
        }
        // Set up RootResource service. Consumes `root_resource_handle`, so this
        // must come after the services above that duplicate it.
        let root_resource = root_resource_handle.map(RootResource::new);
        if let Some(root_resource) = root_resource.as_ref() {
            model.root_realm.hooks.install(root_resource.hooks()).await;
        }
        // Set up System Controller service.
        let system_controller = Arc::new(SystemController::new(model.clone(), SHUTDOWN_TIMEOUT));
        model.root_realm.hooks.install(system_controller.hooks()).await;
        // Set up work scheduler.
        let work_scheduler =
            WorkScheduler::new(Arc::new(Arc::downgrade(&model)) as Arc<dyn Binder>).await;
        model.root_realm.hooks.install(work_scheduler.hooks()).await;
        // Set up the realm service.
        let realm_capability_host =
            Arc::new(RealmCapabilityHost::new(model.clone(), runtime_config));
        model.root_realm.hooks.install(realm_capability_host.hooks()).await;
        // Set up the builtin runners.
        for runner in &builtin_runners {
            model.root_realm.hooks.install(runner.hooks()).await;
        }
        // Set up the root realm stop notifier.
        let stop_notifier = Arc::new(RootRealmStopNotifier::new());
        model.root_realm.hooks.install(stop_notifier.hooks()).await;
        let hub = Arc::new(Hub::new(&model, args.root_component_url.clone())?);
        model.root_realm.hooks.install(hub.hooks()).await;
        // Set up the capability ready notifier.
        let capability_ready_notifier =
            Arc::new(CapabilityReadyNotifier::new(Arc::downgrade(&model)));
        model.root_realm.hooks.install(capability_ready_notifier.hooks()).await;
        // Set up the event registry.
        let event_registry = {
            let mut event_registry = EventRegistry::new(Arc::downgrade(&model));
            event_registry.register_synthesis_provider(
                EventType::CapabilityReady,
                capability_ready_notifier.clone(),
            );
            event_registry
                .register_synthesis_provider(EventType::Running, Arc::new(RunningProvider::new()));
            Arc::new(event_registry)
        };
        model.root_realm.hooks.install(event_registry.hooks()).await;
        // `config.debug` selects debug vs. production event/source behavior.
        let execution_mode = match config.debug {
            Some(true) => ExecutionMode::Debug,
            _ => ExecutionMode::Production,
        };
        // Set up the event source factory.
        let event_source_factory = Arc::new(EventSourceFactory::new(
            Arc::downgrade(&model),
            Arc::downgrade(&event_registry),
            execution_mode.clone(),
        ));
        model.root_realm.hooks.install(event_source_factory.hooks()).await;
        let event_stream_provider = Arc::new(EventStreamProvider::new(
            Arc::downgrade(&event_registry),
            execution_mode.clone(),
        ));
        model.root_realm.hooks.install(event_stream_provider.hooks()).await;
        // Event logging is only enabled in debug mode.
        let event_logger = if config.debug.unwrap_or(false) {
            let event_logger = Arc::new(EventLogger::new());
            model.root_realm.hooks.install(event_logger.hooks()).await;
            Some(event_logger)
        } else {
            None
        };
        Ok(BuiltinEnvironment {
            model,
            boot_args,
            process_launcher,
            root_job,
            root_job_for_inspect,
            kernel_stats,
            read_only_log,
            write_only_log,
            root_resource,
            system_controller,
            utc_time_maintainer,
            vmex_service,
            work_scheduler,
            realm_capability_host,
            hub,
            builtin_runners,
            event_registry,
            event_source_factory,
            stop_notifier,
            capability_ready_notifier,
            event_stream_provider,
            event_logger,
            execution_mode,
        })
    }
    /// Setup a ServiceFs that contains the Hub and (optionally) the `BlockingEventSource` service.
    async fn create_service_fs<'a>(&self) -> Result<ServiceFs<ServiceObj<'a, ()>>, ModelError> {
        // Create the ServiceFs
        let mut service_fs = ServiceFs::new();
        // Setup the hub
        let (hub_proxy, hub_server_end) = create_proxy::<DirectoryMarker>().unwrap();
        self.hub
            .open_root(OPEN_RIGHT_READABLE | OPEN_RIGHT_WRITABLE, hub_server_end.into_channel())
            .await?;
        service_fs.add_remote("hub", hub_proxy);
        // If component manager is in debug mode, create an event source scoped at the
        // root and offer it via ServiceFs to the outside world.
        if self.execution_mode.is_debug() {
            let event_source = self.event_source_factory.create_for_debug(SyncMode::Sync).await?;
            service_fs.dir("svc").add_fidl_service(move |stream| {
                let event_source = event_source.clone();
                event_source.serve(stream);
            });
        }
        Ok(service_fs)
    }
    /// Bind ServiceFs to a provided channel
    async fn bind_service_fs(&self, channel: zx::Channel) -> Result<(), ModelError> {
        let mut service_fs = self.create_service_fs().await?;
        // Bind to the channel
        service_fs
            .serve_connection(channel)
            .map_err(|err| ModelError::namespace_creation_failed(err))?;
        // Start up ServiceFs
        fasync::Task::spawn(async move {
            service_fs.collect::<()>().await;
        })
        .detach();
        Ok(())
    }
    /// Bind ServiceFs to the outgoing directory of this component, if it exists.
    pub async fn bind_service_fs_to_out(&self) -> Result<(), ModelError> {
        if let Some(handle) = fuchsia_runtime::take_startup_handle(
            fuchsia_runtime::HandleType::DirectoryRequest.into(),
        ) {
            self.bind_service_fs(zx::Channel::from(handle)).await?;
        }
        Ok(())
    }
    /// Bind ServiceFs to a new channel and return the Hub directory.
    /// Used mainly by integration tests.
    pub async fn bind_service_fs_for_hub(&self) -> Result<DirectoryProxy, ModelError> {
        // Create a channel that ServiceFs will operate on
        let (service_fs_proxy, service_fs_server_end) = create_proxy::<DirectoryMarker>().unwrap();
        self.bind_service_fs(service_fs_server_end.into_channel()).await?;
        // Open the Hub from within ServiceFs
        let (hub_client_end, hub_server_end) = create_endpoints::<DirectoryMarker>().unwrap();
        service_fs_proxy
            .open(
                OPEN_RIGHT_READABLE | OPEN_RIGHT_WRITABLE,
                MODE_TYPE_DIRECTORY,
                "hub",
                ServerEnd::new(hub_server_end.into_channel()),
            )
            .map_err(|err| ModelError::namespace_creation_failed(err))?;
        let hub_proxy = hub_client_end.into_proxy().unwrap();
        Ok(hub_proxy)
    }
    /// Blocks until the root realm has stopped.
    pub async fn wait_for_root_realm_stop(&self) {
        self.stop_notifier.wait_for_root_realm_stop().await;
    }
}
| 39.148479 | 100 | 0.62091 |
d547f2fe38a3082ac9fa65fdaf1659f06367cb0a | 11,753 | use futures_util::future::poll_fn;
use redis::{ConnectionAddr, ConnectionInfo, ErrorKind, RedisError, RedisResult, Value};
use std::collections::hash_map::{Entry, HashMap};
use std::future::Future;
use std::io::Result as IoResult;
use std::net::ToSocketAddrs;
use std::pin::Pin;
use std::task::Poll;
use tokio::io::{self, AsyncWriteExt};
use tokio::net::TcpStream;
use tokio::sync::{broadcast, mpsc, oneshot};
#[cfg(unix)]
use tokio::net::UnixStream;
/// Incremental parser state for reading RESP values off the socket; fed to
/// `redis::parse_redis_value_async` in `ReadHalf::read`.
type Decoder = combine::stream::Decoder<
    combine::parser::combinator::AnySendSyncPartialState,
    combine::stream::PointerOffset<[u8]>,
>;
/// Read half of a Redis pubsub connection.
struct ReadHalf {
    /// Buffered reader over the boxed read side of the socket.
    inner: io::BufReader<Box<dyn io::AsyncRead + Unpin + Send>>,
    /// Partial-parse state carried between reads.
    decoder: Decoder,
}
impl ReadHalf {
    /// Read a single value from Redis, resuming any partial parse held in
    /// `self.decoder`.
    async fn read(&mut self) -> RedisResult<Value> {
        redis::parse_redis_value_async(&mut self.decoder, &mut self.inner).await
    }
}
/// Write half of a Redis pubsub connection.
struct WriteHalf {
    /// Boxed write side of the socket.
    inner: Box<dyn io::AsyncWrite + Unpin + Send>,
}
impl WriteHalf {
    /// Encode `cmd` as a RESP array of bulk strings and send it to Redis in a
    /// single write.
    async fn write(&mut self, cmd: &[&[u8]]) -> IoResult<()> {
        // Array header: `*<len>\r\n`, then one `$<len>\r\n<bytes>\r\n` bulk
        // string per argument.
        let mut buf: Vec<u8> = Vec::new();
        buf.extend_from_slice(format!("*{}\r\n", cmd.len()).as_bytes());
        for arg in cmd {
            buf.extend_from_slice(format!("${}\r\n", arg.len()).as_bytes());
            buf.extend_from_slice(arg);
            buf.extend_from_slice(b"\r\n");
        }
        self.inner.write_all(&buf).await
    }
}
/// Channel type used to receive pubsub messages (raw payload bytes).
pub type RecvChan = broadcast::Receiver<Vec<u8>>;
/// Channel type used to reply to `Cmd` with a subscription receiver.
type ReplyChan = oneshot::Sender<RecvChan>;
/// Command type sent to the connection loop.
struct Cmd {
    /// Channel (by name) to subscribe to.
    chan: Vec<u8>,
    /// Reply channel for the command; receives a broadcast receiver once the
    /// subscription is active.
    reply: ReplyChan,
}
/// Tracks an active subscription on the Redis server.
struct Sub {
    /// Channel sender used to notify all logical subscribers.
    tx: broadcast::Sender<Vec<u8>>,
    /// Reply channels that are awaiting confirmation; `None` once the server
    /// has confirmed the subscription.
    pending: Option<Vec<ReplyChan>>,
}
/// Polling events that can happen in the connection loop.
enum LoopEvent {
    /// A subscribe command arrived on the command channel.
    Cmd(Cmd),
    /// The command channel closed (all senders dropped).
    CmdClosed,
    /// The periodic ping/cleanup interval ticked.
    Interval,
    /// A read completed; carries the result and the read half for reuse.
    Read((RedisResult<Value>, ReadHalf)),
}
/// The Redis pubsub connection loop.
///
/// Owns both halves of the connection and multiplexes three event sources:
/// subscribe commands arriving on `cmd`, a periodic interval used for pings
/// and subscription cleanup, and values read from the connection. I/O and
/// protocol errors abort the loop via `expect`/`panic!`.
async fn conn_loop(mut rx: ReadHalf, mut tx: WriteHalf, mut cmd: mpsc::Receiver<Cmd>) {
    // Ping or cleanup subscriptions at an interval.
    let interval = tokio::time::interval(tokio::time::Duration::from_secs(20));
    tokio::pin!(interval);
    // Ignore the first (immediate) tick.
    interval.as_mut().tick().await;
    // Current read operation. The future owns `rx` and hands it back with the
    // result so the next read can be started.
    // TODO: The redis crate has a ValueCodec, but doesn't expose it. This is a workaround.
    let mut read_fut: Pin<Box<dyn Future<Output = _> + Send>> = Box::pin(async move {
        let res = rx.read().await;
        (res, rx)
    });
    // Map of active subscriptions by channel name.
    let mut subs: HashMap<Vec<u8>, Sub> = HashMap::new();
    loop {
        // This specifically prioritizes processing commands over receiving.
        match poll_fn(|cx| {
            if let Poll::Ready(res) = cmd.poll_recv(cx) {
                match res {
                    Some(cmd) => Poll::Ready(LoopEvent::Cmd(cmd)),
                    None => Poll::Ready(LoopEvent::CmdClosed),
                }
            } else if interval.as_mut().poll_tick(cx).is_ready() {
                Poll::Ready(LoopEvent::Interval)
            } else if let Poll::Ready(res) = read_fut.as_mut().poll(cx) {
                Poll::Ready(LoopEvent::Read(res))
            } else {
                Poll::Pending
            }
        })
        .await
        {
            LoopEvent::Cmd(Cmd { chan, reply }) => match subs.entry(chan.clone()) {
                // If already subscribed, reply with a broadcast channel immediately. Otherwise,
                // add the reply channel to `pending`, and send the Redis subscribe command if
                // necessary.
                Entry::Occupied(mut entry) => {
                    let sub = entry.get_mut();
                    if let Some(ref mut pending) = sub.pending {
                        pending.push(reply);
                    } else {
                        let _ignored = reply.send(sub.tx.subscribe());
                    }
                }
                Entry::Vacant(entry) => {
                    entry.insert(Sub {
                        tx: broadcast::channel(8).0,
                        pending: Some(vec![reply]),
                    });
                    tx.write(&[b"SUBSCRIBE", &chan])
                        .await
                        .expect("Failed to send subscribe command to Redis");
                }
            },
            LoopEvent::CmdClosed => {
                // TODO: Stop reading from the command channel.
                // This, plus an empty `subs`, means we can exit.
                unimplemented!();
            }
            LoopEvent::Interval => {
                // Unsubscribe from channels that no longer have subscribers, or send a ping.
                let to_unsub: Vec<Vec<u8>> = subs
                    .iter()
                    .filter_map(|(chan, sub)| {
                        if sub.pending.is_none() && sub.tx.receiver_count() == 0 {
                            Some(chan.clone())
                        } else {
                            None
                        }
                    })
                    .collect();
                if to_unsub.is_empty() {
                    tx.write(&[b"PING"])
                        .await
                        .expect("Failed to send ping command to Redis");
                } else {
                    for chan in &to_unsub {
                        subs.remove(chan);
                    }
                    let mut unsub_cmd: Vec<&[u8]> = vec![b"UNSUBSCRIBE"];
                    unsub_cmd.extend(to_unsub.iter().map(|chan| &chan[..]));
                    tx.write(&unsub_cmd)
                        .await
                        .expect("Failed to send unsubscribe command to Redis");
                }
            }
            LoopEvent::Read((res, mut rx)) => {
                // Immediately queue the next read with the returned read half.
                read_fut = Box::pin(async move {
                    let res = rx.read().await;
                    (res, rx)
                });
                let value = res.expect("Failed to read from Redis");
                let vec = match value {
                    // Note: If we have no subscriptions at all, we receive pongs as regular
                    // replies instead of events.
                    Value::Status(status) if status == "PONG" => continue,
                    Value::Bulk(ref vec) if vec.len() >= 2 => vec,
                    _ => panic!("Unexpected value from Redis: {:?}", value),
                };
                match (&vec[0], &vec[1], vec.get(2)) {
                    // Handle a message event by sending on the broadcast channel.
                    (
                        &Value::Data(ref ev),
                        &Value::Data(ref chan),
                        Some(&Value::Data(ref data)),
                    ) if ev == b"message" => {
                        if let Some(ref sub) = subs.get(&chan[..]) {
                            let _ignored = sub.tx.send(data.to_vec());
                        }
                    }
                    // Handle subscription confirmation by sending out pending replies.
                    (&Value::Data(ref ev), &Value::Data(ref chan), _) if ev == b"subscribe" => {
                        if let Some(ref mut sub) = subs.get_mut(&chan[..]) {
                            if let Some(pending) = sub.pending.take() {
                                for reply in pending {
                                    let _ignored = reply.send(sub.tx.subscribe());
                                }
                            }
                        }
                    }
                    // Some other events are ok, but we do nothing with them.
                    (&Value::Data(ref ev), _, _) if ev == b"unsubscribe" || ev == b"pong" => {}
                    _ => panic!("Unexpected value from Redis: {:?}", value),
                }
            }
        }
    }
}
/// A Subscriber can be used to subscribe to Redis pubsub channels.
///
/// This struct can be cheaply cloned. It is simply a client of the connection loop which is
/// running in another task.
#[derive(Clone)]
pub struct Subscriber {
    // Sender side of the command channel consumed by `conn_loop`.
    cmd: mpsc::Sender<Cmd>,
}
impl Subscriber {
    /// Subscribe to a channel.
    ///
    /// Resolves only after the server has confirmed the subscription, at
    /// which point the broadcast receiver for the channel is returned.
    pub async fn subscribe(&mut self, chan: Vec<u8>) -> broadcast::Receiver<Vec<u8>> {
        let (reply, reply_rx) = oneshot::channel();
        let sent = self.cmd.send(Cmd { chan, reply }).await;
        match sent {
            Ok(()) => match reply_rx.await {
                Ok(receiver) => receiver,
                Err(_) => panic!("Tried to subscribe on closed pubsub connection"),
            },
            Err(_) => panic!("Tried to subscribe on closed pubsub connection"),
        }
    }
}
/// Make a pubsub connection to Redis.
///
/// Resolves addresses, opens a TCP or Unix-socket stream, optionally
/// authenticates, then spawns `conn_loop` and returns a `Subscriber`
/// handle bound to it.
pub async fn connect(info: &ConnectionInfo) -> RedisResult<Subscriber> {
    // Note: This code is borrowed from the redis crate.
    let (rx, tx): (
        Box<dyn io::AsyncRead + Unpin + Send>,
        Box<dyn io::AsyncWrite + Unpin + Send>,
    ) = match *info.addr {
        ConnectionAddr::Tcp(ref host, port) => {
            // Only the first resolved address is tried.
            let socket_addr = {
                let mut socket_addrs = (&host[..], port).to_socket_addrs()?;
                match socket_addrs.next() {
                    Some(socket_addr) => socket_addr,
                    None => {
                        return Err(RedisError::from((
                            ErrorKind::InvalidClientConfig,
                            "No address found for host",
                        )));
                    }
                }
            };
            let (rx, tx) = io::split(TcpStream::connect(&socket_addr).await?);
            (Box::new(rx), Box::new(tx))
        }
        ConnectionAddr::TcpTls { .. } => {
            return Err(RedisError::from((
                ErrorKind::InvalidClientConfig,
                "TLS connections not yet supported",
            )))
        }
        #[cfg(unix)]
        ConnectionAddr::Unix(ref path) => {
            let (rx, tx) = io::split(UnixStream::connect(path).await?);
            (Box::new(rx), Box::new(tx))
        }
        #[cfg(not(unix))]
        ConnectionAddr::Unix(_) => {
            return Err(RedisError::from((
                ErrorKind::InvalidClientConfig,
                "Cannot connect to unix sockets \
                 on this platform",
            )))
        }
    };
    let mut rx = ReadHalf {
        inner: io::BufReader::new(rx),
        decoder: Decoder::new(),
    };
    let mut tx = WriteHalf { inner: tx };
    // AUTH is only sent when a password is configured; username is optional
    // (two-argument AUTH for Redis 6 ACLs).
    if let Some(ref passwd) = info.passwd {
        if let Some(ref username) = info.username {
            tx.write(&[b"AUTH", username.as_bytes(), passwd.as_bytes()])
                .await?;
        } else {
            tx.write(&[b"AUTH", passwd.as_bytes()]).await?;
        }
        match rx.read().await {
            Ok(Value::Okay) => (),
            _ => {
                return Err((
                    ErrorKind::AuthenticationFailed,
                    "Password authentication failed",
                )
                    .into());
            }
        }
    }
    // Note: Pubsub ignores database ID, so we don't need to send `SELECT`.
    let (cmd_tx, cmd_rx) = mpsc::channel(8);
    tokio::spawn(conn_loop(rx, tx, cmd_rx));
    Ok(Subscriber { cmd: cmd_tx })
}
| 36.728125 | 96 | 0.49766 |
48d99595db80fc91bd4529c09e0af06f82ac8983 | 907 | pub mod azw3;
pub mod epub;
pub mod mobi;
pub mod pdf;
pub mod prelude;
pub mod zip;
use crate::{checker, errors::*, models::*};
use std::process::{Command, Stdio};
/// Convert an e-book from `src` to `dst` by shelling out to Calibre's
/// `ebook-convert` tool.
///
/// Returns an error if Calibre is not installed, if the conversion exits
/// with a non-zero status, or if no exit code can be obtained.
pub fn book_convert(src: &str, dst: &str) -> Result<()> {
    let program = "ebook-convert";
    // Probe for the tool first so we can give an actionable install hint.
    if !checker::exec_succeed(program, &["--version"]) {
        return Err(err_msg(
            "please install Calibre: https://calibre-ebook.com/download",
        ));
    }
    // Suppress the tool's own output; only the exit status matters here.
    let status = Command::new(program)
        .arg(src)
        .arg(dst)
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    if status.success() {
        Ok(())
    } else {
        // `ok_or_else` defers building the "no exit code" error until it is
        // actually needed (the original `ok_or` constructed it eagerly).
        Err(err_msg(format!(
            "possible conversion failed with an incorrect exit code: {}",
            status.code().ok_or_else(|| err_msg(
                "possible conversion failed and no exit code was obtained"
            ))?
        )))
    }
}
| 24.513514 | 74 | 0.54796 |
11f57b12c39936137d107d3a464854dc6303d53a | 1,065 | use clap::{crate_description, crate_name, crate_version, Arg, ArgMatches, Command};
/// Build the CLI definition and parse the process arguments.
pub fn parse() -> ArgMatches {
    // Positional input path — required.
    let input = Arg::new("input")
        .help("files/directory to process")
        .index(1)
        .required(true);
    // Flag: sort and replace files in place.
    let replace = Arg::new("replace")
        .short('r')
        .long("replace")
        .help("sort and replace files")
        .required(false)
        .takes_value(false);
    // Optional output directory; when given, the suffix is ignored.
    let out = Arg::new("out")
        .short('o')
        .long("out")
        .help("output directory ( ignores suffix )")
        .takes_value(true)
        .required(false);
    // Suffix appended to output file names.
    let suffix = Arg::new("suffix")
        .short('s')
        .long("suffix")
        .help("suffix for output files")
        .default_value("sorted")
        .required(false);
    Command::new(crate_name!())
        .version(crate_version!())
        .about(crate_description!())
        .args(&[input, replace, out, suffix])
        .get_matches()
}
| 32.272727 | 83 | 0.447887 |
28244563fdf53c29386853f99167e545286c2b1d | 5,400 | use cosmwasm_std::{
from_slice,
testing::{
mock_dependencies,
mock_env,
mock_info,
MockApi,
MockQuerier,
MockStorage,
},
Addr,
Api,
OwnedDeps,
Response,
Storage,
Uint128,
};
use cosmwasm_storage::to_length_prefixed;
use cw20::TokenInfoResponse;
use cw20_wrapped::{
contract::{
execute,
instantiate,
query,
},
msg::{
ExecuteMsg,
InstantiateMsg,
QueryMsg,
WrappedAssetInfoResponse,
},
state::{
WrappedAssetInfo,
KEY_WRAPPED_ASSET,
},
ContractError,
};
// Fixture addresses used throughout the tests.
// `INITIALIZER` instantiates the contract and therefore acts as the bridge.
static INITIALIZER: &str = "addr0000";
static RECIPIENT: &str = "addr2222";
static SENDER: &str = "addr3333";
/// Read the wrapped-asset metadata directly out of contract storage.
fn get_wrapped_asset_info<S: Storage>(storage: &S) -> WrappedAssetInfo {
    let prefixed_key = to_length_prefixed(KEY_WRAPPED_ASSET);
    let bytes = storage.get(&prefixed_key).expect("data should exist");
    from_slice(&bytes).expect("invalid data")
}
/// Instantiate the wrapped-token contract with fixed metadata and assert
/// the stored `WrappedAssetInfo` matches what was passed in.
fn do_init() -> OwnedDeps<MockStorage, MockApi, MockQuerier> {
    let mut deps = mock_dependencies(&[]);
    let init_msg = InstantiateMsg {
        name: "Integers".into(),
        symbol: "INT".into(),
        asset_chain: 1,
        asset_address: vec![1; 32].into(),
        decimals: 10,
        mint: None,
        init_hook: None,
    };
    let env = mock_env();
    let info = mock_info(INITIALIZER, &[]);
    let res: Response = instantiate(deps.as_mut(), env, info, init_msg).unwrap();
    assert_eq!(0, res.messages.len());
    // query the store directly
    let bridge = deps.api.addr_canonicalize(INITIALIZER).unwrap();
    assert_eq!(
        get_wrapped_asset_info(&deps.storage),
        WrappedAssetInfo {
            asset_chain: 1,
            asset_address: vec![1; 32].into(),
            bridge,
        }
    );
    deps
}
/// Mint `amount` tokens to `recipient`, acting as the bridge
/// (the instantiating address), and assert the call succeeds.
fn do_mint(
    deps: &mut OwnedDeps<MockStorage, MockApi, MockQuerier>,
    recipient: &Addr,
    amount: &Uint128,
) {
    let mint_msg = ExecuteMsg::Mint {
        recipient: recipient.to_string(),
        // `Uint128` is `Copy`; dereference instead of cloning.
        amount: *amount,
    };
    let info = mock_info(INITIALIZER, &[]);
    let handle_response: Response = execute(deps.as_mut(), mock_env(), info, mint_msg).unwrap();
    assert_eq!(0, handle_response.messages.len());
}
/// Transfer `amount` tokens from `sender` to `recipient` and assert the
/// call succeeds with no extra messages.
fn do_transfer(
    deps: &mut OwnedDeps<MockStorage, MockApi, MockQuerier>,
    sender: &Addr,
    recipient: &Addr,
    amount: &Uint128,
) {
    let transfer_msg = ExecuteMsg::Transfer {
        recipient: recipient.to_string(),
        // `Uint128` is `Copy`; dereference instead of cloning.
        amount: *amount,
    };
    let env = mock_env();
    let info = mock_info(sender.as_str(), &[]);
    let handle_response: Response = execute(deps.as_mut(), env, info, transfer_msg).unwrap();
    assert_eq!(0, handle_response.messages.len());
}
/// Assert that `address` holds exactly `amount` tokens by querying the
/// contract and comparing against the raw JSON response bytes.
fn check_balance(
    deps: &OwnedDeps<MockStorage, MockApi, MockQuerier>,
    address: &Addr,
    amount: &Uint128,
) {
    let query_response = query(
        deps.as_ref(),
        mock_env(),
        QueryMsg::Balance {
            address: address.to_string(),
        },
    )
    .unwrap();
    // Compare serialized JSON directly; relies on cw20's field ordering.
    assert_eq!(
        query_response.as_slice(),
        format!("{{\"balance\":\"{}\"}}", amount.u128()).as_bytes()
    );
}
/// Assert the token metadata and total `supply` reported by the contract.
/// Note the contract appends " (Wormhole)" to the instantiated name.
fn check_token_details(deps: &OwnedDeps<MockStorage, MockApi, MockQuerier>, supply: Uint128) {
    let query_response = query(deps.as_ref(), mock_env(), QueryMsg::TokenInfo {}).unwrap();
    assert_eq!(
        from_slice::<TokenInfoResponse>(query_response.as_slice()).unwrap(),
        TokenInfoResponse {
            name: "Integers (Wormhole)".into(),
            symbol: "INT".into(),
            decimals: 10,
            total_supply: supply,
        }
    );
}
#[test]
fn init_works() {
    // A freshly instantiated token has zero total supply.
    // `check_token_details` only needs a shared borrow, so no `mut`.
    let deps = do_init();
    check_token_details(&deps, Uint128::new(0));
}
#[test]
fn query_works() {
    // The wrapped-asset query must echo back the instantiation parameters.
    let deps = do_init();
    let query_response = query(deps.as_ref(), mock_env(), QueryMsg::WrappedAssetInfo {}).unwrap();
    assert_eq!(
        // `as_slice()` already yields `&[u8]`; no extra borrow needed.
        from_slice::<WrappedAssetInfoResponse>(query_response.as_slice()).unwrap(),
        WrappedAssetInfoResponse {
            asset_chain: 1,
            asset_address: vec![1; 32].into(),
            bridge: Addr::unchecked(INITIALIZER),
        }
    );
}
#[test]
fn mint_works() {
    // Minting as the bridge credits the recipient and grows total supply.
    let mut deps = do_init();
    let recipient = Addr::unchecked(RECIPIENT);
    do_mint(&mut deps, &recipient, &Uint128::new(123_123_123));
    check_balance(&deps, &recipient, &Uint128::new(123_123_123));
    check_token_details(&deps, Uint128::new(123_123_123));
}
#[test]
fn others_cannot_mint() {
    // Only the bridge (instantiator) may mint; anyone else gets Unauthorized.
    let mut deps = do_init();
    let mint_msg = ExecuteMsg::Mint {
        recipient: RECIPIENT.into(),
        amount: Uint128::new(123_123_123),
    };
    let env = mock_env();
    // Sender is RECIPIENT, not INITIALIZER, so the call must fail.
    let info = mock_info(RECIPIENT, &[]);
    let handle_result = execute(deps.as_mut(), env, info, mint_msg);
    assert_eq!(
        format!("{}", handle_result.unwrap_err()),
        format!("{}", ContractError::Unauthorized {})
    );
}
#[test]
fn transfer_works() {
    // Mint to `sender`, transfer most of it, and verify both balances.
    let mut deps = do_init();
    let sender = Addr::unchecked(SENDER);
    let recipient = Addr::unchecked(RECIPIENT);
    do_mint(&mut deps, &sender, &Uint128::new(123_123_123));
    do_transfer(&mut deps, &sender, &recipient, &Uint128::new(123_123_000));
    // `check_balance` takes a shared borrow — `&deps`, not `&mut deps`
    // (consistent with `mint_works`).
    check_balance(&deps, &sender, &Uint128::new(123));
    check_balance(&deps, &recipient, &Uint128::new(123_123_000));
}
| 26.470588 | 98 | 0.607778 |
763942d90db5ff120805951dda6cf6cb9925fb89 | 1,139 | use crate::{AllocError, BlockAllocator, MAX_BUFFER_SIZE};
use std::ops::Range;
use std::ptr::NonNull;
use std::collections::HashMap;
//use crate::alloc::{handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw};
use std::alloc::{Allocator, Global, Layout};
/// Block allocator that obtains fixed-size blocks from a system allocator
/// (`Global` by default).
pub struct SystemBlockAllocator<const SIZE: usize, A: Allocator = Global> {
    // Underlying allocator used for every block.
    allocator: A,
}
impl<const SIZE: usize> BlockAllocator<SIZE>
    for SystemBlockAllocator<SIZE>
{
    /// Allocate one `SIZE`-byte block, mapping allocator failure to
    /// `AllocError::OutOfHostMemory`.
    unsafe fn allocate_block(&mut self) -> Result<NonNull<[u8; SIZE]>, AllocError> {
        let layout = Layout::new::<[u8; SIZE]>();
        let mem = self.allocator.allocate(layout).map_err(|_| AllocError::OutOfHostMemory)?;
        // SAFETY: `allocate` returned at least `SIZE` bytes for this layout,
        // so reinterpreting the pointer as `[u8; SIZE]` is in-bounds.
        let mem: NonNull<[u8; SIZE]> = unsafe { mem.cast() };
        Ok(mem)
    }
    /// Return `block` to the allocator.
    ///
    /// Caller must ensure `block` came from `allocate_block` on this
    /// allocator and is not used afterwards.
    unsafe fn deallocate_block(&mut self, block: NonNull<[u8; SIZE]>) {
        let layout = Layout::new::<[u8; SIZE]>();
        // SAFETY: cast back to the erased byte pointer originally handed out.
        let ptr: NonNull<u8> = unsafe { block.cast() };
        self.allocator.deallocate(ptr, layout);
    }
    // System memory needs no dirty-range tracking: intentional no-op.
    unsafe fn updated_block(&mut self, _block: NonNull<[u8; SIZE]>, _block_range: Range<u64>) {
    }
    // Nothing buffered, so flushing is a no-op as well.
    unsafe fn flush(&mut self) {}
}
| 34.515152 | 99 | 0.65935 |
1413f53f12043a7e771a0d92a701f45f6bb074b8 | 3,865 | use crossterm::{
cursor::{self, CursorShape},
event::KeyCode,
queue,
style::{self, Color},
terminal::{self, ClearType},
Result,
};
use super::{Mode, Modes};
/// Interactive "go to offset" prompt state for the hex viewer.
pub struct GoToMode {
    // The 8 hex digits currently entered.
    input: String,
    // Terminal column of the edit cursor (columns 17..=24 cover `input`).
    cursor: usize,
    // Whether the next draw should show the out-of-range error.
    draw_error: bool,
}
impl GoToMode {
    /// Start with an all-zero offset and the cursor on the last digit.
    pub fn new() -> Self {
        Self {
            input: String::from("00000000"),
            // 24 = column of the final digit: the prompt "Go to offset: 0x"
            // is drawn from column 1 and is 16 characters wide.
            cursor: 24, //@Improve: base this value on msg length
            draw_error: false,
        }
    }
}
impl Mode for GoToMode {
    /// Edit the hex offset: arrow keys move the cursor, hex digits
    /// overwrite in place, `q` aborts back to byte view, Enter jumps to
    /// the entered offset (or flags an error if it is out of range).
    fn handle_input(
        &mut self,
        event: &crossterm::event::KeyEvent,
        state: &mut crate::misc::TermState,
        parameters: &crate::misc::Parameters,
    ) -> Result<super::Modes> {
        // Any keypress clears a previously shown error.
        if self.draw_error {
            self.draw_error = false;
        }
        let end_mode = match event.code {
            KeyCode::Right => {
                //@Improve: base this value on msg length
                if self.cursor < 24 {
                    self.cursor += 1;
                }
                Modes::GoTo
            }
            KeyCode::Left => {
                if self.cursor > 17 {
                    //@Improve: base this value on msg length
                    self.cursor -= 1;
                }
                Modes::GoTo
            }
            // Renamed from `char` to avoid shadowing the primitive type name.
            KeyCode::Char(ch) => {
                if ch == 'q' {
                    return Ok(Modes::Bytes);
                }
                // Only hex digits may be typed into the offset.
                if !ch.is_ascii_hexdigit() {
                    return Ok(Modes::GoTo);
                }
                // Columns 17..=24 hold the 8 digits ("Go to offset: 0x" is
                // drawn from column 1, 16 chars wide), so `cursor - 17` is
                // the digit index inside `input`.
                self.input.remove(self.cursor - 17);
                self.input
                    .insert(self.cursor - 17, ch.to_ascii_uppercase());
                Modes::GoTo
            }
            KeyCode::Enter => {
                let total_number_of_offsets = state.bytes.len() / parameters.byte_size as usize;
                let number = usize::from_str_radix(&self.input, 16)
                    .expect("Failed to parse offset as usize");
                // Convert the byte offset into a row offset.
                let goto = number / parameters.byte_size as usize;
                if goto <= total_number_of_offsets {
                    state.render_from_offset = goto;
                } else {
                    self.draw_error = true;
                    return Ok(Modes::GoTo);
                }
                Modes::Bytes
            }
            _ => Modes::GoTo,
        };
        Ok(end_mode)
    }
    /// Mouse input is ignored in go-to mode.
    fn handle_mouse(
        &mut self,
        _event: &crossterm::event::MouseEvent,
        _state: &mut crate::misc::TermState,
        _parameters: &crate::misc::Parameters,
    ) -> Result<super::Modes> {
        Ok(Modes::GoTo)
    }
    /// Resizes need no special handling in go-to mode.
    fn handle_resize(
        &mut self,
        _stdout: &mut std::io::Stdout,
        _width: u16,
        _height: u16,
        _state: &mut crate::misc::TermState,
        _parameters: &crate::misc::Parameters,
    ) -> Result<super::Modes> {
        Ok(Modes::GoTo)
    }
    /// Render either the input prompt or the out-of-range error on the
    /// bottom line of the terminal.
    fn draw(&self, stdout: &mut std::io::Stdout, state: &crate::misc::TermState) -> Result<()> {
        let msg = format!("Go to offset: 0x{}", self.input);
        if self.draw_error {
            queue!(
                stdout,
                cursor::MoveTo(1, state.term_height),
                terminal::Clear(ClearType::FromCursorDown),
                style::SetForegroundColor(Color::Red),
                style::Print("Offset exceeds total number of offsets!"),
            )?;
        } else {
            queue!(
                stdout,
                cursor::MoveTo(1, state.term_height),
                terminal::Clear(ClearType::FromCursorDown),
                style::SetForegroundColor(Color::DarkGrey),
                style::Print(&msg),
                cursor::SetCursorShape(CursorShape::Block),
                cursor::MoveTo(self.cursor as u16, state.term_height),
            )?;
        }
        Ok(())
    }
}
| 28.843284 | 96 | 0.474774 |
4adffaa3212eea714dc9bff070cd460a5aa6705e | 1,811 | //! @ Font definitions for a given font number |k| contain further parameters
//! $$\hbox{|c[4]| |s[4]| |d[4]| |a[1]| |l[1]| |n[a+l]|.}$$
//! The four-byte value |c| is the check sum that \TeX\ found in the \.{TFM}
//! file for this font; |c| should match the check sum of the font found by
//! programs that read this \.{DVI} file.
//! @^check sum@>
//!
//! Parameter |s| contains a fixed-point scale factor that is applied to
//! the character widths in font |k|; font dimensions in \.{TFM} files and
//! other font files are relative to this quantity, which is called the
//! ``at size'' elsewhere in this documentation. The value of |s| is
//! always positive and less than $2^{27}$. It is given in the same units
//! as the other \.{DVI} dimensions, i.e., in sp when \TeX82 has made the
//! file. Parameter |d| is similar to |s|; it is the ``design size,'' and
//! (like~|s|) it is given in \.{DVI} units. Thus, font |k| is to be used
//! at $|mag|\cdot s/1000d$ times its normal size.
//!
//! The remaining part of a font definition gives the external name of the font,
//! which is an ASCII string of length |a+l|. The number |a| is the length
//! of the ``area'' or directory, and |l| is the length of the font name itself;
//! the standard local system font area is supposed to be used when |a=0|.
//! The |n| field contains the area in its first |a| bytes.
//!
//! Font definitions must appear before the first use of a particular font number.
//! Once font |k| is defined, it must not be defined again; however, we
//! shall see below that font definitions appear in the postamble as well as
//! in the pages, so in this sense each font number is defined exactly twice,
//! if at all. Like |nop| commands, font definitions can
//! appear before the first |bop|, or between an |eop| and a |bop|.
//!
| 58.419355 | 82 | 0.675318 |
71c7759c420da22f9ff3ef96dc91bd0e644b02f4 | 3,490 | use std::ops::{Deref, DerefMut};
use super::tile::Tile;
use crate::components::coordinates::*;
use crate::resources::*;
// Offsets of the 8 neighboring tiles around a coordinate.
const SQUARE_COORDINATES: [(i8, i8); 8] = [
    (-1, -1),
    (0, -1),
    (-1, 1),
    (-1, 0),
    (0, 1),
    (1, -1),
    (1, 0),
    (1, 1),
];
/// Minesweeper-style board: a 2D grid of tiles plus its bomb count.
#[derive(Debug, Clone)]
pub struct TileMap {
    // Number of bombs placed by `set_bombs` (0 until then).
    bomb_count: u16,
    height: u16,
    width: u16,
    // Row-major storage: `map[y][x]`.
    map: Vec<Vec<Tile>>,
}
impl TileMap {
    /// Create an empty `width` x `height` map (row-major: `map[y][x]`).
    pub fn new(width: u16, height: u16) -> Self {
        // Ranges are already iterators; the `into_iter()` calls were redundant.
        let map = (0..height)
            .map(|_| (0..width).map(|_| Tile::Empty).collect())
            .collect();
        Self {
            bomb_count: 0,
            height,
            width,
            map,
        }
    }
    /// Randomly place `bomb_count` bombs, then annotate every non-bomb tile
    /// with its bomb-neighbor count.
    ///
    /// NOTE(review): if `bomb_count` exceeds the number of tiles, the
    /// placement loop never terminates — callers must pass a sane count.
    pub fn set_bombs(&mut self, bomb_count: u16, rng: &mut Random) {
        self.bomb_count = bomb_count;
        let mut remaining_bombs = bomb_count;
        // Rejection-sample free tiles until all bombs are placed.
        while remaining_bombs > 0 {
            let (x, y) = (
                rng.random_range(0..self.width) as usize,
                rng.random_range(0..self.height) as usize,
            );
            if let Tile::Empty = self[y][x] {
                self[y][x] = Tile::Bomb;
                remaining_bombs -= 1;
            }
        }
        // Second pass: compute neighbor counts for the remaining tiles.
        for y in 0..self.height {
            for x in 0..self.width {
                let coords = Coordinates { x, y };
                if self.is_bomb_at(coords) {
                    continue;
                }
                let num = self.bomb_count_at(coords);
                if num == 0 {
                    continue;
                }
                self[y as usize][x as usize] = Tile::BombNeighbor(num);
            }
        }
    }
    /// The 8 neighbor coordinates of `coordinates` (bounds-checking is the
    /// caller's job; `is_bomb_at` tolerates out-of-range values).
    fn safe_square_at(&self, coordinates: Coordinates) -> impl Iterator<Item = Coordinates> {
        SQUARE_COORDINATES
            .iter()
            .copied()
            .map(move |tuple| coordinates + tuple)
    }
    /// Whether an in-bounds bomb tile exists at `coordinates`.
    fn is_bomb_at(&self, coordinates: Coordinates) -> bool {
        coordinates.x < self.width
            && coordinates.y < self.height
            && self.map[coordinates.y as usize][coordinates.x as usize].is_bomb()
    }
    /// Number of bombs adjacent to `coordinates`; 0 if the tile itself is a bomb.
    fn bomb_count_at(&self, coordinates: Coordinates) -> u8 {
        if self.is_bomb_at(coordinates) {
            return 0;
        }
        self.safe_square_at(coordinates)
            .filter(|coord| self.is_bomb_at(*coord))
            .count() as u8
    }
    /// ASCII rendering of the map for debugging (rows printed top-down by
    /// iterating in reverse).
    pub fn get_debug_string(&self) -> String {
        let mut buffer = format!(
            "Map ({}, {}) with {} bombs:\n",
            self.width, self.height, self.bomb_count
        );
        // Horizontal border: map width plus the two '|' edge columns.
        // (`str::repeat` replaces the former `(0..=(w + 1)).map(|_| '-')`.)
        let line = "-".repeat(self.width as usize + 2);
        buffer.push_str(&line);
        buffer.push('\n');
        for line in self.iter().rev() {
            buffer.push('|');
            for tile in line.iter() {
                buffer.push_str(&tile.get_debug_string());
            }
            buffer.push_str("|\n");
        }
        buffer.push_str(&line);
        buffer
    }
}
// Deref to the underlying grid so `self[y][x]` indexing works directly.
impl Deref for TileMap {
    type Target = Vec<Vec<Tile>>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}
impl DerefMut for TileMap {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
| 23.741497 | 93 | 0.480802 |
18fa3ba682b86fae4ef100fc442f4ea8329eb22b | 10,322 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::{
capability::{CapabilityProvider, CapabilitySource, FrameworkCapability},
model::{
error::ModelError,
hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
moniker::AbsoluteMoniker,
},
work_scheduler::work_scheduler::{
WorkScheduler, WORK_SCHEDULER_CAPABILITY_PATH, WORK_SCHEDULER_CONTROL_CAPABILITY_PATH,
},
},
anyhow::{format_err, Error},
async_trait::async_trait,
fidl::endpoints::ServerEnd,
fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx,
futures::{future::BoxFuture, TryStreamExt},
log::warn,
std::sync::{Arc, Weak},
};
// TODO(markdittmer): Establish
// WorkSchedulerSystem -> (WorkScheduler, WorkSchedulerHook -> WorkScheduler).
impl WorkScheduler {
    /// Helper to specify hooks associated with `WorkScheduler`. Accepts `&Arc<WorkScheduler>` to
    /// produce references needed by `HooksRegistration` without consuming `Arc`.
    pub fn hooks(work_scheduler: &Arc<Self>) -> Vec<HooksRegistration> {
        vec![HooksRegistration {
            events: vec![EventType::ResolveInstance, EventType::RouteCapability],
            callback: Arc::downgrade(work_scheduler) as Weak<dyn Hook>,
        }]
    }
    /// Route capability to access `fuchsia.sys2.WorkSchedulerControl` protocol as a framework
    /// capability.
    async fn on_route_framework_capability_async<'a>(
        self: Arc<Self>,
        capability: &'a FrameworkCapability,
        capability_provider: Option<Box<dyn CapabilityProvider>>,
    ) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
        match (&capability_provider, capability) {
            // Only install our provider if nobody else has claimed the route
            // and the path matches the control protocol.
            (None, FrameworkCapability::ServiceProtocol(capability_path))
                if *capability_path == *WORK_SCHEDULER_CONTROL_CAPABILITY_PATH =>
            {
                Ok(Some(Box::new(WorkSchedulerControlCapabilityProvider::new(self.clone()))
                    as Box<dyn CapabilityProvider>))
            }
            _ => Ok(capability_provider),
        }
    }
    /// Route capability to access `fuchsia.sys2.WorkScheduler` protocol as a scoped framework
    /// capability.
    async fn on_route_scoped_framework_capability_async<'a>(
        self: Arc<Self>,
        scope_moniker: AbsoluteMoniker,
        capability: &'a FrameworkCapability,
        capability_provider: Option<Box<dyn CapabilityProvider>>,
    ) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
        match (&capability_provider, capability) {
            (None, FrameworkCapability::ServiceProtocol(capability_path))
                if *capability_path == *WORK_SCHEDULER_CAPABILITY_PATH =>
            {
                // Only clients that expose the Worker protocol to the framework can
                // use WorkScheduler.
                if !self.verify_worker_exposed_to_framework(&scope_moniker).await {
                    return Err(ModelError::capability_discovery_error(format_err!(
                        "Component {} does not expose Worker to framework",
                        scope_moniker
                    )));
                }
                Ok(Some(
                    Box::new(WorkSchedulerCapabilityProvider::new(scope_moniker, self.clone()))
                        as Box<dyn CapabilityProvider>,
                ))
            }
            _ => Ok(capability_provider),
        }
    }
}
impl Hook for WorkScheduler {
    /// Dispatch model events: register workers on instance resolution and
    /// install capability providers on routing events (unscoped vs. scoped,
    /// distinguished by `scope_moniker`).
    fn on(self: Arc<Self>, event: &Event) -> BoxFuture<Result<(), ModelError>> {
        Box::pin(async move {
            match &event.payload {
                EventPayload::ResolveInstance { decl } => {
                    self.try_add_realm_as_worker(&event.target_moniker, &decl).await;
                }
                // Unscoped framework capability: WorkSchedulerControl.
                EventPayload::RouteCapability {
                    source: CapabilitySource::Framework { capability, scope_moniker: None },
                    capability_provider,
                } => {
                    let mut capability_provider = capability_provider.lock().await;
                    *capability_provider = self
                        .on_route_framework_capability_async(
                            &capability,
                            capability_provider.take(),
                        )
                        .await?;
                }
                // Scoped framework capability: WorkScheduler for one component.
                EventPayload::RouteCapability {
                    source:
                        CapabilitySource::Framework { capability, scope_moniker: Some(scope_moniker) },
                    capability_provider,
                } => {
                    let mut capability_provider = capability_provider.lock().await;
                    *capability_provider = self
                        .on_route_scoped_framework_capability_async(
                            scope_moniker.clone(),
                            &capability,
                            capability_provider.take(),
                        )
                        .await?;
                }
                _ => {}
            };
            Ok(())
        })
    }
}
/// `ComponentManagerCapabilityProvider` to invoke `WorkSchedulerControl` FIDL API bound to a
/// particular `WorkScheduler` object.
struct WorkSchedulerControlCapabilityProvider {
    work_scheduler: Arc<WorkScheduler>,
}
impl WorkSchedulerControlCapabilityProvider {
    fn new(work_scheduler: Arc<WorkScheduler>) -> Self {
        WorkSchedulerControlCapabilityProvider { work_scheduler }
    }
    /// Service `open` invocation via an event loop that dispatches FIDL operations to
    /// `work_scheduler`.
    ///
    /// Runs until the client closes the channel or a FIDL error occurs.
    async fn open_async(
        self,
        mut stream: fsys::WorkSchedulerControlRequestStream,
    ) -> Result<(), Error> {
        while let Some(request) = stream.try_next().await? {
            let work_scheduler = self.work_scheduler.clone();
            match request {
                fsys::WorkSchedulerControlRequest::GetBatchPeriod { responder, .. } => {
                    let mut result = work_scheduler.get_batch_period().await;
                    responder.send(&mut result)?;
                }
                fsys::WorkSchedulerControlRequest::SetBatchPeriod {
                    responder,
                    batch_period,
                    ..
                } => {
                    let mut result = work_scheduler.set_batch_period(batch_period).await;
                    responder.send(&mut result)?;
                }
            }
        }
        Ok(())
    }
}
#[async_trait]
impl CapabilityProvider for WorkSchedulerControlCapabilityProvider {
    /// Spawn an event loop to service `WorkScheduler` FIDL operations.
    ///
    /// `open` returns immediately; the request stream is served on a
    /// detached task, and failures are only logged.
    async fn open(
        self: Box<Self>,
        _flags: u32,
        _open_mode: u32,
        _relative_path: String,
        server_end: zx::Channel,
    ) -> Result<(), ModelError> {
        let server_end = ServerEnd::<fsys::WorkSchedulerControlMarker>::new(server_end);
        let stream: fsys::WorkSchedulerControlRequestStream = server_end.into_stream().unwrap();
        fasync::spawn(async move {
            let result = self.open_async(stream).await;
            if let Err(e) = result {
                // TODO(markdittmer): Set an epitaph to indicate this was an unexpected error.
                warn!("WorkSchedulerCapabilityProvider.open failed: {}", e);
            }
        });
        Ok(())
    }
}
/// `Capability` to invoke `WorkScheduler` FIDL API bound to a particular `WorkScheduler` object and
/// component instance's `AbsoluteMoniker`. All FIDL operations bound to the same object and moniker
/// observe the same collection of `WorkItem` objects.
struct WorkSchedulerCapabilityProvider {
    // Identifies the component instance on whose behalf work is scheduled.
    scope_moniker: AbsoluteMoniker,
    work_scheduler: Arc<WorkScheduler>,
}
impl WorkSchedulerCapabilityProvider {
    fn new(scope_moniker: AbsoluteMoniker, work_scheduler: Arc<WorkScheduler>) -> Self {
        WorkSchedulerCapabilityProvider { scope_moniker, work_scheduler }
    }
    /// Service `open` invocation via an event loop that dispatches FIDL operations to
    /// `work_scheduler`.
    ///
    /// Every operation is scoped to `scope_moniker`, so callers only see
    /// their own work items.
    async fn open_async(
        work_scheduler: Arc<WorkScheduler>,
        scope_moniker: AbsoluteMoniker,
        mut stream: fsys::WorkSchedulerRequestStream,
    ) -> Result<(), Error> {
        while let Some(request) = stream.try_next().await? {
            let work_scheduler = work_scheduler.clone();
            match request {
                fsys::WorkSchedulerRequest::ScheduleWork {
                    responder,
                    work_id,
                    work_request,
                    ..
                } => {
                    let mut result =
                        work_scheduler.schedule_work(&scope_moniker, &work_id, &work_request).await;
                    responder.send(&mut result)?;
                }
                fsys::WorkSchedulerRequest::CancelWork { responder, work_id, .. } => {
                    let mut result = work_scheduler.cancel_work(&scope_moniker, &work_id).await;
                    responder.send(&mut result)?;
                }
            }
        }
        Ok(())
    }
}
#[async_trait]
impl CapabilityProvider for WorkSchedulerCapabilityProvider {
    /// Spawn an event loop to service `WorkScheduler` FIDL operations.
    ///
    /// Like the control provider: returns immediately, serves the stream on
    /// a detached task, and only logs failures.
    async fn open(
        self: Box<Self>,
        _flags: u32,
        _open_mode: u32,
        _relative_path: String,
        server_end: zx::Channel,
    ) -> Result<(), ModelError> {
        let server_end = ServerEnd::<fsys::WorkSchedulerMarker>::new(server_end);
        let stream: fsys::WorkSchedulerRequestStream = server_end.into_stream().unwrap();
        let work_scheduler = self.work_scheduler.clone();
        let scope_moniker = self.scope_moniker.clone();
        fasync::spawn(async move {
            let result = Self::open_async(work_scheduler, scope_moniker, stream).await;
            if let Err(e) = result {
                // TODO(markdittmer): Set an epitaph to indicate this was an unexpected error.
                warn!("WorkSchedulerCapabilityProvider.open failed: {}", e);
            }
        });
        Ok(())
    }
}
| 40.320313 | 103 | 0.587871 |
0896daf48b78435f0d5afc868ef126c4c195af45 | 34,799 | //! A different sort of visitor for walking fn bodies. Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.
use hir::def::DefKind;
// Export these here so that Clippy can use them.
pub use rustc_middle::hir::place::{Place, PlaceBase, PlaceWithHirId, Projection};
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::PatKind;
use rustc_index::vec::Idx;
use rustc_infer::infer::InferCtxt;
use rustc_middle::hir::place::ProjectionKind;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::{self, adjustment, AdtKind, Ty, TyCtxt};
use rustc_target::abi::VariantIdx;
use std::iter;
use crate::mem_categorization as mc;
/// This trait defines the callbacks you can expect to receive when
/// employing the ExprUseVisitor.
pub trait Delegate<'tcx> {
    /// The value found at `place` is moved, depending
    /// on `mode`. Where `diag_expr_id` is the id used for diagnostics for `place`.
    ///
    /// Use of a `Copy` type in a ByValue context is considered a use
    /// by `ImmBorrow` and `borrow` is called instead. This is because
    /// a shared borrow is the "minimum access" that would be needed
    /// to perform a copy.
    ///
    ///
    /// The parameter `diag_expr_id` indicates the HIR id that ought to be used for
    /// diagnostics. Around pattern matching such as `let pat = expr`, the diagnostic
    /// id will be the id of the expression `expr` but the place itself will have
    /// the id of the binding in the pattern `pat`.
    fn consume(&mut self, place_with_id: &PlaceWithHirId<'tcx>, diag_expr_id: hir::HirId);
    /// The value found at `place` is being borrowed with kind `bk`.
    /// `diag_expr_id` is the id used for diagnostics (see `consume` for more details).
    fn borrow(
        &mut self,
        place_with_id: &PlaceWithHirId<'tcx>,
        diag_expr_id: hir::HirId,
        bk: ty::BorrowKind,
    );
    /// The path at `assignee_place` is being assigned to.
    /// `diag_expr_id` is the id used for diagnostics (see `consume` for more details).
    fn mutate(&mut self, assignee_place: &PlaceWithHirId<'tcx>, diag_expr_id: hir::HirId);
    /// The `place` should be a fake read because of specified `cause`.
    /// `diag_expr_id` is the id used for diagnostics (see `consume` for more details).
    fn fake_read(&mut self, place: Place<'tcx>, cause: FakeReadCause, diag_expr_id: hir::HirId);
}
// How a by-value use of a place is classified.
#[derive(Copy, Clone, PartialEq, Debug)]
enum ConsumeMode {
    /// reference to x where x has a type that copies
    Copy,
    /// reference to x where x has a type that moves
    Move,
}
// The kinds of writes a place can receive.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    /// Initializing write (the place's first assignment).
    Init,
    /// Example: `x = y`
    JustWrite,
    /// Example: `x += y`
    WriteAndRead,
}
/// The ExprUseVisitor type
///
/// This is the code that actually walks the tree.
pub struct ExprUseVisitor<'a, 'tcx> {
    // Memory-categorization context used to compute places for expressions.
    mc: mc::MemCategorizationContext<'a, 'tcx>,
    // The fn/closure body being analyzed.
    body_owner: LocalDefId,
    // Receiver of the consume/borrow/mutate/fake_read callbacks.
    delegate: &'a mut dyn Delegate<'tcx>,
}
/// If the MC results in an error, it's because the type check
/// failed (or will fail, when the error is uncovered and reported
/// during writeback). In this case, we just ignore this part of the
/// code.
///
/// Note that this macro appears similar to try!(), but, unlike try!(),
/// it does not propagate the error.
macro_rules! return_if_err {
    ($inp: expr) => {
        match $inp {
            // Unwrap `Ok`, or silently bail out of the enclosing function.
            Ok(v) => v,
            Err(()) => {
                debug!("mc reported err");
                return;
            }
        }
    };
}
impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
    /// Creates the ExprUseVisitor, configuring it with the various options provided:
    ///
    /// - `delegate` -- who receives the callbacks
    /// - `param_env` --- parameter environment for trait lookups (esp. pertaining to `Copy`)
    /// - `typeck_results` --- typeck results for the code being analyzed
    pub fn new(
        delegate: &'a mut (dyn Delegate<'tcx> + 'a),
        infcx: &'a InferCtxt<'a, 'tcx>,
        body_owner: LocalDefId,
        param_env: ty::ParamEnv<'tcx>,
        typeck_results: &'a ty::TypeckResults<'tcx>,
    ) -> Self {
        ExprUseVisitor {
            mc: mc::MemCategorizationContext::new(infcx, param_env, body_owner, typeck_results),
            body_owner,
            delegate,
        }
    }
    #[instrument(skip(self), level = "debug")]
    /// Walk an entire fn body: bind each parameter pattern against an
    /// rvalue place, then consume the body expression.
    pub fn consume_body(&mut self, body: &hir::Body<'_>) {
        for param in body.params {
            let param_ty = return_if_err!(self.mc.pat_ty_adjusted(param.pat));
            debug!("consume_body: param_ty = {:?}", param_ty);
            let param_place = self.mc.cat_rvalue(param.hir_id, param.pat.span, param_ty);
            self.walk_irrefutable_pat(&param_place, param.pat);
        }
        self.consume_expr(&body.value);
    }
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.mc.tcx()
    }
    // Thin wrapper over the free `delegate_consume` function.
    fn delegate_consume(&mut self, place_with_id: &PlaceWithHirId<'tcx>, diag_expr_id: hir::HirId) {
        delegate_consume(&self.mc, self.delegate, place_with_id, diag_expr_id)
    }
    fn consume_exprs(&mut self, exprs: &[hir::Expr<'_>]) {
        for expr in exprs {
            self.consume_expr(expr);
        }
    }
    /// Consume `expr` by value, then walk its subexpressions.
    pub fn consume_expr(&mut self, expr: &hir::Expr<'_>) {
        debug!("consume_expr(expr={:?})", expr);
        let place_with_id = return_if_err!(self.mc.cat_expr(expr));
        self.delegate_consume(&place_with_id, place_with_id.hir_id);
        self.walk_expr(expr);
    }
    /// Report `expr` as the target of an assignment, then walk it.
    fn mutate_expr(&mut self, expr: &hir::Expr<'_>) {
        let place_with_id = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.mutate(&place_with_id, place_with_id.hir_id);
        self.walk_expr(expr);
    }
    /// Report a borrow of `expr` with kind `bk`, then walk it.
    fn borrow_expr(&mut self, expr: &hir::Expr<'_>, bk: ty::BorrowKind) {
        debug!("borrow_expr(expr={:?}, bk={:?})", expr, bk);
        let place_with_id = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.borrow(&place_with_id, place_with_id.hir_id, bk);
        self.walk_expr(expr)
    }
    // Selecting from (indexing/field access) only walks; no consume/borrow here.
    fn select_from_expr(&mut self, expr: &hir::Expr<'_>) {
        self.walk_expr(expr)
    }
pub fn walk_expr(&mut self, expr: &hir::Expr<'_>) {
debug!("walk_expr(expr={:?})", expr);
self.walk_adjustment(expr);
match expr.kind {
hir::ExprKind::Path(_) => {}
hir::ExprKind::Type(subexpr, _) => self.walk_expr(subexpr),
hir::ExprKind::Unary(hir::UnOp::Deref, base) => {
// *base
self.select_from_expr(base);
}
hir::ExprKind::Field(base, _) => {
// base.f
self.select_from_expr(base);
}
hir::ExprKind::Index(lhs, rhs) => {
// lhs[rhs]
self.select_from_expr(lhs);
self.consume_expr(rhs);
}
hir::ExprKind::Call(callee, args) => {
// callee(args)
self.consume_expr(callee);
self.consume_exprs(args);
}
hir::ExprKind::MethodCall(.., args, _) => {
// callee.m(args)
self.consume_exprs(args);
}
hir::ExprKind::Struct(_, fields, ref opt_with) => {
self.walk_struct_expr(fields, opt_with);
}
hir::ExprKind::Tup(exprs) => {
self.consume_exprs(exprs);
}
hir::ExprKind::If(ref cond_expr, ref then_expr, ref opt_else_expr) => {
self.consume_expr(cond_expr);
self.consume_expr(then_expr);
if let Some(ref else_expr) = *opt_else_expr {
self.consume_expr(else_expr);
}
}
hir::ExprKind::Let(pat, ref expr, _) => {
self.walk_local(expr, pat, |t| t.borrow_expr(expr, ty::ImmBorrow));
}
hir::ExprKind::Match(ref discr, arms, _) => {
let discr_place = return_if_err!(self.mc.cat_expr(discr));
// Matching should not always be considered a use of the place, hence
// discr does not necessarily need to be borrowed.
// We only want to borrow discr if the pattern contain something other
// than wildcards.
let ExprUseVisitor { ref mc, body_owner: _, delegate: _ } = *self;
let mut needs_to_be_read = false;
for arm in arms.iter() {
return_if_err!(mc.cat_pattern(discr_place.clone(), arm.pat, |place, pat| {
match &pat.kind {
PatKind::Binding(.., opt_sub_pat) => {
// If the opt_sub_pat is None, than the binding does not count as
// a wildcard for the purpose of borrowing discr.
if opt_sub_pat.is_none() {
needs_to_be_read = true;
}
}
PatKind::Path(qpath) => {
// A `Path` pattern is just a name like `Foo`. This is either a
// named constant or else it refers to an ADT variant
let res = self.mc.typeck_results.qpath_res(qpath, pat.hir_id);
match res {
Res::Def(DefKind::Const, _)
| Res::Def(DefKind::AssocConst, _) => {
// Named constants have to be equated with the value
// being matched, so that's a read of the value being matched.
//
// FIXME: We don't actually reads for ZSTs.
needs_to_be_read = true;
}
_ => {
// Otherwise, this is a struct/enum variant, and so it's
// only a read if we need to read the discriminant.
needs_to_be_read |= is_multivariant_adt(place.place.ty());
}
}
}
PatKind::TupleStruct(..) | PatKind::Struct(..) | PatKind::Tuple(..) => {
// For `Foo(..)`, `Foo { ... }` and `(...)` patterns, check if we are matching
// against a multivariant enum or struct. In that case, we have to read
// the discriminant. Otherwise this kind of pattern doesn't actually
// read anything (we'll get invoked for the `...`, which may indeed
// perform some reads).
let place_ty = place.place.ty();
needs_to_be_read |= is_multivariant_adt(place_ty);
}
PatKind::Lit(_) | PatKind::Range(..) => {
// If the PatKind is a Lit or a Range then we want
// to borrow discr.
needs_to_be_read = true;
}
PatKind::Or(_)
| PatKind::Box(_)
| PatKind::Slice(..)
| PatKind::Ref(..)
| PatKind::Wild => {
// If the PatKind is Or, Box, Slice or Ref, the decision is made later
// as these patterns contains subpatterns
// If the PatKind is Wild, the decision is made based on the other patterns being
// examined
}
}
}));
}
if needs_to_be_read {
self.borrow_expr(discr, ty::ImmBorrow);
} else {
let closure_def_id = match discr_place.place.base {
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id.to_def_id()),
_ => None,
};
self.delegate.fake_read(
discr_place.place.clone(),
FakeReadCause::ForMatchedPlace(closure_def_id),
discr_place.hir_id,
);
// We always want to walk the discriminant. We want to make sure, for instance,
// that the discriminant has been initialized.
self.walk_expr(discr);
}
// treatment of the discriminant is handled while walking the arms.
for arm in arms {
self.walk_arm(&discr_place, arm);
}
}
hir::ExprKind::Array(exprs) => {
self.consume_exprs(exprs);
}
hir::ExprKind::AddrOf(_, m, ref base) => {
// &base
// make sure that the thing we are pointing out stays valid
// for the lifetime `scope_r` of the resulting ptr:
let bk = ty::BorrowKind::from_mutbl(m);
self.borrow_expr(base, bk);
}
hir::ExprKind::InlineAsm(asm) => {
for (op, _op_sp) in asm.operands {
match op {
hir::InlineAsmOperand::In { expr, .. }
| hir::InlineAsmOperand::Sym { expr, .. } => self.consume_expr(expr),
hir::InlineAsmOperand::Out { expr: Some(expr), .. }
| hir::InlineAsmOperand::InOut { expr, .. } => {
self.mutate_expr(expr);
}
hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
self.consume_expr(in_expr);
if let Some(out_expr) = out_expr {
self.mutate_expr(out_expr);
}
}
hir::InlineAsmOperand::Out { expr: None, .. }
| hir::InlineAsmOperand::Const { .. } => {}
}
}
}
hir::ExprKind::LlvmInlineAsm(ia) => {
for (o, output) in iter::zip(&ia.inner.outputs, ia.outputs_exprs) {
if o.is_indirect {
self.consume_expr(output);
} else {
self.mutate_expr(output);
}
}
self.consume_exprs(ia.inputs_exprs);
}
hir::ExprKind::Continue(..)
| hir::ExprKind::Lit(..)
| hir::ExprKind::ConstBlock(..)
| hir::ExprKind::Err => {}
hir::ExprKind::Loop(blk, ..) => {
self.walk_block(blk);
}
hir::ExprKind::Unary(_, lhs) => {
self.consume_expr(lhs);
}
hir::ExprKind::Binary(_, lhs, rhs) => {
self.consume_expr(lhs);
self.consume_expr(rhs);
}
hir::ExprKind::Block(blk, _) => {
self.walk_block(blk);
}
hir::ExprKind::Break(_, ref opt_expr) | hir::ExprKind::Ret(ref opt_expr) => {
if let Some(expr) = *opt_expr {
self.consume_expr(expr);
}
}
hir::ExprKind::Assign(lhs, rhs, _) => {
self.mutate_expr(lhs);
self.consume_expr(rhs);
}
hir::ExprKind::Cast(base, _) => {
self.consume_expr(base);
}
hir::ExprKind::DropTemps(expr) => {
self.consume_expr(expr);
}
hir::ExprKind::AssignOp(_, lhs, rhs) => {
if self.mc.typeck_results.is_method_call(expr) {
self.consume_expr(lhs);
} else {
self.mutate_expr(lhs);
}
self.consume_expr(rhs);
}
hir::ExprKind::Repeat(base, _) => {
self.consume_expr(base);
}
hir::ExprKind::Closure(..) => {
self.walk_captures(expr);
}
hir::ExprKind::Box(ref base) => {
self.consume_expr(base);
}
hir::ExprKind::Yield(value, _) => {
self.consume_expr(value);
}
}
}
fn walk_stmt(&mut self, stmt: &hir::Stmt<'_>) {
match stmt.kind {
hir::StmtKind::Local(hir::Local { pat, init: Some(expr), .. }) => {
self.walk_local(expr, pat, |_| {});
}
hir::StmtKind::Local(_) => {}
hir::StmtKind::Item(_) => {
// We don't visit nested items in this visitor,
// only the fn body we were given.
}
hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
self.consume_expr(expr);
}
}
}
fn walk_local<F>(&mut self, expr: &hir::Expr<'_>, pat: &hir::Pat<'_>, mut f: F)
where
F: FnMut(&mut Self),
{
self.walk_expr(expr);
let expr_place = return_if_err!(self.mc.cat_expr(expr));
f(self);
self.walk_irrefutable_pat(&expr_place, &pat);
}
/// Indicates that the value of `blk` will be consumed, meaning either copied or moved
/// depending on its type.
fn walk_block(&mut self, blk: &hir::Block<'_>) {
debug!("walk_block(blk.hir_id={})", blk.hir_id);
for stmt in blk.stmts {
self.walk_stmt(stmt);
}
if let Some(ref tail_expr) = blk.expr {
self.consume_expr(tail_expr);
}
}
fn walk_struct_expr<'hir>(
&mut self,
fields: &[hir::ExprField<'_>],
opt_with: &Option<&'hir hir::Expr<'_>>,
) {
// Consume the expressions supplying values for each field.
for field in fields {
self.consume_expr(field.expr);
}
let with_expr = match *opt_with {
Some(w) => &*w,
None => {
return;
}
};
let with_place = return_if_err!(self.mc.cat_expr(with_expr));
// Select just those fields of the `with`
// expression that will actually be used
match with_place.place.ty().kind() {
ty::Adt(adt, substs) if adt.is_struct() => {
// Consume those fields of the with expression that are needed.
for (f_index, with_field) in adt.non_enum_variant().fields.iter().enumerate() {
let is_mentioned = fields.iter().any(|f| {
self.tcx().field_index(f.hir_id, self.mc.typeck_results) == f_index
});
if !is_mentioned {
let field_place = self.mc.cat_projection(
&*with_expr,
with_place.clone(),
with_field.ty(self.tcx(), substs),
ProjectionKind::Field(f_index as u32, VariantIdx::new(0)),
);
self.delegate_consume(&field_place, field_place.hir_id);
}
}
}
_ => {
// the base expression should always evaluate to a
// struct; however, when EUV is run during typeck, it
// may not. This will generate an error earlier in typeck,
// so we can just ignore it.
if !self.tcx().sess.has_errors() {
span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
}
}
}
// walk the with expression so that complex expressions
// are properly handled.
self.walk_expr(with_expr);
}
/// Invoke the appropriate delegate calls for anything that gets
/// consumed or borrowed as part of the automatic adjustment
/// process.
fn walk_adjustment(&mut self, expr: &hir::Expr<'_>) {
let adjustments = self.mc.typeck_results.expr_adjustments(expr);
let mut place_with_id = return_if_err!(self.mc.cat_expr_unadjusted(expr));
for adjustment in adjustments {
debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
match adjustment.kind {
adjustment::Adjust::NeverToAny | adjustment::Adjust::Pointer(_) => {
// Creating a closure/fn-pointer or unsizing consumes
// the input and stores it into the resulting rvalue.
self.delegate_consume(&place_with_id, place_with_id.hir_id);
}
adjustment::Adjust::Deref(None) => {}
// Autoderefs for overloaded Deref calls in fact reference
// their receiver. That is, if we have `(*x)` where `x`
// is of type `Rc<T>`, then this in fact is equivalent to
// `x.deref()`. Since `deref()` is declared with `&self`,
// this is an autoref of `x`.
adjustment::Adjust::Deref(Some(ref deref)) => {
let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
self.delegate.borrow(&place_with_id, place_with_id.hir_id, bk);
}
adjustment::Adjust::Borrow(ref autoref) => {
self.walk_autoref(expr, &place_with_id, autoref);
}
}
place_with_id =
return_if_err!(self.mc.cat_expr_adjusted(expr, place_with_id, adjustment));
}
}
/// Walks the autoref `autoref` applied to the autoderef'd
/// `expr`. `base_place` is the mem-categorized form of `expr`
/// after all relevant autoderefs have occurred.
fn walk_autoref(
&mut self,
expr: &hir::Expr<'_>,
base_place: &PlaceWithHirId<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>,
) {
debug!(
"walk_autoref(expr.hir_id={} base_place={:?} autoref={:?})",
expr.hir_id, base_place, autoref
);
match *autoref {
adjustment::AutoBorrow::Ref(_, m) => {
self.delegate.borrow(
base_place,
base_place.hir_id,
ty::BorrowKind::from_mutbl(m.into()),
);
}
adjustment::AutoBorrow::RawPtr(m) => {
debug!("walk_autoref: expr.hir_id={} base_place={:?}", expr.hir_id, base_place);
self.delegate.borrow(base_place, base_place.hir_id, ty::BorrowKind::from_mutbl(m));
}
}
}
fn walk_arm(&mut self, discr_place: &PlaceWithHirId<'tcx>, arm: &hir::Arm<'_>) {
let closure_def_id = match discr_place.place.base {
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id.to_def_id()),
_ => None,
};
self.delegate.fake_read(
discr_place.place.clone(),
FakeReadCause::ForMatchedPlace(closure_def_id),
discr_place.hir_id,
);
self.walk_pat(discr_place, arm.pat);
if let Some(hir::Guard::If(e)) = arm.guard {
self.consume_expr(e)
} else if let Some(hir::Guard::IfLet(_, ref e)) = arm.guard {
self.consume_expr(e)
}
self.consume_expr(arm.body);
}
/// Walks a pat that occurs in isolation (i.e., top-level of fn argument or
/// let binding, and *not* a match arm or nested pat.)
fn walk_irrefutable_pat(&mut self, discr_place: &PlaceWithHirId<'tcx>, pat: &hir::Pat<'_>) {
let closure_def_id = match discr_place.place.base {
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id.to_def_id()),
_ => None,
};
self.delegate.fake_read(
discr_place.place.clone(),
FakeReadCause::ForLet(closure_def_id),
discr_place.hir_id,
);
self.walk_pat(discr_place, pat);
}
/// The core driver for walking a pattern
fn walk_pat(&mut self, discr_place: &PlaceWithHirId<'tcx>, pat: &hir::Pat<'_>) {
debug!("walk_pat(discr_place={:?}, pat={:?})", discr_place, pat);
let tcx = self.tcx();
let ExprUseVisitor { ref mc, body_owner: _, ref mut delegate } = *self;
return_if_err!(mc.cat_pattern(discr_place.clone(), pat, |place, pat| {
if let PatKind::Binding(_, canonical_id, ..) = pat.kind {
debug!("walk_pat: binding place={:?} pat={:?}", place, pat,);
if let Some(bm) =
mc.typeck_results.extract_binding_mode(tcx.sess, pat.hir_id, pat.span)
{
debug!("walk_pat: pat.hir_id={:?} bm={:?}", pat.hir_id, bm);
// pat_ty: the type of the binding being produced.
let pat_ty = return_if_err!(mc.node_ty(pat.hir_id));
debug!("walk_pat: pat_ty={:?}", pat_ty);
// Each match binding is effectively an assignment to the
// binding being produced.
let def = Res::Local(canonical_id);
if let Ok(ref binding_place) = mc.cat_res(pat.hir_id, pat.span, pat_ty, def) {
delegate.mutate(binding_place, binding_place.hir_id);
}
// It is also a borrow or copy/move of the value being matched.
// In a cases of pattern like `let pat = upvar`, don't use the span
// of the pattern, as this just looks confusing, instead use the span
// of the discriminant.
match bm {
ty::BindByReference(m) => {
let bk = ty::BorrowKind::from_mutbl(m);
delegate.borrow(place, discr_place.hir_id, bk);
}
ty::BindByValue(..) => {
debug!("walk_pat binding consuming pat");
delegate_consume(mc, *delegate, place, discr_place.hir_id);
}
}
}
}
}));
}
/// Handle the case where the current body contains a closure.
///
/// When the current body being handled is a closure, then we must make sure that
/// - The parent closure only captures Places from the nested closure that are not local to it.
///
/// In the following example the closures `c` only captures `p.x` even though `incr`
/// is a capture of the nested closure
///
/// ```rust,ignore(cannot-test-this-because-pseudo-code)
/// let p = ..;
/// let c = || {
/// let incr = 10;
/// let nested = || p.x += incr;
/// }
/// ```
///
/// - When reporting the Place back to the Delegate, ensure that the UpvarId uses the enclosing
/// closure as the DefId.
fn walk_captures(&mut self, closure_expr: &hir::Expr<'_>) {
fn upvar_is_local_variable<'tcx>(
upvars: Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>>,
upvar_id: &hir::HirId,
body_owner_is_closure: bool,
) -> bool {
upvars.map(|upvars| !upvars.contains_key(upvar_id)).unwrap_or(body_owner_is_closure)
}
debug!("walk_captures({:?})", closure_expr);
let closure_def_id = self.tcx().hir().local_def_id(closure_expr.hir_id).to_def_id();
let upvars = self.tcx().upvars_mentioned(self.body_owner);
// For purposes of this function, generator and closures are equivalent.
let body_owner_is_closure = matches!(
self.tcx().type_of(self.body_owner.to_def_id()).kind(),
ty::Closure(..) | ty::Generator(..)
);
// If we have a nested closure, we want to include the fake reads present in the nested closure.
if let Some(fake_reads) = self.mc.typeck_results.closure_fake_reads.get(&closure_def_id) {
for (fake_read, cause, hir_id) in fake_reads.iter() {
match fake_read.base {
PlaceBase::Upvar(upvar_id) => {
if upvar_is_local_variable(
upvars,
&upvar_id.var_path.hir_id,
body_owner_is_closure,
) {
// The nested closure might be fake reading the current (enclosing) closure's local variables.
// The only places we want to fake read before creating the parent closure are the ones that
// are not local to it/ defined by it.
//
// ```rust,ignore(cannot-test-this-because-pseudo-code)
// let v1 = (0, 1);
// let c = || { // fake reads: v1
// let v2 = (0, 1);
// let e = || { // fake reads: v1, v2
// let (_, t1) = v1;
// let (_, t2) = v2;
// }
// }
// ```
// This check is performed when visiting the body of the outermost closure (`c`) and ensures
// that we don't add a fake read of v2 in c.
continue;
}
}
_ => {
bug!(
"Do not know how to get HirId out of Rvalue and StaticItem {:?}",
fake_read.base
);
}
};
self.delegate.fake_read(fake_read.clone(), *cause, *hir_id);
}
}
if let Some(min_captures) = self.mc.typeck_results.closure_min_captures.get(&closure_def_id)
{
for (var_hir_id, min_list) in min_captures.iter() {
if upvars.map_or(body_owner_is_closure, |upvars| !upvars.contains_key(var_hir_id)) {
// The nested closure might be capturing the current (enclosing) closure's local variables.
// We check if the root variable is ever mentioned within the enclosing closure, if not
// then for the current body (if it's a closure) these aren't captures, we will ignore them.
continue;
}
for captured_place in min_list {
let place = &captured_place.place;
let capture_info = captured_place.info;
let place_base = if body_owner_is_closure {
// Mark the place to be captured by the enclosing closure
PlaceBase::Upvar(ty::UpvarId::new(*var_hir_id, self.body_owner))
} else {
// If the body owner isn't a closure then the variable must
// be a local variable
PlaceBase::Local(*var_hir_id)
};
let place_with_id = PlaceWithHirId::new(
capture_info.path_expr_id.unwrap_or(
capture_info.capture_kind_expr_id.unwrap_or(closure_expr.hir_id),
),
place.base_ty,
place_base,
place.projections.clone(),
);
match capture_info.capture_kind {
ty::UpvarCapture::ByValue(_) => {
self.delegate_consume(&place_with_id, place_with_id.hir_id);
}
ty::UpvarCapture::ByRef(upvar_borrow) => {
self.delegate.borrow(
&place_with_id,
place_with_id.hir_id,
upvar_borrow.kind,
);
}
}
}
}
}
}
}
/// Decide how a by-value use of `place_with_id` behaves: a `Copy` type is
/// merely copied, anything else is moved out of the place.
fn copy_or_move<'a, 'tcx>(
    mc: &mc::MemCategorizationContext<'a, 'tcx>,
    place_with_id: &PlaceWithHirId<'tcx>,
) -> ConsumeMode {
    let span = mc.tcx().hir().span(place_with_id.hir_id);
    if mc.type_is_copy_modulo_regions(place_with_id.place.ty(), span) {
        ConsumeMode::Copy
    } else {
        ConsumeMode::Move
    }
}
// Report a by-value use of `place_with_id` to `delegate`:
// - non-`Copy` types are consumed (moved out of the place);
// - `Copy` types are reported as an `ImmBorrow` instead.
fn delegate_consume<'a, 'tcx>(
    mc: &mc::MemCategorizationContext<'a, 'tcx>,
    delegate: &mut (dyn Delegate<'tcx> + 'a),
    place_with_id: &PlaceWithHirId<'tcx>,
    diag_expr_id: hir::HirId,
) {
    debug!("delegate_consume(place_with_id={:?})", place_with_id);
    match copy_or_move(mc, place_with_id) {
        ConsumeMode::Copy => {
            delegate.borrow(place_with_id, diag_expr_id, ty::BorrowKind::ImmBorrow)
        }
        ConsumeMode::Move => delegate.consume(place_with_id, diag_expr_id),
    }
}
/// Returns `true` when matching a value of type `ty` requires reading its
/// discriminant: either the ADT actually has multiple variants, or it is a
/// foreign `#[non_exhaustive]` type that may grow more variants/fields later
/// and must therefore be treated like a multi-variant ADT.
fn is_multivariant_adt(ty: Ty<'_>) -> bool {
    match ty.kind() {
        ty::Adt(def, _) => {
            // Whether the item is marked `#[non_exhaustive]`; only relevant
            // when the definition lives in another crate (a local item cannot
            // gain variants without this code being recompiled).
            let is_non_exhaustive = match def.adt_kind() {
                AdtKind::Enum => def.is_variant_list_non_exhaustive(),
                AdtKind::Struct | AdtKind::Union => {
                    def.non_enum_variant().is_field_list_non_exhaustive()
                }
            };
            def.variants.len() > 1 || (is_non_exhaustive && !def.did.is_local())
        }
        _ => false,
    }
}
| 40.137255 | 122 | 0.503089 |
29abb0f4e0c77337d86ebbcf91d29f5239cb89ac | 3,293 | use crate::request::prelude::*;
use twilight_model::{
guild::{Permissions, Role},
id::GuildId,
};
#[derive(Default, Serialize)]
/// JSON body for the "create role" request. Every field is optional; `None`
/// values are omitted from the payload so Discord applies its own defaults.
struct CreateRoleFields {
    /// Role color as an integer RGB value.
    #[serde(skip_serializing_if = "Option::is_none")]
    color: Option<u32>,
    /// Whether the role is displayed separately in the member list.
    #[serde(skip_serializing_if = "Option::is_none")]
    hoist: Option<bool>,
    /// Whether the role can be @mentioned in chat.
    #[serde(skip_serializing_if = "Option::is_none")]
    mentionable: Option<bool>,
    /// Role name.
    #[serde(skip_serializing_if = "Option::is_none")]
    name: Option<String>,
    /// Permission bit set granted by the role.
    #[serde(skip_serializing_if = "Option::is_none")]
    permissions: Option<Permissions>,
}
/// Create a role in a guild.
///
/// # Examples
///
/// ```rust,no_run
/// use twilight_http::Client;
/// use twilight_model::id::GuildId;
///
/// # #[tokio::main]
/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let client = Client::new("my token");
/// let guild_id = GuildId(234);
///
/// client.create_role(guild_id)
/// .color(0xd90083)
/// .name("Bright Pink")
/// .await?;
/// # Ok(()) }
/// ```
pub struct CreateRole<'a> {
    // Accumulated optional request fields, serialized as the JSON body.
    fields: CreateRoleFields,
    // In-flight request future, created lazily by `start`.
    fut: Option<Pending<'a, Role>>,
    // Guild in which the role will be created.
    guild_id: GuildId,
    http: &'a Client,
    // Optional audit-log reason sent via request headers.
    reason: Option<String>,
}
impl<'a> CreateRole<'a> {
    /// Builds a request with no fields set; Discord's defaults apply to
    /// anything the caller does not configure.
    pub(crate) fn new(http: &'a Client, guild_id: GuildId) -> Self {
        Self {
            fields: CreateRoleFields::default(),
            fut: None,
            guild_id,
            http,
            reason: None,
        }
    }
    /// Set the color of the role.
    pub fn color(mut self, color: u32) -> Self {
        self.fields.color = Some(color);
        self
    }
    /// If true, display the role in the members list.
    pub fn hoist(mut self, hoist: bool) -> Self {
        self.fields.hoist = Some(hoist);
        self
    }
    /// If true, the role can be @mentioned (pinged) in chat.
    pub fn mentionable(mut self, mentionable: bool) -> Self {
        self.fields.mentionable = Some(mentionable);
        self
    }
    /// Set the name of the role.
    ///
    /// If none is specified, Discord sets this to `New Role`.
    pub fn name(mut self, name: impl Into<String>) -> Self {
        self.fields.name = Some(name.into());
        self
    }
    /// Set the allowed permissions of this role.
    pub fn permissions(mut self, permissions: Permissions) -> Self {
        self.fields.permissions = Some(permissions);
        self
    }
    /// Attach an audit log reason to this request.
    pub fn reason(mut self, reason: impl Into<String>) -> Self {
        self.reason = Some(reason.into());
        self
    }
    /// Serialize the body, assemble the request (with audit headers when a
    /// reason was supplied) and store the pending future.
    fn start(&mut self) -> Result<()> {
        let request = match &self.reason {
            Some(reason) => {
                // Build the audit-log header first, matching the original
                // evaluation order (header errors surface before body errors).
                let headers = audit_header(&reason)?;
                Request::from((
                    crate::json_to_vec(&self.fields)?,
                    headers,
                    Route::CreateRole {
                        guild_id: self.guild_id.0,
                    },
                ))
            }
            None => Request::from((
                crate::json_to_vec(&self.fields)?,
                Route::CreateRole {
                    guild_id: self.guild_id.0,
                },
            )),
        };
        self.fut.replace(Box::pin(self.http.request(request)));
        Ok(())
    }
}
poll_req!(CreateRole<'_>, Role);
| 25.527132 | 68 | 0.554813 |
9cf189ecb18b15ef78bf65b1664752980ad1b41e | 28,436 | #![recursion_limit = "512"]
use std::cell::RefCell;
use anyhow::{anyhow, Context as _, Error};
use chat_common::{Peer, WsMessage, WsResponse};
use laplace_yew::{MsgError, RawHtml};
use libp2p_core::{identity::ed25519::Keypair, PeerId, PublicKey};
use pulldown_cmark::{html as cmark_html, Options, Parser};
use wasm_web_helpers::{
error::Result as WebResult,
fetch::{JsonFetcher, MissingBody, Response},
websocket::{self, WebSocketError, WebSocketService},
};
use web_sys::{HtmlElement, HtmlInputElement, HtmlTextAreaElement};
use yew::{classes, html, html::Scope, Component, Context, Html, KeyboardEvent, MouseEvent};
use yew_mdc_widgets::{
auto_init, console,
dom::{self, existing::JsObjectAccess},
drawer, Button, Dialog, Drawer, Element, IconButton, List, ListItem, MdcWidget, TextField, TopAppBar,
};
use self::addresses::Addresses;
mod addresses;
#[allow(clippy::large_enum_variant)]
/// Top-level application state.
enum State {
    /// No credentials yet: the sign-in form is displayed.
    SignIn,
    /// Signed in: holds the full chat session state.
    Chat(Chat),
}
/// State of an active (signed-in) chat session.
struct Chat {
    /// The user's base58-encoded key pair, kept for display in the keys dialog.
    keys: Keys,
    /// libp2p peer identity derived from the public key.
    peer_id: PeerId,
    /// Drag state for the resizable sidebar/editor split handles.
    resize_data: ResizeData,
    /// WebSocket connection to the chat backend.
    ws: WebSocketService,
    /// One channel per correspondent peer.
    channels: Vec<Channel>,
    /// Index into `channels` of the currently displayed conversation.
    active_channel_idx: usize,
}
/// Base58-encoded ed25519 key pair entered at sign-in.
struct Keys {
    // Base58 public key (see the bs58 decoding in the SignIn handler).
    public_key: String,
    // Base58 secret key.
    secret_key: String,
}
#[derive(Default)]
/// Drag state along one axis of a resizable pane.
struct ResizeDir {
    // Cursor screen position when the drag started.
    start_cursor_screen_pos: i32,
    // Pane size (px) when the drag started.
    start_size: i32,
    // True while a drag on this axis is in progress.
    tracking: bool,
}
#[derive(Default)]
/// Drag state for both split handles: sidebar width and editor height.
struct ResizeData {
    width: ResizeDir,
    height: ResizeDir,
}
/// One message in a conversation thread.
struct Message {
    // True when the local user authored the message.
    is_mine: bool,
    // Raw message text (rendered as markdown elsewhere).
    body: String,
}
/// A conversation with a single correspondent peer.
struct Channel {
    // Peer ID of the correspondent, as a string.
    correspondent_id: String,
    // Display name; defaults to "<Unnamed>" until known.
    correspondent_name: String,
    // Messages exchanged so far, oldest first.
    thread: Vec<Message>,
}
/// Root yew component: owns the application state and the link to the
/// addresses dialog child component.
struct Root {
    // Scope of the `Addresses` child, filled in via `Msg::LinkAddresses`.
    addresses_link: Option<Scope<Addresses>>,
    state: State,
}
/// WebSocket-related sub-messages of `Msg::Ws`.
enum WsAction {
    /// Send the given text to the active channel's peer.
    SendData(String),
    /// A response arrived from the backend over the socket.
    ReceiveData(WsResponse),
}
/// Messages handled by the `Root` component.
enum Msg {
    /// Child `Addresses` component registered its scope.
    LinkAddresses(Scope<Addresses>),
    /// Sign-in button pressed; read keys from the form.
    SignIn,
    /// Keys accepted by the backend; switch to the chat screen.
    InitChat { keys: Keys, peer_id: PeerId },
    /// Mouse moved over the chat screen (drives pane resizing).
    ChatScreenMouseMove(MouseEvent),
    /// Mouse-down on the sidebar split handle.
    ToggleChatSidebarSplitHandle(MouseEvent),
    /// Mouse-down on the editor split handle.
    ToggleChatEditorSplitHandle(MouseEvent),
    /// Add a channel for the given peer ID.
    AddPeer(String),
    /// Register a new dial address with the backend.
    AddAddress(String),
    /// Switch the visible conversation.
    SwitchChannel(usize),
    Ws(WsAction),
    Error(Error),
    None,
}
impl From<WsAction> for Msg {
fn from(action: WsAction) -> Self {
Self::Ws(action)
}
}
impl From<Error> for Msg {
fn from(err: Error) -> Self {
Self::Error(err)
}
}
impl Component for Root {
type Message = Msg;
type Properties = ();
fn create(_ctx: &Context<Self>) -> Self {
Self {
addresses_link: None,
state: State::SignIn,
}
}
fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool {
match msg {
Msg::LinkAddresses(link) => {
self.addresses_link = Some(link);
false
},
Msg::SignIn => {
let public_key = TextField::get_value("public-key");
let secret_key = TextField::get_value("secret-key");
if let Ok(keypair) = (|| {
let mut bytes = bs58::decode(&secret_key)
.into_vec()
.context("Decode secret key error")?;
bytes.extend_from_slice(
&bs58::decode(&public_key)
.into_vec()
.context("Decode public key error")?,
);
Keypair::decode(&mut bytes).context("Decode keypair error")
})()
.msg_error_map(ctx.link())
{
let peer_id = PeerId::from(PublicKey::Ed25519(keypair.public()));
let body = serde_json::to_string(&Peer {
peer_id: peer_id.to_bytes(),
keypair: keypair.encode().into(),
})
.expect("Peer should be serialize to JSON");
let success_msg = RefCell::new(Some(Msg::InitChat {
keys: Keys { public_key, secret_key },
peer_id,
}));
JsonFetcher::send_post_json("/chat/p2p", body, {
let callback = ctx.link().callback(
move |response_result: WebResult<(Response, WebResult<MissingBody>)>| {
response_result
.map(|(..)| {
success_msg
.borrow_mut()
.take()
.unwrap_or_else(|| Msg::Error(anyhow!("Multiple success fetch received")))
})
.unwrap_or_else(|err| Msg::Error(err.into()))
},
);
move |response_result| callback.emit(response_result)
});
}
true
},
Msg::InitChat { keys, peer_id } => {
let location = dom::existing::document()
.location()
.expect("Location should be existing");
let url = format!("ws://{}/chat/ws", location.host().expect("Location host expected"));
let send_callback = ctx.link().batch_callback(|send_result: Result<(), WebSocketError>| {
send_result.err().map(|err| Msg::Error(anyhow!("{}", err)))
});
let receive_callback =
ctx.link()
.callback(
|receive_result: Result<websocket::Message, WebSocketError>| match receive_result {
Ok(msg) => {
match match msg {
websocket::Message::Text(text) => serde_json::from_str(&text),
websocket::Message::Bytes(bytes) => serde_json::from_slice(&bytes),
} {
Ok(response) => Msg::Ws(WsAction::ReceiveData(response)),
Err(err) => Msg::Error(err.into()),
}
},
Err(err) => Msg::Error(anyhow!("{}", err)),
},
);
let close_send_callback = ctx
.link()
.callback(|_| Msg::Error(anyhow!("WebSocket connection close")));
let close_receive_callback = ctx
.link()
.callback(|_| Msg::Error(anyhow!("WebSocket connection close")));
let ws = WebSocketService::open(
&url,
move |send_result| send_callback.emit(send_result),
move |receive_result| receive_callback.emit(receive_result),
move || close_send_callback.emit(()),
move || close_receive_callback.emit(()),
)
.unwrap_or_else(|err| panic!("WS should be created for URL {}: {:?}", url, err));
self.state = State::Chat(Chat {
keys,
peer_id,
resize_data: ResizeData::default(),
ws,
channels: Default::default(),
active_channel_idx: 0,
});
true
},
Msg::ChatScreenMouseMove(event) => {
if let State::Chat(Chat {
ref mut resize_data, ..
}) = self.state
{
if resize_data.width.tracking && event.buttons() == 1 {
let delta_x = event.screen_x() - resize_data.width.start_cursor_screen_pos;
let container = select_exist_html_element(".chat-screen");
let width =
100.max((resize_data.width.start_size + delta_x).min(container.client_width() - 400));
set_exist_element_style(".chat-sidebar", "width", &format!("{}px", width));
} else if resize_data.height.tracking && event.buttons() == 1 {
let delta_y = event.screen_y() - resize_data.height.start_cursor_screen_pos;
let container = select_exist_html_element(".chat-screen");
let height =
72.max((resize_data.height.start_size - delta_y).min(container.client_height() - 100));
set_exist_element_style(".chat-editor textarea", "height", &format!("{}px", height));
} else {
resize_data.width.tracking = false;
resize_data.height.tracking = false;
remove_class_from_exist_html_element(".chat-screen", "resize-hor-cursor");
remove_class_from_exist_html_element(".chat-screen", "resize-ver-cursor");
}
}
false
},
Msg::ToggleChatSidebarSplitHandle(event) => {
if let State::Chat(Chat {
ref mut resize_data, ..
}) = self.state
{
if event.button() == 0 {
let sidebar = select_exist_html_element(".chat-sidebar");
*resize_data = ResizeData {
width: ResizeDir {
start_cursor_screen_pos: event.screen_x(),
start_size: sidebar.client_width(),
tracking: true,
},
..Default::default()
};
add_class_to_exist_html_element(".chat-screen", "resize-hor-cursor");
}
}
false
},
Msg::ToggleChatEditorSplitHandle(event) => {
if let State::Chat(Chat {
ref mut resize_data, ..
}) = self.state
{
if event.button() == 0 {
let editor = select_exist_html_element(".chat-editor textarea");
*resize_data = ResizeData {
height: ResizeDir {
start_cursor_screen_pos: event.screen_y(),
start_size: editor.client_height(),
tracking: true,
},
..Default::default()
};
add_class_to_exist_html_element(".chat-screen", "resize-ver-cursor");
}
}
false
},
Msg::AddPeer(peer_id) => {
if let State::Chat(state) = &mut self.state {
state.channels.push(Channel {
correspondent_id: peer_id.clone(),
correspondent_name: "<Unnamed>".to_string(),
thread: vec![],
});
state
.ws
.send(to_websocket_message(&WsMessage::AddPeer(peer_id)))
.context("Send AddPeer message error")
.msg_error(ctx.link());
true
} else {
false
}
},
Msg::AddAddress(address) => {
if let State::Chat(state) = &mut self.state {
state
.ws
.send(to_websocket_message(&WsMessage::AddAddress(address)))
.context("Send AddAddress message error")
.msg_error(ctx.link());
}
false
},
Msg::SwitchChannel(idx) => {
if let State::Chat(state) = &mut self.state {
if state.active_channel_idx != idx {
state.active_channel_idx = idx;
return true;
}
}
false
},
Msg::Ws(action) => match action {
WsAction::SendData(request) => {
if let State::Chat(state) = &mut self.state {
if let Some(channel) = state.channels.get_mut(state.active_channel_idx) {
channel.thread.push(Message {
is_mine: true,
body: request.clone(),
});
state
.ws
.send(to_websocket_message(&WsMessage::Text {
peer_id: channel.correspondent_id.clone(),
msg: request,
}))
.context("Send Text message error")
.msg_error(ctx.link());
}
}
true
},
WsAction::ReceiveData(response) => {
match response {
WsResponse::Success(WsMessage::Text { peer_id, msg }) => {
if let State::Chat(state) = &mut self.state {
if let Some(channel) = state
.channels
.iter_mut()
.find(|channel| channel.correspondent_id == peer_id)
{
channel.thread.push(Message {
is_mine: false,
body: msg,
});
return true;
}
}
},
WsResponse::Success(WsMessage::AddAddress(address)) => {
if let Some(link) = &self.addresses_link {
link.send_message(addresses::Msg::Add(address));
}
},
msg => ctx.link().send_message(Msg::Error(anyhow!("{:?}", msg))),
}
false
},
},
Msg::Error(err) => {
console::error!(&err.to_string());
true
},
Msg::None => false,
}
}
/// Builds the full page: settings drawer, top app bar, any dialogs, and the
/// main content, which depends on whether the user has signed in yet.
fn view(&self, ctx: &Context<Self>) -> Html {
    // Top bar with a "menu" button that toggles the settings drawer by
    // flipping the MDC drawer's `open` property directly on the DOM node.
    let top_app_bar = TopAppBar::new()
        .id("top-app-bar")
        .title("Chat lapp")
        .navigation_item(IconButton::new().icon("menu"))
        .on_navigation(|_| {
            let drawer = dom::existing::select_element::<Element>("#chat-drawer").get(drawer::mdc::TYPE_NAME);
            let opened = drawer.get("open").as_bool().unwrap_or(false);
            drawer.set("open", !opened);
        });
    let mut drawer = Drawer::new()
        .modal()
        .id("chat-drawer")
        .title(html! { <h2 tabindex = 0>{ "Settings" }</h2> });
    let mut dialogs = html! {};
    let content = match &self.state {
        State::SignIn => self.view_sign_in(ctx),
        State::Chat(state) => {
            // Signed-in: the drawer shows user settings entries and the
            // peer/keys/addresses dialogs are prepared (initially closed).
            drawer = drawer
                .title(html! { <h3 contenteditable = "true">{ "User" }</h3> })
                .content(
                    List::ul()
                        .divider()
                        .item(
                            ListItem::new()
                                .icon("perm_identity")
                                .text("Peer")
                                .attr("tabindex", "0")
                                .on_click(|_| Dialog::open_existing("peer-dialog")),
                        )
                        .item(
                            ListItem::new()
                                .icon("vpn_key")
                                .text("Keys")
                                .on_click(|_| Dialog::open_existing("keys-dialog")),
                        )
                        .item(
                            ListItem::new()
                                .icon("share")
                                .text("Addresses")
                                .on_click(|_| Dialog::open_existing("addresses-dialog")),
                        )
                        .markup_only(),
                );
            let peer_dialog = Dialog::new()
                .id("peer-dialog")
                .title(html! { <h2 tabindex = 0> { "Peer" } </h2> })
                .content(html! { <div><strong>{ "ID: " }</strong> { state.peer_id.to_base58() }</div>});
            let keys_dialog = Dialog::new()
                .id("keys-dialog")
                .title(html! { <h2 tabindex = 0> { "Keys" } </h2> })
                .content(
                    List::ul()
                        .item(html! { <div><strong>{ "Public: " }</strong> { &state.keys.public_key }</div> })
                        .item(html! { <div><strong>{ "Secret: " }</strong> { &state.keys.secret_key }</div> }),
                );
            dialogs = html! {
                <>
                    { peer_dialog }
                    { keys_dialog }
                    <Addresses root = { ctx.link().clone() } list = { Vec::new() } />
                </>
            };
            self.view_chat(ctx, state)
        },
    };
    html! {
        <>
            { drawer }
            <div class = "mdc-drawer-scrim"></div>
            { dialogs }
            <div class = { classes!("app-content", Drawer::APP_CONTENT_CLASS) } >
                { top_app_bar }
                <div class = "mdc-top-app-bar--fixed-adjust content-container">
                    { content }
                </div>
            </div>
        </>
    }
}
/// Re-runs Material Components auto-initialization after every render so
/// that newly mounted MDC DOM nodes get their JS behavior attached.
fn rendered(&mut self, _ctx: &Context<Self>, _first_render: bool) {
    auto_init();
}
}
impl Root {
    /// Renders the sign-in screen: public/secret key fields plus "Generate"
    /// and "Sign In" buttons whose enabled state mirrors whether a keypair
    /// is present in the fields.
    fn view_sign_in(&self, ctx: &Context<Self>) -> Html {
        // Generates a fresh keypair, fills both fields, then swaps which
        // button is enabled (sign-in on, generate off).
        let generate_keypair_button = Button::new().id("generate-key-button").label("Generate").on_click(|_| {
            let keypair = Keypair::generate();
            let public_key = bs58::encode(keypair.public().encode()).into_string();
            let secret_key = bs58::encode(keypair.secret()).into_string();
            TextField::set_value("public-key", &public_key);
            TextField::set_value("secret-key", &secret_key);
            let sign_in_button = dom::existing::get_element_by_id::<HtmlElement>("sign-in-button");
            sign_in_button.remove_attribute("disabled").ok();
            sign_in_button.focus().ok();
            dom::existing::get_element_by_id::<HtmlElement>("generate-key-button")
                .set_attribute("disabled", "")
                .ok();
        });
        let sign_in_button = Button::new()
            .id("sign-in-button")
            .label("Sign In")
            .disabled()
            .on_click(ctx.link().callback(|_| Msg::SignIn));
        // On every keystroke: "Generate" is enabled only while both fields
        // are empty, otherwise "Sign In" is enabled.
        let switch_buttons = |_| {
            let generate_key_button = dom::existing::get_element_by_id::<HtmlElement>("generate-key-button");
            let sign_in_button = dom::existing::get_element_by_id::<HtmlElement>("sign-in-button");
            if TextField::get_value("public-key").is_empty() && TextField::get_value("secret-key").is_empty() {
                generate_key_button.remove_attribute("disabled").ok();
                sign_in_button.set_attribute("disabled", "").ok();
            } else if generate_key_button.get_attribute("disabled").is_none() {
                generate_key_button.set_attribute("disabled", "").ok();
                sign_in_button.remove_attribute("disabled").ok();
            }
        };
        let sign_in_form = List::simple_ul().items(vec![
            ListItem::simple().child(html! {
                <span class = "mdc-typography--overline">{ "Enter or generate a keypair" }</span>
            }),
            ListItem::simple().class("sig-in-field").child(
                TextField::outlined()
                    .id("public-key")
                    .class("expand")
                    .label("Public key")
                    .on_input(switch_buttons),
            ),
            ListItem::simple().class("sig-in-field").child(
                TextField::outlined()
                    .id("secret-key")
                    .class("expand")
                    .label("Secret key")
                    .on_input(switch_buttons),
            ),
            ListItem::simple().child(html! {
                <div class = "sign-in-actions">
                    { generate_keypair_button }
                    { sign_in_button }
                </div>
            }),
        ]);
        html! {
            <div class = "keys-form">
                { sign_in_form }
            </div>
        }
    }

    /// Renders the chat screen: channel list sidebar, the active channel's
    /// message thread, and the message editor.
    fn view_chat(&self, ctx: &Context<Self>, state: &Chat) -> Html {
        let mut channels = List::nav().two_line().divider();
        let mut messages = html! {};
        for (idx, channel) in state.channels.iter().enumerate() {
            let mut item = ListItem::link(format!("#{}", channel.correspondent_id))
                .icon("person")
                .text(&channel.correspondent_name)
                .text(&channel.correspondent_id)
                .on_click(ctx.link().callback(move |_| Msg::SwitchChannel(idx)));
            if idx == state.active_channel_idx {
                // Only the active channel's thread is rendered; each message
                // body is markdown converted to raw HTML.
                item = item.selected(true).attr("tabindex", "0");
                messages = html! { {
                    for channel.thread.iter().map(|msg| {
                        let msg_class = if msg.is_mine { "mine-message" } else { "message" };
                        html! { <div class = { msg_class } ><RawHtml inner_html = { to_view_inner_html(&msg.body) } /></div> }
                    })
                } };
            }
            channels = channels.item(item).divider()
        }
        channels = channels.markup_only();
        let add_peer_dialog = self.view_add_peer_dialog(ctx);
        let add_peer_button = IconButton::new()
            .icon("add")
            .class("centered-hor")
            .on_click(|_| Dialog::open_existing("add-peer-dialog"));
        // Ctrl+Enter sends the editor content and clears the textarea; any
        // other key is ignored (Msg::None).
        let sender = ctx.link().callback(|event: KeyboardEvent| {
            if event.key() == "Enter" && event.ctrl_key() {
                let editor = dom::existing::get_element_by_id::<HtmlTextAreaElement>("editor");
                let message = editor.value();
                editor.set_value("");
                Msg::Ws(WsAction::SendData(message))
            } else {
                Msg::None
            }
        });
        let editor = html! {
            <label class = "mdc-text-field mdc-text-field--textarea mdc-text-field--no-label">
                <textarea id = "editor" class = "mdc-text-field__input" rows = "3" aria-label = "Label"
                    placeholder = "Type your message here..." onkeypress = { sender }></textarea>
            </label>
        };
        html! {
            <div class = "chat-screen" onmousemove = { ctx.link().callback(Msg::ChatScreenMouseMove) }>
                <aside class = "chat-sidebar">
                    <div class = "chat-flex-container scrollable-content">
                        { channels }
                        { add_peer_button }
                        { add_peer_dialog }
                    </div>
                </aside>
                <div class = "chat-sidebar-split-handle resize-hor-cursor"
                    onmousedown = { ctx.link().callback(Msg::ToggleChatSidebarSplitHandle) }></div>
                <div class = "chat-main">
                    <div class = "chat-flex-container">
                        <div id = "messages" class = "chat-messages">
                            { messages }
                        </div>
                        <div class = "chat-editor-split-handle resize-ver-cursor" onmousedown = { ctx.link().callback(|event| {
                            Msg::ToggleChatEditorSplitHandle(event)
                        }) }></div>
                        <div class = "chat-editor">
                            { editor }
                        </div>
                    </div>
                </div>
            </div>
        }
    }

    /// Renders the "add peer" dialog: a Peer ID field with Add/Cancel
    /// actions; Add closes the dialog and emits `Msg::AddPeer`.
    fn view_add_peer_dialog(&self, ctx: &Context<Self>) -> Html {
        Dialog::new()
            .id("add-peer-dialog")
            .content_item(
                TextField::outlined()
                    .id("new-peer-id")
                    .class("keys-form")
                    .label("Peer ID"),
            )
            .action(
                Button::new()
                    .id("add-peer-button")
                    .label("Add")
                    .class(Dialog::BUTTON_CLASS)
                    .on_click(ctx.link().callback(|_| {
                        let id = dom::existing::select_element::<HtmlInputElement>("#new-peer-id > input").value();
                        Dialog::close_existing("add-peer-dialog");
                        Msg::AddPeer(id)
                    })),
            )
            .action(
                Button::new()
                    .label("Cancel")
                    .class(Dialog::BUTTON_CLASS)
                    .on_click(|_| Dialog::close_existing("add-peer-dialog")),
            )
            .into()
    }
}
/// Converts a markdown message body into an HTML string for rendering.
fn to_view_inner_html(content: &str) -> String {
    let mut rendered = String::new();
    cmark_html::push_html(&mut rendered, new_cmark_parser(content));
    rendered
}
/// Creates a CommonMark parser with the strikethrough and task-list
/// extensions enabled.
fn new_cmark_parser(source: &str) -> Parser {
    // `Options` is a bitflags type, so the extension flags can be OR-ed.
    let options = Options::ENABLE_STRIKETHROUGH | Options::ENABLE_TASKLISTS;
    Parser::new_ext(source, options)
}
/// Looks up an element by CSS selector, panicking if it does not exist.
pub fn select_exist_html_element(selector: &str) -> HtmlElement {
    dom::existing::select_element::<HtmlElement>(selector)
}
/// Sets one inline CSS property on `element`, panicking with a descriptive
/// message if the browser rejects the assignment.
pub fn set_element_style(element: impl AsRef<HtmlElement>, property: &str, value: &str) {
    if let Err(err) = element.as_ref().style().set_property(property, value) {
        panic!("Can't set style \"{}:{}\": {:?}", property, value, err);
    }
}
/// Convenience: selects an element by CSS selector (panicking if absent)
/// and sets one inline style property on it.
pub fn set_exist_element_style(selector: &str, property: &str, value: &str) {
    set_element_style(select_exist_html_element(selector), property, value);
}
/// Appends `class` to the element's class list unless it is already present.
pub fn add_class_to_html_element(element: impl AsRef<HtmlElement>, class: &str) {
    let element = element.as_ref();
    let current = element.class_name();
    let mut classes: Vec<_> = current.split_whitespace().collect();
    if !classes.contains(&class) {
        classes.push(class);
        element.set_class_name(&classes.join(" "));
    }
}
/// Removes the first occurrence of `class` from the element's class list,
/// leaving the element untouched when the class is absent.
pub fn remove_class_from_html_element(element: impl AsRef<HtmlElement>, class: &str) {
    let element = element.as_ref();
    let current = element.class_name();
    let mut classes: Vec<_> = current.split_whitespace().collect();
    if let Some(pos) = classes.iter().position(|c| *c == class) {
        classes.remove(pos);
        element.set_class_name(&classes.join(" "));
    }
}
/// Convenience: selects an element by CSS selector (panicking if absent)
/// and adds `class` to it.
pub fn add_class_to_exist_html_element(selector: &str, class: &str) {
    add_class_to_html_element(select_exist_html_element(selector), class);
}
/// Convenience: selects an element by CSS selector (panicking if absent)
/// and removes `class` from it.
pub fn remove_class_from_exist_html_element(selector: &str, class: &str) {
    remove_class_from_html_element(select_exist_html_element(selector), class);
}
/// Serializes a `WsMessage` to JSON and wraps it in a websocket text frame.
/// Panics only if serialization fails, which would indicate a bug.
fn to_websocket_message(msg: &WsMessage) -> websocket::Message {
    websocket::Message::Text(serde_json::to_string(msg).expect("Can't serialize message"))
}
/// Entry point: mounts the Yew `Root` component into the `#root` element.
fn main() {
    let root = dom::existing::get_element_by_id("root");
    yew::start_app_in_element::<Root>(root);
}
| 39.882188 | 127 | 0.457202 |
0326ac1120dddbc6554b2dcc1709760715923aea | 1,486 | // Copyright Amazon.com, Inc. or its affiliates.
//! Provides higher-level APIs for `ION_INT`
use crate::result::*;
use crate::*;
use std::ops::{Deref, DerefMut};
use std::ptr;
/// Smart Pointer over `ION_INT` to ensure that `ion_int_free` is invoked on the instance.
#[derive(Debug)]
pub struct IonIntPtr {
    // Owned raw pointer to an Ion C big integer; allocated in `try_new`
    // and released exactly once in `Drop`.
    value: *mut ION_INT,
}
impl IonIntPtr {
    /// Allocates a new `ION_INT` initialized to zero.
    ///
    /// The `ionc!` macro converts Ion C status codes into `IonCResult` errors.
    pub fn try_new() -> IonCResult<Self> {
        let mut value = ptr::null_mut();
        ionc!(ion_int_alloc(ptr::null_mut(), &mut value))?;
        ionc!(ion_int_init(
            value,
            ptr::null_mut(), // no owner - defers to normal Ion C xalloc/xfree
        ))?;

        Ok(Self { value })
    }

    /// Allocates a new `ION_INT` and assigns it the value of a `BigInt`.
    pub fn try_from_bigint(value: &BigInt) -> IonCResult<Self> {
        let mut ion_int = IonIntPtr::try_new()?;
        ion_int.try_assign_bigint(&value)?;

        Ok(ion_int)
    }

    /// Returns the underlying `ION_INT` as a mutable pointer, e.g. for
    /// passing to Ion C FFI functions.
    pub fn as_mut_ptr(&mut self) -> *mut ION_INT {
        self.value
    }
}
impl Deref for IonIntPtr {
    type Target = ION_INT;

    fn deref(&self) -> &Self::Target {
        // SAFETY: `value` is non-null and valid for the lifetime of `self`;
        // it is allocated in `try_new` and only freed in `Drop`.
        unsafe { &*(self.value) }
    }
}
impl DerefMut for IonIntPtr {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same invariant as `Deref`; `&mut self` guarantees
        // exclusive access to the pointee.
        unsafe { &mut *(self.value) }
    }
}
impl Drop for IonIntPtr {
    fn drop(&mut self) {
        // SAFETY: `value` was allocated by `ion_int_alloc` and is freed
        // exactly once here.
        unsafe {
            ion_int_free(self.value);
        }
    }
}
| 22.861538 | 90 | 0.58681 |
64ffc7446062cb171b7ced9ca4e792e0e16ce304 | 1,307 | // Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags:-Znll
#![allow(warnings)]
// Callee that makes the use of `x` in `main` observable to the liveness
// analysis; its result only drives the loop exit.
fn use_x(_: usize) -> bool { true }
// NLL liveness fixture: the initial `x = 22` is dead because the loop body
// overwrites `x` before any use; the expected MIR annotations below encode
// this. Keep the code in sync with those annotations.
fn main() {
    let mut x = 22;

    loop {
        // Key point: `x` not live on entry to this basic block.
        x = 55;
        if use_x(x) { break; }
    }
}
// END RUST SOURCE
// START rustc.main.nll.0.mir
// | Live variables on entry to bb3: []
// bb3: {
// | Live variables on entry to bb3[0]: []
// _1 = const 55usize;
// | Live variables on entry to bb3[1]: [_1]
// StorageLive(_3);
// | Live variables on entry to bb3[2]: [_1]
// StorageLive(_4);
// | Live variables on entry to bb3[3]: [_1]
// _4 = _1;
// | Live variables on entry to bb3[4]: [_4]
// _3 = const use_x(move _4) -> [return: bb4, unwind: bb1];
// }
// END rustc.main.nll.0.mir
| 31.119048 | 69 | 0.592961 |
878abc17ddb6b0b407f3a563a53998d093710183 | 3,771 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use aptos_transaction_builder::stdlib::{encode_create_parent_vasp_account_script, ScriptCall};
use aptos_types::account_config;
use diem_framework_releases::legacy::transaction_scripts::LegacyStdlibScript;
use language_e2e_tests::{
account::{self, Account},
executor::FakeExecutor,
};
use proptest::{collection::vec, prelude::*};
use std::convert::TryFrom;
proptest! {
    #![proptest_config(ProptestConfig::with_cases(10))]

    // Fuzzes arbitrary script calls against a pristine genesis state,
    // alternating the sender between the root and treasury-compliance
    // accounts. Only asserts that no transaction is *discarded* by the VM;
    // outputs are not applied, so every txn runs against genesis.
    #[test]
    fn fuzz_scripts_genesis_state(
        txns in vec(any::<ScriptCall>(), 0..10),
    ) {
        let executor = FakeExecutor::from_genesis_file();
        let accounts = vec![
            (Account::new_aptos_root(), 0),
            (Account::new_blessed_tc(), 0),
        ];
        let num_accounts = accounts.len();
        for (i, txn) in txns.into_iter().enumerate() {
            let script = txn.encode();
            let (account, account_sequence_number) = &accounts[i % num_accounts];
            let output = executor.execute_transaction(
                account.transaction()
                    .script(script.clone())
                    .sequence_number(*account_sequence_number)
                    .sign());
            prop_assert!(!output.status().is_discarded());
        }
    }

    // Heavier fuzz: first seeds ten parent-VASP accounts, then executes
    // fuzzed scripts from every account, applying write sets as it goes.
    // `#[ignore]`d by default.
    #[test]
    #[ignore]
    fn fuzz_scripts(
        txns in vec(any::<ScriptCall>(), 0..100),
    ) {
        let mut executor = FakeExecutor::from_genesis_file();
        let mut accounts = vec![];
        let aptos_root = Account::new_aptos_root();
        let coins = vec![account::xus_currency_code()];

        // Create a number of accounts
        for i in 0..10 {
            let account = executor.create_raw_account();
            executor.execute_and_apply(
                aptos_root
                    .transaction()
                    .script(encode_create_parent_vasp_account_script(
                        account_config::type_tag_for_currency_code(coins[i % coins.len()].clone()),
                        0,
                        *account.address(),
                        account.auth_key_prefix(),
                        vec![],
                        i % 2 == 0,
                    ))
                    .sequence_number(i as u64)
                    .sign(),
            );
            accounts.push((account, 0));
        }
        // Don't include the DR account since txns from that can bork the system
        accounts.push((Account::new_genesis_account(account_config::testnet_dd_account_address()), 0));
        accounts.push((Account::new_blessed_tc(), 0));
        let num_accounts = accounts.len();

        for (i, txn) in txns.into_iter().enumerate() {
            let script = txn.encode();
            let (account, account_sequence_number) = accounts.get_mut(i % num_accounts).unwrap();
            // Detect authentication-key rotation scripts: their write sets
            // must not be applied or later signatures would stop verifying.
            let script_is_rotate = LegacyStdlibScript::try_from(script.code()).map(|script|
                script == LegacyStdlibScript::RotateAuthenticationKey ||
                script == LegacyStdlibScript::RotateAuthenticationKeyWithNonce ||
                script == LegacyStdlibScript::RotateAuthenticationKeyWithRecoveryAddress
            ).unwrap_or(false);
            let output = executor.execute_transaction(
                account.transaction()
                    .script(script.clone())
                    .sequence_number(*account_sequence_number)
                    .sign());
            prop_assert!(!output.status().is_discarded());
            // Don't apply key rotation transactions since that will bork future txns
            if !script_is_rotate {
                executor.apply_write_set(output.write_set());
                *account_sequence_number += 1;
            }
        }
    }
}
| 40.117021 | 103 | 0.574118 |
389bd54759c08bdee2a7158a0f45c4550082a4e0 | 16 | pub mod ignore;
| 8 | 15 | 0.75 |
3af8d6451e3e8efcd9964fd7c9637006b0fd83ec | 3,747 | #[doc = r" Value read from the register"]
pub struct R {
    // Snapshot of the register's raw bits taken at `read()` time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw bits accumulated by field proxies; written back on commit.
    bits: u32,
}
// svd2rust-generated read/modify/write accessors for the GPIOCLKGR register.
impl super::GPIOCLKGR {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read once, let the closure transform the bits, write back once.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R { bits: self.register.get() }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // `write` starts from the reset value, not the current contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
pub struct RESERVED1R {
    // Raw value of the RESERVED1 bit-field (register bits 1..=31).
    bits: u32,
}
// Read proxy for the multi-bit RESERVED1 field.
impl RESERVED1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
}
#[doc = r" Value of the field"]
pub struct CLK_ENR {
    // Value of the single-bit CLK_EN field (register bit 0).
    bits: bool,
}
// Read proxy for the single-bit CLK_EN field.
impl CLK_ENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Proxy"]
pub struct _CLK_ENW<'a> {
    // Borrow of the writer being built; `bit` mutates `w.bits` in place.
    w: &'a mut W,
}
// Write proxy for the single-bit CLK_EN field.
impl<'a> _CLK_ENW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then set it from `value`; returns the writer so
        // calls can be chained.
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// Field accessors on a read snapshot of GPIOCLKGR.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 1:31 - Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior."]
    #[inline]
    pub fn reserved1(&self) -> RESERVED1R {
        let bits = {
            const MASK: u32 = 2147483647;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) as u32
        };
        RESERVED1R { bits }
    }
    #[doc = "Bit 0 - 0: Disable clock 1: Enable clock For changes to take effect, CLKLOADCTL.LOAD needs to be written"]
    #[inline]
    pub fn clk_en(&self) -> CLK_ENR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        CLK_ENR { bits }
    }
}
// Field writers for building a GPIOCLKGR register value.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - 0: Disable clock 1: Enable clock For changes to take effect, CLKLOADCTL.LOAD needs to be written"]
    #[inline]
    pub fn clk_en(&mut self) -> _CLK_ENW {
        _CLK_ENW { w: self }
    }
}
| 26.202797 | 158 | 0.521217 |
f550f6b7c61bd2e2c917af034099eed1b48c1745 | 2,672 | use std::sync::Arc;
use anyhow::Result;
use fixtures::RaftRouter;
use maplit::btreeset;
use openraft::Config;
use openraft::State;
#[macro_use]
mod fixtures;
/// Lagging network test.
///
/// What does this test do?
///
/// - Setup a network with <=50 ms random delay of messages.
/// - bring a single-node cluster online.
/// - add two Learner and then try to commit one log.
/// - change config to a 3 members cluster and commit another log.
#[tokio::test(flavor = "multi_thread", worker_threads = 6)]
async fn lagging_network_write() -> Result<()> {
    let (_log_guard, ut_span) = init_ut!();
    let _ent = ut_span.enter();

    let timeout = Some(tokio::time::Duration::from_millis(2000));
    let config = Arc::new(
        Config {
            heartbeat_interval: 100,
            election_timeout_min: 300,
            election_timeout_max: 600,
            ..Default::default()
        }
        .validate()?,
    );
    // Every message routed between nodes is delayed (up to 50 ms).
    let router = RaftRouter::builder(config).send_delay(50).build();
    let router = Arc::new(router);
    router.new_raft_node(0).await;

    // `want` tracks the expected last log index on the nodes being waited on.
    let mut want = 0;

    // Assert all nodes are in non-voter state & have no entries.
    router.wait_for_log(&btreeset![0], want, timeout, "empty").await?;
    router.wait_for_state(&btreeset![0], State::Learner, None, "empty").await?;
    router.assert_pristine_cluster().await;

    // Initialize the cluster, then assert that a stable cluster was formed & held.
    tracing::info!("--- initializing cluster");
    router.initialize_from_single_node(0).await?;
    want += 1;

    router.wait_for_log(&btreeset![0], want, timeout, "init").await?;
    router.wait_for_state(&btreeset![0], State::Leader, None, "init").await?;
    router.assert_stable_cluster(Some(1), Some(want)).await;

    // Sync some new nodes.
    router.new_raft_node(1).await;
    router.add_learner(0, 1).await?;
    router.new_raft_node(2).await;
    router.add_learner(0, 2).await?;
    router.wait_for_log(&btreeset![1, 2], want, timeout, "non-voter init").await?;

    router.client_request_many(0, "client", 1).await;
    want += 1;
    router.wait_for_log(&btreeset![0, 1, 2], want, timeout, "write one log").await?;

    // The membership change is expected to append two log entries
    // (hence `want += 2`).
    router.change_membership(0, btreeset![0, 1, 2]).await?;
    want += 2;
    router.wait_for_state(&btreeset![0], State::Leader, None, "changed").await?;
    router.wait_for_state(&btreeset![1, 2], State::Follower, None, "changed").await?;
    router.wait_for_log(&btreeset![0, 1, 2], want, timeout, "3 candidates").await?;

    router.client_request_many(0, "client", 1).await;
    want += 1;
    router.wait_for_log(&btreeset![0, 1, 2], want, timeout, "write 2nd log").await?;

    Ok(())
}
| 32.987654 | 85 | 0.652695 |
bbde370a5edd8bf5062c1ceeb7d7cb762879a422 | 423 | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
// (Re-)generated by schema tool
// >>>> DO NOT CHANGE THIS FILE! <<<<
// Change the json schema instead
#![allow(dead_code)]
#![allow(unused_imports)]
use wasmlib::*;
use crate::*;
// Schema-tool generated: read-only view over the contract state.
#[derive(Clone)]
pub struct ImmutableHelloWorldState {
    // Proxy into the underlying state storage.
    pub(crate) proxy: Proxy,
}
// Schema-tool generated: mutable view over the contract state.
#[derive(Clone)]
pub struct MutableHelloWorldState {
    // Proxy into the underlying state storage.
    pub(crate) proxy: Proxy,
}
| 17.625 | 38 | 0.6974 |
2647f486c8d8f931a46e2488c7a3052e753e3b88 | 660 | //! Date and time related ultity functions live here.
use chrono;
use time::Timespec;
/// RFC3339 formatted timestamp, stored as its already-rendered string form.
pub struct RFC3339(String);
impl From<RFC3339> for String {
fn from(timestamp: RFC3339) -> Self {
timestamp.0
}
}
impl AsRef<String> for RFC3339 {
    /// Borrows the underlying formatted `String`.
    fn as_ref(&self) -> &String {
        &self.0
    }
}
impl AsRef<str> for RFC3339 {
    /// Borrows the timestamp as a string slice.
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}
/// Converts a `time::Timespec` into an RFC3339 string, interpreting the
/// timestamp as UTC.
pub fn timespec_to_rfc3339(ts: Timespec) -> RFC3339 {
    // NOTE(review): `nsec` (an i32) is cast to u32 — assumes it is always
    // normalized to be non-negative; confirm with callers.
    let t = chrono::NaiveDateTime::from_timestamp(ts.sec as i64, ts.nsec as u32);
    RFC3339(chrono::DateTime::<chrono::Utc>::from_utc(t, chrono::Utc).to_rfc3339())
}
| 21.290323 | 83 | 0.643939 |
3a536eaf73f9b1b58f90387126d4fdc9ca7095b5 | 3,126 | use crate::Webview;
use std::path::PathBuf;
#[allow(clippy::option_env_unwrap)]
pub async fn load<W: Webview + 'static>(
webview: &mut W,
asset: String,
asset_type: String,
callback: String,
error: String,
) {
let mut webview_mut = webview.clone();
crate::execute_promise(
webview,
async move {
let mut path = PathBuf::from(if asset.starts_with('/') {
asset.replacen("/", "", 1)
} else {
asset.clone()
});
let mut read_asset;
loop {
read_asset = crate::assets::ASSETS.get(&format!(
"{}/{}",
option_env!("TAURI_DIST_DIR")
.expect("tauri apps should be built with the TAURI_DIST_DIR environment variable"),
path.to_string_lossy()
));
if read_asset.is_err() {
match path.iter().next() {
Some(component) => {
let first_component = component.to_str().expect("failed to read path component");
path = PathBuf::from(path.to_string_lossy().replacen(
format!("{}/", first_component).as_str(),
"",
1,
));
}
None => {
return Err(anyhow::anyhow!("Asset '{}' not found", asset));
}
}
} else {
break;
}
}
if asset_type == "image" {
let mime_type = if asset.ends_with("gif") {
"gif"
} else if asset.ends_with("apng") {
"apng"
} else if asset.ends_with("png") {
"png"
} else if asset.ends_with("avif") {
"avif"
} else if asset.ends_with("webp") {
"webp"
} else if asset.ends_with("svg") {
"svg+xml"
} else {
"jpeg"
};
Ok(format!(
r#"data:image/{};base64,{}"#,
mime_type,
base64::encode(&read_asset.expect("Failed to read asset type").into_owned())
))
} else {
let asset_bytes = read_asset.expect("Failed to read asset type");
webview_mut.dispatch(move |webview_ref| {
let asset_str =
std::str::from_utf8(&asset_bytes).expect("failed to convert asset bytes to u8 slice");
if asset_type == "stylesheet" {
webview_ref.eval(&format!(
r#"
(function (content) {{
var css = document.createElement('style')
css.type = 'text/css'
if (css.styleSheet)
css.styleSheet.cssText = content
else
css.appendChild(document.createTextNode(content))
document.getElementsByTagName("head")[0].appendChild(css);
}})(`{css}`)
"#,
// Escape octal sequences, which aren't allowed in template literals
css = asset_str.replace("\\", "\\\\").as_str()
));
} else {
webview_ref.eval(asset_str);
}
});
Ok("Asset loaded successfully".to_string())
}
},
callback,
error,
)
.await;
}
| 30.647059 | 98 | 0.494242 |
01c3a3e70641703102a1ccaaab056659270873e0 | 27,689 | use std::collections::HashMap;
use crate::request::Request;
use crate::http::{Method, Status};
pub use crate::router::{Route, RouteUri};
pub use crate::router::collider::Collide;
pub use crate::catcher::Catcher;
#[derive(Debug, Default)]
pub(crate) struct Router {
    // Routes bucketed by method; each bucket is kept sorted by ascending
    // rank (see `add_route`).
    routes: HashMap<Method, Vec<Route>>,
    // Catchers bucketed by status code (`None` = default catcher); each
    // bucket is kept sorted by descending base path length (see `add_catcher`).
    catchers: HashMap<Option<u16>, Vec<Catcher>>,
}
/// Colliding route and catcher pairs, as reported by `Router::finalize`.
#[derive(Debug)]
pub struct Collisions {
    pub routes: Vec<(Route, Route)>,
    pub catchers: Vec<(Catcher, Catcher)>,
}
impl Router {
    /// Creates an empty router with no routes or catchers.
    pub fn new() -> Self {
        Self::default()
    }

    /// Adds `route` to its method's bucket, re-sorting the bucket by
    /// ascending rank so `route()` yields matches in rank order.
    pub fn add_route(&mut self, route: Route) {
        let routes = self.routes.entry(route.method).or_default();
        routes.push(route);
        routes.sort_by_key(|r| r.rank);
    }

    /// Adds `catcher` to its status-code bucket (`None` = default catcher),
    /// re-sorting the bucket by descending base path length so the most
    /// specific matching catcher is found first.
    pub fn add_catcher(&mut self, catcher: Catcher) {
        let catchers = self.catchers.entry(catcher.code).or_default();
        catchers.push(catcher);
        catchers.sort_by(|a, b| b.base.path_segments().len().cmp(&a.base.path_segments().len()))
    }

    /// Iterates over all registered routes (order across methods is arbitrary).
    #[inline]
    pub fn routes(&self) -> impl Iterator<Item = &Route> + Clone {
        self.routes.values().flat_map(|v| v.iter())
    }

    /// Iterates over all registered catchers.
    #[inline]
    pub fn catchers(&self) -> impl Iterator<Item = &Catcher> + Clone {
        self.catchers.values().flat_map(|v| v.iter())
    }

    /// Returns every route matching `req`, in ascending rank order.
    pub fn route<'r, 'a: 'r>(
        &'a self,
        req: &'r Request<'r>
    ) -> impl Iterator<Item = &'a Route> + 'r {
        // Note that routes are presorted by ascending rank on each `add`.
        self.routes.get(&req.method())
            .into_iter()
            .flat_map(move |routes| routes.iter().filter(move |r| r.matches(req)))
    }

    /// Returns the best catcher for `status`/`req`: an explicit catcher for
    /// the status code wins unless the matching default catcher has a
    /// strictly longer base path.
    // For many catchers, using aho-corasick or similar should be much faster.
    pub fn catch<'r>(&self, status: Status, req: &'r Request<'r>) -> Option<&Catcher> {
        // Note that catchers are presorted by descending base length.
        let explicit = self.catchers.get(&Some(status.code))
            .and_then(|c| c.iter().find(|c| c.matches(status, req)));

        let default = self.catchers.get(&None)
            .and_then(|c| c.iter().find(|c| c.matches(status, req)));

        match (explicit, default) {
            (None, None) => None,
            (None, c@Some(_)) | (c@Some(_), None) => c,
            (Some(a), Some(b)) => {
                if b.base.path_segments().len() > a.base.path_segments().len() {
                    Some(b)
                } else {
                    Some(a)
                }
            }
        }
    }

    /// Yields every colliding pair among `items`; each unordered pair is
    /// tested exactly once via the `skip(i + 1)` triangle iteration.
    fn collisions<'a, I: 'a, T: 'a>(&self, items: I) -> impl Iterator<Item = (T, T)> + 'a
        where I: Iterator<Item = &'a T> + Clone, T: Collide + Clone,
    {
        items.clone().enumerate()
            .flat_map(move |(i, a)| {
                items.clone()
                    .skip(i + 1)
                    .filter(move |b| a.collides_with(b))
                    .map(move |b| (a.clone(), b.clone()))
            })
    }

    /// Verifies that no routes or catchers collide, returning all colliding
    /// pairs otherwise.
    pub fn finalize(&self) -> Result<(), Collisions> {
        let routes: Vec<_> = self.collisions(self.routes()).collect();
        let catchers: Vec<_> = self.collisions(self.catchers()).collect();

        if !routes.is_empty() || !catchers.is_empty() {
            return Err(Collisions { routes, catchers })
        }

        Ok(())
    }
}
#[cfg(test)]
mod test {
use super::*;
use crate::rocket::Rocket;
use crate::config::Config;
use crate::http::{Method, Method::*};
use crate::http::uri::Origin;
use crate::request::Request;
use crate::handler::dummy;
impl Router {
    // Test-only convenience: true when `finalize` reports any collision.
    fn has_collisions(&self) -> bool {
        self.finalize().is_err()
    }
}
/// Builds a router of `GET` routes with default (computed) ranks.
fn router_with_routes(routes: &[&'static str]) -> Router {
    routes.iter().fold(Router::new(), |mut router, path| {
        router.add_route(Route::new(Get, path, dummy));
        router
    })
}
/// Builds a router of `GET` routes with explicitly assigned ranks.
fn router_with_ranked_routes(routes: &[(isize, &'static str)]) -> Router {
    routes.iter().fold(Router::new(), |mut router, &(rank, path)| {
        router.add_route(Route::ranked(rank, Get, path, dummy));
        router
    })
}
/// Builds a router of `GET` routes all forced to rank 0, so that rank
/// never resolves an otherwise-colliding pair.
fn router_with_rankless_routes(routes: &[&'static str]) -> Router {
    routes.iter().fold(Router::new(), |mut router, path| {
        router.add_route(Route::ranked(0, Get, path, dummy));
        router
    })
}
/// True when the given routes collide with all ranks forced to 0.
fn rankless_route_collisions(routes: &[&'static str]) -> bool {
    router_with_rankless_routes(routes).has_collisions()
}
/// True when the given routes collide under default (computed) ranking.
fn default_rank_route_collisions(routes: &[&'static str]) -> bool {
    router_with_routes(routes).has_collisions()
}
// With ranks disabled, any pair of overlapping URI patterns must be
// reported as a collision, including dynamic segments, trailing
// multi-segment (`<a..>`) and ignored (`<_>`/`<_..>`) parameters.
#[test]
fn test_rankless_collisions() {
    assert!(rankless_route_collisions(&["/hello", "/hello"]));
    assert!(rankless_route_collisions(&["/<a>", "/hello"]));
    assert!(rankless_route_collisions(&["/<a>", "/<b>"]));
    assert!(rankless_route_collisions(&["/hello/bob", "/hello/<b>"]));
    assert!(rankless_route_collisions(&["/a/b/<c>/d", "/<a>/<b>/c/d"]));

    assert!(rankless_route_collisions(&["/a/b", "/<a..>"]));
    assert!(rankless_route_collisions(&["/a/b/c", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/<a>/b", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/<b>", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/b/<c>", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/<a..>", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/<a..>", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/b/<a..>", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/b/c/d", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/", "/<a..>"]));
    assert!(rankless_route_collisions(&["/a/<_>", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/<_>", "/a/<_..>"]));
    assert!(rankless_route_collisions(&["/<_>", "/a/<_..>"]));
    assert!(rankless_route_collisions(&["/foo", "/foo/<_..>"]));
    assert!(rankless_route_collisions(&["/foo/bar/baz", "/foo/<_..>"]));
    assert!(rankless_route_collisions(&["/a/d/<b..>", "/a/d"]));
    assert!(rankless_route_collisions(&["/a/<_..>", "/<_>"]));
    assert!(rankless_route_collisions(&["/a/<_..>", "/a"]));
    assert!(rankless_route_collisions(&["/<a>", "/a/<a..>"]));

    assert!(rankless_route_collisions(&["/<_>", "/<_>"]));
    assert!(rankless_route_collisions(&["/a/<_>", "/a/b"]));
    assert!(rankless_route_collisions(&["/a/<_>", "/a/<b>"]));
    assert!(rankless_route_collisions(&["/<_..>", "/a/b"]));
    assert!(rankless_route_collisions(&["/<_..>", "/<_>"]));
    assert!(rankless_route_collisions(&["/<_>/b", "/a/b"]));
    assert!(rankless_route_collisions(&["/", "/<foo..>"]));
}
// Collision detection must normalize URIs first: duplicated and trailing
// slashes do not make otherwise-colliding patterns distinct.
#[test]
fn test_collisions_normalize() {
    assert!(rankless_route_collisions(&["/hello/", "/hello"]));
    assert!(rankless_route_collisions(&["//hello/", "/hello"]));
    assert!(rankless_route_collisions(&["//hello/", "/hello//"]));
    assert!(rankless_route_collisions(&["/<a>", "/hello//"]));
    assert!(rankless_route_collisions(&["/<a>", "/hello///"]));
    assert!(rankless_route_collisions(&["/hello///bob", "/hello/<b>"]));
    assert!(rankless_route_collisions(&["/<a..>//", "/a//<a..>"]));
    assert!(rankless_route_collisions(&["/a/<a..>//", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/a/<a..>//", "/a/b//c//d/"]));
    assert!(rankless_route_collisions(&["/a/<a..>/", "/a/bd/e/"]));
    assert!(rankless_route_collisions(&["/<a..>/", "/a/bd/e/"]));
    assert!(rankless_route_collisions(&["//", "/<foo..>"]));
    assert!(rankless_route_collisions(&["/a/<a..>//", "/a/b//c//d/e/"]));
    assert!(rankless_route_collisions(&["/a//<a..>//", "/a/b//c//d/e/"]));
    assert!(rankless_route_collisions(&["///<_>", "/<_>"]));
    assert!(rankless_route_collisions(&["/a/<_>", "///a//b"]));
    assert!(rankless_route_collisions(&["//a///<_>", "/a//<b>"]));
    assert!(rankless_route_collisions(&["//<_..>", "/a/b"]));
    assert!(rankless_route_collisions(&["//<_..>", "/<_>"]));
    assert!(rankless_route_collisions(&["///<a>/", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["///<a..>/", "/a/<a..>"]));
    assert!(rankless_route_collisions(&["/<a..>", "/hello"]));
}
// Query strings must not prevent collisions when ranks are disabled: two
// routes whose path parts collide still collide regardless of query
// parameters. (A literally duplicated assertion was removed.)
#[test]
fn test_collisions_query() {
    // Query shouldn't affect things when rankless.
    assert!(rankless_route_collisions(&["/hello?<foo>", "/hello"]));
    assert!(rankless_route_collisions(&["/<a>?foo=bar", "/hello?foo=bar&cat=fat"]));
    assert!(rankless_route_collisions(&["/<a>", "/<b>?<foo>"]));
    assert!(rankless_route_collisions(&["/hello/bob?a=b", "/hello/<b>?d=e"]));
    assert!(rankless_route_collisions(&["/<foo>?a=b", "/foo?d=e"]));
    assert!(rankless_route_collisions(&["/<foo>?a=b&<c>", "/<foo>?d=e&<c>"]));
    assert!(rankless_route_collisions(&["/<foo>?a=b&<c>", "/<foo>?d=e"]));
}
// Patterns with different segment counts or non-overlapping statics must
// never collide, even without ranks.
#[test]
fn test_no_collisions() {
    assert!(!rankless_route_collisions(&["/a/b", "/a/b/c"]));
    assert!(!rankless_route_collisions(&["/a/b/c/d", "/a/b/c/<d>/e"]));
    assert!(!rankless_route_collisions(&["/a/d/<b..>", "/a/b/c"]));
    assert!(!rankless_route_collisions(&["/<_>", "/"]));
    assert!(!rankless_route_collisions(&["/a/<_>", "/a"]));
    assert!(!rankless_route_collisions(&["/a/<_>", "/<_>"]));
}
// Default ranking resolves static-vs-dynamic overlaps: a more specific
// (lower-ranked) route and a more general one may coexist.
#[test]
fn test_no_collision_when_ranked() {
    assert!(!default_rank_route_collisions(&["/<a>", "/hello"]));
    assert!(!default_rank_route_collisions(&["/hello/bob", "/hello/<b>"]));
    assert!(!default_rank_route_collisions(&["/a/b/c/d", "/<a>/<b>/c/d"]));
    assert!(!default_rank_route_collisions(&["/hi", "/<hi>"]));
    assert!(!default_rank_route_collisions(&["/a", "/a/<path..>"]));
    assert!(!default_rank_route_collisions(&["/", "/<path..>"]));
    assert!(!default_rank_route_collisions(&["/a/b", "/a/b/<c..>"]));
    assert!(!default_rank_route_collisions(&["/<_>", "/static"]));
    assert!(!default_rank_route_collisions(&["/<_..>", "/static"]));
    assert!(!default_rank_route_collisions(&["/<path..>", "/"]));
    assert!(!default_rank_route_collisions(&["/<_>/<_>", "/foo/bar"]));
    assert!(!default_rank_route_collisions(&["/foo/<_>", "/foo/bar"]));

    assert!(!default_rank_route_collisions(&["/<a>/<b>", "/hello/<b>"]));
    assert!(!default_rank_route_collisions(&["/<a>/<b..>", "/hello/<b>"]));
    assert!(!default_rank_route_collisions(&["/<a..>", "/hello/<b>"]));
    assert!(!default_rank_route_collisions(&["/<a..>", "/hello"]));
    assert!(!default_rank_route_collisions(&["/<a>", "/a/<path..>"]));
    assert!(!default_rank_route_collisions(&["/a/<b>/c", "/<d>/<c..>"]));
}
#[test]
fn test_collision_when_ranked() {
    // Route pairs that collide even with default ranking applied.
    let colliding: &[[&'static str; 2]] = &[
        ["/a/<b>/<c..>", "/a/<c>"],
        ["/<a>/b", "/a/<b>"],
    ];
    for pair in colliding {
        assert!(default_rank_route_collisions(pair));
    }
}
#[test]
fn test_collision_when_ranked_query() {
    // Pairs whose query strings do not keep them apart under default ranking.
    let colliding: &[[&'static str; 2]] = &[
        ["/a?a=b", "/a?c=d"],
        ["/a?a=b&<b>", "/a?<c>&c=d"],
        ["/a?a=b&<b..>", "/a?<c>&c=d"],
    ];
    for pair in colliding {
        assert!(default_rank_route_collisions(pair));
    }
}
#[test]
fn test_no_collision_when_ranked_query() {
    // Pairs kept disjoint by their query parts under default ranking.
    let disjoint: &[[&'static str; 2]] = &[
        ["/", "/?<c..>"],
        ["/hi", "/hi?<c>"],
        ["/hi", "/hi?c"],
        ["/hi?<c>", "/hi?c"],
        ["/<foo>?a=b", "/<foo>?c=d&<d>"],
    ];
    for pair in disjoint {
        assert!(!default_rank_route_collisions(pair));
    }
}
/// Routes a synthetic `method` request for `uri` through `router`,
/// returning the highest-precedence matching route, if any.
fn route<'a>(router: &'a Router, method: Method, uri: &'a str) -> Option<&'a Route> {
    let rocket = Rocket::custom(Config::default());
    let request = Request::new(&rocket, method, Origin::parse(uri).unwrap());
    // Return the first match directly instead of binding and re-returning it
    // (clippy::let_and_return).
    router.route(&request).next()
}
/// Routes a synthetic `method` request for `uri` through `router`,
/// collecting *all* matching routes in precedence order.
fn matches<'a>(router: &'a Router, method: Method, uri: &'a str) -> Vec<&'a Route> {
    let rocket = Rocket::custom(Config::default());
    let request = Request::new(&rocket, method, Origin::parse(uri).unwrap());
    router.route(&request).collect()
}
#[test]
fn test_ok_routing() {
    // A static route matches exactly itself.
    let router = router_with_routes(&["/hello"]);
    assert!(route(&router, Get, "/hello").is_some());

    // A single dynamic segment matches any one-segment path.
    let router = router_with_routes(&["/<a>"]);
    assert!(route(&router, Get, "/hello").is_some());
    assert!(route(&router, Get, "/hi").is_some());
    assert!(route(&router, Get, "/bobbbbbbbbbby").is_some());
    assert!(route(&router, Get, "/dsfhjasdf").is_some());

    // Two dynamic segments match any two-segment path.
    let router = router_with_routes(&["/<a>/<b>"]);
    assert!(route(&router, Get, "/hello/hi").is_some());
    assert!(route(&router, Get, "/a/b/").is_some());
    assert!(route(&router, Get, "/i/a").is_some());
    assert!(route(&router, Get, "/jdlk/asdij").is_some());

    // Routing is method-sensitive: each verb matches its own route.
    let mut router = Router::new();
    router.add_route(Route::new(Put, "/hello", dummy));
    router.add_route(Route::new(Post, "/hello", dummy));
    router.add_route(Route::new(Delete, "/hello", dummy));
    assert!(route(&router, Put, "/hello").is_some());
    assert!(route(&router, Post, "/hello").is_some());
    assert!(route(&router, Delete, "/hello").is_some());

    // A bare trailing multi-segment matches paths of any length, including "/".
    let router = router_with_routes(&["/<a..>"]);
    assert!(route(&router, Get, "/").is_some());
    assert!(route(&router, Get, "//").is_some());
    assert!(route(&router, Get, "/hi").is_some());
    assert!(route(&router, Get, "/hello/hi").is_some());
    assert!(route(&router, Get, "/a/b/").is_some());
    assert!(route(&router, Get, "/i/a").is_some());
    assert!(route(&router, Get, "/a/b/c/d/e/f").is_some());

    // ...and after a static prefix it also matches the bare prefix itself.
    let router = router_with_routes(&["/foo/<a..>"]);
    assert!(route(&router, Get, "/foo").is_some());
    assert!(route(&router, Get, "/foo/").is_some());
    assert!(route(&router, Get, "/foo///bar").is_some());
}
#[test]
fn test_err_routing() {
    // A static route rejects other methods, prefixes, and longer paths.
    let router = router_with_routes(&["/hello"]);
    assert!(route(&router, Put, "/hello").is_none());
    assert!(route(&router, Post, "/hello").is_none());
    assert!(route(&router, Options, "/hello").is_none());
    assert!(route(&router, Get, "/hell").is_none());
    assert!(route(&router, Get, "/hi").is_none());
    assert!(route(&router, Get, "/hello/there").is_none());
    assert!(route(&router, Get, "/hello/i").is_none());
    assert!(route(&router, Get, "/hillo").is_none());

    // A single dynamic segment still rejects other methods and extra segments.
    let router = router_with_routes(&["/<a>"]);
    assert!(route(&router, Put, "/hello").is_none());
    assert!(route(&router, Post, "/hello").is_none());
    assert!(route(&router, Options, "/hello").is_none());
    assert!(route(&router, Get, "/hello/there").is_none());
    assert!(route(&router, Get, "/hello/i").is_none());

    // Two dynamic segments reject any other segment count and other methods.
    let router = router_with_routes(&["/<a>/<b>"]);
    assert!(route(&router, Get, "/a/b/c").is_none());
    assert!(route(&router, Get, "/a").is_none());
    assert!(route(&router, Get, "/a/").is_none());
    assert!(route(&router, Get, "/a/b/c/d").is_none());
    assert!(route(&router, Put, "/hello/hi").is_none());
    // (an exact duplicate of the following assertion was removed)
    assert!(route(&router, Put, "/a/b").is_none());

    // A multi-segment does not match outside its static prefix.
    let router = router_with_routes(&["/prefix/<a..>"]);
    assert!(route(&router, Get, "/").is_none());
    assert!(route(&router, Get, "/prefi/").is_none());
}
// Builds a router from `$routes`, asserts it is collision-free, routes a
// GET request to `$to`, and asserts the matched route's URI equals `$want`.
macro_rules! assert_ranked_match {
    ($routes:expr, $to:expr => $want:expr) => ({
        let router = router_with_routes($routes);
        assert!(!router.has_collisions());
        let route_path = route(&router, Get, $to).unwrap().uri.to_string();
        assert_eq!(route_path, $want.to_string());
    })
}
#[test]
fn test_default_ranking() {
    // Each case: given the listed routes, a GET to the middle URI must be
    // matched by the route on the right (the more specific one wins).
    assert_ranked_match!(&["/hello", "/<name>"], "/hello" => "/hello");
    assert_ranked_match!(&["/<name>", "/hello"], "/hello" => "/hello");
    assert_ranked_match!(&["/<a>", "/hi", "/hi/<b>"], "/hi" => "/hi");
    assert_ranked_match!(&["/<a>/b", "/hi/c"], "/hi/c" => "/hi/c");
    assert_ranked_match!(&["/<a>/<b>", "/hi/a"], "/hi/c" => "/<a>/<b>");
    assert_ranked_match!(&["/hi/a", "/hi/<c>"], "/hi/c" => "/hi/<c>");
    // Query-aware cases: routes with matching query parts outrank plain ones.
    assert_ranked_match!(&["/a", "/a?<b>"], "/a?b=c" => "/a?<b>");
    assert_ranked_match!(&["/a", "/a?<b>"], "/a" => "/a?<b>");
    assert_ranked_match!(&["/a", "/<a>", "/a?<b>", "/<a>?<b>"], "/a" => "/a?<b>");
    assert_ranked_match!(&["/a", "/<a>", "/a?<b>", "/<a>?<b>"], "/b" => "/<a>?<b>");
    assert_ranked_match!(&["/a", "/<a>", "/a?<b>", "/<a>?<b>"], "/b?v=1" => "/<a>?<b>");
    assert_ranked_match!(&["/a", "/<a>", "/a?<b>", "/<a>?<b>"], "/a?b=c" => "/a?<b>");
    assert_ranked_match!(&["/a", "/a?b"], "/a?b" => "/a?b");
    assert_ranked_match!(&["/<a>", "/a?b"], "/a?b" => "/a?b");
    assert_ranked_match!(&["/a", "/<a>?b"], "/a?b" => "/a");
    assert_ranked_match!(&["/a?<c>&b", "/a?<b>"], "/a" => "/a?<b>");
    assert_ranked_match!(&["/a?<c>&b", "/a?<b>"], "/a?b" => "/a?<c>&b");
    assert_ranked_match!(&["/a?<c>&b", "/a?<b>"], "/a?c" => "/a?<b>");
    // Multi-segment routes lose to exact matches but pick up everything else.
    assert_ranked_match!(&["/", "/<foo..>"], "/" => "/");
    assert_ranked_match!(&["/", "/<foo..>"], "/hi" => "/<foo..>");
    assert_ranked_match!(&["/hi", "/<foo..>"], "/hi" => "/hi");
}
/// True iff building a router from the explicitly ranked `routes`
/// produces at least one collision.
fn ranked_collisions(routes: &[(isize, &'static str)]) -> bool {
    router_with_ranked_routes(routes).has_collisions()
}
#[test]
fn test_no_manual_ranked_collisions() {
    // Distinct manual ranks (or distinct paths) keep these pairs apart.
    let disjoint: &[[(isize, &'static str); 2]] = &[
        [(1, "/a/<b>"), (2, "/a/<b>")],
        [(0, "/a/<b>"), (2, "/a/<b>")],
        [(5, "/a/<b>"), (2, "/a/<b>")],
        [(1, "/a/<b>"), (1, "/b/<b>")],
        [(1, "/a/<b..>"), (2, "/a/<b..>")],
        [(0, "/a/<b..>"), (2, "/a/<b..>")],
        [(5, "/a/<b..>"), (2, "/a/<b..>")],
        [(1, "/<a..>"), (2, "/<a..>")],
    ];
    for pair in disjoint {
        assert!(!ranked_collisions(pair));
    }
}
#[test]
fn test_ranked_collisions() {
    // Equal manual ranks with overlapping paths must collide.
    let colliding: &[[(isize, &'static str); 2]] = &[
        [(2, "/a/<b..>"), (2, "/a/<b..>")],
        [(2, "/a/c/<b..>"), (2, "/a/<b..>")],
        [(2, "/<b..>"), (2, "/a/<b..>")],
    ];
    for pair in colliding {
        assert!(ranked_collisions(pair));
    }
}
// Routes a GET to `$to` through a router built from ranked `$routes` and
// checks that the matches, in order, carry exactly the expected
// `(rank, uri)` pairs.
macro_rules! assert_ranked_routing {
    (to: $to:expr, with: $routes:expr, expect: $($want:expr),+) => ({
        let router = router_with_ranked_routes(&$routes);
        let routed_to = matches(&router, Get, $to);
        let expected = &[$($want),+];
        // `assert_eq!` reports both lengths on failure (vs. a bare `assert!`).
        assert_eq!(routed_to.len(), expected.len());
        for (got, expected) in routed_to.iter().zip(expected.iter()) {
            assert_eq!(got.rank, expected.0);
            assert_eq!(got.uri.to_string(), expected.1.to_string());
        }
    })
}
#[test]
fn test_ranked_routing() {
    // Matches must come back ordered by ascending rank, regardless of the
    // order in which the routes were registered.
    assert_ranked_routing!(
        to: "/a/b",
        with: [(1, "/a/<b>"), (2, "/a/<b>")],
        expect: (1, "/a/<b>"), (2, "/a/<b>")
    );
    assert_ranked_routing!(
        to: "/b/b",
        with: [(1, "/a/<b>"), (2, "/b/<b>"), (3, "/b/b")],
        expect: (2, "/b/<b>"), (3, "/b/b")
    );
    assert_ranked_routing!(
        to: "/b/b",
        with: [(2, "/b/<b>"), (1, "/a/<b>"), (3, "/b/b")],
        expect: (2, "/b/<b>"), (3, "/b/b")
    );
    assert_ranked_routing!(
        to: "/b/b",
        with: [(3, "/b/b"), (2, "/b/<b>"), (1, "/a/<b>")],
        expect: (2, "/b/<b>"), (3, "/b/b")
    );
    assert_ranked_routing!(
        to: "/b/b",
        with: [(1, "/a/<b>"), (2, "/b/<b>"), (0, "/b/b")],
        expect: (0, "/b/b"), (2, "/b/<b>")
    );
    assert_ranked_routing!(
        to: "/profile/sergio/edit",
        with: [(1, "/<a>/<b>/edit"), (2, "/profile/<d>"), (0, "/<a>/<b>/<c>")],
        expect: (0, "/<a>/<b>/<c>"), (1, "/<a>/<b>/edit")
    );
    assert_ranked_routing!(
        to: "/profile/sergio/edit",
        with: [(0, "/<a>/<b>/edit"), (2, "/profile/<d>"), (5, "/<a>/<b>/<c>")],
        expect: (0, "/<a>/<b>/edit"), (5, "/<a>/<b>/<c>")
    );
    // Multi-segment routes participate in ranked matching too.
    assert_ranked_routing!(
        to: "/a/b",
        with: [(0, "/a/b"), (1, "/a/<b..>")],
        expect: (0, "/a/b"), (1, "/a/<b..>")
    );
    assert_ranked_routing!(
        to: "/a/b/c/d/e/f",
        with: [(1, "/a/<b..>"), (2, "/a/b/<c..>")],
        expect: (1, "/a/<b..>"), (2, "/a/b/<c..>")
    );
    assert_ranked_routing!(
        to: "/hi",
        with: [(1, "/hi/<foo..>"), (0, "/hi/<foo>")],
        expect: (1, "/hi/<foo..>")
    );
}
// Routes a GET to `$to` through a router built from `$routes` (default
// ranks) and checks the matched URIs, in order, against the expected list.
macro_rules! assert_default_ranked_routing {
    (to: $to:expr, with: $routes:expr, expect: $($want:expr),+) => ({
        let router = router_with_routes(&$routes);
        let routed_to = matches(&router, Get, $to);
        let expected = &[$($want),+];
        // `assert_eq!` reports both lengths on failure (vs. a bare `assert!`).
        assert_eq!(routed_to.len(), expected.len());
        for (got, expected) in routed_to.iter().zip(expected.iter()) {
            assert_eq!(got.uri.to_string(), expected.to_string());
        }
    })
}
#[test]
fn test_default_ranked_routing() {
    // With default ranks, matches come back most-specific first: static
    // path + matching query before static path, before dynamic variants.
    assert_default_ranked_routing!(
        to: "/a/b?v=1",
        with: ["/a/<b>", "/a/b"],
        expect: "/a/b", "/a/<b>"
    );
    assert_default_ranked_routing!(
        to: "/a/b?v=1",
        with: ["/a/<b>", "/a/b", "/a/b?<v>"],
        expect: "/a/b?<v>", "/a/b", "/a/<b>"
    );
    assert_default_ranked_routing!(
        to: "/a/b?v=1",
        with: ["/a/<b>", "/a/b", "/a/b?<v>", "/a/<b>?<v>"],
        expect: "/a/b?<v>", "/a/b", "/a/<b>?<v>", "/a/<b>"
    );
    assert_default_ranked_routing!(
        to: "/a/b",
        with: ["/a/<b>", "/a/b", "/a/b?<v>", "/a/<b>?<v>"],
        expect: "/a/b?<v>", "/a/b", "/a/<b>?<v>", "/a/<b>"
    );
    assert_default_ranked_routing!(
        to: "/a/b?c",
        with: ["/a/b", "/a/b?<c>", "/a/b?c", "/a/<b>?c", "/a/<b>?<c>", "/<a>/<b>"],
        expect: "/a/b?c", "/a/b?<c>", "/a/b", "/a/<b>?c", "/a/<b>?<c>", "/<a>/<b>"
    );
}
/// Builds a router containing one catcher per `(code, base)` pair;
/// a `None` code registers a default (catch-all) catcher.
fn router_with_catchers(catchers: &[(Option<u16>, &str)]) -> Router {
    let mut router = Router::new();
    for &(code, base) in catchers {
        let catcher = Catcher::new(code, crate::catcher::dummy)
            .map_base(|_| base.to_string())
            .unwrap();
        router.add_catcher(catcher);
    }
    router
}
/// Finds the catcher registered in `router` for `status` on a synthetic
/// GET request to `uri`, if any.
fn catcher<'a>(router: &'a Router, status: Status, uri: &str) -> Option<&'a Catcher> {
    let rocket = Rocket::custom(Config::default());
    let request = Request::new(&rocket, Method::Get, Origin::parse(uri).unwrap());
    router.catch(status, &request)
}
// Builds a router from the `catch` list of `(code, base)` catchers, then for
// each `(status, uri)` request in `reqs` asserts that the selected catcher
// has the corresponding `(code, base)` pair from `with`. A `None` code
// denotes a default (catch-all) catcher.
macro_rules! assert_catcher_routing {
    (
        catch: [$(($code:expr, $uri:expr)),+],
        reqs: [$($r:expr),+],
        with: [$(($ecode:expr, $euri:expr)),+]
    ) => ({
        let catchers = vec![$(($code.into(), $uri)),+];
        let requests = vec![$($r),+];
        let expected = vec![$(($ecode.into(), $euri)),+];

        let router = router_with_catchers(&catchers);
        for (req, expected) in requests.iter().zip(expected.iter()) {
            let req_status = Status::from_code(req.0).expect("valid status");
            let catcher = catcher(&router, req_status, req.1).expect("some catcher");
            assert_eq!(catcher.code, expected.0, "<- got, expected ->");
            assert_eq!(catcher.base.path(), expected.1, "<- got, expected ->");
        }
    })
}
#[test]
fn test_catcher_routing() {
    // Catcher selection: prefer the longest matching base path, then an
    // exact status code over the default (`None`) catcher for that base.

    // Check that the default `/` catcher catches everything.
    assert_catcher_routing! {
        catch: [(None, "/")],
        reqs: [(404, "/a/b/c"), (500, "/a/b"), (415, "/a/b/d"), (422, "/a/b/c/d?foo")],
        with: [(None, "/"), (None, "/"), (None, "/"), (None, "/")]
    }

    // Check prefixes when they're exact.
    assert_catcher_routing! {
        catch: [(None, "/"), (None, "/a"), (None, "/a/b")],
        reqs: [
            (404, "/"), (500, "/"),
            (404, "/a"), (500, "/a"),
            (404, "/a/b"), (500, "/a/b")
        ],
        with: [
            (None, "/"), (None, "/"),
            (None, "/a"), (None, "/a"),
            (None, "/a/b"), (None, "/a/b")
        ]
    }

    // Check prefixes when they're not exact.
    assert_catcher_routing! {
        catch: [(None, "/"), (None, "/a"), (None, "/a/b")],
        reqs: [
            (404, "/foo"), (500, "/bar"), (422, "/baz/bar"), (418, "/poodle?yes"),
            (404, "/a/foo"), (500, "/a/bar/baz"), (510, "/a/c"), (423, "/a/c/b"),
            (404, "/a/b/c"), (500, "/a/b/c/d"), (500, "/a/b?foo"), (400, "/a/b/yes")
        ],
        with: [
            (None, "/"), (None, "/"), (None, "/"), (None, "/"),
            (None, "/a"), (None, "/a"), (None, "/a"), (None, "/a"),
            (None, "/a/b"), (None, "/a/b"), (None, "/a/b"), (None, "/a/b")
        ]
    }

    // Check that we prefer specific to default.
    assert_catcher_routing! {
        catch: [(400, "/"), (404, "/"), (None, "/")],
        reqs: [
            (400, "/"), (400, "/bar"), (400, "/foo/bar"),
            (404, "/"), (404, "/bar"), (404, "/foo/bar"),
            (405, "/"), (405, "/bar"), (406, "/foo/bar")
        ],
        with: [
            (400, "/"), (400, "/"), (400, "/"),
            (404, "/"), (404, "/"), (404, "/"),
            (None, "/"), (None, "/"), (None, "/")
        ]
    }

    // Check that we prefer longer prefixes over specific.
    assert_catcher_routing! {
        catch: [(None, "/a/b"), (404, "/a"), (422, "/a")],
        reqs: [
            (404, "/a/b"), (404, "/a/b/c"), (422, "/a/b/c"),
            (404, "/a"), (404, "/a/c"), (404, "/a/cat/bar"),
            (422, "/a"), (422, "/a/c"), (422, "/a/cat/bar")
        ],
        with: [
            (None, "/a/b"), (None, "/a/b"), (None, "/a/b"),
            (404, "/a"), (404, "/a"), (404, "/a"),
            (422, "/a"), (422, "/a"), (422, "/a")
        ]
    }

    // Just a fun one.
    assert_catcher_routing! {
        catch: [(None, "/"), (None, "/a/b"), (500, "/a/b/c"), (500, "/a/b")],
        reqs: [(404, "/a/b/c"), (500, "/a/b"), (400, "/a/b/d"), (500, "/a/b/c/d?foo")],
        with: [(None, "/a/b"), (500, "/a/b"), (None, "/a/b"), (500, "/a/b/c")]
    }
}
}
| 41.265276 | 96 | 0.481202 |
5d242bd67b44b09194f42768b16e5f97d4c57ded | 16,308 | use std::fmt::Debug;
use std::intrinsics::transmute;
use super::*;
use crate::types::*;
use crate::*;
/// Shorthand for [`std::io::Result`] used throughout this module.
type Result<T> = std::io::Result<T>;
/// The `IOHandler` trait allows for I/O with different sources/destinations.
///
/// `IOHandler` is a custom implementation and combination of the ([`Debug`], ) [`Read`](std::io::Read), [`Write`](std::io::Write),
/// and [`Seek`](std::io::Seek) traits.
///
/// # Implemented Types
/// This crate implements `IOHandler` on the following types:
/// - [`FileMem`]
/// - [`FileNull`]
/// - [`std::fs::File`]
pub trait IOHandler: Debug {
    /// Pulls the exact number of bytes from this source required to fill `buf`.
    ///
    /// This is (generally) a redirection to [`Read::read_exact()`] and functions the same.
    ///
    /// For more information, see [`Read::read_exact()`].
    ///
    /// [`Read::read_exact()`]: std::io::Read::read_exact
    ///
    /// # Examples
    /// ```
    /// use std::io;
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// fn main() -> io::Result<()> {
    ///     let mut f = FileMem::new(vec![42u8;15]);
    ///     let mut buf = [0u8; 10];
    ///
    ///     // read exactly 10 bytes
    ///     f.read(&mut buf)?;
    ///     Ok(())
    /// }
    /// ```
    fn read(&mut self, buf: &mut [u8]) -> Result<()>;

    /// Seek to an offset, in bytes, in a stream.
    ///
    /// This is (generally) a redirection to [`Seek::seek()`] and functions the same (except without a return value).
    ///
    /// For more information, see [`Seek::seek()`].
    ///
    /// [`Seek::seek()`]: std::io::Seek::seek
    ///
    /// # Examples
    /// ```
    /// use lcms2::io::{FileMem, IOHandler};
    /// use std::io::SeekFrom;
    ///
    /// let mut buf = [42, 69, 123, 7, 255];
    /// let mut file = FileMem::new(buf.as_mut_slice());
    /// let mut b = [0];
    ///
    /// file.seek(SeekFrom::End(-1)).unwrap();             // position == 4
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 255); // position == 5
    ///
    /// file.seek(SeekFrom::Start(0)).unwrap();            // position == 0
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 42);  // position == 1
    ///
    /// file.seek(SeekFrom::Current(2)).unwrap();          // position == 3
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 7);   // position == 4
    ///
    /// file.seek(SeekFrom::Start(2)).unwrap();            // position == 2
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 123); // position == 3
    ///
    /// file.seek(SeekFrom::Current(-2)).unwrap();         // position == 1
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 69);  // position == 2
    ///
    /// file.seek(SeekFrom::Start(4)).unwrap();            // position == 4
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 255); // position == 5
    ///
    /// file.seek(SeekFrom::End(-5)).unwrap();             // position == 0
    /// file.read(&mut b).unwrap(); assert_eq!(b[0], 42);  // position == 1
    /// ```
    fn seek(&mut self, pos: SeekFrom) -> Result<()>;

    /// Closes the underlying I/O mechanism by consuming and dropping itself. Implementors MUST handle any special
    /// handling upon closing I/O. [`FileMem`] and [`FileNull`] have nothing special to do when dropping, and [`File`]
    /// automatically handles it's closing via [`Drop`].
    ///
    /// [`File`]: std::fs::File
    ///
    /// # Examples
    /// The rust borrow checker helps tremdously in preventing the use of closed I/O objects. `close()` consumes the
    /// object, so the variable cannot be used afterwards.
    /// ```compile_fail
    /// use std::fs::File;
    /// use lcms2::io::IOHandler;
    ///
    /// let mut file = File::create("filename.ext").unwrap();
    /// file.close(); //consumes, drops, and closes the file
    ///
    /// file.write(&[0u8]).unwrap(); //fails to compile
    /// /* borrow of moved value: `file` */
    /// ```
    fn close(self) -> Result<()>;

    /// Returns the current seek position from the start of the stream.
    ///
    /// This is (generally) a redirection to [`Seek::stream_position()`] and functions the same.
    ///
    /// For more information, see [`Seek::stream_position()`].
    ///
    /// [`Seek::stream_position()`]: std::io::Seek::stream_position
    ///
    /// # Examples
    /// ```
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = Vec::new();
    /// let mut file = FileMem::new(&mut buf);
    ///
    /// assert_eq!(file.tell().unwrap(), 0);
    ///
    /// file.write_u64(0).unwrap(); // 8 byte value
    /// assert_eq!(file.tell().unwrap(), 8);
    ///
    /// file.write_u32(0).unwrap(); // 4 byte value + previous 8 = 12
    /// assert_eq!(file.tell().unwrap(), 12);
    ///
    /// file.write_u16(0).unwrap(); // 2 byte value + previous 12 = 14
    /// assert_eq!(file.tell().unwrap(), 14);
    ///
    /// file.write_u8(0).unwrap(); // 1 byte value + previous 14 = 15
    /// assert_eq!(file.tell().unwrap(), 15);
    /// ```
    fn tell(&mut self) -> Result<usize>;

    /// Attempts to write an entire buffer into this I/O destination.
    ///
    /// This is (generally) a redirection to [`Write::write_all()`] and functions the same.
    ///
    /// For more information, see [`Write::write_all()`].
    ///
    /// [`Write::write_all()`]: std::io::Write::write_all
    ///
    /// # Examples
    /// ```
    /// use std::io;
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// fn main() -> io::Result<()> {
    ///     let mut buf = FileMem::new(Vec::new());
    ///
    ///     buf.write(b"some bytes")?;
    ///     Ok(())
    /// }
    /// ```
    fn write(&mut self, buf: &[u8]) -> Result<()>;

    /// Returns the length of this stream (in bytes).
    ///
    /// This method is implemented using up to two [`seek`] and two [`tell`] operations. If this method returns
    /// successfully, the seek position is unchanged (i.e. the position before calling this method is the same as
    /// afterwards). However, if this method returns an error, the seek position is unspecified.
    fn reported_size(&mut self) -> Result<usize> {
        let current_pos = self.tell()?;
        self.seek(SeekFrom::End(0))?;
        let result = self.tell();
        self.seek(SeekFrom::Start(current_pos as u64))?;
        result
    }

    /// Reads a u8 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = [42u8; 1];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_u8().unwrap(), 42u8);
    /// ```
    fn read_u8(&mut self) -> Result<u8> {
        let mut buf = [0u8];
        self.read(&mut buf)?;

        Ok(buf[0])
    }

    /// Reads a u16 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = 42u16.to_be_bytes();
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_u16().unwrap(), 42u16);
    /// ```
    fn read_u16(&mut self) -> Result<u16> {
        let mut buf = [0u8; 2];
        self.read(&mut buf)?;
        let value = u16::from_ne_bytes(buf);

        Ok(adjust_endianness_u16(value))
    }

    /// Reads u16 values to fill a buffer.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = [0u8; 6];
    /// buf[0..2].copy_from_slice(&42u16.to_be_bytes());
    /// buf[2..4].copy_from_slice(&69u16.to_be_bytes());
    /// buf[4..6].copy_from_slice(&255u16.to_be_bytes());
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// let mut read_buf = [0u16; 3];
    /// mem.read_u16_array(&mut read_buf).unwrap();
    ///
    /// assert_eq!(read_buf, [42u16, 69u16, 255u16]);
    /// ```
    fn read_u16_array(&mut self, buffer: &mut [u16]) -> Result<()> {
        for item in buffer.iter_mut() {
            *item = self.read_u16()?;
        }
        Ok(())
    }

    /// Reads a u32 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = 42u32.to_be_bytes();
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_u32().unwrap(), 42u32);
    /// ```
    fn read_u32(&mut self) -> Result<u32> {
        let mut buf = [0u8; 4];
        self.read(&mut buf)?;
        let value = u32::from_ne_bytes(buf);

        Ok(adjust_endianness_u32(value))
    }

    /// Reads a f32 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = 42f32.to_be_bytes();
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_f32().unwrap(), 42f32);
    /// ```
    fn read_f32(&mut self) -> Result<f32> {
        // read as a u32 in case magic changes values read upside down due to endianness.
        let uint_value = self.read_u32()?;

        // reinterpret the bits as f32 (stable, safe replacement for `transmute`)
        Ok(f32::from_bits(uint_value))
    }

    /// Reads a u64 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = 42u64.to_be_bytes();
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_u64().unwrap(), 42u64);
    /// ```
    fn read_u64(&mut self) -> Result<u64> {
        let mut buf = [0u8; 8];
        self.read(&mut buf)?;
        let value = u64::from_ne_bytes(buf);

        Ok(adjust_endianness_u64(value))
    }

    /// Reads a Fixed Point [Q15.16](https://en.wikipedia.org/wiki/Q_(number_format)) Number as a f64 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = 0x0002_8000u32.to_be_bytes(); // 0x0002_8000 == 2.5 in s15f16 fixed point
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_s15f16().unwrap(), 2.5);
    /// ```
    fn read_s15f16(&mut self) -> Result<f64> {
        // `u32 as i32` reinterprets the bits (two's complement), replacing the
        // previous `transmute` with a safe cast.
        let fixed_point = self.read_u32()? as S15F16;
        Ok(s15f16_to_f64(fixed_point))
    }

    /// Reads a CIEXYZ value stored as 3 Fixed Point [Q15.16](https://en.wikipedia.org/wiki/Q_(number_format)) Numbers.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    /// use lcms2::types::CIEXYZ;
    ///
    /// let mut buf = [0x00, 0x02, 0x80, 0x00,  // 0x0002_8000 == 2.5 in s15f16 fixed point
    ///                0x10, 0x00, 0x20, 0x00,  // 0x1000_2000 == 4096.125
    ///                0xFF, 0xFE, 0xC0, 0x00]; // 0xFFFE_C000 == -1.25
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// assert_eq!(mem.read_xyz().unwrap(), CIEXYZ { X: 2.5, Y: 4096.125, Z: -1.25 });
    /// ```
    fn read_xyz(&mut self) -> Result<CIEXYZ> {
        let x = self.read_s15f16()?;
        let y = self.read_s15f16()?;
        let z = self.read_s15f16()?;

        Ok(CIEXYZ { X: x, Y: y, Z: z })
    }

    /// Writes a u8 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = [0; 1];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_u8(42).unwrap();
    ///
    /// assert_eq!(buf[0], 42);
    /// ```
    fn write_u8(&mut self, value: u8) -> Result<()> {
        self.write(&[value])
    }

    /// Writes a u16 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// # fn adjust_endianness(buf: [u8; 2]) -> u16 {
    /// #     u16::from_ne_bytes(lcms2::io::adjust_endianness_16(buf))
    /// # }
    ///
    /// let mut buf = [0; 2];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_u16(42).unwrap();
    ///
    /// assert_eq!(adjust_endianness(buf), 42);
    /// ```
    fn write_u16(&mut self, value: u16) -> Result<()> {
        let value = adjust_endianness_u16(value);
        self.write(&value.to_ne_bytes())
    }

    /// Writes all u16 values from a buffer.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// # fn adjust_endianness(buf: &[u8]) -> u16 {
    /// #     let val = [buf[0], buf[1]];
    /// #     u16::from_ne_bytes(lcms2::io::adjust_endianness_16(val))
    /// # }
    ///
    /// let mut buf = [0; 6];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_u16_array(&[42, 69, 255]).unwrap();
    ///
    /// assert_eq!(adjust_endianness(&buf[0..2]), 42);
    /// assert_eq!(adjust_endianness(&buf[2..4]), 69);
    /// assert_eq!(adjust_endianness(&buf[4..6]), 255);
    /// ```
    fn write_u16_array(&mut self, buffer: &[u16]) -> Result<()> {
        for value in buffer.iter() {
            self.write_u16(*value)?;
        }
        Ok(())
    }

    /// Writes a u32 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// # fn adjust_endianness(buf: [u8; 4]) -> u32 {
    /// #     u32::from_ne_bytes(lcms2::io::adjust_endianness_32(buf))
    /// # }
    ///
    /// let mut buf = [0; 4];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_u32(42).unwrap();
    ///
    /// assert_eq!(adjust_endianness(buf), 42);
    /// ```
    fn write_u32(&mut self, value: u32) -> Result<()> {
        let value = adjust_endianness_u32(value);
        self.write(&value.to_ne_bytes())
    }

    /// Writes a f32 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// # fn adjust_endianness(buf: [u8; 4]) -> f32 {
    /// #     f32::from_ne_bytes(lcms2::io::adjust_endianness_32(buf))
    /// # }
    ///
    /// let mut buf = [0; 4];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_f32(42.0).unwrap();
    ///
    /// assert_eq!(adjust_endianness(buf), 42.0);
    /// ```
    fn write_f32(&mut self, value: f32) -> Result<()> {
        // reinterpret the f32 bits as u32 (stable, safe replacement for `transmute`)
        self.write_u32(value.to_bits())
    }

    /// Writes a u64 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// # fn adjust_endianness(buf: [u8; 8]) -> u64 {
    /// #     u64::from_ne_bytes(lcms2::io::adjust_endianness_64(buf))
    /// # }
    ///
    /// let mut buf = [0; 8];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_u64(42).unwrap();
    ///
    /// assert_eq!(adjust_endianness(buf), 42);
    /// ```
    fn write_u64(&mut self, value: u64) -> Result<()> {
        let value = adjust_endianness_u64(value);
        self.write(&value.to_ne_bytes())
    }

    /// Writes a Fixed Point [Q15.16](https://en.wikipedia.org/wiki/Q_(number_format)) Number as a f64 value.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    ///
    /// let mut buf = [0; 4];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_s15f16(2.5).unwrap();
    ///
    /// assert_eq!(buf, [0x00, 0x02, 0x80, 0x00]); // 0x0002_8000 == 2.5 in s15f16 fixed point
    /// ```
    fn write_s15f16(&mut self, value: f64) -> Result<()> {
        let fixed_point = f64_to_s15f16(value);
        // `i32 as u32` reinterprets the bits (two's complement), replacing the
        // previous `transmute` with a safe cast.
        self.write_u32(fixed_point as u32)
    }

    /// Writes a CIEXYZ value stored as 3 Fixed Point [Q15.16](https://en.wikipedia.org/wiki/Q_(number_format)) Numbers.
    ///
    /// ```rust
    /// use lcms2::io::{FileMem, IOHandler};
    /// use lcms2::types::CIEXYZ;
    ///
    /// let mut buf = [0; 12];
    /// let mut mem = FileMem::new(buf.as_mut_slice());
    ///
    /// mem.write_xyz(CIEXYZ { X: 2.5, Y: 4096.125, Z: -1.25 }).unwrap();
    ///
    /// assert_eq!(buf, [0x00, 0x02, 0x80, 0x00,   // 0x0002_8000 == 2.5 in s15f16 fixed point
    ///                  0x10, 0x00, 0x20, 0x00,   // 0x1000_2000 == 4096.125
    ///                  0xFF, 0xFE, 0xC0, 0x00]); // 0xFFFE_C000 == -1.25);
    /// ```
    fn write_xyz(&mut self, value: CIEXYZ) -> Result<()> {
        self.write_s15f16(value.X)?;
        self.write_s15f16(value.Y)?;
        self.write_s15f16(value.Z)
    }
}
/// Converts a Q15.16 fixed-point value to `f64`.
///
/// Every 32-bit `S15F16` value is exactly representable in `f64` (52-bit
/// mantissa), so a single division by 2^16 — which is exact — reproduces the
/// previous sign/whole/fraction decomposition for every input, and also
/// handles `S15F16::MIN`, whose `abs()` would overflow (panic in debug builds).
fn s15f16_to_f64(value: S15F16) -> f64 {
    value as f64 / 65536.0
}
/// Converts an `f64` to Q15.16 fixed point, rounding half-up
/// (`floor(x * 65536 + 0.5)`).
///
/// NOTE(review): the `as` cast saturates on out-of-range inputs (roughly
/// outside ±32768) instead of reporting an error — confirm callers never
/// pass such values.
fn f64_to_s15f16(value: f64) -> S15F16 {
    ((value * 65536.0) + 0.5).floor() as S15F16
}
| 32.357143 | 131 | 0.526551 |
3397fd85fd64e668e09adcc74a71f74e6363ed19 | 530 | use super::Timeout;
use std::time::Duration;
use tower_layer::Layer;
/// Applies a timeout to requests via the supplied inner service.
#[derive(Debug, Clone)]
pub struct TimeoutLayer {
    /// Maximum duration each wrapped service call is allowed to take.
    timeout: Duration,
}
/// Create a timeout from a duration
pub fn new(timeout: Duration) -> Self {
TimeoutLayer { timeout }
}
}
impl<S> Layer<S> for TimeoutLayer {
type Service = Timeout<S>;
fn layer(&self, service: S) -> Self::Service {
Timeout::new(service, self.timeout)
}
}
| 21.2 | 65 | 0.654717 |
7a6086bb501702278ef0756021b81c7ec5cfd301 | 5,243 | // This file is part of the uutils coreutils package.
//
// (c) Michael Gehring <[email protected]>
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) delim mkdelim
#[macro_use]
extern crate uucore;
use std::cmp::Ordering;
use std::fs::File;
use std::io::{self, stdin, BufRead, BufReader, Stdin};
use std::path::Path;
use uucore::InvalidEncodingHandling;
use clap::{crate_version, App, Arg, ArgMatches};
// One-line summary shown in `--help` output.
static ABOUT: &str = "compare two sorted files line by line";
// Extended help text appended after the options (currently empty).
static LONG_HELP: &str = "";
/// Names of the CLI options/arguments, shared between declaration and lookup.
mod options {
    pub const COLUMN_1: &str = "1";
    pub const COLUMN_2: &str = "2";
    pub const COLUMN_3: &str = "3";
    pub const DELIMITER: &str = "output-delimiter";
    // Default column separator: a single TAB, as in GNU comm.
    pub const DELIMITER_DEFAULT: &str = "\t";
    pub const FILE_1: &str = "FILE1";
    pub const FILE_2: &str = "FILE2";
}
/// Builds the usage string shown by `--help`, using the executable name.
fn get_usage() -> String {
    format!("{} [OPTION]... FILE1 FILE2", executable!())
}
/// Builds the leading delimiter prefix for output column `col` (1-based).
///
/// Each *preceding* column that is not suppressed contributes one copy of the
/// configured output delimiter, so surviving columns stay visually aligned.
fn mkdelim(col: usize, opts: &ArgMatches) -> String {
    let mut s = String::new();
    let delim = opts.value_of(options::DELIMITER).unwrap();

    // `delim` is already a `&str`; the former `.as_ref()` calls were redundant.
    if col > 1 && !opts.is_present(options::COLUMN_1) {
        s.push_str(delim);
    }
    if col > 2 && !opts.is_present(options::COLUMN_2) {
        s.push_str(delim);
    }

    s
}
/// Appends a trailing newline to `line` unless one is already present.
fn ensure_nl(line: &mut String) {
    if line.ends_with('\n') {
        return;
    }
    line.push('\n');
}
/// A line source that is either standard input or a buffered file.
enum LineReader {
    Stdin(Stdin),
    FileIn(BufReader<File>),
}

impl LineReader {
    /// Reads one line (including the trailing newline, if any) into `buf`,
    /// returning the number of bytes read (0 at end of input).
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        match self {
            LineReader::Stdin(reader) => reader.read_line(buf),
            LineReader::FileIn(reader) => reader.read_line(buf),
        }
    }
}
/// Core `comm` loop: reads `a` and `b` line by line (both assumed sorted)
/// and prints each line prefixed for column 1 (unique to FILE1), column 2
/// (unique to FILE2) or column 3 (common to both), honoring the
/// column-suppression flags in `opts`.
fn comm(a: &mut LineReader, b: &mut LineReader, opts: &ArgMatches) {
    // delim[n] is the delimiter prefix for column n; delim[0] is unused.
    let delim: Vec<String> = (0..4).map(|col| mkdelim(col, opts)).collect();

    let ra = &mut String::new();
    let mut na = a.read_line(ra);
    let rb = &mut String::new();
    let mut nb = b.read_line(rb);

    while na.is_ok() || nb.is_ok() {
        // Decide which side owns the current line: a side whose read failed
        // or hit EOF (Ok(0)) sorts after the other; otherwise compare lines.
        let ord = match (na.is_ok(), nb.is_ok()) {
            (false, true) => Ordering::Greater,
            (true, false) => Ordering::Less,
            (true, true) => match (&na, &nb) {
                (&Ok(0), &Ok(0)) => break,
                (&Ok(0), _) => Ordering::Greater,
                (_, &Ok(0)) => Ordering::Less,
                _ => ra.cmp(&rb),
            },
            // The loop condition guarantees at least one side is Ok.
            _ => unreachable!(),
        };

        match ord {
            Ordering::Less => {
                // Line only in FILE1 -> column 1; advance reader `a`.
                if !opts.is_present(options::COLUMN_1) {
                    ensure_nl(ra);
                    print!("{}{}", delim[1], ra);
                }
                ra.clear();
                na = a.read_line(ra);
            }
            Ordering::Greater => {
                // Line only in FILE2 -> column 2; advance reader `b`.
                if !opts.is_present(options::COLUMN_2) {
                    ensure_nl(rb);
                    print!("{}{}", delim[2], rb);
                }
                rb.clear();
                nb = b.read_line(rb);
            }
            Ordering::Equal => {
                // Line in both files -> column 3; advance both readers.
                if !opts.is_present(options::COLUMN_3) {
                    ensure_nl(ra);
                    print!("{}{}", delim[3], ra);
                }
                ra.clear();
                rb.clear();
                na = a.read_line(ra);
                nb = b.read_line(rb);
            }
        }
    }
}
fn open_file(name: &str) -> io::Result<LineReader> {
match name {
"-" => Ok(LineReader::Stdin(stdin())),
_ => {
let f = File::open(&Path::new(name))?;
Ok(LineReader::FileIn(BufReader::new(f)))
}
}
}
/// Entry point for the `comm` utility: parses command-line arguments with
/// clap and runs the three-way comparison over the two file operands.
///
/// Returns the process exit code (always 0 on this path).
pub fn uumain(args: impl uucore::Args) -> i32 {
    let usage = get_usage();
    // Lossily convert any non-UTF-8 arguments rather than rejecting them.
    let args = args
        .collect_str(InvalidEncodingHandling::ConvertLossy)
        .accept_any();
    let matches = App::new(executable!())
        .version(crate_version!())
        .about(ABOUT)
        .usage(&usage[..])
        .after_help(LONG_HELP)
        .arg(
            Arg::with_name(options::COLUMN_1)
                .short(options::COLUMN_1)
                .help("suppress column 1 (lines unique to FILE1)"),
        )
        .arg(
            Arg::with_name(options::COLUMN_2)
                .short(options::COLUMN_2)
                .help("suppress column 2 (lines unique to FILE2)"),
        )
        .arg(
            Arg::with_name(options::COLUMN_3)
                .short(options::COLUMN_3)
                .help("suppress column 3 (lines that appear in both files)"),
        )
        .arg(
            Arg::with_name(options::DELIMITER)
                .long(options::DELIMITER)
                .help("separate columns with STR")
                .value_name("STR")
                .default_value(options::DELIMITER_DEFAULT)
                .hide_default_value(true),
        )
        .arg(Arg::with_name(options::FILE_1).required(true))
        .arg(Arg::with_name(options::FILE_2).required(true))
        .get_matches_from(args);
    // Operands are `required(true)`, so value_of() cannot return None here.
    // NOTE(review): the outer `unwrap()` panics with a raw io::Error when an
    // operand cannot be opened; a user-facing message and nonzero exit code
    // would be friendlier — confirm intended behavior before changing.
    let mut f1 = open_file(matches.value_of(options::FILE_1).unwrap()).unwrap();
    let mut f2 = open_file(matches.value_of(options::FILE_2).unwrap()).unwrap();
    comm(&mut f1, &mut f2, &matches);
    0
}
| 29.290503 | 80 | 0.517452 |
7a6cd750b1b158e7264698ffb005dfc614ee17f6 | 23,172 | #[doc = "Register `INTENCLR` reader"]
// Auto-generated (svd2rust) reader/writer wrappers for INTENCLR:
// `R` holds a snapshot of the register for field reads, `W` accumulates
// bits for a register write. Both deref to the shared generic wrappers.
pub struct R(crate::R<INTENCLR_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<INTENCLR_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<INTENCLR_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<INTENCLR_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `INTENCLR` writer"]
pub struct W(crate::W<INTENCLR_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<INTENCLR_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<INTENCLR_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<INTENCLR_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Write '1' to disable interrupt for event STOPPED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STOPPED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<STOPPED_A> for bool {
#[inline(always)]
fn from(variant: STOPPED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `STOPPED` reader - Write '1' to disable interrupt for event STOPPED"]
pub struct STOPPED_R(crate::FieldReader<bool, STOPPED_A>);
impl STOPPED_R {
pub(crate) fn new(bits: bool) -> Self {
STOPPED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> STOPPED_A {
match self.bits {
false => STOPPED_A::DISABLED,
true => STOPPED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == STOPPED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == STOPPED_A::ENABLED
}
}
impl core::ops::Deref for STOPPED_R {
type Target = crate::FieldReader<bool, STOPPED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event STOPPED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STOPPED_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<STOPPED_AW> for bool {
#[inline(always)]
fn from(variant: STOPPED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `STOPPED` writer - Write '1' to disable interrupt for event STOPPED"]
pub struct STOPPED_W<'a> {
w: &'a mut W,
}
// Auto-generated (svd2rust) proxy for writing the STOPPED field (bit 1) of
// INTENCLR. Each method consumes the proxy and returns the register writer
// so field writes can be chained. The other *_W impls in this file follow
// the identical pattern at their own bit positions.
impl<'a> STOPPED_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: STOPPED_AW) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Disable"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(STOPPED_AW::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write on the staged value: mask out bit 1, then
        // insert `value` at bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
        self.w
    }
}
#[doc = "Write '1' to disable interrupt for event ERROR\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ERROR_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<ERROR_A> for bool {
#[inline(always)]
fn from(variant: ERROR_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `ERROR` reader - Write '1' to disable interrupt for event ERROR"]
pub struct ERROR_R(crate::FieldReader<bool, ERROR_A>);
impl ERROR_R {
pub(crate) fn new(bits: bool) -> Self {
ERROR_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ERROR_A {
match self.bits {
false => ERROR_A::DISABLED,
true => ERROR_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == ERROR_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == ERROR_A::ENABLED
}
}
impl core::ops::Deref for ERROR_R {
type Target = crate::FieldReader<bool, ERROR_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event ERROR\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ERROR_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<ERROR_AW> for bool {
#[inline(always)]
fn from(variant: ERROR_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `ERROR` writer - Write '1' to disable interrupt for event ERROR"]
pub struct ERROR_W<'a> {
w: &'a mut W,
}
impl<'a> ERROR_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ERROR_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(ERROR_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | ((value as u32 & 0x01) << 9);
self.w
}
}
#[doc = "Write '1' to disable interrupt for event SUSPENDED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SUSPENDED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<SUSPENDED_A> for bool {
#[inline(always)]
fn from(variant: SUSPENDED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SUSPENDED` reader - Write '1' to disable interrupt for event SUSPENDED"]
pub struct SUSPENDED_R(crate::FieldReader<bool, SUSPENDED_A>);
impl SUSPENDED_R {
pub(crate) fn new(bits: bool) -> Self {
SUSPENDED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SUSPENDED_A {
match self.bits {
false => SUSPENDED_A::DISABLED,
true => SUSPENDED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == SUSPENDED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == SUSPENDED_A::ENABLED
}
}
impl core::ops::Deref for SUSPENDED_R {
type Target = crate::FieldReader<bool, SUSPENDED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event SUSPENDED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SUSPENDED_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<SUSPENDED_AW> for bool {
#[inline(always)]
fn from(variant: SUSPENDED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SUSPENDED` writer - Write '1' to disable interrupt for event SUSPENDED"]
pub struct SUSPENDED_W<'a> {
w: &'a mut W,
}
impl<'a> SUSPENDED_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SUSPENDED_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(SUSPENDED_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | ((value as u32 & 0x01) << 18);
self.w
}
}
#[doc = "Write '1' to disable interrupt for event RXSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RXSTARTED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<RXSTARTED_A> for bool {
#[inline(always)]
fn from(variant: RXSTARTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `RXSTARTED` reader - Write '1' to disable interrupt for event RXSTARTED"]
pub struct RXSTARTED_R(crate::FieldReader<bool, RXSTARTED_A>);
impl RXSTARTED_R {
pub(crate) fn new(bits: bool) -> Self {
RXSTARTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RXSTARTED_A {
match self.bits {
false => RXSTARTED_A::DISABLED,
true => RXSTARTED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == RXSTARTED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == RXSTARTED_A::ENABLED
}
}
impl core::ops::Deref for RXSTARTED_R {
type Target = crate::FieldReader<bool, RXSTARTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event RXSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RXSTARTED_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<RXSTARTED_AW> for bool {
#[inline(always)]
fn from(variant: RXSTARTED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `RXSTARTED` writer - Write '1' to disable interrupt for event RXSTARTED"]
pub struct RXSTARTED_W<'a> {
w: &'a mut W,
}
impl<'a> RXSTARTED_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RXSTARTED_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RXSTARTED_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | ((value as u32 & 0x01) << 19);
self.w
}
}
#[doc = "Write '1' to disable interrupt for event TXSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TXSTARTED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<TXSTARTED_A> for bool {
#[inline(always)]
fn from(variant: TXSTARTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `TXSTARTED` reader - Write '1' to disable interrupt for event TXSTARTED"]
pub struct TXSTARTED_R(crate::FieldReader<bool, TXSTARTED_A>);
impl TXSTARTED_R {
pub(crate) fn new(bits: bool) -> Self {
TXSTARTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TXSTARTED_A {
match self.bits {
false => TXSTARTED_A::DISABLED,
true => TXSTARTED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == TXSTARTED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == TXSTARTED_A::ENABLED
}
}
impl core::ops::Deref for TXSTARTED_R {
type Target = crate::FieldReader<bool, TXSTARTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event TXSTARTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TXSTARTED_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<TXSTARTED_AW> for bool {
#[inline(always)]
fn from(variant: TXSTARTED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `TXSTARTED` writer - Write '1' to disable interrupt for event TXSTARTED"]
pub struct TXSTARTED_W<'a> {
w: &'a mut W,
}
impl<'a> TXSTARTED_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TXSTARTED_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(TXSTARTED_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | ((value as u32 & 0x01) << 20);
self.w
}
}
#[doc = "Write '1' to disable interrupt for event LASTRX\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LASTRX_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<LASTRX_A> for bool {
#[inline(always)]
fn from(variant: LASTRX_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LASTRX` reader - Write '1' to disable interrupt for event LASTRX"]
pub struct LASTRX_R(crate::FieldReader<bool, LASTRX_A>);
impl LASTRX_R {
pub(crate) fn new(bits: bool) -> Self {
LASTRX_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LASTRX_A {
match self.bits {
false => LASTRX_A::DISABLED,
true => LASTRX_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == LASTRX_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == LASTRX_A::ENABLED
}
}
impl core::ops::Deref for LASTRX_R {
type Target = crate::FieldReader<bool, LASTRX_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event LASTRX\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LASTRX_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<LASTRX_AW> for bool {
#[inline(always)]
fn from(variant: LASTRX_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LASTRX` writer - Write '1' to disable interrupt for event LASTRX"]
pub struct LASTRX_W<'a> {
w: &'a mut W,
}
impl<'a> LASTRX_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LASTRX_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(LASTRX_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 23)) | ((value as u32 & 0x01) << 23);
self.w
}
}
#[doc = "Write '1' to disable interrupt for event LASTTX\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LASTTX_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<LASTTX_A> for bool {
#[inline(always)]
fn from(variant: LASTTX_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LASTTX` reader - Write '1' to disable interrupt for event LASTTX"]
pub struct LASTTX_R(crate::FieldReader<bool, LASTTX_A>);
impl LASTTX_R {
pub(crate) fn new(bits: bool) -> Self {
LASTTX_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LASTTX_A {
match self.bits {
false => LASTTX_A::DISABLED,
true => LASTTX_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == LASTTX_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == LASTTX_A::ENABLED
}
}
impl core::ops::Deref for LASTTX_R {
type Target = crate::FieldReader<bool, LASTTX_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to disable interrupt for event LASTTX\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LASTTX_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<LASTTX_AW> for bool {
#[inline(always)]
fn from(variant: LASTTX_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LASTTX` writer - Write '1' to disable interrupt for event LASTTX"]
pub struct LASTTX_W<'a> {
w: &'a mut W,
}
impl<'a> LASTTX_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LASTTX_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(LASTTX_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | ((value as u32 & 0x01) << 24);
self.w
}
}
// Auto-generated (svd2rust) read accessors for INTENCLR: each method
// extracts one interrupt-disable flag from the captured register value
// by shifting its bit position down and masking to a single bit.
impl R {
    #[doc = "Bit 1 - Write '1' to disable interrupt for event STOPPED"]
    #[inline(always)]
    pub fn stopped(&self) -> STOPPED_R {
        STOPPED_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 9 - Write '1' to disable interrupt for event ERROR"]
    #[inline(always)]
    pub fn error(&self) -> ERROR_R {
        ERROR_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 18 - Write '1' to disable interrupt for event SUSPENDED"]
    #[inline(always)]
    pub fn suspended(&self) -> SUSPENDED_R {
        SUSPENDED_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - Write '1' to disable interrupt for event RXSTARTED"]
    #[inline(always)]
    pub fn rxstarted(&self) -> RXSTARTED_R {
        RXSTARTED_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - Write '1' to disable interrupt for event TXSTARTED"]
    #[inline(always)]
    pub fn txstarted(&self) -> TXSTARTED_R {
        TXSTARTED_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 23 - Write '1' to disable interrupt for event LASTRX"]
    #[inline(always)]
    pub fn lastrx(&self) -> LASTRX_R {
        LASTRX_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 24 - Write '1' to disable interrupt for event LASTTX"]
    #[inline(always)]
    pub fn lasttx(&self) -> LASTTX_R {
        LASTTX_R::new(((self.bits >> 24) & 0x01) != 0)
    }
}
// Auto-generated (svd2rust) write accessors for INTENCLR: each method
// hands back a field-writer proxy borrowing this writer, so one bit can
// be staged per call before the register is actually written.
impl W {
    #[doc = "Bit 1 - Write '1' to disable interrupt for event STOPPED"]
    #[inline(always)]
    pub fn stopped(&mut self) -> STOPPED_W {
        STOPPED_W { w: self }
    }
    #[doc = "Bit 9 - Write '1' to disable interrupt for event ERROR"]
    #[inline(always)]
    pub fn error(&mut self) -> ERROR_W {
        ERROR_W { w: self }
    }
    #[doc = "Bit 18 - Write '1' to disable interrupt for event SUSPENDED"]
    #[inline(always)]
    pub fn suspended(&mut self) -> SUSPENDED_W {
        SUSPENDED_W { w: self }
    }
    #[doc = "Bit 19 - Write '1' to disable interrupt for event RXSTARTED"]
    #[inline(always)]
    pub fn rxstarted(&mut self) -> RXSTARTED_W {
        RXSTARTED_W { w: self }
    }
    #[doc = "Bit 20 - Write '1' to disable interrupt for event TXSTARTED"]
    #[inline(always)]
    pub fn txstarted(&mut self) -> TXSTARTED_W {
        TXSTARTED_W { w: self }
    }
    #[doc = "Bit 23 - Write '1' to disable interrupt for event LASTRX"]
    #[inline(always)]
    pub fn lastrx(&mut self) -> LASTRX_W {
        LASTRX_W { w: self }
    }
    #[doc = "Bit 24 - Write '1' to disable interrupt for event LASTTX"]
    #[inline(always)]
    pub fn lasttx(&mut self) -> LASTTX_W {
        LASTTX_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe`: the caller takes responsibility for the raw value written.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Disable interrupt\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intenclr](index.html) module"]
pub struct INTENCLR_SPEC;
// Marker impls describing the INTENCLR register to the generic register
// machinery: a 32-bit register that is readable, writable, and resets to 0.
impl crate::RegisterSpec for INTENCLR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [intenclr::R](R) reader structure"]
impl crate::Readable for INTENCLR_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [intenclr::W](W) writer structure"]
impl crate::Writable for INTENCLR_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets INTENCLR to value 0"]
impl crate::Resettable for INTENCLR_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 30.211213 | 406 | 0.579234 |
e996873144b1fee04f0aa416e1beb3feddf15890 | 18,582 | #[doc = "Register `PWM_ADCTS1` reader"]
// Auto-generated (svd2rust) reader/writer wrappers for PWM_ADCTS1:
// `R` holds a snapshot of the register for field reads, `W` accumulates
// bits for a register write. Both deref to the shared generic wrappers.
pub struct R(crate::R<PWM_ADCTS1_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<PWM_ADCTS1_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<PWM_ADCTS1_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<PWM_ADCTS1_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `PWM_ADCTS1` writer"]
pub struct W(crate::W<PWM_ADCTS1_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<PWM_ADCTS1_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<PWM_ADCTS1_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<PWM_ADCTS1_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "PWM_CH4 Trigger ADC Source Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum TRGSEL4_A {
#[doc = "0: PWM_CH4 zero point"]
_0 = 0,
#[doc = "1: PWM_CH4 period point"]
_1 = 1,
#[doc = "2: PWM_CH4 zero or period point"]
_2 = 2,
#[doc = "3: PWM_CH4 up-count CMPDAT point"]
_3 = 3,
#[doc = "4: PWM_CH4 down-count CMPDAT point"]
_4 = 4,
#[doc = "5: Reserved."]
_5 = 5,
#[doc = "6: Reserved."]
_6 = 6,
#[doc = "7: Reserved."]
_7 = 7,
#[doc = "8: PWM_CH5 up-count CMPDAT point"]
_8 = 8,
#[doc = "9: PWM_CH5 down-count CMPDAT point"]
_9 = 9,
}
impl From<TRGSEL4_A> for u8 {
#[inline(always)]
fn from(variant: TRGSEL4_A) -> Self {
variant as _
}
}
#[doc = "Field `TRGSEL4` reader - PWM_CH4 Trigger ADC Source Select"]
pub struct TRGSEL4_R(crate::FieldReader<u8, TRGSEL4_A>);
impl TRGSEL4_R {
pub(crate) fn new(bits: u8) -> Self {
TRGSEL4_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<TRGSEL4_A> {
match self.bits {
0 => Some(TRGSEL4_A::_0),
1 => Some(TRGSEL4_A::_1),
2 => Some(TRGSEL4_A::_2),
3 => Some(TRGSEL4_A::_3),
4 => Some(TRGSEL4_A::_4),
5 => Some(TRGSEL4_A::_5),
6 => Some(TRGSEL4_A::_6),
7 => Some(TRGSEL4_A::_7),
8 => Some(TRGSEL4_A::_8),
9 => Some(TRGSEL4_A::_9),
_ => None,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
**self == TRGSEL4_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
**self == TRGSEL4_A::_1
}
#[doc = "Checks if the value of the field is `_2`"]
#[inline(always)]
pub fn is_2(&self) -> bool {
**self == TRGSEL4_A::_2
}
#[doc = "Checks if the value of the field is `_3`"]
#[inline(always)]
pub fn is_3(&self) -> bool {
**self == TRGSEL4_A::_3
}
#[doc = "Checks if the value of the field is `_4`"]
#[inline(always)]
pub fn is_4(&self) -> bool {
**self == TRGSEL4_A::_4
}
#[doc = "Checks if the value of the field is `_5`"]
#[inline(always)]
pub fn is_5(&self) -> bool {
**self == TRGSEL4_A::_5
}
#[doc = "Checks if the value of the field is `_6`"]
#[inline(always)]
pub fn is_6(&self) -> bool {
**self == TRGSEL4_A::_6
}
#[doc = "Checks if the value of the field is `_7`"]
#[inline(always)]
pub fn is_7(&self) -> bool {
**self == TRGSEL4_A::_7
}
#[doc = "Checks if the value of the field is `_8`"]
#[inline(always)]
pub fn is_8(&self) -> bool {
**self == TRGSEL4_A::_8
}
#[doc = "Checks if the value of the field is `_9`"]
#[inline(always)]
pub fn is_9(&self) -> bool {
**self == TRGSEL4_A::_9
}
}
impl core::ops::Deref for TRGSEL4_R {
type Target = crate::FieldReader<u8, TRGSEL4_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `TRGSEL4` writer - PWM_CH4 Trigger ADC Source Select"]
pub struct TRGSEL4_W<'a> {
w: &'a mut W,
}
impl<'a> TRGSEL4_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TRGSEL4_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "PWM_CH4 zero point"]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(TRGSEL4_A::_0)
}
#[doc = "PWM_CH4 period point"]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(TRGSEL4_A::_1)
}
#[doc = "PWM_CH4 zero or period point"]
#[inline(always)]
pub fn _2(self) -> &'a mut W {
self.variant(TRGSEL4_A::_2)
}
#[doc = "PWM_CH4 up-count CMPDAT point"]
#[inline(always)]
pub fn _3(self) -> &'a mut W {
self.variant(TRGSEL4_A::_3)
}
#[doc = "PWM_CH4 down-count CMPDAT point"]
#[inline(always)]
pub fn _4(self) -> &'a mut W {
self.variant(TRGSEL4_A::_4)
}
#[doc = "Reserved."]
#[inline(always)]
pub fn _5(self) -> &'a mut W {
self.variant(TRGSEL4_A::_5)
}
#[doc = "Reserved."]
#[inline(always)]
pub fn _6(self) -> &'a mut W {
self.variant(TRGSEL4_A::_6)
}
#[doc = "Reserved."]
#[inline(always)]
pub fn _7(self) -> &'a mut W {
self.variant(TRGSEL4_A::_7)
}
#[doc = "PWM_CH5 up-count CMPDAT point"]
#[inline(always)]
pub fn _8(self) -> &'a mut W {
self.variant(TRGSEL4_A::_8)
}
#[doc = "PWM_CH5 down-count CMPDAT point"]
#[inline(always)]
pub fn _9(self) -> &'a mut W {
self.variant(TRGSEL4_A::_9)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u32 & 0x0f);
self.w
}
}
#[doc = "PWM_CH4 Trigger ADC Enable Bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TRGEN4_A {
#[doc = "0: PWM_CH4 Trigger ADC function Disabled"]
_0 = 0,
#[doc = "1: PWM_CH4 Trigger ADC function Enabled"]
_1 = 1,
}
impl From<TRGEN4_A> for bool {
#[inline(always)]
fn from(variant: TRGEN4_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `TRGEN4` reader - PWM_CH4 Trigger ADC Enable Bit"]
pub struct TRGEN4_R(crate::FieldReader<bool, TRGEN4_A>);
impl TRGEN4_R {
pub(crate) fn new(bits: bool) -> Self {
TRGEN4_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TRGEN4_A {
match self.bits {
false => TRGEN4_A::_0,
true => TRGEN4_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
**self == TRGEN4_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
**self == TRGEN4_A::_1
}
}
impl core::ops::Deref for TRGEN4_R {
type Target = crate::FieldReader<bool, TRGEN4_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `TRGEN4` writer - PWM_CH4 Trigger ADC Enable Bit"]
pub struct TRGEN4_W<'a> {
w: &'a mut W,
}
impl<'a> TRGEN4_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TRGEN4_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "PWM_CH4 Trigger ADC function Disabled"]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(TRGEN4_A::_0)
}
#[doc = "PWM_CH4 Trigger ADC function Enabled"]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(TRGEN4_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7);
self.w
}
}
#[doc = "PWM_CH5 Trigger ADC Source Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum TRGSEL5_A {
#[doc = "0: PWM_CH4 zero point"]
_0 = 0,
#[doc = "1: PWM_CH4 period point"]
_1 = 1,
#[doc = "2: PWM_CH4 zero or period point"]
_2 = 2,
#[doc = "3: PWM_CH4 up-count CMPDAT point"]
_3 = 3,
#[doc = "4: PWM_CH4 down-count CMPDAT point"]
_4 = 4,
#[doc = "5: Reserved."]
_5 = 5,
#[doc = "6: Reserved."]
_6 = 6,
#[doc = "7: Reserved."]
_7 = 7,
#[doc = "8: PWM_CH5 up-count CMPDAT point"]
_8 = 8,
#[doc = "9: PWM_CH5 down-count CMPDAT point"]
_9 = 9,
}
impl From<TRGSEL5_A> for u8 {
#[inline(always)]
fn from(variant: TRGSEL5_A) -> Self {
variant as _
}
}
#[doc = "Field `TRGSEL5` reader - PWM_CH5 Trigger ADC Source Select"]
// Reader newtype over the generic 4-bit field reader (register bits 8:11).
pub struct TRGSEL5_R(crate::FieldReader<u8, TRGSEL5_A>);
impl TRGSEL5_R {
    // Wraps the raw field value extracted by `R::trgsel5()`.
    pub(crate) fn new(bits: u8) -> Self {
        TRGSEL5_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<TRGSEL5_A> {
        // The field is 4 bits wide, so raw values 10-15 are representable but
        // have no enumerated meaning; those map to `None`.
        match self.bits {
            0 => Some(TRGSEL5_A::_0),
            1 => Some(TRGSEL5_A::_1),
            2 => Some(TRGSEL5_A::_2),
            3 => Some(TRGSEL5_A::_3),
            4 => Some(TRGSEL5_A::_4),
            5 => Some(TRGSEL5_A::_5),
            6 => Some(TRGSEL5_A::_6),
            7 => Some(TRGSEL5_A::_7),
            8 => Some(TRGSEL5_A::_8),
            9 => Some(TRGSEL5_A::_9),
            _ => None,
        }
    }
    #[doc = "Checks if the value of the field is `_0`"]
    #[inline(always)]
    pub fn is_0(&self) -> bool {
        **self == TRGSEL5_A::_0
    }
    #[doc = "Checks if the value of the field is `_1`"]
    #[inline(always)]
    pub fn is_1(&self) -> bool {
        **self == TRGSEL5_A::_1
    }
    #[doc = "Checks if the value of the field is `_2`"]
    #[inline(always)]
    pub fn is_2(&self) -> bool {
        **self == TRGSEL5_A::_2
    }
    #[doc = "Checks if the value of the field is `_3`"]
    #[inline(always)]
    pub fn is_3(&self) -> bool {
        **self == TRGSEL5_A::_3
    }
    #[doc = "Checks if the value of the field is `_4`"]
    #[inline(always)]
    pub fn is_4(&self) -> bool {
        **self == TRGSEL5_A::_4
    }
    #[doc = "Checks if the value of the field is `_5`"]
    #[inline(always)]
    pub fn is_5(&self) -> bool {
        **self == TRGSEL5_A::_5
    }
    #[doc = "Checks if the value of the field is `_6`"]
    #[inline(always)]
    pub fn is_6(&self) -> bool {
        **self == TRGSEL5_A::_6
    }
    #[doc = "Checks if the value of the field is `_7`"]
    #[inline(always)]
    pub fn is_7(&self) -> bool {
        **self == TRGSEL5_A::_7
    }
    #[doc = "Checks if the value of the field is `_8`"]
    #[inline(always)]
    pub fn is_8(&self) -> bool {
        **self == TRGSEL5_A::_8
    }
    #[doc = "Checks if the value of the field is `_9`"]
    #[inline(always)]
    pub fn is_9(&self) -> bool {
        **self == TRGSEL5_A::_9
    }
}
// Deref exposes the generic FieldReader API (e.g. `bits()`) on the newtype.
impl core::ops::Deref for TRGSEL5_R {
    type Target = crate::FieldReader<u8, TRGSEL5_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `TRGSEL5` writer - PWM_CH5 Trigger ADC Source Select"]
// Writer proxy borrowing the register writer `W`; writes land in bits 8:11.
pub struct TRGSEL5_W<'a> {
    w: &'a mut W,
}
impl<'a> TRGSEL5_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TRGSEL5_A) -> &'a mut W {
        // All enumerated variants (0-9) fit within the 4-bit mask that
        // `bits()` applies below, so this write stays inside the field.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "PWM_CH4 zero point"]
    #[inline(always)]
    pub fn _0(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_0)
    }
    #[doc = "PWM_CH4 period point"]
    #[inline(always)]
    pub fn _1(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_1)
    }
    #[doc = "PWM_CH4 zero or period point"]
    #[inline(always)]
    pub fn _2(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_2)
    }
    #[doc = "PWM_CH4 up-count CMPDAT point"]
    #[inline(always)]
    pub fn _3(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_3)
    }
    #[doc = "PWM_CH4 down-count CMPDAT point"]
    #[inline(always)]
    pub fn _4(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_4)
    }
    #[doc = "Reserved."]
    #[inline(always)]
    pub fn _5(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_5)
    }
    #[doc = "Reserved."]
    #[inline(always)]
    pub fn _6(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_6)
    }
    #[doc = "Reserved."]
    #[inline(always)]
    pub fn _7(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_7)
    }
    #[doc = "PWM_CH5 up-count CMPDAT point"]
    #[inline(always)]
    pub fn _8(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_8)
    }
    #[doc = "PWM_CH5 down-count CMPDAT point"]
    #[inline(always)]
    pub fn _9(self) -> &'a mut W {
        self.variant(TRGSEL5_A::_9)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 8:11 of the register, then insert the low nibble of
        // `value` at that position.
        self.w.bits = (self.w.bits & !(0x0f << 8)) | ((value as u32 & 0x0f) << 8);
        self.w
    }
}
#[doc = "PWM_CH5 Trigger ADC Enable Bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TRGEN5_A {
    #[doc = "0: PWM_CH5 Trigger ADC function Disabled"]
    _0 = 0,
    #[doc = "1: PWM_CH5 Trigger ADC function Enabled"]
    _1 = 1,
}
impl From<TRGEN5_A> for bool {
#[inline(always)]
fn from(variant: TRGEN5_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `TRGEN5` reader - PWM_CH5 Trigger ADC Enable Bit"]
// Reader newtype over the generic single-bit field reader (register bit 15).
pub struct TRGEN5_R(crate::FieldReader<bool, TRGEN5_A>);
impl TRGEN5_R {
    pub(crate) fn new(bits: bool) -> Self {
        TRGEN5_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TRGEN5_A {
        // A single bit always maps to a valid variant, so no Option here.
        match self.bits {
            false => TRGEN5_A::_0,
            true => TRGEN5_A::_1,
        }
    }
    #[doc = "Checks if the value of the field is `_0`"]
    #[inline(always)]
    pub fn is_0(&self) -> bool {
        **self == TRGEN5_A::_0
    }
    #[doc = "Checks if the value of the field is `_1`"]
    #[inline(always)]
    pub fn is_1(&self) -> bool {
        **self == TRGEN5_A::_1
    }
}
impl core::ops::Deref for TRGEN5_R {
    type Target = crate::FieldReader<bool, TRGEN5_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `TRGEN5` writer - PWM_CH5 Trigger ADC Enable Bit"]
// Writer proxy borrowing the register writer `W`; writes land in bit 15.
pub struct TRGEN5_W<'a> {
    w: &'a mut W,
}
impl<'a> TRGEN5_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TRGEN5_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "PWM_CH5 Trigger ADC function Disabled"]
    #[inline(always)]
    pub fn _0(self) -> &'a mut W {
        self.variant(TRGEN5_A::_0)
    }
    #[doc = "PWM_CH5 Trigger ADC function Enabled"]
    #[inline(always)]
    pub fn _1(self) -> &'a mut W {
        self.variant(TRGEN5_A::_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 15, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 15)) | ((value as u32 & 0x01) << 15);
        self.w
    }
}
// Read accessors: each extracts its field from the raw register value.
impl R {
    #[doc = "Bits 0:3 - PWM_CH4 Trigger ADC Source Select"]
    #[inline(always)]
    pub fn trgsel4(&self) -> TRGSEL4_R {
        TRGSEL4_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bit 7 - PWM_CH4 Trigger ADC Enable Bit"]
    #[inline(always)]
    pub fn trgen4(&self) -> TRGEN4_R {
        TRGEN4_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bits 8:11 - PWM_CH5 Trigger ADC Source Select"]
    #[inline(always)]
    pub fn trgsel5(&self) -> TRGSEL5_R {
        TRGSEL5_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bit 15 - PWM_CH5 Trigger ADC Enable Bit"]
    #[inline(always)]
    pub fn trgen5(&self) -> TRGEN5_R {
        TRGEN5_R::new(((self.bits >> 15) & 0x01) != 0)
    }
}
// Write accessors: each hands out a field-writer proxy over this `W`.
impl W {
    #[doc = "Bits 0:3 - PWM_CH4 Trigger ADC Source Select"]
    #[inline(always)]
    pub fn trgsel4(&mut self) -> TRGSEL4_W {
        TRGSEL4_W { w: self }
    }
    #[doc = "Bit 7 - PWM_CH4 Trigger ADC Enable Bit"]
    #[inline(always)]
    pub fn trgen4(&mut self) -> TRGEN4_W {
        TRGEN4_W { w: self }
    }
    #[doc = "Bits 8:11 - PWM_CH5 Trigger ADC Source Select"]
    #[inline(always)]
    pub fn trgsel5(&mut self) -> TRGSEL5_W {
        TRGSEL5_W { w: self }
    }
    #[doc = "Bit 15 - PWM_CH5 Trigger ADC Enable Bit"]
    #[inline(always)]
    pub fn trgen5(&mut self) -> TRGEN5_W {
        TRGEN5_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "PWM Trigger ADC Source Select Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pwm_adcts1](index.html) module"]
// Marker type tying the generic register machinery to this register.
pub struct PWM_ADCTS1_SPEC;
impl crate::RegisterSpec for PWM_ADCTS1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [pwm_adcts1::R](R) reader structure"]
impl crate::Readable for PWM_ADCTS1_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [pwm_adcts1::W](W) writer structure"]
impl crate::Writable for PWM_ADCTS1_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets PWM_ADCTS1 to value 0"]
impl crate::Resettable for PWM_ADCTS1_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 29.448494 | 431 | 0.550318 |
507c207afe39fc0017b439ad4903ad018c35ebb6 | 2,141 | use bevy::{
prelude::*,
render::{render_graph::RenderGraph, RenderApp, RenderStage},
};
use bevy_mod_debugdump::{render_graph, schedule_graph};
use std::{
io::Write,
process::{Command, Stdio},
};
/// Resource holding the generated Graphviz dot sources, filled in by [`setup`].
pub struct DotGraphs {
    // Dot source for the main app's schedule graph.
    pub schedule_graph: String,
    // Dot source for the render sub-app's schedule graph.
    pub render_schedule_graph: String,
    // Dot source for the render graph itself.
    pub render_graph: String,
}
/// Wraps the app's runner so that, on startup, dot graphs for the main
/// schedule, the render sub-app schedule and the render graph are generated
/// and stored in a [`DotGraphs`] resource before the original runner runs.
pub fn setup(app: &mut App) {
    // Swap the configured runner out for a no-op so we can capture it and
    // invoke it ourselves once the graphs have been generated.
    let actual_runner = std::mem::replace(&mut app.runner, Box::new(|_| {}));
    app.set_runner(move |mut app| {
        // Run one update so schedules/graphs are initialized before dumping.
        app.update();
        let render_app = app.get_sub_app(RenderApp).expect("no render app");
        let render_graph = render_app.world.get_resource::<RenderGraph>().unwrap();
        // Hide bevy_editor_pls' own systems from the schedule dump.
        let schedule_style = schedule_graph::ScheduleGraphStyle {
            system_filter: Some(Box::new(|system| {
                !system.name.starts_with("bevy_editor_pls")
            })),
            ..Default::default()
        };
        let rendergraph_style = render_graph::RenderGraphStyle::default();
        let schedule_graph = schedule_graph::schedule_graph_dot_styled(&app, &schedule_style);
        let render_graph =
            render_graph::render_graph_dot_styled(&*render_graph, &rendergraph_style);
        let render_schedule_graph = schedule_graph::schedule_graph_dot_sub_app_styled(
            &app,
            RenderApp,
            &[&RenderStage::Extract],
            &schedule_style,
        );
        app.insert_resource(DotGraphs {
            schedule_graph,
            render_schedule_graph,
            render_graph,
        });
        // Hand control back to the runner that was originally configured.
        actual_runner(app);
    });
}
/// Renders Graphviz `dot` source into the given output `format` (e.g. "svg",
/// "png") by piping it through the external `dot` executable.
///
/// Returns the bytes `dot` wrote to stdout on success. Fails if the `dot`
/// binary cannot be spawned, if pipe I/O fails, or if `dot` exits with a
/// non-zero status — in which case `dot`'s stderr output is returned as the
/// error message.
pub fn execute_dot(dot: &str, format: &str) -> Result<Vec<u8>, std::io::Error> {
    let mut child = Command::new("dot")
        .arg("-T")
        .arg(format)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        // Capture stderr as well: previously it was inherited, so
        // `output.stderr` below was always empty and the error message
        // reported nothing useful on failure.
        .stderr(Stdio::piped())
        .spawn()?;
    child.stdin.as_mut().unwrap().write_all(dot.as_bytes())?;
    // `wait_with_output` closes stdin before waiting, then collects both
    // stdout and stderr.
    let output = child.wait_with_output()?;
    if !output.status.success() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            String::from_utf8_lossy(&output.stderr),
        ));
    }
    Ok(output.stdout)
}
| 29.328767 | 94 | 0.602989 |
218e01495ea543fb86707adcc36446508b07f3d6 | 13,126 | //! bloom_filter_simple is a library that offers different implementations of a data
//! structure for filtering elements. The data structure is based on the ideas presented by Burton
//! Howard Bloom and is therefore known as bloom filter:
//! > Burton H. Bloom. 1970. Space/time trade-offs in hash coding with allowable errors. Commun.
//! ACM 13, 7 (July 1970), 422–426. DOI: [https://doi.org/10.1145/362686.362692](https://doi.org/10.1145/362686.362692)
//!
//! # Overview
//! Basic description taken from [Wikipedia](https://en.wikipedia.org/wiki/Bloom_filter):
//!
//! > "A Bloom filter is a space-efficient probabilistic data structure, conceived by Burton Howard
//! Bloom in 1970, that is used to test whether an element is a member of a set. False positive
//! matches are possible, but false negatives are not – in other words, a query returns either
//! "possibly in set" or "definitely not in set". Elements can be added to the set, but not removed
//! (though this can be addressed with the counting Bloom filter variant); the more items added, the
//! larger the probability of false positives." ("Bloom filter". Definition, para. 1. In Wikipedia.
//! Retrieved December 02, 2020, from https://en.wikipedia.org/wiki/Bloom_filter)
//!
//! # Bloom Filter Implementations
//! The library offers two basic types of bloom filter implementations.
//!
//! ## Kirsch-Mitzenmacher Bloom Filter (KMBloomFilter)
//! This type of bloom filter uses two hashers to simulate an arbitrary number of additional hash functions.
//!
//! The implementation is based on the work of [Kirsch and Mitzenmacher](https://doi.org/10.1007/11841036_42) \[1\].
//! In their work, they demonstrated that it is possible to apply simulated hash functions in a bloom
//! filter effectively, i.e., without loss in the asymptotic false positive probability.
//! Given two hash functions *h_1(x)* and *h_2(x)*, an *i*-th additional hash function *g_i(x)* can be
//! simulated as *g_i(x) = h_1(x) + i* \* *h_2(x)*.
//!
//! > \[1\] Kirsch A., Mitzenmacher M. (2006) Less Hashing, Same Performance: Building a Better Bloom Filter.
//! In: Azar Y., Erlebach T. (eds) Algorithms – ESA 2006. ESA 2006. Lecture Notes in Computer Science, vol 4168.
//! Springer, Berlin, Heidelberg. https://doi.org/10.1007/11841036_42
//!
//! ## Seeded Bloom Filter (SeededBloomFilter)
//! A bloom filter that uses a single Hasher that can be seeded to simulate an arbitrary number of hash functions.
//! Internally, the implementation uses [ahash::AHasher](https://crates.io/crates/ahash).
//!
//! # Examples
//! In the following, you can find simple examples of how to initialize and use the different bloom filter types.
//!
//! ## Default Bloom Filter
//! The crate offers a default type for a KMBloomFilter that uses *ahash::AHasher* and Rust's
//! *std::collections::hash_map::DefaultHasher* to simulate more hash functions. We compared
//! different hash functions for use by KMBloomFilter, and this combination yielded the best results
//! with respect to the filter's false positive probability.
//!
//! We recommend using DefaultBloomFilter for quickly getting started.
//! ```
//! use bloom_filter_simple::{BloomFilter,DefaultBloomFilter};
//!
//! fn main() {
//! // We plan on storing at most 10,000 elements
//! let desired_capacity = 10_000;
//! // The chance of a false positive increases with each inserted element.
//! // This parameter specifies that the chance should be less than 0.01% (0.0001)
//! // when the desired capacity has been reached. In other words, the chance
//! // that the bloom filter returns true when checking whether a novel element
//! // has been inserted before is less than 0.01% (0.0001).
//! let desired_fp_probability = 0.0001;
//!
//! let mut filter = DefaultBloomFilter::new(desired_capacity, desired_fp_probability);
//!
//! // You can insert any type implementing the Hash trait. The bloom filter does
//! // not store the inserted elements but only their hashes. Hence, there is no
//! // transfer of ownership required.
//! filter.insert(&5i32);
//! filter.insert(&"Some text");
//! filter.insert(&10_000usize);
//!
//! // You can check whether a value has been inserted into the filter before.
//! assert_eq!(false, filter.contains(&3));
//! assert_eq!(true, filter.contains(&5));
//! assert_eq!(true, filter.contains(&"Some text"));
//! }
//! ```
//!
//! ## KMBloomFilter
//! Initialization and application of a KMBloomFilter.
//! ```
//! use bloom_filter_simple::{BloomFilter,KMBloomFilter};
//! use ahash::AHasher;
//! use std::collections::hash_map::DefaultHasher;
//!
//! fn main() {
//! // We plan on storing at most 10,000 elements
//! let desired_capacity = 10_000;
//! // We want to assure that the chance of a false positive is less than 0.01% (0.0001)
//! // for up to desired_capacity elements.
//! let desired_fp_probability = 0.0001;
//!
//! // We initialize a new KMBloomFilter by specifying the desired Hashers as type
//! // parameters. It is possible to use any type that implements Hasher + Default.
//! // Default is required to receive a new instance of a hasher after a value was
//! // hashed, because the Hasher trait does not provide an interface for resetting
//! // a hasher implementing it. This is required to receive the same hash value
//! // when inserting or checking the same element multiple times.
//! let mut filter: KMBloomFilter<AHasher, DefaultHasher> = KMBloomFilter::new(
//! desired_capacity,
//! desired_fp_probability
//! );
//!
//! // You can insert any type implementing the Hash trait. The bloom filter does not
//! // store the inserted elements but only their hashes. Hence, there is no transfer
//! // of ownership required.
//! filter.insert(&5i32);
//! filter.insert(&"Some text");
//! filter.insert(&10_000usize);
//!
//! // You can check whether a value has been inserted into the filter before.
//! assert_eq!(false, filter.contains(&3));
//! assert_eq!(true, filter.contains(&5));
//! assert_eq!(true, filter.contains(&"Some text"));
//! }
//! ```
//!
//! ## SeededBloomFilter
//! Initialization and application of a SeededBloomFilter.
//! ```
//! use bloom_filter_simple::{BloomFilter,SeededBloomFilter};
//!
//! fn main() {
//! // We plan on storing at most 10,000 elements
//! let desired_capacity = 10_000;
//! // We want to assure that the chance of a false positive is less than 0.0001
//! // for up to desired_capacity elements.
//! let desired_fp_probability = 0.0001;
//!
//! // A SeededBloomFilter uses a single seeded ahash::AHasher internally.
//! let mut filter = SeededBloomFilter::new(desired_capacity, desired_fp_probability);
//!
//! // You can insert any type implementing the Hash trait. The bloom filter does
//! // not store the inserted elements but only their hashes. Hence, there is no
//! // transfer of ownership required.
//! filter.insert(&5i32);
//! filter.insert(&"Some text");
//! filter.insert(&10_000usize);
//!
//! // You can check whether a value has been inserted into the filter before.
//! assert_eq!(false, filter.contains(&3));
//! assert_eq!(true, filter.contains(&5));
//! assert_eq!(true, filter.contains(&"Some text"));
//! }
//! ```
use std::{collections::hash_map::DefaultHasher, hash::Hash};
mod bitset;
mod km_bloom_filter;
mod seeded_bloom_filter;
pub use km_bloom_filter::KMBloomFilter;
pub use seeded_bloom_filter::SeededBloomFilter;
/**
A default implementation of KMBloomFilter using ahash::AHasher and collections::hash_map::DefaultHasher.

DefaultBloomFilter is implemented as a type definition `type DefaultBloomFilter = KMBloomFilter<ahash::AHasher, DefaultHasher>;`

# Examples
```
use bloom_filter_simple::{DefaultBloomFilter,BloomFilter};

let desired_capacity = 1_000_000;
let false_positive_probability = 0.0001;
let mut bloom_filter = DefaultBloomFilter::new(desired_capacity, false_positive_probability);

bloom_filter.insert(&"Hello!");
bloom_filter.insert(&34);

assert!(bloom_filter.contains(&"Hello!"));
assert!(bloom_filter.contains(&34));
assert_eq!(false, bloom_filter.contains(&"Not in filter"));
```
*/
pub type DefaultBloomFilter = KMBloomFilter<ahash::AHasher, DefaultHasher>;
/// This trait defines the basic functionality supported by the bloom filters in this library.
///
pub trait BloomFilter {
    /// Insert data into the filter.
    ///
    /// # Intended Behavior
    /// A type implementing BloomFilter should implement *insert* with respect to the following points:
    /// * It should be possible to insert the same element multiple times.
    /// * It should be possible to insert any type implementing Hash.
    ///
    /// # Examples
    /// How *insert* of a type implementing BloomFilter might be used:
    /// ```
    /// use bloom_filter_simple::{BloomFilter, DefaultBloomFilter};
    ///
    /// let mut bloom_filter = DefaultBloomFilter::new(5, 0.001);
    /// bloom_filter.insert(&"Hello!");
    /// bloom_filter.insert(&5);
    /// bloom_filter.insert(&"Hello!");
    ///
    /// assert_eq!(true, bloom_filter.contains(&"Hello!"));
    /// ```
    fn insert<T: Hash>(&mut self, data: &T);
    /// Check whether data is contained in the bloom filter.
    ///
    /// # Intended Behavior
    /// Checking whether data is contained in a bloom filter must never result in a false negative,
    /// i.e., if an element 'x' has been inserted into the filter, contains(&x) will *always* return true.
    ///
    /// In contrast, contains can result in false positive, i.e., contains(&x) can return true, even if
    /// x has not been inserted yet. The chance of this happening depends on the number of elements
    /// in the bloom filter, and the number of hash functions that are used. When initializing one
    /// of the filters provided in this crate, you can specify the desired false positive probability.
    ///
    /// A type implementing BloomFilter should implement *contains* with respect to the following points:
    /// * *contains(&x)* **must** return *true* if *x* has been inserted into the filter
    /// * *contains(&x)* **can** return *true* even if *x* has **not** been inserted into the filter
    /// * It should be possible to check any type implementing Hash.
    ///
    /// # Examples
    /// How contains of a type implementing BloomFilter might be used:
    /// ```
    /// use bloom_filter_simple::{BloomFilter, DefaultBloomFilter};
    ///
    /// let mut bloom_filter = DefaultBloomFilter::new(5, 0.001);
    /// bloom_filter.insert(&"Hello!");
    /// // This assert will never fail
    /// assert_eq!(true, bloom_filter.contains(&"Hello!"));
    /// // This assert can fail with a probability of p(fp) < 0.001
    /// assert_eq!(false, bloom_filter.contains(&"Goodbye!"));
    /// ```
    fn contains<T: Hash>(&self, data: &T) -> bool;
}
/// Calculate the optimal bit count to satisfy the desired constraints.
/// Formula taken from Sagi Kedmi:
/// > S. Kedmi, ["Bloom Filters for the Perplexed"](https://sagi.io/bloom-filters-for-the-perplexed/), July 2017 [Accessed: 02.12.2020]
fn optimal_bit_count(desired_capacity: usize, desired_false_positive_probability: f64) -> usize {
    // m = ceil( -n * ln(p) / ln(2)^2 )
    let numerator = -(desired_capacity as f64 * desired_false_positive_probability.ln());
    let denominator = 2.0f64.ln().powi(2);
    (numerator / denominator).ceil() as usize
}
/// Calculate the optimal number of hashers to satisfy the desired constraints.
/// Formula taken from Sagi Kedmi:
/// > S. Kedmi, ["Bloom Filters for the Perplexed"](https://sagi.io/bloom-filters-for-the-perplexed/), July 2017 [Accessed: 02.12.2020]
fn optimal_number_of_hashers(desired_capacity: usize, bit_count: usize) -> usize {
    // k = round( (m / n) * ln(2) )
    let bits_per_element = bit_count as f64 / desired_capacity as f64;
    (bits_per_element * 2.0f64.ln()).round() as usize
}
/// Approximate number of elements stored.
/// Formula taken from Wikipedia:
/// > Wikipedia, ["Bloom filter"](https://en.wikipedia.org/wiki/Bloom_filter#Approximating_the_number_of_items_in_a_Bloom_filter) [Accessed: 02.12.2020]
fn approximate_element_count(
    number_of_hashers: usize,
    bits_per_hasher: usize,
    number_of_ones: usize,
) -> f64 {
    // n* = -(m/k) * ln(1 - X/m), with bits_per_hasher playing the m/k role.
    let total_bits = (number_of_hashers * bits_per_hasher) as f64;
    let fill_ratio = number_of_ones as f64 / total_bits;
    -(bits_per_hasher as f64) * (1.0 - fill_ratio).ln()
}
/// Return the current approximate false positive probability which depends on the current
/// number of elements in the filter.
/// Formula taken from Sagi Kedmi:
/// > S. Kedmi, ["Bloom Filters for the Perplexed"](https://sagi.io/bloom-filters-for-the-perplexed/), July 2017 [Accessed: 02.12.2020]
///
/// Computes (1 - e^(-n / b))^k for `k` hashers, `b` bits per hasher and an
/// (approximate) element count `n`.
fn approximate_false_positive_probability(
    number_of_hashers: usize,
    bits_per_hasher: usize,
    element_count: f64,
) -> f64 {
    // `f64::exp` is the idiomatic (and more direct) form of `E.powf(x)`.
    (1.0 - (-element_count / bits_per_hasher as f64).exp()).powf(number_of_hashers as f64)
}
| 47.730909 | 152 | 0.6881 |
7a3adc4478905bfb5f8c59be717d61bee92cc14f | 2,688 | use parking_lot::RwLock;
use std::sync::Arc;
use nimiq_block_albatross::TendermintIdentifier;
use nimiq_handel::evaluator::WeightedVote;
use nimiq_handel::partitioner::BinomialPartitioner;
use nimiq_handel::protocol::Protocol;
use nimiq_handel::store::ReplaceStore;
use super::super::registry::ValidatorRegistry;
use super::contribution::TendermintContribution;
use super::verifier::TendermintVerifier;
#[derive(std::fmt::Debug)]
/// Handel `Protocol` instantiation used to aggregate Tendermint contributions.
pub(crate) struct TendermintAggregationProtocol {
    // Verifies incoming contributions for this round (built from the registry,
    // the Tendermint identifier and the validator merkle root in `new`).
    verifier: Arc<<Self as Protocol>::Verifier>,
    // Binomial partitioner built over `node_id` and the validator count.
    partitioner: Arc<<Self as Protocol>::Partitioner>,
    // Weighted-vote evaluator (configured with the signature `threshold`).
    evaluator: Arc<<Self as Protocol>::Evaluator>,
    // Contribution store shared (behind a lock) with the evaluator.
    store: Arc<RwLock<<Self as Protocol>::Store>>,
    // Registry of all validators participating in the aggregation.
    registry: Arc<<Self as Protocol>::Registry>,
    // This node's own index/slot, as passed to `new`.
    node_id: usize,
}
impl TendermintAggregationProtocol {
    /// Builds the full component stack (partitioner, store, evaluator,
    /// verifier) for one Tendermint aggregation and wires it together.
    pub(super) fn new(
        validators: Arc<ValidatorRegistry>,
        node_id: usize,
        threshold: usize,
        id: TendermintIdentifier,
        validator_merkle_root: Vec<u8>,
    ) -> Self {
        let partitioner = Arc::new(BinomialPartitioner::new(node_id, validators.len()));
        let store = Arc::new(RwLock::new(ReplaceStore::<
            BinomialPartitioner,
            <Self as Protocol>::Contribution,
        >::new(Arc::clone(&partitioner))));
        // The evaluator shares the store, registry and partitioner handles.
        let evaluator = Arc::new(WeightedVote::new(
            Arc::clone(&store),
            Arc::clone(&validators),
            Arc::clone(&partitioner),
            threshold,
        ));
        let verifier = Arc::new(TendermintVerifier::new(
            Arc::clone(&validators),
            id,
            validator_merkle_root,
        ));
        Self {
            verifier,
            partitioner,
            evaluator,
            store,
            registry: validators,
            node_id,
        }
    }
}
impl Protocol for TendermintAggregationProtocol {
    // Concrete Handel component types used for Tendermint aggregation.
    type Contribution = TendermintContribution;
    type Registry = ValidatorRegistry;
    type Verifier = TendermintVerifier<Self::Registry>;
    type Store = ReplaceStore<Self::Partitioner, Self::Contribution>;
    type Evaluator = WeightedVote<Self::Store, Self::Registry, Self::Partitioner>;
    type Partitioner = BinomialPartitioner;
    // The accessors below hand out shared (`Arc`) handles to the components
    // that were created in `new`.
    fn registry(&self) -> Arc<Self::Registry> {
        self.registry.clone()
    }
    fn verifier(&self) -> Arc<Self::Verifier> {
        self.verifier.clone()
    }
    fn store(&self) -> Arc<RwLock<Self::Store>> {
        self.store.clone()
    }
    fn evaluator(&self) -> Arc<Self::Evaluator> {
        self.evaluator.clone()
    }
    fn partitioner(&self) -> Arc<Self::Partitioner> {
        self.partitioner.clone()
    }
    fn node_id(&self) -> usize {
        self.node_id
    }
}
1dfd0974f72f5468605149b34fd235df6c822498 | 6,780 | use std::collections::HashMap;
use itertools::Itertools;
pub mod util;
pub mod permutation;
/// Builds the ADFGVX Polybius square as a map from each plaintext symbol
/// (A-Z then 0-9) to its two-letter ADFGVX coordinate pair.
fn create_polybe_square() -> HashMap<char, Vec<char>> {
    let vec: Vec<char> = vec!['A', 'D', 'F', 'G', 'V', 'X'];
    // All ordered pairs over the six ADFGVX letters: 6 * 6 = 36 coordinate
    // pairs, exactly one per alphanumeric symbol.
    let polybe_square = permutation::permutations(&vec, 2);
    let mut polybe_vec: Vec<_> = polybe_square.into_iter().collect();
    // Sort the pairs so the symbol -> coordinate assignment is deterministic.
    // (Previously written as `&polybe_vec.sort();`, a useless borrow of `()`.)
    polybe_vec.sort();
    let alphabet: Vec<char> = vec!['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '0','1', '2', '3', '4', '5', '6', '7', '8', '9'];
    // `zip_eq` panics if the sides differ in length, guarding the 36/36 invariant.
    let translate_dictionary: HashMap<_, _> =
        alphabet.into_iter().zip_eq(polybe_vec.into_iter()).collect();
    translate_dictionary
}
/// First ADFGVX stage: substitutes every alphanumeric character of
/// `plaintext` with its two-letter Polybius coordinates; characters without
/// an entry in the square are reported on stdout and dropped.
fn polybe_encryption(mut plaintext: &String) -> String {
    // NOTE(review): `plaintext` is a shared `&String`, so this call cannot
    // actually mutate the caller's string; elsewhere in this file
    // `remove_whitespace` is used for its return value, which is discarded
    // here — verify the intended signature/usage.
    util::string_operation::remove_whitespace(&mut plaintext);
    let translate_dictionary: HashMap<_, _> = create_polybe_square();
    let mut ciphered_string = String::new();
    let mut is_present: bool;
    for character in plaintext.chars() {
        // Look up the two-letter coordinate pair for this character, if any.
        let resulting_vec_char: Option<&Vec<char>> = translate_dictionary.get(&character);
        match resulting_vec_char{
            Some(_) => is_present = true, // There is a corresponding character in the dictionnary
            None => is_present = false,
        }
        if is_present == true{
            let result_string: String = resulting_vec_char.unwrap().iter().cloned().collect::<String>();
            ciphered_string.push_str(&result_string);
        }
        else {println!("Special character {} excluded", &character);}
    }
    println!("");
    return ciphered_string;
}
/// Appends one empty `CharColumn` per character of the key `secret` to
/// `column_vec` and returns the extended vector.
fn init_column_vectors(mut column_vec: Vec<util::CharColumn>, secret: &String) -> Vec<util::CharColumn> {
    column_vec.extend(secret.chars().map(util::CharColumn::new));
    column_vec
}
/// Distributes `ciphered_string` round-robin over the key columns and pads
/// with 'X' until every column holds the same number of characters.
fn fill_column_vectors(mut column_vec: Vec<util::CharColumn>, ciphered_string: String, secret: &String) -> Vec<util::CharColumn> {
    let mut cpt: usize = 0;
    // NOTE(review): an empty `secret` makes `cpt % secret.len()` divide by
    // zero — callers must pass a non-empty key.
    while (cpt < ciphered_string.len()) || ((cpt%secret.len()) != 0 ){
        // While not all the ciphered string has been stored in vectors
        // and while all vector has not the same size (i.e counter not
        // multiple of secret length
        let mut temp: char = 'X';
        if cpt < ciphered_string.len() {
            // Byte indexing is sound here because the ciphertext consists only
            // of ASCII ADFGVX letters produced by `polybe_encryption`.
            temp = ciphered_string.as_bytes()[cpt] as char;
        }
        let column_to_modify: &mut util::CharColumn = &mut column_vec[cpt%(secret.len())];
        column_to_modify.add_char_to_vec(temp);
        cpt = cpt + 1;
    }
    column_vec
}
/// Encrypts `plaintext` with the ADFGVX cipher: Polybius substitution via
/// [`polybe_encryption`], then a columnar transposition keyed on `secret`
/// (columns are emitted in alphabetical order of their key characters).
pub fn encrypt_adfgvx(plaintext: &String, secret: &String) -> String {
    let ciphered_string: String = polybe_encryption(plaintext);
    let mut column_vec: Vec<util::CharColumn> = vec![];
    column_vec = init_column_vectors(column_vec, &secret);
    column_vec = fill_column_vectors(column_vec, ciphered_string, &secret);
    // Sort columns by key character for the transposition read-out.
    // (Previously written as `&column_vec.sort();`, a useless borrow of `()`.)
    column_vec.sort();
    let mut ciphered_text = String::new();
    for vec in column_vec {
        for _char in vec.vec_char() {
            ciphered_text.push(*_char);
        }
    }
    ciphered_text
}
/// Splits the whitespace-cleaned ciphertext into `secret.len()` columns of
/// `len / secret.len()` characters each, in reading order.
fn get_vectors_from_ciphertext(mut ciphered_text: &String, secret: &String) -> Vec<util::CharColumn> {
    // NOTE(review): the `mut` on the `ciphered_text` binding is never used.
    let mut is_present: bool;
    let ciphered_text_cleaned: String = util::string_operation::remove_whitespace(&ciphered_text);
    let mut column_vec: Vec<util::CharColumn> = vec![];
    column_vec = init_column_vectors(column_vec, &secret);
    // Integer division: if the length is not an exact multiple, the remainder
    // characters end up appended to the last column because the column
    // iterator cannot advance past it.
    let expected_char_by_vec = ciphered_text_cleaned.len()/secret.len();
    let mut vec_iter = column_vec.iter_mut();
    let mut current_vec: &mut util::CharColumn = vec_iter.next().unwrap();
    for _char in ciphered_text_cleaned.chars() {
        // Move to the next column once the current one has reached capacity.
        if current_vec.vec_char().len() >= expected_char_by_vec {
            let temp = vec_iter.next();
            match temp {
                Some(_) => is_present = true,
                None => is_present = false,
            }
            if is_present == true {
                current_vec = temp.unwrap();
            }
        }
        current_vec.add_char_to_vec(_char);
    }
    column_vec
}
/// Undoes the columnar transposition: splits the ciphertext under the
/// alphabetically sorted key, then reads the columns back row by row in the
/// order of the original (unsorted) key.
fn build_preciphered_string(ciphered_text: &String, secret: &String) -> String {
    let mut pre_ciphered_text = String::new();
    // Columns were written out with the key sorted, so split under the sorted key.
    let secret_scrambled = util::string_operation::sort_str_by_alphabetical_order(&secret);
    let column_vec: Vec<util::CharColumn> = get_vectors_from_ciphertext(ciphered_text, &secret_scrambled);
    // Re-read one row at a time, following the original key order.
    // NOTE(review): a key with repeated characters would match several
    // columns per key char here — verify keys are assumed duplicate-free.
    for index in 0..column_vec.first().unwrap().vec_char().len() {
        for char_ in secret.chars(){
            for vec_char_ in &column_vec{
                if vec_char_.key_char() == &char_ {
                    pre_ciphered_text.push(vec_char_.vec_char()[index])
                }
            }
        }
    }
    pre_ciphered_text
}
/// Reverse lookup in the Polybius square: returns the key whose value equals
/// `value`, or `None` if no entry matches.
///
/// Short-circuits at the first match instead of scanning the whole map (the
/// square's values are unique, so at most one entry can match anyway).
fn find_key_for_value<'a>(map: &'a HashMap<char, Vec<char>>, value: &Vec<char>) -> Option<&'a char> {
    map.iter().find(|(_, v)| *v == value).map(|(key, _)| key)
}
/// Decrypts an ADFGVX `ciphertext` produced by [`encrypt_adfgvx`] with the
/// same `secret` key: first the columnar transposition is undone, then each
/// pair of ADFGVX coordinates is looked up in the Polybius square.
///
/// Coordinate pairs without an entry in the square are skipped. An empty
/// ciphertext yields an empty string — the previous `0..len - 1` range
/// underflowed (panicked) on empty input.
pub fn decrypt_adfgvx(ciphertext: &String, secret: &String) -> String {
    let polybe_square = create_polybe_square();
    let mut deciphered_text = String::new();
    let pre_ciphered_text = build_preciphered_string(ciphertext, &secret);
    // The pre-ciphered text is pure ASCII (ADFGVX letters), so decoding two
    // characters at a time is safe. A trailing odd character is ignored,
    // matching the previous stepped-range behavior.
    let coordinates: Vec<char> = pre_ciphered_text.chars().collect();
    for pair in coordinates.chunks_exact(2) {
        if let Some(deciphered_char) = find_key_for_value(&polybe_square, &pair.to_vec()) {
            deciphered_text.push(*deciphered_char);
        }
    }
    deciphered_text
}
/// Returns the per-character frequency of `string` as integer percentages
/// (truncating division), sorted by descending frequency with ties broken by
/// character so the result is deterministic. An empty input yields an empty
/// vector.
pub fn return_frequence(string: &String) -> Vec<(char, u32)> {
    let mut counts: HashMap<char, u32> = HashMap::new();
    for char_ in string.chars() {
        *counts.entry(char_).or_insert(0) += 1;
    }
    // Total character count, used as the percentage denominator. (Safe for
    // empty input: with no entries the map below never divides.)
    let total: u32 = counts.values().sum();
    let mut result_vec: Vec<(char, u32)> = counts
        .into_iter()
        .map(|(character, count)| (character, (count * 100) / total))
        .collect();
    // Previously: ascending sort + reverse, which left tie order dependent on
    // HashMap iteration order (nondeterministic across runs).
    result_vec.sort_by(|a, b| b.1.cmp(&a.1).then(a.0.cmp(&b.0)));
    result_vec
}
#[cfg(test)]
mod tests {
    use super::*;
    // Known-answer round-trip vectors for the ADFGVX implementation: each
    // ciphertext was produced by this implementation and must decrypt back.
    #[test]
    fn test_encrypt_decrypt_adfgvx() {
        assert_eq!("FGFADGDDFFFXFFX", encrypt_adfgvx(&String::from("BONJOUR"), &String::from("HOCUS")));
        assert_eq!("BONJOUR", decrypt_adfgvx(&String::from("FGFADGDDFFFXFFX"), &String::from("HOCUS")));
        assert_eq!("FGFFGGAFFDXFDFAVFDAFDGDGDXVDXGFFAAFAAAFFVAXVAAXFGAFGGAADAXFADXVF", encrypt_adfgvx(&String::from("ON EST LA FRERO ON EST LA TU VA FAIRE QUOI"), &String::from("ABCDEFGH")));
        assert_eq!("ONESTLAFREROONESTLATUVAFAIREQUOI", decrypt_adfgvx(&String::from("FGFFGGAFFDXFDFAVFDAFDGDGDXVDXGFFAAFAAAFFVAXVAAXFGAFGGAADAXFADXVF"), &String::from("ABCDEFGH")))
    }
}
| 36.256684 | 214 | 0.651475 |
7994c2c28714de5d941682a69eb819e379e05ecc | 5,077 | //! > This program is a sequel to 08-shader-uniforms-adapt. Be sure to have read it first.
//!
//! This example shows you how to lookup dynamically uniforms into shaders to implement various kind
//! of situations. This feature is very likely to be interesting for anyone who would like to
//! implement a GUI, where the interface of the shader programs are not known statically, for
//! instance.
//!
//! This example looks up the time and the triangle position on the fly, without using the uniform
//! interface.
//!
//! Press the <a>, <s>, <d>, <z> or the arrow keys to move the triangle on the screen.
//! Press <escape> to quit or close the window.
//!
//! https://docs.rs/luminance
mod common;
use crate::common::{Semantics, Vertex, VertexColor, VertexPosition};
use glfw::{Action, Context as _, Key, WindowEvent};
use luminance::context::GraphicsContext as _;
use luminance::pipeline::PipelineState;
use luminance::render_state::RenderState;
use luminance::shader::Program;
use luminance::tess::{Mode, TessBuilder};
use luminance_glfw::GlfwSurface;
use luminance_windowing::{WindowDim, WindowOpt};
use std::time::Instant;
const VS: &'static str = include_str!("displacement-vs.glsl");
const FS: &'static str = include_str!("displacement-fs.glsl");
// Only one triangle this time.
const TRI_VERTICES: [Vertex; 3] = [
  // Bottom-right corner, pure red.
  Vertex {
    pos: VertexPosition::new([0.5, -0.5]),
    rgb: VertexColor::new([1., 0., 0.]),
  },
  // Top corner, pure green.
  Vertex {
    pos: VertexPosition::new([0.0, 0.5]),
    rgb: VertexColor::new([0., 1., 0.]),
  },
  // Bottom-left corner, pure blue.
  Vertex {
    pos: VertexPosition::new([-0.5, -0.5]),
    rgb: VertexColor::new([0., 0., 1.]),
  },
];
fn main() {
  let mut surface = GlfwSurface::new_gl33(
    WindowDim::Windowed {
      width: 960,
      height: 540,
    },
    "Hello, world!",
    WindowOpt::default(),
  )
  .expect("GLFW surface creation");
  // notice that we don’t set a uniform interface here: we’re going to look it up on the fly
  let mut program = Program::<_, Semantics, (), ()>::from_strings(&mut surface, VS, None, None, FS)
    .expect("program creation")
    .ignore_warnings();
  let triangle = TessBuilder::new(&mut surface)
    .and_then(|b| b.add_vertices(TRI_VERTICES))
    .and_then(|b| b.set_mode(Mode::Triangle))
    .and_then(|b| b.build())
    .unwrap();
  let mut back_buffer = surface.back_buffer().unwrap();
  // Triangle offset controlled by the keyboard, in normalized coordinates.
  let mut triangle_pos = [0., 0.];
  // Reference instant used to feed the shader's time uniform.
  let start_t = Instant::now();
  let mut resize = false;
  'app: loop {
    // Drain all pending window events before rendering this frame.
    surface.window.glfw.poll_events();
    for (_, event) in surface.events_rx.try_iter() {
      match event {
        WindowEvent::Close | WindowEvent::Key(Key::Escape, _, Action::Release, _) => break 'app,
        // Movement keys nudge the triangle by 0.1 along the given axis.
        WindowEvent::Key(Key::A, _, action, _) | WindowEvent::Key(Key::Left, _, action, _)
          if action == Action::Press || action == Action::Repeat =>
        {
          triangle_pos[0] -= 0.1;
        }
        WindowEvent::Key(Key::D, _, action, _) | WindowEvent::Key(Key::Right, _, action, _)
          if action == Action::Press || action == Action::Repeat =>
        {
          triangle_pos[0] += 0.1;
        }
        WindowEvent::Key(Key::Z, _, action, _) | WindowEvent::Key(Key::Up, _, action, _)
          if action == Action::Press || action == Action::Repeat =>
        {
          triangle_pos[1] += 0.1;
        }
        WindowEvent::Key(Key::S, _, action, _) | WindowEvent::Key(Key::Down, _, action, _)
          if action == Action::Press || action == Action::Repeat =>
        {
          triangle_pos[1] -= 0.1;
        }
        WindowEvent::FramebufferSize(..) => {
          resize = true;
        }
        _ => (),
      }
    }
    // After a framebuffer resize, the back buffer must be recreated.
    if resize {
      back_buffer = surface.back_buffer().unwrap();
      resize = false;
    }
    // Elapsed seconds since startup, as an f32 for the shader.
    let elapsed = start_t.elapsed();
    let t64 = elapsed.as_secs() as f64 + (elapsed.subsec_millis() as f64 * 1e-3);
    let t = t64 as f32;
    let render = surface.pipeline_gate().pipeline(
      &back_buffer,
      &PipelineState::default(),
      |_, mut shd_gate| {
        shd_gate.shade(&mut program, |mut iface, _, mut rdr_gate| {
          // Dynamic lookup: the uniforms are queried by name every frame
          // instead of being declared in a static uniform interface.
          let (time_u, triangle_pos_u) = {
            let mut query = iface.query().unwrap();
            let time_u = query.ask("t");
            let triangle_pos_u = query.ask("triangle_pos");
            (time_u, triangle_pos_u)
          };
          if let Ok(ref time_u) = time_u {
            iface.set(time_u, t);
          }
          if let Ok(ref triangle_pos_u) = triangle_pos_u {
            iface.set(triangle_pos_u, triangle_pos);
          }
          // the `ask` function is type-safe: if you try to get a uniform which type is not
          // correctly reified from the source, you get a TypeMismatch runtime error
          //if let Err(e) = query.ask::<i32>("triangle_pos") {
          //  eprintln!("{:?}", e);
          //}
          rdr_gate.render(&RenderState::default(), |mut tess_gate| {
            tess_gate.render(&triangle);
          });
        });
      },
    );
    // Present the frame, or leave the loop if the pipeline failed.
    if render.is_ok() {
      surface.window.swap_buffers();
    } else {
      break 'app;
    }
  }
}
| 30.957317 | 100 | 0.594643 |
f99ed2cd86a248e2e2466f70abf6120b7d685201 | 9,170 | use makepad_render::*;
use crate::scrollbar::*;
use crate::scrollview::*;
use crate::tab::*;
#[derive(Clone)]
/// Tab-strip widget: a scrollable row of `Tab`s above a content page view,
/// with an overlay view used to float a tab while it is being dragged.
pub struct TabControl {
    // Horizontally scrollable view that hosts the tab strip.
    pub tabs_view: ScrollView,
    // Retained tabs, keyed by their sequential draw-order id.
    pub tabs: Elements<usize, Tab, Tab>,
    // Overlay view that floats the tab currently being dragged.
    pub drag_tab_view: View,
    // The floating copy of the dragged tab (constructed with a raised z).
    pub drag_tab: Tab,
    // View that hosts the selected tab's page content.
    pub page_view: View,
    // NOTE(review): constructed in `new` but not drawn by any method in this
    // impl — presumably a hover/drop highlight for callers; confirm before use.
    pub hover: Quad,
    //pub tab_fill_color: ColorId,
    // Quad that fills the strip space to the right of the last tab.
    pub tab_fill: Quad,
    pub animator: Animator,
    // While a drag is in flight: latest finger-move event and the tab's id.
    pub _dragging_tab: Option<(FingerMoveEvent, usize)>,
    // Next tab id handed out during the current draw pass.
    pub _tab_id_alloc: usize,
    // Selection seen in the current vs. previous draw pass; compared in
    // `end_tabs` to scroll a newly selected tab into view.
    pub _tab_now_selected: Option<usize>,
    pub _tab_last_selected: Option<usize>,
    // Whether this control currently has keyboard focus.
    pub _focussed: bool
}
#[derive(Clone, PartialEq)]
/// Events emitted by `TabControl::handle_tab_control` for the owner to act on.
pub enum TabControlEvent {
    // Nothing of interest happened this frame.
    None,
    // A tab is being dragged; carries the finger-move event and the tab id.
    TabDragMove {fe: FingerMoveEvent, tab_id: usize},
    // A tab drag ended; carries the finger-up event and the tab id.
    TabDragEnd {fe: FingerUpEvent, tab_id: usize},
    // A tab was selected (by click, or as fallback when another tab closes).
    TabSelect {tab_id: usize},
    // The user requested that a tab be closed.
    TabClose {tab_id: usize}
}
impl TabControl {
    /// Builds a TabControl with a horizontally scrollable tab strip, an
    /// overlay view for tab dragging, and a page view for content.
    pub fn new(cx: &mut Cx) -> Self {
        Self {
            tabs_view: ScrollView {
                // Horizontal scrollbar only; vertical finger scrolling is
                // redirected onto the horizontal axis.
                scroll_h: Some(ScrollBar {
                    bar_size: 8.0,
                    smoothing: Some(0.15),
                    use_vertical_finger_scroll: true,
                    ..ScrollBar::new(cx)
                }),
                ..ScrollView::new(cx)
            },
            page_view: View::new(cx),
            tabs: Elements::new(Tab::new(cx)),
            drag_tab: Tab {
                // Raised z so the dragged tab renders above the strip.
                z: 10.,
                ..Tab::new(cx)
            },
            drag_tab_view: View {
                is_overlay: true,
                ..View::new(cx)
            },
            hover: Quad {
                color: Color::parse_name("purple").unwrap(),
                ..Quad::new(cx)
            },
            //tab_fill_color: Color_bg_normal::id(),
            tab_fill: Quad::new(cx),
            animator: Animator::default(),
            _dragging_tab: None,
            _tab_now_selected: None,
            _tab_last_selected: None,
            _focussed: false,
            _tab_id_alloc: 0
        }
    }
    /// Registers the live style values this control reads at draw time.
    pub fn style(cx: &mut Cx) {
        live!(cx, r#"
            self::color_bg_normal: #34;
            self::tab_control_style: Style {
                crate::scrollbar::bar_size: 8.0;
            }
        "#)
    }
    /// Routes `event` to the scroll view and every tab, translating per-tab
    /// `TabEvent`s into a single `TabControlEvent` for the caller; the second
    /// match applies selection/close bookkeeping before returning it.
    pub fn handle_tab_control(&mut self, cx: &mut Cx, event: &mut Event) -> TabControlEvent {
        let mut tab_control_event = TabControlEvent::None;
        self.tabs_view.handle_scroll_view(cx, event);
        for (id, tab) in self.tabs.enumerate() {
            match tab.handle_tab(cx, event) {
                TabEvent::Select => {
                    self.page_view.redraw_view_area(cx);
                    // deselect the other tabs
                    tab_control_event = TabControlEvent::TabSelect {tab_id: *id}
                },
                TabEvent::DragMove(fe) => {
                    // Remember which tab is in flight and where the finger is.
                    self._dragging_tab = Some((fe.clone(), *id));
                    // flag our view as dirty, to trigger
                    //cx.redraw_child_area(Area::All);
                    self.tabs_view.redraw_view_area(cx);
                    self.drag_tab_view.redraw_view_area(cx);
                    tab_control_event = TabControlEvent::TabDragMove {fe: fe, tab_id: *id};
                },
                TabEvent::DragEnd(fe) => {
                    self._dragging_tab = None;
                    self.drag_tab_view.redraw_view_area(cx);
                    tab_control_event = TabControlEvent::TabDragEnd {fe, tab_id: *id};
                },
                TabEvent::Closing => { // this tab is closing. select the visible one
                    if tab._is_selected { // only do anything if we are selected
                        // Prefer the previous tab when the last one closes;
                        // otherwise fall through to the following tab.
                        let next_sel = if *id == self._tab_id_alloc - 1 { // last id
                            if *id > 0 {
                                *id - 1
                            }
                            else {
                                *id
                            }
                        }
                        else {
                            *id + 1
                        };
                        if *id != next_sel {
                            tab_control_event = TabControlEvent::TabSelect {tab_id: next_sel};
                        }
                    }
                },
                TabEvent::Close => {
                    // Sooooo someone wants to close the tab
                    tab_control_event = TabControlEvent::TabClose {tab_id: *id};
                },
                _ => ()
            }
        };
        match tab_control_event {
            TabControlEvent::TabSelect {tab_id} => {
                self._focussed = true;
                // NOTE(review): only the non-selected tabs are updated here;
                // presumably the selected tab updated itself in handle_tab.
                for (id, tab) in self.tabs.enumerate() {
                    if tab_id != *id {
                        tab.set_tab_selected(cx, false);
                        tab.set_tab_focus(cx, true);
                    }
                }
            },
            TabControlEvent::TabClose {..} => { // needed to clear animation state
                self.tabs.clear(cx, | _, _ | ());
            },
            _ => ()
        };
        tab_control_event
    }
    /// Returns the screen rectangle of every tab, in tab order.
    pub fn get_tab_rects(&mut self, cx: &Cx) -> Vec<Rect> {
        let mut rects = Vec::new();
        for tab in self.tabs.iter() {
            rects.push(tab.get_tab_rect(cx))
        }
        return rects
    }
    /// Propagates keyboard-focus state to this control and all of its tabs.
    pub fn set_tab_control_focus(&mut self, cx: &mut Cx, focus: bool) {
        self._focussed = focus;
        for tab in self.tabs.iter() {
            tab.set_tab_focus(cx, focus);
        }
    }
    /// Rectangle of the scrollable tab strip.
    pub fn get_tabs_view_rect(&mut self, cx: &Cx) -> Rect {
        self.tabs_view.get_rect(cx)
    }
    /// Rectangle of the page area (currently returned unmodified).
    pub fn get_content_drop_rect(&mut self, cx: &Cx) -> Rect {
        let pr = self.page_view.get_rect(cx);
        // we now need to change the y and the new height
        Rect {
            x: pr.x,
            y: pr.y,
            w: pr.w,
            h: pr.h
        }
    }
    // data free APIs for the win!
    /// Starts the tab-strip draw pass; resets per-pass selection/id state.
    /// Returns `Err(())` when the view does not need redrawing.
    pub fn begin_tabs(&mut self, cx: &mut Cx) -> ViewRedraw {
        //cx.begin_turtle(&Layout{
        if let Err(_) = self.tabs_view.begin_view(cx, Layout {
            walk: Walk::wh(Width::Fill, Height::Compute),
            ..Layout::default()
        }) {
            return Err(())
        }
        self._tab_now_selected = None;
        self._tab_id_alloc = 0;
        Ok(())
    }
    /// Fetches (or lazily instantiates) the tab for the next sequential id and
    /// updates its label, close button and selection state before drawing.
    pub fn get_draw_tab(&mut self, cx: &mut Cx, label: &str, selected: bool, closeable: bool) -> &mut Tab {
        let new_tab = self.tabs.get(self._tab_id_alloc).is_none();
        let tab = self.tabs.get_draw(cx, self._tab_id_alloc, | _cx, tmpl | tmpl.clone());
        if selected {
            self._tab_now_selected = Some(self._tab_id_alloc);
        }
        self._tab_id_alloc += 1;
        tab.label = label.to_string();
        tab.is_closeable = closeable;
        if new_tab {
            // Fresh tab: set its state directly, without animating.
            tab.set_tab_state(cx, selected, self._focussed);
        }
        else { // animate the tabstate
            tab.set_tab_selected(cx, selected);
        }
        tab
    }
    /// Convenience wrapper: fetch the tab and draw it immediately.
    pub fn draw_tab(&mut self, cx: &mut Cx, label: &str, selected: bool, closeable: bool) {
        let tab = self.get_draw_tab(cx, label, selected, closeable);
        tab.draw_tab(cx);
    }
    /// Finishes the tab-strip pass: fills the remaining strip space, draws the
    /// drag overlay if a tab is in flight, and scrolls a newly selected tab
    /// into view.
    pub fn end_tabs(&mut self, cx: &mut Cx) {
        self.tab_fill.color = live_color!(cx, self::color_bg_normal);
        self.tab_fill.draw_quad(cx, Walk::wh(Width::Fill, Height::Fill));
        self.tabs.sweep(cx, | _, _ | ());
        if let Some((fe, id)) = &self._dragging_tab {
            if let Ok(()) = self.drag_tab_view.begin_view(cx, Layout::abs_origin_zero()) {
                // Position the floating tab so it stays under the finger.
                self.drag_tab.abs_origin = Some(Vec2 {x: fe.abs.x - fe.rel_start.x, y: fe.abs.y - fe.rel_start.y});
                let origin_tab = self.tabs.get_draw(cx, *id, | _cx, tmpl | tmpl.clone());
                self.drag_tab.label = origin_tab.label.clone();
                self.drag_tab.is_closeable = origin_tab.is_closeable;
                self.drag_tab.draw_tab(cx);
                self.drag_tab_view.end_view(cx);
            }
        }
        live_style_begin!(cx, self::tab_control_style);
        self.tabs_view.end_view(cx);
        live_style_end!(cx, self::tab_control_style);
        if self._tab_now_selected != self._tab_last_selected {
            // lets scroll the thing into view
            if let Some(tab_id) = self._tab_now_selected {
                if let Some(tab) = self.tabs.get(tab_id) {
                    let tab_rect = tab._bg_area.get_rect(cx);
                    self.tabs_view.scroll_into_view_abs(cx, tab_rect);
                }
            }
            self._tab_last_selected = self._tab_now_selected;
        }
    }
    /// Begins drawing the page content area below the tab strip.
    pub fn begin_tab_page(&mut self, cx: &mut Cx) -> ViewRedraw {
        cx.turtle_new_line();
        self.page_view.begin_view(cx, Layout::default())
    }
    /// Ends the page content area.
    pub fn end_tab_page(&mut self, cx: &mut Cx) {
        self.page_view.end_view(cx);
        //cx.end_turtle(Area::Empty);
        // if we are in draggable tab state,
        // draw our draggable tab
    }
}
| 34.86692 | 115 | 0.493566 |
487999e6ed62bbf0978e44f21347fa726d2d474a | 836 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// #30527 - We were not generating arms with guards in certain cases.
/// Maps an `Option<i8>` to a status code: 0 for values above 100,
/// -1 for any other present value, -2 for `None`.
/// The pattern guard is kept deliberately — it exercises the #30527 codegen path.
fn match_with_guard(x: Option<i8>) -> i8 {
    let code = match x {
        Some(big) if big > 100 => 0,
        Some(_) => -1,
        None => -2,
    };
    code
}
fn main() {
assert_eq!(match_with_guard(Some(111)), 0);
assert_eq!(match_with_guard(Some(2)), -1);
assert_eq!(match_with_guard(None), -2);
}
| 32.153846 | 69 | 0.672249 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.